Diffstat (limited to 'venv/lib/python3.11/site-packages/pygments')
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__init__.py82
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__main__.py17
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pycbin3800 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pycbin802 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pycbin30095 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pycbin3036 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pycbin3497 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pycbin4788 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pycbin42662 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pycbin1716 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pycbin3717 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pycbin5023 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pycbin4878 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pycbin13799 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pycbin7448 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pycbin7496 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pycbin33830 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pycbin15682 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/cmdline.py668
-rw-r--r--venv/lib/python3.11/site-packages/pygments/console.py70
-rw-r--r--venv/lib/python3.11/site-packages/pygments/filter.py71
-rw-r--r--venv/lib/python3.11/site-packages/pygments/filters/__init__.py940
-rw-r--r--venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pycbin40049 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatter.py124
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__init__.py158
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pycbin7737 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pycbin4215 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pycbin4447 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pycbin7820 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pycbin42707 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pycbin30048 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pycbin6361 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pycbin21749 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pycbin7577 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pycbin3157 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pycbin6812 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pycbin9620 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pycbin5987 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pycbin16365 -> 0 bytes
-rwxr-xr-xvenv/lib/python3.11/site-packages/pygments/formatters/_mapping.py23
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py108
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/groff.py170
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/html.py990
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/img.py684
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/irc.py154
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/latex.py521
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/other.py161
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/pangomarkup.py83
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/rtf.py146
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/svg.py188
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/terminal.py127
-rw-r--r--venv/lib/python3.11/site-packages/pygments/formatters/terminal256.py338
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexer.py959
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__init__.py363
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/__init__.cpython-311.pycbin16321 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-311.pycbin1245 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-311.pycbin17662 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-311.pycbin11746 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-311.pycbin98063 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-311.pycbin16459 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-311.pycbin9395 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-311.pycbin8282 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-311.pycbin76750 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-311.pycbin88650 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-311.pycbin9488 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mapping.cpython-311.pycbin64699 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-311.pycbin18019 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-311.pycbin20410 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-311.pycbin34103 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-311.pycbin66412 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-311.pycbin12353 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-311.pycbin6445 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-311.pycbin23207 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-311.pycbin35748 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-311.pycbin22536 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-311.pycbin9962 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-311.pycbin21248 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-311.pycbin8900 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-311.pycbin1448 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-311.pycbin3003 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-311.pycbin30806 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/actionscript.cpython-311.pycbin11171 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ada.cpython-311.pycbin5441 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/agile.cpython-311.pycbin1568 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/algebra.cpython-311.pycbin11474 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ambient.cpython-311.pycbin3103 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-311.pycbin2188 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ampl.cpython-311.pycbin4168 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-311.pycbin19148 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apl.cpython-311.pycbin2529 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/archetype.cpython-311.pycbin9499 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arrow.cpython-311.pycbin3652 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arturo.cpython-311.pycbin9826 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asc.cpython-311.pycbin2167 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asm.cpython-311.pycbin36031 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asn1.cpython-311.pycbin4655 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/automation.cpython-311.pycbin18404 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bare.cpython-311.pycbin3063 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/basic.cpython-311.pycbin25365 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bdd.cpython-311.pycbin2300 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/berry.cpython-311.pycbin3721 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bibtex.cpython-311.pycbin5633 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/blueprint.cpython-311.pycbin5529 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/boa.cpython-311.pycbin3665 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bqn.cpython-311.pycbin2381 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/business.cpython-311.pycbin21975 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-311.pycbin16485 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_like.cpython-311.pycbin26448 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/capnproto.cpython-311.pycbin2433 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/carbon.cpython-311.pycbin3632 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cddl.cpython-311.pycbin4569 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/chapel.cpython-311.pycbin4397 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/clean.cpython-311.pycbin6282 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/comal.cpython-311.pycbin3351 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/compiled.cpython-311.pycbin2401 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/configs.cpython-311.pycbin45582 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/console.cpython-311.pycbin4370 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cplint.cpython-311.pycbin1897 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/crystal.cpython-311.pycbin15302 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/csound.cpython-311.pycbin14637 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/css.cpython-311.pycbin22095 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/d.cpython-311.pycbin8306 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dalvik.cpython-311.pycbin4821 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/data.cpython-311.pycbin23101 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dax.cpython-311.pycbin6310 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/devicetree.cpython-311.pycbin4109 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/diff.cpython-311.pycbin5708 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dns.cpython-311.pycbin3822 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dotnet.cpython-311.pycbin34708 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dsls.cpython-311.pycbin34488 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dylan.cpython-311.pycbin9977 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ecl.cpython-311.pycbin5761 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/eiffel.cpython-311.pycbin3054 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elm.cpython-311.pycbin3232 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elpi.cpython-311.pycbin7140 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/email.cpython-311.pycbin6065 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/erlang.cpython-311.pycbin21324 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/esoteric.cpython-311.pycbin10102 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ezhil.cpython-311.pycbin4140 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/factor.cpython-311.pycbin17030 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fantom.cpython-311.pycbin8445 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/felix.cpython-311.pycbin8370 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fift.cpython-311.pycbin1961 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/floscript.cpython-311.pycbin3071 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/forth.cpython-311.pycbin5448 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fortran.cpython-311.pycbin8932 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/foxpro.cpython-311.pycbin20716 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/freefem.cpython-311.pycbin12954 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/func.cpython-311.pycbin3502 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/functional.cpython-311.pycbin1246 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/futhark.cpython-311.pycbin3928 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-311.pycbin1380 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gdscript.cpython-311.pycbin7341 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/go.cpython-311.pycbin3490 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-311.pycbin7819 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graph.cpython-311.pycbin4295 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphics.cpython-311.pycbin30259 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphql.cpython-311.pycbin4668 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphviz.cpython-311.pycbin2314 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gsql.cpython-311.pycbin4073 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haskell.cpython-311.pycbin29499 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haxe.cpython-311.pycbin23185 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hdl.cpython-311.pycbin17153 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hexdump.cpython-311.pycbin3797 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/html.cpython-311.pycbin19621 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/idl.cpython-311.pycbin12582 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/igor.cpython-311.pycbin25839 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/inferno.cpython-311.pycbin3398 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/installers.cpython-311.pycbin12913 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-311.pycbin48888 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/iolang.cpython-311.pycbin2228 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/j.cpython-311.pycbin4305 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/javascript.cpython-311.pycbin55789 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jmespath.cpython-311.pycbin2583 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jslt.cpython-311.pycbin3880 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-311.pycbin5178 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsx.cpython-311.pycbin2556 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/julia.cpython-311.pycbin11399 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jvm.cpython-311.pycbin62899 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kuin.cpython-311.pycbin10894 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kusto.cpython-311.pycbin3019 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ldap.cpython-311.pycbin6802 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lean.cpython-311.pycbin4256 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lilypond.cpython-311.pycbin7770 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lisp.cpython-311.pycbin112065 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-311.pycbin22387 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/make.cpython-311.pycbin7122 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/markup.cpython-311.pycbin59704 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/math.cpython-311.pycbin1217 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/matlab.cpython-311.pycbin56538 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/maxima.cpython-311.pycbin3295 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/meson.cpython-311.pycbin3705 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mime.cpython-311.pycbin11141 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/minecraft.cpython-311.pycbin10481 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mips.cpython-311.pycbin3526 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ml.cpython-311.pycbin24533 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modeling.cpython-311.pycbin11931 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modula2.cpython-311.pycbin27075 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/monte.cpython-311.pycbin5122 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mosel.cpython-311.pycbin6890 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ncl.cpython-311.pycbin46068 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nimrod.cpython-311.pycbin6235 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nit.cpython-311.pycbin2857 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nix.cpython-311.pycbin5382 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/oberon.cpython-311.pycbin3915 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/objective.cpython-311.pycbin19880 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ooc.cpython-311.pycbin3187 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/openscad.cpython-311.pycbin3774 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/other.cpython-311.pycbin3031 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parasail.cpython-311.pycbin2925 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parsers.cpython-311.pycbin27244 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pascal.cpython-311.pycbin24892 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pawn.cpython-311.pycbin7535 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/perl.cpython-311.pycbin39228 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/phix.cpython-311.pycbin18494 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/php.cpython-311.pycbin14686 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pointless.cpython-311.pycbin2429 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pony.cpython-311.pycbin3455 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/praat.cpython-311.pycbin10712 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/procfile.cpython-311.pycbin1720 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prolog.cpython-311.pycbin10311 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/promql.cpython-311.pycbin3513 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prql.cpython-311.pycbin8257 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ptx.cpython-311.pycbin3941 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/python.cpython-311.pycbin43268 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/q.cpython-311.pycbin5843 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qlik.cpython-311.pycbin3611 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qvt.cpython-311.pycbin5470 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/r.cpython-311.pycbin6388 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rdf.cpython-311.pycbin12091 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rebol.cpython-311.pycbin18951 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/resource.cpython-311.pycbin3517 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ride.cpython-311.pycbin4593 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rita.cpython-311.pycbin1619 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rnc.cpython-311.pycbin2055 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/roboconf.cpython-311.pycbin2449 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/robotframework.cpython-311.pycbin32606 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ruby.cpython-311.pycbin22906 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rust.cpython-311.pycbin7123 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sas.cpython-311.pycbin7189 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/savi.cpython-311.pycbin3902 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scdoc.cpython-311.pycbin3075 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scripting.cpython-311.pycbin60381 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sgf.cpython-311.pycbin2238 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/shell.cpython-311.pycbin38703 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sieve.cpython-311.pycbin2744 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/slash.cpython-311.pycbin8439 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-311.pycbin6763 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smithy.cpython-311.pycbin3132 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smv.cpython-311.pycbin2896 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/snobol.cpython-311.pycbin2572 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/solidity.cpython-311.pycbin3528 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sophia.cpython-311.pycbin3712 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/special.cpython-311.pycbin5912 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/spice.cpython-311.pycbin3233 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sql.cpython-311.pycbin41388 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-311.pycbin2090 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/stata.cpython-311.pycbin4849 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/supercollider.cpython-311.pycbin4064 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tal.cpython-311.pycbin2923 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tcl.cpython-311.pycbin5495 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teal.cpython-311.pycbin3685 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/templates.cpython-311.pycbin90635 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teraterm.cpython-311.pycbin5706 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/testing.cpython-311.pycbin10166 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/text.cpython-311.pycbin1845 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textedit.cpython-311.pycbin8892 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textfmts.cpython-311.pycbin16464 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/theorem.cpython-311.pycbin14122 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-311.pycbin4229 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tlb.cpython-311.pycbin1886 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tls.cpython-311.pycbin2060 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tnt.cpython-311.pycbin14887 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-311.pycbin1901 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/typoscript.cpython-311.pycbin7389 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ul4.cpython-311.pycbin8112 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/unicon.cpython-311.pycbin12498 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/urbi.cpython-311.pycbin6032 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/usd.cpython-311.pycbin4417 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/varnish.cpython-311.pycbin7130 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verification.cpython-311.pycbin4072 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verifpal.cpython-311.pycbin3160 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vip.cpython-311.pycbin5962 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vyper.cpython-311.pycbin4889 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/web.cpython-311.pycbin1620 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webassembly.cpython-311.pycbin5693 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webidl.cpython-311.pycbin8601 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webmisc.cpython-311.pycbin44936 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wgsl.cpython-311.pycbin11108 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/whiley.cpython-311.pycbin3596 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wowtoc.cpython-311.pycbin3432 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wren.cpython-311.pycbin3119 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/x10.cpython-311.pycbin2357 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/xorg.cpython-311.pycbin1448 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yang.cpython-311.pycbin4189 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yara.cpython-311.pycbin2779 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/zig.cpython-311.pycbin3973 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_ada_builtins.py103
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_asy_builtins.py1644
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_cl_builtins.py231
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_cocoa_builtins.py75
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_csound_builtins.py1780
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_css_builtins.py558
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_julia_builtins.py411
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_lasso_builtins.py5326
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_lilypond_builtins.py4932
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_lua_builtins.py285
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_mapping.py580
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_mql_builtins.py1171
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_mysql_builtins.py1335
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_openedge_builtins.py2600
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_php_builtins.py3325
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_postgres_builtins.py739
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_qlik_builtins.py666
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_scheme_builtins.py1609
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_scilab_builtins.py3093
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_sourcemod_builtins.py1151
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_stan_builtins.py648
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_stata_builtins.py457
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_tsql_builtins.py1003
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_usd_builtins.py112
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_vbscript_builtins.py279
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/_vim_builtins.py1938
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/actionscript.py245
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ada.py144
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/agile.py23
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/algebra.py302
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ambient.py76
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/amdgpu.py54
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ampl.py88
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/apdlexer.py592
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/apl.py104
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/archetype.py319
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/arrow.py117
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/arturo.py250
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/asc.py55
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/asm.py1037
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/asn1.py179
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/automation.py381
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/bare.py102
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/basic.py665
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/bdd.py58
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/berry.py99
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/bibtex.py159
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/blueprint.py174
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/boa.py97
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/bqn.py110
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/business.py626
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/c_cpp.py411
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/c_like.py666
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/capnproto.py75
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/carbon.py96
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/cddl.py173
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/chapel.py140
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/clean.py179
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/comal.py80
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/compiled.py34
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/configs.py1435
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/console.py114
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/cplint.py44
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/crystal.py365
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/csound.py468
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/css.py602
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/d.py258
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/dalvik.py127
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/data.py767
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/dax.py136
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/devicetree.py109
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/diff.py168
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/dns.py106
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/dotnet.py841
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/dsls.py982
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/dylan.py281
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ecl.py145
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/eiffel.py69
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/elm.py124
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/elpi.py173
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/email.py132
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/erlang.py528
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/esoteric.py301
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ezhil.py77
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/factor.py364
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/fantom.py251
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/felix.py276
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/fift.py67
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/floscript.py82
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/forth.py179
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/fortran.py213
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/foxpro.py427
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/freefem.py894
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/func.py108
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/functional.py20
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/futhark.py106
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/gcodelexer.py35
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/gdscript.py188
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/go.py98
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/grammar_notation.py265
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/graph.py109
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/graphics.py798
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/graphql.py177
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/graphviz.py59
-rwxr-xr-xvenv/lib/python3.11/site-packages/pygments/lexers/gsql.py104
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/haskell.py871
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/haxe.py937
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/hdl.py465
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/hexdump.py102
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/html.py623
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/idl.py285
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/igor.py436
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/inferno.py96
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/installers.py327
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/int_fiction.py1382
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/iolang.py62
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/j.py152
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/javascript.py1588
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/jmespath.py68
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/jslt.py95
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/jsonnet.py168
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/jsx.py76
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/julia.py294
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/jvm.py1820
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/kuin.py333
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/kusto.py94
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ldap.py157
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/lean.py122
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/lilypond.py226
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/lisp.py2848
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/macaulay2.py1755
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/make.py211
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/markup.py1550
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/math.py20
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/matlab.py3308
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/maxima.py85
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/meson.py140
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/mime.py210
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/minecraft.py394
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/mips.py128
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ml.py960
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/modeling.py369
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/modula2.py1580
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/monte.py204
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/mosel.py447
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ncl.py893
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py200
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/nit.py64
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/nix.py145
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/oberon.py120
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/objective.py505
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ooc.py85
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/openscad.py97
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/other.py40
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/parasail.py79
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/parsers.py801
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/pascal.py641
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/pawn.py202
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/perl.py733
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/phix.py364
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/php.py335
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/pointless.py71
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/pony.py93
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/praat.py304
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/procfile.py42
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/prolog.py309
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/promql.py175
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/prql.py252
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ptx.py120
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/python.py1198
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/q.py188
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/qlik.py117
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/qvt.py151
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/r.py190
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/rdf.py466
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/rebol.py419
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/resource.py84
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ride.py139
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/rita.py43
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/rnc.py67
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py81
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py552
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ruby.py516
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/rust.py223
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/sas.py227
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/savi.py170
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py86
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/scripting.py1286
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/sgf.py60
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/shell.py920
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/sieve.py78
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/slash.py184
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py196
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/smithy.py78
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/smv.py78
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/snobol.py82
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/solidity.py87
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/sophia.py103
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/special.py116
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/spice.py70
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/sql.py1027
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/srcinfo.py62
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/stata.py171
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/supercollider.py95
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/tal.py77
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/tcl.py149
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/teal.py89
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/templates.py2296
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/teraterm.py326
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/testing.py210
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/text.py26
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/textedit.py202
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/textfmts.py436
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/theorem.py391
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/thingsdb.py116
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/tlb.py57
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/tls.py55
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/tnt.py271
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/trafficscript.py51
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/typoscript.py217
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/ul4.py267
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/unicon.py411
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/urbi.py145
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/usd.py90
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/varnish.py189
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/verification.py114
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/verifpal.py66
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/vip.py152
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/vyper.py141
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/web.py23
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/webassembly.py120
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/webidl.py299
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/webmisc.py1010
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/wgsl.py407
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/whiley.py116
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/wowtoc.py120
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/wren.py99
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/x10.py67
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/xorg.py37
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/yang.py104
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/yara.py70
-rw-r--r--venv/lib/python3.11/site-packages/pygments/lexers/zig.py124
-rw-r--r--venv/lib/python3.11/site-packages/pygments/modeline.py43
-rw-r--r--venv/lib/python3.11/site-packages/pygments/plugin.py88
-rw-r--r--venv/lib/python3.11/site-packages/pygments/regexopt.py91
-rw-r--r--venv/lib/python3.11/site-packages/pygments/scanner.py104
-rw-r--r--venv/lib/python3.11/site-packages/pygments/sphinxext.py239
-rw-r--r--venv/lib/python3.11/site-packages/pygments/style.py203
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__init__.py61
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/__init__.cpython-311.pycbin3056 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/_mapping.cpython-311.pycbin3607 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/abap.cpython-311.pycbin1205 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol.cpython-311.pycbin2570 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol_nu.cpython-311.pycbin2585 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/arduino.cpython-311.pycbin3753 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/autumn.cpython-311.pycbin2712 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/borland.cpython-311.pycbin2196 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/bw.cpython-311.pycbin1850 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/colorful.cpython-311.pycbin3364 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/default.cpython-311.pycbin3005 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/dracula.cpython-311.pycbin3018 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/emacs.cpython-311.pycbin3033 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly.cpython-311.pycbin3129 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly_grayscale.cpython-311.pycbin3339 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/fruity.cpython-311.pycbin1962 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gh_dark.cpython-311.pycbin3825 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gruvbox.cpython-311.pycbin4012 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/igor.cpython-311.pyc  bin 1198 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/inkpot.cpython-311.pyc  bin 2842 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lightbulb.cpython-311.pyc  bin 4370 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lilypond.cpython-311.pyc  bin 2767 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lovelace.cpython-311.pyc  bin 3899 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/manni.cpython-311.pyc  bin 3264 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/material.cpython-311.pyc  bin 4363 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/monokai.cpython-311.pyc  bin 4308 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/murphy.cpython-311.pyc  bin 3314 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/native.cpython-311.pyc  bin 2827 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/nord.cpython-311.pyc  bin 5113 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/onedark.cpython-311.pyc  bin 2213 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_dark.cpython-311.pyc  bin 4643 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_light.cpython-311.pyc  bin 4649 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/pastie.cpython-311.pyc  bin 3175 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/perldoc.cpython-311.pyc  bin 2884 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rainbow_dash.cpython-311.pyc  bin 4241 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rrt.cpython-311.pyc  bin 1450 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/sas.cpython-311.pyc  bin 1888 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/solarized.cpython-311.pyc  bin 5904 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/staroffice.cpython-311.pyc  bin 1218 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_dark.cpython-311.pyc  bin 1763 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_light.cpython-311.pyc  bin 1771 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/tango.cpython-311.pyc  bin 5639 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/trac.cpython-311.pyc  bin 2471 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vim.cpython-311.pyc  bin 2398 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vs.cpython-311.pyc  bin 1511 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/xcode.cpython-311.pyc  bin 1846 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/__pycache__/zenburn.cpython-311.pyc  bin 3203 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/_mapping.py  53
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/abap.py  32
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/algol.py  65
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/algol_nu.py  65
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/arduino.py  100
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/autumn.py  67
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/borland.py  53
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/bw.py  52
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/colorful.py  83
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/default.py  76
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/dracula.py  90
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/emacs.py  75
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/friendly.py  76
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/friendly_grayscale.py  80
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/fruity.py  47
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/gh_dark.py  113
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/gruvbox.py  118
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/igor.py  32
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/inkpot.py  72
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/lightbulb.py  110
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/lilypond.py  62
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/lovelace.py  100
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/manni.py  79
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/material.py  124
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/monokai.py  112
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/murphy.py  82
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/native.py  70
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/nord.py  156
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/onedark.py  63
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/paraiso_dark.py  124
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/paraiso_light.py  124
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/pastie.py  78
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/perldoc.py  73
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/rainbow_dash.py  95
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/rrt.py  39
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/sas.py  46
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/solarized.py  144
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/staroffice.py  31
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/stata_dark.py  42
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/stata_light.py  42
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/tango.py  143
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/trac.py  66
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/vim.py  67
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/vs.py  41
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/xcode.py  53
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/styles/zenburn.py  83
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/token.py  214
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/unistring.py  153
-rw-r--r--  venv/lib/python3.11/site-packages/pygments/util.py  330
642 files changed, 0 insertions, 119810 deletions
diff --git a/venv/lib/python3.11/site-packages/pygments/__init__.py b/venv/lib/python3.11/site-packages/pygments/__init__.py
deleted file mode 100644
index 6b77c46..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__init__.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""
- Pygments
- ~~~~~~~~
-
- Pygments is a syntax highlighting package written in Python.
-
- It is a generic syntax highlighter for general use in all kinds of software
- such as forum systems, wikis or other applications that need to prettify
- source code. Highlights are:
-
- * a wide range of common languages and markup formats is supported
- * special attention is paid to details, increasing quality by a fair amount
-    * support for new languages and formats is added easily
- * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
- formats that PIL supports, and ANSI sequences
- * it is usable as a command-line tool and as a library
- * ... and it highlights even Brainfuck!
-
- The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
-
- .. _Pygments master branch:
- https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-from io import StringIO, BytesIO
-
-__version__ = '2.17.2'
-__docformat__ = 'restructuredtext'
-
-__all__ = ['lex', 'format', 'highlight']
-
-
-def lex(code, lexer):
- """
- Lex `code` with the `lexer` (must be a `Lexer` instance)
- and return an iterable of tokens. Currently, this only calls
- `lexer.get_tokens()`.
- """
- try:
- return lexer.get_tokens(code)
- except TypeError:
- # Heuristic to catch a common mistake.
- from pygments.lexer import RegexLexer
- if isinstance(lexer, type) and issubclass(lexer, RegexLexer):
- raise TypeError('lex() argument must be a lexer instance, '
- 'not a class')
- raise
-
-
-def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin
- """
- Format ``tokens`` (an iterable of tokens) with the formatter ``formatter``
- (a `Formatter` instance).
-
- If ``outfile`` is given and a valid file object (an object with a
- ``write`` method), the result will be written to it, otherwise it
- is returned as a string.
- """
- try:
- if not outfile:
- realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
- formatter.format(tokens, realoutfile)
- return realoutfile.getvalue()
- else:
- formatter.format(tokens, outfile)
- except TypeError:
- # Heuristic to catch a common mistake.
- from pygments.formatter import Formatter
- if isinstance(formatter, type) and issubclass(formatter, Formatter):
- raise TypeError('format() argument must be a formatter instance, '
- 'not a class')
- raise
-
-
-def highlight(code, lexer, formatter, outfile=None):
- """
- This is the most high-level highlighting function. It combines `lex` and
- `format` in one function.
- """
- return format(lex(code, lexer), formatter, outfile)
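
The three helpers removed above compose simply: ``highlight()`` is just ``format(lex(code, lexer), formatter, outfile)``. A minimal usage sketch, assuming only the stock ``PythonLexer`` and ``HtmlFormatter`` classes that ship with Pygments:

# Sketch: the lex/format/highlight API from pygments/__init__.py.
from pygments import highlight, lex, format as pygments_format
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

code = 'print("hello")\n'

# One-shot form: returns an HTML string because no outfile is given.
html = highlight(code, PythonLexer(), HtmlFormatter())

# Equivalent two-step form: lex() yields (tokentype, value) pairs and
# format() renders them, again to a string since outfile is None.
tokens = lex(code, PythonLexer())
html_again = pygments_format(tokens, HtmlFormatter())
assert html == html_again
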
diff --git a/venv/lib/python3.11/site-packages/pygments/__main__.py b/venv/lib/python3.11/site-packages/pygments/__main__.py
deleted file mode 100644
index 5eb2c74..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__main__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
- pygments.__main__
- ~~~~~~~~~~~~~~~~~
-
- Main entry point for ``python -m pygments``.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-import pygments.cmdline
-
-try:
- sys.exit(pygments.cmdline.main(sys.argv))
-except KeyboardInterrupt:
- sys.exit(1)
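
The wrapper above simply forwards ``sys.argv`` to ``pygments.cmdline.main``, so the same entry point can also be driven programmatically; a small sketch (the argument list is illustrative):

# Sketch: calling the entry point that ``python -m pygments`` delegates to.
# main() skips args[0] (the program name), so pass a placeholder first element.
from pygments.cmdline import main

exit_code = main(["pygmentize", "-V"])   # prints the Pygments version, returns 0
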
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 19a66d8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pyc
deleted file mode 100644
index 9e17973..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pyc
deleted file mode 100644
index c8701eb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pyc
deleted file mode 100644
index f6053f7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pyc
deleted file mode 100644
index 9875b8a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pyc
deleted file mode 100644
index ed4b5ff..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pyc
deleted file mode 100644
index 4bf40ea..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pyc
deleted file mode 100644
index 8115386..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pyc
deleted file mode 100644
index 33a25b3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pyc
deleted file mode 100644
index d8be4e4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pyc
deleted file mode 100644
index 2d356e9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pyc
deleted file mode 100644
index f8dbe70..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pyc
deleted file mode 100644
index e4fd479..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pyc
deleted file mode 100644
index 1617029..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pyc
deleted file mode 100644
index 1334a03..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pyc
deleted file mode 100644
index 3f1a0e1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/cmdline.py b/venv/lib/python3.11/site-packages/pygments/cmdline.py
deleted file mode 100644
index 435231e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/cmdline.py
+++ /dev/null
@@ -1,668 +0,0 @@
-"""
- pygments.cmdline
- ~~~~~~~~~~~~~~~~
-
- Command line interface.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import os
-import sys
-import shutil
-import argparse
-from textwrap import dedent
-
-from pygments import __version__, highlight
-from pygments.util import ClassNotFound, OptionError, docstring_headline, \
- guess_decode, guess_decode_from_terminal, terminal_encoding, \
- UnclosingTextIOWrapper
-from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
- load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
-from pygments.lexers.special import TextLexer
-from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
-from pygments.formatters import get_all_formatters, get_formatter_by_name, \
- load_formatter_from_file, get_formatter_for_filename, find_formatter_class
-from pygments.formatters.terminal import TerminalFormatter
-from pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter
-from pygments.filters import get_all_filters, find_filter_class
-from pygments.styles import get_all_styles, get_style_by_name
-
-
-def _parse_options(o_strs):
- opts = {}
- if not o_strs:
- return opts
- for o_str in o_strs:
- if not o_str.strip():
- continue
- o_args = o_str.split(',')
- for o_arg in o_args:
- o_arg = o_arg.strip()
- try:
- o_key, o_val = o_arg.split('=', 1)
- o_key = o_key.strip()
- o_val = o_val.strip()
- except ValueError:
- opts[o_arg] = True
- else:
- opts[o_key] = o_val
- return opts
-
-
-def _parse_filters(f_strs):
- filters = []
- if not f_strs:
- return filters
- for f_str in f_strs:
- if ':' in f_str:
- fname, fopts = f_str.split(':', 1)
- filters.append((fname, _parse_options([fopts])))
- else:
- filters.append((f_str, {}))
- return filters
-
-
-def _print_help(what, name):
- try:
- if what == 'lexer':
- cls = get_lexer_by_name(name)
- print("Help on the %s lexer:" % cls.name)
- print(dedent(cls.__doc__))
- elif what == 'formatter':
- cls = find_formatter_class(name)
- print("Help on the %s formatter:" % cls.name)
- print(dedent(cls.__doc__))
- elif what == 'filter':
- cls = find_filter_class(name)
- print("Help on the %s filter:" % name)
- print(dedent(cls.__doc__))
- return 0
- except (AttributeError, ValueError):
- print("%s not found!" % what, file=sys.stderr)
- return 1
-
-
-def _print_list(what):
- if what == 'lexer':
- print()
- print("Lexers:")
- print("~~~~~~~")
-
- info = []
- for fullname, names, exts, _ in get_all_lexers():
- tup = (', '.join(names)+':', fullname,
- exts and '(filenames ' + ', '.join(exts) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* %s\n %s %s') % i)
-
- elif what == 'formatter':
- print()
- print("Formatters:")
- print("~~~~~~~~~~~")
-
- info = []
- for cls in get_all_formatters():
- doc = docstring_headline(cls)
- tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
- '(filenames ' + ', '.join(cls.filenames) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* %s\n %s %s') % i)
-
- elif what == 'filter':
- print()
- print("Filters:")
- print("~~~~~~~~")
-
- for name in get_all_filters():
- cls = find_filter_class(name)
- print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
-
- elif what == 'style':
- print()
- print("Styles:")
- print("~~~~~~~")
-
- for name in get_all_styles():
- cls = get_style_by_name(name)
- print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
-
-
-def _print_list_as_json(requested_items):
- import json
- result = {}
- if 'lexer' in requested_items:
- info = {}
- for fullname, names, filenames, mimetypes in get_all_lexers():
- info[fullname] = {
- 'aliases': names,
- 'filenames': filenames,
- 'mimetypes': mimetypes
- }
- result['lexers'] = info
-
- if 'formatter' in requested_items:
- info = {}
- for cls in get_all_formatters():
- doc = docstring_headline(cls)
- info[cls.name] = {
- 'aliases': cls.aliases,
- 'filenames': cls.filenames,
- 'doc': doc
- }
- result['formatters'] = info
-
- if 'filter' in requested_items:
- info = {}
- for name in get_all_filters():
- cls = find_filter_class(name)
- info[name] = {
- 'doc': docstring_headline(cls)
- }
- result['filters'] = info
-
- if 'style' in requested_items:
- info = {}
- for name in get_all_styles():
- cls = get_style_by_name(name)
- info[name] = {
- 'doc': docstring_headline(cls)
- }
- result['styles'] = info
-
- json.dump(result, sys.stdout)
-
-def main_inner(parser, argns):
- if argns.help:
- parser.print_help()
- return 0
-
- if argns.V:
- print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus '
- 'Chajdas and contributors.' % __version__)
- return 0
-
- def is_only_option(opt):
- return not any(v for (k, v) in vars(argns).items() if k != opt)
-
- # handle ``pygmentize -L``
- if argns.L is not None:
- arg_set = set()
- for k, v in vars(argns).items():
- if v:
- arg_set.add(k)
-
- arg_set.discard('L')
- arg_set.discard('json')
-
- if arg_set:
- parser.print_help(sys.stderr)
- return 2
-
- # print version
- if not argns.json:
- main(['', '-V'])
- allowed_types = {'lexer', 'formatter', 'filter', 'style'}
- largs = [arg.rstrip('s') for arg in argns.L]
- if any(arg not in allowed_types for arg in largs):
- parser.print_help(sys.stderr)
- return 0
- if not largs:
- largs = allowed_types
- if not argns.json:
- for arg in largs:
- _print_list(arg)
- else:
- _print_list_as_json(largs)
- return 0
-
- # handle ``pygmentize -H``
- if argns.H:
- if not is_only_option('H'):
- parser.print_help(sys.stderr)
- return 2
- what, name = argns.H
- if what not in ('lexer', 'formatter', 'filter'):
- parser.print_help(sys.stderr)
- return 2
- return _print_help(what, name)
-
- # parse -O options
- parsed_opts = _parse_options(argns.O or [])
-
- # parse -P options
- for p_opt in argns.P or []:
- try:
- name, value = p_opt.split('=', 1)
- except ValueError:
- parsed_opts[p_opt] = True
- else:
- parsed_opts[name] = value
-
- # encodings
- inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
- outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
-
- # handle ``pygmentize -N``
- if argns.N:
- lexer = find_lexer_class_for_filename(argns.N)
- if lexer is None:
- lexer = TextLexer
-
- print(lexer.aliases[0])
- return 0
-
- # handle ``pygmentize -C``
- if argns.C:
- inp = sys.stdin.buffer.read()
- try:
- lexer = guess_lexer(inp, inencoding=inencoding)
- except ClassNotFound:
- lexer = TextLexer
-
- print(lexer.aliases[0])
- return 0
-
- # handle ``pygmentize -S``
- S_opt = argns.S
- a_opt = argns.a
- if S_opt is not None:
- f_opt = argns.f
- if not f_opt:
- parser.print_help(sys.stderr)
- return 2
- if argns.l or argns.INPUTFILE:
- parser.print_help(sys.stderr)
- return 2
-
- try:
- parsed_opts['style'] = S_opt
- fmter = get_formatter_by_name(f_opt, **parsed_opts)
- except ClassNotFound as err:
- print(err, file=sys.stderr)
- return 1
-
- print(fmter.get_style_defs(a_opt or ''))
- return 0
-
- # if no -S is given, -a is not allowed
- if argns.a is not None:
- parser.print_help(sys.stderr)
- return 2
-
- # parse -F options
- F_opts = _parse_filters(argns.F or [])
-
- # -x: allow custom (eXternal) lexers and formatters
- allow_custom_lexer_formatter = bool(argns.x)
-
- # select lexer
- lexer = None
-
- # given by name?
- lexername = argns.l
- if lexername:
- # custom lexer, located relative to user's cwd
- if allow_custom_lexer_formatter and '.py' in lexername:
- try:
- filename = None
- name = None
- if ':' in lexername:
- filename, name = lexername.rsplit(':', 1)
-
- if '.py' in name:
- # This can happen on Windows: If the lexername is
- # C:\lexer.py -- return to normal load path in that case
- name = None
-
- if filename and name:
- lexer = load_lexer_from_file(filename, name,
- **parsed_opts)
- else:
- lexer = load_lexer_from_file(lexername, **parsed_opts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
- else:
- try:
- lexer = get_lexer_by_name(lexername, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- # read input code
- code = None
-
- if argns.INPUTFILE:
- if argns.s:
- print('Error: -s option not usable when input file specified',
- file=sys.stderr)
- return 2
-
- infn = argns.INPUTFILE
- try:
- with open(infn, 'rb') as infp:
- code = infp.read()
- except Exception as err:
- print('Error: cannot read infile:', err, file=sys.stderr)
- return 1
- if not inencoding:
- code, inencoding = guess_decode(code)
-
- # do we have to guess the lexer?
- if not lexer:
- try:
- lexer = get_lexer_for_filename(infn, code, **parsed_opts)
- except ClassNotFound as err:
- if argns.g:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
- else:
- print('Error:', err, file=sys.stderr)
- return 1
- except OptionError as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- elif not argns.s: # treat stdin as full file (-s support is later)
- # read code from terminal, always in binary mode since we want to
- # decode ourselves and be tolerant with it
- code = sys.stdin.buffer.read() # use .buffer to get a binary stream
- if not inencoding:
- code, inencoding = guess_decode_from_terminal(code, sys.stdin)
- # else the lexer will do the decoding
- if not lexer:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
-
- else: # -s option needs a lexer with -l
- if not lexer:
- print('Error: when using -s a lexer has to be selected with -l',
- file=sys.stderr)
- return 2
-
- # process filters
- for fname, fopts in F_opts:
- try:
- lexer.add_filter(fname, **fopts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- # select formatter
- outfn = argns.o
- fmter = argns.f
- if fmter:
- # custom formatter, located relative to user's cwd
- if allow_custom_lexer_formatter and '.py' in fmter:
- try:
- filename = None
- name = None
- if ':' in fmter:
- # Same logic as above for custom lexer
- filename, name = fmter.rsplit(':', 1)
-
- if '.py' in name:
- name = None
-
- if filename and name:
- fmter = load_formatter_from_file(filename, name,
- **parsed_opts)
- else:
- fmter = load_formatter_from_file(fmter, **parsed_opts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
- else:
- try:
- fmter = get_formatter_by_name(fmter, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- if outfn:
- if not fmter:
- try:
- fmter = get_formatter_for_filename(outfn, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
- try:
- outfile = open(outfn, 'wb')
- except Exception as err:
- print('Error: cannot open outfile:', err, file=sys.stderr)
- return 1
- else:
- if not fmter:
- if os.environ.get('COLORTERM','') in ('truecolor', '24bit'):
- fmter = TerminalTrueColorFormatter(**parsed_opts)
- elif '256' in os.environ.get('TERM', ''):
- fmter = Terminal256Formatter(**parsed_opts)
- else:
- fmter = TerminalFormatter(**parsed_opts)
- outfile = sys.stdout.buffer
-
- # determine output encoding if not explicitly selected
- if not outencoding:
- if outfn:
- # output file? use lexer encoding for now (can still be None)
- fmter.encoding = inencoding
- else:
- # else use terminal encoding
- fmter.encoding = terminal_encoding(sys.stdout)
-
- # provide coloring under Windows, if possible
- if not outfn and sys.platform in ('win32', 'cygwin') and \
- fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
- # unfortunately colorama doesn't support binary streams on Py3
- outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
- fmter.encoding = None
- try:
- import colorama.initialise
- except ImportError:
- pass
- else:
- outfile = colorama.initialise.wrap_stream(
- outfile, convert=None, strip=None, autoreset=False, wrap=True)
-
- # When using the LaTeX formatter and the option `escapeinside` is
- # specified, we need a special lexer which collects escaped text
- # before running the chosen language lexer.
- escapeinside = parsed_opts.get('escapeinside', '')
- if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
- left = escapeinside[0]
- right = escapeinside[1]
- lexer = LatexEmbeddedLexer(left, right, lexer)
-
- # ... and do it!
- if not argns.s:
- # process whole input as per normal...
- try:
- highlight(code, lexer, fmter, outfile)
- finally:
- if outfn:
- outfile.close()
- return 0
- else:
- # line by line processing of stdin (eg: for 'tail -f')...
- try:
- while 1:
- line = sys.stdin.buffer.readline()
- if not line:
- break
- if not inencoding:
- line = guess_decode_from_terminal(line, sys.stdin)[0]
- highlight(line, lexer, fmter, outfile)
- if hasattr(outfile, 'flush'):
- outfile.flush()
- return 0
- except KeyboardInterrupt: # pragma: no cover
- return 0
- finally:
- if outfn:
- outfile.close()
-
-
-class HelpFormatter(argparse.HelpFormatter):
- def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
- if width is None:
- try:
- width = shutil.get_terminal_size().columns - 2
- except Exception:
- pass
- argparse.HelpFormatter.__init__(self, prog, indent_increment,
- max_help_position, width)
-
-
-def main(args=sys.argv):
- """
- Main command line entry point.
- """
- desc = "Highlight an input file and write the result to an output file."
- parser = argparse.ArgumentParser(description=desc, add_help=False,
- formatter_class=HelpFormatter)
-
- operation = parser.add_argument_group('Main operation')
- lexersel = operation.add_mutually_exclusive_group()
- lexersel.add_argument(
- '-l', metavar='LEXER',
- help='Specify the lexer to use. (Query names with -L.) If not '
- 'given and -g is not present, the lexer is guessed from the filename.')
- lexersel.add_argument(
- '-g', action='store_true',
- help='Guess the lexer from the file contents, or pass through '
- 'as plain text if nothing can be guessed.')
- operation.add_argument(
- '-F', metavar='FILTER[:options]', action='append',
- help='Add a filter to the token stream. (Query names with -L.) '
- 'Filter options are given after a colon if necessary.')
- operation.add_argument(
- '-f', metavar='FORMATTER',
- help='Specify the formatter to use. (Query names with -L.) '
- 'If not given, the formatter is guessed from the output filename, '
- 'and defaults to the terminal formatter if the output is to the '
- 'terminal or an unknown file extension.')
- operation.add_argument(
- '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
- help='Give options to the lexer and formatter as a comma-separated '
- 'list of key-value pairs. '
- 'Example: `-O bg=light,python=cool`.')
- operation.add_argument(
- '-P', metavar='OPTION=value', action='append',
- help='Give a single option to the lexer and formatter - with this '
- 'you can pass options whose value contains commas and equal signs. '
- 'Example: `-P "heading=Pygments, the Python highlighter"`.')
- operation.add_argument(
- '-o', metavar='OUTPUTFILE',
- help='Where to write the output. Defaults to standard output.')
-
- operation.add_argument(
- 'INPUTFILE', nargs='?',
- help='Where to read the input. Defaults to standard input.')
-
- flags = parser.add_argument_group('Operation flags')
- flags.add_argument(
- '-v', action='store_true',
- help='Print a detailed traceback on unhandled exceptions, which '
- 'is useful for debugging and bug reports.')
- flags.add_argument(
- '-s', action='store_true',
- help='Process lines one at a time until EOF, rather than waiting to '
- 'process the entire file. This only works for stdin, only for lexers '
- 'with no line-spanning constructs, and is intended for streaming '
- 'input such as you get from `tail -f`. '
- 'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
- flags.add_argument(
- '-x', action='store_true',
- help='Allow custom lexers and formatters to be loaded from a .py file '
- 'relative to the current working directory. For example, '
- '`-l ./customlexer.py -x`. By default, this option expects a file '
- 'with a class named CustomLexer or CustomFormatter; you can also '
- 'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
- 'Users should be very careful not to use this option with untrusted '
- 'files, because it will import and run them.')
- flags.add_argument('--json', help='Output as JSON. This can '
-                       'only be used in conjunction with -L.',
- default=False,
- action='store_true')
-
- special_modes_group = parser.add_argument_group(
- 'Special modes - do not do any highlighting')
- special_modes = special_modes_group.add_mutually_exclusive_group()
- special_modes.add_argument(
- '-S', metavar='STYLE -f formatter',
- help='Print style definitions for STYLE for a formatter '
- 'given with -f. The argument given by -a is formatter '
- 'dependent.')
- special_modes.add_argument(
- '-L', nargs='*', metavar='WHAT',
- help='List lexers, formatters, styles or filters -- '
- 'give additional arguments for the thing(s) you want to list '
- '(e.g. "styles"), or omit them to list everything.')
- special_modes.add_argument(
- '-N', metavar='FILENAME',
- help='Guess and print out a lexer name based solely on the given '
- 'filename. Does not take input or highlight anything. If no specific '
- 'lexer can be determined, "text" is printed.')
- special_modes.add_argument(
- '-C', action='store_true',
- help='Like -N, but print out a lexer name based solely on '
- 'a given content from standard input.')
- special_modes.add_argument(
- '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
- help='Print detailed help for the object <name> of type <type>, '
- 'where <type> is one of "lexer", "formatter" or "filter".')
- special_modes.add_argument(
- '-V', action='store_true',
- help='Print the package version.')
- special_modes.add_argument(
- '-h', '--help', action='store_true',
- help='Print this help.')
- special_modes_group.add_argument(
- '-a', metavar='ARG',
- help='Formatter-specific additional argument for the -S (print '
- 'style sheet) mode.')
-
- argns = parser.parse_args(args[1:])
-
- try:
- return main_inner(parser, argns)
- except BrokenPipeError:
- # someone closed our stdout, e.g. by quitting a pager.
- return 0
- except Exception:
- if argns.v:
- print(file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print('An unhandled exception occurred while highlighting.',
- file=sys.stderr)
- print('Please report the whole traceback to the issue tracker at',
- file=sys.stderr)
- print('<https://github.com/pygments/pygments/issues>.',
- file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print(file=sys.stderr)
- raise
- import traceback
- info = traceback.format_exception(*sys.exc_info())
- msg = info[-1].strip()
- if len(info) >= 3:
- # extract relevant file and position info
- msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
- print(file=sys.stderr)
- print('*** Error while highlighting:', file=sys.stderr)
- print(msg, file=sys.stderr)
- print('*** If this is a bug you want to report, please rerun with -v.',
- file=sys.stderr)
- return 1
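
To make the behaviour of the option parsing and the ``-S`` special mode above concrete, here is a hedged sketch; ``_parse_options`` and ``_parse_filters`` are private helpers of this module and are called here only for illustration:

# Illustration of the -O/-F parsing helpers and the -S style-definition mode.
from pygments.cmdline import _parse_options, _parse_filters, main

# -O values are comma-separated key=value pairs; bare keys become True.
assert _parse_options(["bg=light,python=cool"]) == {"bg": "light", "python": "cool"}
assert _parse_options(["stripnl"]) == {"stripnl": True}

# -F filters may carry their own options after a colon.
assert _parse_filters(["whitespace:tabs=True"]) == [("whitespace", {"tabs": "True"})]

# Equivalent of `pygmentize -f html -S default`: prints CSS style definitions
# for the "default" style and returns 0 without highlighting anything.
exit_code = main(["pygmentize", "-f", "html", "-S", "default"])
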
diff --git a/venv/lib/python3.11/site-packages/pygments/console.py b/venv/lib/python3.11/site-packages/pygments/console.py
deleted file mode 100644
index deb4937..0000000
--- a/venv/lib/python3.11/site-packages/pygments/console.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
- pygments.console
- ~~~~~~~~~~~~~~~~
-
- Format colored console output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-esc = "\x1b["
-
-codes = {}
-codes[""] = ""
-codes["reset"] = esc + "39;49;00m"
-
-codes["bold"] = esc + "01m"
-codes["faint"] = esc + "02m"
-codes["standout"] = esc + "03m"
-codes["underline"] = esc + "04m"
-codes["blink"] = esc + "05m"
-codes["overline"] = esc + "06m"
-
-dark_colors = ["black", "red", "green", "yellow", "blue",
- "magenta", "cyan", "gray"]
-light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
- "brightmagenta", "brightcyan", "white"]
-
-x = 30
-for d, l in zip(dark_colors, light_colors):
- codes[d] = esc + "%im" % x
- codes[l] = esc + "%im" % (60 + x)
- x += 1
-
-del d, l, x
-
-codes["white"] = codes["bold"]
-
-
-def reset_color():
- return codes["reset"]
-
-
-def colorize(color_key, text):
- return codes[color_key] + text + codes["reset"]
-
-
-def ansiformat(attr, text):
- """
- Format ``text`` with a color and/or some attributes::
-
- color normal color
- *color* bold color
- _color_ underlined color
- +color+ blinking color
- """
- result = []
- if attr[:1] == attr[-1:] == '+':
- result.append(codes['blink'])
- attr = attr[1:-1]
- if attr[:1] == attr[-1:] == '*':
- result.append(codes['bold'])
- attr = attr[1:-1]
- if attr[:1] == attr[-1:] == '_':
- result.append(codes['underline'])
- attr = attr[1:-1]
- result.append(codes[attr])
- result.append(text)
- result.append(codes['reset'])
- return ''.join(result)
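
A short sketch of how the helpers above combine the escape-code table:

# Sketch: using the console helpers defined above.
from pygments.console import ansiformat, colorize, codes

print(colorize("red", "plain red text"))        # codes["red"] + text + reset
print(ansiformat("*blue*", "bold blue text"))   # '*...*' prepends the bold code
print(ansiformat("_green_", "underlined text")) # '_..._' prepends underline
assert codes["reset"] == "\x1b[39;49;00m"
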
diff --git a/venv/lib/python3.11/site-packages/pygments/filter.py b/venv/lib/python3.11/site-packages/pygments/filter.py
deleted file mode 100644
index dafa08d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/filter.py
+++ /dev/null
@@ -1,71 +0,0 @@
-"""
- pygments.filter
- ~~~~~~~~~~~~~~~
-
- Module that implements the default filter.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-def apply_filters(stream, filters, lexer=None):
- """
- Use this method to apply an iterable of filters to
-    a stream. If lexer is given, it's forwarded to the
-    filter; otherwise the filter receives `None`.
- """
- def _apply(filter_, stream):
- yield from filter_.filter(lexer, stream)
- for filter_ in filters:
- stream = _apply(filter_, stream)
- return stream
-
-
-def simplefilter(f):
- """
- Decorator that converts a function into a filter::
-
- @simplefilter
- def lowercase(self, lexer, stream, options):
- for ttype, value in stream:
- yield ttype, value.lower()
- """
- return type(f.__name__, (FunctionFilter,), {
- '__module__': getattr(f, '__module__'),
- '__doc__': f.__doc__,
- 'function': f,
- })
-
-
-class Filter:
- """
- Default filter. Subclass this class or use the `simplefilter`
-    decorator to create your own filters.
- """
-
- def __init__(self, **options):
- self.options = options
-
- def filter(self, lexer, stream):
- raise NotImplementedError()
-
-
-class FunctionFilter(Filter):
- """
- Abstract class used by `simplefilter` to create simple
- function filters on the fly. The `simplefilter` decorator
- automatically creates subclasses of this class for
- functions passed to it.
- """
- function = None
-
- def __init__(self, **options):
- if not hasattr(self, 'function'):
- raise TypeError('%r used without bound function' %
- self.__class__.__name__)
- Filter.__init__(self, **options)
-
- def filter(self, lexer, stream):
- # pylint: disable=not-callable
- yield from self.function(lexer, stream, self.options)
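
The ``simplefilter`` decorator above turns a generator function into a ``FunctionFilter`` subclass; the sketch below shows both ways of applying such a filter, assuming the stock ``PythonLexer``:

# Sketch: writing a filter with @simplefilter and applying it two ways.
from pygments.filter import simplefilter, apply_filters
from pygments.lexers import PythonLexer
from pygments.token import Comment

@simplefilter
def shout_comments(self, lexer, stream, options):
    # Uppercase every comment token; pass everything else through unchanged.
    for ttype, value in stream:
        yield ttype, value.upper() if ttype in Comment else value

source = "x = 1  # a comment\n"

# Either attach an instance to a lexer ...
lexer = PythonLexer()
lexer.add_filter(shout_comments())
tokens = list(lexer.get_tokens(source))

# ... or wrap an existing token stream with apply_filters().
plain = PythonLexer().get_tokens(source)
filtered = list(apply_filters(plain, [shout_comments()]))
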
diff --git a/venv/lib/python3.11/site-packages/pygments/filters/__init__.py b/venv/lib/python3.11/site-packages/pygments/filters/__init__.py
deleted file mode 100644
index 8bd5374..0000000
--- a/venv/lib/python3.11/site-packages/pygments/filters/__init__.py
+++ /dev/null
@@ -1,940 +0,0 @@
-"""
- pygments.filters
- ~~~~~~~~~~~~~~~~
-
- Module containing filter lookup functions and default
- filters.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
- string_to_tokentype
-from pygments.filter import Filter
-from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
- get_choice_opt, ClassNotFound, OptionError
-from pygments.plugin import find_plugin_filters
-
-
-def find_filter_class(filtername):
- """Lookup a filter by name. Return None if not found."""
- if filtername in FILTERS:
- return FILTERS[filtername]
- for name, cls in find_plugin_filters():
- if name == filtername:
- return cls
- return None
-
-
-def get_filter_by_name(filtername, **options):
- """Return an instantiated filter.
-
- Options are passed to the filter initializer if wanted.
- Raise a ClassNotFound if not found.
- """
- cls = find_filter_class(filtername)
- if cls:
- return cls(**options)
- else:
- raise ClassNotFound('filter %r not found' % filtername)
-
-
-def get_all_filters():
- """Return a generator of all filter names."""
- yield from FILTERS
- for name, _ in find_plugin_filters():
- yield name
-
-
-def _replace_special(ttype, value, regex, specialttype,
- replacefunc=lambda x: x):
- last = 0
- for match in regex.finditer(value):
- start, end = match.start(), match.end()
- if start != last:
- yield ttype, value[last:start]
- yield specialttype, replacefunc(value[start:end])
- last = end
- if last != len(value):
- yield ttype, value[last:]
-
-
-class CodeTagFilter(Filter):
- """Highlight special code tags in comments and docstrings.
-
- Options accepted:
-
- `codetags` : list of strings
- A list of strings that are flagged as code tags. The default is to
- highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``.
-
- .. versionchanged:: 2.13
- Now recognizes ``FIXME`` by default.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- tags = get_list_opt(options, 'codetags',
- ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
- self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
- re.escape(tag) for tag in tags if tag
- ]))
-
- def filter(self, lexer, stream):
- regex = self.tag_re
- for ttype, value in stream:
- if ttype in String.Doc or \
- ttype in Comment and \
- ttype not in Comment.Preproc:
- yield from _replace_special(ttype, value, regex, Comment.Special)
- else:
- yield ttype, value
-
-
-class SymbolFilter(Filter):
- """Convert mathematical symbols such as \\<longrightarrow> in Isabelle
- or \\longrightarrow in LaTeX into Unicode characters.
-
- This is mostly useful for HTML or console output when you want to
- approximate the source rendering you'd see in an IDE.
-
- Options accepted:
-
- `lang` : string
- The symbol language. Must be one of ``'isabelle'`` or
- ``'latex'``. The default is ``'isabelle'``.
- """
-
- latex_symbols = {
- '\\alpha' : '\U000003b1',
- '\\beta' : '\U000003b2',
- '\\gamma' : '\U000003b3',
- '\\delta' : '\U000003b4',
- '\\varepsilon' : '\U000003b5',
- '\\zeta' : '\U000003b6',
- '\\eta' : '\U000003b7',
- '\\vartheta' : '\U000003b8',
- '\\iota' : '\U000003b9',
- '\\kappa' : '\U000003ba',
- '\\lambda' : '\U000003bb',
- '\\mu' : '\U000003bc',
- '\\nu' : '\U000003bd',
- '\\xi' : '\U000003be',
- '\\pi' : '\U000003c0',
- '\\varrho' : '\U000003c1',
- '\\sigma' : '\U000003c3',
- '\\tau' : '\U000003c4',
- '\\upsilon' : '\U000003c5',
- '\\varphi' : '\U000003c6',
- '\\chi' : '\U000003c7',
- '\\psi' : '\U000003c8',
- '\\omega' : '\U000003c9',
- '\\Gamma' : '\U00000393',
- '\\Delta' : '\U00000394',
- '\\Theta' : '\U00000398',
- '\\Lambda' : '\U0000039b',
- '\\Xi' : '\U0000039e',
- '\\Pi' : '\U000003a0',
- '\\Sigma' : '\U000003a3',
- '\\Upsilon' : '\U000003a5',
- '\\Phi' : '\U000003a6',
- '\\Psi' : '\U000003a8',
- '\\Omega' : '\U000003a9',
- '\\leftarrow' : '\U00002190',
- '\\longleftarrow' : '\U000027f5',
- '\\rightarrow' : '\U00002192',
- '\\longrightarrow' : '\U000027f6',
- '\\Leftarrow' : '\U000021d0',
- '\\Longleftarrow' : '\U000027f8',
- '\\Rightarrow' : '\U000021d2',
- '\\Longrightarrow' : '\U000027f9',
- '\\leftrightarrow' : '\U00002194',
- '\\longleftrightarrow' : '\U000027f7',
- '\\Leftrightarrow' : '\U000021d4',
- '\\Longleftrightarrow' : '\U000027fa',
- '\\mapsto' : '\U000021a6',
- '\\longmapsto' : '\U000027fc',
- '\\relbar' : '\U00002500',
- '\\Relbar' : '\U00002550',
- '\\hookleftarrow' : '\U000021a9',
- '\\hookrightarrow' : '\U000021aa',
- '\\leftharpoondown' : '\U000021bd',
- '\\rightharpoondown' : '\U000021c1',
- '\\leftharpoonup' : '\U000021bc',
- '\\rightharpoonup' : '\U000021c0',
- '\\rightleftharpoons' : '\U000021cc',
- '\\leadsto' : '\U0000219d',
- '\\downharpoonleft' : '\U000021c3',
- '\\downharpoonright' : '\U000021c2',
- '\\upharpoonleft' : '\U000021bf',
- '\\upharpoonright' : '\U000021be',
- '\\restriction' : '\U000021be',
- '\\uparrow' : '\U00002191',
- '\\Uparrow' : '\U000021d1',
- '\\downarrow' : '\U00002193',
- '\\Downarrow' : '\U000021d3',
- '\\updownarrow' : '\U00002195',
- '\\Updownarrow' : '\U000021d5',
- '\\langle' : '\U000027e8',
- '\\rangle' : '\U000027e9',
- '\\lceil' : '\U00002308',
- '\\rceil' : '\U00002309',
- '\\lfloor' : '\U0000230a',
- '\\rfloor' : '\U0000230b',
- '\\flqq' : '\U000000ab',
- '\\frqq' : '\U000000bb',
- '\\bot' : '\U000022a5',
- '\\top' : '\U000022a4',
- '\\wedge' : '\U00002227',
- '\\bigwedge' : '\U000022c0',
- '\\vee' : '\U00002228',
- '\\bigvee' : '\U000022c1',
- '\\forall' : '\U00002200',
- '\\exists' : '\U00002203',
- '\\nexists' : '\U00002204',
- '\\neg' : '\U000000ac',
- '\\Box' : '\U000025a1',
- '\\Diamond' : '\U000025c7',
- '\\vdash' : '\U000022a2',
- '\\models' : '\U000022a8',
- '\\dashv' : '\U000022a3',
- '\\surd' : '\U0000221a',
- '\\le' : '\U00002264',
- '\\ge' : '\U00002265',
- '\\ll' : '\U0000226a',
- '\\gg' : '\U0000226b',
- '\\lesssim' : '\U00002272',
- '\\gtrsim' : '\U00002273',
- '\\lessapprox' : '\U00002a85',
- '\\gtrapprox' : '\U00002a86',
- '\\in' : '\U00002208',
- '\\notin' : '\U00002209',
- '\\subset' : '\U00002282',
- '\\supset' : '\U00002283',
- '\\subseteq' : '\U00002286',
- '\\supseteq' : '\U00002287',
- '\\sqsubset' : '\U0000228f',
- '\\sqsupset' : '\U00002290',
- '\\sqsubseteq' : '\U00002291',
- '\\sqsupseteq' : '\U00002292',
- '\\cap' : '\U00002229',
- '\\bigcap' : '\U000022c2',
- '\\cup' : '\U0000222a',
- '\\bigcup' : '\U000022c3',
- '\\sqcup' : '\U00002294',
- '\\bigsqcup' : '\U00002a06',
- '\\sqcap' : '\U00002293',
- '\\Bigsqcap' : '\U00002a05',
- '\\setminus' : '\U00002216',
- '\\propto' : '\U0000221d',
- '\\uplus' : '\U0000228e',
- '\\bigplus' : '\U00002a04',
- '\\sim' : '\U0000223c',
- '\\doteq' : '\U00002250',
- '\\simeq' : '\U00002243',
- '\\approx' : '\U00002248',
- '\\asymp' : '\U0000224d',
- '\\cong' : '\U00002245',
- '\\equiv' : '\U00002261',
- '\\Join' : '\U000022c8',
- '\\bowtie' : '\U00002a1d',
- '\\prec' : '\U0000227a',
- '\\succ' : '\U0000227b',
- '\\preceq' : '\U0000227c',
- '\\succeq' : '\U0000227d',
- '\\parallel' : '\U00002225',
- '\\mid' : '\U000000a6',
- '\\pm' : '\U000000b1',
- '\\mp' : '\U00002213',
- '\\times' : '\U000000d7',
- '\\div' : '\U000000f7',
- '\\cdot' : '\U000022c5',
- '\\star' : '\U000022c6',
- '\\circ' : '\U00002218',
- '\\dagger' : '\U00002020',
- '\\ddagger' : '\U00002021',
- '\\lhd' : '\U000022b2',
- '\\rhd' : '\U000022b3',
- '\\unlhd' : '\U000022b4',
- '\\unrhd' : '\U000022b5',
- '\\triangleleft' : '\U000025c3',
- '\\triangleright' : '\U000025b9',
- '\\triangle' : '\U000025b3',
- '\\triangleq' : '\U0000225c',
- '\\oplus' : '\U00002295',
- '\\bigoplus' : '\U00002a01',
- '\\otimes' : '\U00002297',
- '\\bigotimes' : '\U00002a02',
- '\\odot' : '\U00002299',
- '\\bigodot' : '\U00002a00',
- '\\ominus' : '\U00002296',
- '\\oslash' : '\U00002298',
- '\\dots' : '\U00002026',
- '\\cdots' : '\U000022ef',
- '\\sum' : '\U00002211',
- '\\prod' : '\U0000220f',
- '\\coprod' : '\U00002210',
- '\\infty' : '\U0000221e',
- '\\int' : '\U0000222b',
- '\\oint' : '\U0000222e',
- '\\clubsuit' : '\U00002663',
- '\\diamondsuit' : '\U00002662',
- '\\heartsuit' : '\U00002661',
- '\\spadesuit' : '\U00002660',
- '\\aleph' : '\U00002135',
- '\\emptyset' : '\U00002205',
- '\\nabla' : '\U00002207',
- '\\partial' : '\U00002202',
- '\\flat' : '\U0000266d',
- '\\natural' : '\U0000266e',
- '\\sharp' : '\U0000266f',
- '\\angle' : '\U00002220',
- '\\copyright' : '\U000000a9',
- '\\textregistered' : '\U000000ae',
- '\\textonequarter' : '\U000000bc',
- '\\textonehalf' : '\U000000bd',
- '\\textthreequarters' : '\U000000be',
- '\\textordfeminine' : '\U000000aa',
- '\\textordmasculine' : '\U000000ba',
- '\\euro' : '\U000020ac',
- '\\pounds' : '\U000000a3',
- '\\yen' : '\U000000a5',
- '\\textcent' : '\U000000a2',
- '\\textcurrency' : '\U000000a4',
- '\\textdegree' : '\U000000b0',
- }
-
- isabelle_symbols = {
- '\\<zero>' : '\U0001d7ec',
- '\\<one>' : '\U0001d7ed',
- '\\<two>' : '\U0001d7ee',
- '\\<three>' : '\U0001d7ef',
- '\\<four>' : '\U0001d7f0',
- '\\<five>' : '\U0001d7f1',
- '\\<six>' : '\U0001d7f2',
- '\\<seven>' : '\U0001d7f3',
- '\\<eight>' : '\U0001d7f4',
- '\\<nine>' : '\U0001d7f5',
- '\\<A>' : '\U0001d49c',
- '\\<B>' : '\U0000212c',
- '\\<C>' : '\U0001d49e',
- '\\<D>' : '\U0001d49f',
- '\\<E>' : '\U00002130',
- '\\<F>' : '\U00002131',
- '\\<G>' : '\U0001d4a2',
- '\\<H>' : '\U0000210b',
- '\\<I>' : '\U00002110',
- '\\<J>' : '\U0001d4a5',
- '\\<K>' : '\U0001d4a6',
- '\\<L>' : '\U00002112',
- '\\<M>' : '\U00002133',
- '\\<N>' : '\U0001d4a9',
- '\\<O>' : '\U0001d4aa',
- '\\<P>' : '\U0001d4ab',
- '\\<Q>' : '\U0001d4ac',
- '\\<R>' : '\U0000211b',
- '\\<S>' : '\U0001d4ae',
- '\\<T>' : '\U0001d4af',
- '\\<U>' : '\U0001d4b0',
- '\\<V>' : '\U0001d4b1',
- '\\<W>' : '\U0001d4b2',
- '\\<X>' : '\U0001d4b3',
- '\\<Y>' : '\U0001d4b4',
- '\\<Z>' : '\U0001d4b5',
- '\\<a>' : '\U0001d5ba',
- '\\<b>' : '\U0001d5bb',
- '\\<c>' : '\U0001d5bc',
- '\\<d>' : '\U0001d5bd',
- '\\<e>' : '\U0001d5be',
- '\\<f>' : '\U0001d5bf',
- '\\<g>' : '\U0001d5c0',
- '\\<h>' : '\U0001d5c1',
- '\\<i>' : '\U0001d5c2',
- '\\<j>' : '\U0001d5c3',
- '\\<k>' : '\U0001d5c4',
- '\\<l>' : '\U0001d5c5',
- '\\<m>' : '\U0001d5c6',
- '\\<n>' : '\U0001d5c7',
- '\\<o>' : '\U0001d5c8',
- '\\<p>' : '\U0001d5c9',
- '\\<q>' : '\U0001d5ca',
- '\\<r>' : '\U0001d5cb',
- '\\<s>' : '\U0001d5cc',
- '\\<t>' : '\U0001d5cd',
- '\\<u>' : '\U0001d5ce',
- '\\<v>' : '\U0001d5cf',
- '\\<w>' : '\U0001d5d0',
- '\\<x>' : '\U0001d5d1',
- '\\<y>' : '\U0001d5d2',
- '\\<z>' : '\U0001d5d3',
- '\\<AA>' : '\U0001d504',
- '\\<BB>' : '\U0001d505',
- '\\<CC>' : '\U0000212d',
- '\\<DD>' : '\U0001d507',
- '\\<EE>' : '\U0001d508',
- '\\<FF>' : '\U0001d509',
- '\\<GG>' : '\U0001d50a',
- '\\<HH>' : '\U0000210c',
- '\\<II>' : '\U00002111',
- '\\<JJ>' : '\U0001d50d',
- '\\<KK>' : '\U0001d50e',
- '\\<LL>' : '\U0001d50f',
- '\\<MM>' : '\U0001d510',
- '\\<NN>' : '\U0001d511',
- '\\<OO>' : '\U0001d512',
- '\\<PP>' : '\U0001d513',
- '\\<QQ>' : '\U0001d514',
- '\\<RR>' : '\U0000211c',
- '\\<SS>' : '\U0001d516',
- '\\<TT>' : '\U0001d517',
- '\\<UU>' : '\U0001d518',
- '\\<VV>' : '\U0001d519',
- '\\<WW>' : '\U0001d51a',
- '\\<XX>' : '\U0001d51b',
- '\\<YY>' : '\U0001d51c',
- '\\<ZZ>' : '\U00002128',
- '\\<aa>' : '\U0001d51e',
- '\\<bb>' : '\U0001d51f',
- '\\<cc>' : '\U0001d520',
- '\\<dd>' : '\U0001d521',
- '\\<ee>' : '\U0001d522',
- '\\<ff>' : '\U0001d523',
- '\\<gg>' : '\U0001d524',
- '\\<hh>' : '\U0001d525',
- '\\<ii>' : '\U0001d526',
- '\\<jj>' : '\U0001d527',
- '\\<kk>' : '\U0001d528',
- '\\<ll>' : '\U0001d529',
- '\\<mm>' : '\U0001d52a',
- '\\<nn>' : '\U0001d52b',
- '\\<oo>' : '\U0001d52c',
- '\\<pp>' : '\U0001d52d',
- '\\<qq>' : '\U0001d52e',
- '\\<rr>' : '\U0001d52f',
- '\\<ss>' : '\U0001d530',
- '\\<tt>' : '\U0001d531',
- '\\<uu>' : '\U0001d532',
- '\\<vv>' : '\U0001d533',
- '\\<ww>' : '\U0001d534',
- '\\<xx>' : '\U0001d535',
- '\\<yy>' : '\U0001d536',
- '\\<zz>' : '\U0001d537',
- '\\<alpha>' : '\U000003b1',
- '\\<beta>' : '\U000003b2',
- '\\<gamma>' : '\U000003b3',
- '\\<delta>' : '\U000003b4',
- '\\<epsilon>' : '\U000003b5',
- '\\<zeta>' : '\U000003b6',
- '\\<eta>' : '\U000003b7',
- '\\<theta>' : '\U000003b8',
- '\\<iota>' : '\U000003b9',
- '\\<kappa>' : '\U000003ba',
- '\\<lambda>' : '\U000003bb',
- '\\<mu>' : '\U000003bc',
- '\\<nu>' : '\U000003bd',
- '\\<xi>' : '\U000003be',
- '\\<pi>' : '\U000003c0',
- '\\<rho>' : '\U000003c1',
- '\\<sigma>' : '\U000003c3',
- '\\<tau>' : '\U000003c4',
- '\\<upsilon>' : '\U000003c5',
- '\\<phi>' : '\U000003c6',
- '\\<chi>' : '\U000003c7',
- '\\<psi>' : '\U000003c8',
- '\\<omega>' : '\U000003c9',
- '\\<Gamma>' : '\U00000393',
- '\\<Delta>' : '\U00000394',
- '\\<Theta>' : '\U00000398',
- '\\<Lambda>' : '\U0000039b',
- '\\<Xi>' : '\U0000039e',
- '\\<Pi>' : '\U000003a0',
- '\\<Sigma>' : '\U000003a3',
- '\\<Upsilon>' : '\U000003a5',
- '\\<Phi>' : '\U000003a6',
- '\\<Psi>' : '\U000003a8',
- '\\<Omega>' : '\U000003a9',
- '\\<bool>' : '\U0001d539',
- '\\<complex>' : '\U00002102',
- '\\<nat>' : '\U00002115',
- '\\<rat>' : '\U0000211a',
- '\\<real>' : '\U0000211d',
- '\\<int>' : '\U00002124',
- '\\<leftarrow>' : '\U00002190',
- '\\<longleftarrow>' : '\U000027f5',
- '\\<rightarrow>' : '\U00002192',
- '\\<longrightarrow>' : '\U000027f6',
- '\\<Leftarrow>' : '\U000021d0',
- '\\<Longleftarrow>' : '\U000027f8',
- '\\<Rightarrow>' : '\U000021d2',
- '\\<Longrightarrow>' : '\U000027f9',
- '\\<leftrightarrow>' : '\U00002194',
- '\\<longleftrightarrow>' : '\U000027f7',
- '\\<Leftrightarrow>' : '\U000021d4',
- '\\<Longleftrightarrow>' : '\U000027fa',
- '\\<mapsto>' : '\U000021a6',
- '\\<longmapsto>' : '\U000027fc',
- '\\<midarrow>' : '\U00002500',
- '\\<Midarrow>' : '\U00002550',
- '\\<hookleftarrow>' : '\U000021a9',
- '\\<hookrightarrow>' : '\U000021aa',
- '\\<leftharpoondown>' : '\U000021bd',
- '\\<rightharpoondown>' : '\U000021c1',
- '\\<leftharpoonup>' : '\U000021bc',
- '\\<rightharpoonup>' : '\U000021c0',
- '\\<rightleftharpoons>' : '\U000021cc',
- '\\<leadsto>' : '\U0000219d',
- '\\<downharpoonleft>' : '\U000021c3',
- '\\<downharpoonright>' : '\U000021c2',
- '\\<upharpoonleft>' : '\U000021bf',
- '\\<upharpoonright>' : '\U000021be',
- '\\<restriction>' : '\U000021be',
- '\\<Colon>' : '\U00002237',
- '\\<up>' : '\U00002191',
- '\\<Up>' : '\U000021d1',
- '\\<down>' : '\U00002193',
- '\\<Down>' : '\U000021d3',
- '\\<updown>' : '\U00002195',
- '\\<Updown>' : '\U000021d5',
- '\\<langle>' : '\U000027e8',
- '\\<rangle>' : '\U000027e9',
- '\\<lceil>' : '\U00002308',
- '\\<rceil>' : '\U00002309',
- '\\<lfloor>' : '\U0000230a',
- '\\<rfloor>' : '\U0000230b',
- '\\<lparr>' : '\U00002987',
- '\\<rparr>' : '\U00002988',
- '\\<lbrakk>' : '\U000027e6',
- '\\<rbrakk>' : '\U000027e7',
- '\\<lbrace>' : '\U00002983',
- '\\<rbrace>' : '\U00002984',
- '\\<guillemotleft>' : '\U000000ab',
- '\\<guillemotright>' : '\U000000bb',
- '\\<bottom>' : '\U000022a5',
- '\\<top>' : '\U000022a4',
- '\\<and>' : '\U00002227',
- '\\<And>' : '\U000022c0',
- '\\<or>' : '\U00002228',
- '\\<Or>' : '\U000022c1',
- '\\<forall>' : '\U00002200',
- '\\<exists>' : '\U00002203',
- '\\<nexists>' : '\U00002204',
- '\\<not>' : '\U000000ac',
- '\\<box>' : '\U000025a1',
- '\\<diamond>' : '\U000025c7',
- '\\<turnstile>' : '\U000022a2',
- '\\<Turnstile>' : '\U000022a8',
- '\\<tturnstile>' : '\U000022a9',
- '\\<TTurnstile>' : '\U000022ab',
- '\\<stileturn>' : '\U000022a3',
- '\\<surd>' : '\U0000221a',
- '\\<le>' : '\U00002264',
- '\\<ge>' : '\U00002265',
- '\\<lless>' : '\U0000226a',
- '\\<ggreater>' : '\U0000226b',
- '\\<lesssim>' : '\U00002272',
- '\\<greatersim>' : '\U00002273',
- '\\<lessapprox>' : '\U00002a85',
- '\\<greaterapprox>' : '\U00002a86',
- '\\<in>' : '\U00002208',
- '\\<notin>' : '\U00002209',
- '\\<subset>' : '\U00002282',
- '\\<supset>' : '\U00002283',
- '\\<subseteq>' : '\U00002286',
- '\\<supseteq>' : '\U00002287',
- '\\<sqsubset>' : '\U0000228f',
- '\\<sqsupset>' : '\U00002290',
- '\\<sqsubseteq>' : '\U00002291',
- '\\<sqsupseteq>' : '\U00002292',
- '\\<inter>' : '\U00002229',
- '\\<Inter>' : '\U000022c2',
- '\\<union>' : '\U0000222a',
- '\\<Union>' : '\U000022c3',
- '\\<squnion>' : '\U00002294',
- '\\<Squnion>' : '\U00002a06',
- '\\<sqinter>' : '\U00002293',
- '\\<Sqinter>' : '\U00002a05',
- '\\<setminus>' : '\U00002216',
- '\\<propto>' : '\U0000221d',
- '\\<uplus>' : '\U0000228e',
- '\\<Uplus>' : '\U00002a04',
- '\\<noteq>' : '\U00002260',
- '\\<sim>' : '\U0000223c',
- '\\<doteq>' : '\U00002250',
- '\\<simeq>' : '\U00002243',
- '\\<approx>' : '\U00002248',
- '\\<asymp>' : '\U0000224d',
- '\\<cong>' : '\U00002245',
- '\\<smile>' : '\U00002323',
- '\\<equiv>' : '\U00002261',
- '\\<frown>' : '\U00002322',
- '\\<Join>' : '\U000022c8',
- '\\<bowtie>' : '\U00002a1d',
- '\\<prec>' : '\U0000227a',
- '\\<succ>' : '\U0000227b',
- '\\<preceq>' : '\U0000227c',
- '\\<succeq>' : '\U0000227d',
- '\\<parallel>' : '\U00002225',
- '\\<bar>' : '\U000000a6',
- '\\<plusminus>' : '\U000000b1',
- '\\<minusplus>' : '\U00002213',
- '\\<times>' : '\U000000d7',
- '\\<div>' : '\U000000f7',
- '\\<cdot>' : '\U000022c5',
- '\\<star>' : '\U000022c6',
- '\\<bullet>' : '\U00002219',
- '\\<circ>' : '\U00002218',
- '\\<dagger>' : '\U00002020',
- '\\<ddagger>' : '\U00002021',
- '\\<lhd>' : '\U000022b2',
- '\\<rhd>' : '\U000022b3',
- '\\<unlhd>' : '\U000022b4',
- '\\<unrhd>' : '\U000022b5',
- '\\<triangleleft>' : '\U000025c3',
- '\\<triangleright>' : '\U000025b9',
- '\\<triangle>' : '\U000025b3',
- '\\<triangleq>' : '\U0000225c',
- '\\<oplus>' : '\U00002295',
- '\\<Oplus>' : '\U00002a01',
- '\\<otimes>' : '\U00002297',
- '\\<Otimes>' : '\U00002a02',
- '\\<odot>' : '\U00002299',
- '\\<Odot>' : '\U00002a00',
- '\\<ominus>' : '\U00002296',
- '\\<oslash>' : '\U00002298',
- '\\<dots>' : '\U00002026',
- '\\<cdots>' : '\U000022ef',
- '\\<Sum>' : '\U00002211',
- '\\<Prod>' : '\U0000220f',
- '\\<Coprod>' : '\U00002210',
- '\\<infinity>' : '\U0000221e',
- '\\<integral>' : '\U0000222b',
- '\\<ointegral>' : '\U0000222e',
- '\\<clubsuit>' : '\U00002663',
- '\\<diamondsuit>' : '\U00002662',
- '\\<heartsuit>' : '\U00002661',
- '\\<spadesuit>' : '\U00002660',
- '\\<aleph>' : '\U00002135',
- '\\<emptyset>' : '\U00002205',
- '\\<nabla>' : '\U00002207',
- '\\<partial>' : '\U00002202',
- '\\<flat>' : '\U0000266d',
- '\\<natural>' : '\U0000266e',
- '\\<sharp>' : '\U0000266f',
- '\\<angle>' : '\U00002220',
- '\\<copyright>' : '\U000000a9',
- '\\<registered>' : '\U000000ae',
- '\\<hyphen>' : '\U000000ad',
- '\\<inverse>' : '\U000000af',
- '\\<onequarter>' : '\U000000bc',
- '\\<onehalf>' : '\U000000bd',
- '\\<threequarters>' : '\U000000be',
- '\\<ordfeminine>' : '\U000000aa',
- '\\<ordmasculine>' : '\U000000ba',
- '\\<section>' : '\U000000a7',
- '\\<paragraph>' : '\U000000b6',
- '\\<exclamdown>' : '\U000000a1',
- '\\<questiondown>' : '\U000000bf',
- '\\<euro>' : '\U000020ac',
- '\\<pounds>' : '\U000000a3',
- '\\<yen>' : '\U000000a5',
- '\\<cent>' : '\U000000a2',
- '\\<currency>' : '\U000000a4',
- '\\<degree>' : '\U000000b0',
- '\\<amalg>' : '\U00002a3f',
- '\\<mho>' : '\U00002127',
- '\\<lozenge>' : '\U000025ca',
- '\\<wp>' : '\U00002118',
- '\\<wrong>' : '\U00002240',
- '\\<struct>' : '\U000022c4',
- '\\<acute>' : '\U000000b4',
- '\\<index>' : '\U00000131',
- '\\<dieresis>' : '\U000000a8',
- '\\<cedilla>' : '\U000000b8',
- '\\<hungarumlaut>' : '\U000002dd',
- '\\<some>' : '\U000003f5',
- '\\<newline>' : '\U000023ce',
- '\\<open>' : '\U00002039',
- '\\<close>' : '\U0000203a',
- '\\<here>' : '\U00002302',
- '\\<^sub>' : '\U000021e9',
- '\\<^sup>' : '\U000021e7',
- '\\<^bold>' : '\U00002759',
- '\\<^bsub>' : '\U000021d8',
- '\\<^esub>' : '\U000021d9',
- '\\<^bsup>' : '\U000021d7',
- '\\<^esup>' : '\U000021d6',
- }
-
- lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols}
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- lang = get_choice_opt(options, 'lang',
- ['isabelle', 'latex'], 'isabelle')
- self.symbols = self.lang_map[lang]
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if value in self.symbols:
- yield ttype, self.symbols[value]
- else:
- yield ttype, value
-
-
-class KeywordCaseFilter(Filter):
-    """Convert keywords to lowercase, uppercase, or capitalized form
-    (first letter uppercase, rest lowercase).
-
-    This can be useful e.g. if you highlight Pascal code and want to adapt
-    the code to your style guide.
-
- Options accepted:
-
- `case` : string
- The casing to convert keywords to. Must be one of ``'lower'``,
- ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- case = get_choice_opt(options, 'case',
- ['lower', 'upper', 'capitalize'], 'lower')
- self.convert = getattr(str, case)
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype in Keyword:
- yield ttype, self.convert(value)
- else:
- yield ttype, value
-
-
-class NameHighlightFilter(Filter):
- """Highlight a normal Name (and Name.*) token with a different token type.
-
- Example::
-
- filter = NameHighlightFilter(
- names=['foo', 'bar', 'baz'],
- tokentype=Name.Function,
- )
-
- This would highlight the names "foo", "bar" and "baz"
- as functions. `Name.Function` is the default token type.
-
- Options accepted:
-
- `names` : list of strings
- A list of names that should be given the different token type.
- There is no default.
- `tokentype` : TokenType or string
- A token type or a string containing a token type name that is
- used for highlighting the strings in `names`. The default is
- `Name.Function`.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.names = set(get_list_opt(options, 'names', []))
- tokentype = options.get('tokentype')
- if tokentype:
- self.tokentype = string_to_tokentype(tokentype)
- else:
- self.tokentype = Name.Function
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype in Name and value in self.names:
- yield self.tokentype, value
- else:
- yield ttype, value
-
-
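A sketch of the NameHighlightFilter above; the name 'frobnicate' and the Name.Builtin token type are arbitrary choices for illustration::

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter
    from pygments.filters import NameHighlightFilter
    from pygments.token import Name

    lexer = PythonLexer()
    lexer.add_filter(NameHighlightFilter(names=['frobnicate'],
                                         tokentype=Name.Builtin))
    print(highlight('frobnicate(42)', lexer, HtmlFormatter(noclasses=True)))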
-class ErrorToken(Exception):
- pass
-
-
-class RaiseOnErrorTokenFilter(Filter):
- """Raise an exception when the lexer generates an error token.
-
- Options accepted:
-
- `excclass` : Exception class
- The exception class to raise.
- The default is `pygments.filters.ErrorToken`.
-
- .. versionadded:: 0.8
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.exception = options.get('excclass', ErrorToken)
- try:
- # issubclass() will raise TypeError if first argument is not a class
- if not issubclass(self.exception, Exception):
- raise TypeError
- except TypeError:
- raise OptionError('excclass option is not an exception class')
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype is Error:
- raise self.exception(value)
- yield ttype, value
-
-
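A sketch of the RaiseOnErrorTokenFilter above, assuming the input contains characters the lexer cannot match (which lex as Error tokens)::

    from pygments.lexers import PythonLexer
    from pygments.filters import ErrorToken

    lexer = PythonLexer()
    lexer.add_filter('raiseonerror')
    try:
        # consuming the stream triggers the filter; '?' is not valid Python
        list(lexer.get_tokens('def ?broken?():'))
    except ErrorToken as exc:
        print('lexing failed on:', exc)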
-class VisibleWhitespaceFilter(Filter):
- """Convert tabs, newlines and/or spaces to visible characters.
-
- Options accepted:
-
- `spaces` : string or bool
- If this is a one-character string, spaces will be replaced by this string.
- If it is another true value, spaces will be replaced by ``·`` (unicode
- MIDDLE DOT). If it is a false value, spaces will not be replaced. The
- default is ``False``.
- `tabs` : string or bool
- The same as for `spaces`, but the default replacement character is ``»``
- (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
- is ``False``. Note: this will not work if the `tabsize` option for the
- lexer is nonzero, as tabs will already have been expanded then.
- `tabsize` : int
- If tabs are to be replaced by this filter (see the `tabs` option), this
- is the total number of characters that a tab should be expanded to.
- The default is ``8``.
- `newlines` : string or bool
- The same as for `spaces`, but the default replacement character is ``¶``
- (unicode PILCROW SIGN). The default value is ``False``.
- `wstokentype` : bool
- If true, give whitespace the special `Whitespace` token type. This allows
- styling the visible whitespace differently (e.g. greyed out), but it can
- disrupt background colors. The default is ``True``.
-
- .. versionadded:: 0.8
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- for name, default in [('spaces', '·'),
- ('tabs', '»'),
- ('newlines', '¶')]:
- opt = options.get(name, False)
- if isinstance(opt, str) and len(opt) == 1:
- setattr(self, name, opt)
- else:
- setattr(self, name, (opt and default or ''))
- tabsize = get_int_opt(options, 'tabsize', 8)
- if self.tabs:
- self.tabs += ' ' * (tabsize - 1)
- if self.newlines:
- self.newlines += '\n'
- self.wstt = get_bool_opt(options, 'wstokentype', True)
-
- def filter(self, lexer, stream):
- if self.wstt:
- spaces = self.spaces or ' '
- tabs = self.tabs or '\t'
- newlines = self.newlines or '\n'
- regex = re.compile(r'\s')
-
- def replacefunc(wschar):
- if wschar == ' ':
- return spaces
- elif wschar == '\t':
- return tabs
- elif wschar == '\n':
- return newlines
- return wschar
-
- for ttype, value in stream:
- yield from _replace_special(ttype, value, regex, Whitespace,
- replacefunc)
- else:
- spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
- # simpler processing
- for ttype, value in stream:
- if spaces:
- value = value.replace(' ', spaces)
- if tabs:
- value = value.replace('\t', tabs)
- if newlines:
- value = value.replace('\n', newlines)
- yield ttype, value
-
-
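A sketch of the VisibleWhitespaceFilter above with the default replacement characters::

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import NullFormatter

    lexer = PythonLexer()
    lexer.add_filter('whitespace', spaces=True, tabs=True, newlines=True)
    # spaces render as '·', tabs as '»' (padded), line ends as '¶'
    print(highlight('if x:\n\treturn  x\n', lexer, NullFormatter()))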
-class GobbleFilter(Filter):
- """Gobbles source code lines (eats initial characters).
-
- This filter drops the first ``n`` characters off every line of code. This
- may be useful when the source code fed to the lexer is indented by a fixed
- amount of space that isn't desired in the output.
-
- Options accepted:
-
- `n` : int
- The number of characters to gobble.
-
- .. versionadded:: 1.2
- """
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.n = get_int_opt(options, 'n', 0)
-
- def gobble(self, value, left):
- if left < len(value):
- return value[left:], 0
- else:
- return '', left - len(value)
-
- def filter(self, lexer, stream):
- n = self.n
- left = n # How many characters left to gobble.
- for ttype, value in stream:
- # Remove ``left`` characters from the first line, ``n`` from all others.
- parts = value.split('\n')
- (parts[0], left) = self.gobble(parts[0], left)
- for i in range(1, len(parts)):
- (parts[i], left) = self.gobble(parts[i], n)
- value = '\n'.join(parts)
-
- if value != '':
- yield ttype, value
-
-
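A sketch of the GobbleFilter above, stripping a fixed four-column indent::

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import NullFormatter

    indented = '    def f(x):\n        return x\n'
    lexer = PythonLexer()
    lexer.add_filter('gobble', n=4)   # drop the first 4 characters per line
    print(highlight(indented, lexer, NullFormatter()))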
-class TokenMergeFilter(Filter):
- """Merges consecutive tokens with the same token type in the output
- stream of a lexer.
-
- .. versionadded:: 1.2
- """
- def __init__(self, **options):
- Filter.__init__(self, **options)
-
- def filter(self, lexer, stream):
- current_type = None
- current_value = None
- for ttype, value in stream:
- if ttype is current_type:
- current_value += value
- else:
- if current_type is not None:
- yield current_type, current_value
- current_type = ttype
- current_value = value
- if current_type is not None:
- yield current_type, current_value
-
-
-FILTERS = {
- 'codetagify': CodeTagFilter,
- 'keywordcase': KeywordCaseFilter,
- 'highlight': NameHighlightFilter,
- 'raiseonerror': RaiseOnErrorTokenFilter,
- 'whitespace': VisibleWhitespaceFilter,
- 'gobble': GobbleFilter,
- 'tokenmerge': TokenMergeFilter,
- 'symbols': SymbolFilter,
-}
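The FILTERS mapping above is what the module's name-based lookups resolve against; a short sketch of the public helpers (get_all_filters() also yields plugin-provided names)::

    from pygments.filters import get_all_filters, get_filter_by_name

    print(sorted(get_all_filters()))   # 'codetagify', 'gobble', 'highlight', ...
    flt = get_filter_by_name('keywordcase', case='capitalize')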
diff --git a/venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index ee230d5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatter.py b/venv/lib/python3.11/site-packages/pygments/formatter.py
deleted file mode 100644
index 87183ab..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatter.py
+++ /dev/null
@@ -1,124 +0,0 @@
-"""
- pygments.formatter
- ~~~~~~~~~~~~~~~~~~
-
- Base formatter class.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import codecs
-
-from pygments.util import get_bool_opt
-from pygments.styles import get_style_by_name
-
-__all__ = ['Formatter']
-
-
-def _lookup_style(style):
- if isinstance(style, str):
- return get_style_by_name(style)
- return style
-
-
-class Formatter:
- """
- Converts a token stream to text.
-
- Formatters should have attributes to help selecting them. These
- are similar to the corresponding :class:`~pygments.lexer.Lexer`
- attributes.
-
- .. autoattribute:: name
- :no-value:
-
- .. autoattribute:: aliases
- :no-value:
-
- .. autoattribute:: filenames
- :no-value:
-
- You can pass options as keyword arguments to the constructor.
- All formatters accept these basic options:
-
- ``style``
- The style to use, can be a string or a Style subclass
- (default: "default"). Not used by e.g. the
- TerminalFormatter.
- ``full``
- Tells the formatter to output a "full" document, i.e.
- a complete self-contained document. This doesn't have
- any effect for some formatters (default: false).
- ``title``
- If ``full`` is true, the title that should be used to
- caption the document (default: '').
- ``encoding``
- If given, must be an encoding name. This will be used to
- convert the Unicode token strings to byte strings in the
- output. If it is "" or None, Unicode strings will be written
- to the output file, which most file-like objects do not
- support (default: None).
- ``outencoding``
- Overrides ``encoding`` if given.
-
- """
-
- #: Full name for the formatter, in human-readable form.
- name = None
-
- #: A list of short, unique identifiers that can be used to lookup
- #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
- aliases = []
-
- #: A list of fnmatch patterns that match filenames for which this
- #: formatter can produce output. The patterns in this list should be unique
- #: among all formatters.
- filenames = []
-
- #: If True, this formatter outputs Unicode strings when no encoding
- #: option is given.
- unicodeoutput = True
-
- def __init__(self, **options):
- """
- As with lexers, this constructor takes arbitrary optional arguments,
- and if you override it, you should first process your own options, then
- call the base class implementation.
- """
- self.style = _lookup_style(options.get('style', 'default'))
- self.full = get_bool_opt(options, 'full', False)
- self.title = options.get('title', '')
- self.encoding = options.get('encoding', None) or None
- if self.encoding in ('guess', 'chardet'):
- # can happen for e.g. pygmentize -O encoding=guess
- self.encoding = 'utf-8'
- self.encoding = options.get('outencoding') or self.encoding
- self.options = options
-
- def get_style_defs(self, arg=''):
- """
- This method must return statements or declarations suitable to define
- the current style for subsequent highlighted text (e.g. CSS classes
- in the `HTMLFormatter`).
-
- The optional argument `arg` can be used to modify the generation and
- is formatter dependent (it is standardized because it can be given on
- the command line).
-
- This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
- the `arg` is then given by the ``-a`` option.
- """
- return ''
-
- def format(self, tokensource, outfile):
- """
- This method must format the tokens from the `tokensource` iterable and
- write the formatted version to the file object `outfile`.
-
- Formatter options can control how exactly the tokens are converted.
- """
- if self.encoding:
- # wrap the outfile in a StreamWriter
- outfile = codecs.lookup(self.encoding)[3](outfile)
- return self.format_unencoded(tokensource, outfile)
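A minimal subclassing sketch for the Formatter base class removed above; the class name is hypothetical, and only format_unencoded() is implemented since the base format() already handles the optional output encoding::

    from pygments.formatter import Formatter

    class PlainTextFormatter(Formatter):          # illustrative name only
        name = 'Plain text'
        aliases = ['plaintext-example']
        filenames = []

        def format_unencoded(self, tokensource, outfile):
            # ignore all styling and emit the raw token values
            for ttype, value in tokensource:
                outfile.write(value)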
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__init__.py b/venv/lib/python3.11/site-packages/pygments/formatters/__init__.py
deleted file mode 100644
index 6e482a1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__init__.py
+++ /dev/null
@@ -1,158 +0,0 @@
-"""
- pygments.formatters
- ~~~~~~~~~~~~~~~~~~~
-
- Pygments formatters.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import types
-import fnmatch
-from os.path import basename
-
-from pygments.formatters._mapping import FORMATTERS
-from pygments.plugin import find_plugin_formatters
-from pygments.util import ClassNotFound
-
-__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
- 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS)
-
-_formatter_cache = {} # classes by name
-_pattern_cache = {}
-
-
-def _fn_matches(fn, glob):
- """Return whether the supplied file name fn matches pattern filename."""
- if glob not in _pattern_cache:
- pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
- return pattern.match(fn)
- return _pattern_cache[glob].match(fn)
-
-
-def _load_formatters(module_name):
- """Load a formatter (and all others in the module too)."""
- mod = __import__(module_name, None, None, ['__all__'])
- for formatter_name in mod.__all__:
- cls = getattr(mod, formatter_name)
- _formatter_cache[cls.name] = cls
-
-
-def get_all_formatters():
- """Return a generator for all formatter classes."""
- # NB: this returns formatter classes, not info like get_all_lexers().
- for info in FORMATTERS.values():
- if info[1] not in _formatter_cache:
- _load_formatters(info[0])
- yield _formatter_cache[info[1]]
- for _, formatter in find_plugin_formatters():
- yield formatter
-
-
-def find_formatter_class(alias):
- """Lookup a formatter by alias.
-
- Returns None if not found.
- """
- for module_name, name, aliases, _, _ in FORMATTERS.values():
- if alias in aliases:
- if name not in _formatter_cache:
- _load_formatters(module_name)
- return _formatter_cache[name]
- for _, cls in find_plugin_formatters():
- if alias in cls.aliases:
- return cls
-
-
-def get_formatter_by_name(_alias, **options):
- """
- Return an instance of a :class:`.Formatter` subclass that has `alias` in its
- aliases list. The formatter is given the `options` at its instantiation.
-
- Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that
- alias is found.
- """
- cls = find_formatter_class(_alias)
- if cls is None:
- raise ClassNotFound("no formatter found for name %r" % _alias)
- return cls(**options)
-
-
-def load_formatter_from_file(filename, formattername="CustomFormatter", **options):
- """
- Return a `Formatter` subclass instance loaded from the provided file, relative
- to the current directory.
-
- The file is expected to contain a Formatter class named ``formattername``
- (by default, CustomFormatter). Users should be very careful with the input, because
- this method is equivalent to running ``exec()`` on the input file. The formatter is
- given the `options` at its instantiation.
-
- :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading
- the formatter.
-
- .. versionadded:: 2.2
- """
- try:
- # This empty dict will contain the namespace for the exec'd file
- custom_namespace = {}
- with open(filename, 'rb') as f:
- exec(f.read(), custom_namespace)
- # Retrieve the class `formattername` from that namespace
- if formattername not in custom_namespace:
- raise ClassNotFound('no valid %s class found in %s' %
- (formattername, filename))
- formatter_class = custom_namespace[formattername]
- # And finally instantiate it with the options
- return formatter_class(**options)
- except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
- except ClassNotFound:
- raise
- except Exception as err:
- raise ClassNotFound('error when loading custom formatter: %s' % err)
-
-
-def get_formatter_for_filename(fn, **options):
- """
- Return a :class:`.Formatter` subclass instance that has a filename pattern
- matching `fn`. The formatter is given the `options` at its instantiation.
-
- Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename
- is found.
- """
- fn = basename(fn)
- for modname, name, _, filenames, _ in FORMATTERS.values():
- for filename in filenames:
- if _fn_matches(fn, filename):
- if name not in _formatter_cache:
- _load_formatters(modname)
- return _formatter_cache[name](**options)
- for _name, cls in find_plugin_formatters():
- for filename in cls.filenames:
- if _fn_matches(fn, filename):
- return cls(**options)
- raise ClassNotFound("no formatter found for file name %r" % fn)
-
-
-class _automodule(types.ModuleType):
- """Automatically import formatters."""
-
- def __getattr__(self, name):
- info = FORMATTERS.get(name)
- if info:
- _load_formatters(info[0])
- cls = _formatter_cache[info[1]]
- setattr(self, name, cls)
- return cls
- raise AttributeError(name)
-
-
-oldmod = sys.modules[__name__]
-newmod = _automodule(__name__)
-newmod.__dict__.update(oldmod.__dict__)
-sys.modules[__name__] = newmod
-del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
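A sketch of the lookup helpers defined in the module removed above::

    from pygments.formatters import (get_formatter_by_name,
                                     get_formatter_for_filename)

    html = get_formatter_by_name('html', linenos='table')
    latex = get_formatter_for_filename('report.tex')   # resolves to LatexFormatter
    # load_formatter_from_file('myformatter.py') would exec an external file;
    # only use it on trusted input.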
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index de3734e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pyc
deleted file mode 100644
index 268b46e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pyc
deleted file mode 100644
index f958bd3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pyc
deleted file mode 100644
index d578aa0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pyc
deleted file mode 100644
index 42eb8bd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pyc
deleted file mode 100644
index 60484d1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pyc
deleted file mode 100644
index 022a40e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pyc
deleted file mode 100644
index 2bbb094..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pyc
deleted file mode 100644
index 6bc4288..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc
deleted file mode 100644
index 8db5e7e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pyc
deleted file mode 100644
index 173c930..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pyc
deleted file mode 100644
index d579678..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pyc
deleted file mode 100644
index e46767f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pyc
deleted file mode 100644
index 560606f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/_mapping.py b/venv/lib/python3.11/site-packages/pygments/formatters/_mapping.py
deleted file mode 100755
index 72ca840..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/_mapping.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Automatically generated by scripts/gen_mapfiles.py.
-# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead.
-
-FORMATTERS = {
- 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
- 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'),
- 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags. By default, the content is enclosed in a ``<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option). The ``<div>``'s CSS class can be set by the `cssclass` option."),
- 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
- 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
- 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
- 'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'),
- 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
- 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
- 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
- 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'),
-}
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py b/venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py
deleted file mode 100644
index 9ce4ebc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py
+++ /dev/null
@@ -1,108 +0,0 @@
-"""
- pygments.formatters.bbcode
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- BBcode formatter.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt
-
-__all__ = ['BBCodeFormatter']
-
-
-class BBCodeFormatter(Formatter):
- """
- Format tokens with BBcodes. These formatting codes are used by many
- bulletin boards, so you can highlight your sourcecode with pygments before
- posting it there.
-
- This formatter has no support for background colors and borders, as there
- are no common BBcode tags for that.
-
- Some board systems (e.g. phpBB) don't support colors in their [code] tag,
- so you can't use the highlighting together with that tag.
- Text in a [code] tag is usually shown with a monospace font (which this
- formatter can do with the ``monofont`` option), and spaces (which you
- need for indentation) are not removed.
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `codetag`
- If set to true, put the output into ``[code]`` tags (default:
- ``false``)
-
- `monofont`
- If set to true, add a tag to show the code with a monospace font
- (default: ``false``).
- """
- name = 'BBCode'
- aliases = ['bbcode', 'bb']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self._code = get_bool_opt(options, 'codetag', False)
- self._mono = get_bool_opt(options, 'monofont', False)
-
- self.styles = {}
- self._make_styles()
-
- def _make_styles(self):
- for ttype, ndef in self.style:
- start = end = ''
- if ndef['color']:
- start += '[color=#%s]' % ndef['color']
- end = '[/color]' + end
- if ndef['bold']:
- start += '[b]'
- end = '[/b]' + end
- if ndef['italic']:
- start += '[i]'
- end = '[/i]' + end
- if ndef['underline']:
- start += '[u]'
- end = '[/u]' + end
- # there are no common BBcodes for background-color and border
-
- self.styles[ttype] = start, end
-
- def format_unencoded(self, tokensource, outfile):
- if self._code:
- outfile.write('[code]')
- if self._mono:
- outfile.write('[font=monospace]')
-
- lastval = ''
- lasttype = None
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- if ttype == lasttype:
- lastval += value
- else:
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
- lastval = value
- lasttype = ttype
-
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
-
- if self._mono:
- outfile.write('[/font]')
- if self._code:
- outfile.write('[/code]')
- if self._code or self._mono:
- outfile.write('\n')
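A usage sketch for the BBCodeFormatter removed above, with both optional wrapper tags enabled::

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import BBCodeFormatter

    bb = highlight('print("hi")', PythonLexer(),
                   BBCodeFormatter(codetag=True, monofont=True))
    print(bb)   # [code][font=monospace]...[/font][/code]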
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/groff.py b/venv/lib/python3.11/site-packages/pygments/formatters/groff.py
deleted file mode 100644
index 687fd54..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/groff.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""
- pygments.formatters.groff
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for groff output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import math
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt
-
-__all__ = ['GroffFormatter']
-
-
-class GroffFormatter(Formatter):
- """
- Format tokens with groff escapes to change their color and font style.
-
- .. versionadded:: 2.11
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `monospaced`
- If set to true, monospace font will be used (default: ``true``).
-
- `linenos`
- If set to true, print the line numbers (default: ``false``).
-
- `wrap`
- Wrap lines to the specified number of characters. Disabled if set to 0
- (default: ``0``).
- """
-
- name = 'groff'
- aliases = ['groff','troff','roff']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.monospaced = get_bool_opt(options, 'monospaced', True)
- self.linenos = get_bool_opt(options, 'linenos', False)
- self._lineno = 0
- self.wrap = get_int_opt(options, 'wrap', 0)
- self._linelen = 0
-
- self.styles = {}
- self._make_styles()
-
-
- def _make_styles(self):
- regular = '\\f[CR]' if self.monospaced else '\\f[R]'
- bold = '\\f[CB]' if self.monospaced else '\\f[B]'
- italic = '\\f[CI]' if self.monospaced else '\\f[I]'
-
- for ttype, ndef in self.style:
- start = end = ''
- if ndef['color']:
- start += '\\m[%s]' % ndef['color']
- end = '\\m[]' + end
- if ndef['bold']:
- start += bold
- end = regular + end
- if ndef['italic']:
- start += italic
- end = regular + end
- if ndef['bgcolor']:
- start += '\\M[%s]' % ndef['bgcolor']
- end = '\\M[]' + end
-
- self.styles[ttype] = start, end
-
-
- def _define_colors(self, outfile):
- colors = set()
- for _, ndef in self.style:
- if ndef['color'] is not None:
- colors.add(ndef['color'])
-
- for color in sorted(colors):
- outfile.write('.defcolor ' + color + ' rgb #' + color + '\n')
-
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s% 4d " % (self._lineno != 1 and '\n' or '', self._lineno))
-
-
- def _wrap_line(self, line):
- length = len(line.rstrip('\n'))
- space = ' ' if self.linenos else ''
- newline = ''
-
- if length > self.wrap:
- for i in range(0, math.floor(length / self.wrap)):
- chunk = line[i*self.wrap:i*self.wrap+self.wrap]
- newline += (chunk + '\n' + space)
- remainder = length % self.wrap
- if remainder > 0:
- newline += line[-remainder-1:]
- self._linelen = remainder
- elif self._linelen + length > self.wrap:
- newline = ('\n' + space) + line
- self._linelen = length
- else:
- newline = line
- self._linelen += length
-
- return newline
-
-
- def _escape_chars(self, text):
- text = text.replace('\\', '\\[u005C]'). \
- replace('.', '\\[char46]'). \
- replace('\'', '\\[u0027]'). \
- replace('`', '\\[u0060]'). \
- replace('~', '\\[u007E]')
- copy = text
-
- for char in copy:
- if len(char) != len(char.encode()):
- uni = char.encode('unicode_escape') \
- .decode()[1:] \
- .replace('x', 'u00') \
- .upper()
- text = text.replace(char, '\\[u' + uni[1:] + ']')
-
- return text
-
-
- def format_unencoded(self, tokensource, outfile):
- self._define_colors(outfile)
-
- outfile.write('.nf\n\\f[CR]\n')
-
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- start, end = self.styles[ttype]
-
- for line in value.splitlines(True):
- if self.wrap > 0:
- line = self._wrap_line(line)
-
- if start and end:
- text = self._escape_chars(line.rstrip('\n'))
- if text != '':
- outfile.write(''.join((start, text, end)))
- else:
- outfile.write(self._escape_chars(line.rstrip('\n')))
-
- if line.endswith('\n'):
- if self.linenos:
- self._write_lineno(outfile)
- self._linelen = 0
- else:
- outfile.write('\n')
- self._linelen = 0
-
- outfile.write('\n.fi')
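A usage sketch for the GroffFormatter removed above; the output is groff source that can be embedded in, e.g., a man page::

    from pygments import highlight
    from pygments.lexers import CLexer
    from pygments.formatters import GroffFormatter

    src = 'int main(void) { return 0; }\n'
    groff_out = highlight(src, CLexer(), GroffFormatter(linenos=True, wrap=72))
    print(groff_out)   # starts with .defcolor/.nf directives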
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/html.py b/venv/lib/python3.11/site-packages/pygments/formatters/html.py
deleted file mode 100644
index df2469e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/html.py
+++ /dev/null
@@ -1,990 +0,0 @@
-"""
- pygments.formatters.html
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for HTML output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import functools
-import os
-import sys
-import os.path
-from io import StringIO
-
-from pygments.formatter import Formatter
-from pygments.token import Token, Text, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt
-
-try:
- import ctags
-except ImportError:
- ctags = None
-
-__all__ = ['HtmlFormatter']
-
-
-_escape_html_table = {
- ord('&'): '&amp;',
- ord('<'): '&lt;',
- ord('>'): '&gt;',
- ord('"'): '&quot;',
- ord("'"): '&#39;',
-}
-
-
-def escape_html(text, table=_escape_html_table):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.translate(table)
-
-
-def webify(color):
- if color.startswith('calc') or color.startswith('var'):
- return color
- else:
- return '#' + color
-
-
-def _get_ttype_class(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = '-' + ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-CSSFILE_TEMPLATE = '''\
-/*
-generated by Pygments <https://pygments.org/>
-Copyright 2006-2023 by the Pygments team.
-Licensed under the BSD license, see LICENSE for details.
-*/
-%(styledefs)s
-'''
-
-DOC_HEADER = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
-<!--
-generated by Pygments <https://pygments.org/>
-Copyright 2006-2023 by the Pygments team.
-Licensed under the BSD license, see LICENSE for details.
--->
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <style type="text/css">
-''' + CSSFILE_TEMPLATE + '''
- </style>
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_HEADER_EXTERNALCSS = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
-
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <link rel="stylesheet" href="%(cssfile)s" type="text/css">
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_FOOTER = '''\
-</body>
-</html>
-'''
-
-
-class HtmlFormatter(Formatter):
- r"""
- Format tokens as HTML 4 ``<span>`` tags. By default, the content is enclosed
- in a ``<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option).
- The ``<div>``'s CSS class can be set by the `cssclass` option.
-
- If the `linenos` option is set to ``"table"``, the ``<pre>`` is
- additionally wrapped inside a ``<table>`` which has one row and two
- cells: one containing the line numbers and one containing the code.
- Example:
-
- .. sourcecode:: html
-
- <div class="highlight" >
- <table><tr>
- <td class="linenos" title="click to toggle"
- onclick="with (this.firstChild.style)
- { display = (display == '') ? 'none' : '' }">
- <pre>1
- 2</pre>
- </td>
- <td class="code">
- <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
- <span class="Ke">pass</span>
- </pre>
- </td>
- </tr></table></div>
-
- (whitespace added to improve clarity).
-
- A list of lines can be specified using the `hl_lines` option to make these
- lines highlighted (as of Pygments 0.11).
-
- With the `full` option, a complete HTML 4 document is output, including
- the style definitions inside a ``<style>`` tag, or in a separate file if
- the `cssfile` option is given.
-
- When `tagsfile` is set to the path of a ctags index file, it is used to
- generate hyperlinks from names to their definition. You must enable
- `lineanchors` and run ctags with the `-n` option for this to work. The
- `python-ctags` module from PyPI must be installed to use this feature;
- otherwise a `RuntimeError` will be raised.
-
- The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
- containing CSS rules for the CSS classes used by the formatter. The
- argument `arg` can be used to specify additional CSS selectors that
- are prepended to the classes. A call `fmter.get_style_defs('td .code')`
- would result in the following CSS classes:
-
- .. sourcecode:: css
-
- td .code .kw { font-weight: bold; color: #00FF00 }
- td .code .cm { color: #999999 }
- ...
-
- If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
- `get_style_defs()` method to request multiple prefixes for the tokens:
-
- .. sourcecode:: python
-
- formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
-
- The output would then look like this:
-
- .. sourcecode:: css
-
- div.syntax pre .kw,
- pre.syntax .kw { font-weight: bold; color: #00FF00 }
- div.syntax pre .cm,
- pre.syntax .cm { color: #999999 }
- ...
-
- Additional options accepted:
-
- `nowrap`
- If set to ``True``, don't add a ``<pre>`` and a ``<div>`` tag
- around the tokens. This disables most other options (default: ``False``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``). This option has no effect if the `cssfile`
- and `noclobber_cssfile` option are given and the file specified in
- `cssfile` exists.
-
- `noclasses`
- If set to true, token ``<span>`` tags (as well as line number elements)
- will not use CSS classes, but inline styles. This is not recommended
- for larger pieces of code since it increases output size by quite a bit
- (default: ``False``).
-
- `classprefix`
- Since the token types use relatively short class names, they may clash
- with some of your own class names. In this case you can use the
- `classprefix` option to give a string to prepend to all Pygments-generated
- CSS class names for token types.
- Note that this option also affects the output of `get_style_defs()`.
-
- `cssclass`
- CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
- If you set this option, the default selector for `get_style_defs()`
- will be this class.
-
- .. versionadded:: 0.9
- If you select the ``'table'`` line numbers, the wrapping table will
- have a CSS class of this string plus ``'table'``, the default is
- accordingly ``'highlighttable'``.
-
- `cssstyles`
- Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
-
- `prestyles`
- Inline CSS styles for the ``<pre>`` tag (default: ``''``).
-
- .. versionadded:: 0.11
-
- `cssfile`
- If the `full` option is true and this option is given, it must be the
- name of an external file. If the filename does not include an absolute
- path, the file's path will be assumed to be relative to the main output
- file's path, if the latter can be found. The stylesheet is then written
- to this file instead of the HTML file.
-
- .. versionadded:: 0.6
-
- `noclobber_cssfile`
- If `cssfile` is given and the specified file exists, the css file will
- not be overwritten. This allows the use of the `full` option in
- combination with a user specified css file. Default is ``False``.
-
- .. versionadded:: 1.1
-
- `linenos`
- If set to ``'table'``, output line numbers as a table with two cells,
- one containing the line numbers, the other the whole code. This is
- copy-and-paste-friendly, but may cause alignment problems with some
- browsers or fonts. If set to ``'inline'``, the line numbers will be
- integrated in the ``<pre>`` tag that contains the code (that setting
- is *new in Pygments 0.8*).
-
- For compatibility with Pygments 0.7 and earlier, every true value
- except ``'inline'`` means the same as ``'table'`` (in particular, that
- means also ``True``).
-
- The default value is ``False``, which means no line numbers at all.
-
- **Note:** with the default ("table") line number mechanism, the line
- numbers and code can have different line heights in Internet Explorer
- unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
- CSS property (you get the default line spacing with ``line-height:
- 125%``).
-
- `hl_lines`
- Specify a list of lines to be highlighted. The line numbers are always
- relative to the input (i.e. the first line is line 1) and are
- independent of `linenostart`.
-
- .. versionadded:: 0.11
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `linenospecial`
- If set to a number n > 0, every nth line number is given the CSS
- class ``"special"`` (default: ``0``).
-
- `nobackground`
- If set to ``True``, the formatter won't output the background color
- for the wrapping element (this automatically defaults to ``False``
- when there is no wrapping element [e.g. no argument for the
- `get_style_defs` method given]) (default: ``False``).
-
- .. versionadded:: 0.6
-
- `lineseparator`
- This string is output between lines of code. It defaults to ``"\n"``,
- which is enough to break a line inside ``<pre>`` tags, but you can
- e.g. set it to ``"<br>"`` to get HTML line breaks.
-
- .. versionadded:: 0.7
-
- `lineanchors`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in an anchor tag with an ``id`` (and `name`) of ``foo-linenumber``.
- This allows easy linking to certain lines.
-
- .. versionadded:: 0.9
-
- `linespans`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in a span tag with an ``id`` of ``foo-linenumber``.
- This allows easy access to lines via javascript.
-
- .. versionadded:: 1.6
-
- `anchorlinenos`
- If set to `True`, will wrap line numbers in <a> tags. Used in
- combination with `linenos` and `lineanchors`.
-
- `tagsfile`
- If set to the path of a ctags file, wrap names in anchor tags that
- link to their definitions. `lineanchors` should be used, and the
- tags file should specify line numbers (see the `-n` option to ctags).
- The tags file is assumed to be encoded in UTF-8.
-
- .. versionadded:: 1.6
-
- `tagurlformat`
- A string formatting pattern used to generate links to ctags definitions.
- Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
- Defaults to an empty string, resulting in just `#prefix-number` links.
-
- .. versionadded:: 1.6
-
- `filename`
- A string used to generate a filename when rendering ``<pre>`` blocks,
- for example if displaying source code. If `linenos` is set to
- ``'table'`` then the filename will be rendered in an initial row
- containing a single `<th>` which spans both columns.
-
- .. versionadded:: 2.1
-
- `wrapcode`
- Wrap the code inside ``<pre>`` blocks using ``<code>``, as recommended
- by the HTML5 specification.
-
- .. versionadded:: 2.4
-
- `debug_token_types`
- Add ``title`` attributes to all token ``<span>`` tags that show the
- name of the token.
-
- .. versionadded:: 2.10
-
-
- **Subclassing the HTML formatter**
-
- .. versionadded:: 0.7
-
- The HTML formatter is now built in a way that allows easy subclassing, thus
- customizing the output HTML code. The `format()` method calls
- `self._format_lines()` which returns a generator that yields tuples of ``(1,
- line)``, where the ``1`` indicates that the ``line`` is a line of the
- formatted source code.
-
- If the `nowrap` option is set, the generator is simply iterated over and the
- resulting HTML is output.
-
- Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
- other generators. These may add some HTML code to the one generated by
- `_format_lines()`, either by modifying the lines generated by the latter,
- then yielding them again with ``(1, line)``, and/or by yielding other HTML
- code before or after the lines, with ``(0, html)``. The distinction between
- source lines and other code makes it possible to wrap the generator multiple
- times.
-
- The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
-
- A custom `HtmlFormatter` subclass could look like this:
-
- .. sourcecode:: python
-
- class CodeHtmlFormatter(HtmlFormatter):
-
- def wrap(self, source, *, include_div):
- return self._wrap_code(source)
-
- def _wrap_code(self, source):
- yield 0, '<code>'
- for i, t in source:
- if i == 1:
- # it's a line of formatted code
- t += '<br>'
- yield i, t
- yield 0, '</code>'
-
- This results in wrapping the formatted lines with a ``<code>`` tag, where the
- source lines are broken using ``<br>`` tags.
-
- After calling `wrap()`, the `format()` method also adds the "line numbers"
- and/or "full document" wrappers if the respective options are set. Then, all
- HTML yielded by the wrapped generator is output.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.title = self._decodeifneeded(self.title)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.noclasses = get_bool_opt(options, 'noclasses', False)
- self.classprefix = options.get('classprefix', '')
- self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
- self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
- self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
- self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
- self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
- self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
- self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
- self.filename = self._decodeifneeded(options.get('filename', ''))
- self.wrapcode = get_bool_opt(options, 'wrapcode', False)
- self.span_element_openers = {}
- self.debug_token_types = get_bool_opt(options, 'debug_token_types', False)
-
- if self.tagsfile:
- if not ctags:
- raise RuntimeError('The "ctags" package must to be installed '
- 'to be able to use the "tagsfile" feature.')
- self._ctags = ctags.CTags(self.tagsfile)
-
- linenos = options.get('linenos', False)
- if linenos == 'inline':
- self.linenos = 2
- elif linenos:
- # compatibility with <= 0.7
- self.linenos = 1
- else:
- self.linenos = 0
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
- self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.lineseparator = options.get('lineseparator', '\n')
- self.lineanchors = options.get('lineanchors', '')
- self.linespans = options.get('linespans', '')
- self.anchorlinenos = get_bool_opt(options, 'anchorlinenos', False)
- self.hl_lines = set()
- for lineno in get_list_opt(options, 'hl_lines', []):
- try:
- self.hl_lines.add(int(lineno))
- except ValueError:
- pass
-
- self._create_stylesheet()
-
- def _get_css_class(self, ttype):
- """Return the css class of this token type prefixed with
- the classprefix option."""
- ttypeclass = _get_ttype_class(ttype)
- if ttypeclass:
- return self.classprefix + ttypeclass
- return ''
-
- def _get_css_classes(self, ttype):
- """Return the CSS classes of this token type prefixed with the classprefix option."""
- cls = self._get_css_class(ttype)
- while ttype not in STANDARD_TYPES:
- ttype = ttype.parent
- cls = self._get_css_class(ttype) + ' ' + cls
- return cls or ''
-
- def _get_css_inline_styles(self, ttype):
- """Return the inline CSS styles for this token type."""
- cclass = self.ttype2class.get(ttype)
- while cclass is None:
- ttype = ttype.parent
- cclass = self.ttype2class.get(ttype)
- return cclass or ''
-
- def _create_stylesheet(self):
- t2c = self.ttype2class = {Token: ''}
- c2s = self.class2style = {}
- for ttype, ndef in self.style:
- name = self._get_css_class(ttype)
- style = ''
- if ndef['color']:
- style += 'color: %s; ' % webify(ndef['color'])
- if ndef['bold']:
- style += 'font-weight: bold; '
- if ndef['italic']:
- style += 'font-style: italic; '
- if ndef['underline']:
- style += 'text-decoration: underline; '
- if ndef['bgcolor']:
- style += 'background-color: %s; ' % webify(ndef['bgcolor'])
- if ndef['border']:
- style += 'border: 1px solid %s; ' % webify(ndef['border'])
- if style:
- t2c[ttype] = name
- # save len(ttype) to enable ordering the styles by
- # hierarchy (necessary for CSS cascading rules!)
- c2s[name] = (style[:-2], ttype, len(ttype))
-
- def get_style_defs(self, arg=None):
- """
- Return CSS style definitions for the classes produced by the current
- highlighting style. ``arg`` can be a string or list of selectors to
- insert before the token type classes.
- """
- style_lines = []
-
- style_lines.extend(self.get_linenos_style_defs())
- style_lines.extend(self.get_background_style_defs(arg))
- style_lines.extend(self.get_token_style_defs(arg))
-
- return '\n'.join(style_lines)
-
- def get_token_style_defs(self, arg=None):
- prefix = self.get_css_prefix(arg)
-
- styles = [
- (level, ttype, cls, style)
- for cls, (style, ttype, level) in self.class2style.items()
- if cls and style
- ]
- styles.sort()
-
- lines = [
- '%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
- for (level, ttype, cls, style) in styles
- ]
-
- return lines
-
- def get_background_style_defs(self, arg=None):
- prefix = self.get_css_prefix(arg)
- bg_color = self.style.background_color
- hl_color = self.style.highlight_color
-
- lines = []
-
- if arg and not self.nobackground and bg_color is not None:
- text_style = ''
- if Text in self.ttype2class:
- text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
- lines.insert(
- 0, '%s{ background: %s;%s }' % (
- prefix(''), bg_color, text_style
- )
- )
- if hl_color is not None:
- lines.insert(
- 0, '%s { background-color: %s }' % (prefix('hll'), hl_color)
- )
-
- return lines
-
- def get_linenos_style_defs(self):
- lines = [
- 'pre { %s }' % self._pre_style,
- 'td.linenos .normal { %s }' % self._linenos_style,
- 'span.linenos { %s }' % self._linenos_style,
- 'td.linenos .special { %s }' % self._linenos_special_style,
- 'span.linenos.special { %s }' % self._linenos_special_style,
- ]
-
- return lines
-
- def get_css_prefix(self, arg):
- if arg is None:
- arg = ('cssclass' in self.options and '.'+self.cssclass or '')
- if isinstance(arg, str):
- args = [arg]
- else:
- args = list(arg)
-
- def prefix(cls):
- if cls:
- cls = '.' + cls
- tmp = []
- for arg in args:
- tmp.append((arg and arg + ' ' or '') + cls)
- return ', '.join(tmp)
-
- return prefix
-
- @property
- def _pre_style(self):
- return 'line-height: 125%;'
-
- @property
- def _linenos_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_color,
- self.style.line_number_background_color
- )
-
- @property
- def _linenos_special_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_special_color,
- self.style.line_number_special_background_color
- )
-
- def _decodeifneeded(self, value):
- if isinstance(value, bytes):
- if self.encoding:
- return value.decode(self.encoding)
- return value.decode()
- return value
-
- def _wrap_full(self, inner, outfile):
- if self.cssfile:
- if os.path.isabs(self.cssfile):
- # it's an absolute filename
- cssfilename = self.cssfile
- else:
- try:
- filename = outfile.name
- if not filename or filename[0] == '<':
- # pseudo files, e.g. name == '<fdopen>'
- raise AttributeError
- cssfilename = os.path.join(os.path.dirname(filename),
- self.cssfile)
- except AttributeError:
- print('Note: Cannot determine output file name, '
- 'using current directory as base for the CSS file name',
- file=sys.stderr)
- cssfilename = self.cssfile
- # write CSS file only if noclobber_cssfile isn't given as an option.
- try:
- if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
- with open(cssfilename, "w", encoding="utf-8") as cf:
- cf.write(CSSFILE_TEMPLATE %
- {'styledefs': self.get_style_defs('body')})
- except OSError as err:
- err.strerror = 'Error writing CSS file: ' + err.strerror
- raise
-
- yield 0, (DOC_HEADER_EXTERNALCSS %
- dict(title=self.title,
- cssfile=self.cssfile,
- encoding=self.encoding))
- else:
- yield 0, (DOC_HEADER %
- dict(title=self.title,
- styledefs=self.get_style_defs('body'),
- encoding=self.encoding))
-
- yield from inner
- yield 0, DOC_FOOTER
-
- def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO()
- lncount = 0
- for t, line in inner:
- if t:
- lncount += 1
- dummyoutfile.write(line)
-
- fl = self.linenostart
- mw = len(str(lncount + fl - 1))
- sp = self.linenospecial
- st = self.linenostep
- anchor_name = self.lineanchors or self.linespans
- aln = self.anchorlinenos
- nocls = self.noclasses
-
- lines = []
-
- for i in range(fl, fl+lncount):
- print_line = i % st == 0
- special_line = sp and i % sp == 0
-
- if print_line:
- line = '%*d' % (mw, i)
- if aln:
- line = '<a href="#%s-%d">%s</a>' % (anchor_name, i, line)
- else:
- line = ' ' * mw
-
- if nocls:
- if special_line:
- style = ' style="%s"' % self._linenos_special_style
- else:
- style = ' style="%s"' % self._linenos_style
- else:
- if special_line:
- style = ' class="special"'
- else:
- style = ' class="normal"'
-
- if style:
- line = '<span%s>%s</span>' % (style, line)
-
- lines.append(line)
-
- ls = '\n'.join(lines)
-
- # If a filename was specified, we can't put it into the code table as it
- # would misalign the line numbers. Hence we emit a separate row for it.
- filename_tr = ""
- if self.filename:
- filename_tr = (
- '<tr><th colspan="2" class="filename">'
- '<span class="filename">' + self.filename + '</span>'
- '</th></tr>')
-
- # in case you wonder about the seemingly redundant <div> here: since the
- # content in the other cell also is wrapped in a div, some browsers in
- # some configurations seem to mess up the formatting...
- yield 0, (f'<table class="{self.cssclass}table">' + filename_tr +
- '<tr><td class="linenos"><div class="linenodiv"><pre>' +
- ls + '</pre></div></td><td class="code">')
- yield 0, '<div>'
- yield 0, dummyoutfile.getvalue()
- yield 0, '</div>'
- yield 0, '</td></tr></table>'
-
-
- def _wrap_inlinelinenos(self, inner):
- # need a list of lines since we need the width of a single number :(
- inner_lines = list(inner)
- sp = self.linenospecial
- st = self.linenostep
- num = self.linenostart
- mw = len(str(len(inner_lines) + num - 1))
- anchor_name = self.lineanchors or self.linespans
- aln = self.anchorlinenos
- nocls = self.noclasses
-
- for _, inner_line in inner_lines:
- print_line = num % st == 0
- special_line = sp and num % sp == 0
-
- if print_line:
- line = '%*d' % (mw, num)
- else:
- line = ' ' * mw
-
- if nocls:
- if special_line:
- style = ' style="%s"' % self._linenos_special_style
- else:
- style = ' style="%s"' % self._linenos_style
- else:
- if special_line:
- style = ' class="linenos special"'
- else:
- style = ' class="linenos"'
-
- if style:
- linenos = '<span%s>%s</span>' % (style, line)
- else:
- linenos = line
-
- if aln:
- yield 1, ('<a href="#%s-%d">%s</a>' % (anchor_name, num, linenos) +
- inner_line)
- else:
- yield 1, linenos + inner_line
- num += 1
-
- def _wrap_lineanchors(self, inner):
- s = self.lineanchors
- # subtract 1 since we have to increment i *before* yielding
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
- href = "" if self.linenos else ' href="#%s-%d"' % (s, i)
- yield 1, '<a id="%s-%d" name="%s-%d"%s></a>' % (s, i, s, i, href) + line
- else:
- yield 0, line
-
- def _wrap_linespans(self, inner):
- s = self.linespans
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
- yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
- else:
- yield 0, line
-
- def _wrap_div(self, inner):
- style = []
- if (self.noclasses and not self.nobackground and
- self.style.background_color is not None):
- style.append('background: %s' % (self.style.background_color,))
- if self.cssstyles:
- style.append(self.cssstyles)
- style = '; '.join(style)
-
- yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
- (style and (' style="%s"' % style)) + '>')
- yield from inner
- yield 0, '</div>\n'
-
- def _wrap_pre(self, inner):
- style = []
- if self.prestyles:
- style.append(self.prestyles)
- if self.noclasses:
- style.append(self._pre_style)
- style = '; '.join(style)
-
- if self.filename and self.linenos != 1:
- yield 0, ('<span class="filename">' + self.filename + '</span>')
-
- # the empty span here is to keep leading empty lines from being
- # ignored by HTML parsers
- yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
- yield from inner
- yield 0, '</pre>'
-
- def _wrap_code(self, inner):
- yield 0, '<code>'
- yield from inner
- yield 0, '</code>'
-
- @functools.lru_cache(maxsize=100)
- def _translate_parts(self, value):
- """HTML-escape a value and split it by newlines."""
- return value.translate(_escape_html_table).split('\n')
-
- def _format_lines(self, tokensource):
- """
- Just format the tokens, without any wrapping tags.
- Yield individual lines.
- """
- nocls = self.noclasses
- lsep = self.lineseparator
- tagsfile = self.tagsfile
-
- lspan = ''
- line = []
- for ttype, value in tokensource:
- try:
- cspan = self.span_element_openers[ttype]
- except KeyError:
- title = ' title="%s"' % '.'.join(ttype) if self.debug_token_types else ''
- if nocls:
- css_style = self._get_css_inline_styles(ttype)
- if css_style:
- css_style = self.class2style[css_style][0]
- cspan = '<span style="%s"%s>' % (css_style, title)
- else:
- cspan = ''
- else:
- css_class = self._get_css_classes(ttype)
- if css_class:
- cspan = '<span class="%s"%s>' % (css_class, title)
- else:
- cspan = ''
- self.span_element_openers[ttype] = cspan
-
- parts = self._translate_parts(value)
-
- if tagsfile and ttype in Token.Name:
- filename, linenumber = self._lookup_ctag(value)
- if linenumber:
- base, filename = os.path.split(filename)
- if base:
- base += '/'
- filename, extension = os.path.splitext(filename)
- url = self.tagurlformat % {'path': base, 'fname': filename,
- 'fext': extension}
- parts[0] = "<a href=\"%s#%s-%d\">%s" % \
- (url, self.lineanchors, linenumber, parts[0])
- parts[-1] = parts[-1] + "</a>"
-
- # for all but the last line
- for part in parts[:-1]:
- if line:
- # Also check for part being non-empty, so we avoid creating
- # empty <span> tags
- if lspan != cspan and part:
- line.extend(((lspan and '</span>'), cspan, part,
- (cspan and '</span>'), lsep))
- else: # both are the same, or the current part was empty
- line.extend((part, (lspan and '</span>'), lsep))
- yield 1, ''.join(line)
- line = []
- elif part:
- yield 1, ''.join((cspan, part, (cspan and '</span>'), lsep))
- else:
- yield 1, lsep
- # for the last line
- if line and parts[-1]:
- if lspan != cspan:
- line.extend(((lspan and '</span>'), cspan, parts[-1]))
- lspan = cspan
- else:
- line.append(parts[-1])
- elif parts[-1]:
- line = [cspan, parts[-1]]
- lspan = cspan
- # else we neither have to open a new span nor set lspan
-
- if line:
- line.extend(((lspan and '</span>'), lsep))
- yield 1, ''.join(line)
-
- def _lookup_ctag(self, token):
- entry = ctags.TagEntry()
- if self._ctags.find(entry, token.encode(), 0):
- return entry['file'].decode(), entry['lineNumber']
- else:
- return None, None
-
- def _highlight_lines(self, tokensource):
- """
- Highlight the lines specified in the `hl_lines` option by
- post-processing the token stream coming from `_format_lines`.
- """
- hls = self.hl_lines
-
- for i, (t, value) in enumerate(tokensource):
- if t != 1:
- yield t, value
- if i + 1 in hls: # i + 1 because Python indexes start at 0
- if self.noclasses:
- style = ''
- if self.style.highlight_color is not None:
- style = (' style="background-color: %s"' %
- (self.style.highlight_color,))
- yield 1, '<span%s>%s</span>' % (style, value)
- else:
- yield 1, '<span class="hll">%s</span>' % value
- else:
- yield 1, value
-
- def wrap(self, source):
- """
- Wrap the ``source``, which is a generator yielding
- individual lines, in custom generators. See docstring
- for `format`. Can be overridden.
- """
-
- output = source
- if self.wrapcode:
- output = self._wrap_code(output)
-
- output = self._wrap_pre(output)
-
- return output
-
- def format_unencoded(self, tokensource, outfile):
- """
- The formatting process uses several nested generators; which of
- them are used is determined by the user's options.
-
- Each generator should take at least one argument, ``inner``,
- and wrap the pieces of text generated by this.
-
- Always yield 2-tuples: (code, text). If "code" is 1, the text
- is part of the original tokensource being highlighted, if it's
- 0, the text is some piece of wrapping. This makes it possible to
- use several different wrappers that process the original source
- linewise, e.g. line number generators.
- """
- source = self._format_lines(tokensource)
-
- # As a special case, we wrap line numbers before line highlighting
- # so the line numbers get wrapped in the highlighting tag.
- if not self.nowrap and self.linenos == 2:
- source = self._wrap_inlinelinenos(source)
-
- if self.hl_lines:
- source = self._highlight_lines(source)
-
- if not self.nowrap:
- if self.lineanchors:
- source = self._wrap_lineanchors(source)
- if self.linespans:
- source = self._wrap_linespans(source)
- source = self.wrap(source)
- if self.linenos == 1:
- source = self._wrap_tablelinenos(source)
- source = self._wrap_div(source)
- if self.full:
- source = self._wrap_full(source, outfile)
-
- for t, piece in source:
- outfile.write(piece)
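
The `format_unencoded` docstring above describes the wrapper-generator protocol: every stage yields ``(code, text)`` tuples, where ``code == 1`` marks highlighted source lines and ``code == 0`` marks surrounding markup. A minimal sketch of a custom wrapper under those rules, assuming Pygments is installed; the ``DetailsHtmlFormatter`` name and the ``<details>`` markup are illustrative, not part of the library:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    class DetailsHtmlFormatter(HtmlFormatter):
        """Hypothetical subclass that adds one more wrapper element."""

        def _wrap_details(self, inner):
            # code == 0: our own markup; code == 1: lines passed through
            # unchanged from the inner generators.
            yield 0, '<details open><summary>highlighted code</summary>'
            yield from inner
            yield 0, '</details>'

        def wrap(self, source):
            # Keep the default <pre> (and optional <code>) wrapping,
            # then add the extra element around it.
            return self._wrap_details(super().wrap(source))

    print(highlight('print("hi")', PythonLexer(), DetailsHtmlFormatter()))
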
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/img.py b/venv/lib/python3.11/site-packages/pygments/formatters/img.py
deleted file mode 100644
index dcf09da..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/img.py
+++ /dev/null
@@ -1,684 +0,0 @@
-"""
- pygments.formatters.img
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for Pixmap output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import os
-import sys
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- get_choice_opt
-
-import subprocess
-
-# Import this carefully
-try:
- from PIL import Image, ImageDraw, ImageFont
- pil_available = True
-except ImportError:
- pil_available = False
-
-try:
- import _winreg
-except ImportError:
- try:
- import winreg as _winreg
- except ImportError:
- _winreg = None
-
-__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
- 'BmpImageFormatter']
-
-
-# For some unknown reason every font calls it something different
-STYLES = {
- 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
- 'ITALIC': ['Oblique', 'Italic'],
- 'BOLD': ['Bold'],
- 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
-}
-
-# A sane default for modern systems
-DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
-DEFAULT_FONT_NAME_WIN = 'Courier New'
-DEFAULT_FONT_NAME_MAC = 'Menlo'
-
-
-class PilNotAvailable(ImportError):
- """Raised when the Python Imaging Library is not available."""
-
-
-class FontNotFound(Exception):
- """Raised when no usable font matching the requested name is found."""
-
-
-class FontManager:
- """
- Manages a set of fonts: normal, italic, bold, etc...
- """
-
- def __init__(self, font_name, font_size=14):
- self.font_name = font_name
- self.font_size = font_size
- self.fonts = {}
- self.encoding = None
- self.variable = False
- if hasattr(font_name, 'read') or os.path.isfile(font_name):
- font = ImageFont.truetype(font_name, self.font_size)
- self.variable = True
- for style in STYLES:
- self.fonts[style] = font
-
- return
-
- if sys.platform.startswith('win'):
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_WIN
- self._create_win()
- elif sys.platform.startswith('darwin'):
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_MAC
- self._create_mac()
- else:
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_NIX
- self._create_nix()
-
- def _get_nix_font_path(self, name, style):
- proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'],
- stdout=subprocess.PIPE, stderr=None)
- stdout, _ = proc.communicate()
- if proc.returncode == 0:
- lines = stdout.splitlines()
- for line in lines:
- if line.startswith(b'Fontconfig warning:'):
- continue
- path = line.decode().strip().strip(':')
- if path:
- return path
- return None
-
- def _create_nix(self):
- for name in STYLES['NORMAL']:
- path = self._get_nix_font_path(self.font_name, name)
- if path is not None:
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- break
- else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- for stylename in STYLES[style]:
- path = self._get_nix_font_path(self.font_name, stylename)
- if path is not None:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- break
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
-
- def _get_mac_font_path(self, font_map, name, style):
- return font_map.get((name + ' ' + style).strip().lower())
-
- def _create_mac(self):
- font_map = {}
- for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
- '/Library/Fonts/', '/System/Library/Fonts/'):
- font_map.update(
- (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
- for f in os.listdir(font_dir)
- if f.lower().endswith(('ttf', 'ttc')))
-
- for name in STYLES['NORMAL']:
- path = self._get_mac_font_path(font_map, self.font_name, name)
- if path is not None:
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- break
- else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- for stylename in STYLES[style]:
- path = self._get_mac_font_path(font_map, self.font_name, stylename)
- if path is not None:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- break
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
-
- def _lookup_win(self, key, basename, styles, fail=False):
- for suffix in ('', ' (TrueType)'):
- for style in styles:
- try:
- valname = '%s%s%s' % (basename, style and ' '+style, suffix)
- val, _ = _winreg.QueryValueEx(key, valname)
- return val
- except OSError:
- continue
- else:
- if fail:
- raise FontNotFound('Font %s (%s) not found in registry' %
- (basename, styles[0]))
- return None
-
- def _create_win(self):
- lookuperror = None
- keynames = [ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
- (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Fonts'),
- (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
- (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') ]
- for keyname in keynames:
- try:
- key = _winreg.OpenKey(*keyname)
- try:
- path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- path = self._lookup_win(key, self.font_name, STYLES[style])
- if path:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
- return
- except FontNotFound as err:
- lookuperror = err
- finally:
- _winreg.CloseKey(key)
- except OSError:
- pass
- else:
- # If we get here, we checked all registry keys and had no luck
- # We can be in one of two situations now:
- # * All key lookups failed. In this case lookuperror is None and we
- # will raise a generic error
- # * At least one lookup failed with a FontNotFound error. In this
- # case, we will raise that as a more specific error
- if lookuperror:
- raise lookuperror
- raise FontNotFound('Can\'t open Windows font registry key')
-
- def get_char_size(self):
- """
- Get the character size.
- """
- return self.get_text_size('M')
-
- def get_text_size(self, text):
- """
- Get the text size (width, height).
- """
- font = self.fonts['NORMAL']
- if hasattr(font, 'getbbox'): # Pillow >= 9.2.0
- return font.getbbox(text)[2:4]
- else:
- return font.getsize(text)
-
- def get_font(self, bold, oblique):
- """
- Get the font based on bold and italic flags.
- """
- if bold and oblique:
- if self.variable:
- return self.get_style('BOLDITALIC')
-
- return self.fonts['BOLDITALIC']
- elif bold:
- if self.variable:
- return self.get_style('BOLD')
-
- return self.fonts['BOLD']
- elif oblique:
- if self.variable:
- return self.get_style('ITALIC')
-
- return self.fonts['ITALIC']
- else:
- if self.variable:
- return self.get_style('NORMAL')
-
- return self.fonts['NORMAL']
-
- def get_style(self, style):
- """
- Get the specified style of the font if it is a variable font.
- If not found, return the normal font.
- """
- font = self.fonts[style]
- for style_name in STYLES[style]:
- try:
- font.set_variation_by_name(style_name)
- return font
- except ValueError:
- pass
- except OSError:
- return font
-
- return font
-
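
A short usage sketch for ``FontManager``, assuming Pillow is installed and a ``DejaVu Sans Mono`` font can be found through the platform lookup shown above (fontconfig, the macOS font directories, or the Windows registry); the font name is an assumption, not something the class requires:

.. sourcecode:: python

    from pygments.formatters.img import FontManager

    fm = FontManager('DejaVu Sans Mono', font_size=14)

    # Width/height of an 'M' in pixels; ImageFormatter uses this to lay
    # out its character grid.
    char_w, char_h = fm.get_char_size()

    # Pick the bold-italic variant (falls back to bold or normal if no
    # matching style was located).
    font = fm.get_font(bold=True, oblique=True)
    print(char_w, char_h, font)
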
-
-class ImageFormatter(Formatter):
- """
- Create a PNG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 0.10
-
- Additional options accepted:
-
- `image_format`
- An image format to output to that is recognised by PIL; these include:
-
- * "PNG" (default)
- * "JPEG"
- * "BMP"
- * "GIF"
-
- `line_pad`
- The extra spacing (in pixels) between each line of text.
-
- Default: 2
-
- `font_name`
- The font name to be used as the base font from which others, such as
- bold and italic fonts, will be generated. This really should be a
- monospace font to look sane.
- If a filename or a file-like object is specified, the user must
- provide different styles of the font.
-
- Default: "Courier New" on Windows, "Menlo" on Mac OS, and
- "DejaVu Sans Mono" on \\*nix
-
- `font_size`
- The font size in points to be used.
-
- Default: 14
-
- `image_pad`
- The padding, in pixels, to be used at each edge of the resulting image.
-
- Default: 10
-
- `line_numbers`
- Whether line numbers should be shown: True/False
-
- Default: True
-
- `line_number_start`
- The line number of the first line.
-
- Default: 1
-
- `line_number_step`
- The step used when printing line numbers.
-
- Default: 1
-
- `line_number_bg`
- The background colour (in "#123456" format) of the line number bar, or
- None to use the style background color.
-
- Default: "#eed"
-
- `line_number_fg`
- The text color of the line numbers (in "#123456"-like format).
-
- Default: "#886"
-
- `line_number_chars`
- The number of columns of line numbers allowable in the line number
- margin.
-
- Default: 2
-
- `line_number_bold`
- Whether line numbers will be bold: True/False
-
- Default: False
-
- `line_number_italic`
- Whether line numbers will be italicized: True/False
-
- Default: False
-
- `line_number_separator`
- Whether a line will be drawn between the line number area and the
- source code area: True/False
-
- Default: True
-
- `line_number_pad`
- The horizontal padding (in pixels) between the line number margin, and
- the source code area.
-
- Default: 6
-
- `hl_lines`
- Specify a list of lines to be highlighted.
-
- .. versionadded:: 1.2
-
- Default: empty list
-
- `hl_color`
- Specify the color for highlighting lines.
-
- .. versionadded:: 1.2
-
- Default: highlight color of the selected style
- """
-
- # Required by the pygments mapper
- name = 'img'
- aliases = ['img', 'IMG', 'png']
- filenames = ['*.png']
-
- unicodeoutput = False
-
- default_image_format = 'png'
-
- def __init__(self, **options):
- """
- See the class docstring for explanation of options.
- """
- if not pil_available:
- raise PilNotAvailable(
- 'Python Imaging Library is required for this formatter')
- Formatter.__init__(self, **options)
- self.encoding = 'latin1' # let pygments.format() do the right thing
- # Read the style
- self.styles = dict(self.style)
- if self.style.background_color is None:
- self.background_color = '#fff'
- else:
- self.background_color = self.style.background_color
- # Image options
- self.image_format = get_choice_opt(
- options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
- self.default_image_format, normcase=True)
- self.image_pad = get_int_opt(options, 'image_pad', 10)
- self.line_pad = get_int_opt(options, 'line_pad', 2)
- # The fonts
- fontsize = get_int_opt(options, 'font_size', 14)
- self.fonts = FontManager(options.get('font_name', ''), fontsize)
- self.fontw, self.fonth = self.fonts.get_char_size()
- # Line number options
- self.line_number_fg = options.get('line_number_fg', '#886')
- self.line_number_bg = options.get('line_number_bg', '#eed')
- self.line_number_chars = get_int_opt(options,
- 'line_number_chars', 2)
- self.line_number_bold = get_bool_opt(options,
- 'line_number_bold', False)
- self.line_number_italic = get_bool_opt(options,
- 'line_number_italic', False)
- self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
- self.line_numbers = get_bool_opt(options, 'line_numbers', True)
- self.line_number_separator = get_bool_opt(options,
- 'line_number_separator', True)
- self.line_number_step = get_int_opt(options, 'line_number_step', 1)
- self.line_number_start = get_int_opt(options, 'line_number_start', 1)
- if self.line_numbers:
- self.line_number_width = (self.fontw * self.line_number_chars +
- self.line_number_pad * 2)
- else:
- self.line_number_width = 0
- self.hl_lines = []
- hl_lines_str = get_list_opt(options, 'hl_lines', [])
- for line in hl_lines_str:
- try:
- self.hl_lines.append(int(line))
- except ValueError:
- pass
- self.hl_color = options.get('hl_color',
- self.style.highlight_color) or '#f90'
- self.drawables = []
-
- def get_style_defs(self, arg=''):
- raise NotImplementedError('The -S option is meaningless for the image '
- 'formatter. Use -O style=<stylename> instead.')
-
- def _get_line_height(self):
- """
- Get the height of a line.
- """
- return self.fonth + self.line_pad
-
- def _get_line_y(self, lineno):
- """
- Get the Y coordinate of a line number.
- """
- return lineno * self._get_line_height() + self.image_pad
-
- def _get_char_width(self):
- """
- Get the width of a character.
- """
- return self.fontw
-
- def _get_char_x(self, linelength):
- """
- Get the X coordinate of a character position.
- """
- return linelength + self.image_pad + self.line_number_width
-
- def _get_text_pos(self, linelength, lineno):
- """
- Get the actual position for a character and line position.
- """
- return self._get_char_x(linelength), self._get_line_y(lineno)
-
- def _get_linenumber_pos(self, lineno):
- """
- Get the actual position for the start of a line number.
- """
- return (self.image_pad, self._get_line_y(lineno))
-
- def _get_text_color(self, style):
- """
- Get the correct color for the token from the style.
- """
- if style['color'] is not None:
- fill = '#' + style['color']
- else:
- fill = '#000'
- return fill
-
- def _get_text_bg_color(self, style):
- """
- Get the correct background color for the token from the style.
- """
- if style['bgcolor'] is not None:
- bg_color = '#' + style['bgcolor']
- else:
- bg_color = None
- return bg_color
-
- def _get_style_font(self, style):
- """
- Get the correct font for the style.
- """
- return self.fonts.get_font(style['bold'], style['italic'])
-
- def _get_image_size(self, maxlinelength, maxlineno):
- """
- Get the required image size.
- """
- return (self._get_char_x(maxlinelength) + self.image_pad,
- self._get_line_y(maxlineno + 0) + self.image_pad)
-
- def _draw_linenumber(self, posno, lineno):
- """
- Remember a line number drawable to paint later.
- """
- self._draw_text(
- self._get_linenumber_pos(posno),
- str(lineno).rjust(self.line_number_chars),
- font=self.fonts.get_font(self.line_number_bold,
- self.line_number_italic),
- text_fg=self.line_number_fg,
- text_bg=None,
- )
-
- def _draw_text(self, pos, text, font, text_fg, text_bg):
- """
- Remember a single drawable tuple to paint later.
- """
- self.drawables.append((pos, text, font, text_fg, text_bg))
-
- def _create_drawables(self, tokensource):
- """
- Create drawables for the token content.
- """
- lineno = charno = maxcharno = 0
- maxlinelength = linelength = 0
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- style = self.styles[ttype]
- # TODO: make sure tab expansion happens earlier in the chain. It
- # really ought to be done on the input, as to do it right here is
- # quite complex.
- value = value.expandtabs(4)
- lines = value.splitlines(True)
- # print lines
- for i, line in enumerate(lines):
- temp = line.rstrip('\n')
- if temp:
- self._draw_text(
- self._get_text_pos(linelength, lineno),
- temp,
- font = self._get_style_font(style),
- text_fg = self._get_text_color(style),
- text_bg = self._get_text_bg_color(style),
- )
- temp_width, _ = self.fonts.get_text_size(temp)
- linelength += temp_width
- maxlinelength = max(maxlinelength, linelength)
- charno += len(temp)
- maxcharno = max(maxcharno, charno)
- if line.endswith('\n'):
- # add a line for each extra line in the value
- linelength = 0
- charno = 0
- lineno += 1
- self.maxlinelength = maxlinelength
- self.maxcharno = maxcharno
- self.maxlineno = lineno
-
- def _draw_line_numbers(self):
- """
- Create drawables for the line numbers.
- """
- if not self.line_numbers:
- return
- for p in range(self.maxlineno):
- n = p + self.line_number_start
- if (n % self.line_number_step) == 0:
- self._draw_linenumber(p, n)
-
- def _paint_line_number_bg(self, im):
- """
- Paint the line number background on the image.
- """
- if not self.line_numbers:
- return
- if self.line_number_fg is None:
- return
- draw = ImageDraw.Draw(im)
- recth = im.size[-1]
- rectw = self.image_pad + self.line_number_width - self.line_number_pad
- draw.rectangle([(0, 0), (rectw, recth)],
- fill=self.line_number_bg)
- if self.line_number_separator:
- draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
- del draw
-
- def format(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- This implementation calculates where it should draw each token on the
- pixmap, then calculates the required pixmap size and draws the items.
- """
- self._create_drawables(tokensource)
- self._draw_line_numbers()
- im = Image.new(
- 'RGB',
- self._get_image_size(self.maxlinelength, self.maxlineno),
- self.background_color
- )
- self._paint_line_number_bg(im)
- draw = ImageDraw.Draw(im)
- # Highlight
- if self.hl_lines:
- x = self.image_pad + self.line_number_width - self.line_number_pad + 1
- recth = self._get_line_height()
- rectw = im.size[0] - x
- for linenumber in self.hl_lines:
- y = self._get_line_y(linenumber - 1)
- draw.rectangle([(x, y), (x + rectw, y + recth)],
- fill=self.hl_color)
- for pos, value, font, text_fg, text_bg in self.drawables:
- if text_bg:
- text_size = draw.textsize(text=value, font=font)
- draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
- draw.text(pos, value, font=font, fill=text_fg)
- im.save(outfile, self.image_format.upper())
-
-
-# Add one formatter per format, so that the "-f gif" option gives the correct result
-# when used in pygmentize.
-
-class GifImageFormatter(ImageFormatter):
- """
- Create a GIF image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_gif'
- aliases = ['gif']
- filenames = ['*.gif']
- default_image_format = 'gif'
-
-
-class JpgImageFormatter(ImageFormatter):
- """
- Create a JPEG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_jpg'
- aliases = ['jpg', 'jpeg']
- filenames = ['*.jpg']
- default_image_format = 'jpeg'
-
-
-class BmpImageFormatter(ImageFormatter):
- """
- Create a bitmap image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_bmp'
- aliases = ['bmp', 'bitmap']
- filenames = ['*.bmp']
- default_image_format = 'bmp'
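
A usage sketch tying together the options documented in the ``ImageFormatter`` docstring above, assuming Pillow and a suitable monospace font are available; the file name and the chosen font are illustrative:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import ImageFormatter

    code = 'def greet(name):\n    return f"hello {name}"\n'

    formatter = ImageFormatter(
        font_name='DejaVu Sans Mono',  # must exist on this system
        font_size=14,
        line_numbers=True,
        line_number_start=10,
        hl_lines=[2],                  # highlight the second line
        image_format='png',
    )

    # highlight() returns bytes for this formatter (it forces an
    # encoding, so pygments.format() uses a bytes buffer); write it
    # in binary mode.
    with open('example.png', 'wb') as f:
        f.write(highlight(code, PythonLexer(), formatter))
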
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/irc.py b/venv/lib/python3.11/site-packages/pygments/formatters/irc.py
deleted file mode 100644
index 334aeef..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/irc.py
+++ /dev/null
@@ -1,154 +0,0 @@
-"""
- pygments.formatters.irc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for IRC output
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pygments.util import get_choice_opt
-
-
-__all__ = ['IRCFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-IRC_COLORS = {
- Token: ('', ''),
-
- Whitespace: ('gray', 'brightblack'),
- Comment: ('gray', 'brightblack'),
- Comment.Preproc: ('cyan', 'brightcyan'),
- Keyword: ('blue', 'brightblue'),
- Keyword.Type: ('cyan', 'brightcyan'),
- Operator.Word: ('magenta', 'brightcyan'),
- Name.Builtin: ('cyan', 'brightcyan'),
- Name.Function: ('green', 'brightgreen'),
- Name.Namespace: ('_cyan_', '_brightcyan_'),
- Name.Class: ('_green_', '_brightgreen_'),
- Name.Exception: ('cyan', 'brightcyan'),
- Name.Decorator: ('brightblack', 'gray'),
- Name.Variable: ('red', 'brightred'),
- Name.Constant: ('red', 'brightred'),
- Name.Attribute: ('cyan', 'brightcyan'),
- Name.Tag: ('brightblue', 'brightblue'),
- String: ('yellow', 'yellow'),
- Number: ('blue', 'brightblue'),
-
- Generic.Deleted: ('brightred', 'brightred'),
- Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
- Generic.Subheading: ('*magenta*', '*brightmagenta*'),
- Generic.Error: ('brightred', 'brightred'),
-
- Error: ('_brightred_', '_brightred_'),
-}
-
-
-IRC_COLOR_MAP = {
- 'white': 0,
- 'black': 1,
- 'blue': 2,
- 'brightgreen': 3,
- 'brightred': 4,
- 'yellow': 5,
- 'magenta': 6,
- 'orange': 7,
- 'green': 7, #compat w/ ansi
- 'brightyellow': 8,
- 'lightgreen': 9,
- 'brightcyan': 9, # compat w/ ansi
- 'cyan': 10,
- 'lightblue': 11,
- 'red': 11, # compat w/ ansi
- 'brightblue': 12,
- 'brightmagenta': 13,
- 'brightblack': 14,
- 'gray': 15,
-}
-
-def ircformat(color, text):
- if len(color) < 1:
- return text
- add = sub = ''
- if '_' in color: # italic
- add += '\x1D'
- sub = '\x1D' + sub
- color = color.strip('_')
- if '*' in color: # bold
- add += '\x02'
- sub = '\x02' + sub
- color = color.strip('*')
- # underline (\x1F) not supported
- # backgrounds (\x03FF,BB) not supported
- if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff
- add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2)
- sub = '\x03' + sub
- return add + text + sub
- return '<'+add+'>'+text+'</'+sub+'>'
-
-
-class IRCFormatter(Formatter):
- r"""
- Format tokens with IRC color sequences
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers in the output as well
- (default: ``False`` = no line numbers).
- """
- name = 'IRC'
- aliases = ['irc', 'IRC']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or IRC_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def _write_lineno(self, outfile):
- if self.linenos:
- self._lineno += 1
- outfile.write("%04d: " % self._lineno)
-
- def format_unencoded(self, tokensource, outfile):
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- color = self.colorscheme.get(ttype)
- while color is None:
- ttype = ttype[:-1]
- color = self.colorscheme.get(ttype)
- if color:
- color = color[self.darkbg]
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(ircformat(color, line))
- outfile.write('\n')
- self._write_lineno(outfile)
- if spl[-1]:
- outfile.write(ircformat(color, spl[-1]))
- else:
- outfile.write(value)
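
A small sketch of the ``IRCFormatter`` in use; the resulting string contains the raw mIRC control codes (``\x02`` bold, ``\x03`` colour, ``\x1d`` italic) produced by ``ircformat`` above and is meant to be sent to an IRC client verbatim:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import IRCFormatter

    # 'bg' selects the light or dark column of IRC_COLORS; 'linenos'
    # prefixes each line with a 4-digit counter.
    text = highlight('import os\n', PythonLexer(),
                     IRCFormatter(bg='dark', linenos=True))
    print(repr(text))
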
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/latex.py b/venv/lib/python3.11/site-packages/pygments/formatters/latex.py
deleted file mode 100644
index b130bfa..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/latex.py
+++ /dev/null
@@ -1,521 +0,0 @@
-"""
- pygments.formatters.latex
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for LaTeX fancyvrb output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from io import StringIO
-
-from pygments.formatter import Formatter
-from pygments.lexer import Lexer, do_insertions
-from pygments.token import Token, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt
-
-
-__all__ = ['LatexFormatter']
-
-
-def escape_tex(text, commandprefix):
- return text.replace('\\', '\x00'). \
- replace('{', '\x01'). \
- replace('}', '\x02'). \
- replace('\x00', r'\%sZbs{}' % commandprefix). \
- replace('\x01', r'\%sZob{}' % commandprefix). \
- replace('\x02', r'\%sZcb{}' % commandprefix). \
- replace('^', r'\%sZca{}' % commandprefix). \
- replace('_', r'\%sZus{}' % commandprefix). \
- replace('&', r'\%sZam{}' % commandprefix). \
- replace('<', r'\%sZlt{}' % commandprefix). \
- replace('>', r'\%sZgt{}' % commandprefix). \
- replace('#', r'\%sZsh{}' % commandprefix). \
- replace('%', r'\%sZpc{}' % commandprefix). \
- replace('$', r'\%sZdl{}' % commandprefix). \
- replace('-', r'\%sZhy{}' % commandprefix). \
- replace("'", r'\%sZsq{}' % commandprefix). \
- replace('"', r'\%sZdq{}' % commandprefix). \
- replace('~', r'\%sZti{}' % commandprefix)
-
-
-DOC_TEMPLATE = r'''
-\documentclass{%(docclass)s}
-\usepackage{fancyvrb}
-\usepackage{color}
-\usepackage[%(encoding)s]{inputenc}
-%(preamble)s
-
-%(styledefs)s
-
-\begin{document}
-
-\section*{%(title)s}
-
-%(code)s
-\end{document}
-'''
-
-## Small explanation of the mess below :)
-#
-# The previous version of the LaTeX formatter just assigned a command to
-# each token type defined in the current style. That obviously is
-# problematic if the highlighted code is produced for a different style
-# than the style commands themselves.
-#
-# This version works much like the HTML formatter which assigns multiple
-# CSS classes to each <span> tag, from the most specific to the least
-# specific token type, thus falling back to the parent token type if one
-# is not defined. Here, the classes are there too and use the same short
-# forms given in token.STANDARD_TYPES.
-#
-# Highlighted code now only uses one custom command, which by default is
-# \PY and selectable by the commandprefix option (and in addition the
-# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
-# backwards compatibility purposes).
-#
-# \PY has two arguments: the classes, separated by +, and the text to
-# render in that style. The classes are resolved into the respective
-# style commands by magic, which serves to ignore unknown classes.
-#
-# The magic macros are:
-# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
-# to render in \PY@do. Their definition determines the style.
-# * \PY@reset resets \PY@it etc. to do nothing.
-# * \PY@toks parses the list of classes, using magic inspired by the
-# keyval package (but modified to use plusses instead of commas
-# because fancyvrb redefines commas inside its environments).
-# * \PY@tok processes one class, calling the \PY@tok@classname command
-# if it exists.
-# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
-# for its class.
-# * \PY resets the style, parses the classnames and then calls \PY@do.
-#
-# Tip: to read this code, print it out in substituted form using e.g.
-# >>> print(STYLE_TEMPLATE % {'cp': 'PY'})
-
-STYLE_TEMPLATE = r'''
-\makeatletter
-\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
- \let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
- \let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
-\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
-\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
- \%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
-\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
- \%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
-\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
-
-%(styles)s
-
-\def\%(cp)sZbs{\char`\\}
-\def\%(cp)sZus{\char`\_}
-\def\%(cp)sZob{\char`\{}
-\def\%(cp)sZcb{\char`\}}
-\def\%(cp)sZca{\char`\^}
-\def\%(cp)sZam{\char`\&}
-\def\%(cp)sZlt{\char`\<}
-\def\%(cp)sZgt{\char`\>}
-\def\%(cp)sZsh{\char`\#}
-\def\%(cp)sZpc{\char`\%%}
-\def\%(cp)sZdl{\char`\$}
-\def\%(cp)sZhy{\char`\-}
-\def\%(cp)sZsq{\char`\'}
-\def\%(cp)sZdq{\char`\"}
-\def\%(cp)sZti{\char`\~}
-%% for compatibility with earlier versions
-\def\%(cp)sZat{@}
-\def\%(cp)sZlb{[}
-\def\%(cp)sZrb{]}
-\makeatother
-'''
-
-
-def _get_ttype_name(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-class LatexFormatter(Formatter):
- r"""
- Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
- standard packages.
-
- Without the `full` option, code is formatted as one ``Verbatim``
- environment, like this:
-
- .. sourcecode:: latex
-
- \begin{Verbatim}[commandchars=\\\{\}]
- \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
- \PY{k}{pass}
- \end{Verbatim}
-
- Wrapping can be disabled using the `nowrap` option.
-
- The special command used here (``\PY``) and all the other macros it needs
- are output by the `get_style_defs` method.
-
- With the `full` option, a complete LaTeX document is output, including
- the command definitions in the preamble.
-
- The `get_style_defs()` method of a `LatexFormatter` returns a string
- containing ``\def`` commands defining the macros needed inside the
- ``Verbatim`` environments.
-
- Additional options accepted:
-
- `nowrap`
- If set to ``True``, don't wrap the tokens at all, not even inside a
- ``\begin{Verbatim}`` environment. This disables most other options
- (default: ``False``).
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `docclass`
- If the `full` option is enabled, this is the document class to use
- (default: ``'article'``).
-
- `preamble`
- If the `full` option is enabled, this can be further preamble commands,
- e.g. ``\usepackage`` (default: ``''``).
-
- `linenos`
- If set to ``True``, output line numbers (default: ``False``).
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `verboptions`
- Additional options given to the Verbatim environment (see the *fancyvrb*
- docs for possible values) (default: ``''``).
-
- `commandprefix`
- The LaTeX commands used to produce colored output are constructed
- using this prefix and some letters (default: ``'PY'``).
-
- .. versionadded:: 0.7
- .. versionchanged:: 0.10
- The default is now ``'PY'`` instead of ``'C'``.
-
- `texcomments`
- If set to ``True``, enables LaTeX comment lines. That is, LaTeX markup
- in comment tokens is not escaped so that LaTeX can render it (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `mathescape`
- If set to ``True``, enables LaTeX math mode escape in comments. That
- is, ``'$...$'`` inside a comment will trigger math mode (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `escapeinside`
- If set to a string of length 2, enables escaping to LaTeX. Text
- delimited by these 2 characters is read as LaTeX code and
- typeset accordingly. It has no effect in string literals. It has
- no effect in comments if `texcomments` or `mathescape` is
- set. (default: ``''``).
-
- .. versionadded:: 2.0
-
- `envname`
- Allows you to pick an alternative environment name replacing Verbatim.
- The alternate environment still has to support Verbatim's option syntax.
- (default: ``'Verbatim'``).
-
- .. versionadded:: 2.0
- """
- name = 'LaTeX'
- aliases = ['latex', 'tex']
- filenames = ['*.tex']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.docclass = options.get('docclass', 'article')
- self.preamble = options.get('preamble', '')
- self.linenos = get_bool_opt(options, 'linenos', False)
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.verboptions = options.get('verboptions', '')
- self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.commandprefix = options.get('commandprefix', 'PY')
- self.texcomments = get_bool_opt(options, 'texcomments', False)
- self.mathescape = get_bool_opt(options, 'mathescape', False)
- self.escapeinside = options.get('escapeinside', '')
- if len(self.escapeinside) == 2:
- self.left = self.escapeinside[0]
- self.right = self.escapeinside[1]
- else:
- self.escapeinside = ''
- self.envname = options.get('envname', 'Verbatim')
-
- self._create_stylesheet()
-
- def _create_stylesheet(self):
- t2n = self.ttype2name = {Token: ''}
- c2d = self.cmd2def = {}
- cp = self.commandprefix
-
- def rgbcolor(col):
- if col:
- return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
- for i in (0, 2, 4)])
- else:
- return '1,1,1'
-
- for ttype, ndef in self.style:
- name = _get_ttype_name(ttype)
- cmndef = ''
- if ndef['bold']:
- cmndef += r'\let\$$@bf=\textbf'
- if ndef['italic']:
- cmndef += r'\let\$$@it=\textit'
- if ndef['underline']:
- cmndef += r'\let\$$@ul=\underline'
- if ndef['roman']:
- cmndef += r'\let\$$@ff=\textrm'
- if ndef['sans']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['mono']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['color']:
- cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
- rgbcolor(ndef['color']))
- if ndef['border']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{\string -\fboxrule}'
- r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}}' %
- (rgbcolor(ndef['border']),
- rgbcolor(ndef['bgcolor'])))
- elif ndef['bgcolor']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{0pt}'
- r'\colorbox[rgb]{%s}{\strut ##1}}}' %
- rgbcolor(ndef['bgcolor']))
- if cmndef == '':
- continue
- cmndef = cmndef.replace('$$', cp)
- t2n[ttype] = name
- c2d[name] = cmndef
-
- def get_style_defs(self, arg=''):
- """
- Return the command sequences needed to define the commands
- used to format text in the verbatim environment. ``arg`` is ignored.
- """
- cp = self.commandprefix
- styles = []
- for name, definition in self.cmd2def.items():
- styles.append(r'\@namedef{%s@tok@%s}{%s}' % (cp, name, definition))
- return STYLE_TEMPLATE % {'cp': self.commandprefix,
- 'styles': '\n'.join(styles)}
-
- def format_unencoded(self, tokensource, outfile):
- # TODO: add support for background colors
- t2n = self.ttype2name
- cp = self.commandprefix
-
- if self.full:
- realoutfile = outfile
- outfile = StringIO()
-
- if not self.nowrap:
- outfile.write('\\begin{' + self.envname + '}[commandchars=\\\\\\{\\}')
- if self.linenos:
- start, step = self.linenostart, self.linenostep
- outfile.write(',numbers=left' +
- (start and ',firstnumber=%d' % start or '') +
- (step and ',stepnumber=%d' % step or ''))
- if self.mathescape or self.texcomments or self.escapeinside:
- outfile.write(',codes={\\catcode`\\$=3\\catcode`\\^=7'
- '\\catcode`\\_=8\\relax}')
- if self.verboptions:
- outfile.write(',' + self.verboptions)
- outfile.write(']\n')
-
- for ttype, value in tokensource:
- if ttype in Token.Comment:
- if self.texcomments:
- # Try to guess comment starting lexeme and escape it ...
- start = value[0:1]
- for i in range(1, len(value)):
- if start[0] != value[i]:
- break
- start += value[i]
-
- value = value[len(start):]
- start = escape_tex(start, cp)
-
- # ... but do not escape inside comment.
- value = start + value
- elif self.mathescape:
- # Only escape parts not inside a math environment.
- parts = value.split('$')
- in_math = False
- for i, part in enumerate(parts):
- if not in_math:
- parts[i] = escape_tex(part, cp)
- in_math = not in_math
- value = '$'.join(parts)
- elif self.escapeinside:
- text = value
- value = ''
- while text:
- a, sep1, text = text.partition(self.left)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- value += escape_tex(a, cp) + b
- else:
- value += escape_tex(a + sep1 + b, cp)
- else:
- value += escape_tex(a, cp)
- else:
- value = escape_tex(value, cp)
- elif ttype not in Token.Escape:
- value = escape_tex(value, cp)
- styles = []
- while ttype is not Token:
- try:
- styles.append(t2n[ttype])
- except KeyError:
- # not in current style
- styles.append(_get_ttype_name(ttype))
- ttype = ttype.parent
- styleval = '+'.join(reversed(styles))
- if styleval:
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
- outfile.write('\n')
- if spl[-1]:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
- else:
- outfile.write(value)
-
- if not self.nowrap:
- outfile.write('\\end{' + self.envname + '}\n')
-
- if self.full:
- encoding = self.encoding or 'utf8'
- # map known existing encodings from the LaTeX distribution
- encoding = {
- 'utf_8': 'utf8',
- 'latin_1': 'latin1',
- 'iso_8859_1': 'latin1',
- }.get(encoding.replace('-', '_'), encoding)
- realoutfile.write(DOC_TEMPLATE %
- dict(docclass = self.docclass,
- preamble = self.preamble,
- title = self.title,
- encoding = encoding,
- styledefs = self.get_style_defs(),
- code = outfile.getvalue()))
-
-
-class LatexEmbeddedLexer(Lexer):
- """
- This lexer takes one lexer as argument, the lexer for the language
- being formatted, and the left and right delimiters for escaped text.
-
- First everything is scanned using the language lexer to obtain
- strings and comments. All other consecutive tokens are merged and
- the resulting text is scanned for escaped segments, which are given
- the Token.Escape type. Finally text that is not escaped is scanned
- again with the language lexer.
- """
- def __init__(self, left, right, lang, **options):
- self.left = left
- self.right = right
- self.lang = lang
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- # find and remove all the escape tokens (replace with an empty string)
- # this is very similar to DelegatingLexer.get_tokens_unprocessed.
- buffered = ''
- insertions = []
- insertion_buf = []
- for i, t, v in self._find_safe_escape_tokens(text):
- if t is None:
- if insertion_buf:
- insertions.append((len(buffered), insertion_buf))
- insertion_buf = []
- buffered += v
- else:
- insertion_buf.append((i, t, v))
- if insertion_buf:
- insertions.append((len(buffered), insertion_buf))
- return do_insertions(insertions,
- self.lang.get_tokens_unprocessed(buffered))
-
- def _find_safe_escape_tokens(self, text):
- """ find escape tokens that are not in strings or comments """
- for i, t, v in self._filter_to(
- self.lang.get_tokens_unprocessed(text),
- lambda t: t in Token.Comment or t in Token.String
- ):
- if t is None:
- for i2, t2, v2 in self._find_escape_tokens(v):
- yield i + i2, t2, v2
- else:
- yield i, None, v
-
- def _filter_to(self, it, pred):
- """ Keep only the tokens that match `pred`, merge the others together """
- buf = ''
- idx = 0
- for i, t, v in it:
- if pred(t):
- if buf:
- yield idx, None, buf
- buf = ''
- yield i, t, v
- else:
- if not buf:
- idx = i
- buf += v
- if buf:
- yield idx, None, buf
-
- def _find_escape_tokens(self, text):
- """ Find escape tokens within text, give token=None otherwise """
- index = 0
- while text:
- a, sep1, text = text.partition(self.left)
- if a:
- yield index, None, a
- index += len(a)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- yield index + len(sep1), Token.Escape, b
- index += len(sep1) + len(b) + len(sep2)
- else:
- yield index, Token.Error, sep1
- index += len(sep1)
- text = b
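
A usage sketch for the ``LatexFormatter``: with ``full=True`` the ``\PY`` macros returned by ``get_style_defs()`` are embedded in the generated preamble, otherwise they have to be added to the host document once. The file name is illustrative:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import LatexFormatter

    formatter = LatexFormatter(full=True, title='Example', linenos=True)
    document = highlight('x = {"a": 1}  # comment\n', PythonLexer(), formatter)

    with open('example.tex', 'w') as f:
        f.write(document)

    # For snippets embedded in an existing document, emit the macro
    # definitions separately and paste only the Verbatim environment:
    preamble = LatexFormatter(style='default').get_style_defs()
    snippet = highlight('x = 1\n', PythonLexer(), LatexFormatter())
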
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/other.py b/venv/lib/python3.11/site-packages/pygments/formatters/other.py
deleted file mode 100644
index 8004764..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/other.py
+++ /dev/null
@@ -1,161 +0,0 @@
-"""
- pygments.formatters.other
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Other formatters: NullFormatter, RawTokenFormatter.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.util import get_choice_opt
-from pygments.token import Token
-from pygments.console import colorize
-
-__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
-
-
-class NullFormatter(Formatter):
- """
- Output the text unchanged without any formatting.
- """
- name = 'Text only'
- aliases = ['text', 'null']
- filenames = ['*.txt']
-
- def format(self, tokensource, outfile):
- enc = self.encoding
- for ttype, value in tokensource:
- if enc:
- outfile.write(value.encode(enc))
- else:
- outfile.write(value)
-
-
-class RawTokenFormatter(Formatter):
- r"""
- Format tokens as a raw representation for storing token streams.
-
- The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
- be converted to a token stream with the `RawTokenLexer`, described in the
- :doc:`lexer list <lexers>`.
-
- Only two options are accepted:
-
- `compress`
- If set to ``'gz'`` or ``'bz2'``, compress the output with the given
- compression algorithm after encoding (default: ``''``).
- `error_color`
- If set to a color name, highlight error tokens using that color. If
- set but with no value, defaults to ``'red'``.
-
- .. versionadded:: 0.11
-
- """
- name = 'Raw tokens'
- aliases = ['raw', 'tokens']
- filenames = ['*.raw']
-
- unicodeoutput = False
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- # We ignore self.encoding if it is set, since it gets set for lexer
- # and formatter if given with -Oencoding on the command line.
- # The RawTokenFormatter outputs only ASCII. Override here.
- self.encoding = 'ascii' # let pygments.format() do the right thing
- self.compress = get_choice_opt(options, 'compress',
- ['', 'none', 'gz', 'bz2'], '')
- self.error_color = options.get('error_color', None)
- if self.error_color is True:
- self.error_color = 'red'
- if self.error_color is not None:
- try:
- colorize(self.error_color, '')
- except KeyError:
- raise ValueError("Invalid color %r specified" %
- self.error_color)
-
- def format(self, tokensource, outfile):
- try:
- outfile.write(b'')
- except TypeError:
- raise TypeError('The raw tokens formatter needs a binary '
- 'output file')
- if self.compress == 'gz':
- import gzip
- outfile = gzip.GzipFile('', 'wb', 9, outfile)
-
- write = outfile.write
- flush = outfile.close
- elif self.compress == 'bz2':
- import bz2
- compressor = bz2.BZ2Compressor(9)
-
- def write(text):
- outfile.write(compressor.compress(text))
-
- def flush():
- outfile.write(compressor.flush())
- outfile.flush()
- else:
- write = outfile.write
- flush = outfile.flush
-
- if self.error_color:
- for ttype, value in tokensource:
- line = b"%r\t%r\n" % (ttype, value)
- if ttype is Token.Error:
- write(colorize(self.error_color, line))
- else:
- write(line)
- else:
- for ttype, value in tokensource:
- write(b"%r\t%r\n" % (ttype, value))
- flush()
-
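
A sketch of the raw token round trip described in the ``RawTokenFormatter`` docstring; ``RawTokenLexer`` lives in ``pygments.lexers.special`` and reads the ``tokentype<TAB>repr(tokenstring)`` lines back into a token stream:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.lexers.special import RawTokenLexer
    from pygments.formatters import RawTokenFormatter, NullFormatter

    source = 'print(1 + 2)\n'

    # Bytes output, one token per line, e.g. b"Token.Name.Builtin\t'print'"
    raw = highlight(source, PythonLexer(), RawTokenFormatter())
    print(raw.splitlines()[0])

    # Feed the stream back through RawTokenLexer and render it as plain
    # text again; the concatenated token values reproduce the input.
    roundtrip = highlight(raw.decode('ascii'), RawTokenLexer(), NullFormatter())
    print(roundtrip == source)  # expected: True
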
-
-TESTCASE_BEFORE = '''\
- def testNeedsName(lexer):
- fragment = %r
- tokens = [
-'''
-TESTCASE_AFTER = '''\
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-'''
-
-
-class TestcaseFormatter(Formatter):
- """
- Format tokens as appropriate for a new testcase.
-
- .. versionadded:: 2.0
- """
- name = 'Testcase'
- aliases = ['testcase']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- if self.encoding is not None and self.encoding != 'utf-8':
- raise ValueError("Only None and utf-8 are allowed encodings.")
-
- def format(self, tokensource, outfile):
- indentation = ' ' * 12
- rawbuf = []
- outbuf = []
- for ttype, value in tokensource:
- rawbuf.append(value)
- outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
-
- before = TESTCASE_BEFORE % (''.join(rawbuf),)
- during = ''.join(outbuf)
- after = TESTCASE_AFTER
- if self.encoding is None:
- outfile.write(before + during + after)
- else:
- outfile.write(before.encode('utf-8'))
- outfile.write(during.encode('utf-8'))
- outfile.write(after.encode('utf-8'))
- outfile.flush()
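
The ``TestcaseFormatter`` emits a ready-to-paste, pytest-style token assertion built from ``TESTCASE_BEFORE``/``TESTCASE_AFTER`` above; a minimal sketch of how it is driven:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import TestcaseFormatter

    # Prints a testNeedsName() skeleton containing the token list for
    # the given fragment.
    print(highlight('x = 1\n', PythonLexer(), TestcaseFormatter()))
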
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/pangomarkup.py b/venv/lib/python3.11/site-packages/pygments/formatters/pangomarkup.py
deleted file mode 100644
index 50872fe..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/pangomarkup.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""
- pygments.formatters.pangomarkup
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for Pango markup output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-
-
-__all__ = ['PangoMarkupFormatter']
-
-
-_escape_table = {
- ord('&'): '&amp;',
- ord('<'): '&lt;',
-}
-
-
-def escape_special_chars(text, table=_escape_table):
- """Escape & and < for Pango Markup."""
- return text.translate(table)
-
-
-class PangoMarkupFormatter(Formatter):
- """
- Format tokens as Pango Markup code. It can then be rendered to an SVG.
-
- .. versionadded:: 2.9
- """
-
- name = 'Pango Markup'
- aliases = ['pango', 'pangomarkup']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.styles = {}
-
- for token, style in self.style:
- start = ''
- end = ''
- if style['color']:
- start += '<span fgcolor="#%s">' % style['color']
- end = '</span>' + end
- if style['bold']:
- start += '<b>'
- end = '</b>' + end
- if style['italic']:
- start += '<i>'
- end = '</i>' + end
- if style['underline']:
- start += '<u>'
- end = '</u>' + end
- self.styles[token] = (start, end)
-
- def format_unencoded(self, tokensource, outfile):
- lastval = ''
- lasttype = None
-
- outfile.write('<tt>')
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- if ttype == lasttype:
- lastval += escape_special_chars(value)
- else:
- if lastval:
- stylebegin, styleend = self.styles[lasttype]
- outfile.write(stylebegin + lastval + styleend)
- lastval = escape_special_chars(value)
- lasttype = ttype
-
- if lastval:
- stylebegin, styleend = self.styles[lasttype]
- outfile.write(stylebegin + lastval + styleend)
-
- outfile.write('</tt>')
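
A quick sketch of the ``PangoMarkupFormatter``; the output is a ``<tt>…</tt>`` string with Pango ``<span>`` attributes and can be handed to anything that accepts Pango markup (for example a GTK label's ``set_markup`` — an assumption about the consumer, not something this formatter requires):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import PangoMarkupFormatter

    markup = highlight('x = 1  # note\n', PythonLexer(), PangoMarkupFormatter())
    print(markup)  # e.g. <tt><span fgcolor="#...">x</span> = ...</tt>
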
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/rtf.py b/venv/lib/python3.11/site-packages/pygments/formatters/rtf.py
deleted file mode 100644
index d3a83fa..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/rtf.py
+++ /dev/null
@@ -1,146 +0,0 @@
-"""
- pygments.formatters.rtf
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- A formatter that generates RTF files.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.util import get_int_opt, surrogatepair
-
-
-__all__ = ['RtfFormatter']
-
-
-class RtfFormatter(Formatter):
- """
- Format tokens as RTF markup. This formatter automatically outputs full RTF
- documents with color information and other useful stuff. Perfect for Copy and
- Paste into Microsoft(R) Word(R) documents.
-
- Please note that ``encoding`` and ``outencoding`` options are ignored.
- The RTF format is ASCII natively, but handles unicode characters correctly
- thanks to escape sequences.
-
- .. versionadded:: 0.6
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `fontface`
- The used font family, for example ``Bitstream Vera Sans``. Defaults to
- some generic font which is supposed to have fixed width.
-
- `fontsize`
- Size of the font used. Size is specified in half points. The
- default is 24 half-points, giving a size 12 font.
-
- .. versionadded:: 2.0
- """
- name = 'RTF'
- aliases = ['rtf']
- filenames = ['*.rtf']
-
- def __init__(self, **options):
- r"""
- Additional options accepted:
-
- ``fontface``
- Name of the font used. Could for example be ``'Courier New'``
- to further specify the default which is ``'\fmodern'``. The RTF
- specification claims that ``\fmodern`` are "Fixed-pitch serif
- and sans serif fonts". Hope every RTF implementation thinks
- the same about modern...
-
- """
- Formatter.__init__(self, **options)
- self.fontface = options.get('fontface') or ''
- self.fontsize = get_int_opt(options, 'fontsize', 0)
-
- def _escape(self, text):
- return text.replace('\\', '\\\\') \
- .replace('{', '\\{') \
- .replace('}', '\\}')
-
- def _escape_text(self, text):
- # return early for empty strings; this gives a small performance improvement
- if not text:
- return ''
-
- # escape text
- text = self._escape(text)
-
- buf = []
- for c in text:
- cn = ord(c)
- if cn < (2**7):
- # ASCII character
- buf.append(str(c))
- elif (2**7) <= cn < (2**16):
- # single unicode escape sequence
- buf.append('{\\u%d}' % cn)
- elif (2**16) <= cn:
- # RTF limits unicode to 16 bits.
- # Force surrogate pairs
- buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn))
-
- return ''.join(buf).replace('\n', '\\par\n')
-
- def format_unencoded(self, tokensource, outfile):
- # rtf 1.8 header
- outfile.write('{\\rtf1\\ansi\\uc0\\deff0'
- '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
- '{\\colortbl;' % (self.fontface and
- ' ' + self._escape(self.fontface) or
- ''))
-
- # convert colors and save them in a mapping to access them later.
- color_mapping = {}
- offset = 1
- for _, style in self.style:
- for color in style['color'], style['bgcolor'], style['border']:
- if color and color not in color_mapping:
- color_mapping[color] = offset
- outfile.write('\\red%d\\green%d\\blue%d;' % (
- int(color[0:2], 16),
- int(color[2:4], 16),
- int(color[4:6], 16)
- ))
- offset += 1
- outfile.write('}\\f0 ')
- if self.fontsize:
- outfile.write('\\fs%d' % self.fontsize)
-
- # highlight stream
- for ttype, value in tokensource:
- while not self.style.styles_token(ttype) and ttype.parent:
- ttype = ttype.parent
- style = self.style.style_for_token(ttype)
- buf = []
- if style['bgcolor']:
- buf.append('\\cb%d' % color_mapping[style['bgcolor']])
- if style['color']:
- buf.append('\\cf%d' % color_mapping[style['color']])
- if style['bold']:
- buf.append('\\b')
- if style['italic']:
- buf.append('\\i')
- if style['underline']:
- buf.append('\\ul')
- if style['border']:
- buf.append('\\chbrdr\\chcfpat%d' %
- color_mapping[style['border']])
- start = ''.join(buf)
- if start:
- outfile.write('{%s ' % start)
- outfile.write(self._escape_text(value))
- if start:
- outfile.write('}')
-
- outfile.write('}')
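
A usage sketch for the ``RtfFormatter``; ``fontsize`` is given in half-points (24 half-points is a 12 pt font), and the chosen ``fontface`` is an assumption that must exist on the machine opening the file:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import RtfFormatter

    rtf = highlight('greeting = "héllo"\n', PythonLexer(),
                    RtfFormatter(fontface='Courier New', fontsize=24))

    # The document is plain ASCII with \u escapes for non-ASCII text,
    # so it can be written as a normal text file.
    with open('example.rtf', 'w') as f:
        f.write(rtf)
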
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/svg.py b/venv/lib/python3.11/site-packages/pygments/formatters/svg.py
deleted file mode 100644
index e3cd269..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/svg.py
+++ /dev/null
@@ -1,188 +0,0 @@
-"""
- pygments.formatters.svg
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for SVG output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.token import Comment
-from pygments.util import get_bool_opt, get_int_opt
-
-__all__ = ['SvgFormatter']
-
-
-def escape_html(text):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.replace('&', '&amp;'). \
- replace('<', '&lt;'). \
- replace('>', '&gt;'). \
- replace('"', '&quot;'). \
- replace("'", '&#39;')
-
-
-class2style = {}
-
-class SvgFormatter(Formatter):
- """
- Format tokens as an SVG graphics file. This formatter is still experimental.
- Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
- coordinates containing ``<tspan>`` elements with the individual token styles.
-
- By default, this formatter outputs a full SVG document including doctype
- declaration and the ``<svg>`` root element.
-
- .. versionadded:: 0.9
-
- Additional options accepted:
-
- `nowrap`
- Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
- don't add an XML declaration and a doctype. If true, the `fontfamily`
- and `fontsize` options are ignored. Defaults to ``False``.
-
- `fontfamily`
- The value to give the wrapping ``<g>`` element's ``font-family``
- attribute, defaults to ``"monospace"``.
-
- `fontsize`
- The value to give the wrapping ``<g>`` element's ``font-size``
- attribute, defaults to ``"14px"``.
-
- `linenos`
- If ``True``, add line numbers (default: ``False``).
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `linenowidth`
- Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
- for up to 4-digit line numbers. Increase width for longer code blocks).
-
- `xoffset`
- Starting offset in X direction, defaults to ``0``.
-
- `yoffset`
- Starting offset in Y direction, defaults to the font size if it is given
- in pixels, or ``20`` otherwise. (This is necessary since text coordinates
- refer to the text baseline, not the top edge.)
-
- `ystep`
- Offset to add to the Y coordinate for each subsequent line. This should
- roughly be the text size plus 5. It defaults to that value if the text
- size is given in pixels, or ``25`` otherwise.
-
- `spacehack`
- Convert spaces in the source to ``&#160;``, which are non-breaking
- spaces. SVG provides the ``xml:space`` attribute to control how
- whitespace inside tags is handled; in theory, the ``preserve`` value
- could be used to keep all whitespace as-is. However, many current SVG
- viewers don't obey that rule, so this option is provided as a workaround
- and defaults to ``True``.
- """
- name = 'SVG'
- aliases = ['svg']
- filenames = ['*.svg']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.fontfamily = options.get('fontfamily', 'monospace')
- self.fontsize = options.get('fontsize', '14px')
- self.xoffset = get_int_opt(options, 'xoffset', 0)
- fs = self.fontsize.strip()
- if fs.endswith('px'): fs = fs[:-2].strip()
- try:
- int_fs = int(fs)
- except ValueError:
- # fall back to a sensible default when the font size is not a plain number
- int_fs = 20
- self.yoffset = get_int_opt(options, 'yoffset', int_fs)
- self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
- self.spacehack = get_bool_opt(options, 'spacehack', True)
- self.linenos = get_bool_opt(options, 'linenos', False)
- self.linenostart = get_int_opt(options, 'linenostart', 1)
- self.linenostep = get_int_opt(options, 'linenostep', 1)
- self.linenowidth = get_int_opt(options, 'linenowidth', 3 * self.ystep)
- self._stylecache = {}
-
- def format_unencoded(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- For our implementation we put all lines in their own 'line group'.
- """
- x = self.xoffset
- y = self.yoffset
- if not self.nowrap:
- if self.encoding:
- outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
- self.encoding)
- else:
- outfile.write('<?xml version="1.0"?>\n')
- outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
- '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
- 'svg10.dtd">\n')
- outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
- outfile.write('<g font-family="%s" font-size="%s">\n' %
- (self.fontfamily, self.fontsize))
-
- counter = self.linenostart
- counter_step = self.linenostep
- counter_style = self._get_style(Comment)
- line_x = x
-
- if self.linenos:
- if counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" %s text-anchor="end">%s</text>' %
- (x + self.linenowidth, y, counter_style, counter))
- line_x += self.linenowidth + self.ystep
- counter += 1
-
- outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
- for ttype, value in tokensource:
- style = self._get_style(ttype)
- tspan = style and '<tspan' + style + '>' or ''
- tspanend = tspan and '</tspan>' or ''
- value = escape_html(value)
- if self.spacehack:
- value = value.expandtabs().replace(' ', '&#160;')
- parts = value.split('\n')
- for part in parts[:-1]:
- outfile.write(tspan + part + tspanend)
- y += self.ystep
- outfile.write('</text>\n')
- if self.linenos and counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" text-anchor="end" %s>%s</text>' %
- (x + self.linenowidth, y, counter_style, counter))
-
- counter += 1
- outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
- outfile.write(tspan + parts[-1] + tspanend)
- outfile.write('</text>')
-
- if not self.nowrap:
- outfile.write('</g></svg>\n')
-
- def _get_style(self, tokentype):
- if tokentype in self._stylecache:
- return self._stylecache[tokentype]
- otokentype = tokentype
- while not self.style.styles_token(tokentype):
- tokentype = tokentype.parent
- value = self.style.style_for_token(tokentype)
- result = ''
- if value['color']:
- result = ' fill="#' + value['color'] + '"'
- if value['bold']:
- result += ' font-weight="bold"'
- if value['italic']:
- result += ' font-style="italic"'
- self._stylecache[otokentype] = result
- return result
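
A comparable sketch for the SVG formatter removed above, exercising the documented ``fontfamily``, ``fontsize`` and ``linenos`` options; the sample input and output filename are placeholders.

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import SvgFormatter

source = "def add(a, b):\n    return a + b\n"  # placeholder input

# Without nowrap=True the result is a complete SVG document including the
# XML declaration, doctype and the wrapping <svg><g> element.
formatter = SvgFormatter(fontfamily='monospace', fontsize='14px', linenos=True)

with open('example.svg', 'w') as fp:
    fp.write(highlight(source, PythonLexer(), formatter))
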
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/terminal.py b/venv/lib/python3.11/site-packages/pygments/formatters/terminal.py
deleted file mode 100644
index 636f350..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/terminal.py
+++ /dev/null
@@ -1,127 +0,0 @@
-"""
- pygments.formatters.terminal
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for terminal output with ANSI sequences.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pygments.console import ansiformat
-from pygments.util import get_choice_opt
-
-
-__all__ = ['TerminalFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-TERMINAL_COLORS = {
- Token: ('', ''),
-
- Whitespace: ('gray', 'brightblack'),
- Comment: ('gray', 'brightblack'),
- Comment.Preproc: ('cyan', 'brightcyan'),
- Keyword: ('blue', 'brightblue'),
- Keyword.Type: ('cyan', 'brightcyan'),
- Operator.Word: ('magenta', 'brightmagenta'),
- Name.Builtin: ('cyan', 'brightcyan'),
- Name.Function: ('green', 'brightgreen'),
- Name.Namespace: ('_cyan_', '_brightcyan_'),
- Name.Class: ('_green_', '_brightgreen_'),
- Name.Exception: ('cyan', 'brightcyan'),
- Name.Decorator: ('brightblack', 'gray'),
- Name.Variable: ('red', 'brightred'),
- Name.Constant: ('red', 'brightred'),
- Name.Attribute: ('cyan', 'brightcyan'),
- Name.Tag: ('brightblue', 'brightblue'),
- String: ('yellow', 'yellow'),
- Number: ('blue', 'brightblue'),
-
- Generic.Deleted: ('brightred', 'brightred'),
- Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
- Generic.Subheading: ('*magenta*', '*brightmagenta*'),
- Generic.Prompt: ('**', '**'),
- Generic.Error: ('brightred', 'brightred'),
-
- Error: ('_brightred_', '_brightred_'),
-}
-
-
-class TerminalFormatter(Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a text console.
- Color sequences are terminated at newlines, so that paging the output
- works correctly.
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers on the terminal output as well
- (default: ``False`` = no line numbers).
- """
- name = 'Terminal'
- aliases = ['terminal', 'console']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def format(self, tokensource, outfile):
- return Formatter.format(self, tokensource, outfile)
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
-
- def _get_color(self, ttype):
- # self.colorscheme is a dict that usually contains generic token types,
- # so we have to walk up the token type hierarchy until a match is found.
- # The base Token type must be a key, even if its value is the empty
- # string, as in the default colorscheme above.
- colors = self.colorscheme.get(ttype)
- while colors is None:
- ttype = ttype.parent
- colors = self.colorscheme.get(ttype)
- return colors[self.darkbg]
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- color = self._get_color(ttype)
-
- for line in value.splitlines(True):
- if color:
- outfile.write(ansiformat(color, line.rstrip('\n')))
- else:
- outfile.write(line.rstrip('\n'))
- if line.endswith('\n'):
- if self.linenos:
- self._write_lineno(outfile)
- else:
- outfile.write('\n')
-
- if self.linenos:
- outfile.write("\n")
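
Typical use of the terminal formatter removed above: ``bg`` selects the light or dark column of TERMINAL_COLORS, and ``linenos`` enables the four-digit counter written by ``_write_lineno()``. The sample snippet is a placeholder.

import sys

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

source = "for i in range(3):\n    print(i)\n"  # placeholder input

formatter = TerminalFormatter(bg='dark', linenos=True)
sys.stdout.write(highlight(source, PythonLexer(), formatter))
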
diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/terminal256.py b/venv/lib/python3.11/site-packages/pygments/formatters/terminal256.py
deleted file mode 100644
index dba5b63..0000000
--- a/venv/lib/python3.11/site-packages/pygments/formatters/terminal256.py
+++ /dev/null
@@ -1,338 +0,0 @@
-"""
- pygments.formatters.terminal256
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for 256-color terminal output with ANSI sequences.
-
- RGB-to-XTERM color conversion routines adapted from xterm256-conv
- tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
- by Wolfgang Frisch.
-
- Formatter version 1.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# TODO:
-# - Options to map style's bold/underline/italic/border attributes
-# to some ANSI attributes (something like 'italic=underline')
-# - An option to output "style RGB to xterm RGB/index" conversion table
-# - An option to indicate that we are running in "reverse background"
-# xterm. This means that default colors are white-on-black, not
-# black-on-while, so colors like "white background" need to be converted
-# to "white background, black foreground", etc...
-
-from pygments.formatter import Formatter
-from pygments.console import codes
-from pygments.style import ansicolors
-
-
-__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
-
-
-class EscapeSequence:
- def __init__(self, fg=None, bg=None, bold=False, underline=False, italic=False):
- self.fg = fg
- self.bg = bg
- self.bold = bold
- self.underline = underline
- self.italic = italic
-
- def escape(self, attrs):
- if len(attrs):
- return "\x1b[" + ";".join(attrs) + "m"
- return ""
-
- def color_string(self):
- attrs = []
- if self.fg is not None:
- if self.fg in ansicolors:
- esc = codes[self.fg.replace('ansi','')]
- if ';01m' in esc:
- self.bold = True
- # extract fg color code.
- attrs.append(esc[2:4])
- else:
- attrs.extend(("38", "5", "%i" % self.fg))
- if self.bg is not None:
- if self.bg in ansicolors:
- esc = codes[self.bg.replace('ansi','')]
- # extract fg color code, add 10 for bg.
- attrs.append(str(int(esc[2:4])+10))
- else:
- attrs.extend(("48", "5", "%i" % self.bg))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
- if self.italic:
- attrs.append("03")
- return self.escape(attrs)
-
- def true_color_string(self):
- attrs = []
- if self.fg:
- attrs.extend(("38", "2", str(self.fg[0]), str(self.fg[1]), str(self.fg[2])))
- if self.bg:
- attrs.extend(("48", "2", str(self.bg[0]), str(self.bg[1]), str(self.bg[2])))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
- if self.italic:
- attrs.append("03")
- return self.escape(attrs)
-
- def reset_string(self):
- attrs = []
- if self.fg is not None:
- attrs.append("39")
- if self.bg is not None:
- attrs.append("49")
- if self.bold or self.underline or self.italic:
- attrs.append("00")
- return self.escape(attrs)
-
-
-class Terminal256Formatter(Formatter):
- """
- Format tokens with ANSI color sequences, for output in a 256-color
- terminal or console. Like in `TerminalFormatter`, color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- The formatter takes colors from a style defined by the `style` option
- and converts them to the nearest ANSI 256-color escape sequences. Bold and
- underline attributes from the style are preserved (and displayed).
-
- .. versionadded:: 0.9
-
- .. versionchanged:: 2.2
- If the used style defines foreground colors in the form ``#ansi*``, then
- `Terminal256Formatter` will map these to non-extended foreground colors.
- See :ref:`AnsiTerminalStyle` for more information.
-
- .. versionchanged:: 2.4
- The ANSI color names have been updated with names that are easier to
- understand and align with the color names of other projects and terminals.
- See :ref:`this table <new-ansi-color-names>` for more information.
-
-
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `linenos`
- Set to ``True`` to have line numbers on the terminal output as well
- (default: ``False`` = no line numbers).
- """
- name = 'Terminal256'
- aliases = ['terminal256', 'console256', '256']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.xterm_colors = []
- self.best_match = {}
- self.style_string = {}
-
- self.usebold = 'nobold' not in options
- self.useunderline = 'nounderline' not in options
- self.useitalic = 'noitalic' not in options
-
- self._build_color_table() # build an RGB-to-256 color conversion table
- self._setup_styles() # convert selected style's colors to term. colors
-
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def _build_color_table(self):
- # colors 0..15: 16 basic colors
-
- self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
- self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
- self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
- self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
- self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
- self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
- self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
- self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
- self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
- self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
- self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
- self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
- self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
- self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
- self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
- self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
-
- # colors 16..232: the 6x6x6 color cube
-
- valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
-
- for i in range(217):
- r = valuerange[(i // 36) % 6]
- g = valuerange[(i // 6) % 6]
- b = valuerange[i % 6]
- self.xterm_colors.append((r, g, b))
-
- # colors 233..253: grayscale
-
- for i in range(1, 22):
- v = 8 + i * 10
- self.xterm_colors.append((v, v, v))
-
- def _closest_color(self, r, g, b):
- distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
- match = 0
-
- for i in range(0, 254):
- values = self.xterm_colors[i]
-
- rd = r - values[0]
- gd = g - values[1]
- bd = b - values[2]
- d = rd*rd + gd*gd + bd*bd
-
- if d < distance:
- match = i
- distance = d
- return match
-
- def _color_index(self, color):
- index = self.best_match.get(color, None)
- if color in ansicolors:
- # strip the `ansi/#ansi` part and look up code
- index = color
- self.best_match[color] = index
- if index is None:
- try:
- rgb = int(str(color), 16)
- except ValueError:
- rgb = 0
-
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- index = self._closest_color(r, g, b)
- self.best_match[color] = index
- return index
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
- # get foreground from ansicolor if set
- if ndef['ansicolor']:
- escape.fg = self._color_index(ndef['ansicolor'])
- elif ndef['color']:
- escape.fg = self._color_index(ndef['color'])
- if ndef['bgansicolor']:
- escape.bg = self._color_index(ndef['bgansicolor'])
- elif ndef['bgcolor']:
- escape.bg = self._color_index(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
- if self.useitalic and ndef['italic']:
- escape.italic = True
- self.style_string[str(ttype)] = (escape.color_string(),
- escape.reset_string())
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
-
- def format(self, tokensource, outfile):
- return Formatter.format(self, tokensource, outfile)
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- not_found = True
- while ttype and not_found:
- try:
- # outfile.write( "<" + str(ttype) + ">" )
- on, off = self.style_string[str(ttype)]
-
- # Like TerminalFormatter, add "reset colors" escape sequence
- # on newline.
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(on + line + off)
- if self.linenos:
- self._write_lineno(outfile)
- else:
- outfile.write('\n')
-
- if spl[-1]:
- outfile.write(on + spl[-1] + off)
-
- not_found = False
- # outfile.write( '#' + str(ttype) + '#' )
-
- except KeyError:
- # ottype = ttype
- ttype = ttype.parent
- # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
-
- if not_found:
- outfile.write(value)
-
- if self.linenos:
- outfile.write("\n")
-
-
-
-class TerminalTrueColorFormatter(Terminal256Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a true-color
- terminal or console. Like in `TerminalFormatter`, color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- .. versionadded:: 2.1
-
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
- """
- name = 'TerminalTrueColor'
- aliases = ['terminal16m', 'console16m', '16m']
- filenames = []
-
- def _build_color_table(self):
- pass
-
- def _color_tuple(self, color):
- try:
- rgb = int(str(color), 16)
- except ValueError:
- return None
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- return (r, g, b)
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
- if ndef['color']:
- escape.fg = self._color_tuple(ndef['color'])
- if ndef['bgcolor']:
- escape.bg = self._color_tuple(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
- if self.useitalic and ndef['italic']:
- escape.italic = True
- self.style_string[str(ttype)] = (escape.true_color_string(),
- escape.reset_string())
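
The two formatters removed above are used the same way: ``Terminal256Formatter`` quantizes style colors to the xterm palette built by ``_build_color_table()``, while ``TerminalTrueColorFormatter`` emits 24-bit sequences directly. A minimal sketch; the ``'monokai'`` style name and the snippet are placeholders, and any installed style works.

import sys

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import Terminal256Formatter, TerminalTrueColorFormatter

source = "import os\nprint(os.getcwd())\n"  # placeholder input

sys.stdout.write(highlight(source, PythonLexer(),
                           Terminal256Formatter(style='monokai')))
sys.stdout.write(highlight(source, PythonLexer(),
                           TerminalTrueColorFormatter(style='monokai')))
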
diff --git a/venv/lib/python3.11/site-packages/pygments/lexer.py b/venv/lib/python3.11/site-packages/pygments/lexer.py
deleted file mode 100644
index eb5403e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexer.py
+++ /dev/null
@@ -1,959 +0,0 @@
-"""
- pygments.lexer
- ~~~~~~~~~~~~~~
-
- Base lexer classes.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import time
-
-from pygments.filter import apply_filters, Filter
-from pygments.filters import get_filter_by_name
-from pygments.token import Error, Text, Other, Whitespace, _TokenType
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- make_analysator, Future, guess_decode
-from pygments.regexopt import regex_opt
-
-__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
- 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
- 'default', 'words', 'line_re']
-
-line_re = re.compile('.*?\n')
-
-_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
- (b'\xff\xfe\0\0', 'utf-32'),
- (b'\0\0\xfe\xff', 'utf-32be'),
- (b'\xff\xfe', 'utf-16'),
- (b'\xfe\xff', 'utf-16be')]
-
-_default_analyse = staticmethod(lambda x: 0.0)
-
-
-class LexerMeta(type):
- """
- This metaclass automagically converts ``analyse_text`` methods into
- static methods which always return float values.
- """
-
- def __new__(mcs, name, bases, d):
- if 'analyse_text' in d:
- d['analyse_text'] = make_analysator(d['analyse_text'])
- return type.__new__(mcs, name, bases, d)
-
-
-class Lexer(metaclass=LexerMeta):
- """
- Lexer for a specific language.
-
- See also :doc:`lexerdevelopment`, a high-level guide to writing
- lexers.
-
- Lexer classes have attributes used for choosing the most appropriate
- lexer based on various criteria.
-
- .. autoattribute:: name
- :no-value:
- .. autoattribute:: aliases
- :no-value:
- .. autoattribute:: filenames
- :no-value:
- .. autoattribute:: alias_filenames
- .. autoattribute:: mimetypes
- :no-value:
- .. autoattribute:: priority
-
- Lexers included in Pygments should have an additional attribute:
-
- .. autoattribute:: url
- :no-value:
-
- Lexers included in Pygments may have additional attributes:
-
- .. autoattribute:: _example
- :no-value:
-
- You can pass options to the constructor. The basic options recognized
- by all lexers and processed by the base `Lexer` class are:
-
- ``stripnl``
- Strip leading and trailing newlines from the input (default: True).
- ``stripall``
- Strip all leading and trailing whitespace from the input
- (default: False).
- ``ensurenl``
- Make sure that the input ends with a newline (default: True). This
- is required for some lexers that consume input linewise.
-
- .. versionadded:: 1.3
-
- ``tabsize``
- If given and greater than 0, expand tabs in the input (default: 0).
- ``encoding``
- If given, must be an encoding name. This encoding will be used to
- convert the input string to Unicode, if it is not already a Unicode
- string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
- Latin1 detection). Can also be ``'chardet'`` to use the chardet
- library, if it is installed.
- ``inencoding``
- Overrides the ``encoding`` if given.
- """
-
- #: Full name of the lexer, in human-readable form
- name = None
-
- #: A list of short, unique identifiers that can be used to look
- #: up the lexer from a list, e.g., using `get_lexer_by_name()`.
- aliases = []
-
- #: A list of `fnmatch` patterns that match filenames which contain
- #: content for this lexer. The patterns in this list should be unique among
- #: all lexers.
- filenames = []
-
- #: A list of `fnmatch` patterns that match filenames which may or may not
- #: contain content for this lexer. This list is used by the
- #: :func:`.guess_lexer_for_filename()` function, to determine which lexers
- #: are then included in guessing the correct one. That means that
- #: e.g. every lexer for HTML and a template language should include
- #: ``\*.html`` in this list.
- alias_filenames = []
-
- #: A list of MIME types for content that can be lexed with this lexer.
- mimetypes = []
-
- #: Priority, should multiple lexers match and no content is provided
- priority = 0
-
- #: URL of the language specification/definition. Used in the Pygments
- #: documentation.
- url = None
-
- #: Example file name. Relative to the ``tests/examplefiles`` directory.
- #: This is used by the documentation generator to show an example.
- _example = None
-
- def __init__(self, **options):
- """
- This constructor takes arbitrary options as keyword arguments.
- Every subclass must first process its own options and then call
- the `Lexer` constructor, since it processes the basic
- options like `stripnl`.
-
- An example looks like this:
-
- .. sourcecode:: python
-
- def __init__(self, **options):
- self.compress = options.get('compress', '')
- Lexer.__init__(self, **options)
-
- As these options must all be specifiable as strings (due to the
- command line usage), there are various utility functions
- available to help with that; see `Utilities`_.
- """
- self.options = options
- self.stripnl = get_bool_opt(options, 'stripnl', True)
- self.stripall = get_bool_opt(options, 'stripall', False)
- self.ensurenl = get_bool_opt(options, 'ensurenl', True)
- self.tabsize = get_int_opt(options, 'tabsize', 0)
- self.encoding = options.get('encoding', 'guess')
- self.encoding = options.get('inencoding') or self.encoding
- self.filters = []
- for filter_ in get_list_opt(options, 'filters', ()):
- self.add_filter(filter_)
-
- def __repr__(self):
- if self.options:
- return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
- self.options)
- else:
- return '<pygments.lexers.%s>' % self.__class__.__name__
-
- def add_filter(self, filter_, **options):
- """
- Add a new stream filter to this lexer.
- """
- if not isinstance(filter_, Filter):
- filter_ = get_filter_by_name(filter_, **options)
- self.filters.append(filter_)
-
- def analyse_text(text):
- """
- A static method which is called for lexer guessing.
-
- It should analyse the text and return a float in the range
- from ``0.0`` to ``1.0``. If it returns ``0.0``, the lexer
- will not be selected as the most probable one, if it returns
- ``1.0``, it will be selected immediately. This is used by
- `guess_lexer`.
-
- The `LexerMeta` metaclass automatically wraps this function so
- that it works like a static method (no ``self`` or ``cls``
- parameter) and the return value is automatically converted to
- `float`. If the return value is an object that is boolean `False`
- it's the same as if the return value were ``0.0``.
- """
-
- def _preprocess_lexer_input(self, text):
- """Apply preprocessing such as decoding the input, removing BOM and normalizing newlines."""
-
- if not isinstance(text, str):
- if self.encoding == 'guess':
- text, _ = guess_decode(text)
- elif self.encoding == 'chardet':
- try:
- import chardet
- except ImportError as e:
- raise ImportError('To enable chardet encoding guessing, '
- 'please install the chardet library '
- 'from http://chardet.feedparser.org/') from e
- # check for BOM first
- decoded = None
- for bom, encoding in _encoding_map:
- if text.startswith(bom):
- decoded = text[len(bom):].decode(encoding, 'replace')
- break
- # no BOM found, so use chardet
- if decoded is None:
- enc = chardet.detect(text[:1024]) # Guess using first 1KB
- decoded = text.decode(enc.get('encoding') or 'utf-8',
- 'replace')
- text = decoded
- else:
- text = text.decode(self.encoding)
- if text.startswith('\ufeff'):
- text = text[len('\ufeff'):]
- else:
- if text.startswith('\ufeff'):
- text = text[len('\ufeff'):]
-
- # text now *is* a unicode string
- text = text.replace('\r\n', '\n')
- text = text.replace('\r', '\n')
- if self.stripall:
- text = text.strip()
- elif self.stripnl:
- text = text.strip('\n')
- if self.tabsize > 0:
- text = text.expandtabs(self.tabsize)
- if self.ensurenl and not text.endswith('\n'):
- text += '\n'
-
- return text
-
- def get_tokens(self, text, unfiltered=False):
- """
- This method is the basic interface of a lexer. It is called by
- the `highlight()` function. It must process the text and return an
- iterable of ``(tokentype, value)`` pairs from `text`.
-
- Normally, you don't need to override this method. The default
- implementation processes the options recognized by all lexers
- (`stripnl`, `stripall` and so on), and then yields all tokens
- from `get_tokens_unprocessed()`, with the ``index`` dropped.
-
- If `unfiltered` is set to `True`, the filtering mechanism is
- bypassed even if filters are defined.
- """
- text = self._preprocess_lexer_input(text)
-
- def streamer():
- for _, t, v in self.get_tokens_unprocessed(text):
- yield t, v
- stream = streamer()
- if not unfiltered:
- stream = apply_filters(stream, self.filters, self)
- return stream
-
- def get_tokens_unprocessed(self, text):
- """
- This method should process the text and return an iterable of
- ``(index, tokentype, value)`` tuples where ``index`` is the starting
- position of the token within the input text.
-
- It must be overridden by subclasses. It is recommended to
- implement it as a generator to maximize efficiency.
- """
- raise NotImplementedError
-
-
-class DelegatingLexer(Lexer):
- """
- This lexer takes two lexers as arguments: a root lexer and
- a language lexer. First everything is scanned using the language
- lexer, then all ``Other`` tokens are lexed using the root
- lexer.
-
- The lexers from the ``template`` lexer package use this base lexer.
- """
-
- def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
- self.root_lexer = _root_lexer(**options)
- self.language_lexer = _language_lexer(**options)
- self.needle = _needle
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- buffered = ''
- insertions = []
- lng_buffer = []
- for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
- if t is self.needle:
- if lng_buffer:
- insertions.append((len(buffered), lng_buffer))
- lng_buffer = []
- buffered += v
- else:
- lng_buffer.append((i, t, v))
- if lng_buffer:
- insertions.append((len(buffered), lng_buffer))
- return do_insertions(insertions,
- self.root_lexer.get_tokens_unprocessed(buffered))
-
-
-# ------------------------------------------------------------------------------
-# RegexLexer and ExtendedRegexLexer
-#
-
-
-class include(str): # pylint: disable=invalid-name
- """
- Indicates that a state should include rules from another state.
- """
- pass
-
-
-class _inherit:
- """
- Indicates that a state should inherit from its superclass.
- """
- def __repr__(self):
- return 'inherit'
-
-inherit = _inherit() # pylint: disable=invalid-name
-
-
-class combined(tuple): # pylint: disable=invalid-name
- """
- Indicates a state combined from multiple states.
- """
-
- def __new__(cls, *args):
- return tuple.__new__(cls, args)
-
- def __init__(self, *args):
- # tuple.__init__ doesn't do anything
- pass
-
-
-class _PseudoMatch:
- """
- A pseudo match object constructed from a string.
- """
-
- def __init__(self, start, text):
- self._text = text
- self._start = start
-
- def start(self, arg=None):
- return self._start
-
- def end(self, arg=None):
- return self._start + len(self._text)
-
- def group(self, arg=None):
- if arg:
- raise IndexError('No such group')
- return self._text
-
- def groups(self):
- return (self._text,)
-
- def groupdict(self):
- return {}
-
-
-def bygroups(*args):
- """
- Callback that yields multiple actions for each group in the match.
- """
- def callback(lexer, match, ctx=None):
- for i, action in enumerate(args):
- if action is None:
- continue
- elif type(action) is _TokenType:
- data = match.group(i + 1)
- if data:
- yield match.start(i + 1), action, data
- else:
- data = match.group(i + 1)
- if data is not None:
- if ctx:
- ctx.pos = match.start(i + 1)
- for item in action(lexer,
- _PseudoMatch(match.start(i + 1), data), ctx):
- if item:
- yield item
- if ctx:
- ctx.pos = match.end()
- return callback
-
-
-class _This:
- """
- Special singleton used for indicating the caller class.
- Used by ``using``.
- """
-
-this = _This()
-
-
-def using(_other, **kwargs):
- """
- Callback that processes the match with a different lexer.
-
- The keyword arguments are forwarded to the lexer, except `state` which
- is handled separately.
-
- `state` specifies the state that the new lexer will start in, and can
- be an enumerable such as ('root', 'inline', 'string') or a simple
- string which is assumed to be on top of the root state.
-
- Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
- """
- gt_kwargs = {}
- if 'state' in kwargs:
- s = kwargs.pop('state')
- if isinstance(s, (list, tuple)):
- gt_kwargs['stack'] = s
- else:
- gt_kwargs['stack'] = ('root', s)
-
- if _other is this:
- def callback(lexer, match, ctx=None):
- # if keyword arguments are given the callback
- # function has to create a new lexer instance
- if kwargs:
- # XXX: cache that somehow
- kwargs.update(lexer.options)
- lx = lexer.__class__(**kwargs)
- else:
- lx = lexer
- s = match.start()
- for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
- yield i + s, t, v
- if ctx:
- ctx.pos = match.end()
- else:
- def callback(lexer, match, ctx=None):
- # XXX: cache that somehow
- kwargs.update(lexer.options)
- lx = _other(**kwargs)
-
- s = match.start()
- for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
- yield i + s, t, v
- if ctx:
- ctx.pos = match.end()
- return callback
-
-
-class default:
- """
- Indicates a state or state action (e.g. #pop) to apply.
- For example, ``default('#pop')`` is equivalent to ``('', Token, '#pop')``.
- Note that state tuples may be used as well.
-
- .. versionadded:: 2.0
- """
- def __init__(self, state):
- self.state = state
-
-
-class words(Future):
- """
- Indicates a list of literal words that is transformed into an optimized
- regex that matches any of the words.
-
- .. versionadded:: 2.0
- """
- def __init__(self, words, prefix='', suffix=''):
- self.words = words
- self.prefix = prefix
- self.suffix = suffix
-
- def get(self):
- return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
-
-
-class RegexLexerMeta(LexerMeta):
- """
- Metaclass for RegexLexer, creates the self._tokens attribute from
- self.tokens on the first instantiation.
- """
-
- def _process_regex(cls, regex, rflags, state):
- """Preprocess the regular expression component of a token definition."""
- if isinstance(regex, Future):
- regex = regex.get()
- return re.compile(regex, rflags).match
-
- def _process_token(cls, token):
- """Preprocess the token component of a token definition."""
- assert type(token) is _TokenType or callable(token), \
- 'token type must be simple type or callable, not %r' % (token,)
- return token
-
- def _process_new_state(cls, new_state, unprocessed, processed):
- """Preprocess the state transition action of a token definition."""
- if isinstance(new_state, str):
- # an existing state
- if new_state == '#pop':
- return -1
- elif new_state in unprocessed:
- return (new_state,)
- elif new_state == '#push':
- return new_state
- elif new_state[:5] == '#pop:':
- return -int(new_state[5:])
- else:
- assert False, 'unknown new state %r' % new_state
- elif isinstance(new_state, combined):
- # combine a new state from existing ones
- tmp_state = '_tmp_%d' % cls._tmpname
- cls._tmpname += 1
- itokens = []
- for istate in new_state:
- assert istate != new_state, 'circular state ref %r' % istate
- itokens.extend(cls._process_state(unprocessed,
- processed, istate))
- processed[tmp_state] = itokens
- return (tmp_state,)
- elif isinstance(new_state, tuple):
- # push more than one state
- for istate in new_state:
- assert (istate in unprocessed or
- istate in ('#pop', '#push')), \
- 'unknown new state ' + istate
- return new_state
- else:
- assert False, 'unknown new state def %r' % new_state
-
- def _process_state(cls, unprocessed, processed, state):
- """Preprocess a single state definition."""
- assert type(state) is str, "wrong state name %r" % state
- assert state[0] != '#', "invalid state name %r" % state
- if state in processed:
- return processed[state]
- tokens = processed[state] = []
- rflags = cls.flags
- for tdef in unprocessed[state]:
- if isinstance(tdef, include):
- # it's a state reference
- assert tdef != state, "circular state reference %r" % state
- tokens.extend(cls._process_state(unprocessed, processed,
- str(tdef)))
- continue
- if isinstance(tdef, _inherit):
- # should be processed already, but may not in the case of:
- # 1. the state has no counterpart in any parent
- # 2. the state includes more than one 'inherit'
- continue
- if isinstance(tdef, default):
- new_state = cls._process_new_state(tdef.state, unprocessed, processed)
- tokens.append((re.compile('').match, None, new_state))
- continue
-
- assert type(tdef) is tuple, "wrong rule def %r" % tdef
-
- try:
- rex = cls._process_regex(tdef[0], rflags, state)
- except Exception as err:
- raise ValueError("uncompilable regex %r in state %r of %r: %s" %
- (tdef[0], state, cls, err)) from err
-
- token = cls._process_token(tdef[1])
-
- if len(tdef) == 2:
- new_state = None
- else:
- new_state = cls._process_new_state(tdef[2],
- unprocessed, processed)
-
- tokens.append((rex, token, new_state))
- return tokens
-
- def process_tokendef(cls, name, tokendefs=None):
- """Preprocess a dictionary of token definitions."""
- processed = cls._all_tokens[name] = {}
- tokendefs = tokendefs or cls.tokens[name]
- for state in list(tokendefs):
- cls._process_state(tokendefs, processed, state)
- return processed
-
- def get_tokendefs(cls):
- """
- Merge tokens from superclasses in MRO order, returning a single tokendef
- dictionary.
-
- Any state that is not defined by a subclass will be inherited
- automatically. States that *are* defined by subclasses will, by
- default, override that state in the superclass. If a subclass wishes to
- inherit definitions from a superclass, it can use the special value
- "inherit", which will cause the superclass' state definition to be
- included at that point in the state.
- """
- tokens = {}
- inheritable = {}
- for c in cls.__mro__:
- toks = c.__dict__.get('tokens', {})
-
- for state, items in toks.items():
- curitems = tokens.get(state)
- if curitems is None:
- # N.b. because this is assigned by reference, sufficiently
- # deep hierarchies are processed incrementally (e.g. for
- # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
- # will not see any inherits in B).
- tokens[state] = items
- try:
- inherit_ndx = items.index(inherit)
- except ValueError:
- continue
- inheritable[state] = inherit_ndx
- continue
-
- inherit_ndx = inheritable.pop(state, None)
- if inherit_ndx is None:
- continue
-
- # Replace the "inherit" value with the items
- curitems[inherit_ndx:inherit_ndx+1] = items
- try:
- # N.b. this is the index in items (that is, the superclass
- # copy), so offset required when storing below.
- new_inh_ndx = items.index(inherit)
- except ValueError:
- pass
- else:
- inheritable[state] = inherit_ndx + new_inh_ndx
-
- return tokens
-
- def __call__(cls, *args, **kwds):
- """Instantiate cls after preprocessing its token definitions."""
- if '_tokens' not in cls.__dict__:
- cls._all_tokens = {}
- cls._tmpname = 0
- if hasattr(cls, 'token_variants') and cls.token_variants:
- # don't process yet
- pass
- else:
- cls._tokens = cls.process_tokendef('', cls.get_tokendefs())
-
- return type.__call__(cls, *args, **kwds)
-
-
-class RegexLexer(Lexer, metaclass=RegexLexerMeta):
- """
- Base for simple stateful regular expression-based lexers.
- Simplifies the lexing process so that you need only
- provide a list of states and regular expressions.
- """
-
- #: Flags for compiling the regular expressions.
- #: Defaults to MULTILINE.
- flags = re.MULTILINE
-
- #: At all times there is a stack of states. Initially, the stack contains
- #: a single state 'root'. The top of the stack is called "the current state".
- #:
- #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
- #:
- #: ``new_state`` can be omitted to signify no state transition.
- #: If ``new_state`` is a string, it is pushed on the stack. This ensures
- #: the new current state is ``new_state``.
- #: If ``new_state`` is a tuple of strings, all of those strings are pushed
- #: on the stack and the current state will be the last element of the list.
- #: ``new_state`` can also be ``combined('state1', 'state2', ...)``
- #: to signify a new, anonymous state combined from the rules of two
- #: or more existing ones.
- #: Furthermore, it can be '#pop' to signify going back one step in
- #: the state stack, or '#push' to push the current state on the stack
- #: again. Note that if you push while in a combined state, the combined
- #: state itself is pushed, and not only the state in which the rule is
- #: defined.
- #:
- #: The tuple can also be replaced with ``include('state')``, in which
- #: case the rules from the state named by the string are included in the
- #: current one.
- tokens = {}
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- """
- Split ``text`` into ``(index, tokentype, value)`` tuples.
-
- ``stack`` is the initial stack (default: ``['root']``)
- """
- pos = 0
- tokendefs = self._tokens
- statestack = list(stack)
- statetokens = tokendefs[statestack[-1]]
- while 1:
- for rexmatch, action, new_state in statetokens:
- m = rexmatch(text, pos)
- if m:
- if action is not None:
- if type(action) is _TokenType:
- yield pos, action, m.group()
- else:
- yield from action(self, m)
- pos = m.end()
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
- if len(statestack) > 1:
- statestack.pop()
- elif state == '#push':
- statestack.append(statestack[-1])
- else:
- statestack.append(state)
- elif isinstance(new_state, int):
- # pop, but keep at least one state on the stack
- # (random code leading to unexpected pops should
- # not allow exceptions)
- if abs(new_state) >= len(statestack):
- del statestack[1:]
- else:
- del statestack[new_state:]
- elif new_state == '#push':
- statestack.append(statestack[-1])
- else:
- assert False, "wrong state def: %r" % new_state
- statetokens = tokendefs[statestack[-1]]
- break
- else:
- # We are here only if all state tokens have been considered
- # and there was not a match on any of them.
- try:
- if text[pos] == '\n':
- # at EOL, reset state to "root"
- statestack = ['root']
- statetokens = tokendefs['root']
- yield pos, Whitespace, '\n'
- pos += 1
- continue
- yield pos, Error, text[pos]
- pos += 1
- except IndexError:
- break
-
-
-class LexerContext:
- """
- A helper object that holds lexer position data.
- """
-
- def __init__(self, text, pos, stack=None, end=None):
- self.text = text
- self.pos = pos
- self.end = end or len(text) # end=0 not supported ;-)
- self.stack = stack or ['root']
-
- def __repr__(self):
- return 'LexerContext(%r, %r, %r)' % (
- self.text, self.pos, self.stack)
-
-
-class ExtendedRegexLexer(RegexLexer):
- """
- A RegexLexer that uses a context object to store its state.
- """
-
- def get_tokens_unprocessed(self, text=None, context=None):
- """
- Split ``text`` into ``(index, tokentype, value)`` tuples.
- If ``context`` is given, use this lexer context instead.
- """
- tokendefs = self._tokens
- if not context:
- ctx = LexerContext(text, 0)
- statetokens = tokendefs['root']
- else:
- ctx = context
- statetokens = tokendefs[ctx.stack[-1]]
- text = ctx.text
- while 1:
- for rexmatch, action, new_state in statetokens:
- m = rexmatch(text, ctx.pos, ctx.end)
- if m:
- if action is not None:
- if type(action) is _TokenType:
- yield ctx.pos, action, m.group()
- ctx.pos = m.end()
- else:
- yield from action(self, m, ctx)
- if not new_state:
- # altered the state stack?
- statetokens = tokendefs[ctx.stack[-1]]
- # CAUTION: callback must set ctx.pos!
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
- if len(ctx.stack) > 1:
- ctx.stack.pop()
- elif state == '#push':
- ctx.stack.append(ctx.stack[-1])
- else:
- ctx.stack.append(state)
- elif isinstance(new_state, int):
- # see RegexLexer for why this check is made
- if abs(new_state) >= len(ctx.stack):
- del ctx.stack[1:]
- else:
- del ctx.stack[new_state:]
- elif new_state == '#push':
- ctx.stack.append(ctx.stack[-1])
- else:
- assert False, "wrong state def: %r" % new_state
- statetokens = tokendefs[ctx.stack[-1]]
- break
- else:
- try:
- if ctx.pos >= ctx.end:
- break
- if text[ctx.pos] == '\n':
- # at EOL, reset state to "root"
- ctx.stack = ['root']
- statetokens = tokendefs['root']
- yield ctx.pos, Text, '\n'
- ctx.pos += 1
- continue
- yield ctx.pos, Error, text[ctx.pos]
- ctx.pos += 1
- except IndexError:
- break
-
-
-def do_insertions(insertions, tokens):
- """
- Helper for lexers which must combine the results of several
- sublexers.
-
- ``insertions`` is a list of ``(index, itokens)`` pairs.
- Each ``itokens`` iterable should be inserted at position
- ``index`` into the token stream given by the ``tokens``
- argument.
-
- The result is a combined token stream.
-
- TODO: clean up the code here.
- """
- insertions = iter(insertions)
- try:
- index, itokens = next(insertions)
- except StopIteration:
- # no insertions
- yield from tokens
- return
-
- realpos = None
- insleft = True
-
- # iterate over the token stream where we want to insert
- # the tokens from the insertion list.
- for i, t, v in tokens:
- # first iteration. store the position of first item
- if realpos is None:
- realpos = i
- oldi = 0
- while insleft and i + len(v) >= index:
- tmpval = v[oldi:index - i]
- if tmpval:
- yield realpos, t, tmpval
- realpos += len(tmpval)
- for it_index, it_token, it_value in itokens:
- yield realpos, it_token, it_value
- realpos += len(it_value)
- oldi = index - i
- try:
- index, itokens = next(insertions)
- except StopIteration:
- insleft = False
- break # not strictly necessary
- if oldi < len(v):
- yield realpos, t, v[oldi:]
- realpos += len(v) - oldi
-
- # leftover tokens
- while insleft:
- # no normal tokens, set realpos to zero
- realpos = realpos or 0
- for p, t, v in itokens:
- yield realpos, t, v
- realpos += len(v)
- try:
- index, itokens = next(insertions)
- except StopIteration:
- insleft = False
- break # not strictly necessary
-
-
-class ProfilingRegexLexerMeta(RegexLexerMeta):
- """Metaclass for ProfilingRegexLexer, collects regex timing info."""
-
- def _process_regex(cls, regex, rflags, state):
- if isinstance(regex, words):
- rex = regex_opt(regex.words, prefix=regex.prefix,
- suffix=regex.suffix)
- else:
- rex = regex
- compiled = re.compile(rex, rflags)
-
- def match_func(text, pos, endpos=sys.maxsize):
- info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
- t0 = time.time()
- res = compiled.match(text, pos, endpos)
- t1 = time.time()
- info[0] += 1
- info[1] += t1 - t0
- return res
- return match_func
-
-
-class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
- """Drop-in replacement for RegexLexer that does profiling of its regexes."""
-
- _prof_data = []
- _prof_sort_index = 4 # defaults to time per call
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- # this needs to be a stack, since using(this) will produce nested calls
- self.__class__._prof_data.append({})
- yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
- rawdata = self.__class__._prof_data.pop()
- data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
- n, 1000 * t, 1000 * t / n)
- for ((s, r), (n, t)) in rawdata.items()),
- key=lambda x: x[self._prof_sort_index],
- reverse=True)
- sum_total = sum(x[3] for x in data)
-
- print()
- print('Profiling result for %s lexing %d chars in %.3f ms' %
- (self.__class__.__name__, len(text), sum_total))
- print('=' * 110)
- print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
- print('-' * 110)
- for d in data:
- print('%-20s %-65s %5d %8.4f %8.4f' % d)
- print('=' * 110)
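
Since the module removed above provides the whole RegexLexer machinery (the ``tokens`` table, ``include``, ``bygroups``, ``words``, ``default`` and the ``#pop`` action), here is a small self-contained sketch of a lexer written against it. The IniLikeLexer class, its alias and the toy INI-like syntax are invented for illustration and are not part of Pygments.

from pygments.lexer import RegexLexer, bygroups, default, include, words
from pygments.token import Comment, Keyword, Name, Number, Operator, \
    Punctuation, Text, Whitespace


class IniLikeLexer(RegexLexer):
    """Toy lexer for an INI-like syntax (invented for this example)."""
    name = 'IniLike (example)'
    aliases = ['inilike-example']
    filenames = []

    tokens = {
        'root': [
            include('whitespace'),
            (r';.*?$', Comment.Single),
            # bygroups assigns one token type per regex group
            (r'(\[)([^\]\n]+)(\])',
             bygroups(Punctuation, Name.Namespace, Punctuation)),
            # a key followed by '=' pushes the 'value' state
            (r'([^\s=]+)(\s*)(=)',
             bygroups(Name.Attribute, Whitespace, Operator), 'value'),
        ],
        'value': [
            (r'[ \t]+', Whitespace),
            (words(('yes', 'no', 'true', 'false'), suffix=r'\b'),
             Keyword.Constant, '#pop'),
            (r'\d+$', Number.Integer, '#pop'),
            (r'[^\n]+', Text, '#pop'),
            # empty value: pop without consuming anything
            default('#pop'),
        ],
        'whitespace': [
            (r'\s+', Whitespace),
        ],
    }


if __name__ == '__main__':
    sample = "[server]\nport = 8080\ndebug = yes  ; a comment\n"
    for tokentype, value in IniLikeLexer().get_tokens(sample):
        print(tokentype, repr(value))
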
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__init__.py b/venv/lib/python3.11/site-packages/pygments/lexers/__init__.py
deleted file mode 100644
index 5701be7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__init__.py
+++ /dev/null
@@ -1,363 +0,0 @@
-"""
- pygments.lexers
- ~~~~~~~~~~~~~~~
-
- Pygments lexers.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import types
-import fnmatch
-from os.path import basename
-
-from pygments.lexers._mapping import LEXERS
-from pygments.modeline import get_filetype_from_buffer
-from pygments.plugin import find_plugin_lexers
-from pygments.util import ClassNotFound, guess_decode
-
-COMPAT = {
- 'Python3Lexer': 'PythonLexer',
- 'Python3TracebackLexer': 'PythonTracebackLexer',
- 'LeanLexer': 'Lean3Lexer',
-}
-
-__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
- 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
-
-_lexer_cache = {}
-_pattern_cache = {}
-
-
-def _fn_matches(fn, glob):
- """Return whether the supplied file name fn matches the glob pattern."""
- if glob not in _pattern_cache:
- pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
- return pattern.match(fn)
- return _pattern_cache[glob].match(fn)
-
-
-def _load_lexers(module_name):
- """Load a lexer (and all others in the module too)."""
- mod = __import__(module_name, None, None, ['__all__'])
- for lexer_name in mod.__all__:
- cls = getattr(mod, lexer_name)
- _lexer_cache[cls.name] = cls
-
-
-def get_all_lexers(plugins=True):
- """Return a generator of tuples in the form ``(name, aliases,
- filenames, mimetypes)`` of all known lexers.
-
- If *plugins* is true (the default), plugin lexers supplied by entrypoints
- are also returned. Otherwise, only builtin ones are considered.
- """
- for item in LEXERS.values():
- yield item[1:]
- if plugins:
- for lexer in find_plugin_lexers():
- yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
-
-
-def find_lexer_class(name):
- """
- Return the `Lexer` subclass whose *name* attribute matches the given
- *name* argument.
- """
- if name in _lexer_cache:
- return _lexer_cache[name]
- # lookup builtin lexers
- for module_name, lname, aliases, _, _ in LEXERS.values():
- if name == lname:
- _load_lexers(module_name)
- return _lexer_cache[name]
- # continue with lexers from setuptools entrypoints
- for cls in find_plugin_lexers():
- if cls.name == name:
- return cls
-
-
-def find_lexer_class_by_name(_alias):
- """
- Return the `Lexer` subclass that has `alias` in its aliases list, without
- instantiating it.
-
- Like `get_lexer_by_name`, but does not instantiate the class.
-
- Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is
- found.
-
- .. versionadded:: 2.2
- """
- if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
- # lookup builtin lexers
- for module_name, name, aliases, _, _ in LEXERS.values():
- if _alias.lower() in aliases:
- if name not in _lexer_cache:
- _load_lexers(module_name)
- return _lexer_cache[name]
- # continue with lexers from setuptools entrypoints
- for cls in find_plugin_lexers():
- if _alias.lower() in cls.aliases:
- return cls
- raise ClassNotFound('no lexer for alias %r found' % _alias)
-
-
-def get_lexer_by_name(_alias, **options):
- """
- Return an instance of a `Lexer` subclass that has `alias` in its
- aliases list. The lexer is given the `options` at its
- instantiation.
-
- Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is
- found.
- """
- if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
-
- # lookup builtin lexers
- for module_name, name, aliases, _, _ in LEXERS.values():
- if _alias.lower() in aliases:
- if name not in _lexer_cache:
- _load_lexers(module_name)
- return _lexer_cache[name](**options)
- # continue with lexers from setuptools entrypoints
- for cls in find_plugin_lexers():
- if _alias.lower() in cls.aliases:
- return cls(**options)
- raise ClassNotFound('no lexer for alias %r found' % _alias)
-
-
-def load_lexer_from_file(filename, lexername="CustomLexer", **options):
- """Load a lexer from a file.
-
- This method expects a file located relative to the current working
- directory, which contains a Lexer class. By default, it expects the
- Lexer to be named CustomLexer; you can specify your own class name
- as the second argument to this function.
-
- Users should be very careful with the input, because this method
- is equivalent to executing (via ``exec``) the contents of the input file.
-
- Raises ClassNotFound if there are any problems importing the Lexer.
-
- .. versionadded:: 2.2
- """
- try:
- # This empty dict will contain the namespace for the exec'd file
- custom_namespace = {}
- with open(filename, 'rb') as f:
- exec(f.read(), custom_namespace)
- # Retrieve the class `lexername` from that namespace
- if lexername not in custom_namespace:
- raise ClassNotFound('no valid %s class found in %s' %
- (lexername, filename))
- lexer_class = custom_namespace[lexername]
- # And finally instantiate it with the options
- return lexer_class(**options)
- except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
- except ClassNotFound:
- raise
- except Exception as err:
- raise ClassNotFound('error when loading custom lexer: %s' % err)
-
-
-def find_lexer_class_for_filename(_fn, code=None):
- """Get a lexer for a filename.
-
- If multiple lexers match the filename pattern, use ``analyse_text()`` to
- figure out which one is more appropriate.
-
- Returns None if not found.
- """
- matches = []
- fn = basename(_fn)
- for modname, name, _, filenames, _ in LEXERS.values():
- for filename in filenames:
- if _fn_matches(fn, filename):
- if name not in _lexer_cache:
- _load_lexers(modname)
- matches.append((_lexer_cache[name], filename))
- for cls in find_plugin_lexers():
- for filename in cls.filenames:
- if _fn_matches(fn, filename):
- matches.append((cls, filename))
-
- if isinstance(code, bytes):
- # decode it, since all analyse_text functions expect unicode
- code = guess_decode(code)
-
- def get_rating(info):
- cls, filename = info
- # explicit patterns get a bonus
- bonus = '*' not in filename and 0.5 or 0
- # The class _always_ defines analyse_text because it's included in
- # the Lexer class. The default implementation returns None which
- # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
- # to find lexers which need it overridden.
- if code:
- return cls.analyse_text(code) + bonus, cls.__name__
- return cls.priority + bonus, cls.__name__
-
- if matches:
- matches.sort(key=get_rating)
- # print "Possible lexers, after sort:", matches
- return matches[-1][0]
-
-
-def get_lexer_for_filename(_fn, code=None, **options):
- """Get a lexer for a filename.
-
- Return a `Lexer` subclass instance that has a filename pattern
- matching `fn`. The lexer is given the `options` at its
- instantiation.
-
- Raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename
- is found.
-
- If multiple lexers match the filename pattern, use their ``analyse_text()``
- methods to figure out which one is more appropriate.
- """
- res = find_lexer_class_for_filename(_fn, code)
- if not res:
- raise ClassNotFound('no lexer for filename %r found' % _fn)
- return res(**options)
-
-
-def get_lexer_for_mimetype(_mime, **options):
- """
- Return a `Lexer` subclass instance that has `mime` in its mimetype
- list. The lexer is given the `options` at its instantiation.
-
- Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that mimetype
- is found.
- """
- for modname, name, _, _, mimetypes in LEXERS.values():
- if _mime in mimetypes:
- if name not in _lexer_cache:
- _load_lexers(modname)
- return _lexer_cache[name](**options)
- for cls in find_plugin_lexers():
- if _mime in cls.mimetypes:
- return cls(**options)
- raise ClassNotFound('no lexer for mimetype %r found' % _mime)
-
-
-def _iter_lexerclasses(plugins=True):
- """Return an iterator over all lexer classes."""
- for key in sorted(LEXERS):
- module_name, name = LEXERS[key][:2]
- if name not in _lexer_cache:
- _load_lexers(module_name)
- yield _lexer_cache[name]
- if plugins:
- yield from find_plugin_lexers()
-
-
-def guess_lexer_for_filename(_fn, _text, **options):
-    """
-    As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames`
-    or `alias_filenames` that matches `filename` are taken into consideration.
-
-    :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
-    handle the content.
-    """
-    fn = basename(_fn)
-    primary = {}
-    matching_lexers = set()
-    for lexer in _iter_lexerclasses():
-        for filename in lexer.filenames:
-            if _fn_matches(fn, filename):
-                matching_lexers.add(lexer)
-                primary[lexer] = True
-        for filename in lexer.alias_filenames:
-            if _fn_matches(fn, filename):
-                matching_lexers.add(lexer)
-                primary[lexer] = False
-    if not matching_lexers:
-        raise ClassNotFound('no lexer for filename %r found' % fn)
-    if len(matching_lexers) == 1:
-        return matching_lexers.pop()(**options)
-    result = []
-    for lexer in matching_lexers:
-        rv = lexer.analyse_text(_text)
-        if rv == 1.0:
-            return lexer(**options)
-        result.append((rv, lexer))
-
-    def type_sort(t):
-        # sort by:
-        # - analyse score
-        # - is primary filename pattern?
-        # - priority
-        # - last resort: class name
-        return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
-    result.sort(key=type_sort)
-
-    return result[-1][1](**options)
-
-
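# Illustrative usage sketch: when several lexers claim a filename pattern
# (e.g. *.html via filenames or alias_filenames), the supplied text decides,
# with primary-pattern status and priority breaking any remaining ties.
from pygments.lexers import guess_lexer_for_filename

plain = guess_lexer_for_filename('page.html', '<!DOCTYPE html><html></html>')
templ = guess_lexer_for_filename('page.html',
                                 '{% for item in items %}{{ item }}{% endfor %}')
print(plain.name, '/', templ.name)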
-def guess_lexer(_text, **options):
-    """
-    Return a `Lexer` subclass instance that's guessed from the text in
-    `text`. For that, the :meth:`.analyse_text()` method of every known lexer
-    class is called with the text as argument, and the lexer which returned the
-    highest value will be instantiated and returned.
-
-    :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
-    handle the content.
-    """
-
-    if not isinstance(_text, str):
-        inencoding = options.get('inencoding', options.get('encoding'))
-        if inencoding:
-            _text = _text.decode(inencoding or 'utf8')
-        else:
-            _text, _ = guess_decode(_text)
-
-    # try to get a vim modeline first
-    ft = get_filetype_from_buffer(_text)
-
-    if ft is not None:
-        try:
-            return get_lexer_by_name(ft, **options)
-        except ClassNotFound:
-            pass
-
-    best_lexer = [0.0, None]
-    for lexer in _iter_lexerclasses():
-        rv = lexer.analyse_text(_text)
-        if rv == 1.0:
-            return lexer(**options)
-        if rv > best_lexer[0]:
-            best_lexer[:] = (rv, lexer)
-    if not best_lexer[0] or best_lexer[1] is None:
-        raise ClassNotFound('no lexer matching the text found')
-    return best_lexer[1](**options)
-
-
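# Illustrative usage sketch: guess_lexer() works from the text alone, trying a
# Vim modeline first and then each lexer's analyse_text() score.
from pygments.lexers import guess_lexer

sample = '#!/usr/bin/env python\nimport sys\nprint(sys.version)\n'
print(guess_lexer(sample).name)   # most likely 'Python' thanks to the shebang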
-class _automodule(types.ModuleType):
-    """Automatically import lexers."""
-
-    def __getattr__(self, name):
-        info = LEXERS.get(name)
-        if info:
-            _load_lexers(info[0])
-            cls = _lexer_cache[info[1]]
-            setattr(self, name, cls)
-            return cls
-        if name in COMPAT:
-            return getattr(self, COMPAT[name])
-        raise AttributeError(name)
-
-
-oldmod = sys.modules[__name__]
-newmod = _automodule(__name__)
-newmod.__dict__.update(oldmod.__dict__)
-sys.modules[__name__] = newmod
-del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
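# Sketch of what the module swap above buys (illustrative): attribute access on
# pygments.lexers goes through _automodule.__getattr__, so only the submodule
# that defines the requested class is imported, and the class is then cached on
# the module for later lookups.
import pygments.lexers

cls = pygments.lexers.PythonLexer    # lazily imports pygments.lexers.python
print(cls().name)                    # 'Python'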
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 91f4166..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-311.pyc
deleted file mode 100644
index 0ca0e44..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-311.pyc
deleted file mode 100644
index 2a4966f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-311.pyc
deleted file mode 100644
index 9415329..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-311.pyc
deleted file mode 100644
index 4d173d1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-311.pyc
deleted file mode 100644
index b22d9b8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-311.pyc
deleted file mode 100644
index 88fa3c9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-311.pyc
deleted file mode 100644
index 2e9a0be..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-311.pyc
deleted file mode 100644
index 4dcb28c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-311.pyc
deleted file mode 100644
index 9675da0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-311.pyc
deleted file mode 100644
index 894ce79..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mapping.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mapping.cpython-311.pyc
deleted file mode 100644
index 909ca45..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mapping.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-311.pyc
deleted file mode 100644
index 74a1d97..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-311.pyc
deleted file mode 100644
index 27a24a7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-311.pyc
deleted file mode 100644
index ad2cfaa..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-311.pyc
deleted file mode 100644
index c1a109c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-311.pyc
deleted file mode 100644
index e786073..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-311.pyc
deleted file mode 100644
index 8f83ea6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-311.pyc
deleted file mode 100644
index 9e1930c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-311.pyc
deleted file mode 100644
index 6778997..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-311.pyc
deleted file mode 100644
index 67525bf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-311.pyc
deleted file mode 100644
index 26552b9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-311.pyc
deleted file mode 100644
index 65b66cd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-311.pyc
deleted file mode 100644
index 882673f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-311.pyc
deleted file mode 100644
index b10df8e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-311.pyc
deleted file mode 100644
index c65ce27..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-311.pyc
deleted file mode 100644
index 5a43fb7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/actionscript.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/actionscript.cpython-311.pyc
deleted file mode 100644
index c81c965..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/actionscript.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ada.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ada.cpython-311.pyc
deleted file mode 100644
index 59ce522..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ada.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/agile.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/agile.cpython-311.pyc
deleted file mode 100644
index 0c86fe6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/agile.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/algebra.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/algebra.cpython-311.pyc
deleted file mode 100644
index 616d3ff..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/algebra.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ambient.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ambient.cpython-311.pyc
deleted file mode 100644
index 0071fce..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ambient.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-311.pyc
deleted file mode 100644
index 0d86a29..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ampl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ampl.cpython-311.pyc
deleted file mode 100644
index 4cd9f8c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ampl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-311.pyc
deleted file mode 100644
index 8c65a53..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apl.cpython-311.pyc
deleted file mode 100644
index 014d73a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/archetype.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/archetype.cpython-311.pyc
deleted file mode 100644
index 2c0b3a2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/archetype.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arrow.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arrow.cpython-311.pyc
deleted file mode 100644
index 7bb8fac..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arrow.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arturo.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arturo.cpython-311.pyc
deleted file mode 100644
index f95f22d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arturo.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asc.cpython-311.pyc
deleted file mode 100644
index f6fe6e6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asm.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asm.cpython-311.pyc
deleted file mode 100644
index 349d246..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asm.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asn1.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asn1.cpython-311.pyc
deleted file mode 100644
index cd30285..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asn1.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/automation.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/automation.cpython-311.pyc
deleted file mode 100644
index ab4bd6e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/automation.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bare.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bare.cpython-311.pyc
deleted file mode 100644
index 938dbf7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bare.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/basic.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/basic.cpython-311.pyc
deleted file mode 100644
index ea2a750..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/basic.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bdd.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bdd.cpython-311.pyc
deleted file mode 100644
index 1adde80..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bdd.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/berry.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/berry.cpython-311.pyc
deleted file mode 100644
index 82718a9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/berry.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bibtex.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bibtex.cpython-311.pyc
deleted file mode 100644
index e2dd5a4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bibtex.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/blueprint.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/blueprint.cpython-311.pyc
deleted file mode 100644
index 582b46a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/blueprint.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/boa.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/boa.cpython-311.pyc
deleted file mode 100644
index 9a36339..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/boa.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bqn.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bqn.cpython-311.pyc
deleted file mode 100644
index 63fb2cc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bqn.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/business.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/business.cpython-311.pyc
deleted file mode 100644
index 414381e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/business.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-311.pyc
deleted file mode 100644
index 8c84a4b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_like.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_like.cpython-311.pyc
deleted file mode 100644
index 364c9a1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_like.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/capnproto.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/capnproto.cpython-311.pyc
deleted file mode 100644
index 68fafc1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/capnproto.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/carbon.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/carbon.cpython-311.pyc
deleted file mode 100644
index 766035a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/carbon.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cddl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cddl.cpython-311.pyc
deleted file mode 100644
index 252ec83..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cddl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/chapel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/chapel.cpython-311.pyc
deleted file mode 100644
index 71f2cfd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/chapel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/clean.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/clean.cpython-311.pyc
deleted file mode 100644
index 4336818..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/clean.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/comal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/comal.cpython-311.pyc
deleted file mode 100644
index 6f0cd95..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/comal.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/compiled.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/compiled.cpython-311.pyc
deleted file mode 100644
index 64a2e01..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/compiled.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/configs.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/configs.cpython-311.pyc
deleted file mode 100644
index e0ebc72..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/configs.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/console.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/console.cpython-311.pyc
deleted file mode 100644
index 03c7f16..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/console.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cplint.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cplint.cpython-311.pyc
deleted file mode 100644
index f2a1f8a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cplint.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/crystal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/crystal.cpython-311.pyc
deleted file mode 100644
index 89a70d1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/crystal.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/csound.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/csound.cpython-311.pyc
deleted file mode 100644
index 15c5af2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/csound.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/css.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/css.cpython-311.pyc
deleted file mode 100644
index 3148653..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/css.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/d.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/d.cpython-311.pyc
deleted file mode 100644
index 26f25a1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/d.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dalvik.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dalvik.cpython-311.pyc
deleted file mode 100644
index 139ca7a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dalvik.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/data.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/data.cpython-311.pyc
deleted file mode 100644
index 3ef529b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/data.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dax.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dax.cpython-311.pyc
deleted file mode 100644
index c28f941..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dax.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/devicetree.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/devicetree.cpython-311.pyc
deleted file mode 100644
index 3ab92e4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/devicetree.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/diff.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/diff.cpython-311.pyc
deleted file mode 100644
index 993f904..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/diff.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dns.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dns.cpython-311.pyc
deleted file mode 100644
index aabf00a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dns.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dotnet.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dotnet.cpython-311.pyc
deleted file mode 100644
index b378ed5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dotnet.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dsls.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dsls.cpython-311.pyc
deleted file mode 100644
index 09ef8a3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dsls.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dylan.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dylan.cpython-311.pyc
deleted file mode 100644
index 94152cc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dylan.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ecl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ecl.cpython-311.pyc
deleted file mode 100644
index a3a2652..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ecl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/eiffel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/eiffel.cpython-311.pyc
deleted file mode 100644
index 365c5e8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/eiffel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elm.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elm.cpython-311.pyc
deleted file mode 100644
index 0ea918f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elm.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elpi.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elpi.cpython-311.pyc
deleted file mode 100644
index aec231e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elpi.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/email.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/email.cpython-311.pyc
deleted file mode 100644
index 0a57a2a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/email.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/erlang.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/erlang.cpython-311.pyc
deleted file mode 100644
index b0b2c23..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/erlang.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/esoteric.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/esoteric.cpython-311.pyc
deleted file mode 100644
index 8eff71e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/esoteric.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ezhil.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ezhil.cpython-311.pyc
deleted file mode 100644
index 0b92293..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ezhil.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/factor.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/factor.cpython-311.pyc
deleted file mode 100644
index 6921ab0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/factor.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fantom.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fantom.cpython-311.pyc
deleted file mode 100644
index 5ce3025..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fantom.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/felix.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/felix.cpython-311.pyc
deleted file mode 100644
index 8b16464..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/felix.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fift.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fift.cpython-311.pyc
deleted file mode 100644
index 17d81f0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fift.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/floscript.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/floscript.cpython-311.pyc
deleted file mode 100644
index 069f5ff..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/floscript.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/forth.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/forth.cpython-311.pyc
deleted file mode 100644
index 423fb16..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/forth.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fortran.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fortran.cpython-311.pyc
deleted file mode 100644
index 01b79a0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fortran.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/foxpro.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/foxpro.cpython-311.pyc
deleted file mode 100644
index 086555e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/foxpro.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/freefem.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/freefem.cpython-311.pyc
deleted file mode 100644
index 0e8fbd7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/freefem.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/func.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/func.cpython-311.pyc
deleted file mode 100644
index 2349e5b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/func.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/functional.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/functional.cpython-311.pyc
deleted file mode 100644
index f9f6846..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/functional.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/futhark.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/futhark.cpython-311.pyc
deleted file mode 100644
index 5c6d078..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/futhark.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-311.pyc
deleted file mode 100644
index dd54f2a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gdscript.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gdscript.cpython-311.pyc
deleted file mode 100644
index 038641a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gdscript.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/go.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/go.cpython-311.pyc
deleted file mode 100644
index 9998557..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/go.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-311.pyc
deleted file mode 100644
index 603f089..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graph.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graph.cpython-311.pyc
deleted file mode 100644
index 2591c59..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graph.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphics.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphics.cpython-311.pyc
deleted file mode 100644
index 1783ef5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphics.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphql.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphql.cpython-311.pyc
deleted file mode 100644
index 1a6194b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphql.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphviz.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphviz.cpython-311.pyc
deleted file mode 100644
index e4abf17..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphviz.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gsql.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gsql.cpython-311.pyc
deleted file mode 100644
index 35d9f5b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gsql.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haskell.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haskell.cpython-311.pyc
deleted file mode 100644
index 5582ebd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haskell.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haxe.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haxe.cpython-311.pyc
deleted file mode 100644
index a7f3e67..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haxe.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hdl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hdl.cpython-311.pyc
deleted file mode 100644
index 41dca61..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hdl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hexdump.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hexdump.cpython-311.pyc
deleted file mode 100644
index 428bedd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hexdump.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/html.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/html.cpython-311.pyc
deleted file mode 100644
index b644d21..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/html.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/idl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/idl.cpython-311.pyc
deleted file mode 100644
index 749a490..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/idl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/igor.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/igor.cpython-311.pyc
deleted file mode 100644
index 3c9e1a0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/igor.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/inferno.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/inferno.cpython-311.pyc
deleted file mode 100644
index 3c5e6f5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/inferno.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/installers.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/installers.cpython-311.pyc
deleted file mode 100644
index a5c0363..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/installers.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-311.pyc
deleted file mode 100644
index 951138d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/iolang.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/iolang.cpython-311.pyc
deleted file mode 100644
index 3cd2aed..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/iolang.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/j.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/j.cpython-311.pyc
deleted file mode 100644
index 201ca35..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/j.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/javascript.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/javascript.cpython-311.pyc
deleted file mode 100644
index 10782ff..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/javascript.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jmespath.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jmespath.cpython-311.pyc
deleted file mode 100644
index dc28eab..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jmespath.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jslt.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jslt.cpython-311.pyc
deleted file mode 100644
index 6230cb1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jslt.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-311.pyc
deleted file mode 100644
index 576a736..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsx.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsx.cpython-311.pyc
deleted file mode 100644
index 9733502..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsx.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/julia.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/julia.cpython-311.pyc
deleted file mode 100644
index d85bf85..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/julia.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jvm.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jvm.cpython-311.pyc
deleted file mode 100644
index d92ec21..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jvm.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kuin.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kuin.cpython-311.pyc
deleted file mode 100644
index 78a923e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kuin.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kusto.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kusto.cpython-311.pyc
deleted file mode 100644
index 39c87f6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kusto.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ldap.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ldap.cpython-311.pyc
deleted file mode 100644
index 800b662..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ldap.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lean.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lean.cpython-311.pyc
deleted file mode 100644
index 267daca..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lean.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lilypond.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lilypond.cpython-311.pyc
deleted file mode 100644
index e47ff87..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lilypond.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lisp.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lisp.cpython-311.pyc
deleted file mode 100644
index eb12e4c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lisp.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-311.pyc
deleted file mode 100644
index 95f5698..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/make.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/make.cpython-311.pyc
deleted file mode 100644
index bdc1693..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/make.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/markup.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/markup.cpython-311.pyc
deleted file mode 100644
index 6116f29..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/markup.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/math.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/math.cpython-311.pyc
deleted file mode 100644
index 577807a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/math.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/matlab.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/matlab.cpython-311.pyc
deleted file mode 100644
index f587721..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/matlab.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/maxima.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/maxima.cpython-311.pyc
deleted file mode 100644
index e8d51f1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/maxima.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/meson.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/meson.cpython-311.pyc
deleted file mode 100644
index 4ef4f45..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/meson.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mime.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mime.cpython-311.pyc
deleted file mode 100644
index aa5e3fc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mime.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/minecraft.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/minecraft.cpython-311.pyc
deleted file mode 100644
index 9cdff14..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/minecraft.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mips.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mips.cpython-311.pyc
deleted file mode 100644
index 01afeff..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mips.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ml.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ml.cpython-311.pyc
deleted file mode 100644
index 5d491c1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ml.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modeling.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modeling.cpython-311.pyc
deleted file mode 100644
index 06b8f31..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modeling.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modula2.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modula2.cpython-311.pyc
deleted file mode 100644
index a3cbceb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modula2.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/monte.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/monte.cpython-311.pyc
deleted file mode 100644
index 8eedd20..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/monte.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mosel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mosel.cpython-311.pyc
deleted file mode 100644
index 7dd0084..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mosel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ncl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ncl.cpython-311.pyc
deleted file mode 100644
index e6c3af5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ncl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nimrod.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nimrod.cpython-311.pyc
deleted file mode 100644
index 99a5408..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nimrod.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nit.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nit.cpython-311.pyc
deleted file mode 100644
index feb4355..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nit.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nix.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nix.cpython-311.pyc
deleted file mode 100644
index 69b6698..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nix.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/oberon.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/oberon.cpython-311.pyc
deleted file mode 100644
index 80cced0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/oberon.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/objective.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/objective.cpython-311.pyc
deleted file mode 100644
index cf0d9c2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/objective.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ooc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ooc.cpython-311.pyc
deleted file mode 100644
index 109617e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ooc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/openscad.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/openscad.cpython-311.pyc
deleted file mode 100644
index ab4fec6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/openscad.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/other.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/other.cpython-311.pyc
deleted file mode 100644
index 6fc1630..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/other.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parasail.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parasail.cpython-311.pyc
deleted file mode 100644
index 698cc8a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parasail.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parsers.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parsers.cpython-311.pyc
deleted file mode 100644
index ad3dab1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parsers.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pascal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pascal.cpython-311.pyc
deleted file mode 100644
index 0fd233d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pascal.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pawn.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pawn.cpython-311.pyc
deleted file mode 100644
index c21b6db..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pawn.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/perl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/perl.cpython-311.pyc
deleted file mode 100644
index a9d6f8e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/perl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/phix.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/phix.cpython-311.pyc
deleted file mode 100644
index 5b40a5b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/phix.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/php.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/php.cpython-311.pyc
deleted file mode 100644
index 3637a12..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/php.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pointless.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pointless.cpython-311.pyc
deleted file mode 100644
index 1029d23..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pointless.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pony.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pony.cpython-311.pyc
deleted file mode 100644
index 1212c2d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pony.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/praat.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/praat.cpython-311.pyc
deleted file mode 100644
index 04c3eff..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/praat.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/procfile.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/procfile.cpython-311.pyc
deleted file mode 100644
index f0e6148..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/procfile.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prolog.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prolog.cpython-311.pyc
deleted file mode 100644
index 3ac1d6b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prolog.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/promql.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/promql.cpython-311.pyc
deleted file mode 100644
index b98cd47..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/promql.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prql.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prql.cpython-311.pyc
deleted file mode 100644
index 377e070..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prql.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ptx.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ptx.cpython-311.pyc
deleted file mode 100644
index 2477f4a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ptx.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/python.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/python.cpython-311.pyc
deleted file mode 100644
index 943b5eb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/python.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/q.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/q.cpython-311.pyc
deleted file mode 100644
index 1ffdb28..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/q.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qlik.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qlik.cpython-311.pyc
deleted file mode 100644
index 3680da1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qlik.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qvt.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qvt.cpython-311.pyc
deleted file mode 100644
index ee5795c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qvt.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/r.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/r.cpython-311.pyc
deleted file mode 100644
index b80d97c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/r.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rdf.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rdf.cpython-311.pyc
deleted file mode 100644
index 58c85a4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rdf.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rebol.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rebol.cpython-311.pyc
deleted file mode 100644
index 315e936..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rebol.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/resource.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/resource.cpython-311.pyc
deleted file mode 100644
index f6fd0cf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/resource.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ride.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ride.cpython-311.pyc
deleted file mode 100644
index 4871b5f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ride.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rita.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rita.cpython-311.pyc
deleted file mode 100644
index b8fad98..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rita.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rnc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rnc.cpython-311.pyc
deleted file mode 100644
index a418539..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rnc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/roboconf.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/roboconf.cpython-311.pyc
deleted file mode 100644
index abf3297..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/roboconf.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/robotframework.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/robotframework.cpython-311.pyc
deleted file mode 100644
index adc843d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/robotframework.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ruby.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ruby.cpython-311.pyc
deleted file mode 100644
index f5e56b0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ruby.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rust.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rust.cpython-311.pyc
deleted file mode 100644
index d2258ee..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rust.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sas.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sas.cpython-311.pyc
deleted file mode 100644
index 2eb8942..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sas.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/savi.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/savi.cpython-311.pyc
deleted file mode 100644
index 206484e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/savi.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scdoc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scdoc.cpython-311.pyc
deleted file mode 100644
index 50649f7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scdoc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scripting.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scripting.cpython-311.pyc
deleted file mode 100644
index 06f0bce..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scripting.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sgf.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sgf.cpython-311.pyc
deleted file mode 100644
index a4f9b4e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sgf.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/shell.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/shell.cpython-311.pyc
deleted file mode 100644
index 4ddeb4b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/shell.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sieve.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sieve.cpython-311.pyc
deleted file mode 100644
index 75962bd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sieve.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/slash.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/slash.cpython-311.pyc
deleted file mode 100644
index e0336de..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/slash.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-311.pyc
deleted file mode 100644
index c00cd37..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smithy.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smithy.cpython-311.pyc
deleted file mode 100644
index 928fc8d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smithy.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smv.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smv.cpython-311.pyc
deleted file mode 100644
index 088b4c5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smv.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/snobol.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/snobol.cpython-311.pyc
deleted file mode 100644
index 2d1b9d3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/snobol.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/solidity.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/solidity.cpython-311.pyc
deleted file mode 100644
index 7fb8e61..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/solidity.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sophia.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sophia.cpython-311.pyc
deleted file mode 100644
index fc8bb90..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sophia.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/special.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/special.cpython-311.pyc
deleted file mode 100644
index d86d21f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/special.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/spice.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/spice.cpython-311.pyc
deleted file mode 100644
index f58f758..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/spice.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sql.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sql.cpython-311.pyc
deleted file mode 100644
index 1fd7f04..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sql.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-311.pyc
deleted file mode 100644
index c4c252e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/stata.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/stata.cpython-311.pyc
deleted file mode 100644
index c47696a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/stata.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/supercollider.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/supercollider.cpython-311.pyc
deleted file mode 100644
index 8e505e5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/supercollider.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tal.cpython-311.pyc
deleted file mode 100644
index 2bb1aa5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tal.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tcl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tcl.cpython-311.pyc
deleted file mode 100644
index 42c5e45..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tcl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teal.cpython-311.pyc
deleted file mode 100644
index 0a27f47..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teal.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/templates.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/templates.cpython-311.pyc
deleted file mode 100644
index 00bdb35..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/templates.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teraterm.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teraterm.cpython-311.pyc
deleted file mode 100644
index 6937be5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teraterm.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/testing.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/testing.cpython-311.pyc
deleted file mode 100644
index c40dc2f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/testing.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/text.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/text.cpython-311.pyc
deleted file mode 100644
index 09e49be..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/text.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textedit.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textedit.cpython-311.pyc
deleted file mode 100644
index 2dabaf1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textedit.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textfmts.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textfmts.cpython-311.pyc
deleted file mode 100644
index c71d5bd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textfmts.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/theorem.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/theorem.cpython-311.pyc
deleted file mode 100644
index 7983651..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/theorem.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-311.pyc
deleted file mode 100644
index 0614ee0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tlb.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tlb.cpython-311.pyc
deleted file mode 100644
index cf251e5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tlb.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tls.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tls.cpython-311.pyc
deleted file mode 100644
index 2e9482d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tls.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tnt.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tnt.cpython-311.pyc
deleted file mode 100644
index cfcb66f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tnt.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-311.pyc
deleted file mode 100644
index 511b5a3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/typoscript.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/typoscript.cpython-311.pyc
deleted file mode 100644
index c68f4ab..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/typoscript.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ul4.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ul4.cpython-311.pyc
deleted file mode 100644
index 73bbe5e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ul4.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/unicon.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/unicon.cpython-311.pyc
deleted file mode 100644
index b54c8d7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/unicon.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/urbi.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/urbi.cpython-311.pyc
deleted file mode 100644
index 3193957..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/urbi.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/usd.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/usd.cpython-311.pyc
deleted file mode 100644
index d11e8b9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/usd.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/varnish.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/varnish.cpython-311.pyc
deleted file mode 100644
index ba7df6c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/varnish.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verification.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verification.cpython-311.pyc
deleted file mode 100644
index d63e33d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verification.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verifpal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verifpal.cpython-311.pyc
deleted file mode 100644
index 301a125..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verifpal.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vip.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vip.cpython-311.pyc
deleted file mode 100644
index fc21212..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vip.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vyper.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vyper.cpython-311.pyc
deleted file mode 100644
index e5adf97..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vyper.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/web.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/web.cpython-311.pyc
deleted file mode 100644
index 13f8c8f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/web.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webassembly.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webassembly.cpython-311.pyc
deleted file mode 100644
index dc1814d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webassembly.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webidl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webidl.cpython-311.pyc
deleted file mode 100644
index 994fd42..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webidl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webmisc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webmisc.cpython-311.pyc
deleted file mode 100644
index 14278e4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webmisc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wgsl.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wgsl.cpython-311.pyc
deleted file mode 100644
index d61f298..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wgsl.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/whiley.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/whiley.cpython-311.pyc
deleted file mode 100644
index b18275d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/whiley.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wowtoc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wowtoc.cpython-311.pyc
deleted file mode 100644
index 0eeeb09..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wowtoc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wren.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wren.cpython-311.pyc
deleted file mode 100644
index e95acd8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wren.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/x10.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/x10.cpython-311.pyc
deleted file mode 100644
index 65163bf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/x10.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/xorg.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/xorg.cpython-311.pyc
deleted file mode 100644
index bdb8c07..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/xorg.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yang.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yang.cpython-311.pyc
deleted file mode 100644
index f1af46b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yang.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yara.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yara.cpython-311.pyc
deleted file mode 100644
index a491577..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yara.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/zig.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/zig.cpython-311.pyc
deleted file mode 100644
index d59397f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/zig.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_ada_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_ada_builtins.py
deleted file mode 100644
index 24f9b19..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_ada_builtins.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""
- pygments.lexers._ada_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Ada builtins.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-KEYWORD_LIST = (
- 'abort',
- 'abs',
- 'abstract',
- 'accept',
- 'access',
- 'aliased',
- 'all',
- 'array',
- 'at',
- 'begin',
- 'body',
- 'case',
- 'constant',
- 'declare',
- 'delay',
- 'delta',
- 'digits',
- 'do',
- 'else',
- 'elsif',
- 'end',
- 'entry',
- 'exception',
- 'exit',
- 'interface',
- 'for',
- 'goto',
- 'if',
- 'is',
- 'limited',
- 'loop',
- 'new',
- 'null',
- 'of',
- 'or',
- 'others',
- 'out',
- 'overriding',
- 'pragma',
- 'protected',
- 'raise',
- 'range',
- 'record',
- 'renames',
- 'requeue',
- 'return',
- 'reverse',
- 'select',
- 'separate',
- 'some',
- 'subtype',
- 'synchronized',
- 'task',
- 'tagged',
- 'terminate',
- 'then',
- 'type',
- 'until',
- 'when',
- 'while',
- 'xor'
-)
-
-BUILTIN_LIST = (
- 'Address',
- 'Byte',
- 'Boolean',
- 'Character',
- 'Controlled',
- 'Count',
- 'Cursor',
- 'Duration',
- 'File_Mode',
- 'File_Type',
- 'Float',
- 'Generator',
- 'Integer',
- 'Long_Float',
- 'Long_Integer',
- 'Long_Long_Float',
- 'Long_Long_Integer',
- 'Natural',
- 'Positive',
- 'Reference_Type',
- 'Short_Float',
- 'Short_Integer',
- 'Short_Short_Float',
- 'Short_Short_Integer',
- 'String',
- 'Wide_Character',
- 'Wide_String'
-)
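The deleted `_ada_builtins.py` above only carries data: the KEYWORD_LIST and BUILTIN_LIST tuples, which the Ada lexer typically folds into its token rules through pygments.lexer.words(). A minimal sketch of that consumption pattern follows, assuming the module is still importable; the class name and rule set are illustrative, not the real AdaLexer.

    import re

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Text
    from pygments.lexers._ada_builtins import KEYWORD_LIST, BUILTIN_LIST

    class AdaBuiltinsSketch(RegexLexer):
        """Illustrative only: shows how the builtins tuples feed words()."""
        name = 'AdaBuiltinsSketch'
        flags = re.IGNORECASE  # Ada keywords are case-insensitive

        tokens = {
            'root': [
                # Reserved words come from the deleted KEYWORD_LIST tuple
                (words(KEYWORD_LIST, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
                # Predefined types and identifiers come from BUILTIN_LIST
                (words(BUILTIN_LIST, prefix=r'\b', suffix=r'\b'), Name.Builtin),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }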
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_asy_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_asy_builtins.py
deleted file mode 100644
index c4a362c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_asy_builtins.py
+++ /dev/null
@@ -1,1644 +0,0 @@
-"""
- pygments.lexers._asy_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the asy-function names and asy-variable names of
- Asymptote.
-
- Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
- TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
- for function and variable names.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-ASYFUNCNAME = {
- 'AND',
- 'Arc',
- 'ArcArrow',
- 'ArcArrows',
- 'Arrow',
- 'Arrows',
- 'Automatic',
- 'AvantGarde',
- 'BBox',
- 'BWRainbow',
- 'BWRainbow2',
- 'Bar',
- 'Bars',
- 'BeginArcArrow',
- 'BeginArrow',
- 'BeginBar',
- 'BeginDotMargin',
- 'BeginMargin',
- 'BeginPenMargin',
- 'Blank',
- 'Bookman',
- 'Bottom',
- 'BottomTop',
- 'Bounds',
- 'Break',
- 'Broken',
- 'BrokenLog',
- 'Ceil',
- 'Circle',
- 'CircleBarIntervalMarker',
- 'Cos',
- 'Courier',
- 'CrossIntervalMarker',
- 'DefaultFormat',
- 'DefaultLogFormat',
- 'Degrees',
- 'Dir',
- 'DotMargin',
- 'DotMargins',
- 'Dotted',
- 'Draw',
- 'Drawline',
- 'Embed',
- 'EndArcArrow',
- 'EndArrow',
- 'EndBar',
- 'EndDotMargin',
- 'EndMargin',
- 'EndPenMargin',
- 'Fill',
- 'FillDraw',
- 'Floor',
- 'Format',
- 'Full',
- 'Gaussian',
- 'Gaussrand',
- 'Gaussrandpair',
- 'Gradient',
- 'Grayscale',
- 'Helvetica',
- 'Hermite',
- 'HookHead',
- 'InOutTicks',
- 'InTicks',
- 'J',
- 'Label',
- 'Landscape',
- 'Left',
- 'LeftRight',
- 'LeftTicks',
- 'Legend',
- 'Linear',
- 'Link',
- 'Log',
- 'LogFormat',
- 'Margin',
- 'Margins',
- 'Mark',
- 'MidArcArrow',
- 'MidArrow',
- 'NOT',
- 'NewCenturySchoolBook',
- 'NoBox',
- 'NoMargin',
- 'NoModifier',
- 'NoTicks',
- 'NoTicks3',
- 'NoZero',
- 'NoZeroFormat',
- 'None',
- 'OR',
- 'OmitFormat',
- 'OmitTick',
- 'OutTicks',
- 'Ox',
- 'Oy',
- 'Palatino',
- 'PaletteTicks',
- 'Pen',
- 'PenMargin',
- 'PenMargins',
- 'Pentype',
- 'Portrait',
- 'RadialShade',
- 'Rainbow',
- 'Range',
- 'Relative',
- 'Right',
- 'RightTicks',
- 'Rotate',
- 'Round',
- 'SQR',
- 'Scale',
- 'ScaleX',
- 'ScaleY',
- 'ScaleZ',
- 'Seascape',
- 'Shift',
- 'Sin',
- 'Slant',
- 'Spline',
- 'StickIntervalMarker',
- 'Straight',
- 'Symbol',
- 'Tan',
- 'TeXify',
- 'Ticks',
- 'Ticks3',
- 'TildeIntervalMarker',
- 'TimesRoman',
- 'Top',
- 'TrueMargin',
- 'UnFill',
- 'UpsideDown',
- 'Wheel',
- 'X',
- 'XEquals',
- 'XOR',
- 'XY',
- 'XYEquals',
- 'XYZero',
- 'XYgrid',
- 'XZEquals',
- 'XZZero',
- 'XZero',
- 'XZgrid',
- 'Y',
- 'YEquals',
- 'YXgrid',
- 'YZ',
- 'YZEquals',
- 'YZZero',
- 'YZero',
- 'YZgrid',
- 'Z',
- 'ZX',
- 'ZXgrid',
- 'ZYgrid',
- 'ZapfChancery',
- 'ZapfDingbats',
- '_cputime',
- '_draw',
- '_eval',
- '_image',
- '_labelpath',
- '_projection',
- '_strokepath',
- '_texpath',
- 'aCos',
- 'aSin',
- 'aTan',
- 'abort',
- 'abs',
- 'accel',
- 'acos',
- 'acosh',
- 'acot',
- 'acsc',
- 'add',
- 'addArrow',
- 'addMargins',
- 'addSaveFunction',
- 'addnode',
- 'addnodes',
- 'addpenarc',
- 'addpenline',
- 'addseg',
- 'adjust',
- 'alias',
- 'align',
- 'all',
- 'altitude',
- 'angabscissa',
- 'angle',
- 'angpoint',
- 'animate',
- 'annotate',
- 'anticomplementary',
- 'antipedal',
- 'apply',
- 'approximate',
- 'arc',
- 'arcarrowsize',
- 'arccircle',
- 'arcdir',
- 'arcfromcenter',
- 'arcfromfocus',
- 'arclength',
- 'arcnodesnumber',
- 'arcpoint',
- 'arcsubtended',
- 'arcsubtendedcenter',
- 'arctime',
- 'arctopath',
- 'array',
- 'arrow',
- 'arrow2',
- 'arrowbase',
- 'arrowbasepoints',
- 'arrowsize',
- 'asec',
- 'asin',
- 'asinh',
- 'ask',
- 'assert',
- 'asy',
- 'asycode',
- 'asydir',
- 'asyfigure',
- 'asyfilecode',
- 'asyinclude',
- 'asywrite',
- 'atan',
- 'atan2',
- 'atanh',
- 'atbreakpoint',
- 'atexit',
- 'atime',
- 'attach',
- 'attract',
- 'atupdate',
- 'autoformat',
- 'autoscale',
- 'autoscale3',
- 'axes',
- 'axes3',
- 'axialshade',
- 'axis',
- 'axiscoverage',
- 'azimuth',
- 'babel',
- 'background',
- 'bangles',
- 'bar',
- 'barmarksize',
- 'barsize',
- 'basealign',
- 'baseline',
- 'bbox',
- 'beep',
- 'begin',
- 'beginclip',
- 'begingroup',
- 'beginpoint',
- 'between',
- 'bevel',
- 'bezier',
- 'bezierP',
- 'bezierPP',
- 'bezierPPP',
- 'bezulate',
- 'bibliography',
- 'bibliographystyle',
- 'binarytree',
- 'binarytreeNode',
- 'binomial',
- 'binput',
- 'bins',
- 'bisector',
- 'bisectorpoint',
- 'blend',
- 'boutput',
- 'box',
- 'bqe',
- 'breakpoint',
- 'breakpoints',
- 'brick',
- 'buildRestoreDefaults',
- 'buildRestoreThunk',
- 'buildcycle',
- 'bulletcolor',
- 'canonical',
- 'canonicalcartesiansystem',
- 'cartesiansystem',
- 'case1',
- 'case2',
- 'case3',
- 'cbrt',
- 'cd',
- 'ceil',
- 'center',
- 'centerToFocus',
- 'centroid',
- 'cevian',
- 'change2',
- 'changecoordsys',
- 'checkSegment',
- 'checkconditionlength',
- 'checker',
- 'checklengths',
- 'checkposition',
- 'checktriangle',
- 'choose',
- 'circle',
- 'circlebarframe',
- 'circlemarkradius',
- 'circlenodesnumber',
- 'circumcenter',
- 'circumcircle',
- 'clamped',
- 'clear',
- 'clip',
- 'clipdraw',
- 'close',
- 'cmyk',
- 'code',
- 'colatitude',
- 'collect',
- 'collinear',
- 'color',
- 'colorless',
- 'colors',
- 'colorspace',
- 'comma',
- 'compassmark',
- 'complement',
- 'complementary',
- 'concat',
- 'concurrent',
- 'cone',
- 'conic',
- 'conicnodesnumber',
- 'conictype',
- 'conj',
- 'connect',
- 'containmentTree',
- 'contains',
- 'contour',
- 'contour3',
- 'controlSpecifier',
- 'convert',
- 'coordinates',
- 'coordsys',
- 'copy',
- 'cos',
- 'cosh',
- 'cot',
- 'countIntersections',
- 'cputime',
- 'crop',
- 'cropcode',
- 'cross',
- 'crossframe',
- 'crosshatch',
- 'crossmarksize',
- 'csc',
- 'cubicroots',
- 'curabscissa',
- 'curlSpecifier',
- 'curpoint',
- 'currentarrow',
- 'currentexitfunction',
- 'currentmomarrow',
- 'currentpolarconicroutine',
- 'curve',
- 'cut',
- 'cutafter',
- 'cutbefore',
- 'cyclic',
- 'cylinder',
- 'debugger',
- 'deconstruct',
- 'defaultdir',
- 'defaultformat',
- 'defaultpen',
- 'defined',
- 'degenerate',
- 'degrees',
- 'delete',
- 'deletepreamble',
- 'determinant',
- 'diagonal',
- 'diamond',
- 'diffdiv',
- 'dir',
- 'dirSpecifier',
- 'dirtime',
- 'display',
- 'distance',
- 'divisors',
- 'do_overpaint',
- 'dot',
- 'dotframe',
- 'dotsize',
- 'downcase',
- 'draw',
- 'drawAll',
- 'drawDoubleLine',
- 'drawFermion',
- 'drawGhost',
- 'drawGluon',
- 'drawMomArrow',
- 'drawPhoton',
- 'drawScalar',
- 'drawVertex',
- 'drawVertexBox',
- 'drawVertexBoxO',
- 'drawVertexBoxX',
- 'drawVertexO',
- 'drawVertexOX',
- 'drawVertexTriangle',
- 'drawVertexTriangleO',
- 'drawVertexX',
- 'drawarrow',
- 'drawarrow2',
- 'drawline',
- 'drawtick',
- 'duplicate',
- 'elle',
- 'ellipse',
- 'ellipsenodesnumber',
- 'embed',
- 'embed3',
- 'empty',
- 'enclose',
- 'end',
- 'endScript',
- 'endclip',
- 'endgroup',
- 'endl',
- 'endpoint',
- 'endpoints',
- 'eof',
- 'eol',
- 'equation',
- 'equations',
- 'erase',
- 'erasestep',
- 'erf',
- 'erfc',
- 'error',
- 'errorbar',
- 'errorbars',
- 'eval',
- 'excenter',
- 'excircle',
- 'exit',
- 'exitXasyMode',
- 'exitfunction',
- 'exp',
- 'expfactors',
- 'expi',
- 'expm1',
- 'exradius',
- 'extend',
- 'extension',
- 'extouch',
- 'fabs',
- 'factorial',
- 'fermat',
- 'fft',
- 'fhorner',
- 'figure',
- 'file',
- 'filecode',
- 'fill',
- 'filldraw',
- 'filloutside',
- 'fillrule',
- 'filltype',
- 'find',
- 'finite',
- 'finiteDifferenceJacobian',
- 'firstcut',
- 'firstframe',
- 'fit',
- 'fit2',
- 'fixedscaling',
- 'floor',
- 'flush',
- 'fmdefaults',
- 'fmod',
- 'focusToCenter',
- 'font',
- 'fontcommand',
- 'fontsize',
- 'foot',
- 'format',
- 'frac',
- 'frequency',
- 'fromCenter',
- 'fromFocus',
- 'fspline',
- 'functionshade',
- 'gamma',
- 'generate_random_backtrace',
- 'generateticks',
- 'gergonne',
- 'getc',
- 'getint',
- 'getpair',
- 'getreal',
- 'getstring',
- 'gettriple',
- 'gluon',
- 'gouraudshade',
- 'graph',
- 'graphic',
- 'gray',
- 'grestore',
- 'grid',
- 'grid3',
- 'gsave',
- 'halfbox',
- 'hatch',
- 'hdiffdiv',
- 'hermite',
- 'hex',
- 'histogram',
- 'history',
- 'hline',
- 'hprojection',
- 'hsv',
- 'hyperbola',
- 'hyperbolanodesnumber',
- 'hyperlink',
- 'hypot',
- 'identity',
- 'image',
- 'incenter',
- 'incentral',
- 'incircle',
- 'increasing',
- 'incrementposition',
- 'indexedTransform',
- 'indexedfigure',
- 'initXasyMode',
- 'initdefaults',
- 'input',
- 'inradius',
- 'insert',
- 'inside',
- 'integrate',
- 'interactive',
- 'interior',
- 'interp',
- 'interpolate',
- 'intersect',
- 'intersection',
- 'intersectionpoint',
- 'intersectionpoints',
- 'intersections',
- 'intouch',
- 'inverse',
- 'inversion',
- 'invisible',
- 'is3D',
- 'isDuplicate',
- 'isogonal',
- 'isogonalconjugate',
- 'isotomic',
- 'isotomicconjugate',
- 'isparabola',
- 'italic',
- 'item',
- 'key',
- 'kurtosis',
- 'kurtosisexcess',
- 'label',
- 'labelaxis',
- 'labelmargin',
- 'labelpath',
- 'labels',
- 'labeltick',
- 'labelx',
- 'labelx3',
- 'labely',
- 'labely3',
- 'labelz',
- 'labelz3',
- 'lastcut',
- 'latex',
- 'latitude',
- 'latticeshade',
- 'layer',
- 'layout',
- 'ldexp',
- 'leastsquares',
- 'legend',
- 'legenditem',
- 'length',
- 'lift',
- 'light',
- 'limits',
- 'line',
- 'linear',
- 'linecap',
- 'lineinversion',
- 'linejoin',
- 'linemargin',
- 'lineskip',
- 'linetype',
- 'linewidth',
- 'link',
- 'list',
- 'lm_enorm',
- 'lm_evaluate_default',
- 'lm_lmdif',
- 'lm_lmpar',
- 'lm_minimize',
- 'lm_print_default',
- 'lm_print_quiet',
- 'lm_qrfac',
- 'lm_qrsolv',
- 'locale',
- 'locate',
- 'locatefile',
- 'location',
- 'log',
- 'log10',
- 'log1p',
- 'logaxiscoverage',
- 'longitude',
- 'lookup',
- 'magnetize',
- 'makeNode',
- 'makedraw',
- 'makepen',
- 'map',
- 'margin',
- 'markangle',
- 'markangleradius',
- 'markanglespace',
- 'markarc',
- 'marker',
- 'markinterval',
- 'marknodes',
- 'markrightangle',
- 'markuniform',
- 'mass',
- 'masscenter',
- 'massformat',
- 'math',
- 'max',
- 'max3',
- 'maxbezier',
- 'maxbound',
- 'maxcoords',
- 'maxlength',
- 'maxratio',
- 'maxtimes',
- 'mean',
- 'medial',
- 'median',
- 'midpoint',
- 'min',
- 'min3',
- 'minbezier',
- 'minbound',
- 'minipage',
- 'minratio',
- 'mintimes',
- 'miterlimit',
- 'momArrowPath',
- 'momarrowsize',
- 'monotonic',
- 'multifigure',
- 'nativeformat',
- 'natural',
- 'needshipout',
- 'newl',
- 'newpage',
- 'newslide',
- 'newton',
- 'newtree',
- 'nextframe',
- 'nextnormal',
- 'nextpage',
- 'nib',
- 'nodabscissa',
- 'none',
- 'norm',
- 'normalvideo',
- 'notaknot',
- 'nowarn',
- 'numberpage',
- 'nurb',
- 'object',
- 'offset',
- 'onpath',
- 'opacity',
- 'opposite',
- 'orientation',
- 'orig_circlenodesnumber',
- 'orig_circlenodesnumber1',
- 'orig_draw',
- 'orig_ellipsenodesnumber',
- 'orig_ellipsenodesnumber1',
- 'orig_hyperbolanodesnumber',
- 'orig_parabolanodesnumber',
- 'origin',
- 'orthic',
- 'orthocentercenter',
- 'outformat',
- 'outline',
- 'outprefix',
- 'output',
- 'overloadedMessage',
- 'overwrite',
- 'pack',
- 'pad',
- 'pairs',
- 'palette',
- 'parabola',
- 'parabolanodesnumber',
- 'parallel',
- 'partialsum',
- 'path',
- 'path3',
- 'pattern',
- 'pause',
- 'pdf',
- 'pedal',
- 'periodic',
- 'perp',
- 'perpendicular',
- 'perpendicularmark',
- 'phantom',
- 'phi1',
- 'phi2',
- 'phi3',
- 'photon',
- 'piecewisestraight',
- 'point',
- 'polar',
- 'polarconicroutine',
- 'polargraph',
- 'polygon',
- 'postcontrol',
- 'postscript',
- 'pow10',
- 'ppoint',
- 'prc',
- 'prc0',
- 'precision',
- 'precontrol',
- 'prepend',
- 'print_random_addresses',
- 'project',
- 'projection',
- 'purge',
- 'pwhermite',
- 'quadrant',
- 'quadraticroots',
- 'quantize',
- 'quarticroots',
- 'quotient',
- 'radialshade',
- 'radians',
- 'radicalcenter',
- 'radicalline',
- 'radius',
- 'rand',
- 'randompath',
- 'rd',
- 'readline',
- 'realmult',
- 'realquarticroots',
- 'rectangle',
- 'rectangular',
- 'rectify',
- 'reflect',
- 'relabscissa',
- 'relative',
- 'relativedistance',
- 'reldir',
- 'relpoint',
- 'reltime',
- 'remainder',
- 'remark',
- 'removeDuplicates',
- 'rename',
- 'replace',
- 'report',
- 'resetdefaultpen',
- 'restore',
- 'restoredefaults',
- 'reverse',
- 'reversevideo',
- 'rf',
- 'rfind',
- 'rgb',
- 'rgba',
- 'rgbint',
- 'rms',
- 'rotate',
- 'rotateO',
- 'rotation',
- 'round',
- 'roundbox',
- 'roundedpath',
- 'roundrectangle',
- 'samecoordsys',
- 'sameside',
- 'sample',
- 'save',
- 'savedefaults',
- 'saveline',
- 'scale',
- 'scale3',
- 'scaleO',
- 'scaleT',
- 'scaleless',
- 'scientific',
- 'search',
- 'searchtree',
- 'sec',
- 'secondaryX',
- 'secondaryY',
- 'seconds',
- 'section',
- 'sector',
- 'seek',
- 'seekeof',
- 'segment',
- 'sequence',
- 'setpens',
- 'sgn',
- 'sgnd',
- 'sharpangle',
- 'sharpdegrees',
- 'shift',
- 'shiftless',
- 'shipout',
- 'shipout3',
- 'show',
- 'side',
- 'simeq',
- 'simpson',
- 'sin',
- 'single',
- 'sinh',
- 'size',
- 'size3',
- 'skewness',
- 'skip',
- 'slant',
- 'sleep',
- 'slope',
- 'slopefield',
- 'solve',
- 'solveBVP',
- 'sort',
- 'sourceline',
- 'sphere',
- 'split',
- 'sqrt',
- 'square',
- 'srand',
- 'standardizecoordsys',
- 'startScript',
- 'startTrembling',
- 'stdev',
- 'step',
- 'stickframe',
- 'stickmarksize',
- 'stickmarkspace',
- 'stop',
- 'straight',
- 'straightness',
- 'string',
- 'stripdirectory',
- 'stripextension',
- 'stripfile',
- 'strokepath',
- 'subdivide',
- 'subitem',
- 'subpath',
- 'substr',
- 'sum',
- 'surface',
- 'symmedial',
- 'symmedian',
- 'system',
- 'tab',
- 'tableau',
- 'tan',
- 'tangent',
- 'tangential',
- 'tangents',
- 'tanh',
- 'tell',
- 'tensionSpecifier',
- 'tensorshade',
- 'tex',
- 'texcolor',
- 'texify',
- 'texpath',
- 'texpreamble',
- 'texreset',
- 'texshipout',
- 'texsize',
- 'textpath',
- 'thick',
- 'thin',
- 'tick',
- 'tickMax',
- 'tickMax3',
- 'tickMin',
- 'tickMin3',
- 'ticklabelshift',
- 'ticklocate',
- 'tildeframe',
- 'tildemarksize',
- 'tile',
- 'tiling',
- 'time',
- 'times',
- 'title',
- 'titlepage',
- 'topbox',
- 'transform',
- 'transformation',
- 'transpose',
- 'tremble',
- 'trembleFuzz',
- 'tremble_circlenodesnumber',
- 'tremble_circlenodesnumber1',
- 'tremble_draw',
- 'tremble_ellipsenodesnumber',
- 'tremble_ellipsenodesnumber1',
- 'tremble_hyperbolanodesnumber',
- 'tremble_marknodes',
- 'tremble_markuniform',
- 'tremble_parabolanodesnumber',
- 'triangle',
- 'triangleAbc',
- 'triangleabc',
- 'triangulate',
- 'tricoef',
- 'tridiagonal',
- 'trilinear',
- 'trim',
- 'trueMagnetize',
- 'truepoint',
- 'tube',
- 'uncycle',
- 'unfill',
- 'uniform',
- 'unit',
- 'unitrand',
- 'unitsize',
- 'unityroot',
- 'unstraighten',
- 'upcase',
- 'updatefunction',
- 'uperiodic',
- 'upscale',
- 'uptodate',
- 'usepackage',
- 'usersetting',
- 'usetypescript',
- 'usleep',
- 'value',
- 'variance',
- 'variancebiased',
- 'vbox',
- 'vector',
- 'vectorfield',
- 'verbatim',
- 'view',
- 'vline',
- 'vperiodic',
- 'vprojection',
- 'warn',
- 'warning',
- 'windingnumber',
- 'write',
- 'xaxis',
- 'xaxis3',
- 'xaxis3At',
- 'xaxisAt',
- 'xequals',
- 'xinput',
- 'xlimits',
- 'xoutput',
- 'xpart',
- 'xscale',
- 'xscaleO',
- 'xtick',
- 'xtick3',
- 'xtrans',
- 'yaxis',
- 'yaxis3',
- 'yaxis3At',
- 'yaxisAt',
- 'yequals',
- 'ylimits',
- 'ypart',
- 'yscale',
- 'yscaleO',
- 'ytick',
- 'ytick3',
- 'ytrans',
- 'zaxis3',
- 'zaxis3At',
- 'zero',
- 'zero3',
- 'zlimits',
- 'zpart',
- 'ztick',
- 'ztick3',
- 'ztrans'
-}
-
-ASYVARNAME = {
- 'AliceBlue',
- 'Align',
- 'Allow',
- 'AntiqueWhite',
- 'Apricot',
- 'Aqua',
- 'Aquamarine',
- 'Aspect',
- 'Azure',
- 'BeginPoint',
- 'Beige',
- 'Bisque',
- 'Bittersweet',
- 'Black',
- 'BlanchedAlmond',
- 'Blue',
- 'BlueGreen',
- 'BlueViolet',
- 'Both',
- 'Break',
- 'BrickRed',
- 'Brown',
- 'BurlyWood',
- 'BurntOrange',
- 'CCW',
- 'CW',
- 'CadetBlue',
- 'CarnationPink',
- 'Center',
- 'Centered',
- 'Cerulean',
- 'Chartreuse',
- 'Chocolate',
- 'Coeff',
- 'Coral',
- 'CornflowerBlue',
- 'Cornsilk',
- 'Crimson',
- 'Crop',
- 'Cyan',
- 'Dandelion',
- 'DarkBlue',
- 'DarkCyan',
- 'DarkGoldenrod',
- 'DarkGray',
- 'DarkGreen',
- 'DarkKhaki',
- 'DarkMagenta',
- 'DarkOliveGreen',
- 'DarkOrange',
- 'DarkOrchid',
- 'DarkRed',
- 'DarkSalmon',
- 'DarkSeaGreen',
- 'DarkSlateBlue',
- 'DarkSlateGray',
- 'DarkTurquoise',
- 'DarkViolet',
- 'DeepPink',
- 'DeepSkyBlue',
- 'DefaultHead',
- 'DimGray',
- 'DodgerBlue',
- 'Dotted',
- 'Draw',
- 'E',
- 'ENE',
- 'EPS',
- 'ESE',
- 'E_Euler',
- 'E_PC',
- 'E_RK2',
- 'E_RK3BS',
- 'Emerald',
- 'EndPoint',
- 'Euler',
- 'Fill',
- 'FillDraw',
- 'FireBrick',
- 'FloralWhite',
- 'ForestGreen',
- 'Fuchsia',
- 'Gainsboro',
- 'GhostWhite',
- 'Gold',
- 'Goldenrod',
- 'Gray',
- 'Green',
- 'GreenYellow',
- 'Honeydew',
- 'HookHead',
- 'Horizontal',
- 'HotPink',
- 'I',
- 'IgnoreAspect',
- 'IndianRed',
- 'Indigo',
- 'Ivory',
- 'JOIN_IN',
- 'JOIN_OUT',
- 'JungleGreen',
- 'Khaki',
- 'LM_DWARF',
- 'LM_MACHEP',
- 'LM_SQRT_DWARF',
- 'LM_SQRT_GIANT',
- 'LM_USERTOL',
- 'Label',
- 'Lavender',
- 'LavenderBlush',
- 'LawnGreen',
- 'LeftJustified',
- 'LeftSide',
- 'LemonChiffon',
- 'LightBlue',
- 'LightCoral',
- 'LightCyan',
- 'LightGoldenrodYellow',
- 'LightGreen',
- 'LightGrey',
- 'LightPink',
- 'LightSalmon',
- 'LightSeaGreen',
- 'LightSkyBlue',
- 'LightSlateGray',
- 'LightSteelBlue',
- 'LightYellow',
- 'Lime',
- 'LimeGreen',
- 'Linear',
- 'Linen',
- 'Log',
- 'Logarithmic',
- 'Magenta',
- 'Mahogany',
- 'Mark',
- 'MarkFill',
- 'Maroon',
- 'Max',
- 'MediumAquamarine',
- 'MediumBlue',
- 'MediumOrchid',
- 'MediumPurple',
- 'MediumSeaGreen',
- 'MediumSlateBlue',
- 'MediumSpringGreen',
- 'MediumTurquoise',
- 'MediumVioletRed',
- 'Melon',
- 'MidPoint',
- 'MidnightBlue',
- 'Min',
- 'MintCream',
- 'MistyRose',
- 'Moccasin',
- 'Move',
- 'MoveQuiet',
- 'Mulberry',
- 'N',
- 'NE',
- 'NNE',
- 'NNW',
- 'NW',
- 'NavajoWhite',
- 'Navy',
- 'NavyBlue',
- 'NoAlign',
- 'NoCrop',
- 'NoFill',
- 'NoSide',
- 'OldLace',
- 'Olive',
- 'OliveDrab',
- 'OliveGreen',
- 'Orange',
- 'OrangeRed',
- 'Orchid',
- 'Ox',
- 'Oy',
- 'PC',
- 'PaleGoldenrod',
- 'PaleGreen',
- 'PaleTurquoise',
- 'PaleVioletRed',
- 'PapayaWhip',
- 'Peach',
- 'PeachPuff',
- 'Periwinkle',
- 'Peru',
- 'PineGreen',
- 'Pink',
- 'Plum',
- 'PowderBlue',
- 'ProcessBlue',
- 'Purple',
- 'RK2',
- 'RK3',
- 'RK3BS',
- 'RK4',
- 'RK5',
- 'RK5DP',
- 'RK5F',
- 'RawSienna',
- 'Red',
- 'RedOrange',
- 'RedViolet',
- 'Rhodamine',
- 'RightJustified',
- 'RightSide',
- 'RosyBrown',
- 'RoyalBlue',
- 'RoyalPurple',
- 'RubineRed',
- 'S',
- 'SE',
- 'SSE',
- 'SSW',
- 'SW',
- 'SaddleBrown',
- 'Salmon',
- 'SandyBrown',
- 'SeaGreen',
- 'Seashell',
- 'Sepia',
- 'Sienna',
- 'Silver',
- 'SimpleHead',
- 'SkyBlue',
- 'SlateBlue',
- 'SlateGray',
- 'Snow',
- 'SpringGreen',
- 'SteelBlue',
- 'Suppress',
- 'SuppressQuiet',
- 'Tan',
- 'TeXHead',
- 'Teal',
- 'TealBlue',
- 'Thistle',
- 'Ticksize',
- 'Tomato',
- 'Turquoise',
- 'UnFill',
- 'VERSION',
- 'Value',
- 'Vertical',
- 'Violet',
- 'VioletRed',
- 'W',
- 'WNW',
- 'WSW',
- 'Wheat',
- 'White',
- 'WhiteSmoke',
- 'WildStrawberry',
- 'XYAlign',
- 'YAlign',
- 'Yellow',
- 'YellowGreen',
- 'YellowOrange',
- 'addpenarc',
- 'addpenline',
- 'align',
- 'allowstepping',
- 'angularsystem',
- 'animationdelay',
- 'appendsuffix',
- 'arcarrowangle',
- 'arcarrowfactor',
- 'arrow2sizelimit',
- 'arrowangle',
- 'arrowbarb',
- 'arrowdir',
- 'arrowfactor',
- 'arrowhookfactor',
- 'arrowlength',
- 'arrowsizelimit',
- 'arrowtexfactor',
- 'authorpen',
- 'axis',
- 'axiscoverage',
- 'axislabelfactor',
- 'background',
- 'backgroundcolor',
- 'backgroundpen',
- 'barfactor',
- 'barmarksizefactor',
- 'basealign',
- 'baselinetemplate',
- 'beveljoin',
- 'bigvertexpen',
- 'bigvertexsize',
- 'black',
- 'blue',
- 'bm',
- 'bottom',
- 'bp',
- 'brown',
- 'bullet',
- 'byfoci',
- 'byvertices',
- 'camerafactor',
- 'chartreuse',
- 'circlemarkradiusfactor',
- 'circlenodesnumberfactor',
- 'circleprecision',
- 'circlescale',
- 'cm',
- 'codefile',
- 'codepen',
- 'codeskip',
- 'colorPen',
- 'coloredNodes',
- 'coloredSegments',
- 'conditionlength',
- 'conicnodesfactor',
- 'count',
- 'cputimeformat',
- 'crossmarksizefactor',
- 'currentcoordsys',
- 'currentlight',
- 'currentpatterns',
- 'currentpen',
- 'currentpicture',
- 'currentposition',
- 'currentprojection',
- 'curvilinearsystem',
- 'cuttings',
- 'cyan',
- 'darkblue',
- 'darkbrown',
- 'darkcyan',
- 'darkgray',
- 'darkgreen',
- 'darkgrey',
- 'darkmagenta',
- 'darkolive',
- 'darkred',
- 'dashdotted',
- 'dashed',
- 'datepen',
- 'dateskip',
- 'debuggerlines',
- 'debugging',
- 'deepblue',
- 'deepcyan',
- 'deepgray',
- 'deepgreen',
- 'deepgrey',
- 'deepmagenta',
- 'deepred',
- 'default',
- 'defaultControl',
- 'defaultS',
- 'defaultbackpen',
- 'defaultcoordsys',
- 'defaultfilename',
- 'defaultformat',
- 'defaultmassformat',
- 'defaultpen',
- 'diagnostics',
- 'differentlengths',
- 'dot',
- 'dotfactor',
- 'dotframe',
- 'dotted',
- 'doublelinepen',
- 'doublelinespacing',
- 'down',
- 'duplicateFuzz',
- 'ellipsenodesnumberfactor',
- 'eps',
- 'epsgeo',
- 'epsilon',
- 'evenodd',
- 'extendcap',
- 'fermionpen',
- 'figureborder',
- 'figuremattpen',
- 'firstnode',
- 'firststep',
- 'foregroundcolor',
- 'fuchsia',
- 'fuzz',
- 'gapfactor',
- 'ghostpen',
- 'gluonamplitude',
- 'gluonpen',
- 'gluonratio',
- 'gray',
- 'green',
- 'grey',
- 'hatchepsilon',
- 'havepagenumber',
- 'heavyblue',
- 'heavycyan',
- 'heavygray',
- 'heavygreen',
- 'heavygrey',
- 'heavymagenta',
- 'heavyred',
- 'hline',
- 'hwratio',
- 'hyperbolanodesnumberfactor',
- 'identity4',
- 'ignore',
- 'inXasyMode',
- 'inch',
- 'inches',
- 'includegraphicscommand',
- 'inf',
- 'infinity',
- 'institutionpen',
- 'intMax',
- 'intMin',
- 'invert',
- 'invisible',
- 'itempen',
- 'itemskip',
- 'itemstep',
- 'labelmargin',
- 'landscape',
- 'lastnode',
- 'left',
- 'legendhskip',
- 'legendlinelength',
- 'legendmargin',
- 'legendmarkersize',
- 'legendmaxrelativewidth',
- 'legendvskip',
- 'lightblue',
- 'lightcyan',
- 'lightgray',
- 'lightgreen',
- 'lightgrey',
- 'lightmagenta',
- 'lightolive',
- 'lightred',
- 'lightyellow',
- 'linemargin',
- 'lm_infmsg',
- 'lm_shortmsg',
- 'longdashdotted',
- 'longdashed',
- 'magenta',
- 'magneticPoints',
- 'magneticRadius',
- 'mantissaBits',
- 'markangleradius',
- 'markangleradiusfactor',
- 'markanglespace',
- 'markanglespacefactor',
- 'mediumblue',
- 'mediumcyan',
- 'mediumgray',
- 'mediumgreen',
- 'mediumgrey',
- 'mediummagenta',
- 'mediumred',
- 'mediumyellow',
- 'middle',
- 'minDistDefault',
- 'minblockheight',
- 'minblockwidth',
- 'mincirclediameter',
- 'minipagemargin',
- 'minipagewidth',
- 'minvertexangle',
- 'miterjoin',
- 'mm',
- 'momarrowfactor',
- 'momarrowlength',
- 'momarrowmargin',
- 'momarrowoffset',
- 'momarrowpen',
- 'monoPen',
- 'morepoints',
- 'nCircle',
- 'newbulletcolor',
- 'ngraph',
- 'nil',
- 'nmesh',
- 'nobasealign',
- 'nodeMarginDefault',
- 'nodesystem',
- 'nomarker',
- 'nopoint',
- 'noprimary',
- 'nullpath',
- 'nullpen',
- 'numarray',
- 'ocgindex',
- 'oldbulletcolor',
- 'olive',
- 'orange',
- 'origin',
- 'overpaint',
- 'page',
- 'pageheight',
- 'pagemargin',
- 'pagenumberalign',
- 'pagenumberpen',
- 'pagenumberposition',
- 'pagewidth',
- 'paleblue',
- 'palecyan',
- 'palegray',
- 'palegreen',
- 'palegrey',
- 'palemagenta',
- 'palered',
- 'paleyellow',
- 'parabolanodesnumberfactor',
- 'perpfactor',
- 'phi',
- 'photonamplitude',
- 'photonpen',
- 'photonratio',
- 'pi',
- 'pink',
- 'plain',
- 'plus',
- 'preamblenodes',
- 'pt',
- 'purple',
- 'r3',
- 'r4a',
- 'r4b',
- 'randMax',
- 'realDigits',
- 'realEpsilon',
- 'realMax',
- 'realMin',
- 'red',
- 'relativesystem',
- 'reverse',
- 'right',
- 'roundcap',
- 'roundjoin',
- 'royalblue',
- 'salmon',
- 'saveFunctions',
- 'scalarpen',
- 'sequencereal',
- 'settings',
- 'shipped',
- 'signedtrailingzero',
- 'solid',
- 'springgreen',
- 'sqrtEpsilon',
- 'squarecap',
- 'squarepen',
- 'startposition',
- 'stdin',
- 'stdout',
- 'stepfactor',
- 'stepfraction',
- 'steppagenumberpen',
- 'stepping',
- 'stickframe',
- 'stickmarksizefactor',
- 'stickmarkspacefactor',
- 'textpen',
- 'ticksize',
- 'tildeframe',
- 'tildemarksizefactor',
- 'tinv',
- 'titlealign',
- 'titlepagepen',
- 'titlepageposition',
- 'titlepen',
- 'titleskip',
- 'top',
- 'trailingzero',
- 'treeLevelStep',
- 'treeMinNodeWidth',
- 'treeNodeStep',
- 'trembleAngle',
- 'trembleFrequency',
- 'trembleRandom',
- 'tremblingMode',
- 'undefined',
- 'unitcircle',
- 'unitsquare',
- 'up',
- 'urlpen',
- 'urlskip',
- 'version',
- 'vertexpen',
- 'vertexsize',
- 'viewportmargin',
- 'viewportsize',
- 'vline',
- 'white',
- 'wye',
- 'xformStack',
- 'yellow',
- 'ylabelwidth',
- 'zerotickfuzz',
- 'zerowinding'
-}
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_cl_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_cl_builtins.py
deleted file mode 100644
index beb7b4d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_cl_builtins.py
+++ /dev/null
@@ -1,231 +0,0 @@
-"""
- pygments.lexers._cl_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- ANSI Common Lisp builtins.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-BUILTIN_FUNCTIONS = { # 638 functions
- '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
- 'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
- 'adjustable-array-p', 'adjust-array', 'allocate-instance',
- 'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
- 'apropos-list', 'aref', 'arithmetic-error-operands',
- 'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
- 'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
- 'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
- 'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
- 'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
- 'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
- 'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
- 'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
- 'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
- 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
- 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
- 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
- 'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
- 'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
- 'characterp', 'char-code', 'char-downcase', 'char-equal',
- 'char-greaterp', 'char-int', 'char-lessp', 'char-name',
- 'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
- 'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
- 'close', 'clrhash', 'code-char', 'coerce', 'compile',
- 'compiled-function-p', 'compile-file', 'compile-file-pathname',
- 'compiler-macro-function', 'complement', 'complex', 'complexp',
- 'compute-applicable-methods', 'compute-restarts', 'concatenate',
- 'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
- 'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
- 'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
- 'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
- 'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
- 'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
- 'delete-package', 'denominator', 'deposit-field', 'describe',
- 'describe-object', 'digit-char', 'digit-char-p', 'directory',
- 'directory-namestring', 'disassemble', 'documentation', 'dpb',
- 'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
- 'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
- 'enough-namestring', 'ensure-directories-exist',
- 'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
- 'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
- 'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
- 'file-error-pathname', 'file-length', 'file-namestring',
- 'file-position', 'file-string-length', 'file-write-date',
- 'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
- 'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
- 'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
- 'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
- 'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
- 'fround', 'ftruncate', 'funcall', 'function-keywords',
- 'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
- 'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
- 'gethash', 'get-internal-real-time', 'get-internal-run-time',
- 'get-macro-character', 'get-output-stream-string', 'get-properties',
- 'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
- 'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
- 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
- 'host-namestring', 'identity', 'imagpart', 'import',
- 'initialize-instance', 'input-stream-p', 'inspect',
- 'integer-decode-float', 'integer-length', 'integerp',
- 'interactive-stream-p', 'intern', 'intersection',
- 'invalid-method-error', 'invoke-debugger', 'invoke-restart',
- 'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
- 'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
- 'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
- 'listen', 'list-length', 'listp', 'load',
- 'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
- 'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
- 'logical-pathname-translations', 'logior', 'lognand', 'lognor',
- 'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
- 'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
- 'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
- 'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
- 'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
- 'make-instance', 'make-instances-obsolete', 'make-list',
- 'make-load-form', 'make-load-form-saving-slots', 'make-package',
- 'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
- 'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
- 'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
- 'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
- 'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
- 'merge', 'merge-pathnames', 'method-combination-error',
- 'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
- 'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
- 'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
- 'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
- 'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
- 'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
- 'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
- 'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
- 'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
- 'package-name', 'package-nicknames', 'packagep',
- 'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
- 'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
- 'pathname-device', 'pathname-directory', 'pathname-host',
- 'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
- 'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
- 'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
- 'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
- 'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
- 'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
- 'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
- 'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
- 'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
- 'read-from-string', 'read-line', 'read-preserving-whitespace',
- 'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
- 'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
- 'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
- 'remprop', 'rename-file', 'rename-package', 'replace', 'require',
- 'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
- 'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
- 'search', 'second', 'set', 'set-difference',
- 'set-dispatch-macro-character', 'set-exclusive-or',
- 'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
- 'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
- 'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
- 'simple-condition-format-arguments', 'simple-condition-format-control',
- 'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
- 'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
- 'slot-unbound', 'slot-value', 'software-type', 'software-version',
- 'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
- 'standard-char-p', 'store-value', 'stream-element-type',
- 'stream-error-stream', 'stream-external-format', 'streamp', 'string',
- 'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
- 'string-capitalize', 'string-downcase', 'string-equal',
- 'string-greaterp', 'string-left-trim', 'string-lessp',
- 'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
- 'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
- 'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
- 'substitute', 'substitute-if', 'substitute-if-not', 'subtypep', 'svref',
- 'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
- 'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
- 'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
- 'translate-logical-pathname', 'translate-pathname', 'tree-equal',
- 'truename', 'truncate', 'two-way-stream-input-stream',
- 'two-way-stream-output-stream', 'type-error-datum',
- 'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
- 'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
- 'update-instance-for-different-class',
- 'update-instance-for-redefined-class', 'upgraded-array-element-type',
- 'upgraded-complex-part-type', 'upper-case-p', 'use-package',
- 'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
- 'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
- 'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
- 'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
- 'y-or-n-p', 'zerop',
-}
-
-SPECIAL_FORMS = {
- 'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
- 'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
- 'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
- 'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
- 'unwind-protect',
-}
-
-MACROS = {
- 'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
- 'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
- 'define-compiler-macro', 'define-condition', 'define-method-combination',
- 'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
- 'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
- 'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
- 'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
- 'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
- 'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
- 'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
- 'multiple-value-setq', 'nth-value', 'or', 'pop',
- 'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
- 'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
- 'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
- 'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
- 'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
- 'with-condition-restarts', 'with-hash-table-iterator',
- 'with-input-from-string', 'with-open-file', 'with-open-stream',
- 'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
- 'with-slots', 'with-standard-io-syntax',
-}
-
-LAMBDA_LIST_KEYWORDS = {
- '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
- '&rest', '&whole',
-}
-
-DECLARATIONS = {
- 'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
- 'ignorable', 'notinline', 'type',
-}
-
-BUILTIN_TYPES = {
- 'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
- 'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
- 'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
- 'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
- 'simple-vector', 'standard-char', 'unsigned-byte',
-
- # Condition Types
- 'arithmetic-error', 'cell-error', 'condition', 'control-error',
- 'division-by-zero', 'end-of-file', 'error', 'file-error',
- 'floating-point-inexact', 'floating-point-overflow',
- 'floating-point-underflow', 'floating-point-invalid-operation',
- 'parse-error', 'package-error', 'print-not-readable', 'program-error',
- 'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
- 'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
- 'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
- 'undefined-function', 'warning',
-}
-
-BUILTIN_CLASSES = {
- 'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
- 'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
- 'file-stream', 'float', 'function', 'generic-function', 'hash-table',
- 'integer', 'list', 'logical-pathname', 'method-combination', 'method',
- 'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
- 'real', 'random-state', 'restart', 'sequence', 'standard-class',
- 'standard-generic-function', 'standard-method', 'standard-object',
- 'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
- 'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
-}
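These *_builtins modules are pure data; the importing lexer (pygments.lexers.lisp in this case) essentially just checks identifiers against these sets to pick a more specific token type. A minimal, self-contained sketch of that pattern (not the actual CommonLispLexer implementation; the tiny sets below stand in for the full ones deleted above):

from pygments.token import Keyword, Name

# Stand-ins for the full BUILTIN_FUNCTIONS / SPECIAL_FORMS / MACROS sets deleted above.
BUILTIN_FUNCTIONS = {'car', 'cdr', 'mapcar'}
SPECIAL_FORMS = {'if', 'let', 'lambda'}
MACROS = {'defun', 'loop', 'when'}

def classify(symbol):
    """Map a bare Lisp symbol to a Pygments token type via set membership."""
    if symbol in SPECIAL_FORMS:
        return Keyword
    if symbol in MACROS:
        return Name.Builtin      # token-type choices here are illustrative, not Pygments' exact mapping
    if symbol in BUILTIN_FUNCTIONS:
        return Name.Function
    return Name.Variable

for sym in ('defun', 'car', 'let', 'my-function'):
    print(sym, '->', classify(sym))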
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_cocoa_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_cocoa_builtins.py
deleted file mode 100644
index 4b7dc1a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_cocoa_builtins.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
- pygments.lexers._cocoa_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file defines a set of types used across Apple's Cocoa frameworks.
- It contains a list of @interfaces, @protocols and some other types (structs, unions).
-
- The file may also be used as a standalone generator for the list above.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-COCOA_INTERFACES = {'AAAttribution', 'ABNewPersonViewController', 'ABPeoplePickerNavigationController', 'ABPersonViewController', 'ABUnknownPersonViewController', 'ACAccount', 'ACAccountCredential', 'ACAccountStore', 'ACAccountType', 'ADBannerView', 'ADClient', 'ADInterstitialAd', 'ADInterstitialAdPresentationViewController', 'AEAssessmentConfiguration', 'AEAssessmentSession', 'ALAsset', 'ALAssetRepresentation', 'ALAssetsFilter', 'ALAssetsGroup', 'ALAssetsLibrary', 'APActivationPayload', 'ARAnchor', 'ARAppClipCodeAnchor', 'ARBody2D', 'ARBodyAnchor', 'ARBodyTrackingConfiguration', 'ARCamera', 'ARCoachingOverlayView', 'ARCollaborationData', 'ARConfiguration', 'ARDepthData', 'ARDirectionalLightEstimate', 'AREnvironmentProbeAnchor', 'ARFaceAnchor', 'ARFaceGeometry', 'ARFaceTrackingConfiguration', 'ARFrame', 'ARGeoAnchor', 'ARGeoTrackingConfiguration', 'ARGeoTrackingStatus', 'ARGeometryElement', 'ARGeometrySource', 'ARHitTestResult', 'ARImageAnchor', 'ARImageTrackingConfiguration', 'ARLightEstimate', 'ARMatteGenerator', 'ARMeshAnchor', 'ARMeshGeometry', 'ARObjectAnchor', 'ARObjectScanningConfiguration', 'AROrientationTrackingConfiguration', 'ARParticipantAnchor', 'ARPlaneAnchor', 'ARPlaneGeometry', 'ARPointCloud', 'ARPositionalTrackingConfiguration', 'ARQuickLookPreviewItem', 'ARRaycastQuery', 'ARRaycastResult', 'ARReferenceImage', 'ARReferenceObject', 'ARSCNFaceGeometry', 'ARSCNPlaneGeometry', 'ARSCNView', 'ARSKView', 'ARSession', 'ARSkeleton', 'ARSkeleton2D', 'ARSkeleton3D', 'ARSkeletonDefinition', 'ARTrackedRaycast', 'ARVideoFormat', 'ARView', 'ARWorldMap', 'ARWorldTrackingConfiguration', 'ASAccountAuthenticationModificationController', 'ASAccountAuthenticationModificationExtensionContext', 'ASAccountAuthenticationModificationReplacePasswordWithSignInWithAppleRequest', 'ASAccountAuthenticationModificationRequest', 'ASAccountAuthenticationModificationUpgradePasswordToStrongPasswordRequest', 'ASAccountAuthenticationModificationViewController', 'ASAuthorization', 'ASAuthorizationAppleIDButton', 'ASAuthorizationAppleIDCredential', 'ASAuthorizationAppleIDProvider', 'ASAuthorizationAppleIDRequest', 'ASAuthorizationController', 'ASAuthorizationOpenIDRequest', 'ASAuthorizationPasswordProvider', 'ASAuthorizationPasswordRequest', 'ASAuthorizationProviderExtensionAuthorizationRequest', 'ASAuthorizationRequest', 'ASAuthorizationSingleSignOnCredential', 'ASAuthorizationSingleSignOnProvider', 'ASAuthorizationSingleSignOnRequest', 'ASCredentialIdentityStore', 'ASCredentialIdentityStoreState', 'ASCredentialProviderExtensionContext', 'ASCredentialProviderViewController', 'ASCredentialServiceIdentifier', 'ASIdentifierManager', 'ASPasswordCredential', 'ASPasswordCredentialIdentity', 'ASWebAuthenticationSession', 'ASWebAuthenticationSessionRequest', 'ASWebAuthenticationSessionWebBrowserSessionManager', 'ATTrackingManager', 'AUAudioUnit', 'AUAudioUnitBus', 'AUAudioUnitBusArray', 'AUAudioUnitPreset', 'AUAudioUnitV2Bridge', 'AUAudioUnitViewConfiguration', 'AUParameter', 'AUParameterGroup', 'AUParameterNode', 'AUParameterTree', 'AUViewController', 'AVAggregateAssetDownloadTask', 'AVAsset', 'AVAssetCache', 'AVAssetDownloadStorageManagementPolicy', 'AVAssetDownloadStorageManager', 'AVAssetDownloadTask', 'AVAssetDownloadURLSession', 'AVAssetExportSession', 'AVAssetImageGenerator', 'AVAssetReader', 'AVAssetReaderAudioMixOutput', 'AVAssetReaderOutput', 'AVAssetReaderOutputMetadataAdaptor', 'AVAssetReaderSampleReferenceOutput', 'AVAssetReaderTrackOutput', 'AVAssetReaderVideoCompositionOutput', 'AVAssetResourceLoader', 
'AVAssetResourceLoadingContentInformationRequest', 'AVAssetResourceLoadingDataRequest', 'AVAssetResourceLoadingRequest', 'AVAssetResourceLoadingRequestor', 'AVAssetResourceRenewalRequest', 'AVAssetSegmentReport', 'AVAssetSegmentReportSampleInformation', 'AVAssetSegmentTrackReport', 'AVAssetTrack', 'AVAssetTrackGroup', 'AVAssetTrackSegment', 'AVAssetWriter', 'AVAssetWriterInput', 'AVAssetWriterInputGroup', 'AVAssetWriterInputMetadataAdaptor', 'AVAssetWriterInputPassDescription', 'AVAssetWriterInputPixelBufferAdaptor', 'AVAsynchronousCIImageFilteringRequest', 'AVAsynchronousVideoCompositionRequest', 'AVAudioMix', 'AVAudioMixInputParameters', 'AVAudioSession', 'AVCameraCalibrationData', 'AVCaptureAudioChannel', 'AVCaptureAudioDataOutput', 'AVCaptureAudioFileOutput', 'AVCaptureAudioPreviewOutput', 'AVCaptureAutoExposureBracketedStillImageSettings', 'AVCaptureBracketedStillImageSettings', 'AVCaptureConnection', 'AVCaptureDataOutputSynchronizer', 'AVCaptureDepthDataOutput', 'AVCaptureDevice', 'AVCaptureDeviceDiscoverySession', 'AVCaptureDeviceFormat', 'AVCaptureDeviceInput', 'AVCaptureDeviceInputSource', 'AVCaptureFileOutput', 'AVCaptureInput', 'AVCaptureInputPort', 'AVCaptureManualExposureBracketedStillImageSettings', 'AVCaptureMetadataInput', 'AVCaptureMetadataOutput', 'AVCaptureMovieFileOutput', 'AVCaptureMultiCamSession', 'AVCaptureOutput', 'AVCapturePhoto', 'AVCapturePhotoBracketSettings', 'AVCapturePhotoOutput', 'AVCapturePhotoSettings', 'AVCaptureResolvedPhotoSettings', 'AVCaptureScreenInput', 'AVCaptureSession', 'AVCaptureStillImageOutput', 'AVCaptureSynchronizedData', 'AVCaptureSynchronizedDataCollection', 'AVCaptureSynchronizedDepthData', 'AVCaptureSynchronizedMetadataObjectData', 'AVCaptureSynchronizedSampleBufferData', 'AVCaptureSystemPressureState', 'AVCaptureVideoDataOutput', 'AVCaptureVideoPreviewLayer', 'AVComposition', 'AVCompositionTrack', 'AVCompositionTrackFormatDescriptionReplacement', 'AVCompositionTrackSegment', 'AVContentKeyRequest', 'AVContentKeyResponse', 'AVContentKeySession', 'AVDateRangeMetadataGroup', 'AVDepthData', 'AVDisplayCriteria', 'AVFragmentedAsset', 'AVFragmentedAssetMinder', 'AVFragmentedAssetTrack', 'AVFragmentedMovie', 'AVFragmentedMovieMinder', 'AVFragmentedMovieTrack', 'AVFrameRateRange', 'AVMediaDataStorage', 'AVMediaSelection', 'AVMediaSelectionGroup', 'AVMediaSelectionOption', 'AVMetadataBodyObject', 'AVMetadataCatBodyObject', 'AVMetadataDogBodyObject', 'AVMetadataFaceObject', 'AVMetadataGroup', 'AVMetadataHumanBodyObject', 'AVMetadataItem', 'AVMetadataItemFilter', 'AVMetadataItemValueRequest', 'AVMetadataMachineReadableCodeObject', 'AVMetadataObject', 'AVMetadataSalientObject', 'AVMovie', 'AVMovieTrack', 'AVMutableAssetDownloadStorageManagementPolicy', 'AVMutableAudioMix', 'AVMutableAudioMixInputParameters', 'AVMutableComposition', 'AVMutableCompositionTrack', 'AVMutableDateRangeMetadataGroup', 'AVMutableMediaSelection', 'AVMutableMetadataItem', 'AVMutableMovie', 'AVMutableMovieTrack', 'AVMutableTimedMetadataGroup', 'AVMutableVideoComposition', 'AVMutableVideoCompositionInstruction', 'AVMutableVideoCompositionLayerInstruction', 'AVOutputSettingsAssistant', 'AVPersistableContentKeyRequest', 'AVPictureInPictureController', 'AVPlayer', 'AVPlayerItem', 'AVPlayerItemAccessLog', 'AVPlayerItemAccessLogEvent', 'AVPlayerItemErrorLog', 'AVPlayerItemErrorLogEvent', 'AVPlayerItemLegibleOutput', 'AVPlayerItemMediaDataCollector', 'AVPlayerItemMetadataCollector', 'AVPlayerItemMetadataOutput', 'AVPlayerItemOutput', 'AVPlayerItemTrack', 'AVPlayerItemVideoOutput', 
'AVPlayerLayer', 'AVPlayerLooper', 'AVPlayerMediaSelectionCriteria', 'AVPlayerViewController', 'AVPortraitEffectsMatte', 'AVQueuePlayer', 'AVRouteDetector', 'AVRoutePickerView', 'AVSampleBufferAudioRenderer', 'AVSampleBufferDisplayLayer', 'AVSampleBufferRenderSynchronizer', 'AVSemanticSegmentationMatte', 'AVSynchronizedLayer', 'AVTextStyleRule', 'AVTimedMetadataGroup', 'AVURLAsset', 'AVVideoComposition', 'AVVideoCompositionCoreAnimationTool', 'AVVideoCompositionInstruction', 'AVVideoCompositionLayerInstruction', 'AVVideoCompositionRenderContext', 'AVVideoCompositionRenderHint', 'AXCustomContent', 'BCChatAction', 'BCChatButton', 'BGAppRefreshTask', 'BGAppRefreshTaskRequest', 'BGProcessingTask', 'BGProcessingTaskRequest', 'BGTask', 'BGTaskRequest', 'BGTaskScheduler', 'CAAnimation', 'CAAnimationGroup', 'CABTMIDICentralViewController', 'CABTMIDILocalPeripheralViewController', 'CABasicAnimation', 'CADisplayLink', 'CAEAGLLayer', 'CAEmitterCell', 'CAEmitterLayer', 'CAGradientLayer', 'CAInterAppAudioSwitcherView', 'CAInterAppAudioTransportView', 'CAKeyframeAnimation', 'CALayer', 'CAMediaTimingFunction', 'CAMetalLayer', 'CAPropertyAnimation', 'CAReplicatorLayer', 'CAScrollLayer', 'CAShapeLayer', 'CASpringAnimation', 'CATextLayer', 'CATiledLayer', 'CATransaction', 'CATransformLayer', 'CATransition', 'CAValueFunction', 'CBATTRequest', 'CBAttribute', 'CBCentral', 'CBCentralManager', 'CBCharacteristic', 'CBDescriptor', 'CBL2CAPChannel', 'CBManager', 'CBMutableCharacteristic', 'CBMutableDescriptor', 'CBMutableService', 'CBPeer', 'CBPeripheral', 'CBPeripheralManager', 'CBService', 'CBUUID', 'CHHapticDynamicParameter', 'CHHapticEngine', 'CHHapticEvent', 'CHHapticEventParameter', 'CHHapticParameterCurve', 'CHHapticParameterCurveControlPoint', 'CHHapticPattern', 'CIAztecCodeDescriptor', 'CIBarcodeDescriptor', 'CIBlendKernel', 'CIColor', 'CIColorKernel', 'CIContext', 'CIDataMatrixCodeDescriptor', 'CIDetector', 'CIFaceFeature', 'CIFeature', 'CIFilter', 'CIFilterGenerator', 'CIFilterShape', 'CIImage', 'CIImageAccumulator', 'CIImageProcessorKernel', 'CIKernel', 'CIPDF417CodeDescriptor', 'CIPlugIn', 'CIQRCodeDescriptor', 'CIQRCodeFeature', 'CIRectangleFeature', 'CIRenderDestination', 'CIRenderInfo', 'CIRenderTask', 'CISampler', 'CITextFeature', 'CIVector', 'CIWarpKernel', 'CKAcceptSharesOperation', 'CKAsset', 'CKContainer', 'CKDatabase', 'CKDatabaseNotification', 'CKDatabaseOperation', 'CKDatabaseSubscription', 'CKDiscoverAllUserIdentitiesOperation', 'CKDiscoverUserIdentitiesOperation', 'CKFetchDatabaseChangesOperation', 'CKFetchNotificationChangesOperation', 'CKFetchRecordChangesOperation', 'CKFetchRecordZoneChangesConfiguration', 'CKFetchRecordZoneChangesOperation', 'CKFetchRecordZoneChangesOptions', 'CKFetchRecordZonesOperation', 'CKFetchRecordsOperation', 'CKFetchShareMetadataOperation', 'CKFetchShareParticipantsOperation', 'CKFetchSubscriptionsOperation', 'CKFetchWebAuthTokenOperation', 'CKLocationSortDescriptor', 'CKMarkNotificationsReadOperation', 'CKModifyBadgeOperation', 'CKModifyRecordZonesOperation', 'CKModifyRecordsOperation', 'CKModifySubscriptionsOperation', 'CKNotification', 'CKNotificationID', 'CKNotificationInfo', 'CKOperation', 'CKOperationConfiguration', 'CKOperationGroup', 'CKQuery', 'CKQueryCursor', 'CKQueryNotification', 'CKQueryOperation', 'CKQuerySubscription', 'CKRecord', 'CKRecordID', 'CKRecordZone', 'CKRecordZoneID', 'CKRecordZoneNotification', 'CKRecordZoneSubscription', 'CKReference', 'CKServerChangeToken', 'CKShare', 'CKShareMetadata', 'CKShareParticipant', 'CKSubscription', 
'CKUserIdentity', 'CKUserIdentityLookupInfo', 'CLBeacon', 'CLBeaconIdentityConstraint', 'CLBeaconRegion', 'CLCircularRegion', 'CLFloor', 'CLGeocoder', 'CLHeading', 'CLKComplication', 'CLKComplicationDescriptor', 'CLKComplicationServer', 'CLKComplicationTemplate', 'CLKComplicationTemplateCircularSmallRingImage', 'CLKComplicationTemplateCircularSmallRingText', 'CLKComplicationTemplateCircularSmallSimpleImage', 'CLKComplicationTemplateCircularSmallSimpleText', 'CLKComplicationTemplateCircularSmallStackImage', 'CLKComplicationTemplateCircularSmallStackText', 'CLKComplicationTemplateExtraLargeColumnsText', 'CLKComplicationTemplateExtraLargeRingImage', 'CLKComplicationTemplateExtraLargeRingText', 'CLKComplicationTemplateExtraLargeSimpleImage', 'CLKComplicationTemplateExtraLargeSimpleText', 'CLKComplicationTemplateExtraLargeStackImage', 'CLKComplicationTemplateExtraLargeStackText', 'CLKComplicationTemplateGraphicBezelCircularText', 'CLKComplicationTemplateGraphicCircular', 'CLKComplicationTemplateGraphicCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicCircularClosedGaugeText', 'CLKComplicationTemplateGraphicCircularImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicCircularStackImage', 'CLKComplicationTemplateGraphicCircularStackText', 'CLKComplicationTemplateGraphicCornerCircularImage', 'CLKComplicationTemplateGraphicCornerGaugeImage', 'CLKComplicationTemplateGraphicCornerGaugeText', 'CLKComplicationTemplateGraphicCornerStackText', 'CLKComplicationTemplateGraphicCornerTextImage', 'CLKComplicationTemplateGraphicExtraLargeCircular', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeText', 'CLKComplicationTemplateGraphicExtraLargeCircularImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicExtraLargeCircularStackImage', 'CLKComplicationTemplateGraphicExtraLargeCircularStackText', 'CLKComplicationTemplateGraphicRectangularFullImage', 'CLKComplicationTemplateGraphicRectangularLargeImage', 'CLKComplicationTemplateGraphicRectangularStandardBody', 'CLKComplicationTemplateGraphicRectangularTextGauge', 'CLKComplicationTemplateModularLargeColumns', 'CLKComplicationTemplateModularLargeStandardBody', 'CLKComplicationTemplateModularLargeTable', 'CLKComplicationTemplateModularLargeTallBody', 'CLKComplicationTemplateModularSmallColumnsText', 'CLKComplicationTemplateModularSmallRingImage', 'CLKComplicationTemplateModularSmallRingText', 'CLKComplicationTemplateModularSmallSimpleImage', 'CLKComplicationTemplateModularSmallSimpleText', 'CLKComplicationTemplateModularSmallStackImage', 'CLKComplicationTemplateModularSmallStackText', 'CLKComplicationTemplateUtilitarianLargeFlat', 'CLKComplicationTemplateUtilitarianSmallFlat', 'CLKComplicationTemplateUtilitarianSmallRingImage', 'CLKComplicationTemplateUtilitarianSmallRingText', 'CLKComplicationTemplateUtilitarianSmallSquare', 'CLKComplicationTimelineEntry', 'CLKDateTextProvider', 'CLKFullColorImageProvider', 'CLKGaugeProvider', 'CLKImageProvider', 'CLKRelativeDateTextProvider', 'CLKSimpleGaugeProvider', 'CLKSimpleTextProvider', 'CLKTextProvider', 'CLKTimeIntervalGaugeProvider', 'CLKTimeIntervalTextProvider', 'CLKTimeTextProvider', 
'CLKWatchFaceLibrary', 'CLLocation', 'CLLocationManager', 'CLPlacemark', 'CLRegion', 'CLSActivity', 'CLSActivityItem', 'CLSBinaryItem', 'CLSContext', 'CLSDataStore', 'CLSObject', 'CLSProgressReportingCapability', 'CLSQuantityItem', 'CLSScoreItem', 'CLVisit', 'CMAccelerometerData', 'CMAltimeter', 'CMAltitudeData', 'CMAttitude', 'CMDeviceMotion', 'CMDyskineticSymptomResult', 'CMFallDetectionEvent', 'CMFallDetectionManager', 'CMGyroData', 'CMHeadphoneMotionManager', 'CMLogItem', 'CMMagnetometerData', 'CMMotionActivity', 'CMMotionActivityManager', 'CMMotionManager', 'CMMovementDisorderManager', 'CMPedometer', 'CMPedometerData', 'CMPedometerEvent', 'CMRecordedAccelerometerData', 'CMRecordedRotationRateData', 'CMRotationRateData', 'CMSensorDataList', 'CMSensorRecorder', 'CMStepCounter', 'CMTremorResult', 'CNChangeHistoryAddContactEvent', 'CNChangeHistoryAddGroupEvent', 'CNChangeHistoryAddMemberToGroupEvent', 'CNChangeHistoryAddSubgroupToGroupEvent', 'CNChangeHistoryDeleteContactEvent', 'CNChangeHistoryDeleteGroupEvent', 'CNChangeHistoryDropEverythingEvent', 'CNChangeHistoryEvent', 'CNChangeHistoryFetchRequest', 'CNChangeHistoryRemoveMemberFromGroupEvent', 'CNChangeHistoryRemoveSubgroupFromGroupEvent', 'CNChangeHistoryUpdateContactEvent', 'CNChangeHistoryUpdateGroupEvent', 'CNContact', 'CNContactFetchRequest', 'CNContactFormatter', 'CNContactPickerViewController', 'CNContactProperty', 'CNContactRelation', 'CNContactStore', 'CNContactVCardSerialization', 'CNContactViewController', 'CNContactsUserDefaults', 'CNContainer', 'CNFetchRequest', 'CNFetchResult', 'CNGroup', 'CNInstantMessageAddress', 'CNLabeledValue', 'CNMutableContact', 'CNMutableGroup', 'CNMutablePostalAddress', 'CNPhoneNumber', 'CNPostalAddress', 'CNPostalAddressFormatter', 'CNSaveRequest', 'CNSocialProfile', 'CPActionSheetTemplate', 'CPAlertAction', 'CPAlertTemplate', 'CPBarButton', 'CPButton', 'CPContact', 'CPContactCallButton', 'CPContactDirectionsButton', 'CPContactMessageButton', 'CPContactTemplate', 'CPDashboardButton', 'CPDashboardController', 'CPGridButton', 'CPGridTemplate', 'CPImageSet', 'CPInformationItem', 'CPInformationRatingItem', 'CPInformationTemplate', 'CPInterfaceController', 'CPListImageRowItem', 'CPListItem', 'CPListSection', 'CPListTemplate', 'CPManeuver', 'CPMapButton', 'CPMapTemplate', 'CPMessageComposeBarButton', 'CPMessageListItem', 'CPMessageListItemLeadingConfiguration', 'CPMessageListItemTrailingConfiguration', 'CPNavigationAlert', 'CPNavigationSession', 'CPNowPlayingAddToLibraryButton', 'CPNowPlayingButton', 'CPNowPlayingImageButton', 'CPNowPlayingMoreButton', 'CPNowPlayingPlaybackRateButton', 'CPNowPlayingRepeatButton', 'CPNowPlayingShuffleButton', 'CPNowPlayingTemplate', 'CPPointOfInterest', 'CPPointOfInterestTemplate', 'CPRouteChoice', 'CPSearchTemplate', 'CPSessionConfiguration', 'CPTabBarTemplate', 'CPTemplate', 'CPTemplateApplicationDashboardScene', 'CPTemplateApplicationScene', 'CPTextButton', 'CPTravelEstimates', 'CPTrip', 'CPTripPreviewTextConfiguration', 'CPVoiceControlState', 'CPVoiceControlTemplate', 'CPWindow', 'CSCustomAttributeKey', 'CSIndexExtensionRequestHandler', 'CSLocalizedString', 'CSPerson', 'CSSearchQuery', 'CSSearchableIndex', 'CSSearchableItem', 'CSSearchableItemAttributeSet', 'CTCall', 'CTCallCenter', 'CTCarrier', 'CTCellularData', 'CTCellularPlanProvisioning', 'CTCellularPlanProvisioningRequest', 'CTSubscriber', 'CTSubscriberInfo', 'CTTelephonyNetworkInfo', 'CXAction', 'CXAnswerCallAction', 'CXCall', 'CXCallAction', 'CXCallController', 'CXCallDirectoryExtensionContext', 
'CXCallDirectoryManager', 'CXCallDirectoryProvider', 'CXCallObserver', 'CXCallUpdate', 'CXEndCallAction', 'CXHandle', 'CXPlayDTMFCallAction', 'CXProvider', 'CXProviderConfiguration', 'CXSetGroupCallAction', 'CXSetHeldCallAction', 'CXSetMutedCallAction', 'CXStartCallAction', 'CXTransaction', 'DCAppAttestService', 'DCDevice', 'EAAccessory', 'EAAccessoryManager', 'EAGLContext', 'EAGLSharegroup', 'EASession', 'EAWiFiUnconfiguredAccessory', 'EAWiFiUnconfiguredAccessoryBrowser', 'EKAlarm', 'EKCalendar', 'EKCalendarChooser', 'EKCalendarItem', 'EKEvent', 'EKEventEditViewController', 'EKEventStore', 'EKEventViewController', 'EKObject', 'EKParticipant', 'EKRecurrenceDayOfWeek', 'EKRecurrenceEnd', 'EKRecurrenceRule', 'EKReminder', 'EKSource', 'EKStructuredLocation', 'ENExposureConfiguration', 'ENExposureDaySummary', 'ENExposureDetectionSummary', 'ENExposureInfo', 'ENExposureSummaryItem', 'ENExposureWindow', 'ENManager', 'ENScanInstance', 'ENTemporaryExposureKey', 'EntityRotationGestureRecognizer', 'EntityScaleGestureRecognizer', 'EntityTranslationGestureRecognizer', 'FPUIActionExtensionContext', 'FPUIActionExtensionViewController', 'GCColor', 'GCController', 'GCControllerAxisInput', 'GCControllerButtonInput', 'GCControllerDirectionPad', 'GCControllerElement', 'GCControllerTouchpad', 'GCDeviceBattery', 'GCDeviceCursor', 'GCDeviceHaptics', 'GCDeviceLight', 'GCDirectionalGamepad', 'GCDualShockGamepad', 'GCEventViewController', 'GCExtendedGamepad', 'GCExtendedGamepadSnapshot', 'GCGamepad', 'GCGamepadSnapshot', 'GCKeyboard', 'GCKeyboardInput', 'GCMicroGamepad', 'GCMicroGamepadSnapshot', 'GCMotion', 'GCMouse', 'GCMouseInput', 'GCPhysicalInputProfile', 'GCXboxGamepad', 'GKARC4RandomSource', 'GKAccessPoint', 'GKAchievement', 'GKAchievementChallenge', 'GKAchievementDescription', 'GKAchievementViewController', 'GKAgent', 'GKAgent2D', 'GKAgent3D', 'GKBasePlayer', 'GKBehavior', 'GKBillowNoiseSource', 'GKChallenge', 'GKChallengeEventHandler', 'GKCheckerboardNoiseSource', 'GKCircleObstacle', 'GKCloudPlayer', 'GKCoherentNoiseSource', 'GKComponent', 'GKComponentSystem', 'GKCompositeBehavior', 'GKConstantNoiseSource', 'GKCylindersNoiseSource', 'GKDecisionNode', 'GKDecisionTree', 'GKEntity', 'GKFriendRequestComposeViewController', 'GKGameCenterViewController', 'GKGameSession', 'GKGameSessionSharingViewController', 'GKGaussianDistribution', 'GKGoal', 'GKGraph', 'GKGraphNode', 'GKGraphNode2D', 'GKGraphNode3D', 'GKGridGraph', 'GKGridGraphNode', 'GKInvite', 'GKLeaderboard', 'GKLeaderboardEntry', 'GKLeaderboardScore', 'GKLeaderboardSet', 'GKLeaderboardViewController', 'GKLinearCongruentialRandomSource', 'GKLocalPlayer', 'GKMatch', 'GKMatchRequest', 'GKMatchmaker', 'GKMatchmakerViewController', 'GKMersenneTwisterRandomSource', 'GKMeshGraph', 'GKMinmaxStrategist', 'GKMonteCarloStrategist', 'GKNSPredicateRule', 'GKNoise', 'GKNoiseMap', 'GKNoiseSource', 'GKNotificationBanner', 'GKObstacle', 'GKObstacleGraph', 'GKOctree', 'GKOctreeNode', 'GKPath', 'GKPeerPickerController', 'GKPerlinNoiseSource', 'GKPlayer', 'GKPolygonObstacle', 'GKQuadtree', 'GKQuadtreeNode', 'GKRTree', 'GKRandomDistribution', 'GKRandomSource', 'GKRidgedNoiseSource', 'GKRule', 'GKRuleSystem', 'GKSCNNodeComponent', 'GKSKNodeComponent', 'GKSavedGame', 'GKScene', 'GKScore', 'GKScoreChallenge', 'GKSession', 'GKShuffledDistribution', 'GKSphereObstacle', 'GKSpheresNoiseSource', 'GKState', 'GKStateMachine', 'GKTurnBasedEventHandler', 'GKTurnBasedExchangeReply', 'GKTurnBasedMatch', 'GKTurnBasedMatchmakerViewController', 'GKTurnBasedParticipant', 'GKVoiceChat', 
'GKVoiceChatService', 'GKVoronoiNoiseSource', 'GLKBaseEffect', 'GLKEffectProperty', 'GLKEffectPropertyFog', 'GLKEffectPropertyLight', 'GLKEffectPropertyMaterial', 'GLKEffectPropertyTexture', 'GLKEffectPropertyTransform', 'GLKMesh', 'GLKMeshBuffer', 'GLKMeshBufferAllocator', 'GLKReflectionMapEffect', 'GLKSkyboxEffect', 'GLKSubmesh', 'GLKTextureInfo', 'GLKTextureLoader', 'GLKView', 'GLKViewController', 'HKActivityMoveModeObject', 'HKActivityRingView', 'HKActivitySummary', 'HKActivitySummaryQuery', 'HKActivitySummaryType', 'HKAnchoredObjectQuery', 'HKAudiogramSample', 'HKAudiogramSampleType', 'HKAudiogramSensitivityPoint', 'HKBiologicalSexObject', 'HKBloodTypeObject', 'HKCDADocument', 'HKCDADocumentSample', 'HKCategorySample', 'HKCategoryType', 'HKCharacteristicType', 'HKClinicalRecord', 'HKClinicalType', 'HKCorrelation', 'HKCorrelationQuery', 'HKCorrelationType', 'HKCumulativeQuantitySample', 'HKCumulativeQuantitySeriesSample', 'HKDeletedObject', 'HKDevice', 'HKDiscreteQuantitySample', 'HKDocumentQuery', 'HKDocumentSample', 'HKDocumentType', 'HKElectrocardiogram', 'HKElectrocardiogramQuery', 'HKElectrocardiogramType', 'HKElectrocardiogramVoltageMeasurement', 'HKFHIRResource', 'HKFHIRVersion', 'HKFitzpatrickSkinTypeObject', 'HKHealthStore', 'HKHeartbeatSeriesBuilder', 'HKHeartbeatSeriesQuery', 'HKHeartbeatSeriesSample', 'HKLiveWorkoutBuilder', 'HKLiveWorkoutDataSource', 'HKObject', 'HKObjectType', 'HKObserverQuery', 'HKQuantity', 'HKQuantitySample', 'HKQuantitySeriesSampleBuilder', 'HKQuantitySeriesSampleQuery', 'HKQuantityType', 'HKQuery', 'HKQueryAnchor', 'HKSample', 'HKSampleQuery', 'HKSampleType', 'HKSeriesBuilder', 'HKSeriesSample', 'HKSeriesType', 'HKSource', 'HKSourceQuery', 'HKSourceRevision', 'HKStatistics', 'HKStatisticsCollection', 'HKStatisticsCollectionQuery', 'HKStatisticsQuery', 'HKUnit', 'HKWheelchairUseObject', 'HKWorkout', 'HKWorkoutBuilder', 'HKWorkoutConfiguration', 'HKWorkoutEvent', 'HKWorkoutRoute', 'HKWorkoutRouteBuilder', 'HKWorkoutRouteQuery', 'HKWorkoutSession', 'HKWorkoutType', 'HMAccessControl', 'HMAccessory', 'HMAccessoryBrowser', 'HMAccessoryCategory', 'HMAccessoryOwnershipToken', 'HMAccessoryProfile', 'HMAccessorySetupPayload', 'HMAction', 'HMActionSet', 'HMAddAccessoryRequest', 'HMCalendarEvent', 'HMCameraAudioControl', 'HMCameraControl', 'HMCameraProfile', 'HMCameraSettingsControl', 'HMCameraSnapshot', 'HMCameraSnapshotControl', 'HMCameraSource', 'HMCameraStream', 'HMCameraStreamControl', 'HMCameraView', 'HMCharacteristic', 'HMCharacteristicEvent', 'HMCharacteristicMetadata', 'HMCharacteristicThresholdRangeEvent', 'HMCharacteristicWriteAction', 'HMDurationEvent', 'HMEvent', 'HMEventTrigger', 'HMHome', 'HMHomeAccessControl', 'HMHomeManager', 'HMLocationEvent', 'HMMutableCalendarEvent', 'HMMutableCharacteristicEvent', 'HMMutableCharacteristicThresholdRangeEvent', 'HMMutableDurationEvent', 'HMMutableLocationEvent', 'HMMutablePresenceEvent', 'HMMutableSignificantTimeEvent', 'HMNetworkConfigurationProfile', 'HMNumberRange', 'HMPresenceEvent', 'HMRoom', 'HMService', 'HMServiceGroup', 'HMSignificantTimeEvent', 'HMTimeEvent', 'HMTimerTrigger', 'HMTrigger', 'HMUser', 'HMZone', 'ICCameraDevice', 'ICCameraFile', 'ICCameraFolder', 'ICCameraItem', 'ICDevice', 'ICDeviceBrowser', 'ICScannerBandData', 'ICScannerDevice', 'ICScannerFeature', 'ICScannerFeatureBoolean', 'ICScannerFeatureEnumeration', 'ICScannerFeatureRange', 'ICScannerFeatureTemplate', 'ICScannerFunctionalUnit', 'ICScannerFunctionalUnitDocumentFeeder', 'ICScannerFunctionalUnitFlatbed', 
'ICScannerFunctionalUnitNegativeTransparency', 'ICScannerFunctionalUnitPositiveTransparency', 'ILCallClassificationRequest', 'ILCallCommunication', 'ILClassificationRequest', 'ILClassificationResponse', 'ILClassificationUIExtensionContext', 'ILClassificationUIExtensionViewController', 'ILCommunication', 'ILMessageClassificationRequest', 'ILMessageCommunication', 'ILMessageFilterExtension', 'ILMessageFilterExtensionContext', 'ILMessageFilterQueryRequest', 'ILMessageFilterQueryResponse', 'ILNetworkResponse', 'INAccountTypeResolutionResult', 'INActivateCarSignalIntent', 'INActivateCarSignalIntentResponse', 'INAddMediaIntent', 'INAddMediaIntentResponse', 'INAddMediaMediaDestinationResolutionResult', 'INAddMediaMediaItemResolutionResult', 'INAddTasksIntent', 'INAddTasksIntentResponse', 'INAddTasksTargetTaskListResolutionResult', 'INAddTasksTemporalEventTriggerResolutionResult', 'INAirline', 'INAirport', 'INAirportGate', 'INAppendToNoteIntent', 'INAppendToNoteIntentResponse', 'INBalanceAmount', 'INBalanceTypeResolutionResult', 'INBillDetails', 'INBillPayee', 'INBillPayeeResolutionResult', 'INBillTypeResolutionResult', 'INBoatReservation', 'INBoatTrip', 'INBookRestaurantReservationIntent', 'INBookRestaurantReservationIntentResponse', 'INBooleanResolutionResult', 'INBusReservation', 'INBusTrip', 'INCallCapabilityResolutionResult', 'INCallDestinationTypeResolutionResult', 'INCallRecord', 'INCallRecordFilter', 'INCallRecordResolutionResult', 'INCallRecordTypeOptionsResolutionResult', 'INCallRecordTypeResolutionResult', 'INCancelRideIntent', 'INCancelRideIntentResponse', 'INCancelWorkoutIntent', 'INCancelWorkoutIntentResponse', 'INCar', 'INCarAirCirculationModeResolutionResult', 'INCarAudioSourceResolutionResult', 'INCarDefrosterResolutionResult', 'INCarHeadUnit', 'INCarSeatResolutionResult', 'INCarSignalOptionsResolutionResult', 'INCreateNoteIntent', 'INCreateNoteIntentResponse', 'INCreateTaskListIntent', 'INCreateTaskListIntentResponse', 'INCurrencyAmount', 'INCurrencyAmountResolutionResult', 'INDailyRoutineRelevanceProvider', 'INDateComponentsRange', 'INDateComponentsRangeResolutionResult', 'INDateComponentsResolutionResult', 'INDateRelevanceProvider', 'INDateSearchTypeResolutionResult', 'INDefaultCardTemplate', 'INDeleteTasksIntent', 'INDeleteTasksIntentResponse', 'INDeleteTasksTaskListResolutionResult', 'INDeleteTasksTaskResolutionResult', 'INDoubleResolutionResult', 'INEndWorkoutIntent', 'INEndWorkoutIntentResponse', 'INEnergyResolutionResult', 'INEnumResolutionResult', 'INExtension', 'INFile', 'INFileResolutionResult', 'INFlight', 'INFlightReservation', 'INGetAvailableRestaurantReservationBookingDefaultsIntent', 'INGetAvailableRestaurantReservationBookingDefaultsIntentResponse', 'INGetAvailableRestaurantReservationBookingsIntent', 'INGetAvailableRestaurantReservationBookingsIntentResponse', 'INGetCarLockStatusIntent', 'INGetCarLockStatusIntentResponse', 'INGetCarPowerLevelStatusIntent', 'INGetCarPowerLevelStatusIntentResponse', 'INGetReservationDetailsIntent', 'INGetReservationDetailsIntentResponse', 'INGetRestaurantGuestIntent', 'INGetRestaurantGuestIntentResponse', 'INGetRideStatusIntent', 'INGetRideStatusIntentResponse', 'INGetUserCurrentRestaurantReservationBookingsIntent', 'INGetUserCurrentRestaurantReservationBookingsIntentResponse', 'INGetVisualCodeIntent', 'INGetVisualCodeIntentResponse', 'INImage', 'INImageNoteContent', 'INIntegerResolutionResult', 'INIntent', 'INIntentResolutionResult', 'INIntentResponse', 'INInteraction', 'INLengthResolutionResult', 'INListCarsIntent', 
'INListCarsIntentResponse', 'INListRideOptionsIntent', 'INListRideOptionsIntentResponse', 'INLocationRelevanceProvider', 'INLocationSearchTypeResolutionResult', 'INLodgingReservation', 'INMassResolutionResult', 'INMediaAffinityTypeResolutionResult', 'INMediaDestination', 'INMediaDestinationResolutionResult', 'INMediaItem', 'INMediaItemResolutionResult', 'INMediaSearch', 'INMediaUserContext', 'INMessage', 'INMessageAttributeOptionsResolutionResult', 'INMessageAttributeResolutionResult', 'INNote', 'INNoteContent', 'INNoteContentResolutionResult', 'INNoteContentTypeResolutionResult', 'INNoteResolutionResult', 'INNotebookItemTypeResolutionResult', 'INObject', 'INObjectCollection', 'INObjectResolutionResult', 'INObjectSection', 'INOutgoingMessageTypeResolutionResult', 'INParameter', 'INPauseWorkoutIntent', 'INPauseWorkoutIntentResponse', 'INPayBillIntent', 'INPayBillIntentResponse', 'INPaymentAccount', 'INPaymentAccountResolutionResult', 'INPaymentAmount', 'INPaymentAmountResolutionResult', 'INPaymentMethod', 'INPaymentMethodResolutionResult', 'INPaymentRecord', 'INPaymentStatusResolutionResult', 'INPerson', 'INPersonHandle', 'INPersonResolutionResult', 'INPlacemarkResolutionResult', 'INPlayMediaIntent', 'INPlayMediaIntentResponse', 'INPlayMediaMediaItemResolutionResult', 'INPlayMediaPlaybackSpeedResolutionResult', 'INPlaybackQueueLocationResolutionResult', 'INPlaybackRepeatModeResolutionResult', 'INPreferences', 'INPriceRange', 'INRadioTypeResolutionResult', 'INRecurrenceRule', 'INRelativeReferenceResolutionResult', 'INRelativeSettingResolutionResult', 'INRelevanceProvider', 'INRelevantShortcut', 'INRelevantShortcutStore', 'INRentalCar', 'INRentalCarReservation', 'INRequestPaymentCurrencyAmountResolutionResult', 'INRequestPaymentIntent', 'INRequestPaymentIntentResponse', 'INRequestPaymentPayerResolutionResult', 'INRequestRideIntent', 'INRequestRideIntentResponse', 'INReservation', 'INReservationAction', 'INRestaurant', 'INRestaurantGuest', 'INRestaurantGuestDisplayPreferences', 'INRestaurantGuestResolutionResult', 'INRestaurantOffer', 'INRestaurantReservation', 'INRestaurantReservationBooking', 'INRestaurantReservationUserBooking', 'INRestaurantResolutionResult', 'INResumeWorkoutIntent', 'INResumeWorkoutIntentResponse', 'INRideCompletionStatus', 'INRideDriver', 'INRideFareLineItem', 'INRideOption', 'INRidePartySizeOption', 'INRideStatus', 'INRideVehicle', 'INSaveProfileInCarIntent', 'INSaveProfileInCarIntentResponse', 'INSearchCallHistoryIntent', 'INSearchCallHistoryIntentResponse', 'INSearchForAccountsIntent', 'INSearchForAccountsIntentResponse', 'INSearchForBillsIntent', 'INSearchForBillsIntentResponse', 'INSearchForMediaIntent', 'INSearchForMediaIntentResponse', 'INSearchForMediaMediaItemResolutionResult', 'INSearchForMessagesIntent', 'INSearchForMessagesIntentResponse', 'INSearchForNotebookItemsIntent', 'INSearchForNotebookItemsIntentResponse', 'INSearchForPhotosIntent', 'INSearchForPhotosIntentResponse', 'INSeat', 'INSendMessageAttachment', 'INSendMessageIntent', 'INSendMessageIntentResponse', 'INSendMessageRecipientResolutionResult', 'INSendPaymentCurrencyAmountResolutionResult', 'INSendPaymentIntent', 'INSendPaymentIntentResponse', 'INSendPaymentPayeeResolutionResult', 'INSendRideFeedbackIntent', 'INSendRideFeedbackIntentResponse', 'INSetAudioSourceInCarIntent', 'INSetAudioSourceInCarIntentResponse', 'INSetCarLockStatusIntent', 'INSetCarLockStatusIntentResponse', 'INSetClimateSettingsInCarIntent', 'INSetClimateSettingsInCarIntentResponse', 'INSetDefrosterSettingsInCarIntent', 
'INSetDefrosterSettingsInCarIntentResponse', 'INSetMessageAttributeIntent', 'INSetMessageAttributeIntentResponse', 'INSetProfileInCarIntent', 'INSetProfileInCarIntentResponse', 'INSetRadioStationIntent', 'INSetRadioStationIntentResponse', 'INSetSeatSettingsInCarIntent', 'INSetSeatSettingsInCarIntentResponse', 'INSetTaskAttributeIntent', 'INSetTaskAttributeIntentResponse', 'INSetTaskAttributeTemporalEventTriggerResolutionResult', 'INShortcut', 'INSnoozeTasksIntent', 'INSnoozeTasksIntentResponse', 'INSnoozeTasksTaskResolutionResult', 'INSpatialEventTrigger', 'INSpatialEventTriggerResolutionResult', 'INSpeakableString', 'INSpeakableStringResolutionResult', 'INSpeedResolutionResult', 'INStartAudioCallIntent', 'INStartAudioCallIntentResponse', 'INStartCallCallCapabilityResolutionResult', 'INStartCallCallRecordToCallBackResolutionResult', 'INStartCallContactResolutionResult', 'INStartCallIntent', 'INStartCallIntentResponse', 'INStartPhotoPlaybackIntent', 'INStartPhotoPlaybackIntentResponse', 'INStartVideoCallIntent', 'INStartVideoCallIntentResponse', 'INStartWorkoutIntent', 'INStartWorkoutIntentResponse', 'INStringResolutionResult', 'INTask', 'INTaskList', 'INTaskListResolutionResult', 'INTaskPriorityResolutionResult', 'INTaskResolutionResult', 'INTaskStatusResolutionResult', 'INTemperatureResolutionResult', 'INTemporalEventTrigger', 'INTemporalEventTriggerResolutionResult', 'INTemporalEventTriggerTypeOptionsResolutionResult', 'INTermsAndConditions', 'INTextNoteContent', 'INTicketedEvent', 'INTicketedEventReservation', 'INTimeIntervalResolutionResult', 'INTrainReservation', 'INTrainTrip', 'INTransferMoneyIntent', 'INTransferMoneyIntentResponse', 'INUIAddVoiceShortcutButton', 'INUIAddVoiceShortcutViewController', 'INUIEditVoiceShortcutViewController', 'INURLResolutionResult', 'INUpcomingMediaManager', 'INUpdateMediaAffinityIntent', 'INUpdateMediaAffinityIntentResponse', 'INUpdateMediaAffinityMediaItemResolutionResult', 'INUserContext', 'INVisualCodeTypeResolutionResult', 'INVocabulary', 'INVoiceShortcut', 'INVoiceShortcutCenter', 'INVolumeResolutionResult', 'INWorkoutGoalUnitTypeResolutionResult', 'INWorkoutLocationTypeResolutionResult', 'IOSurface', 'JSContext', 'JSManagedValue', 'JSValue', 'JSVirtualMachine', 'LAContext', 'LPLinkMetadata', 'LPLinkView', 'LPMetadataProvider', 'MCAdvertiserAssistant', 'MCBrowserViewController', 'MCNearbyServiceAdvertiser', 'MCNearbyServiceBrowser', 'MCPeerID', 'MCSession', 'MDLAnimatedMatrix4x4', 'MDLAnimatedQuaternion', 'MDLAnimatedQuaternionArray', 'MDLAnimatedScalar', 'MDLAnimatedScalarArray', 'MDLAnimatedValue', 'MDLAnimatedVector2', 'MDLAnimatedVector3', 'MDLAnimatedVector3Array', 'MDLAnimatedVector4', 'MDLAnimationBindComponent', 'MDLAreaLight', 'MDLAsset', 'MDLBundleAssetResolver', 'MDLCamera', 'MDLCheckerboardTexture', 'MDLColorSwatchTexture', 'MDLLight', 'MDLLightProbe', 'MDLMaterial', 'MDLMaterialProperty', 'MDLMaterialPropertyConnection', 'MDLMaterialPropertyGraph', 'MDLMaterialPropertyNode', 'MDLMatrix4x4Array', 'MDLMesh', 'MDLMeshBufferData', 'MDLMeshBufferDataAllocator', 'MDLMeshBufferMap', 'MDLMeshBufferZoneDefault', 'MDLNoiseTexture', 'MDLNormalMapTexture', 'MDLObject', 'MDLObjectContainer', 'MDLPackedJointAnimation', 'MDLPathAssetResolver', 'MDLPhotometricLight', 'MDLPhysicallyPlausibleLight', 'MDLPhysicallyPlausibleScatteringFunction', 'MDLRelativeAssetResolver', 'MDLScatteringFunction', 'MDLSkeleton', 'MDLSkyCubeTexture', 'MDLStereoscopicCamera', 'MDLSubmesh', 'MDLSubmeshTopology', 'MDLTexture', 'MDLTextureFilter', 'MDLTextureSampler', 
'MDLTransform', 'MDLTransformMatrixOp', 'MDLTransformOrientOp', 'MDLTransformRotateOp', 'MDLTransformRotateXOp', 'MDLTransformRotateYOp', 'MDLTransformRotateZOp', 'MDLTransformScaleOp', 'MDLTransformStack', 'MDLTransformTranslateOp', 'MDLURLTexture', 'MDLVertexAttribute', 'MDLVertexAttributeData', 'MDLVertexBufferLayout', 'MDLVertexDescriptor', 'MDLVoxelArray', 'MFMailComposeViewController', 'MFMessageComposeViewController', 'MIDICIDeviceInfo', 'MIDICIDiscoveredNode', 'MIDICIDiscoveryManager', 'MIDICIProfile', 'MIDICIProfileState', 'MIDICIResponder', 'MIDICISession', 'MIDINetworkConnection', 'MIDINetworkHost', 'MIDINetworkSession', 'MKAnnotationView', 'MKCircle', 'MKCircleRenderer', 'MKCircleView', 'MKClusterAnnotation', 'MKCompassButton', 'MKDirections', 'MKDirectionsRequest', 'MKDirectionsResponse', 'MKDistanceFormatter', 'MKETAResponse', 'MKGeoJSONDecoder', 'MKGeoJSONFeature', 'MKGeodesicPolyline', 'MKGradientPolylineRenderer', 'MKLocalPointsOfInterestRequest', 'MKLocalSearch', 'MKLocalSearchCompleter', 'MKLocalSearchCompletion', 'MKLocalSearchRequest', 'MKLocalSearchResponse', 'MKMapCamera', 'MKMapCameraBoundary', 'MKMapCameraZoomRange', 'MKMapItem', 'MKMapSnapshot', 'MKMapSnapshotOptions', 'MKMapSnapshotter', 'MKMapView', 'MKMarkerAnnotationView', 'MKMultiPoint', 'MKMultiPolygon', 'MKMultiPolygonRenderer', 'MKMultiPolyline', 'MKMultiPolylineRenderer', 'MKOverlayPathRenderer', 'MKOverlayPathView', 'MKOverlayRenderer', 'MKOverlayView', 'MKPinAnnotationView', 'MKPitchControl', 'MKPlacemark', 'MKPointAnnotation', 'MKPointOfInterestFilter', 'MKPolygon', 'MKPolygonRenderer', 'MKPolygonView', 'MKPolyline', 'MKPolylineRenderer', 'MKPolylineView', 'MKReverseGeocoder', 'MKRoute', 'MKRouteStep', 'MKScaleView', 'MKShape', 'MKTileOverlay', 'MKTileOverlayRenderer', 'MKUserLocation', 'MKUserLocationView', 'MKUserTrackingBarButtonItem', 'MKUserTrackingButton', 'MKZoomControl', 'MLArrayBatchProvider', 'MLCActivationDescriptor', 'MLCActivationLayer', 'MLCArithmeticLayer', 'MLCBatchNormalizationLayer', 'MLCConcatenationLayer', 'MLCConvolutionDescriptor', 'MLCConvolutionLayer', 'MLCDevice', 'MLCDropoutLayer', 'MLCEmbeddingDescriptor', 'MLCEmbeddingLayer', 'MLCFullyConnectedLayer', 'MLCGramMatrixLayer', 'MLCGraph', 'MLCGroupNormalizationLayer', 'MLCInferenceGraph', 'MLCInstanceNormalizationLayer', 'MLCLSTMDescriptor', 'MLCLSTMLayer', 'MLCLayer', 'MLCLayerNormalizationLayer', 'MLCLossDescriptor', 'MLCLossLayer', 'MLCMatMulDescriptor', 'MLCMatMulLayer', 'MLCMultiheadAttentionDescriptor', 'MLCMultiheadAttentionLayer', 'MLCPaddingLayer', 'MLCPoolingDescriptor', 'MLCPoolingLayer', 'MLCReductionLayer', 'MLCReshapeLayer', 'MLCSliceLayer', 'MLCSoftmaxLayer', 'MLCSplitLayer', 'MLCTensor', 'MLCTensorData', 'MLCTensorDescriptor', 'MLCTensorOptimizerDeviceData', 'MLCTensorParameter', 'MLCTrainingGraph', 'MLCTransposeLayer', 'MLCUpsampleLayer', 'MLCYOLOLossDescriptor', 'MLCYOLOLossLayer', 'MLDictionaryConstraint', 'MLDictionaryFeatureProvider', 'MLFeatureDescription', 'MLFeatureValue', 'MLImageConstraint', 'MLImageSize', 'MLImageSizeConstraint', 'MLKey', 'MLMetricKey', 'MLModel', 'MLModelCollection', 'MLModelCollectionEntry', 'MLModelConfiguration', 'MLModelDescription', 'MLMultiArray', 'MLMultiArrayConstraint', 'MLMultiArrayShapeConstraint', 'MLNumericConstraint', 'MLParameterDescription', 'MLParameterKey', 'MLPredictionOptions', 'MLSequence', 'MLSequenceConstraint', 'MLTask', 'MLUpdateContext', 'MLUpdateProgressHandlers', 'MLUpdateTask', 'MPChangeLanguageOptionCommandEvent', 'MPChangePlaybackPositionCommand', 
'MPChangePlaybackPositionCommandEvent', 'MPChangePlaybackRateCommand', 'MPChangePlaybackRateCommandEvent', 'MPChangeRepeatModeCommand', 'MPChangeRepeatModeCommandEvent', 'MPChangeShuffleModeCommand', 'MPChangeShuffleModeCommandEvent', 'MPContentItem', 'MPFeedbackCommand', 'MPFeedbackCommandEvent', 'MPMediaEntity', 'MPMediaItem', 'MPMediaItemArtwork', 'MPMediaItemCollection', 'MPMediaLibrary', 'MPMediaPickerController', 'MPMediaPlaylist', 'MPMediaPlaylistCreationMetadata', 'MPMediaPredicate', 'MPMediaPropertyPredicate', 'MPMediaQuery', 'MPMediaQuerySection', 'MPMovieAccessLog', 'MPMovieAccessLogEvent', 'MPMovieErrorLog', 'MPMovieErrorLogEvent', 'MPMoviePlayerController', 'MPMoviePlayerViewController', 'MPMusicPlayerApplicationController', 'MPMusicPlayerController', 'MPMusicPlayerControllerMutableQueue', 'MPMusicPlayerControllerQueue', 'MPMusicPlayerMediaItemQueueDescriptor', 'MPMusicPlayerPlayParameters', 'MPMusicPlayerPlayParametersQueueDescriptor', 'MPMusicPlayerQueueDescriptor', 'MPMusicPlayerStoreQueueDescriptor', 'MPNowPlayingInfoCenter', 'MPNowPlayingInfoLanguageOption', 'MPNowPlayingInfoLanguageOptionGroup', 'MPNowPlayingSession', 'MPPlayableContentManager', 'MPPlayableContentManagerContext', 'MPRatingCommand', 'MPRatingCommandEvent', 'MPRemoteCommand', 'MPRemoteCommandCenter', 'MPRemoteCommandEvent', 'MPSGraph', 'MPSGraphConvolution2DOpDescriptor', 'MPSGraphDepthwiseConvolution2DOpDescriptor', 'MPSGraphDevice', 'MPSGraphExecutionDescriptor', 'MPSGraphOperation', 'MPSGraphPooling2DOpDescriptor', 'MPSGraphShapedType', 'MPSGraphTensor', 'MPSGraphTensorData', 'MPSGraphVariableOp', 'MPSeekCommandEvent', 'MPSkipIntervalCommand', 'MPSkipIntervalCommandEvent', 'MPTimedMetadata', 'MPVolumeView', 'MSConversation', 'MSMessage', 'MSMessageLayout', 'MSMessageLiveLayout', 'MSMessageTemplateLayout', 'MSMessagesAppViewController', 'MSServiceAccount', 'MSSession', 'MSSetupSession', 'MSSticker', 'MSStickerBrowserView', 'MSStickerBrowserViewController', 'MSStickerView', 'MTKMesh', 'MTKMeshBuffer', 'MTKMeshBufferAllocator', 'MTKSubmesh', 'MTKTextureLoader', 'MTKView', 'MTLAccelerationStructureBoundingBoxGeometryDescriptor', 'MTLAccelerationStructureDescriptor', 'MTLAccelerationStructureGeometryDescriptor', 'MTLAccelerationStructureTriangleGeometryDescriptor', 'MTLArgument', 'MTLArgumentDescriptor', 'MTLArrayType', 'MTLAttribute', 'MTLAttributeDescriptor', 'MTLAttributeDescriptorArray', 'MTLBinaryArchiveDescriptor', 'MTLBlitPassDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptorArray', 'MTLBufferLayoutDescriptor', 'MTLBufferLayoutDescriptorArray', 'MTLCaptureDescriptor', 'MTLCaptureManager', 'MTLCommandBufferDescriptor', 'MTLCompileOptions', 'MTLComputePassDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptorArray', 'MTLComputePipelineDescriptor', 'MTLComputePipelineReflection', 'MTLCounterSampleBufferDescriptor', 'MTLDepthStencilDescriptor', 'MTLFunctionConstant', 'MTLFunctionConstantValues', 'MTLFunctionDescriptor', 'MTLHeapDescriptor', 'MTLIndirectCommandBufferDescriptor', 'MTLInstanceAccelerationStructureDescriptor', 'MTLIntersectionFunctionDescriptor', 'MTLIntersectionFunctionTableDescriptor', 'MTLLinkedFunctions', 'MTLPipelineBufferDescriptor', 'MTLPipelineBufferDescriptorArray', 'MTLPointerType', 'MTLPrimitiveAccelerationStructureDescriptor', 'MTLRasterizationRateLayerArray', 'MTLRasterizationRateLayerDescriptor', 'MTLRasterizationRateMapDescriptor', 'MTLRasterizationRateSampleArray', 
'MTLRenderPassAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptorArray', 'MTLRenderPassDepthAttachmentDescriptor', 'MTLRenderPassDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptorArray', 'MTLRenderPassStencilAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MTLRenderPipelineDescriptor', 'MTLRenderPipelineReflection', 'MTLResourceStatePassDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptorArray', 'MTLSamplerDescriptor', 'MTLSharedEventHandle', 'MTLSharedEventListener', 'MTLSharedTextureHandle', 'MTLStageInputOutputDescriptor', 'MTLStencilDescriptor', 'MTLStructMember', 'MTLStructType', 'MTLTextureDescriptor', 'MTLTextureReferenceType', 'MTLTileRenderPipelineColorAttachmentDescriptor', 'MTLTileRenderPipelineColorAttachmentDescriptorArray', 'MTLTileRenderPipelineDescriptor', 'MTLType', 'MTLVertexAttribute', 'MTLVertexAttributeDescriptor', 'MTLVertexAttributeDescriptorArray', 'MTLVertexBufferLayoutDescriptor', 'MTLVertexBufferLayoutDescriptorArray', 'MTLVertexDescriptor', 'MTLVisibleFunctionTableDescriptor', 'MXAnimationMetric', 'MXAppExitMetric', 'MXAppLaunchMetric', 'MXAppResponsivenessMetric', 'MXAppRunTimeMetric', 'MXAverage', 'MXBackgroundExitData', 'MXCPUExceptionDiagnostic', 'MXCPUMetric', 'MXCallStackTree', 'MXCellularConditionMetric', 'MXCrashDiagnostic', 'MXDiagnostic', 'MXDiagnosticPayload', 'MXDiskIOMetric', 'MXDiskWriteExceptionDiagnostic', 'MXDisplayMetric', 'MXForegroundExitData', 'MXGPUMetric', 'MXHangDiagnostic', 'MXHistogram', 'MXHistogramBucket', 'MXLocationActivityMetric', 'MXMemoryMetric', 'MXMetaData', 'MXMetric', 'MXMetricManager', 'MXMetricPayload', 'MXNetworkTransferMetric', 'MXSignpostIntervalData', 'MXSignpostMetric', 'MXUnitAveragePixelLuminance', 'MXUnitSignalBars', 'MyClass', 'NCWidgetController', 'NEAppProxyFlow', 'NEAppProxyProvider', 'NEAppProxyProviderManager', 'NEAppProxyTCPFlow', 'NEAppProxyUDPFlow', 'NEAppPushManager', 'NEAppPushProvider', 'NEAppRule', 'NEDNSOverHTTPSSettings', 'NEDNSOverTLSSettings', 'NEDNSProxyManager', 'NEDNSProxyProvider', 'NEDNSProxyProviderProtocol', 'NEDNSSettings', 'NEDNSSettingsManager', 'NEEvaluateConnectionRule', 'NEFilterBrowserFlow', 'NEFilterControlProvider', 'NEFilterControlVerdict', 'NEFilterDataProvider', 'NEFilterDataVerdict', 'NEFilterFlow', 'NEFilterManager', 'NEFilterNewFlowVerdict', 'NEFilterPacketContext', 'NEFilterPacketProvider', 'NEFilterProvider', 'NEFilterProviderConfiguration', 'NEFilterRemediationVerdict', 'NEFilterReport', 'NEFilterRule', 'NEFilterSettings', 'NEFilterSocketFlow', 'NEFilterVerdict', 'NEFlowMetaData', 'NEHotspotConfiguration', 'NEHotspotConfigurationManager', 'NEHotspotEAPSettings', 'NEHotspotHS20Settings', 'NEHotspotHelper', 'NEHotspotHelperCommand', 'NEHotspotHelperResponse', 'NEHotspotNetwork', 'NEIPv4Route', 'NEIPv4Settings', 'NEIPv6Route', 'NEIPv6Settings', 'NENetworkRule', 'NEOnDemandRule', 'NEOnDemandRuleConnect', 'NEOnDemandRuleDisconnect', 'NEOnDemandRuleEvaluateConnection', 'NEOnDemandRuleIgnore', 'NEPacket', 'NEPacketTunnelFlow', 'NEPacketTunnelNetworkSettings', 'NEPacketTunnelProvider', 'NEProvider', 'NEProxyServer', 'NEProxySettings', 'NETransparentProxyManager', 'NETransparentProxyNetworkSettings', 'NETransparentProxyProvider', 'NETunnelNetworkSettings', 'NETunnelProvider', 'NETunnelProviderManager', 'NETunnelProviderProtocol', 
'NETunnelProviderSession', 'NEVPNConnection', 'NEVPNIKEv2SecurityAssociationParameters', 'NEVPNManager', 'NEVPNProtocol', 'NEVPNProtocolIKEv2', 'NEVPNProtocolIPSec', 'NFCISO15693CustomCommandConfiguration', 'NFCISO15693ReadMultipleBlocksConfiguration', 'NFCISO15693ReaderSession', 'NFCISO7816APDU', 'NFCNDEFMessage', 'NFCNDEFPayload', 'NFCNDEFReaderSession', 'NFCReaderSession', 'NFCTagCommandConfiguration', 'NFCTagReaderSession', 'NFCVASCommandConfiguration', 'NFCVASReaderSession', 'NFCVASResponse', 'NIConfiguration', 'NIDiscoveryToken', 'NINearbyObject', 'NINearbyPeerConfiguration', 'NISession', 'NKAssetDownload', 'NKIssue', 'NKLibrary', 'NLEmbedding', 'NLGazetteer', 'NLLanguageRecognizer', 'NLModel', 'NLModelConfiguration', 'NLTagger', 'NLTokenizer', 'NSArray', 'NSAssertionHandler', 'NSAsynchronousFetchRequest', 'NSAsynchronousFetchResult', 'NSAtomicStore', 'NSAtomicStoreCacheNode', 'NSAttributeDescription', 'NSAttributedString', 'NSAutoreleasePool', 'NSBatchDeleteRequest', 'NSBatchDeleteResult', 'NSBatchInsertRequest', 'NSBatchInsertResult', 'NSBatchUpdateRequest', 'NSBatchUpdateResult', 'NSBlockOperation', 'NSBundle', 'NSBundleResourceRequest', 'NSByteCountFormatter', 'NSCache', 'NSCachedURLResponse', 'NSCalendar', 'NSCharacterSet', 'NSCoder', 'NSCollectionLayoutAnchor', 'NSCollectionLayoutBoundarySupplementaryItem', 'NSCollectionLayoutDecorationItem', 'NSCollectionLayoutDimension', 'NSCollectionLayoutEdgeSpacing', 'NSCollectionLayoutGroup', 'NSCollectionLayoutGroupCustomItem', 'NSCollectionLayoutItem', 'NSCollectionLayoutSection', 'NSCollectionLayoutSize', 'NSCollectionLayoutSpacing', 'NSCollectionLayoutSupplementaryItem', 'NSComparisonPredicate', 'NSCompoundPredicate', 'NSCondition', 'NSConditionLock', 'NSConstantString', 'NSConstraintConflict', 'NSCoreDataCoreSpotlightDelegate', 'NSCountedSet', 'NSData', 'NSDataAsset', 'NSDataDetector', 'NSDate', 'NSDateComponents', 'NSDateComponentsFormatter', 'NSDateFormatter', 'NSDateInterval', 'NSDateIntervalFormatter', 'NSDecimalNumber', 'NSDecimalNumberHandler', 'NSDerivedAttributeDescription', 'NSDictionary', 'NSDiffableDataSourceSectionSnapshot', 'NSDiffableDataSourceSectionTransaction', 'NSDiffableDataSourceSnapshot', 'NSDiffableDataSourceTransaction', 'NSDimension', 'NSDirectoryEnumerator', 'NSEnergyFormatter', 'NSEntityDescription', 'NSEntityMapping', 'NSEntityMigrationPolicy', 'NSEnumerator', 'NSError', 'NSEvent', 'NSException', 'NSExpression', 'NSExpressionDescription', 'NSExtensionContext', 'NSExtensionItem', 'NSFetchIndexDescription', 'NSFetchIndexElementDescription', 'NSFetchRequest', 'NSFetchRequestExpression', 'NSFetchedPropertyDescription', 'NSFetchedResultsController', 'NSFileAccessIntent', 'NSFileCoordinator', 'NSFileHandle', 'NSFileManager', 'NSFileProviderDomain', 'NSFileProviderExtension', 'NSFileProviderManager', 'NSFileProviderService', 'NSFileSecurity', 'NSFileVersion', 'NSFileWrapper', 'NSFormatter', 'NSHTTPCookie', 'NSHTTPCookieStorage', 'NSHTTPURLResponse', 'NSHashTable', 'NSISO8601DateFormatter', 'NSIncrementalStore', 'NSIncrementalStoreNode', 'NSIndexPath', 'NSIndexSet', 'NSInputStream', 'NSInvocation', 'NSInvocationOperation', 'NSItemProvider', 'NSJSONSerialization', 'NSKeyedArchiver', 'NSKeyedUnarchiver', 'NSLayoutAnchor', 'NSLayoutConstraint', 'NSLayoutDimension', 'NSLayoutManager', 'NSLayoutXAxisAnchor', 'NSLayoutYAxisAnchor', 'NSLengthFormatter', 'NSLinguisticTagger', 'NSListFormatter', 'NSLocale', 'NSLock', 'NSMachPort', 'NSManagedObject', 'NSManagedObjectContext', 'NSManagedObjectID', 'NSManagedObjectModel', 
'NSMapTable', 'NSMappingModel', 'NSMassFormatter', 'NSMeasurement', 'NSMeasurementFormatter', 'NSMenuToolbarItem', 'NSMergeConflict', 'NSMergePolicy', 'NSMessagePort', 'NSMetadataItem', 'NSMetadataQuery', 'NSMetadataQueryAttributeValueTuple', 'NSMetadataQueryResultGroup', 'NSMethodSignature', 'NSMigrationManager', 'NSMutableArray', 'NSMutableAttributedString', 'NSMutableCharacterSet', 'NSMutableData', 'NSMutableDictionary', 'NSMutableIndexSet', 'NSMutableOrderedSet', 'NSMutableParagraphStyle', 'NSMutableSet', 'NSMutableString', 'NSMutableURLRequest', 'NSNetService', 'NSNetServiceBrowser', 'NSNotification', 'NSNotificationCenter', 'NSNotificationQueue', 'NSNull', 'NSNumber', 'NSNumberFormatter', 'NSObject', 'NSOperation', 'NSOperationQueue', 'NSOrderedCollectionChange', 'NSOrderedCollectionDifference', 'NSOrderedSet', 'NSOrthography', 'NSOutputStream', 'NSParagraphStyle', 'NSPersistentCloudKitContainer', 'NSPersistentCloudKitContainerEvent', 'NSPersistentCloudKitContainerEventRequest', 'NSPersistentCloudKitContainerEventResult', 'NSPersistentCloudKitContainerOptions', 'NSPersistentContainer', 'NSPersistentHistoryChange', 'NSPersistentHistoryChangeRequest', 'NSPersistentHistoryResult', 'NSPersistentHistoryToken', 'NSPersistentHistoryTransaction', 'NSPersistentStore', 'NSPersistentStoreAsynchronousResult', 'NSPersistentStoreCoordinator', 'NSPersistentStoreDescription', 'NSPersistentStoreRequest', 'NSPersistentStoreResult', 'NSPersonNameComponents', 'NSPersonNameComponentsFormatter', 'NSPipe', 'NSPointerArray', 'NSPointerFunctions', 'NSPort', 'NSPredicate', 'NSProcessInfo', 'NSProgress', 'NSPropertyDescription', 'NSPropertyListSerialization', 'NSPropertyMapping', 'NSProxy', 'NSPurgeableData', 'NSQueryGenerationToken', 'NSRecursiveLock', 'NSRegularExpression', 'NSRelationshipDescription', 'NSRelativeDateTimeFormatter', 'NSRunLoop', 'NSSaveChangesRequest', 'NSScanner', 'NSSecureUnarchiveFromDataTransformer', 'NSSet', 'NSShadow', 'NSSharingServicePickerToolbarItem', 'NSSharingServicePickerTouchBarItem', 'NSSimpleCString', 'NSSocketPort', 'NSSortDescriptor', 'NSStream', 'NSString', 'NSStringDrawingContext', 'NSTextAttachment', 'NSTextCheckingResult', 'NSTextContainer', 'NSTextStorage', 'NSTextTab', 'NSThread', 'NSTimeZone', 'NSTimer', 'NSToolbarItem', 'NSURL', 'NSURLAuthenticationChallenge', 'NSURLCache', 'NSURLComponents', 'NSURLConnection', 'NSURLCredential', 'NSURLCredentialStorage', 'NSURLProtectionSpace', 'NSURLProtocol', 'NSURLQueryItem', 'NSURLRequest', 'NSURLResponse', 'NSURLSession', 'NSURLSessionConfiguration', 'NSURLSessionDataTask', 'NSURLSessionDownloadTask', 'NSURLSessionStreamTask', 'NSURLSessionTask', 'NSURLSessionTaskMetrics', 'NSURLSessionTaskTransactionMetrics', 'NSURLSessionUploadTask', 'NSURLSessionWebSocketMessage', 'NSURLSessionWebSocketTask', 'NSUUID', 'NSUbiquitousKeyValueStore', 'NSUndoManager', 'NSUnit', 'NSUnitAcceleration', 'NSUnitAngle', 'NSUnitArea', 'NSUnitConcentrationMass', 'NSUnitConverter', 'NSUnitConverterLinear', 'NSUnitDispersion', 'NSUnitDuration', 'NSUnitElectricCharge', 'NSUnitElectricCurrent', 'NSUnitElectricPotentialDifference', 'NSUnitElectricResistance', 'NSUnitEnergy', 'NSUnitFrequency', 'NSUnitFuelEfficiency', 'NSUnitIlluminance', 'NSUnitInformationStorage', 'NSUnitLength', 'NSUnitMass', 'NSUnitPower', 'NSUnitPressure', 'NSUnitSpeed', 'NSUnitTemperature', 'NSUnitVolume', 'NSUserActivity', 'NSUserDefaults', 'NSValue', 'NSValueTransformer', 'NSXMLParser', 'NSXPCCoder', 'NSXPCConnection', 'NSXPCInterface', 'NSXPCListener', 'NSXPCListenerEndpoint', 
'NWBonjourServiceEndpoint', 'NWEndpoint', 'NWHostEndpoint', 'NWPath', 'NWTCPConnection', 'NWTLSParameters', 'NWUDPSession', 'OSLogEntry', 'OSLogEntryActivity', 'OSLogEntryBoundary', 'OSLogEntryLog', 'OSLogEntrySignpost', 'OSLogEnumerator', 'OSLogMessageComponent', 'OSLogPosition', 'OSLogStore', 'PDFAction', 'PDFActionGoTo', 'PDFActionNamed', 'PDFActionRemoteGoTo', 'PDFActionResetForm', 'PDFActionURL', 'PDFAnnotation', 'PDFAppearanceCharacteristics', 'PDFBorder', 'PDFDestination', 'PDFDocument', 'PDFOutline', 'PDFPage', 'PDFSelection', 'PDFThumbnailView', 'PDFView', 'PHAdjustmentData', 'PHAsset', 'PHAssetChangeRequest', 'PHAssetCollection', 'PHAssetCollectionChangeRequest', 'PHAssetCreationRequest', 'PHAssetResource', 'PHAssetResourceCreationOptions', 'PHAssetResourceManager', 'PHAssetResourceRequestOptions', 'PHCachingImageManager', 'PHChange', 'PHChangeRequest', 'PHCloudIdentifier', 'PHCollection', 'PHCollectionList', 'PHCollectionListChangeRequest', 'PHContentEditingInput', 'PHContentEditingInputRequestOptions', 'PHContentEditingOutput', 'PHEditingExtensionContext', 'PHFetchOptions', 'PHFetchResult', 'PHFetchResultChangeDetails', 'PHImageManager', 'PHImageRequestOptions', 'PHLivePhoto', 'PHLivePhotoEditingContext', 'PHLivePhotoRequestOptions', 'PHLivePhotoView', 'PHObject', 'PHObjectChangeDetails', 'PHObjectPlaceholder', 'PHPhotoLibrary', 'PHPickerConfiguration', 'PHPickerFilter', 'PHPickerResult', 'PHPickerViewController', 'PHProject', 'PHProjectChangeRequest', 'PHVideoRequestOptions', 'PKAddCarKeyPassConfiguration', 'PKAddPassButton', 'PKAddPassesViewController', 'PKAddPaymentPassRequest', 'PKAddPaymentPassRequestConfiguration', 'PKAddPaymentPassViewController', 'PKAddSecureElementPassConfiguration', 'PKAddSecureElementPassViewController', 'PKAddShareablePassConfiguration', 'PKBarcodeEventConfigurationRequest', 'PKBarcodeEventMetadataRequest', 'PKBarcodeEventMetadataResponse', 'PKBarcodeEventSignatureRequest', 'PKBarcodeEventSignatureResponse', 'PKCanvasView', 'PKContact', 'PKDisbursementAuthorizationController', 'PKDisbursementRequest', 'PKDisbursementVoucher', 'PKDrawing', 'PKEraserTool', 'PKFloatRange', 'PKInk', 'PKInkingTool', 'PKIssuerProvisioningExtensionHandler', 'PKIssuerProvisioningExtensionPassEntry', 'PKIssuerProvisioningExtensionPaymentPassEntry', 'PKIssuerProvisioningExtensionStatus', 'PKLabeledValue', 'PKLassoTool', 'PKObject', 'PKPass', 'PKPassLibrary', 'PKPayment', 'PKPaymentAuthorizationController', 'PKPaymentAuthorizationResult', 'PKPaymentAuthorizationViewController', 'PKPaymentButton', 'PKPaymentInformationEventExtension', 'PKPaymentMerchantSession', 'PKPaymentMethod', 'PKPaymentPass', 'PKPaymentRequest', 'PKPaymentRequestMerchantSessionUpdate', 'PKPaymentRequestPaymentMethodUpdate', 'PKPaymentRequestShippingContactUpdate', 'PKPaymentRequestShippingMethodUpdate', 'PKPaymentRequestUpdate', 'PKPaymentSummaryItem', 'PKPaymentToken', 'PKPushCredentials', 'PKPushPayload', 'PKPushRegistry', 'PKSecureElementPass', 'PKShareablePassMetadata', 'PKShippingMethod', 'PKStroke', 'PKStrokePath', 'PKStrokePoint', 'PKSuicaPassProperties', 'PKTool', 'PKToolPicker', 'PKTransitPassProperties', 'QLFileThumbnailRequest', 'QLPreviewController', 'QLThumbnailGenerationRequest', 'QLThumbnailGenerator', 'QLThumbnailProvider', 'QLThumbnailReply', 'QLThumbnailRepresentation', 'RPBroadcastActivityController', 'RPBroadcastActivityViewController', 'RPBroadcastConfiguration', 'RPBroadcastController', 'RPBroadcastHandler', 'RPBroadcastMP4ClipHandler', 'RPBroadcastSampleHandler', 
'RPPreviewViewController', 'RPScreenRecorder', 'RPSystemBroadcastPickerView', 'SCNAccelerationConstraint', 'SCNAction', 'SCNAnimation', 'SCNAnimationEvent', 'SCNAnimationPlayer', 'SCNAudioPlayer', 'SCNAudioSource', 'SCNAvoidOccluderConstraint', 'SCNBillboardConstraint', 'SCNBox', 'SCNCamera', 'SCNCameraController', 'SCNCapsule', 'SCNCone', 'SCNConstraint', 'SCNCylinder', 'SCNDistanceConstraint', 'SCNFloor', 'SCNGeometry', 'SCNGeometryElement', 'SCNGeometrySource', 'SCNGeometryTessellator', 'SCNHitTestResult', 'SCNIKConstraint', 'SCNLevelOfDetail', 'SCNLight', 'SCNLookAtConstraint', 'SCNMaterial', 'SCNMaterialProperty', 'SCNMorpher', 'SCNNode', 'SCNParticlePropertyController', 'SCNParticleSystem', 'SCNPhysicsBallSocketJoint', 'SCNPhysicsBehavior', 'SCNPhysicsBody', 'SCNPhysicsConeTwistJoint', 'SCNPhysicsContact', 'SCNPhysicsField', 'SCNPhysicsHingeJoint', 'SCNPhysicsShape', 'SCNPhysicsSliderJoint', 'SCNPhysicsVehicle', 'SCNPhysicsVehicleWheel', 'SCNPhysicsWorld', 'SCNPlane', 'SCNProgram', 'SCNPyramid', 'SCNReferenceNode', 'SCNRenderer', 'SCNReplicatorConstraint', 'SCNScene', 'SCNSceneSource', 'SCNShape', 'SCNSkinner', 'SCNSliderConstraint', 'SCNSphere', 'SCNTechnique', 'SCNText', 'SCNTimingFunction', 'SCNTorus', 'SCNTransaction', 'SCNTransformConstraint', 'SCNTube', 'SCNView', 'SFAcousticFeature', 'SFAuthenticationSession', 'SFContentBlockerManager', 'SFContentBlockerState', 'SFSafariViewController', 'SFSafariViewControllerConfiguration', 'SFSpeechAudioBufferRecognitionRequest', 'SFSpeechRecognitionRequest', 'SFSpeechRecognitionResult', 'SFSpeechRecognitionTask', 'SFSpeechRecognizer', 'SFSpeechURLRecognitionRequest', 'SFTranscription', 'SFTranscriptionSegment', 'SFVoiceAnalytics', 'SK3DNode', 'SKAction', 'SKAdNetwork', 'SKArcadeService', 'SKAttribute', 'SKAttributeValue', 'SKAudioNode', 'SKCameraNode', 'SKCloudServiceController', 'SKCloudServiceSetupViewController', 'SKConstraint', 'SKCropNode', 'SKDownload', 'SKEffectNode', 'SKEmitterNode', 'SKFieldNode', 'SKKeyframeSequence', 'SKLabelNode', 'SKLightNode', 'SKMutablePayment', 'SKMutableTexture', 'SKNode', 'SKOverlay', 'SKOverlayAppClipConfiguration', 'SKOverlayAppConfiguration', 'SKOverlayConfiguration', 'SKOverlayTransitionContext', 'SKPayment', 'SKPaymentDiscount', 'SKPaymentQueue', 'SKPaymentTransaction', 'SKPhysicsBody', 'SKPhysicsContact', 'SKPhysicsJoint', 'SKPhysicsJointFixed', 'SKPhysicsJointLimit', 'SKPhysicsJointPin', 'SKPhysicsJointSliding', 'SKPhysicsJointSpring', 'SKPhysicsWorld', 'SKProduct', 'SKProductDiscount', 'SKProductStorePromotionController', 'SKProductSubscriptionPeriod', 'SKProductsRequest', 'SKProductsResponse', 'SKRange', 'SKReachConstraints', 'SKReceiptRefreshRequest', 'SKReferenceNode', 'SKRegion', 'SKRenderer', 'SKRequest', 'SKScene', 'SKShader', 'SKShapeNode', 'SKSpriteNode', 'SKStoreProductViewController', 'SKStoreReviewController', 'SKStorefront', 'SKTexture', 'SKTextureAtlas', 'SKTileDefinition', 'SKTileGroup', 'SKTileGroupRule', 'SKTileMapNode', 'SKTileSet', 'SKTransformNode', 'SKTransition', 'SKUniform', 'SKVideoNode', 'SKView', 'SKWarpGeometry', 'SKWarpGeometryGrid', 'SLComposeServiceViewController', 'SLComposeSheetConfigurationItem', 'SLComposeViewController', 'SLRequest', 'SNAudioFileAnalyzer', 'SNAudioStreamAnalyzer', 'SNClassification', 'SNClassificationResult', 'SNClassifySoundRequest', 'SRAmbientLightSample', 'SRApplicationUsage', 'SRDeletionRecord', 'SRDevice', 'SRDeviceUsageReport', 'SRFetchRequest', 'SRFetchResult', 'SRKeyboardMetrics', 'SRKeyboardProbabilityMetric', 'SRMessagesUsageReport', 
'SRNotificationUsage', 'SRPhoneUsageReport', 'SRSensorReader', 'SRVisit', 'SRWebUsage', 'SRWristDetection', 'SSReadingList', 'STScreenTimeConfiguration', 'STScreenTimeConfigurationObserver', 'STWebHistory', 'STWebpageController', 'TKBERTLVRecord', 'TKCompactTLVRecord', 'TKSimpleTLVRecord', 'TKSmartCard', 'TKSmartCardATR', 'TKSmartCardATRInterfaceGroup', 'TKSmartCardPINFormat', 'TKSmartCardSlot', 'TKSmartCardSlotManager', 'TKSmartCardToken', 'TKSmartCardTokenDriver', 'TKSmartCardTokenSession', 'TKSmartCardUserInteraction', 'TKSmartCardUserInteractionForPINOperation', 'TKSmartCardUserInteractionForSecurePINChange', 'TKSmartCardUserInteractionForSecurePINVerification', 'TKTLVRecord', 'TKToken', 'TKTokenAuthOperation', 'TKTokenConfiguration', 'TKTokenDriver', 'TKTokenDriverConfiguration', 'TKTokenKeyAlgorithm', 'TKTokenKeyExchangeParameters', 'TKTokenKeychainCertificate', 'TKTokenKeychainContents', 'TKTokenKeychainItem', 'TKTokenKeychainKey', 'TKTokenPasswordAuthOperation', 'TKTokenSession', 'TKTokenSmartCardPINAuthOperation', 'TKTokenWatcher', 'TWRequest', 'TWTweetComposeViewController', 'UIAcceleration', 'UIAccelerometer', 'UIAccessibilityCustomAction', 'UIAccessibilityCustomRotor', 'UIAccessibilityCustomRotorItemResult', 'UIAccessibilityCustomRotorSearchPredicate', 'UIAccessibilityElement', 'UIAccessibilityLocationDescriptor', 'UIAction', 'UIActionSheet', 'UIActivity', 'UIActivityIndicatorView', 'UIActivityItemProvider', 'UIActivityItemsConfiguration', 'UIActivityViewController', 'UIAlertAction', 'UIAlertController', 'UIAlertView', 'UIApplication', 'UIApplicationShortcutIcon', 'UIApplicationShortcutItem', 'UIAttachmentBehavior', 'UIBackgroundConfiguration', 'UIBarAppearance', 'UIBarButtonItem', 'UIBarButtonItemAppearance', 'UIBarButtonItemGroup', 'UIBarButtonItemStateAppearance', 'UIBarItem', 'UIBezierPath', 'UIBlurEffect', 'UIButton', 'UICellAccessory', 'UICellAccessoryCheckmark', 'UICellAccessoryCustomView', 'UICellAccessoryDelete', 'UICellAccessoryDisclosureIndicator', 'UICellAccessoryInsert', 'UICellAccessoryLabel', 'UICellAccessoryMultiselect', 'UICellAccessoryOutlineDisclosure', 'UICellAccessoryReorder', 'UICellConfigurationState', 'UICloudSharingController', 'UICollectionLayoutListConfiguration', 'UICollectionReusableView', 'UICollectionView', 'UICollectionViewCell', 'UICollectionViewCellRegistration', 'UICollectionViewCompositionalLayout', 'UICollectionViewCompositionalLayoutConfiguration', 'UICollectionViewController', 'UICollectionViewDiffableDataSource', 'UICollectionViewDiffableDataSourceReorderingHandlers', 'UICollectionViewDiffableDataSourceSectionSnapshotHandlers', 'UICollectionViewDropPlaceholder', 'UICollectionViewDropProposal', 'UICollectionViewFlowLayout', 'UICollectionViewFlowLayoutInvalidationContext', 'UICollectionViewFocusUpdateContext', 'UICollectionViewLayout', 'UICollectionViewLayoutAttributes', 'UICollectionViewLayoutInvalidationContext', 'UICollectionViewListCell', 'UICollectionViewPlaceholder', 'UICollectionViewSupplementaryRegistration', 'UICollectionViewTransitionLayout', 'UICollectionViewUpdateItem', 'UICollisionBehavior', 'UIColor', 'UIColorPickerViewController', 'UIColorWell', 'UICommand', 'UICommandAlternate', 'UIContextMenuConfiguration', 'UIContextMenuInteraction', 'UIContextualAction', 'UIControl', 'UICubicTimingParameters', 'UIDatePicker', 'UIDeferredMenuElement', 'UIDevice', 'UIDictationPhrase', 'UIDocument', 'UIDocumentBrowserAction', 'UIDocumentBrowserTransitionController', 'UIDocumentBrowserViewController', 'UIDocumentInteractionController', 
'UIDocumentMenuViewController', 'UIDocumentPickerExtensionViewController', 'UIDocumentPickerViewController', 'UIDragInteraction', 'UIDragItem', 'UIDragPreview', 'UIDragPreviewParameters', 'UIDragPreviewTarget', 'UIDropInteraction', 'UIDropProposal', 'UIDynamicAnimator', 'UIDynamicBehavior', 'UIDynamicItemBehavior', 'UIDynamicItemGroup', 'UIEvent', 'UIFeedbackGenerator', 'UIFieldBehavior', 'UIFocusAnimationCoordinator', 'UIFocusDebugger', 'UIFocusGuide', 'UIFocusMovementHint', 'UIFocusSystem', 'UIFocusUpdateContext', 'UIFont', 'UIFontDescriptor', 'UIFontMetrics', 'UIFontPickerViewController', 'UIFontPickerViewControllerConfiguration', 'UIGestureRecognizer', 'UIGraphicsImageRenderer', 'UIGraphicsImageRendererContext', 'UIGraphicsImageRendererFormat', 'UIGraphicsPDFRenderer', 'UIGraphicsPDFRendererContext', 'UIGraphicsPDFRendererFormat', 'UIGraphicsRenderer', 'UIGraphicsRendererContext', 'UIGraphicsRendererFormat', 'UIGravityBehavior', 'UIHoverGestureRecognizer', 'UIImage', 'UIImageAsset', 'UIImageConfiguration', 'UIImagePickerController', 'UIImageSymbolConfiguration', 'UIImageView', 'UIImpactFeedbackGenerator', 'UIIndirectScribbleInteraction', 'UIInputView', 'UIInputViewController', 'UIInterpolatingMotionEffect', 'UIKey', 'UIKeyCommand', 'UILabel', 'UILargeContentViewerInteraction', 'UILayoutGuide', 'UILexicon', 'UILexiconEntry', 'UIListContentConfiguration', 'UIListContentImageProperties', 'UIListContentTextProperties', 'UIListContentView', 'UILocalNotification', 'UILocalizedIndexedCollation', 'UILongPressGestureRecognizer', 'UIManagedDocument', 'UIMarkupTextPrintFormatter', 'UIMenu', 'UIMenuController', 'UIMenuElement', 'UIMenuItem', 'UIMenuSystem', 'UIMotionEffect', 'UIMotionEffectGroup', 'UIMutableApplicationShortcutItem', 'UIMutableUserNotificationAction', 'UIMutableUserNotificationCategory', 'UINavigationBar', 'UINavigationBarAppearance', 'UINavigationController', 'UINavigationItem', 'UINib', 'UINotificationFeedbackGenerator', 'UIOpenURLContext', 'UIPageControl', 'UIPageViewController', 'UIPanGestureRecognizer', 'UIPasteConfiguration', 'UIPasteboard', 'UIPencilInteraction', 'UIPercentDrivenInteractiveTransition', 'UIPickerView', 'UIPinchGestureRecognizer', 'UIPointerEffect', 'UIPointerHighlightEffect', 'UIPointerHoverEffect', 'UIPointerInteraction', 'UIPointerLiftEffect', 'UIPointerLockState', 'UIPointerRegion', 'UIPointerRegionRequest', 'UIPointerShape', 'UIPointerStyle', 'UIPopoverBackgroundView', 'UIPopoverController', 'UIPopoverPresentationController', 'UIPresentationController', 'UIPress', 'UIPressesEvent', 'UIPreviewAction', 'UIPreviewActionGroup', 'UIPreviewInteraction', 'UIPreviewParameters', 'UIPreviewTarget', 'UIPrintFormatter', 'UIPrintInfo', 'UIPrintInteractionController', 'UIPrintPageRenderer', 'UIPrintPaper', 'UIPrinter', 'UIPrinterPickerController', 'UIProgressView', 'UIPushBehavior', 'UIReferenceLibraryViewController', 'UIRefreshControl', 'UIRegion', 'UIResponder', 'UIRotationGestureRecognizer', 'UIScene', 'UISceneActivationConditions', 'UISceneActivationRequestOptions', 'UISceneConfiguration', 'UISceneConnectionOptions', 'UISceneDestructionRequestOptions', 'UISceneOpenExternalURLOptions', 'UISceneOpenURLOptions', 'UISceneSession', 'UISceneSizeRestrictions', 'UIScreen', 'UIScreenEdgePanGestureRecognizer', 'UIScreenMode', 'UIScreenshotService', 'UIScribbleInteraction', 'UIScrollView', 'UISearchBar', 'UISearchContainerViewController', 'UISearchController', 'UISearchDisplayController', 'UISearchSuggestionItem', 'UISearchTextField', 'UISearchToken', 'UISegmentedControl', 
'UISelectionFeedbackGenerator', 'UISimpleTextPrintFormatter', 'UISlider', 'UISnapBehavior', 'UISplitViewController', 'UISpringLoadedInteraction', 'UISpringTimingParameters', 'UIStackView', 'UIStatusBarManager', 'UIStepper', 'UIStoryboard', 'UIStoryboardPopoverSegue', 'UIStoryboardSegue', 'UIStoryboardUnwindSegueSource', 'UISwipeActionsConfiguration', 'UISwipeGestureRecognizer', 'UISwitch', 'UITabBar', 'UITabBarAppearance', 'UITabBarController', 'UITabBarItem', 'UITabBarItemAppearance', 'UITabBarItemStateAppearance', 'UITableView', 'UITableViewCell', 'UITableViewController', 'UITableViewDiffableDataSource', 'UITableViewDropPlaceholder', 'UITableViewDropProposal', 'UITableViewFocusUpdateContext', 'UITableViewHeaderFooterView', 'UITableViewPlaceholder', 'UITableViewRowAction', 'UITapGestureRecognizer', 'UITargetedDragPreview', 'UITargetedPreview', 'UITextChecker', 'UITextDragPreviewRenderer', 'UITextDropProposal', 'UITextField', 'UITextFormattingCoordinator', 'UITextInputAssistantItem', 'UITextInputMode', 'UITextInputPasswordRules', 'UITextInputStringTokenizer', 'UITextInteraction', 'UITextPlaceholder', 'UITextPosition', 'UITextRange', 'UITextSelectionRect', 'UITextView', 'UITitlebar', 'UIToolbar', 'UIToolbarAppearance', 'UITouch', 'UITraitCollection', 'UIUserNotificationAction', 'UIUserNotificationCategory', 'UIUserNotificationSettings', 'UIVibrancyEffect', 'UIVideoEditorController', 'UIView', 'UIViewConfigurationState', 'UIViewController', 'UIViewPrintFormatter', 'UIViewPropertyAnimator', 'UIVisualEffect', 'UIVisualEffectView', 'UIWebView', 'UIWindow', 'UIWindowScene', 'UIWindowSceneDestructionRequestOptions', 'UNCalendarNotificationTrigger', 'UNLocationNotificationTrigger', 'UNMutableNotificationContent', 'UNNotification', 'UNNotificationAction', 'UNNotificationAttachment', 'UNNotificationCategory', 'UNNotificationContent', 'UNNotificationRequest', 'UNNotificationResponse', 'UNNotificationServiceExtension', 'UNNotificationSettings', 'UNNotificationSound', 'UNNotificationTrigger', 'UNPushNotificationTrigger', 'UNTextInputNotificationAction', 'UNTextInputNotificationResponse', 'UNTimeIntervalNotificationTrigger', 'UNUserNotificationCenter', 'UTType', 'VNBarcodeObservation', 'VNCircle', 'VNClassificationObservation', 'VNClassifyImageRequest', 'VNContour', 'VNContoursObservation', 'VNCoreMLFeatureValueObservation', 'VNCoreMLModel', 'VNCoreMLRequest', 'VNDetectBarcodesRequest', 'VNDetectContoursRequest', 'VNDetectFaceCaptureQualityRequest', 'VNDetectFaceLandmarksRequest', 'VNDetectFaceRectanglesRequest', 'VNDetectHorizonRequest', 'VNDetectHumanBodyPoseRequest', 'VNDetectHumanHandPoseRequest', 'VNDetectHumanRectanglesRequest', 'VNDetectRectanglesRequest', 'VNDetectTextRectanglesRequest', 'VNDetectTrajectoriesRequest', 'VNDetectedObjectObservation', 'VNDetectedPoint', 'VNDocumentCameraScan', 'VNDocumentCameraViewController', 'VNFaceLandmarkRegion', 'VNFaceLandmarkRegion2D', 'VNFaceLandmarks', 'VNFaceLandmarks2D', 'VNFaceObservation', 'VNFeaturePrintObservation', 'VNGenerateAttentionBasedSaliencyImageRequest', 'VNGenerateImageFeaturePrintRequest', 'VNGenerateObjectnessBasedSaliencyImageRequest', 'VNGenerateOpticalFlowRequest', 'VNGeometryUtils', 'VNHomographicImageRegistrationRequest', 'VNHorizonObservation', 'VNHumanBodyPoseObservation', 'VNHumanHandPoseObservation', 'VNImageAlignmentObservation', 'VNImageBasedRequest', 'VNImageHomographicAlignmentObservation', 'VNImageRegistrationRequest', 'VNImageRequestHandler', 'VNImageTranslationAlignmentObservation', 'VNObservation', 
'VNPixelBufferObservation', 'VNPoint', 'VNRecognizeAnimalsRequest', 'VNRecognizeTextRequest', 'VNRecognizedObjectObservation', 'VNRecognizedPoint', 'VNRecognizedPointsObservation', 'VNRecognizedText', 'VNRecognizedTextObservation', 'VNRectangleObservation', 'VNRequest', 'VNSaliencyImageObservation', 'VNSequenceRequestHandler', 'VNStatefulRequest', 'VNTargetedImageRequest', 'VNTextObservation', 'VNTrackObjectRequest', 'VNTrackRectangleRequest', 'VNTrackingRequest', 'VNTrajectoryObservation', 'VNTranslationalImageRegistrationRequest', 'VNVector', 'VNVideoProcessor', 'VNVideoProcessorCadence', 'VNVideoProcessorFrameRateCadence', 'VNVideoProcessorRequestProcessingOptions', 'VNVideoProcessorTimeIntervalCadence', 'VSAccountApplicationProvider', 'VSAccountManager', 'VSAccountManagerResult', 'VSAccountMetadata', 'VSAccountMetadataRequest', 'VSAccountProviderResponse', 'VSSubscription', 'VSSubscriptionRegistrationCenter', 'WCSession', 'WCSessionFile', 'WCSessionFileTransfer', 'WCSessionUserInfoTransfer', 'WKBackForwardList', 'WKBackForwardListItem', 'WKContentRuleList', 'WKContentRuleListStore', 'WKContentWorld', 'WKContextMenuElementInfo', 'WKFindConfiguration', 'WKFindResult', 'WKFrameInfo', 'WKHTTPCookieStore', 'WKNavigation', 'WKNavigationAction', 'WKNavigationResponse', 'WKOpenPanelParameters', 'WKPDFConfiguration', 'WKPreferences', 'WKPreviewElementInfo', 'WKProcessPool', 'WKScriptMessage', 'WKSecurityOrigin', 'WKSnapshotConfiguration', 'WKUserContentController', 'WKUserScript', 'WKWebView', 'WKWebViewConfiguration', 'WKWebpagePreferences', 'WKWebsiteDataRecord', 'WKWebsiteDataStore', 'WKWindowFeatures', '__EntityAccessibilityWrapper'}
-COCOA_PROTOCOLS = {'ABNewPersonViewControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'ABPersonViewControllerDelegate', 'ABUnknownPersonViewControllerDelegate', 'ADActionViewControllerChildInterface', 'ADActionViewControllerInterface', 'ADBannerViewDelegate', 'ADInterstitialAdDelegate', 'AEAssessmentSessionDelegate', 'ARAnchorCopying', 'ARCoachingOverlayViewDelegate', 'ARSCNViewDelegate', 'ARSKViewDelegate', 'ARSessionDelegate', 'ARSessionObserver', 'ARSessionProviding', 'ARTrackable', 'ASAccountAuthenticationModificationControllerDelegate', 'ASAccountAuthenticationModificationControllerPresentationContextProviding', 'ASAuthorizationControllerDelegate', 'ASAuthorizationControllerPresentationContextProviding', 'ASAuthorizationCredential', 'ASAuthorizationProvider', 'ASAuthorizationProviderExtensionAuthorizationRequestHandler', 'ASWebAuthenticationPresentationContextProviding', 'ASWebAuthenticationSessionRequestDelegate', 'ASWebAuthenticationSessionWebBrowserSessionHandling', 'AUAudioUnitFactory', 'AVAssetDownloadDelegate', 'AVAssetResourceLoaderDelegate', 'AVAssetWriterDelegate', 'AVAsynchronousKeyValueLoading', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'AVCaptureDataOutputSynchronizerDelegate', 'AVCaptureDepthDataOutputDelegate', 'AVCaptureFileOutputDelegate', 'AVCaptureFileOutputRecordingDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'AVCapturePhotoCaptureDelegate', 'AVCapturePhotoFileDataRepresentationCustomizer', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'AVContentKeyRecipient', 'AVContentKeySessionDelegate', 'AVFragmentMinding', 'AVPictureInPictureControllerDelegate', 'AVPlayerItemLegibleOutputPushDelegate', 'AVPlayerItemMetadataCollectorPushDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'AVPlayerItemOutputPullDelegate', 'AVPlayerItemOutputPushDelegate', 'AVPlayerViewControllerDelegate', 'AVQueuedSampleBufferRendering', 'AVRoutePickerViewDelegate', 'AVVideoCompositing', 'AVVideoCompositionInstruction', 'AVVideoCompositionValidationHandling', 'AXCustomContentProvider', 'CAAction', 'CAAnimationDelegate', 'CALayerDelegate', 'CAMediaTiming', 'CAMetalDrawable', 'CBCentralManagerDelegate', 'CBPeripheralDelegate', 'CBPeripheralManagerDelegate', 'CHHapticAdvancedPatternPlayer', 'CHHapticDeviceCapability', 'CHHapticParameterAttributes', 'CHHapticPatternPlayer', 'CIAccordionFoldTransition', 'CIAffineClamp', 'CIAffineTile', 'CIAreaAverage', 'CIAreaHistogram', 'CIAreaMaximum', 'CIAreaMaximumAlpha', 'CIAreaMinMax', 'CIAreaMinMaxRed', 'CIAreaMinimum', 'CIAreaMinimumAlpha', 'CIAreaReductionFilter', 'CIAttributedTextImageGenerator', 'CIAztecCodeGenerator', 'CIBarcodeGenerator', 'CIBarsSwipeTransition', 'CIBicubicScaleTransform', 'CIBlendWithMask', 'CIBloom', 'CIBokehBlur', 'CIBoxBlur', 'CIBumpDistortion', 'CIBumpDistortionLinear', 'CICMYKHalftone', 'CICheckerboardGenerator', 'CICircleSplashDistortion', 'CICircularScreen', 'CICircularWrap', 'CICode128BarcodeGenerator', 'CIColorAbsoluteDifference', 'CIColorClamp', 'CIColorControls', 'CIColorCrossPolynomial', 'CIColorCube', 'CIColorCubeWithColorSpace', 'CIColorCubesMixedWithMask', 'CIColorCurves', 'CIColorInvert', 'CIColorMap', 'CIColorMatrix', 'CIColorMonochrome', 'CIColorPolynomial', 'CIColorPosterize', 'CIColorThreshold', 'CIColorThresholdOtsu', 'CIColumnAverage', 'CIComicEffect', 'CICompositeOperation', 'CIConvolution', 'CICopyMachineTransition', 'CICoreMLModel', 'CICrystallize', 'CIDepthOfField', 'CIDepthToDisparity', 'CIDiscBlur', 'CIDisintegrateWithMaskTransition', 'CIDisparityToDepth', 
'CIDisplacementDistortion', 'CIDissolveTransition', 'CIDither', 'CIDocumentEnhancer', 'CIDotScreen', 'CIDroste', 'CIEdgePreserveUpsample', 'CIEdgeWork', 'CIEdges', 'CIEightfoldReflectedTile', 'CIExposureAdjust', 'CIFalseColor', 'CIFilter', 'CIFilterConstructor', 'CIFlashTransition', 'CIFourCoordinateGeometryFilter', 'CIFourfoldReflectedTile', 'CIFourfoldRotatedTile', 'CIFourfoldTranslatedTile', 'CIGaborGradients', 'CIGammaAdjust', 'CIGaussianBlur', 'CIGaussianGradient', 'CIGlassDistortion', 'CIGlassLozenge', 'CIGlideReflectedTile', 'CIGloom', 'CIHatchedScreen', 'CIHeightFieldFromMask', 'CIHexagonalPixellate', 'CIHighlightShadowAdjust', 'CIHistogramDisplay', 'CIHoleDistortion', 'CIHueAdjust', 'CIHueSaturationValueGradient', 'CIImageProcessorInput', 'CIImageProcessorOutput', 'CIKMeans', 'CIKaleidoscope', 'CIKeystoneCorrectionCombined', 'CIKeystoneCorrectionHorizontal', 'CIKeystoneCorrectionVertical', 'CILabDeltaE', 'CILanczosScaleTransform', 'CILenticularHaloGenerator', 'CILightTunnel', 'CILineOverlay', 'CILineScreen', 'CILinearGradient', 'CILinearToSRGBToneCurve', 'CIMaskToAlpha', 'CIMaskedVariableBlur', 'CIMaximumComponent', 'CIMedian', 'CIMeshGenerator', 'CIMinimumComponent', 'CIMix', 'CIModTransition', 'CIMorphologyGradient', 'CIMorphologyMaximum', 'CIMorphologyMinimum', 'CIMorphologyRectangleMaximum', 'CIMorphologyRectangleMinimum', 'CIMotionBlur', 'CINinePartStretched', 'CINinePartTiled', 'CINoiseReduction', 'CIOpTile', 'CIPDF417BarcodeGenerator', 'CIPageCurlTransition', 'CIPageCurlWithShadowTransition', 'CIPaletteCentroid', 'CIPalettize', 'CIParallelogramTile', 'CIPerspectiveCorrection', 'CIPerspectiveRotate', 'CIPerspectiveTile', 'CIPerspectiveTransform', 'CIPerspectiveTransformWithExtent', 'CIPhotoEffect', 'CIPinchDistortion', 'CIPixellate', 'CIPlugInRegistration', 'CIPointillize', 'CIQRCodeGenerator', 'CIRadialGradient', 'CIRandomGenerator', 'CIRippleTransition', 'CIRoundedRectangleGenerator', 'CIRowAverage', 'CISRGBToneCurveToLinear', 'CISaliencyMap', 'CISepiaTone', 'CIShadedMaterial', 'CISharpenLuminance', 'CISixfoldReflectedTile', 'CISixfoldRotatedTile', 'CISmoothLinearGradient', 'CISpotColor', 'CISpotLight', 'CIStarShineGenerator', 'CIStraighten', 'CIStretchCrop', 'CIStripesGenerator', 'CISunbeamsGenerator', 'CISwipeTransition', 'CITemperatureAndTint', 'CITextImageGenerator', 'CIThermal', 'CIToneCurve', 'CITorusLensDistortion', 'CITransitionFilter', 'CITriangleKaleidoscope', 'CITriangleTile', 'CITwelvefoldReflectedTile', 'CITwirlDistortion', 'CIUnsharpMask', 'CIVibrance', 'CIVignette', 'CIVignetteEffect', 'CIVortexDistortion', 'CIWhitePointAdjust', 'CIXRay', 'CIZoomBlur', 'CKRecordKeyValueSetting', 'CKRecordValue', 'CLKComplicationDataSource', 'CLLocationManagerDelegate', 'CLSContextProvider', 'CLSDataStoreDelegate', 'CMFallDetectionDelegate', 'CMHeadphoneMotionManagerDelegate', 'CNChangeHistoryEventVisitor', 'CNContactPickerDelegate', 'CNContactViewControllerDelegate', 'CNKeyDescriptor', 'CPApplicationDelegate', 'CPBarButtonProviding', 'CPInterfaceControllerDelegate', 'CPListTemplateDelegate', 'CPListTemplateItem', 'CPMapTemplateDelegate', 'CPNowPlayingTemplateObserver', 'CPPointOfInterestTemplateDelegate', 'CPSearchTemplateDelegate', 'CPSelectableListItem', 'CPSessionConfigurationDelegate', 'CPTabBarTemplateDelegate', 'CPTemplateApplicationDashboardSceneDelegate', 'CPTemplateApplicationSceneDelegate', 'CSSearchableIndexDelegate', 'CTSubscriberDelegate', 'CTTelephonyNetworkInfoDelegate', 'CXCallDirectoryExtensionContextDelegate', 'CXCallObserverDelegate', 'CXProviderDelegate', 
'EAAccessoryDelegate', 'EAGLDrawable', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'EKCalendarChooserDelegate', 'EKEventEditViewDelegate', 'EKEventViewDelegate', 'GCDevice', 'GKAchievementViewControllerDelegate', 'GKAgentDelegate', 'GKChallengeEventHandlerDelegate', 'GKChallengeListener', 'GKFriendRequestComposeViewControllerDelegate', 'GKGameCenterControllerDelegate', 'GKGameModel', 'GKGameModelPlayer', 'GKGameModelUpdate', 'GKGameSessionEventListener', 'GKGameSessionSharingViewControllerDelegate', 'GKInviteEventListener', 'GKLeaderboardViewControllerDelegate', 'GKLocalPlayerListener', 'GKMatchDelegate', 'GKMatchmakerViewControllerDelegate', 'GKPeerPickerControllerDelegate', 'GKRandom', 'GKSavedGameListener', 'GKSceneRootNodeType', 'GKSessionDelegate', 'GKStrategist', 'GKTurnBasedEventListener', 'GKTurnBasedMatchmakerViewControllerDelegate', 'GKVoiceChatClient', 'GLKNamedEffect', 'GLKViewControllerDelegate', 'GLKViewDelegate', 'HKLiveWorkoutBuilderDelegate', 'HKWorkoutSessionDelegate', 'HMAccessoryBrowserDelegate', 'HMAccessoryDelegate', 'HMCameraSnapshotControlDelegate', 'HMCameraStreamControlDelegate', 'HMHomeDelegate', 'HMHomeManagerDelegate', 'HMNetworkConfigurationProfileDelegate', 'ICCameraDeviceDelegate', 'ICCameraDeviceDownloadDelegate', 'ICDeviceBrowserDelegate', 'ICDeviceDelegate', 'ICScannerDeviceDelegate', 'ILMessageFilterQueryHandling', 'INActivateCarSignalIntentHandling', 'INAddMediaIntentHandling', 'INAddTasksIntentHandling', 'INAppendToNoteIntentHandling', 'INBookRestaurantReservationIntentHandling', 'INCallsDomainHandling', 'INCancelRideIntentHandling', 'INCancelWorkoutIntentHandling', 'INCarCommandsDomainHandling', 'INCarPlayDomainHandling', 'INCreateNoteIntentHandling', 'INCreateTaskListIntentHandling', 'INDeleteTasksIntentHandling', 'INEndWorkoutIntentHandling', 'INGetAvailableRestaurantReservationBookingDefaultsIntentHandling', 'INGetAvailableRestaurantReservationBookingsIntentHandling', 'INGetCarLockStatusIntentHandling', 'INGetCarPowerLevelStatusIntentHandling', 'INGetCarPowerLevelStatusIntentResponseObserver', 'INGetRestaurantGuestIntentHandling', 'INGetRideStatusIntentHandling', 'INGetRideStatusIntentResponseObserver', 'INGetUserCurrentRestaurantReservationBookingsIntentHandling', 'INGetVisualCodeIntentHandling', 'INIntentHandlerProviding', 'INListCarsIntentHandling', 'INListRideOptionsIntentHandling', 'INMessagesDomainHandling', 'INNotebookDomainHandling', 'INPauseWorkoutIntentHandling', 'INPayBillIntentHandling', 'INPaymentsDomainHandling', 'INPhotosDomainHandling', 'INPlayMediaIntentHandling', 'INRadioDomainHandling', 'INRequestPaymentIntentHandling', 'INRequestRideIntentHandling', 'INResumeWorkoutIntentHandling', 'INRidesharingDomainHandling', 'INSaveProfileInCarIntentHandling', 'INSearchCallHistoryIntentHandling', 'INSearchForAccountsIntentHandling', 'INSearchForBillsIntentHandling', 'INSearchForMediaIntentHandling', 'INSearchForMessagesIntentHandling', 'INSearchForNotebookItemsIntentHandling', 'INSearchForPhotosIntentHandling', 'INSendMessageIntentHandling', 'INSendPaymentIntentHandling', 'INSendRideFeedbackIntentHandling', 'INSetAudioSourceInCarIntentHandling', 'INSetCarLockStatusIntentHandling', 'INSetClimateSettingsInCarIntentHandling', 'INSetDefrosterSettingsInCarIntentHandling', 'INSetMessageAttributeIntentHandling', 'INSetProfileInCarIntentHandling', 'INSetRadioStationIntentHandling', 'INSetSeatSettingsInCarIntentHandling', 'INSetTaskAttributeIntentHandling', 'INSnoozeTasksIntentHandling', 'INSpeakable', 'INStartAudioCallIntentHandling', 
'INStartCallIntentHandling', 'INStartPhotoPlaybackIntentHandling', 'INStartVideoCallIntentHandling', 'INStartWorkoutIntentHandling', 'INTransferMoneyIntentHandling', 'INUIAddVoiceShortcutButtonDelegate', 'INUIAddVoiceShortcutViewControllerDelegate', 'INUIEditVoiceShortcutViewControllerDelegate', 'INUIHostedViewControlling', 'INUIHostedViewSiriProviding', 'INUpdateMediaAffinityIntentHandling', 'INVisualCodeDomainHandling', 'INWorkoutsDomainHandling', 'JSExport', 'MCAdvertiserAssistantDelegate', 'MCBrowserViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MCNearbyServiceBrowserDelegate', 'MCSessionDelegate', 'MDLAssetResolver', 'MDLComponent', 'MDLJointAnimation', 'MDLLightProbeIrradianceDataSource', 'MDLMeshBuffer', 'MDLMeshBufferAllocator', 'MDLMeshBufferZone', 'MDLNamed', 'MDLObjectContainerComponent', 'MDLTransformComponent', 'MDLTransformOp', 'MFMailComposeViewControllerDelegate', 'MFMessageComposeViewControllerDelegate', 'MIDICIProfileResponderDelegate', 'MKAnnotation', 'MKGeoJSONObject', 'MKLocalSearchCompleterDelegate', 'MKMapViewDelegate', 'MKOverlay', 'MKReverseGeocoderDelegate', 'MLBatchProvider', 'MLCustomLayer', 'MLCustomModel', 'MLFeatureProvider', 'MLWritable', 'MPMediaPickerControllerDelegate', 'MPMediaPlayback', 'MPNowPlayingSessionDelegate', 'MPPlayableContentDataSource', 'MPPlayableContentDelegate', 'MPSystemMusicPlayerController', 'MSAuthenticationPresentationContext', 'MSMessagesAppTranscriptPresentation', 'MSStickerBrowserViewDataSource', 'MTKViewDelegate', 'MTLAccelerationStructure', 'MTLAccelerationStructureCommandEncoder', 'MTLArgumentEncoder', 'MTLBinaryArchive', 'MTLBlitCommandEncoder', 'MTLBuffer', 'MTLCaptureScope', 'MTLCommandBuffer', 'MTLCommandBufferEncoderInfo', 'MTLCommandEncoder', 'MTLCommandQueue', 'MTLComputeCommandEncoder', 'MTLComputePipelineState', 'MTLCounter', 'MTLCounterSampleBuffer', 'MTLCounterSet', 'MTLDepthStencilState', 'MTLDevice', 'MTLDrawable', 'MTLDynamicLibrary', 'MTLEvent', 'MTLFence', 'MTLFunction', 'MTLFunctionHandle', 'MTLFunctionLog', 'MTLFunctionLogDebugLocation', 'MTLHeap', 'MTLIndirectCommandBuffer', 'MTLIndirectComputeCommand', 'MTLIndirectComputeCommandEncoder', 'MTLIndirectRenderCommand', 'MTLIndirectRenderCommandEncoder', 'MTLIntersectionFunctionTable', 'MTLLibrary', 'MTLLogContainer', 'MTLParallelRenderCommandEncoder', 'MTLRasterizationRateMap', 'MTLRenderCommandEncoder', 'MTLRenderPipelineState', 'MTLResource', 'MTLResourceStateCommandEncoder', 'MTLSamplerState', 'MTLSharedEvent', 'MTLTexture', 'MTLVisibleFunctionTable', 'MXMetricManagerSubscriber', 'MyClassJavaScriptMethods', 'NCWidgetProviding', 'NEAppPushDelegate', 'NFCFeliCaTag', 'NFCISO15693Tag', 'NFCISO7816Tag', 'NFCMiFareTag', 'NFCNDEFReaderSessionDelegate', 'NFCNDEFTag', 'NFCReaderSession', 'NFCReaderSessionDelegate', 'NFCTag', 'NFCTagReaderSessionDelegate', 'NFCVASReaderSessionDelegate', 'NISessionDelegate', 'NSCacheDelegate', 'NSCoding', 'NSCollectionLayoutContainer', 'NSCollectionLayoutEnvironment', 'NSCollectionLayoutVisibleItem', 'NSCopying', 'NSDecimalNumberBehaviors', 'NSDiscardableContent', 'NSExtensionRequestHandling', 'NSFastEnumeration', 'NSFetchRequestResult', 'NSFetchedResultsControllerDelegate', 'NSFetchedResultsSectionInfo', 'NSFileManagerDelegate', 'NSFilePresenter', 'NSFileProviderChangeObserver', 'NSFileProviderEnumerationObserver', 'NSFileProviderEnumerator', 'NSFileProviderItem', 'NSFileProviderServiceSource', 'NSItemProviderReading', 'NSItemProviderWriting', 'NSKeyedArchiverDelegate', 'NSKeyedUnarchiverDelegate', 
'NSLayoutManagerDelegate', 'NSLocking', 'NSMachPortDelegate', 'NSMetadataQueryDelegate', 'NSMutableCopying', 'NSNetServiceBrowserDelegate', 'NSNetServiceDelegate', 'NSPortDelegate', 'NSProgressReporting', 'NSSecureCoding', 'NSStreamDelegate', 'NSTextAttachmentContainer', 'NSTextLayoutOrientationProvider', 'NSTextStorageDelegate', 'NSURLAuthenticationChallengeSender', 'NSURLConnectionDataDelegate', 'NSURLConnectionDelegate', 'NSURLConnectionDownloadDelegate', 'NSURLProtocolClient', 'NSURLSessionDataDelegate', 'NSURLSessionDelegate', 'NSURLSessionDownloadDelegate', 'NSURLSessionStreamDelegate', 'NSURLSessionTaskDelegate', 'NSURLSessionWebSocketDelegate', 'NSUserActivityDelegate', 'NSXMLParserDelegate', 'NSXPCListenerDelegate', 'NSXPCProxyCreating', 'NWTCPConnectionAuthenticationDelegate', 'OSLogEntryFromProcess', 'OSLogEntryWithPayload', 'PDFDocumentDelegate', 'PDFViewDelegate', 'PHContentEditingController', 'PHLivePhotoFrame', 'PHLivePhotoViewDelegate', 'PHPhotoLibraryAvailabilityObserver', 'PHPhotoLibraryChangeObserver', 'PHPickerViewControllerDelegate', 'PKAddPassesViewControllerDelegate', 'PKAddPaymentPassViewControllerDelegate', 'PKAddSecureElementPassViewControllerDelegate', 'PKCanvasViewDelegate', 'PKDisbursementAuthorizationControllerDelegate', 'PKIssuerProvisioningExtensionAuthorizationProviding', 'PKPaymentAuthorizationControllerDelegate', 'PKPaymentAuthorizationViewControllerDelegate', 'PKPaymentInformationRequestHandling', 'PKPushRegistryDelegate', 'PKToolPickerObserver', 'PreviewDisplaying', 'QLPreviewControllerDataSource', 'QLPreviewControllerDelegate', 'QLPreviewItem', 'QLPreviewingController', 'RPBroadcastActivityControllerDelegate', 'RPBroadcastActivityViewControllerDelegate', 'RPBroadcastControllerDelegate', 'RPPreviewViewControllerDelegate', 'RPScreenRecorderDelegate', 'SCNActionable', 'SCNAnimatable', 'SCNAnimation', 'SCNAvoidOccluderConstraintDelegate', 'SCNBoundingVolume', 'SCNBufferStream', 'SCNCameraControlConfiguration', 'SCNCameraControllerDelegate', 'SCNNodeRendererDelegate', 'SCNPhysicsContactDelegate', 'SCNProgramDelegate', 'SCNSceneExportDelegate', 'SCNSceneRenderer', 'SCNSceneRendererDelegate', 'SCNShadable', 'SCNTechniqueSupport', 'SFSafariViewControllerDelegate', 'SFSpeechRecognitionTaskDelegate', 'SFSpeechRecognizerDelegate', 'SKCloudServiceSetupViewControllerDelegate', 'SKOverlayDelegate', 'SKPaymentQueueDelegate', 'SKPaymentTransactionObserver', 'SKPhysicsContactDelegate', 'SKProductsRequestDelegate', 'SKRequestDelegate', 'SKSceneDelegate', 'SKStoreProductViewControllerDelegate', 'SKViewDelegate', 'SKWarpable', 'SNRequest', 'SNResult', 'SNResultsObserving', 'SRSensorReaderDelegate', 'TKSmartCardTokenDriverDelegate', 'TKSmartCardUserInteractionDelegate', 'TKTokenDelegate', 'TKTokenDriverDelegate', 'TKTokenSessionDelegate', 'UIAccelerometerDelegate', 'UIAccessibilityContainerDataTable', 'UIAccessibilityContainerDataTableCell', 'UIAccessibilityContentSizeCategoryImageAdjusting', 'UIAccessibilityIdentification', 'UIAccessibilityReadingContent', 'UIActionSheetDelegate', 'UIActivityItemSource', 'UIActivityItemsConfigurationReading', 'UIAdaptivePresentationControllerDelegate', 'UIAlertViewDelegate', 'UIAppearance', 'UIAppearanceContainer', 'UIApplicationDelegate', 'UIBarPositioning', 'UIBarPositioningDelegate', 'UICloudSharingControllerDelegate', 'UICollectionViewDataSource', 'UICollectionViewDataSourcePrefetching', 'UICollectionViewDelegate', 'UICollectionViewDelegateFlowLayout', 'UICollectionViewDragDelegate', 'UICollectionViewDropCoordinator', 
'UICollectionViewDropDelegate', 'UICollectionViewDropItem', 'UICollectionViewDropPlaceholderContext', 'UICollisionBehaviorDelegate', 'UIColorPickerViewControllerDelegate', 'UIConfigurationState', 'UIContentConfiguration', 'UIContentContainer', 'UIContentSizeCategoryAdjusting', 'UIContentView', 'UIContextMenuInteractionAnimating', 'UIContextMenuInteractionCommitAnimating', 'UIContextMenuInteractionDelegate', 'UICoordinateSpace', 'UIDataSourceModelAssociation', 'UIDataSourceTranslating', 'UIDocumentBrowserViewControllerDelegate', 'UIDocumentInteractionControllerDelegate', 'UIDocumentMenuDelegate', 'UIDocumentPickerDelegate', 'UIDragAnimating', 'UIDragDropSession', 'UIDragInteractionDelegate', 'UIDragSession', 'UIDropInteractionDelegate', 'UIDropSession', 'UIDynamicAnimatorDelegate', 'UIDynamicItem', 'UIFocusAnimationContext', 'UIFocusDebuggerOutput', 'UIFocusEnvironment', 'UIFocusItem', 'UIFocusItemContainer', 'UIFocusItemScrollableContainer', 'UIFontPickerViewControllerDelegate', 'UIGestureRecognizerDelegate', 'UIGuidedAccessRestrictionDelegate', 'UIImageConfiguration', 'UIImagePickerControllerDelegate', 'UIIndirectScribbleInteractionDelegate', 'UIInputViewAudioFeedback', 'UIInteraction', 'UIItemProviderPresentationSizeProviding', 'UIKeyInput', 'UILargeContentViewerInteractionDelegate', 'UILargeContentViewerItem', 'UILayoutSupport', 'UIMenuBuilder', 'UINavigationBarDelegate', 'UINavigationControllerDelegate', 'UIObjectRestoration', 'UIPageViewControllerDataSource', 'UIPageViewControllerDelegate', 'UIPasteConfigurationSupporting', 'UIPencilInteractionDelegate', 'UIPickerViewAccessibilityDelegate', 'UIPickerViewDataSource', 'UIPickerViewDelegate', 'UIPointerInteractionAnimating', 'UIPointerInteractionDelegate', 'UIPopoverBackgroundViewMethods', 'UIPopoverControllerDelegate', 'UIPopoverPresentationControllerDelegate', 'UIPreviewActionItem', 'UIPreviewInteractionDelegate', 'UIPrintInteractionControllerDelegate', 'UIPrinterPickerControllerDelegate', 'UIResponderStandardEditActions', 'UISceneDelegate', 'UIScreenshotServiceDelegate', 'UIScribbleInteractionDelegate', 'UIScrollViewAccessibilityDelegate', 'UIScrollViewDelegate', 'UISearchBarDelegate', 'UISearchControllerDelegate', 'UISearchDisplayDelegate', 'UISearchResultsUpdating', 'UISearchSuggestion', 'UISearchTextFieldDelegate', 'UISearchTextFieldPasteItem', 'UISplitViewControllerDelegate', 'UISpringLoadedInteractionBehavior', 'UISpringLoadedInteractionContext', 'UISpringLoadedInteractionEffect', 'UISpringLoadedInteractionSupporting', 'UIStateRestoring', 'UITabBarControllerDelegate', 'UITabBarDelegate', 'UITableViewDataSource', 'UITableViewDataSourcePrefetching', 'UITableViewDelegate', 'UITableViewDragDelegate', 'UITableViewDropCoordinator', 'UITableViewDropDelegate', 'UITableViewDropItem', 'UITableViewDropPlaceholderContext', 'UITextDocumentProxy', 'UITextDragDelegate', 'UITextDragRequest', 'UITextDraggable', 'UITextDropDelegate', 'UITextDropRequest', 'UITextDroppable', 'UITextFieldDelegate', 'UITextFormattingCoordinatorDelegate', 'UITextInput', 'UITextInputDelegate', 'UITextInputTokenizer', 'UITextInputTraits', 'UITextInteractionDelegate', 'UITextPasteConfigurationSupporting', 'UITextPasteDelegate', 'UITextPasteItem', 'UITextSelecting', 'UITextViewDelegate', 'UITimingCurveProvider', 'UIToolbarDelegate', 'UITraitEnvironment', 'UIUserActivityRestoring', 'UIVideoEditorControllerDelegate', 'UIViewAnimating', 'UIViewControllerAnimatedTransitioning', 'UIViewControllerContextTransitioning', 'UIViewControllerInteractiveTransitioning', 
'UIViewControllerPreviewing', 'UIViewControllerPreviewingDelegate', 'UIViewControllerRestoration', 'UIViewControllerTransitionCoordinator', 'UIViewControllerTransitionCoordinatorContext', 'UIViewControllerTransitioningDelegate', 'UIViewImplicitlyAnimating', 'UIWebViewDelegate', 'UIWindowSceneDelegate', 'UNNotificationContentExtension', 'UNUserNotificationCenterDelegate', 'VNDocumentCameraViewControllerDelegate', 'VNFaceObservationAccepting', 'VNRequestProgressProviding', 'VNRequestRevisionProviding', 'VSAccountManagerDelegate', 'WCSessionDelegate', 'WKHTTPCookieStoreObserver', 'WKNavigationDelegate', 'WKPreviewActionItem', 'WKScriptMessageHandler', 'WKScriptMessageHandlerWithReply', 'WKUIDelegate', 'WKURLSchemeHandler', 'WKURLSchemeTask'}
-COCOA_PRIMITIVES = {'ACErrorCode', 'ALCcontext_struct', 'ALCdevice_struct', 'ALMXGlyphEntry', 'ALMXHeader', 'API_UNAVAILABLE', 'AUChannelInfo', 'AUDependentParameter', 'AUDistanceAttenuationData', 'AUHostIdentifier', 'AUHostVersionIdentifier', 'AUInputSamplesInOutputCallbackStruct', 'AUMIDIEvent', 'AUMIDIOutputCallbackStruct', 'AUNodeInteraction', 'AUNodeRenderCallback', 'AUNumVersion', 'AUParameterAutomationEvent', 'AUParameterEvent', 'AUParameterMIDIMapping', 'AUPreset', 'AUPresetEvent', 'AURecordedParameterEvent', 'AURenderCallbackStruct', 'AURenderEventHeader', 'AUSamplerBankPresetData', 'AUSamplerInstrumentData', 'AnchorPoint', 'AnchorPointTable', 'AnkrTable', 'AudioBalanceFade', 'AudioBuffer', 'AudioBufferList', 'AudioBytePacketTranslation', 'AudioChannelDescription', 'AudioChannelLayout', 'AudioClassDescription', 'AudioCodecMagicCookieInfo', 'AudioCodecPrimeInfo', 'AudioComponentDescription', 'AudioComponentPlugInInterface', 'AudioConverterPrimeInfo', 'AudioFileMarker', 'AudioFileMarkerList', 'AudioFilePacketTableInfo', 'AudioFileRegion', 'AudioFileRegionList', 'AudioFileTypeAndFormatID', 'AudioFile_SMPTE_Time', 'AudioFormatInfo', 'AudioFormatListItem', 'AudioFramePacketTranslation', 'AudioIndependentPacketTranslation', 'AudioOutputUnitMIDICallbacks', 'AudioOutputUnitStartAtTimeParams', 'AudioPacketDependencyInfoTranslation', 'AudioPacketRangeByteCountTranslation', 'AudioPacketRollDistanceTranslation', 'AudioPanningInfo', 'AudioQueueBuffer', 'AudioQueueChannelAssignment', 'AudioQueueLevelMeterState', 'AudioQueueParameterEvent', 'AudioStreamBasicDescription', 'AudioStreamPacketDescription', 'AudioTimeStamp', 'AudioUnitCocoaViewInfo', 'AudioUnitConnection', 'AudioUnitExternalBuffer', 'AudioUnitFrequencyResponseBin', 'AudioUnitMIDIControlMapping', 'AudioUnitMeterClipping', 'AudioUnitNodeConnection', 'AudioUnitOtherPluginDesc', 'AudioUnitParameter', 'AudioUnitParameterEvent', 'AudioUnitParameterHistoryInfo', 'AudioUnitParameterInfo', 'AudioUnitParameterNameInfo', 'AudioUnitParameterStringFromValue', 'AudioUnitParameterValueFromString', 'AudioUnitParameterValueName', 'AudioUnitParameterValueTranslation', 'AudioUnitPresetMAS_SettingData', 'AudioUnitPresetMAS_Settings', 'AudioUnitProperty', 'AudioUnitRenderContext', 'AudioValueRange', 'AudioValueTranslation', 'AuthorizationOpaqueRef', 'BslnFormat0Part', 'BslnFormat1Part', 'BslnFormat2Part', 'BslnFormat3Part', 'BslnTable', 'CABarBeatTime', 'CAFAudioDescription', 'CAFChunkHeader', 'CAFDataChunk', 'CAFFileHeader', 'CAFInfoStrings', 'CAFInstrumentChunk', 'CAFMarker', 'CAFMarkerChunk', 'CAFOverviewChunk', 'CAFOverviewSample', 'CAFPacketTableHeader', 'CAFPeakChunk', 'CAFPositionPeak', 'CAFRegion', 'CAFRegionChunk', 'CAFStringID', 'CAFStrings', 'CAFUMIDChunk', 'CAF_SMPTE_Time', 'CAF_UUID_ChunkHeader', 'CA_BOXABLE', 'CFHostClientContext', 'CFNetServiceClientContext', 'CF_BRIDGED_MUTABLE_TYPE', 'CF_BRIDGED_TYPE', 'CF_RELATED_TYPE', 'CGAffineTransform', 'CGDataConsumerCallbacks', 'CGDataProviderDirectCallbacks', 'CGDataProviderSequentialCallbacks', 'CGFunctionCallbacks', 'CGPDFArray', 'CGPDFContentStream', 'CGPDFDictionary', 'CGPDFObject', 'CGPDFOperatorTable', 'CGPDFScanner', 'CGPDFStream', 'CGPDFString', 'CGPathElement', 'CGPatternCallbacks', 'CGVector', 'CG_BOXABLE', 'CLLocationCoordinate2D', 'CM_BRIDGED_TYPE', 'CTParagraphStyleSetting', 'CVPlanarComponentInfo', 'CVPlanarPixelBufferInfo', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'CVSMPTETime', 'CV_BRIDGED_TYPE', 'ComponentInstanceRecord', 
'ExtendedAudioFormatInfo', 'ExtendedControlEvent', 'ExtendedNoteOnEvent', 'ExtendedTempoEvent', 'FontVariation', 'GCQuaternion', 'GKBox', 'GKQuad', 'GKTriangle', 'GLKEffectPropertyPrv', 'HostCallbackInfo', 'IIO_BRIDGED_TYPE', 'IUnknownVTbl', 'JustDirectionTable', 'JustPCAction', 'JustPCActionSubrecord', 'JustPCConditionalAddAction', 'JustPCDecompositionAction', 'JustPCDuctilityAction', 'JustPCGlyphRepeatAddAction', 'JustPostcompTable', 'JustTable', 'JustWidthDeltaEntry', 'JustWidthDeltaGroup', 'KernIndexArrayHeader', 'KernKerningPair', 'KernOffsetTable', 'KernOrderedListEntry', 'KernOrderedListHeader', 'KernSimpleArrayHeader', 'KernStateEntry', 'KernStateHeader', 'KernSubtableHeader', 'KernTableHeader', 'KernVersion0Header', 'KernVersion0SubtableHeader', 'KerxAnchorPointAction', 'KerxControlPointAction', 'KerxControlPointEntry', 'KerxControlPointHeader', 'KerxCoordinateAction', 'KerxIndexArrayHeader', 'KerxKerningPair', 'KerxOrderedListEntry', 'KerxOrderedListHeader', 'KerxSimpleArrayHeader', 'KerxStateEntry', 'KerxStateHeader', 'KerxSubtableHeader', 'KerxTableHeader', 'LcarCaretClassEntry', 'LcarCaretTable', 'LtagStringRange', 'LtagTable', 'MDL_CLASS_EXPORT', 'MIDICIDeviceIdentification', 'MIDIChannelMessage', 'MIDIControlTransform', 'MIDIDriverInterface', 'MIDIEventList', 'MIDIEventPacket', 'MIDIIOErrorNotification', 'MIDIMessage_128', 'MIDIMessage_64', 'MIDIMessage_96', 'MIDIMetaEvent', 'MIDINoteMessage', 'MIDINotification', 'MIDIObjectAddRemoveNotification', 'MIDIObjectPropertyChangeNotification', 'MIDIPacket', 'MIDIPacketList', 'MIDIRawData', 'MIDISysexSendRequest', 'MIDIThruConnectionEndpoint', 'MIDIThruConnectionParams', 'MIDITransform', 'MIDIValueMap', 'MPSDeviceOptions', 'MixerDistanceParams', 'MortChain', 'MortContextualSubtable', 'MortFeatureEntry', 'MortInsertionSubtable', 'MortLigatureSubtable', 'MortRearrangementSubtable', 'MortSubtable', 'MortSwashSubtable', 'MortTable', 'MorxChain', 'MorxContextualSubtable', 'MorxInsertionSubtable', 'MorxLigatureSubtable', 'MorxRearrangementSubtable', 'MorxSubtable', 'MorxTable', 'MusicDeviceNoteParams', 'MusicDeviceStdNoteParams', 'MusicEventUserData', 'MusicTrackLoopInfo', 'NoteParamsControlValue', 'OpaqueAudioComponent', 'OpaqueAudioComponentInstance', 'OpaqueAudioConverter', 'OpaqueAudioQueue', 'OpaqueAudioQueueProcessingTap', 'OpaqueAudioQueueTimeline', 'OpaqueExtAudioFile', 'OpaqueJSClass', 'OpaqueJSContext', 'OpaqueJSContextGroup', 'OpaqueJSPropertyNameAccumulator', 'OpaqueJSPropertyNameArray', 'OpaqueJSString', 'OpaqueJSValue', 'OpaqueMusicEventIterator', 'OpaqueMusicPlayer', 'OpaqueMusicSequence', 'OpaqueMusicTrack', 'OpbdSideValues', 'OpbdTable', 'ParameterEvent', 'PropLookupSegment', 'PropLookupSingle', 'PropTable', 'ROTAGlyphEntry', 'ROTAHeader', 'SCNMatrix4', 'SCNVector3', 'SCNVector4', 'SFNTLookupArrayHeader', 'SFNTLookupBinarySearchHeader', 'SFNTLookupSegment', 'SFNTLookupSegmentHeader', 'SFNTLookupSingle', 'SFNTLookupSingleHeader', 'SFNTLookupTable', 'SFNTLookupTrimmedArrayHeader', 'SFNTLookupVectorHeader', 'SMPTETime', 'STClassTable', 'STEntryOne', 'STEntryTwo', 'STEntryZero', 'STHeader', 'STXEntryOne', 'STXEntryTwo', 'STXEntryZero', 'STXHeader', 'ScheduledAudioFileRegion', 'ScheduledAudioSlice', 'SecKeychainAttribute', 'SecKeychainAttributeInfo', 'SecKeychainAttributeList', 'TrakTable', 'TrakTableData', 'TrakTableEntry', 'UIAccessibility', 'VTDecompressionOutputCallbackRecord', 'VTInt32Point', 'VTInt32Size', '_CFHTTPAuthentication', '_GLKMatrix2', '_GLKMatrix3', '_GLKMatrix4', '_GLKQuaternion', '_GLKVector2', 
'_GLKVector3', '_GLKVector4', '_GLKVertexAttributeParameters', '_MTLAxisAlignedBoundingBox', '_MTLPackedFloat3', '_MTLPackedFloat4x3', '_NSRange', '_NSZone', '__CFHTTPMessage', '__CFHost', '__CFNetDiagnostic', '__CFNetService', '__CFNetServiceBrowser', '__CFNetServiceMonitor', '__CFXMLNode', '__CFXMLParser', '__GLsync', '__SecAccess', '__SecCertificate', '__SecIdentity', '__SecKey', '__SecRandom', '__attribute__', 'gss_OID_desc_struct', 'gss_OID_set_desc_struct', 'gss_auth_identity', 'gss_buffer_desc_struct', 'gss_buffer_set_desc_struct', 'gss_channel_bindings_struct', 'gss_cred_id_t_desc_struct', 'gss_ctx_id_t_desc_struct', 'gss_iov_buffer_desc_struct', 'gss_krb5_cfx_keydata', 'gss_krb5_lucid_context_v1', 'gss_krb5_lucid_context_version', 'gss_krb5_lucid_key', 'gss_krb5_rfc1964_keydata', 'gss_name_t_desc_struct', 'opaqueCMBufferQueueTriggerToken', 'sfntCMapEncoding', 'sfntCMapExtendedSubHeader', 'sfntCMapHeader', 'sfntCMapSubHeader', 'sfntDescriptorHeader', 'sfntDirectory', 'sfntDirectoryEntry', 'sfntFeatureHeader', 'sfntFeatureName', 'sfntFontDescriptor', 'sfntFontFeatureSetting', 'sfntFontRunFeature', 'sfntInstance', 'sfntNameHeader', 'sfntNameRecord', 'sfntVariationAxis', 'sfntVariationHeader'}
-
-if __name__ == '__main__': # pragma: no cover
- import os
- import re
-
- FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/'
- frameworks = os.listdir(FRAMEWORKS_PATH)
-
- all_interfaces = set()
- all_protocols = set()
- all_primitives = set()
- for framework in frameworks:
- frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
- if not os.path.exists(frameworkHeadersDir):
- continue
-
- headerFilenames = os.listdir(frameworkHeadersDir)
-
- for f in headerFilenames:
- if not f.endswith('.h'):
- continue
- headerFilePath = frameworkHeadersDir + f
-
- try:
- with open(headerFilePath, encoding='utf-8') as f:
- content = f.read()
- except UnicodeDecodeError:
- print("Decoding error for file: {0}".format(headerFilePath))
- continue
-
- res = re.findall(r'(?<=@interface )\w+', content)
- for r in res:
- all_interfaces.add(r)
-
- res = re.findall(r'(?<=@protocol )\w+', content)
- for r in res:
- all_protocols.add(r)
-
- res = re.findall(r'(?<=typedef enum )\w+', content)
- for r in res:
- all_primitives.add(r)
-
- res = re.findall(r'(?<=typedef struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
- res = re.findall(r'(?<=typedef const struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
-
- print("ALL interfaces: \n")
- print(sorted(list(all_interfaces)))
-
- print("\nALL protocols: \n")
- print(sorted(list(all_protocols)))
-
- print("\nALL primitives: \n")
- print(sorted(list(all_primitives)))
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_csound_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_csound_builtins.py
deleted file mode 100644
index 5f37306..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_csound_builtins.py
+++ /dev/null
@@ -1,1780 +0,0 @@
-"""
- pygments.lexers._csound_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-REMOVED_OPCODES = set('''
-OSCsendA
-beadsynt
-beosc
-buchla
-getrowlin
-lua_exec
-lua_iaopcall
-lua_iaopcall_off
-lua_ikopcall
-lua_ikopcall_off
-lua_iopcall
-lua_iopcall_off
-lua_opdef
-mp3scal_check
-mp3scal_load
-mp3scal_load2
-mp3scal_play
-mp3scal_play2
-pvsgendy
-socksend_k
-signalflowgraph
-sumTableFilter
-systime
-tabrowlin
-vbap1move
-'''.split())
-
-# Opcodes in Csound 6.18.0 using:
-# python3 -c "
-# import re
-# from subprocess import Popen, PIPE
-# output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1]
-# opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
-# output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1]
-# all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
-# deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes]
-# # Remove opcodes that csound.py treats as keywords.
-# keyword_opcodes = [
-# 'cggoto', # https://csound.com/docs/manual/cggoto.html
-# 'cigoto', # https://csound.com/docs/manual/cigoto.html
-# 'cingoto', # (undocumented)
-# 'ckgoto', # https://csound.com/docs/manual/ckgoto.html
-# 'cngoto', # https://csound.com/docs/manual/cngoto.html
-# 'cnkgoto', # (undocumented)
-# 'endin', # https://csound.com/docs/manual/endin.html
-# 'endop', # https://csound.com/docs/manual/endop.html
-# 'goto', # https://csound.com/docs/manual/goto.html
-# 'igoto', # https://csound.com/docs/manual/igoto.html
-# 'instr', # https://csound.com/docs/manual/instr.html
-# 'kgoto', # https://csound.com/docs/manual/kgoto.html
-# 'loop_ge', # https://csound.com/docs/manual/loop_ge.html
-# 'loop_gt', # https://csound.com/docs/manual/loop_gt.html
-# 'loop_le', # https://csound.com/docs/manual/loop_le.html
-# 'loop_lt', # https://csound.com/docs/manual/loop_lt.html
-# 'opcode', # https://csound.com/docs/manual/opcode.html
-# 'reinit', # https://csound.com/docs/manual/reinit.html
-# 'return', # https://csound.com/docs/manual/return.html
-# 'rireturn', # https://csound.com/docs/manual/rireturn.html
-# 'rigoto', # https://csound.com/docs/manual/rigoto.html
-# 'tigoto', # https://csound.com/docs/manual/tigoto.html
-# 'timout' # https://csound.com/docs/manual/timout.html
-# ]
-# opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes]
-# newline = '\n'
-# print(f'''OPCODES = set(\'''
-# {newline.join(opcodes)}
-# \'''.split())
-#
-# DEPRECATED_OPCODES = set(\'''
-# {newline.join(deprecated_opcodes)}
-# \'''.split())
-# ''')
-# "
-
-OPCODES = set('''
-ATSadd
-ATSaddnz
-ATSbufread
-ATScross
-ATSinfo
-ATSinterpread
-ATSpartialtap
-ATSread
-ATSreadnz
-ATSsinnoi
-FLbox
-FLbutBank
-FLbutton
-FLcloseButton
-FLcolor
-FLcolor2
-FLcount
-FLexecButton
-FLgetsnap
-FLgroup
-FLgroupEnd
-FLgroup_end
-FLhide
-FLhvsBox
-FLhvsBoxSetValue
-FLjoy
-FLkeyIn
-FLknob
-FLlabel
-FLloadsnap
-FLmouse
-FLpack
-FLpackEnd
-FLpack_end
-FLpanel
-FLpanelEnd
-FLpanel_end
-FLprintk
-FLprintk2
-FLroller
-FLrun
-FLsavesnap
-FLscroll
-FLscrollEnd
-FLscroll_end
-FLsetAlign
-FLsetBox
-FLsetColor
-FLsetColor2
-FLsetFont
-FLsetPosition
-FLsetSize
-FLsetSnapGroup
-FLsetText
-FLsetTextColor
-FLsetTextSize
-FLsetTextType
-FLsetVal
-FLsetVal_i
-FLsetVali
-FLsetsnap
-FLshow
-FLslidBnk
-FLslidBnk2
-FLslidBnk2Set
-FLslidBnk2Setk
-FLslidBnkGetHandle
-FLslidBnkSet
-FLslidBnkSetk
-FLslider
-FLtabs
-FLtabsEnd
-FLtabs_end
-FLtext
-FLupdate
-FLvalue
-FLvkeybd
-FLvslidBnk
-FLvslidBnk2
-FLxyin
-JackoAudioIn
-JackoAudioInConnect
-JackoAudioOut
-JackoAudioOutConnect
-JackoFreewheel
-JackoInfo
-JackoInit
-JackoMidiInConnect
-JackoMidiOut
-JackoMidiOutConnect
-JackoNoteOut
-JackoOn
-JackoTransport
-K35_hpf
-K35_lpf
-MixerClear
-MixerGetLevel
-MixerReceive
-MixerSend
-MixerSetLevel
-MixerSetLevel_i
-OSCbundle
-OSCcount
-OSCinit
-OSCinitM
-OSClisten
-OSCraw
-OSCsend
-OSCsend_lo
-S
-STKBandedWG
-STKBeeThree
-STKBlowBotl
-STKBlowHole
-STKBowed
-STKBrass
-STKClarinet
-STKDrummer
-STKFMVoices
-STKFlute
-STKHevyMetl
-STKMandolin
-STKModalBar
-STKMoog
-STKPercFlut
-STKPlucked
-STKResonate
-STKRhodey
-STKSaxofony
-STKShakers
-STKSimple
-STKSitar
-STKStifKarp
-STKTubeBell
-STKVoicForm
-STKWhistle
-STKWurley
-a
-abs
-active
-adsr
-adsyn
-adsynt
-adsynt2
-aftouch
-allpole
-alpass
-alwayson
-ampdb
-ampdbfs
-ampmidi
-ampmidicurve
-ampmidid
-apoleparams
-arduinoRead
-arduinoReadF
-arduinoStart
-arduinoStop
-areson
-aresonk
-atone
-atonek
-atonex
-autocorr
-babo
-balance
-balance2
-bamboo
-barmodel
-bbcutm
-bbcuts
-betarand
-bexprnd
-bformdec1
-bformdec2
-bformenc1
-binit
-biquad
-biquada
-birnd
-bob
-bpf
-bpfcos
-bqrez
-butbp
-butbr
-buthp
-butlp
-butterbp
-butterbr
-butterhp
-butterlp
-button
-buzz
-c2r
-cabasa
-cauchy
-cauchyi
-cbrt
-ceil
-cell
-cent
-centroid
-ceps
-cepsinv
-chanctrl
-changed
-changed2
-chani
-chano
-chebyshevpoly
-checkbox
-chn_S
-chn_a
-chn_k
-chnclear
-chnexport
-chnget
-chngeta
-chngeti
-chngetk
-chngetks
-chngets
-chnmix
-chnparams
-chnset
-chnseta
-chnseti
-chnsetk
-chnsetks
-chnsets
-chuap
-clear
-clfilt
-clip
-clockoff
-clockon
-cmp
-cmplxprod
-cntCreate
-cntCycles
-cntDelete
-cntDelete_i
-cntRead
-cntReset
-cntState
-comb
-combinv
-compilecsd
-compileorc
-compilestr
-compress
-compress2
-connect
-control
-convle
-convolve
-copya2ftab
-copyf2array
-cos
-cosh
-cosinv
-cosseg
-cossegb
-cossegr
-count
-count_i
-cps2pch
-cpsmidi
-cpsmidib
-cpsmidinn
-cpsoct
-cpspch
-cpstmid
-cpstun
-cpstuni
-cpsxpch
-cpumeter
-cpuprc
-cross2
-crossfm
-crossfmi
-crossfmpm
-crossfmpmi
-crosspm
-crosspmi
-crunch
-ctlchn
-ctrl14
-ctrl21
-ctrl7
-ctrlinit
-ctrlpreset
-ctrlprint
-ctrlprintpresets
-ctrlsave
-ctrlselect
-cuserrnd
-dam
-date
-dates
-db
-dbamp
-dbfsamp
-dcblock
-dcblock2
-dconv
-dct
-dctinv
-deinterleave
-delay
-delay1
-delayk
-delayr
-delayw
-deltap
-deltap3
-deltapi
-deltapn
-deltapx
-deltapxw
-denorm
-diff
-diode_ladder
-directory
-diskgrain
-diskin
-diskin2
-dispfft
-display
-distort
-distort1
-divz
-doppler
-dot
-downsamp
-dripwater
-dssiactivate
-dssiaudio
-dssictls
-dssiinit
-dssilist
-dumpk
-dumpk2
-dumpk3
-dumpk4
-duserrnd
-dust
-dust2
-elapsedcycles
-elapsedtime
-envlpx
-envlpxr
-ephasor
-eqfil
-evalstr
-event
-event_i
-eventcycles
-eventtime
-exciter
-exitnow
-exp
-expcurve
-expon
-exprand
-exprandi
-expseg
-expsega
-expsegb
-expsegba
-expsegr
-fareylen
-fareyleni
-faustaudio
-faustcompile
-faustctl
-faustdsp
-faustgen
-faustplay
-fft
-fftinv
-ficlose
-filebit
-filelen
-filenchnls
-filepeak
-filescal
-filesr
-filevalid
-fillarray
-filter2
-fin
-fini
-fink
-fiopen
-flanger
-flashtxt
-flooper
-flooper2
-floor
-fluidAllOut
-fluidCCi
-fluidCCk
-fluidControl
-fluidEngine
-fluidInfo
-fluidLoad
-fluidNote
-fluidOut
-fluidProgramSelect
-fluidSetInterpMethod
-fmanal
-fmax
-fmb3
-fmbell
-fmin
-fmmetal
-fmod
-fmpercfl
-fmrhode
-fmvoice
-fmwurlie
-fof
-fof2
-fofilter
-fog
-fold
-follow
-follow2
-foscil
-foscili
-fout
-fouti
-foutir
-foutk
-fprintks
-fprints
-frac
-fractalnoise
-framebuffer
-freeverb
-ftaudio
-ftchnls
-ftconv
-ftcps
-ftexists
-ftfree
-ftgen
-ftgenonce
-ftgentmp
-ftlen
-ftload
-ftloadk
-ftlptim
-ftmorf
-ftom
-ftprint
-ftresize
-ftresizei
-ftsamplebank
-ftsave
-ftsavek
-ftset
-ftslice
-ftslicei
-ftsr
-gain
-gainslider
-gauss
-gaussi
-gausstrig
-gbuzz
-genarray
-genarray_i
-gendy
-gendyc
-gendyx
-getcfg
-getcol
-getftargs
-getrow
-getseed
-gogobel
-grain
-grain2
-grain3
-granule
-gtadsr
-gtf
-guiro
-harmon
-harmon2
-harmon3
-harmon4
-hdf5read
-hdf5write
-hilbert
-hilbert2
-hrtfearly
-hrtfmove
-hrtfmove2
-hrtfreverb
-hrtfstat
-hsboscil
-hvs1
-hvs2
-hvs3
-hypot
-i
-ihold
-imagecreate
-imagefree
-imagegetpixel
-imageload
-imagesave
-imagesetpixel
-imagesize
-in
-in32
-inch
-inh
-init
-initc14
-initc21
-initc7
-inleta
-inletf
-inletk
-inletkid
-inletv
-ino
-inq
-inrg
-ins
-insglobal
-insremot
-int
-integ
-interleave
-interp
-invalue
-inx
-inz
-jacktransport
-jitter
-jitter2
-joystick
-jspline
-k
-la_i_add_mc
-la_i_add_mr
-la_i_add_vc
-la_i_add_vr
-la_i_assign_mc
-la_i_assign_mr
-la_i_assign_t
-la_i_assign_vc
-la_i_assign_vr
-la_i_conjugate_mc
-la_i_conjugate_mr
-la_i_conjugate_vc
-la_i_conjugate_vr
-la_i_distance_vc
-la_i_distance_vr
-la_i_divide_mc
-la_i_divide_mr
-la_i_divide_vc
-la_i_divide_vr
-la_i_dot_mc
-la_i_dot_mc_vc
-la_i_dot_mr
-la_i_dot_mr_vr
-la_i_dot_vc
-la_i_dot_vr
-la_i_get_mc
-la_i_get_mr
-la_i_get_vc
-la_i_get_vr
-la_i_invert_mc
-la_i_invert_mr
-la_i_lower_solve_mc
-la_i_lower_solve_mr
-la_i_lu_det_mc
-la_i_lu_det_mr
-la_i_lu_factor_mc
-la_i_lu_factor_mr
-la_i_lu_solve_mc
-la_i_lu_solve_mr
-la_i_mc_create
-la_i_mc_set
-la_i_mr_create
-la_i_mr_set
-la_i_multiply_mc
-la_i_multiply_mr
-la_i_multiply_vc
-la_i_multiply_vr
-la_i_norm1_mc
-la_i_norm1_mr
-la_i_norm1_vc
-la_i_norm1_vr
-la_i_norm_euclid_mc
-la_i_norm_euclid_mr
-la_i_norm_euclid_vc
-la_i_norm_euclid_vr
-la_i_norm_inf_mc
-la_i_norm_inf_mr
-la_i_norm_inf_vc
-la_i_norm_inf_vr
-la_i_norm_max_mc
-la_i_norm_max_mr
-la_i_print_mc
-la_i_print_mr
-la_i_print_vc
-la_i_print_vr
-la_i_qr_eigen_mc
-la_i_qr_eigen_mr
-la_i_qr_factor_mc
-la_i_qr_factor_mr
-la_i_qr_sym_eigen_mc
-la_i_qr_sym_eigen_mr
-la_i_random_mc
-la_i_random_mr
-la_i_random_vc
-la_i_random_vr
-la_i_size_mc
-la_i_size_mr
-la_i_size_vc
-la_i_size_vr
-la_i_subtract_mc
-la_i_subtract_mr
-la_i_subtract_vc
-la_i_subtract_vr
-la_i_t_assign
-la_i_trace_mc
-la_i_trace_mr
-la_i_transpose_mc
-la_i_transpose_mr
-la_i_upper_solve_mc
-la_i_upper_solve_mr
-la_i_vc_create
-la_i_vc_set
-la_i_vr_create
-la_i_vr_set
-la_k_a_assign
-la_k_add_mc
-la_k_add_mr
-la_k_add_vc
-la_k_add_vr
-la_k_assign_a
-la_k_assign_f
-la_k_assign_mc
-la_k_assign_mr
-la_k_assign_t
-la_k_assign_vc
-la_k_assign_vr
-la_k_conjugate_mc
-la_k_conjugate_mr
-la_k_conjugate_vc
-la_k_conjugate_vr
-la_k_current_f
-la_k_current_vr
-la_k_distance_vc
-la_k_distance_vr
-la_k_divide_mc
-la_k_divide_mr
-la_k_divide_vc
-la_k_divide_vr
-la_k_dot_mc
-la_k_dot_mc_vc
-la_k_dot_mr
-la_k_dot_mr_vr
-la_k_dot_vc
-la_k_dot_vr
-la_k_f_assign
-la_k_get_mc
-la_k_get_mr
-la_k_get_vc
-la_k_get_vr
-la_k_invert_mc
-la_k_invert_mr
-la_k_lower_solve_mc
-la_k_lower_solve_mr
-la_k_lu_det_mc
-la_k_lu_det_mr
-la_k_lu_factor_mc
-la_k_lu_factor_mr
-la_k_lu_solve_mc
-la_k_lu_solve_mr
-la_k_mc_set
-la_k_mr_set
-la_k_multiply_mc
-la_k_multiply_mr
-la_k_multiply_vc
-la_k_multiply_vr
-la_k_norm1_mc
-la_k_norm1_mr
-la_k_norm1_vc
-la_k_norm1_vr
-la_k_norm_euclid_mc
-la_k_norm_euclid_mr
-la_k_norm_euclid_vc
-la_k_norm_euclid_vr
-la_k_norm_inf_mc
-la_k_norm_inf_mr
-la_k_norm_inf_vc
-la_k_norm_inf_vr
-la_k_norm_max_mc
-la_k_norm_max_mr
-la_k_qr_eigen_mc
-la_k_qr_eigen_mr
-la_k_qr_factor_mc
-la_k_qr_factor_mr
-la_k_qr_sym_eigen_mc
-la_k_qr_sym_eigen_mr
-la_k_random_mc
-la_k_random_mr
-la_k_random_vc
-la_k_random_vr
-la_k_subtract_mc
-la_k_subtract_mr
-la_k_subtract_vc
-la_k_subtract_vr
-la_k_t_assign
-la_k_trace_mc
-la_k_trace_mr
-la_k_upper_solve_mc
-la_k_upper_solve_mr
-la_k_vc_set
-la_k_vr_set
-lag
-lagud
-lastcycle
-lenarray
-lfo
-lfsr
-limit
-limit1
-lincos
-line
-linen
-linenr
-lineto
-link_beat_force
-link_beat_get
-link_beat_request
-link_create
-link_enable
-link_is_enabled
-link_metro
-link_peers
-link_tempo_get
-link_tempo_set
-linlin
-linrand
-linseg
-linsegb
-linsegr
-liveconv
-locsend
-locsig
-log
-log10
-log2
-logbtwo
-logcurve
-loopseg
-loopsegp
-looptseg
-loopxseg
-lorenz
-loscil
-loscil3
-loscil3phs
-loscilphs
-loscilx
-lowpass2
-lowres
-lowresx
-lpcanal
-lpcfilter
-lpf18
-lpform
-lpfreson
-lphasor
-lpinterp
-lposcil
-lposcil3
-lposcila
-lposcilsa
-lposcilsa2
-lpread
-lpreson
-lpshold
-lpsholdp
-lpslot
-lufs
-mac
-maca
-madsr
-mags
-mandel
-mandol
-maparray
-maparray_i
-marimba
-massign
-max
-max_k
-maxabs
-maxabsaccum
-maxaccum
-maxalloc
-maxarray
-mclock
-mdelay
-median
-mediank
-metro
-metro2
-metrobpm
-mfb
-midglobal
-midiarp
-midic14
-midic21
-midic7
-midichannelaftertouch
-midichn
-midicontrolchange
-midictrl
-mididefault
-midifilestatus
-midiin
-midinoteoff
-midinoteoncps
-midinoteonkey
-midinoteonoct
-midinoteonpch
-midion
-midion2
-midiout
-midiout_i
-midipgm
-midipitchbend
-midipolyaftertouch
-midiprogramchange
-miditempo
-midremot
-min
-minabs
-minabsaccum
-minaccum
-minarray
-mincer
-mirror
-mode
-modmatrix
-monitor
-moog
-moogladder
-moogladder2
-moogvcf
-moogvcf2
-moscil
-mp3bitrate
-mp3in
-mp3len
-mp3nchnls
-mp3out
-mp3scal
-mp3sr
-mpulse
-mrtmsg
-ms2st
-mtof
-mton
-multitap
-mute
-mvchpf
-mvclpf1
-mvclpf2
-mvclpf3
-mvclpf4
-mvmfilter
-mxadsr
-nchnls_hw
-nestedap
-nlalp
-nlfilt
-nlfilt2
-noise
-noteoff
-noteon
-noteondur
-noteondur2
-notnum
-nreverb
-nrpn
-nsamp
-nstance
-nstrnum
-nstrstr
-ntof
-ntom
-ntrpol
-nxtpow2
-octave
-octcps
-octmidi
-octmidib
-octmidinn
-octpch
-olabuffer
-oscbnk
-oscil
-oscil1
-oscil1i
-oscil3
-oscili
-oscilikt
-osciliktp
-oscilikts
-osciln
-oscils
-oscilx
-out
-out32
-outall
-outc
-outch
-outh
-outiat
-outic
-outic14
-outipat
-outipb
-outipc
-outkat
-outkc
-outkc14
-outkpat
-outkpb
-outkpc
-outleta
-outletf
-outletk
-outletkid
-outletv
-outo
-outq
-outq1
-outq2
-outq3
-outq4
-outrg
-outs
-outs1
-outs2
-outvalue
-outx
-outz
-p
-p5gconnect
-p5gdata
-pan
-pan2
-pareq
-part2txt
-partials
-partikkel
-partikkelget
-partikkelset
-partikkelsync
-passign
-paulstretch
-pcauchy
-pchbend
-pchmidi
-pchmidib
-pchmidinn
-pchoct
-pchtom
-pconvolve
-pcount
-pdclip
-pdhalf
-pdhalfy
-peak
-pgmassign
-pgmchn
-phaser1
-phaser2
-phasor
-phasorbnk
-phs
-pindex
-pinker
-pinkish
-pitch
-pitchac
-pitchamdf
-planet
-platerev
-plltrack
-pluck
-poisson
-pol2rect
-polyaft
-polynomial
-port
-portk
-poscil
-poscil3
-pow
-powershape
-powoftwo
-pows
-prealloc
-prepiano
-print
-print_type
-printarray
-printf
-printf_i
-printk
-printk2
-printks
-printks2
-println
-prints
-printsk
-product
-pset
-ptablew
-ptrack
-puts
-pvadd
-pvbufread
-pvcross
-pvinterp
-pvoc
-pvread
-pvs2array
-pvs2tab
-pvsadsyn
-pvsanal
-pvsarp
-pvsbandp
-pvsbandr
-pvsbandwidth
-pvsbin
-pvsblur
-pvsbuffer
-pvsbufread
-pvsbufread2
-pvscale
-pvscent
-pvsceps
-pvscfs
-pvscross
-pvsdemix
-pvsdiskin
-pvsdisp
-pvsenvftw
-pvsfilter
-pvsfread
-pvsfreeze
-pvsfromarray
-pvsftr
-pvsftw
-pvsfwrite
-pvsgain
-pvsgendy
-pvshift
-pvsifd
-pvsin
-pvsinfo
-pvsinit
-pvslock
-pvslpc
-pvsmaska
-pvsmix
-pvsmooth
-pvsmorph
-pvsosc
-pvsout
-pvspitch
-pvstanal
-pvstencil
-pvstrace
-pvsvoc
-pvswarp
-pvsynth
-pwd
-pyassign
-pyassigni
-pyassignt
-pycall
-pycall1
-pycall1i
-pycall1t
-pycall2
-pycall2i
-pycall2t
-pycall3
-pycall3i
-pycall3t
-pycall4
-pycall4i
-pycall4t
-pycall5
-pycall5i
-pycall5t
-pycall6
-pycall6i
-pycall6t
-pycall7
-pycall7i
-pycall7t
-pycall8
-pycall8i
-pycall8t
-pycalli
-pycalln
-pycallni
-pycallt
-pyeval
-pyevali
-pyevalt
-pyexec
-pyexeci
-pyexect
-pyinit
-pylassign
-pylassigni
-pylassignt
-pylcall
-pylcall1
-pylcall1i
-pylcall1t
-pylcall2
-pylcall2i
-pylcall2t
-pylcall3
-pylcall3i
-pylcall3t
-pylcall4
-pylcall4i
-pylcall4t
-pylcall5
-pylcall5i
-pylcall5t
-pylcall6
-pylcall6i
-pylcall6t
-pylcall7
-pylcall7i
-pylcall7t
-pylcall8
-pylcall8i
-pylcall8t
-pylcalli
-pylcalln
-pylcallni
-pylcallt
-pyleval
-pylevali
-pylevalt
-pylexec
-pylexeci
-pylexect
-pylrun
-pylruni
-pylrunt
-pyrun
-pyruni
-pyrunt
-qinf
-qnan
-r2c
-rand
-randc
-randh
-randi
-random
-randomh
-randomi
-rbjeq
-readclock
-readf
-readfi
-readk
-readk2
-readk3
-readk4
-readks
-readscore
-readscratch
-rect2pol
-release
-remoteport
-remove
-repluck
-reshapearray
-reson
-resonbnk
-resonk
-resonr
-resonx
-resonxk
-resony
-resonz
-resyn
-reverb
-reverb2
-reverbsc
-rewindscore
-rezzy
-rfft
-rifft
-rms
-rnd
-rnd31
-rndseed
-round
-rspline
-rtclock
-s16b14
-s32b14
-samphold
-sandpaper
-sc_lag
-sc_lagud
-sc_phasor
-sc_trig
-scale
-scale2
-scalearray
-scanhammer
-scanmap
-scans
-scansmap
-scantable
-scanu
-scanu2
-schedkwhen
-schedkwhennamed
-schedule
-schedulek
-schedwhen
-scoreline
-scoreline_i
-seed
-sekere
-select
-semitone
-sense
-sensekey
-seqtime
-seqtime2
-sequ
-sequstate
-serialBegin
-serialEnd
-serialFlush
-serialPrint
-serialRead
-serialWrite
-serialWrite_i
-setcol
-setctrl
-setksmps
-setrow
-setscorepos
-sfilist
-sfinstr
-sfinstr3
-sfinstr3m
-sfinstrm
-sfload
-sflooper
-sfpassign
-sfplay
-sfplay3
-sfplay3m
-sfplaym
-sfplist
-sfpreset
-shaker
-shiftin
-shiftout
-signum
-sin
-sinh
-sininv
-sinsyn
-skf
-sleighbells
-slicearray
-slicearray_i
-slider16
-slider16f
-slider16table
-slider16tablef
-slider32
-slider32f
-slider32table
-slider32tablef
-slider64
-slider64f
-slider64table
-slider64tablef
-slider8
-slider8f
-slider8table
-slider8tablef
-sliderKawai
-sndloop
-sndwarp
-sndwarpst
-sockrecv
-sockrecvs
-socksend
-socksends
-sorta
-sortd
-soundin
-space
-spat3d
-spat3di
-spat3dt
-spdist
-spf
-splitrig
-sprintf
-sprintfk
-spsend
-sqrt
-squinewave
-st2ms
-statevar
-sterrain
-stix
-strcat
-strcatk
-strchar
-strchark
-strcmp
-strcmpk
-strcpy
-strcpyk
-strecv
-streson
-strfromurl
-strget
-strindex
-strindexk
-string2array
-strlen
-strlenk
-strlower
-strlowerk
-strrindex
-strrindexk
-strset
-strstrip
-strsub
-strsubk
-strtod
-strtodk
-strtol
-strtolk
-strupper
-strupperk
-stsend
-subinstr
-subinstrinit
-sum
-sumarray
-svfilter
-svn
-syncgrain
-syncloop
-syncphasor
-system
-system_i
-tab
-tab2array
-tab2pvs
-tab_i
-tabifd
-table
-table3
-table3kt
-tablecopy
-tablefilter
-tablefilteri
-tablegpw
-tablei
-tableicopy
-tableigpw
-tableikt
-tableimix
-tablekt
-tablemix
-tableng
-tablera
-tableseg
-tableshuffle
-tableshufflei
-tablew
-tablewa
-tablewkt
-tablexkt
-tablexseg
-tabmorph
-tabmorpha
-tabmorphak
-tabmorphi
-tabplay
-tabrec
-tabsum
-tabw
-tabw_i
-tambourine
-tan
-tanh
-taninv
-taninv2
-tbvcf
-tempest
-tempo
-temposcal
-tempoval
-timedseq
-timeinstk
-timeinsts
-timek
-times
-tival
-tlineto
-tone
-tonek
-tonex
-tradsyn
-trandom
-transeg
-transegb
-transegr
-trcross
-trfilter
-trhighest
-trigExpseg
-trigLinseg
-trigexpseg
-trigger
-trighold
-triglinseg
-trigphasor
-trigseq
-trim
-trim_i
-trirand
-trlowest
-trmix
-trscale
-trshift
-trsplit
-turnoff
-turnoff2
-turnoff2_i
-turnoff3
-turnon
-tvconv
-unirand
-unwrap
-upsamp
-urandom
-urd
-vactrol
-vadd
-vadd_i
-vaddv
-vaddv_i
-vaget
-valpass
-vaset
-vbap
-vbapg
-vbapgmove
-vbaplsinit
-vbapmove
-vbapz
-vbapzmove
-vcella
-vclpf
-vco
-vco2
-vco2ft
-vco2ift
-vco2init
-vcomb
-vcopy
-vcopy_i
-vdel_k
-vdelay
-vdelay3
-vdelayk
-vdelayx
-vdelayxq
-vdelayxs
-vdelayxw
-vdelayxwq
-vdelayxws
-vdivv
-vdivv_i
-vecdelay
-veloc
-vexp
-vexp_i
-vexpseg
-vexpv
-vexpv_i
-vibes
-vibr
-vibrato
-vincr
-vlimit
-vlinseg
-vlowres
-vmap
-vmirror
-vmult
-vmult_i
-vmultv
-vmultv_i
-voice
-vosim
-vphaseseg
-vport
-vpow
-vpow_i
-vpowv
-vpowv_i
-vps
-vpvoc
-vrandh
-vrandi
-vsubv
-vsubv_i
-vtaba
-vtabi
-vtabk
-vtable1k
-vtablea
-vtablei
-vtablek
-vtablewa
-vtablewi
-vtablewk
-vtabwa
-vtabwi
-vtabwk
-vwrap
-waveset
-websocket
-weibull
-wgbow
-wgbowedbar
-wgbrass
-wgclar
-wgflute
-wgpluck
-wgpluck2
-wguide1
-wguide2
-wiiconnect
-wiidata
-wiirange
-wiisend
-window
-wrap
-writescratch
-wterrain
-wterrain2
-xadsr
-xin
-xout
-xtratim
-xyscale
-zacl
-zakinit
-zamod
-zar
-zarg
-zaw
-zawm
-zdf_1pole
-zdf_1pole_mode
-zdf_2pole
-zdf_2pole_mode
-zdf_ladder
-zfilter2
-zir
-ziw
-ziwm
-zkcl
-zkmod
-zkr
-zkw
-zkwm
-'''.split())
-
-DEPRECATED_OPCODES = set('''
-array
-bformdec
-bformenc
-copy2ftab
-copy2ttab
-hrtfer
-ktableseg
-lentab
-maxtab
-mintab
-pop
-pop_f
-ptable
-ptable3
-ptablei
-ptableiw
-push
-push_f
-scalet
-sndload
-soundout
-soundouts
-specaddm
-specdiff
-specdisp
-specfilt
-spechist
-specptrk
-specscal
-specsum
-spectrum
-stack
-sumtab
-tabgen
-tableiw
-tabmap
-tabmap_i
-tabslice
-tb0
-tb0_init
-tb1
-tb10
-tb10_init
-tb11
-tb11_init
-tb12
-tb12_init
-tb13
-tb13_init
-tb14
-tb14_init
-tb15
-tb15_init
-tb1_init
-tb2
-tb2_init
-tb3
-tb3_init
-tb4
-tb4_init
-tb5
-tb5_init
-tb6
-tb6_init
-tb7
-tb7_init
-tb8
-tb8_init
-tb9
-tb9_init
-vbap16
-vbap4
-vbap4move
-vbap8
-vbap8move
-xscanmap
-xscans
-xscansmap
-xscanu
-xyin
-'''.split())
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_css_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_css_builtins.py
deleted file mode 100644
index fff9924..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_css_builtins.py
+++ /dev/null
@@ -1,558 +0,0 @@
-"""
- pygments.lexers._css_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file is autogenerated by scripts/get_css_properties.py
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-_css_properties = (
- '-webkit-line-clamp',
- 'accent-color',
- 'align-content',
- 'align-items',
- 'align-self',
- 'alignment-baseline',
- 'all',
- 'animation',
- 'animation-delay',
- 'animation-direction',
- 'animation-duration',
- 'animation-fill-mode',
- 'animation-iteration-count',
- 'animation-name',
- 'animation-play-state',
- 'animation-timing-function',
- 'appearance',
- 'aspect-ratio',
- 'azimuth',
- 'backface-visibility',
- 'background',
- 'background-attachment',
- 'background-blend-mode',
- 'background-clip',
- 'background-color',
- 'background-image',
- 'background-origin',
- 'background-position',
- 'background-repeat',
- 'background-size',
- 'baseline-shift',
- 'baseline-source',
- 'block-ellipsis',
- 'block-size',
- 'block-step',
- 'block-step-align',
- 'block-step-insert',
- 'block-step-round',
- 'block-step-size',
- 'bookmark-label',
- 'bookmark-level',
- 'bookmark-state',
- 'border',
- 'border-block',
- 'border-block-color',
- 'border-block-end',
- 'border-block-end-color',
- 'border-block-end-style',
- 'border-block-end-width',
- 'border-block-start',
- 'border-block-start-color',
- 'border-block-start-style',
- 'border-block-start-width',
- 'border-block-style',
- 'border-block-width',
- 'border-bottom',
- 'border-bottom-color',
- 'border-bottom-left-radius',
- 'border-bottom-right-radius',
- 'border-bottom-style',
- 'border-bottom-width',
- 'border-boundary',
- 'border-collapse',
- 'border-color',
- 'border-end-end-radius',
- 'border-end-start-radius',
- 'border-image',
- 'border-image-outset',
- 'border-image-repeat',
- 'border-image-slice',
- 'border-image-source',
- 'border-image-width',
- 'border-inline',
- 'border-inline-color',
- 'border-inline-end',
- 'border-inline-end-color',
- 'border-inline-end-style',
- 'border-inline-end-width',
- 'border-inline-start',
- 'border-inline-start-color',
- 'border-inline-start-style',
- 'border-inline-start-width',
- 'border-inline-style',
- 'border-inline-width',
- 'border-left',
- 'border-left-color',
- 'border-left-style',
- 'border-left-width',
- 'border-radius',
- 'border-right',
- 'border-right-color',
- 'border-right-style',
- 'border-right-width',
- 'border-spacing',
- 'border-start-end-radius',
- 'border-start-start-radius',
- 'border-style',
- 'border-top',
- 'border-top-color',
- 'border-top-left-radius',
- 'border-top-right-radius',
- 'border-top-style',
- 'border-top-width',
- 'border-width',
- 'bottom',
- 'box-decoration-break',
- 'box-shadow',
- 'box-sizing',
- 'box-snap',
- 'break-after',
- 'break-before',
- 'break-inside',
- 'caption-side',
- 'caret',
- 'caret-color',
- 'caret-shape',
- 'chains',
- 'clear',
- 'clip',
- 'clip-path',
- 'clip-rule',
- 'color',
- 'color-adjust',
- 'color-interpolation-filters',
- 'color-scheme',
- 'column-count',
- 'column-fill',
- 'column-gap',
- 'column-rule',
- 'column-rule-color',
- 'column-rule-style',
- 'column-rule-width',
- 'column-span',
- 'column-width',
- 'columns',
- 'contain',
- 'contain-intrinsic-block-size',
- 'contain-intrinsic-height',
- 'contain-intrinsic-inline-size',
- 'contain-intrinsic-size',
- 'contain-intrinsic-width',
- 'container',
- 'container-name',
- 'container-type',
- 'content',
- 'content-visibility',
- 'continue',
- 'counter-increment',
- 'counter-reset',
- 'counter-set',
- 'cue',
- 'cue-after',
- 'cue-before',
- 'cursor',
- 'direction',
- 'display',
- 'dominant-baseline',
- 'elevation',
- 'empty-cells',
- 'fill',
- 'fill-break',
- 'fill-color',
- 'fill-image',
- 'fill-opacity',
- 'fill-origin',
- 'fill-position',
- 'fill-repeat',
- 'fill-rule',
- 'fill-size',
- 'filter',
- 'flex',
- 'flex-basis',
- 'flex-direction',
- 'flex-flow',
- 'flex-grow',
- 'flex-shrink',
- 'flex-wrap',
- 'float',
- 'float-defer',
- 'float-offset',
- 'float-reference',
- 'flood-color',
- 'flood-opacity',
- 'flow',
- 'flow-from',
- 'flow-into',
- 'font',
- 'font-family',
- 'font-feature-settings',
- 'font-kerning',
- 'font-language-override',
- 'font-optical-sizing',
- 'font-palette',
- 'font-size',
- 'font-size-adjust',
- 'font-stretch',
- 'font-style',
- 'font-synthesis',
- 'font-synthesis-small-caps',
- 'font-synthesis-style',
- 'font-synthesis-weight',
- 'font-variant',
- 'font-variant-alternates',
- 'font-variant-caps',
- 'font-variant-east-asian',
- 'font-variant-emoji',
- 'font-variant-ligatures',
- 'font-variant-numeric',
- 'font-variant-position',
- 'font-variation-settings',
- 'font-weight',
- 'footnote-display',
- 'footnote-policy',
- 'forced-color-adjust',
- 'gap',
- 'glyph-orientation-vertical',
- 'grid',
- 'grid-area',
- 'grid-auto-columns',
- 'grid-auto-flow',
- 'grid-auto-rows',
- 'grid-column',
- 'grid-column-end',
- 'grid-column-start',
- 'grid-row',
- 'grid-row-end',
- 'grid-row-start',
- 'grid-template',
- 'grid-template-areas',
- 'grid-template-columns',
- 'grid-template-rows',
- 'hanging-punctuation',
- 'height',
- 'hyphenate-character',
- 'hyphenate-limit-chars',
- 'hyphenate-limit-last',
- 'hyphenate-limit-lines',
- 'hyphenate-limit-zone',
- 'hyphens',
- 'image-orientation',
- 'image-rendering',
- 'image-resolution',
- 'initial-letter',
- 'initial-letter-align',
- 'initial-letter-wrap',
- 'inline-size',
- 'inline-sizing',
- 'input-security',
- 'inset',
- 'inset-block',
- 'inset-block-end',
- 'inset-block-start',
- 'inset-inline',
- 'inset-inline-end',
- 'inset-inline-start',
- 'isolation',
- 'justify-content',
- 'justify-items',
- 'justify-self',
- 'leading-trim',
- 'left',
- 'letter-spacing',
- 'lighting-color',
- 'line-break',
- 'line-clamp',
- 'line-grid',
- 'line-height',
- 'line-height-step',
- 'line-padding',
- 'line-snap',
- 'list-style',
- 'list-style-image',
- 'list-style-position',
- 'list-style-type',
- 'margin',
- 'margin-block',
- 'margin-block-end',
- 'margin-block-start',
- 'margin-bottom',
- 'margin-break',
- 'margin-inline',
- 'margin-inline-end',
- 'margin-inline-start',
- 'margin-left',
- 'margin-right',
- 'margin-top',
- 'margin-trim',
- 'marker',
- 'marker-end',
- 'marker-knockout-left',
- 'marker-knockout-right',
- 'marker-mid',
- 'marker-pattern',
- 'marker-segment',
- 'marker-side',
- 'marker-start',
- 'mask',
- 'mask-border',
- 'mask-border-mode',
- 'mask-border-outset',
- 'mask-border-repeat',
- 'mask-border-slice',
- 'mask-border-source',
- 'mask-border-width',
- 'mask-clip',
- 'mask-composite',
- 'mask-image',
- 'mask-mode',
- 'mask-origin',
- 'mask-position',
- 'mask-repeat',
- 'mask-size',
- 'mask-type',
- 'max-block-size',
- 'max-height',
- 'max-inline-size',
- 'max-lines',
- 'max-width',
- 'min-block-size',
- 'min-height',
- 'min-inline-size',
- 'min-intrinsic-sizing',
- 'min-width',
- 'mix-blend-mode',
- 'nav-down',
- 'nav-left',
- 'nav-right',
- 'nav-up',
- 'object-fit',
- 'object-overflow',
- 'object-position',
- 'object-view-box',
- 'offset',
- 'offset-anchor',
- 'offset-distance',
- 'offset-path',
- 'offset-position',
- 'offset-rotate',
- 'opacity',
- 'order',
- 'orphans',
- 'outline',
- 'outline-color',
- 'outline-offset',
- 'outline-style',
- 'outline-width',
- 'overflow',
- 'overflow-anchor',
- 'overflow-block',
- 'overflow-clip-margin',
- 'overflow-inline',
- 'overflow-wrap',
- 'overflow-x',
- 'overflow-y',
- 'overscroll-behavior',
- 'overscroll-behavior-block',
- 'overscroll-behavior-inline',
- 'overscroll-behavior-x',
- 'overscroll-behavior-y',
- 'padding',
- 'padding-block',
- 'padding-block-end',
- 'padding-block-start',
- 'padding-bottom',
- 'padding-inline',
- 'padding-inline-end',
- 'padding-inline-start',
- 'padding-left',
- 'padding-right',
- 'padding-top',
- 'page',
- 'page-break-after',
- 'page-break-before',
- 'page-break-inside',
- 'pause',
- 'pause-after',
- 'pause-before',
- 'perspective',
- 'perspective-origin',
- 'pitch',
- 'pitch-range',
- 'place-content',
- 'place-items',
- 'place-self',
- 'play-during',
- 'pointer-events',
- 'position',
- 'print-color-adjust',
- 'property-name',
- 'quotes',
- 'region-fragment',
- 'resize',
- 'rest',
- 'rest-after',
- 'rest-before',
- 'richness',
- 'right',
- 'rotate',
- 'row-gap',
- 'ruby-align',
- 'ruby-merge',
- 'ruby-overhang',
- 'ruby-position',
- 'running',
- 'scale',
- 'scroll-behavior',
- 'scroll-margin',
- 'scroll-margin-block',
- 'scroll-margin-block-end',
- 'scroll-margin-block-start',
- 'scroll-margin-bottom',
- 'scroll-margin-inline',
- 'scroll-margin-inline-end',
- 'scroll-margin-inline-start',
- 'scroll-margin-left',
- 'scroll-margin-right',
- 'scroll-margin-top',
- 'scroll-padding',
- 'scroll-padding-block',
- 'scroll-padding-block-end',
- 'scroll-padding-block-start',
- 'scroll-padding-bottom',
- 'scroll-padding-inline',
- 'scroll-padding-inline-end',
- 'scroll-padding-inline-start',
- 'scroll-padding-left',
- 'scroll-padding-right',
- 'scroll-padding-top',
- 'scroll-snap-align',
- 'scroll-snap-stop',
- 'scroll-snap-type',
- 'scrollbar-color',
- 'scrollbar-gutter',
- 'scrollbar-width',
- 'shape-image-threshold',
- 'shape-inside',
- 'shape-margin',
- 'shape-outside',
- 'spatial-navigation-action',
- 'spatial-navigation-contain',
- 'spatial-navigation-function',
- 'speak',
- 'speak-as',
- 'speak-header',
- 'speak-numeral',
- 'speak-punctuation',
- 'speech-rate',
- 'stress',
- 'string-set',
- 'stroke',
- 'stroke-align',
- 'stroke-alignment',
- 'stroke-break',
- 'stroke-color',
- 'stroke-dash-corner',
- 'stroke-dash-justify',
- 'stroke-dashadjust',
- 'stroke-dasharray',
- 'stroke-dashcorner',
- 'stroke-dashoffset',
- 'stroke-image',
- 'stroke-linecap',
- 'stroke-linejoin',
- 'stroke-miterlimit',
- 'stroke-opacity',
- 'stroke-origin',
- 'stroke-position',
- 'stroke-repeat',
- 'stroke-size',
- 'stroke-width',
- 'tab-size',
- 'table-layout',
- 'text-align',
- 'text-align-all',
- 'text-align-last',
- 'text-combine-upright',
- 'text-decoration',
- 'text-decoration-color',
- 'text-decoration-line',
- 'text-decoration-skip',
- 'text-decoration-skip-box',
- 'text-decoration-skip-ink',
- 'text-decoration-skip-inset',
- 'text-decoration-skip-self',
- 'text-decoration-skip-spaces',
- 'text-decoration-style',
- 'text-decoration-thickness',
- 'text-edge',
- 'text-emphasis',
- 'text-emphasis-color',
- 'text-emphasis-position',
- 'text-emphasis-skip',
- 'text-emphasis-style',
- 'text-group-align',
- 'text-indent',
- 'text-justify',
- 'text-orientation',
- 'text-overflow',
- 'text-shadow',
- 'text-space-collapse',
- 'text-space-trim',
- 'text-spacing',
- 'text-transform',
- 'text-underline-offset',
- 'text-underline-position',
- 'text-wrap',
- 'top',
- 'transform',
- 'transform-box',
- 'transform-origin',
- 'transform-style',
- 'transition',
- 'transition-delay',
- 'transition-duration',
- 'transition-property',
- 'transition-timing-function',
- 'translate',
- 'unicode-bidi',
- 'user-select',
- 'vertical-align',
- 'visibility',
- 'voice-balance',
- 'voice-duration',
- 'voice-family',
- 'voice-pitch',
- 'voice-range',
- 'voice-rate',
- 'voice-stress',
- 'voice-volume',
- 'volume',
- 'white-space',
- 'widows',
- 'width',
- 'will-change',
- 'word-boundary-detection',
- 'word-boundary-expansion',
- 'word-break',
- 'word-spacing',
- 'word-wrap',
- 'wrap-after',
- 'wrap-before',
- 'wrap-flow',
- 'wrap-inside',
- 'wrap-through',
- 'writing-mode',
- 'z-index',
-) \ No newline at end of file
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_julia_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_julia_builtins.py
deleted file mode 100644
index 5d3c96f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_julia_builtins.py
+++ /dev/null
@@ -1,411 +0,0 @@
-"""
- pygments.lexers._julia_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Julia builtins.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# operators
-# see https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
-# Julia v1.6.0-rc1
-OPERATORS_LIST = [
- # other
- '->',
- # prec-assignment
- ':=', '$=',
- # prec-conditional, prec-lazy-or, prec-lazy-and
- '?', '||', '&&',
- # prec-colon
- ':',
- # prec-plus
- '$',
- # prec-decl
- '::',
-]
-DOTTED_OPERATORS_LIST = [
- # prec-assignment
- r'=', r'+=', r'-=', r'*=', r'/=', r'//=', r'\=', r'^=', r'÷=', r'%=', r'<<=',
- r'>>=', r'>>>=', r'|=', r'&=', r'⊻=', r'≔', r'⩴', r"≕'", r'~',
- # prec-pair
- '=>',
- # prec-arrow
- r'→', r'↔', r'↚', r'↛', r'↞', r'↠', r'↢', r'↣', r'↦', r'↤', r'↮', r'⇎', r'⇍', r'⇏',
- r'⇐', r'⇒', r'⇔', r'⇴', r'⇶', r'⇷', r'⇸', r'⇹', r'⇺', r'⇻', r'⇼', r'⇽', r'⇾', r'⇿',
- r'⟵', r'⟶', r'⟷', r'⟹', r'⟺', r'⟻', r'⟼', r'⟽', r'⟾', r'⟿', r'⤀', r'⤁', r'⤂', r'⤃',
- r'⤄', r'⤅', r'⤆', r'⤇', r'⤌', r'⤍', r'⤎', r'⤏', r'⤐', r'⤑', r'⤔', r'⤕', r'⤖', r'⤗',
- r'⤘', r'⤝', r'⤞', r'⤟', r'⤠', r'⥄', r'⥅', r'⥆', r'⥇', r'⥈', r'⥊', r'⥋', r'⥎', r'⥐',
- r'⥒', r'⥓', r'⥖', r'⥗', r'⥚', r'⥛', r'⥞', r'⥟', r'⥢', r'⥤', r'⥦', r'⥧', r'⥨', r'⥩',
- r'⥪', r'⥫', r'⥬', r'⥭', r'⥰', r'⧴', r'⬱', r'⬰', r'⬲', r'⬳', r'⬴', r'⬵', r'⬶', r'⬷',
- r'⬸', r'⬹', r'⬺', r'⬻', r'⬼', r'⬽', r'⬾', r'⬿', r'⭀', r'⭁', r'⭂', r'⭃', r'⭄', r'⭇',
- r'⭈', r'⭉', r'⭊', r'⭋', r'⭌', r'←', r'→', r'⇜', r'⇝', r'↜', r'↝', r'↩', r'↪', r'↫',
- r'↬', r'↼', r'↽', r'⇀', r'⇁', r'⇄', r'⇆', r'⇇', r'⇉', r'⇋', r'⇌', r'⇚', r'⇛', r'⇠',
- r'⇢', r'↷', r'↶', r'↺', r'↻', r'-->', r'<--', r'<-->',
- # prec-comparison
- r'>', r'<', r'>=', r'≥', r'<=', r'≤', r'==', r'===', r'≡', r'!=', r'≠', r'!==',
- r'≢', r'∈', r'∉', r'∋', r'∌', r'⊆', r'⊈', r'⊂', r'⊄', r'⊊', r'∝', r'∊', r'∍', r'∥',
- r'∦', r'∷', r'∺', r'∻', r'∽', r'∾', r'≁', r'≃', r'≂', r'≄', r'≅', r'≆', r'≇', r'≈',
- r'≉', r'≊', r'≋', r'≌', r'≍', r'≎', r'≐', r'≑', r'≒', r'≓', r'≖', r'≗', r'≘', r'≙',
- r'≚', r'≛', r'≜', r'≝', r'≞', r'≟', r'≣', r'≦', r'≧', r'≨', r'≩', r'≪', r'≫', r'≬',
- r'≭', r'≮', r'≯', r'≰', r'≱', r'≲', r'≳', r'≴', r'≵', r'≶', r'≷', r'≸', r'≹', r'≺',
- r'≻', r'≼', r'≽', r'≾', r'≿', r'⊀', r'⊁', r'⊃', r'⊅', r'⊇', r'⊉', r'⊋', r'⊏', r'⊐',
- r'⊑', r'⊒', r'⊜', r'⊩', r'⊬', r'⊮', r'⊰', r'⊱', r'⊲', r'⊳', r'⊴', r'⊵', r'⊶', r'⊷',
- r'⋍', r'⋐', r'⋑', r'⋕', r'⋖', r'⋗', r'⋘', r'⋙', r'⋚', r'⋛', r'⋜', r'⋝', r'⋞', r'⋟',
- r'⋠', r'⋡', r'⋢', r'⋣', r'⋤', r'⋥', r'⋦', r'⋧', r'⋨', r'⋩', r'⋪', r'⋫', r'⋬', r'⋭',
- r'⋲', r'⋳', r'⋴', r'⋵', r'⋶', r'⋷', r'⋸', r'⋹', r'⋺', r'⋻', r'⋼', r'⋽', r'⋾', r'⋿',
- r'⟈', r'⟉', r'⟒', r'⦷', r'⧀', r'⧁', r'⧡', r'⧣', r'⧤', r'⧥', r'⩦', r'⩧', r'⩪', r'⩫',
- r'⩬', r'⩭', r'⩮', r'⩯', r'⩰', r'⩱', r'⩲', r'⩳', r'⩵', r'⩶', r'⩷', r'⩸', r'⩹', r'⩺',
- r'⩻', r'⩼', r'⩽', r'⩾', r'⩿', r'⪀', r'⪁', r'⪂', r'⪃', r'⪄', r'⪅', r'⪆', r'⪇', r'⪈',
- r'⪉', r'⪊', r'⪋', r'⪌', r'⪍', r'⪎', r'⪏', r'⪐', r'⪑', r'⪒', r'⪓', r'⪔', r'⪕', r'⪖',
- r'⪗', r'⪘', r'⪙', r'⪚', r'⪛', r'⪜', r'⪝', r'⪞', r'⪟', r'⪠', r'⪡', r'⪢', r'⪣', r'⪤',
- r'⪥', r'⪦', r'⪧', r'⪨', r'⪩', r'⪪', r'⪫', r'⪬', r'⪭', r'⪮', r'⪯', r'⪰', r'⪱', r'⪲',
- r'⪳', r'⪴', r'⪵', r'⪶', r'⪷', r'⪸', r'⪹', r'⪺', r'⪻', r'⪼', r'⪽', r'⪾', r'⪿', r'⫀',
- r'⫁', r'⫂', r'⫃', r'⫄', r'⫅', r'⫆', r'⫇', r'⫈', r'⫉', r'⫊', r'⫋', r'⫌', r'⫍', r'⫎',
- r'⫏', r'⫐', r'⫑', r'⫒', r'⫓', r'⫔', r'⫕', r'⫖', r'⫗', r'⫘', r'⫙', r'⫷', r'⫸', r'⫹',
- r'⫺', r'⊢', r'⊣', r'⟂', r'<:', r'>:',
- # prec-pipe
- '<|', '|>',
- # prec-colon
- r'…', r'⁝', r'⋮', r'⋱', r'⋰', r'⋯',
- # prec-plus
- r'+', r'-', r'¦', r'|', r'⊕', r'⊖', r'⊞', r'⊟', r'++', r'∪', r'∨', r'⊔', r'±', r'∓',
- r'∔', r'∸', r'≏', r'⊎', r'⊻', r'⊽', r'⋎', r'⋓', r'⧺', r'⧻', r'⨈', r'⨢', r'⨣', r'⨤',
- r'⨥', r'⨦', r'⨧', r'⨨', r'⨩', r'⨪', r'⨫', r'⨬', r'⨭', r'⨮', r'⨹', r'⨺', r'⩁', r'⩂',
- r'⩅', r'⩊', r'⩌', r'⩏', r'⩐', r'⩒', r'⩔', r'⩖', r'⩗', r'⩛', r'⩝', r'⩡', r'⩢', r'⩣',
- # prec-times
- r'*', r'/', r'⌿', r'÷', r'%', r'&', r'⋅', r'∘', r'×', '\\', r'∩', r'∧', r'⊗', r'⊘',
- r'⊙', r'⊚', r'⊛', r'⊠', r'⊡', r'⊓', r'∗', r'∙', r'∤', r'⅋', r'≀', r'⊼', r'⋄', r'⋆',
- r'⋇', r'⋉', r'⋊', r'⋋', r'⋌', r'⋏', r'⋒', r'⟑', r'⦸', r'⦼', r'⦾', r'⦿', r'⧶', r'⧷',
- r'⨇', r'⨰', r'⨱', r'⨲', r'⨳', r'⨴', r'⨵', r'⨶', r'⨷', r'⨸', r'⨻', r'⨼', r'⨽', r'⩀',
- r'⩃', r'⩄', r'⩋', r'⩍', r'⩎', r'⩑', r'⩓', r'⩕', r'⩘', r'⩚', r'⩜', r'⩞', r'⩟', r'⩠',
- r'⫛', r'⊍', r'▷', r'⨝', r'⟕', r'⟖', r'⟗', r'⨟',
- # prec-rational, prec-bitshift
- '//', '>>', '<<', '>>>',
- # prec-power
- r'^', r'↑', r'↓', r'⇵', r'⟰', r'⟱', r'⤈', r'⤉', r'⤊', r'⤋', r'⤒', r'⤓', r'⥉', r'⥌',
- r'⥍', r'⥏', r'⥑', r'⥔', r'⥕', r'⥘', r'⥙', r'⥜', r'⥝', r'⥠', r'⥡', r'⥣', r'⥥', r'⥮',
- r'⥯', r'↑', r'↓',
- # unary-ops, excluding unary-and-binary-ops
- '!', r'¬', r'√', r'∛', r'∜'
-]
-
-# Generated with the following in Julia v1.6.0-rc1
-'''
-#!/usr/bin/env julia
-
-import REPL.REPLCompletions
-res = String["in", "isa", "where"]
-for kw in collect(x.keyword for x in REPLCompletions.complete_keyword(""))
- if !(contains(kw, " ") || kw == "struct")
- push!(res, kw)
- end
-end
-sort!(unique!(setdiff!(res, ["true", "false"])))
-foreach(x -> println("\'", x, "\',"), res)
-'''
-KEYWORD_LIST = (
- 'baremodule',
- 'begin',
- 'break',
- 'catch',
- 'ccall',
- 'const',
- 'continue',
- 'do',
- 'else',
- 'elseif',
- 'end',
- 'export',
- 'finally',
- 'for',
- 'function',
- 'global',
- 'if',
- 'import',
- 'in',
- 'isa',
- 'let',
- 'local',
- 'macro',
- 'module',
- 'quote',
- 'return',
- 'try',
- 'using',
- 'where',
- 'while',
-)
-
-# Generated with the following in Julia v1.6.0-rc1
-'''
-#!/usr/bin/env julia
-
-import REPL.REPLCompletions
-res = String[]
-for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
- REPLCompletions.completions("", 0)[1])
- try
- v = eval(Symbol(compl.mod))
- if (v isa Type || v isa TypeVar) && (compl.mod != "=>")
- push!(res, compl.mod)
- end
- catch e
- end
-end
-sort!(unique!(res))
-foreach(x -> println("\'", x, "\',"), res)
-'''
-BUILTIN_LIST = (
- 'AbstractArray',
- 'AbstractChannel',
- 'AbstractChar',
- 'AbstractDict',
- 'AbstractDisplay',
- 'AbstractFloat',
- 'AbstractIrrational',
- 'AbstractMatch',
- 'AbstractMatrix',
- 'AbstractPattern',
- 'AbstractRange',
- 'AbstractSet',
- 'AbstractString',
- 'AbstractUnitRange',
- 'AbstractVecOrMat',
- 'AbstractVector',
- 'Any',
- 'ArgumentError',
- 'Array',
- 'AssertionError',
- 'BigFloat',
- 'BigInt',
- 'BitArray',
- 'BitMatrix',
- 'BitSet',
- 'BitVector',
- 'Bool',
- 'BoundsError',
- 'CapturedException',
- 'CartesianIndex',
- 'CartesianIndices',
- 'Cchar',
- 'Cdouble',
- 'Cfloat',
- 'Channel',
- 'Char',
- 'Cint',
- 'Cintmax_t',
- 'Clong',
- 'Clonglong',
- 'Cmd',
- 'Colon',
- 'Complex',
- 'ComplexF16',
- 'ComplexF32',
- 'ComplexF64',
- 'ComposedFunction',
- 'CompositeException',
- 'Condition',
- 'Cptrdiff_t',
- 'Cshort',
- 'Csize_t',
- 'Cssize_t',
- 'Cstring',
- 'Cuchar',
- 'Cuint',
- 'Cuintmax_t',
- 'Culong',
- 'Culonglong',
- 'Cushort',
- 'Cvoid',
- 'Cwchar_t',
- 'Cwstring',
- 'DataType',
- 'DenseArray',
- 'DenseMatrix',
- 'DenseVecOrMat',
- 'DenseVector',
- 'Dict',
- 'DimensionMismatch',
- 'Dims',
- 'DivideError',
- 'DomainError',
- 'EOFError',
- 'Enum',
- 'ErrorException',
- 'Exception',
- 'ExponentialBackOff',
- 'Expr',
- 'Float16',
- 'Float32',
- 'Float64',
- 'Function',
- 'GlobalRef',
- 'HTML',
- 'IO',
- 'IOBuffer',
- 'IOContext',
- 'IOStream',
- 'IdDict',
- 'IndexCartesian',
- 'IndexLinear',
- 'IndexStyle',
- 'InexactError',
- 'InitError',
- 'Int',
- 'Int128',
- 'Int16',
- 'Int32',
- 'Int64',
- 'Int8',
- 'Integer',
- 'InterruptException',
- 'InvalidStateException',
- 'Irrational',
- 'KeyError',
- 'LinRange',
- 'LineNumberNode',
- 'LinearIndices',
- 'LoadError',
- 'MIME',
- 'Matrix',
- 'Method',
- 'MethodError',
- 'Missing',
- 'MissingException',
- 'Module',
- 'NTuple',
- 'NamedTuple',
- 'Nothing',
- 'Number',
- 'OrdinalRange',
- 'OutOfMemoryError',
- 'OverflowError',
- 'Pair',
- 'PartialQuickSort',
- 'PermutedDimsArray',
- 'Pipe',
- 'ProcessFailedException',
- 'Ptr',
- 'QuoteNode',
- 'Rational',
- 'RawFD',
- 'ReadOnlyMemoryError',
- 'Real',
- 'ReentrantLock',
- 'Ref',
- 'Regex',
- 'RegexMatch',
- 'RoundingMode',
- 'SegmentationFault',
- 'Set',
- 'Signed',
- 'Some',
- 'StackOverflowError',
- 'StepRange',
- 'StepRangeLen',
- 'StridedArray',
- 'StridedMatrix',
- 'StridedVecOrMat',
- 'StridedVector',
- 'String',
- 'StringIndexError',
- 'SubArray',
- 'SubString',
- 'SubstitutionString',
- 'Symbol',
- 'SystemError',
- 'Task',
- 'TaskFailedException',
- 'Text',
- 'TextDisplay',
- 'Timer',
- 'Tuple',
- 'Type',
- 'TypeError',
- 'TypeVar',
- 'UInt',
- 'UInt128',
- 'UInt16',
- 'UInt32',
- 'UInt64',
- 'UInt8',
- 'UndefInitializer',
- 'UndefKeywordError',
- 'UndefRefError',
- 'UndefVarError',
- 'Union',
- 'UnionAll',
- 'UnitRange',
- 'Unsigned',
- 'Val',
- 'Vararg',
- 'VecElement',
- 'VecOrMat',
- 'Vector',
- 'VersionNumber',
- 'WeakKeyDict',
- 'WeakRef',
-)
-
-# Generated with the following in Julia v1.6.0-rc1
-'''
-#!/usr/bin/env julia
-
-import REPL.REPLCompletions
-res = String["true", "false"]
-for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
- REPLCompletions.completions("", 0)[1])
- try
- v = eval(Symbol(compl.mod))
- if !(v isa Function || v isa Type || v isa TypeVar || v isa Module || v isa Colon)
- push!(res, compl.mod)
- end
- catch e
- end
-end
-sort!(unique!(res))
-foreach(x -> println("\'", x, "\',"), res)
-'''
-LITERAL_LIST = (
- 'ARGS',
- 'C_NULL',
- 'DEPOT_PATH',
- 'ENDIAN_BOM',
- 'ENV',
- 'Inf',
- 'Inf16',
- 'Inf32',
- 'Inf64',
- 'InsertionSort',
- 'LOAD_PATH',
- 'MergeSort',
- 'NaN',
- 'NaN16',
- 'NaN32',
- 'NaN64',
- 'PROGRAM_FILE',
- 'QuickSort',
- 'RoundDown',
- 'RoundFromZero',
- 'RoundNearest',
- 'RoundNearestTiesAway',
- 'RoundNearestTiesUp',
- 'RoundToZero',
- 'RoundUp',
- 'VERSION',
- 'devnull',
- 'false',
- 'im',
- 'missing',
- 'nothing',
- 'pi',
- 'stderr',
- 'stdin',
- 'stdout',
- 'true',
- 'undef',
- 'π',
- 'ℯ',
-)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_lasso_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_lasso_builtins.py
deleted file mode 100644
index 1fbe681..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_lasso_builtins.py
+++ /dev/null
@@ -1,5326 +0,0 @@
-"""
- pygments.lexers._lasso_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Built-in Lasso types, traits, methods, and members.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-BUILTINS = {
- 'Types': (
- 'array',
- 'atbegin',
- 'boolean',
- 'bson_iter',
- 'bson',
- 'bytes_document_body',
- 'bytes',
- 'cache_server_element',
- 'cache_server',
- 'capture',
- 'client_address',
- 'client_ip',
- 'component_container',
- 'component_render_state',
- 'component',
- 'curl',
- 'curltoken',
- 'currency',
- 'custom',
- 'data_document',
- 'database_registry',
- 'date',
- 'dateandtime',
- 'dbgp_packet',
- 'dbgp_server',
- 'debugging_stack',
- 'decimal',
- 'delve',
- 'dir',
- 'dirdesc',
- 'dns_response',
- 'document_base',
- 'document_body',
- 'document_header',
- 'dsinfo',
- 'duration',
- 'eacher',
- 'email_compose',
- 'email_parse',
- 'email_pop',
- 'email_queue_impl_base',
- 'email_queue_impl',
- 'email_smtp',
- 'email_stage_impl_base',
- 'email_stage_impl',
- 'fastcgi_each_fcgi_param',
- 'fastcgi_server',
- 'fcgi_record',
- 'fcgi_request',
- 'file',
- 'filedesc',
- 'filemaker_datasource',
- 'generateforeachkeyed',
- 'generateforeachunkeyed',
- 'generateseries',
- 'hash_map',
- 'html_atomic_element',
- 'html_attr',
- 'html_base',
- 'html_binary',
- 'html_br',
- 'html_cdata',
- 'html_container_element',
- 'html_div',
- 'html_document_body',
- 'html_document_head',
- 'html_eol',
- 'html_fieldset',
- 'html_form',
- 'html_h1',
- 'html_h2',
- 'html_h3',
- 'html_h4',
- 'html_h5',
- 'html_h6',
- 'html_hr',
- 'html_img',
- 'html_input',
- 'html_json',
- 'html_label',
- 'html_legend',
- 'html_link',
- 'html_meta',
- 'html_object',
- 'html_option',
- 'html_raw',
- 'html_script',
- 'html_select',
- 'html_span',
- 'html_style',
- 'html_table',
- 'html_td',
- 'html_text',
- 'html_th',
- 'html_tr',
- 'http_document_header',
- 'http_document',
- 'http_error',
- 'http_header_field',
- 'http_server_connection_handler_globals',
- 'http_server_connection_handler',
- 'http_server_request_logger_thread',
- 'http_server_web_connection',
- 'http_server',
- 'image',
- 'include_cache',
- 'inline_type',
- 'integer',
- 'java_jnienv',
- 'jbyte',
- 'jbytearray',
- 'jchar',
- 'jchararray',
- 'jfieldid',
- 'jfloat',
- 'jint',
- 'jmethodid',
- 'jobject',
- 'jshort',
- 'json_decode',
- 'json_encode',
- 'json_literal',
- 'json_object',
- 'keyword',
- 'lassoapp_compiledsrc_appsource',
- 'lassoapp_compiledsrc_fileresource',
- 'lassoapp_content_rep_halt',
- 'lassoapp_dirsrc_appsource',
- 'lassoapp_dirsrc_fileresource',
- 'lassoapp_installer',
- 'lassoapp_livesrc_appsource',
- 'lassoapp_livesrc_fileresource',
- 'lassoapp_long_expiring_bytes',
- 'lassoapp_manualsrc_appsource',
- 'lassoapp_zip_file_server',
- 'lassoapp_zipsrc_appsource',
- 'lassoapp_zipsrc_fileresource',
- 'ldap',
- 'library_thread_loader',
- 'list_node',
- 'list',
- 'locale',
- 'log_impl_base',
- 'log_impl',
- 'magick_image',
- 'map_node',
- 'map',
- 'memberstream',
- 'memory_session_driver_impl_entry',
- 'memory_session_driver_impl',
- 'memory_session_driver',
- 'mime_reader',
- 'mongo_client',
- 'mongo_collection',
- 'mongo_cursor',
- 'mustache_ctx',
- 'mysql_session_driver_impl',
- 'mysql_session_driver',
- 'net_named_pipe',
- 'net_tcp_ssl',
- 'net_tcp',
- 'net_udp_packet',
- 'net_udp',
- 'null',
- 'odbc_session_driver_impl',
- 'odbc_session_driver',
- 'opaque',
- 'os_process',
- 'pair_compare',
- 'pair',
- 'pairup',
- 'pdf_barcode',
- 'pdf_chunk',
- 'pdf_color',
- 'pdf_doc',
- 'pdf_font',
- 'pdf_hyphenator',
- 'pdf_image',
- 'pdf_list',
- 'pdf_paragraph',
- 'pdf_phrase',
- 'pdf_read',
- 'pdf_table',
- 'pdf_text',
- 'pdf_typebase',
- 'percent',
- 'portal_impl',
- 'queriable_groupby',
- 'queriable_grouping',
- 'queriable_groupjoin',
- 'queriable_join',
- 'queriable_orderby',
- 'queriable_orderbydescending',
- 'queriable_select',
- 'queriable_selectmany',
- 'queriable_skip',
- 'queriable_take',
- 'queriable_thenby',
- 'queriable_thenbydescending',
- 'queriable_where',
- 'queue',
- 'raw_document_body',
- 'regexp',
- 'repeat',
- 'scientific',
- 'security_registry',
- 'serialization_element',
- 'serialization_object_identity_compare',
- 'serialization_reader',
- 'serialization_writer_ref',
- 'serialization_writer_standin',
- 'serialization_writer',
- 'session_delete_expired_thread',
- 'set',
- 'signature',
- 'sourcefile',
- 'sqlite_column',
- 'sqlite_currentrow',
- 'sqlite_db',
- 'sqlite_results',
- 'sqlite_session_driver_impl_entry',
- 'sqlite_session_driver_impl',
- 'sqlite_session_driver',
- 'sqlite_table',
- 'sqlite3_stmt',
- 'sqlite3',
- 'staticarray',
- 'string',
- 'sys_process',
- 'tag',
- 'text_document',
- 'tie',
- 'timeonly',
- 'trait',
- 'tree_base',
- 'tree_node',
- 'tree_nullnode',
- 'ucal',
- 'usgcpu',
- 'usgvm',
- 'void',
- 'web_error_atend',
- 'web_node_base',
- 'web_node_content_representation_css_specialized',
- 'web_node_content_representation_html_specialized',
- 'web_node_content_representation_js_specialized',
- 'web_node_content_representation_xhr_container',
- 'web_node_echo',
- 'web_node_root',
- 'web_request_impl',
- 'web_request',
- 'web_response_impl',
- 'web_response',
- 'web_router',
- 'websocket_handler',
- 'worker_pool',
- 'xml_attr',
- 'xml_cdatasection',
- 'xml_characterdata',
- 'xml_comment',
- 'xml_document',
- 'xml_documentfragment',
- 'xml_documenttype',
- 'xml_domimplementation',
- 'xml_element',
- 'xml_entity',
- 'xml_entityreference',
- 'xml_namednodemap_attr',
- 'xml_namednodemap_ht',
- 'xml_namednodemap',
- 'xml_node',
- 'xml_nodelist',
- 'xml_notation',
- 'xml_processinginstruction',
- 'xml_text',
- 'xmlstream',
- 'zip_file_impl',
- 'zip_file',
- 'zip_impl',
- 'zip',
- ),
- 'Traits': (
- 'any',
- 'formattingbase',
- 'html_attributed',
- 'html_element_coreattrs',
- 'html_element_eventsattrs',
- 'html_element_i18nattrs',
- 'lassoapp_capabilities',
- 'lassoapp_resource',
- 'lassoapp_source',
- 'queriable_asstring',
- 'session_driver',
- 'trait_array',
- 'trait_asstring',
- 'trait_backcontractible',
- 'trait_backended',
- 'trait_backexpandable',
- 'trait_close',
- 'trait_contractible',
- 'trait_decompose_assignment',
- 'trait_doubleended',
- 'trait_each_sub',
- 'trait_encodeurl',
- 'trait_endedfullymutable',
- 'trait_expandable',
- 'trait_file',
- 'trait_finite',
- 'trait_finiteforeach',
- 'trait_foreach',
- 'trait_foreachtextelement',
- 'trait_frontcontractible',
- 'trait_frontended',
- 'trait_frontexpandable',
- 'trait_fullymutable',
- 'trait_generator',
- 'trait_generatorcentric',
- 'trait_hashable',
- 'trait_json_serialize',
- 'trait_keyed',
- 'trait_keyedfinite',
- 'trait_keyedforeach',
- 'trait_keyedmutable',
- 'trait_list',
- 'trait_map',
- 'trait_net',
- 'trait_pathcomponents',
- 'trait_positionallykeyed',
- 'trait_positionallysearchable',
- 'trait_queriable',
- 'trait_queriablelambda',
- 'trait_readbytes',
- 'trait_readstring',
- 'trait_scalar',
- 'trait_searchable',
- 'trait_serializable',
- 'trait_setencoding',
- 'trait_setoperations',
- 'trait_stack',
- 'trait_treenode',
- 'trait_writebytes',
- 'trait_writestring',
- 'trait_xml_elementcompat',
- 'trait_xml_nodecompat',
- 'web_connection',
- 'web_node_container',
- 'web_node_content_css_specialized',
- 'web_node_content_document',
- 'web_node_content_html_specialized',
- 'web_node_content_js_specialized',
- 'web_node_content_json_specialized',
- 'web_node_content_representation',
- 'web_node_content',
- 'web_node_postable',
- 'web_node',
- ),
- 'Unbound Methods': (
- 'abort_clear',
- 'abort_now',
- 'abort',
- 'action_param',
- 'action_params',
- 'action_statement',
- 'admin_authorization',
- 'admin_currentgroups',
- 'admin_currentuserid',
- 'admin_currentusername',
- 'admin_getpref',
- 'admin_initialize',
- 'admin_lassoservicepath',
- 'admin_removepref',
- 'admin_setpref',
- 'admin_userexists',
- 'all',
- 'auth_admin',
- 'auth_check',
- 'auth_custom',
- 'auth_group',
- 'auth_prompt',
- 'auth_user',
- 'bom_utf16be',
- 'bom_utf16le',
- 'bom_utf32be',
- 'bom_utf32le',
- 'bom_utf8',
- 'bw',
- 'capture_nearestloopabort',
- 'capture_nearestloopcontinue',
- 'capture_nearestloopcount',
- 'checked',
- 'cipher_decrypt_private',
- 'cipher_decrypt_public',
- 'cipher_decrypt',
- 'cipher_digest',
- 'cipher_encrypt_private',
- 'cipher_encrypt_public',
- 'cipher_encrypt',
- 'cipher_generate_key',
- 'cipher_hmac',
- 'cipher_keylength',
- 'cipher_list',
- 'cipher_open',
- 'cipher_seal',
- 'cipher_sign',
- 'cipher_verify',
- 'client_addr',
- 'client_authorization',
- 'client_browser',
- 'client_contentlength',
- 'client_contenttype',
- 'client_cookielist',
- 'client_cookies',
- 'client_encoding',
- 'client_formmethod',
- 'client_getargs',
- 'client_getparam',
- 'client_getparams',
- 'client_headers',
- 'client_integertoip',
- 'client_iptointeger',
- 'client_password',
- 'client_postargs',
- 'client_postparam',
- 'client_postparams',
- 'client_type',
- 'client_url',
- 'client_username',
- 'cn',
- 'column_name',
- 'column_names',
- 'column_type',
- 'column',
- 'compress',
- 'content_addheader',
- 'content_body',
- 'content_encoding',
- 'content_header',
- 'content_replaceheader',
- 'content_type',
- 'cookie_set',
- 'cookie',
- 'curl_easy_cleanup',
- 'curl_easy_duphandle',
- 'curl_easy_getinfo',
- 'curl_easy_init',
- 'curl_easy_reset',
- 'curl_easy_setopt',
- 'curl_easy_strerror',
- 'curl_getdate',
- 'curl_http_version_1_0',
- 'curl_http_version_1_1',
- 'curl_http_version_none',
- 'curl_ipresolve_v4',
- 'curl_ipresolve_v6',
- 'curl_ipresolve_whatever',
- 'curl_multi_perform',
- 'curl_multi_result',
- 'curl_netrc_ignored',
- 'curl_netrc_optional',
- 'curl_netrc_required',
- 'curl_sslversion_default',
- 'curl_sslversion_sslv2',
- 'curl_sslversion_sslv3',
- 'curl_sslversion_tlsv1',
- 'curl_version_asynchdns',
- 'curl_version_debug',
- 'curl_version_gssnegotiate',
- 'curl_version_idn',
- 'curl_version_info',
- 'curl_version_ipv6',
- 'curl_version_kerberos4',
- 'curl_version_largefile',
- 'curl_version_libz',
- 'curl_version_ntlm',
- 'curl_version_spnego',
- 'curl_version_ssl',
- 'curl_version',
- 'curlauth_any',
- 'curlauth_anysafe',
- 'curlauth_basic',
- 'curlauth_digest',
- 'curlauth_gssnegotiate',
- 'curlauth_none',
- 'curlauth_ntlm',
- 'curle_aborted_by_callback',
- 'curle_bad_calling_order',
- 'curle_bad_content_encoding',
- 'curle_bad_download_resume',
- 'curle_bad_function_argument',
- 'curle_bad_password_entered',
- 'curle_couldnt_connect',
- 'curle_couldnt_resolve_host',
- 'curle_couldnt_resolve_proxy',
- 'curle_failed_init',
- 'curle_file_couldnt_read_file',
- 'curle_filesize_exceeded',
- 'curle_ftp_access_denied',
- 'curle_ftp_cant_get_host',
- 'curle_ftp_cant_reconnect',
- 'curle_ftp_couldnt_get_size',
- 'curle_ftp_couldnt_retr_file',
- 'curle_ftp_couldnt_set_ascii',
- 'curle_ftp_couldnt_set_binary',
- 'curle_ftp_couldnt_use_rest',
- 'curle_ftp_port_failed',
- 'curle_ftp_quote_error',
- 'curle_ftp_ssl_failed',
- 'curle_ftp_user_password_incorrect',
- 'curle_ftp_weird_227_format',
- 'curle_ftp_weird_pass_reply',
- 'curle_ftp_weird_pasv_reply',
- 'curle_ftp_weird_server_reply',
- 'curle_ftp_weird_user_reply',
- 'curle_ftp_write_error',
- 'curle_function_not_found',
- 'curle_got_nothing',
- 'curle_http_post_error',
- 'curle_http_range_error',
- 'curle_http_returned_error',
- 'curle_interface_failed',
- 'curle_ldap_cannot_bind',
- 'curle_ldap_invalid_url',
- 'curle_ldap_search_failed',
- 'curle_library_not_found',
- 'curle_login_denied',
- 'curle_malformat_user',
- 'curle_obsolete',
- 'curle_ok',
- 'curle_operation_timeouted',
- 'curle_out_of_memory',
- 'curle_partial_file',
- 'curle_read_error',
- 'curle_recv_error',
- 'curle_send_error',
- 'curle_send_fail_rewind',
- 'curle_share_in_use',
- 'curle_ssl_cacert',
- 'curle_ssl_certproblem',
- 'curle_ssl_cipher',
- 'curle_ssl_connect_error',
- 'curle_ssl_engine_initfailed',
- 'curle_ssl_engine_notfound',
- 'curle_ssl_engine_setfailed',
- 'curle_ssl_peer_certificate',
- 'curle_telnet_option_syntax',
- 'curle_too_many_redirects',
- 'curle_unknown_telnet_option',
- 'curle_unsupported_protocol',
- 'curle_url_malformat_user',
- 'curle_url_malformat',
- 'curle_write_error',
- 'curlftpauth_default',
- 'curlftpauth_ssl',
- 'curlftpauth_tls',
- 'curlftpssl_all',
- 'curlftpssl_control',
- 'curlftpssl_last',
- 'curlftpssl_none',
- 'curlftpssl_try',
- 'curlinfo_connect_time',
- 'curlinfo_content_length_download',
- 'curlinfo_content_length_upload',
- 'curlinfo_content_type',
- 'curlinfo_effective_url',
- 'curlinfo_filetime',
- 'curlinfo_header_size',
- 'curlinfo_http_connectcode',
- 'curlinfo_httpauth_avail',
- 'curlinfo_namelookup_time',
- 'curlinfo_num_connects',
- 'curlinfo_os_errno',
- 'curlinfo_pretransfer_time',
- 'curlinfo_proxyauth_avail',
- 'curlinfo_redirect_count',
- 'curlinfo_redirect_time',
- 'curlinfo_request_size',
- 'curlinfo_response_code',
- 'curlinfo_size_download',
- 'curlinfo_size_upload',
- 'curlinfo_speed_download',
- 'curlinfo_speed_upload',
- 'curlinfo_ssl_engines',
- 'curlinfo_ssl_verifyresult',
- 'curlinfo_starttransfer_time',
- 'curlinfo_total_time',
- 'curlmsg_done',
- 'curlopt_autoreferer',
- 'curlopt_buffersize',
- 'curlopt_cainfo',
- 'curlopt_capath',
- 'curlopt_connecttimeout',
- 'curlopt_cookie',
- 'curlopt_cookiefile',
- 'curlopt_cookiejar',
- 'curlopt_cookiesession',
- 'curlopt_crlf',
- 'curlopt_customrequest',
- 'curlopt_dns_use_global_cache',
- 'curlopt_egdsocket',
- 'curlopt_encoding',
- 'curlopt_failonerror',
- 'curlopt_filetime',
- 'curlopt_followlocation',
- 'curlopt_forbid_reuse',
- 'curlopt_fresh_connect',
- 'curlopt_ftp_account',
- 'curlopt_ftp_create_missing_dirs',
- 'curlopt_ftp_response_timeout',
- 'curlopt_ftp_ssl',
- 'curlopt_ftp_use_eprt',
- 'curlopt_ftp_use_epsv',
- 'curlopt_ftpappend',
- 'curlopt_ftplistonly',
- 'curlopt_ftpport',
- 'curlopt_ftpsslauth',
- 'curlopt_header',
- 'curlopt_http_version',
- 'curlopt_http200aliases',
- 'curlopt_httpauth',
- 'curlopt_httpget',
- 'curlopt_httpheader',
- 'curlopt_httppost',
- 'curlopt_httpproxytunnel',
- 'curlopt_infilesize_large',
- 'curlopt_infilesize',
- 'curlopt_interface',
- 'curlopt_ipresolve',
- 'curlopt_krb4level',
- 'curlopt_low_speed_limit',
- 'curlopt_low_speed_time',
- 'curlopt_mail_from',
- 'curlopt_mail_rcpt',
- 'curlopt_maxconnects',
- 'curlopt_maxfilesize_large',
- 'curlopt_maxfilesize',
- 'curlopt_maxredirs',
- 'curlopt_netrc_file',
- 'curlopt_netrc',
- 'curlopt_nobody',
- 'curlopt_noprogress',
- 'curlopt_port',
- 'curlopt_post',
- 'curlopt_postfields',
- 'curlopt_postfieldsize_large',
- 'curlopt_postfieldsize',
- 'curlopt_postquote',
- 'curlopt_prequote',
- 'curlopt_proxy',
- 'curlopt_proxyauth',
- 'curlopt_proxyport',
- 'curlopt_proxytype',
- 'curlopt_proxyuserpwd',
- 'curlopt_put',
- 'curlopt_quote',
- 'curlopt_random_file',
- 'curlopt_range',
- 'curlopt_readdata',
- 'curlopt_referer',
- 'curlopt_resume_from_large',
- 'curlopt_resume_from',
- 'curlopt_ssl_cipher_list',
- 'curlopt_ssl_verifyhost',
- 'curlopt_ssl_verifypeer',
- 'curlopt_sslcert',
- 'curlopt_sslcerttype',
- 'curlopt_sslengine_default',
- 'curlopt_sslengine',
- 'curlopt_sslkey',
- 'curlopt_sslkeypasswd',
- 'curlopt_sslkeytype',
- 'curlopt_sslversion',
- 'curlopt_tcp_nodelay',
- 'curlopt_timecondition',
- 'curlopt_timeout',
- 'curlopt_timevalue',
- 'curlopt_transfertext',
- 'curlopt_unrestricted_auth',
- 'curlopt_upload',
- 'curlopt_url',
- 'curlopt_use_ssl',
- 'curlopt_useragent',
- 'curlopt_userpwd',
- 'curlopt_verbose',
- 'curlopt_writedata',
- 'curlproxy_http',
- 'curlproxy_socks4',
- 'curlproxy_socks5',
- 'database_adddefaultsqlitehost',
- 'database_database',
- 'database_initialize',
- 'database_name',
- 'database_qs',
- 'database_table_database_tables',
- 'database_table_datasource_databases',
- 'database_table_datasource_hosts',
- 'database_table_datasources',
- 'database_table_table_fields',
- 'database_util_cleanpath',
- 'dbgp_stop_stack_name',
- 'debugging_break',
- 'debugging_breakpoint_get',
- 'debugging_breakpoint_list',
- 'debugging_breakpoint_remove',
- 'debugging_breakpoint_set',
- 'debugging_breakpoint_update',
- 'debugging_context_locals',
- 'debugging_context_self',
- 'debugging_context_vars',
- 'debugging_detach',
- 'debugging_enabled',
- 'debugging_get_context',
- 'debugging_get_stack',
- 'debugging_run',
- 'debugging_step_in',
- 'debugging_step_out',
- 'debugging_step_over',
- 'debugging_stop',
- 'debugging_terminate',
- 'decimal_random',
- 'decompress',
- 'decrypt_blowfish',
- 'define_atbegin',
- 'define_atend',
- 'dns_default',
- 'dns_lookup',
- 'document',
- 'email_attachment_mime_type',
- 'email_batch',
- 'email_digestchallenge',
- 'email_digestresponse',
- 'email_extract',
- 'email_findemails',
- 'email_fix_address_list',
- 'email_fix_address',
- 'email_fs_error_clean',
- 'email_immediate',
- 'email_initialize',
- 'email_merge',
- 'email_mxlookup',
- 'email_pop_priv_extract',
- 'email_pop_priv_quote',
- 'email_pop_priv_substring',
- 'email_queue',
- 'email_result',
- 'email_safeemail',
- 'email_send',
- 'email_status',
- 'email_token',
- 'email_translatebreakstocrlf',
- 'encode_qheader',
- 'encoding_iso88591',
- 'encoding_utf8',
- 'encrypt_blowfish',
- 'encrypt_crammd5',
- 'encrypt_hmac',
- 'encrypt_md5',
- 'eol',
- 'eq',
- 'error_code_aborted',
- 'error_code_dividebyzero',
- 'error_code_filenotfound',
- 'error_code_invalidparameter',
- 'error_code_methodnotfound',
- 'error_code_networkerror',
- 'error_code_noerror',
- 'error_code_resnotfound',
- 'error_code_runtimeassertion',
- 'error_code',
- 'error_msg_aborted',
- 'error_msg_dividebyzero',
- 'error_msg_filenotfound',
- 'error_msg_invalidparameter',
- 'error_msg_methodnotfound',
- 'error_msg_networkerror',
- 'error_msg_noerror',
- 'error_msg_resnotfound',
- 'error_msg_runtimeassertion',
- 'error_msg',
- 'error_obj',
- 'error_pop',
- 'error_push',
- 'error_reset',
- 'error_stack',
- 'escape_tag',
- 'evdns_resolve_ipv4',
- 'evdns_resolve_ipv6',
- 'evdns_resolve_reverse_ipv6',
- 'evdns_resolve_reverse',
- 'ew',
- 'fail_if',
- 'fail_ifnot',
- 'fail_now',
- 'fail',
- 'failure_clear',
- 'fastcgi_createfcgirequest',
- 'fastcgi_handlecon',
- 'fastcgi_handlereq',
- 'fastcgi_initialize',
- 'fastcgi_initiate_request',
- 'fcgi_abort_request',
- 'fcgi_authorize',
- 'fcgi_begin_request',
- 'fcgi_bodychunksize',
- 'fcgi_cant_mpx_conn',
- 'fcgi_data',
- 'fcgi_end_request',
- 'fcgi_filter',
- 'fcgi_get_values_result',
- 'fcgi_get_values',
- 'fcgi_keep_conn',
- 'fcgi_makeendrequestbody',
- 'fcgi_makestdoutbody',
- 'fcgi_max_conns',
- 'fcgi_max_reqs',
- 'fcgi_mpxs_conns',
- 'fcgi_null_request_id',
- 'fcgi_overloaded',
- 'fcgi_params',
- 'fcgi_read_timeout_seconds',
- 'fcgi_readparam',
- 'fcgi_request_complete',
- 'fcgi_responder',
- 'fcgi_stderr',
- 'fcgi_stdin',
- 'fcgi_stdout',
- 'fcgi_unknown_role',
- 'fcgi_unknown_type',
- 'fcgi_version_1',
- 'fcgi_x_stdin',
- 'field_name',
- 'field_names',
- 'field',
- 'file_copybuffersize',
- 'file_defaultencoding',
- 'file_forceroot',
- 'file_modechar',
- 'file_modeline',
- 'file_stderr',
- 'file_stdin',
- 'file_stdout',
- 'file_tempfile',
- 'filemakerds_initialize',
- 'filemakerds',
- 'found_count',
- 'ft',
- 'ftp_deletefile',
- 'ftp_getdata',
- 'ftp_getfile',
- 'ftp_getlisting',
- 'ftp_putdata',
- 'ftp_putfile',
- 'full',
- 'generateforeach',
- 'gt',
- 'gte',
- 'handle_failure',
- 'handle',
- 'hash_primes',
- 'html_comment',
- 'http_char_colon',
- 'http_char_cr',
- 'http_char_htab',
- 'http_char_lf',
- 'http_char_question',
- 'http_char_space',
- 'http_default_files',
- 'http_read_headers',
- 'http_read_timeout_secs',
- 'http_server_apps_path',
- 'http_server_request_logger',
- 'if_empty',
- 'if_false',
- 'if_null',
- 'if_true',
- 'include_cache_compare',
- 'include_currentpath',
- 'include_filepath',
- 'include_localpath',
- 'include_once',
- 'include_path',
- 'include_raw',
- 'include_url',
- 'include',
- 'includes',
- 'inline_colinfo_name_pos',
- 'inline_colinfo_type_pos',
- 'inline_colinfo_valuelist_pos',
- 'inline_columninfo_pos',
- 'inline_foundcount_pos',
- 'inline_namedget',
- 'inline_namedput',
- 'inline_resultrows_pos',
- 'inline_scopeget',
- 'inline_scopepop',
- 'inline_scopepush',
- 'inline',
- 'integer_bitor',
- 'integer_random',
- 'io_dir_dt_blk',
- 'io_dir_dt_chr',
- 'io_dir_dt_dir',
- 'io_dir_dt_fifo',
- 'io_dir_dt_lnk',
- 'io_dir_dt_reg',
- 'io_dir_dt_sock',
- 'io_dir_dt_unknown',
- 'io_dir_dt_wht',
- 'io_file_access',
- 'io_file_chdir',
- 'io_file_chmod',
- 'io_file_chown',
- 'io_file_dirname',
- 'io_file_f_dupfd',
- 'io_file_f_getfd',
- 'io_file_f_getfl',
- 'io_file_f_getlk',
- 'io_file_f_rdlck',
- 'io_file_f_setfd',
- 'io_file_f_setfl',
- 'io_file_f_setlk',
- 'io_file_f_setlkw',
- 'io_file_f_test',
- 'io_file_f_tlock',
- 'io_file_f_ulock',
- 'io_file_f_unlck',
- 'io_file_f_wrlck',
- 'io_file_fd_cloexec',
- 'io_file_fioasync',
- 'io_file_fioclex',
- 'io_file_fiodtype',
- 'io_file_fiogetown',
- 'io_file_fionbio',
- 'io_file_fionclex',
- 'io_file_fionread',
- 'io_file_fiosetown',
- 'io_file_getcwd',
- 'io_file_lchown',
- 'io_file_link',
- 'io_file_lockf',
- 'io_file_lstat_atime',
- 'io_file_lstat_mode',
- 'io_file_lstat_mtime',
- 'io_file_lstat_size',
- 'io_file_mkdir',
- 'io_file_mkfifo',
- 'io_file_mkstemp',
- 'io_file_o_append',
- 'io_file_o_async',
- 'io_file_o_creat',
- 'io_file_o_excl',
- 'io_file_o_exlock',
- 'io_file_o_fsync',
- 'io_file_o_nofollow',
- 'io_file_o_nonblock',
- 'io_file_o_rdonly',
- 'io_file_o_rdwr',
- 'io_file_o_shlock',
- 'io_file_o_sync',
- 'io_file_o_trunc',
- 'io_file_o_wronly',
- 'io_file_pipe',
- 'io_file_readlink',
- 'io_file_realpath',
- 'io_file_remove',
- 'io_file_rename',
- 'io_file_rmdir',
- 'io_file_s_ifblk',
- 'io_file_s_ifchr',
- 'io_file_s_ifdir',
- 'io_file_s_ififo',
- 'io_file_s_iflnk',
- 'io_file_s_ifmt',
- 'io_file_s_ifreg',
- 'io_file_s_ifsock',
- 'io_file_s_irgrp',
- 'io_file_s_iroth',
- 'io_file_s_irusr',
- 'io_file_s_irwxg',
- 'io_file_s_irwxo',
- 'io_file_s_irwxu',
- 'io_file_s_isgid',
- 'io_file_s_isuid',
- 'io_file_s_isvtx',
- 'io_file_s_iwgrp',
- 'io_file_s_iwoth',
- 'io_file_s_iwusr',
- 'io_file_s_ixgrp',
- 'io_file_s_ixoth',
- 'io_file_s_ixusr',
- 'io_file_seek_cur',
- 'io_file_seek_end',
- 'io_file_seek_set',
- 'io_file_stat_atime',
- 'io_file_stat_mode',
- 'io_file_stat_mtime',
- 'io_file_stat_size',
- 'io_file_stderr',
- 'io_file_stdin',
- 'io_file_stdout',
- 'io_file_symlink',
- 'io_file_tempnam',
- 'io_file_truncate',
- 'io_file_umask',
- 'io_file_unlink',
- 'io_net_accept',
- 'io_net_af_inet',
- 'io_net_af_inet6',
- 'io_net_af_unix',
- 'io_net_bind',
- 'io_net_connect',
- 'io_net_getpeername',
- 'io_net_getsockname',
- 'io_net_ipproto_ip',
- 'io_net_ipproto_udp',
- 'io_net_listen',
- 'io_net_msg_oob',
- 'io_net_msg_peek',
- 'io_net_msg_waitall',
- 'io_net_recv',
- 'io_net_recvfrom',
- 'io_net_send',
- 'io_net_sendto',
- 'io_net_shut_rd',
- 'io_net_shut_rdwr',
- 'io_net_shut_wr',
- 'io_net_shutdown',
- 'io_net_so_acceptconn',
- 'io_net_so_broadcast',
- 'io_net_so_debug',
- 'io_net_so_dontroute',
- 'io_net_so_error',
- 'io_net_so_keepalive',
- 'io_net_so_linger',
- 'io_net_so_oobinline',
- 'io_net_so_rcvbuf',
- 'io_net_so_rcvlowat',
- 'io_net_so_rcvtimeo',
- 'io_net_so_reuseaddr',
- 'io_net_so_sndbuf',
- 'io_net_so_sndlowat',
- 'io_net_so_sndtimeo',
- 'io_net_so_timestamp',
- 'io_net_so_type',
- 'io_net_so_useloopback',
- 'io_net_sock_dgram',
- 'io_net_sock_raw',
- 'io_net_sock_rdm',
- 'io_net_sock_seqpacket',
- 'io_net_sock_stream',
- 'io_net_socket',
- 'io_net_sol_socket',
- 'io_net_ssl_accept',
- 'io_net_ssl_begin',
- 'io_net_ssl_connect',
- 'io_net_ssl_end',
- 'io_net_ssl_error',
- 'io_net_ssl_errorstring',
- 'io_net_ssl_funcerrorstring',
- 'io_net_ssl_liberrorstring',
- 'io_net_ssl_read',
- 'io_net_ssl_reasonerrorstring',
- 'io_net_ssl_setacceptstate',
- 'io_net_ssl_setconnectstate',
- 'io_net_ssl_setverifylocations',
- 'io_net_ssl_shutdown',
- 'io_net_ssl_usecertificatechainfile',
- 'io_net_ssl_useprivatekeyfile',
- 'io_net_ssl_write',
- 'java_jvm_create',
- 'java_jvm_getenv',
- 'jdbc_initialize',
- 'json_back_slash',
- 'json_back_space',
- 'json_close_array',
- 'json_close_object',
- 'json_colon',
- 'json_comma',
- 'json_consume_array',
- 'json_consume_object',
- 'json_consume_string',
- 'json_consume_token',
- 'json_cr',
- 'json_debug',
- 'json_deserialize',
- 'json_e_lower',
- 'json_e_upper',
- 'json_f_lower',
- 'json_form_feed',
- 'json_forward_slash',
- 'json_lf',
- 'json_n_lower',
- 'json_negative',
- 'json_open_array',
- 'json_open_object',
- 'json_period',
- 'json_positive',
- 'json_quote_double',
- 'json_rpccall',
- 'json_serialize',
- 'json_t_lower',
- 'json_tab',
- 'json_white_space',
- 'keycolumn_name',
- 'keycolumn_value',
- 'keyfield_name',
- 'keyfield_value',
- 'lasso_currentaction',
- 'lasso_errorreporting',
- 'lasso_executiontimelimit',
- 'lasso_methodexists',
- 'lasso_tagexists',
- 'lasso_uniqueid',
- 'lasso_version',
- 'lassoapp_current_app',
- 'lassoapp_current_include',
- 'lassoapp_do_with_include',
- 'lassoapp_exists',
- 'lassoapp_find_missing_file',
- 'lassoapp_format_mod_date',
- 'lassoapp_get_capabilities_name',
- 'lassoapp_include_current',
- 'lassoapp_include',
- 'lassoapp_initialize_db',
- 'lassoapp_initialize',
- 'lassoapp_invoke_resource',
- 'lassoapp_issourcefileextension',
- 'lassoapp_link',
- 'lassoapp_load_module',
- 'lassoapp_mime_get',
- 'lassoapp_mime_type_appcache',
- 'lassoapp_mime_type_css',
- 'lassoapp_mime_type_csv',
- 'lassoapp_mime_type_doc',
- 'lassoapp_mime_type_docx',
- 'lassoapp_mime_type_eof',
- 'lassoapp_mime_type_eot',
- 'lassoapp_mime_type_gif',
- 'lassoapp_mime_type_html',
- 'lassoapp_mime_type_ico',
- 'lassoapp_mime_type_jpg',
- 'lassoapp_mime_type_js',
- 'lassoapp_mime_type_lasso',
- 'lassoapp_mime_type_map',
- 'lassoapp_mime_type_pdf',
- 'lassoapp_mime_type_png',
- 'lassoapp_mime_type_ppt',
- 'lassoapp_mime_type_rss',
- 'lassoapp_mime_type_svg',
- 'lassoapp_mime_type_swf',
- 'lassoapp_mime_type_tif',
- 'lassoapp_mime_type_ttf',
- 'lassoapp_mime_type_txt',
- 'lassoapp_mime_type_woff',
- 'lassoapp_mime_type_xaml',
- 'lassoapp_mime_type_xap',
- 'lassoapp_mime_type_xbap',
- 'lassoapp_mime_type_xhr',
- 'lassoapp_mime_type_xml',
- 'lassoapp_mime_type_zip',
- 'lassoapp_path_to_method_name',
- 'lassoapp_settingsdb',
- 'layout_name',
- 'lcapi_datasourceadd',
- 'lcapi_datasourcecloseconnection',
- 'lcapi_datasourcedelete',
- 'lcapi_datasourceduplicate',
- 'lcapi_datasourceexecsql',
- 'lcapi_datasourcefindall',
- 'lcapi_datasourceimage',
- 'lcapi_datasourceinfo',
- 'lcapi_datasourceinit',
- 'lcapi_datasourcematchesname',
- 'lcapi_datasourcenames',
- 'lcapi_datasourcenothing',
- 'lcapi_datasourceopand',
- 'lcapi_datasourceopany',
- 'lcapi_datasourceopbw',
- 'lcapi_datasourceopct',
- 'lcapi_datasourceopeq',
- 'lcapi_datasourceopew',
- 'lcapi_datasourceopft',
- 'lcapi_datasourceopgt',
- 'lcapi_datasourceopgteq',
- 'lcapi_datasourceopin',
- 'lcapi_datasourceoplt',
- 'lcapi_datasourceoplteq',
- 'lcapi_datasourceopnbw',
- 'lcapi_datasourceopnct',
- 'lcapi_datasourceopneq',
- 'lcapi_datasourceopnew',
- 'lcapi_datasourceopnin',
- 'lcapi_datasourceopno',
- 'lcapi_datasourceopnot',
- 'lcapi_datasourceopnrx',
- 'lcapi_datasourceopor',
- 'lcapi_datasourceoprx',
- 'lcapi_datasourcepreparesql',
- 'lcapi_datasourceprotectionnone',
- 'lcapi_datasourceprotectionreadonly',
- 'lcapi_datasourcerandom',
- 'lcapi_datasourceschemanames',
- 'lcapi_datasourcescripts',
- 'lcapi_datasourcesearch',
- 'lcapi_datasourcesortascending',
- 'lcapi_datasourcesortcustom',
- 'lcapi_datasourcesortdescending',
- 'lcapi_datasourcetablenames',
- 'lcapi_datasourceterm',
- 'lcapi_datasourcetickle',
- 'lcapi_datasourcetypeblob',
- 'lcapi_datasourcetypeboolean',
- 'lcapi_datasourcetypedate',
- 'lcapi_datasourcetypedecimal',
- 'lcapi_datasourcetypeinteger',
- 'lcapi_datasourcetypestring',
- 'lcapi_datasourceunpreparesql',
- 'lcapi_datasourceupdate',
- 'lcapi_fourchartointeger',
- 'lcapi_listdatasources',
- 'lcapi_loadmodule',
- 'lcapi_loadmodules',
- 'lcapi_updatedatasourceslist',
- 'ldap_scope_base',
- 'ldap_scope_children',
- 'ldap_scope_onelevel',
- 'ldap_scope_subtree',
- 'library_once',
- 'library',
- 'ljapi_initialize',
- 'locale_availablelocales',
- 'locale_canada',
- 'locale_canadafrench',
- 'locale_china',
- 'locale_chinese',
- 'locale_default',
- 'locale_english',
- 'locale_format_style_date_time',
- 'locale_format_style_default',
- 'locale_format_style_full',
- 'locale_format_style_long',
- 'locale_format_style_medium',
- 'locale_format_style_none',
- 'locale_format_style_short',
- 'locale_format',
- 'locale_france',
- 'locale_french',
- 'locale_german',
- 'locale_germany',
- 'locale_isocountries',
- 'locale_isolanguages',
- 'locale_italian',
- 'locale_italy',
- 'locale_japan',
- 'locale_japanese',
- 'locale_korea',
- 'locale_korean',
- 'locale_prc',
- 'locale_setdefault',
- 'locale_simplifiedchinese',
- 'locale_taiwan',
- 'locale_traditionalchinese',
- 'locale_uk',
- 'locale_us',
- 'log_always',
- 'log_critical',
- 'log_deprecated',
- 'log_destination_console',
- 'log_destination_database',
- 'log_destination_file',
- 'log_detail',
- 'log_initialize',
- 'log_level_critical',
- 'log_level_deprecated',
- 'log_level_detail',
- 'log_level_sql',
- 'log_level_warning',
- 'log_max_file_size',
- 'log_setdestination',
- 'log_sql',
- 'log_trim_file_size',
- 'log_warning',
- 'log',
- 'loop_abort',
- 'loop_continue',
- 'loop_count',
- 'loop_key_pop',
- 'loop_key_push',
- 'loop_key',
- 'loop_pop',
- 'loop_push',
- 'loop_value_pop',
- 'loop_value_push',
- 'loop_value',
- 'loop',
- 'lt',
- 'lte',
- 'main_thread_only',
- 'max',
- 'maxrecords_value',
- 'median',
- 'method_name',
- 'micros',
- 'millis',
- 'min',
- 'minimal',
- 'mongo_insert_continue_on_error',
- 'mongo_insert_no_validate',
- 'mongo_insert_none',
- 'mongo_query_await_data',
- 'mongo_query_exhaust',
- 'mongo_query_no_cursor_timeout',
- 'mongo_query_none',
- 'mongo_query_oplog_replay',
- 'mongo_query_partial',
- 'mongo_query_slave_ok',
- 'mongo_query_tailable_cursor',
- 'mongo_remove_none',
- 'mongo_remove_single_remove',
- 'mongo_update_multi_update',
- 'mongo_update_no_validate',
- 'mongo_update_none',
- 'mongo_update_upsert',
- 'mustache_compile_file',
- 'mustache_compile_string',
- 'mustache_include',
- 'mysqlds',
- 'namespace_global',
- 'namespace_import',
- 'namespace_using',
- 'nbw',
- 'ncn',
- 'neq',
- 'net_connectinprogress',
- 'net_connectok',
- 'net_typessl',
- 'net_typessltcp',
- 'net_typessludp',
- 'net_typetcp',
- 'net_typeudp',
- 'net_waitread',
- 'net_waittimeout',
- 'net_waitwrite',
- 'new',
- 'none',
- 'nrx',
- 'nslookup',
- 'odbc_session_driver_mssql',
- 'odbc',
- 'output_none',
- 'output',
- 'pdf_package',
- 'pdf_rectangle',
- 'pdf_serve',
- 'pi',
- 'portal',
- 'postgresql',
- 'process',
- 'protect_now',
- 'protect',
- 'queriable_average',
- 'queriable_defaultcompare',
- 'queriable_do',
- 'queriable_internal_combinebindings',
- 'queriable_max',
- 'queriable_min',
- 'queriable_qsort',
- 'queriable_reversecompare',
- 'queriable_sum',
- 'random_seed',
- 'range',
- 'records_array',
- 'records_map',
- 'records',
- 'redirect_url',
- 'referer_url',
- 'referrer_url',
- 'register_thread',
- 'register',
- 'response_filepath',
- 'response_localpath',
- 'response_path',
- 'response_realm',
- 'response_root',
- 'resultset_count',
- 'resultset',
- 'resultsets',
- 'rows_array',
- 'rows_impl',
- 'rows',
- 'rx',
- 'schema_name',
- 'security_database',
- 'security_default_realm',
- 'security_initialize',
- 'security_table_groups',
- 'security_table_ug_map',
- 'security_table_users',
- 'selected',
- 'series',
- 'server_admin',
- 'server_ip',
- 'server_name',
- 'server_port',
- 'server_protocol',
- 'server_push',
- 'server_signature',
- 'server_software',
- 'session_abort',
- 'session_addvar',
- 'session_decorate',
- 'session_deleteexpired',
- 'session_end',
- 'session_getdefaultdriver',
- 'session_id',
- 'session_initialize',
- 'session_removevar',
- 'session_result',
- 'session_setdefaultdriver',
- 'session_start',
- 'shown_count',
- 'shown_first',
- 'shown_last',
- 'site_id',
- 'site_name',
- 'skiprecords_value',
- 'sleep',
- 'split_thread',
- 'sqlite_abort',
- 'sqlite_auth',
- 'sqlite_blob',
- 'sqlite_busy',
- 'sqlite_cantopen',
- 'sqlite_constraint',
- 'sqlite_corrupt',
- 'sqlite_createdb',
- 'sqlite_done',
- 'sqlite_empty',
- 'sqlite_error',
- 'sqlite_float',
- 'sqlite_format',
- 'sqlite_full',
- 'sqlite_integer',
- 'sqlite_internal',
- 'sqlite_interrupt',
- 'sqlite_ioerr',
- 'sqlite_locked',
- 'sqlite_mismatch',
- 'sqlite_misuse',
- 'sqlite_nolfs',
- 'sqlite_nomem',
- 'sqlite_notadb',
- 'sqlite_notfound',
- 'sqlite_null',
- 'sqlite_ok',
- 'sqlite_perm',
- 'sqlite_protocol',
- 'sqlite_range',
- 'sqlite_readonly',
- 'sqlite_row',
- 'sqlite_schema',
- 'sqlite_setsleepmillis',
- 'sqlite_setsleeptries',
- 'sqlite_text',
- 'sqlite_toobig',
- 'sqliteconnector',
- 'staticarray_join',
- 'stdout',
- 'stdoutnl',
- 'string_validcharset',
- 'suspend',
- 'sys_appspath',
- 'sys_chroot',
- 'sys_clock',
- 'sys_clockspersec',
- 'sys_credits',
- 'sys_databasespath',
- 'sys_detach_exec',
- 'sys_difftime',
- 'sys_dll_ext',
- 'sys_drand48',
- 'sys_environ',
- 'sys_eol',
- 'sys_erand48',
- 'sys_errno',
- 'sys_exec_pid_to_os_pid',
- 'sys_exec',
- 'sys_exit',
- 'sys_fork',
- 'sys_garbagecollect',
- 'sys_getbytessincegc',
- 'sys_getchar',
- 'sys_getegid',
- 'sys_getenv',
- 'sys_geteuid',
- 'sys_getgid',
- 'sys_getgrnam',
- 'sys_getheapfreebytes',
- 'sys_getheapsize',
- 'sys_getlogin',
- 'sys_getpid',
- 'sys_getppid',
- 'sys_getpwnam',
- 'sys_getpwuid',
- 'sys_getstartclock',
- 'sys_getthreadcount',
- 'sys_getuid',
- 'sys_growheapby',
- 'sys_homepath',
- 'sys_is_full_path',
- 'sys_is_windows',
- 'sys_isfullpath',
- 'sys_iswindows',
- 'sys_iterate',
- 'sys_jrand48',
- 'sys_kill_exec',
- 'sys_kill',
- 'sys_lcong48',
- 'sys_librariespath',
- 'sys_listtraits',
- 'sys_listtypes',
- 'sys_listunboundmethods',
- 'sys_loadlibrary',
- 'sys_lrand48',
- 'sys_masterhomepath',
- 'sys_mrand48',
- 'sys_nrand48',
- 'sys_pid_exec',
- 'sys_pointersize',
- 'sys_rand',
- 'sys_random',
- 'sys_seed48',
- 'sys_setenv',
- 'sys_setgid',
- 'sys_setsid',
- 'sys_setuid',
- 'sys_sigabrt',
- 'sys_sigalrm',
- 'sys_sigbus',
- 'sys_sigchld',
- 'sys_sigcont',
- 'sys_sigfpe',
- 'sys_sighup',
- 'sys_sigill',
- 'sys_sigint',
- 'sys_sigkill',
- 'sys_sigpipe',
- 'sys_sigprof',
- 'sys_sigquit',
- 'sys_sigsegv',
- 'sys_sigstop',
- 'sys_sigsys',
- 'sys_sigterm',
- 'sys_sigtrap',
- 'sys_sigtstp',
- 'sys_sigttin',
- 'sys_sigttou',
- 'sys_sigurg',
- 'sys_sigusr1',
- 'sys_sigusr2',
- 'sys_sigvtalrm',
- 'sys_sigxcpu',
- 'sys_sigxfsz',
- 'sys_srand',
- 'sys_srand48',
- 'sys_srandom',
- 'sys_strerror',
- 'sys_supportpath',
- 'sys_test_exec',
- 'sys_time',
- 'sys_uname',
- 'sys_unsetenv',
- 'sys_usercapimodulepath',
- 'sys_userstartuppath',
- 'sys_version',
- 'sys_wait_exec',
- 'sys_waitpid',
- 'sys_wcontinued',
- 'sys_while',
- 'sys_wnohang',
- 'sys_wuntraced',
- 'table_name',
- 'tag_exists',
- 'tag_name',
- 'thread_var_get',
- 'thread_var_pop',
- 'thread_var_push',
- 'threadvar_find',
- 'threadvar_get',
- 'threadvar_set_asrt',
- 'threadvar_set',
- 'timer',
- 'token_value',
- 'treemap',
- 'u_lb_alphabetic',
- 'u_lb_ambiguous',
- 'u_lb_break_after',
- 'u_lb_break_before',
- 'u_lb_break_both',
- 'u_lb_break_symbols',
- 'u_lb_carriage_return',
- 'u_lb_close_punctuation',
- 'u_lb_combining_mark',
- 'u_lb_complex_context',
- 'u_lb_contingent_break',
- 'u_lb_exclamation',
- 'u_lb_glue',
- 'u_lb_h2',
- 'u_lb_h3',
- 'u_lb_hyphen',
- 'u_lb_ideographic',
- 'u_lb_infix_numeric',
- 'u_lb_inseparable',
- 'u_lb_jl',
- 'u_lb_jt',
- 'u_lb_jv',
- 'u_lb_line_feed',
- 'u_lb_mandatory_break',
- 'u_lb_next_line',
- 'u_lb_nonstarter',
- 'u_lb_numeric',
- 'u_lb_open_punctuation',
- 'u_lb_postfix_numeric',
- 'u_lb_prefix_numeric',
- 'u_lb_quotation',
- 'u_lb_space',
- 'u_lb_surrogate',
- 'u_lb_unknown',
- 'u_lb_word_joiner',
- 'u_lb_zwspace',
- 'u_nt_decimal',
- 'u_nt_digit',
- 'u_nt_none',
- 'u_nt_numeric',
- 'u_sb_aterm',
- 'u_sb_close',
- 'u_sb_format',
- 'u_sb_lower',
- 'u_sb_numeric',
- 'u_sb_oletter',
- 'u_sb_other',
- 'u_sb_sep',
- 'u_sb_sp',
- 'u_sb_sterm',
- 'u_sb_upper',
- 'u_wb_aletter',
- 'u_wb_extendnumlet',
- 'u_wb_format',
- 'u_wb_katakana',
- 'u_wb_midletter',
- 'u_wb_midnum',
- 'u_wb_numeric',
- 'u_wb_other',
- 'ucal_ampm',
- 'ucal_dayofmonth',
- 'ucal_dayofweek',
- 'ucal_dayofweekinmonth',
- 'ucal_dayofyear',
- 'ucal_daysinfirstweek',
- 'ucal_dowlocal',
- 'ucal_dstoffset',
- 'ucal_era',
- 'ucal_extendedyear',
- 'ucal_firstdayofweek',
- 'ucal_hour',
- 'ucal_hourofday',
- 'ucal_julianday',
- 'ucal_lenient',
- 'ucal_listtimezones',
- 'ucal_millisecond',
- 'ucal_millisecondsinday',
- 'ucal_minute',
- 'ucal_month',
- 'ucal_second',
- 'ucal_weekofmonth',
- 'ucal_weekofyear',
- 'ucal_year',
- 'ucal_yearwoy',
- 'ucal_zoneoffset',
- 'uchar_age',
- 'uchar_alphabetic',
- 'uchar_ascii_hex_digit',
- 'uchar_bidi_class',
- 'uchar_bidi_control',
- 'uchar_bidi_mirrored',
- 'uchar_bidi_mirroring_glyph',
- 'uchar_block',
- 'uchar_canonical_combining_class',
- 'uchar_case_folding',
- 'uchar_case_sensitive',
- 'uchar_dash',
- 'uchar_decomposition_type',
- 'uchar_default_ignorable_code_point',
- 'uchar_deprecated',
- 'uchar_diacritic',
- 'uchar_east_asian_width',
- 'uchar_extender',
- 'uchar_full_composition_exclusion',
- 'uchar_general_category_mask',
- 'uchar_general_category',
- 'uchar_grapheme_base',
- 'uchar_grapheme_cluster_break',
- 'uchar_grapheme_extend',
- 'uchar_grapheme_link',
- 'uchar_hangul_syllable_type',
- 'uchar_hex_digit',
- 'uchar_hyphen',
- 'uchar_id_continue',
- 'uchar_ideographic',
- 'uchar_ids_binary_operator',
- 'uchar_ids_trinary_operator',
- 'uchar_iso_comment',
- 'uchar_join_control',
- 'uchar_joining_group',
- 'uchar_joining_type',
- 'uchar_lead_canonical_combining_class',
- 'uchar_line_break',
- 'uchar_logical_order_exception',
- 'uchar_lowercase_mapping',
- 'uchar_lowercase',
- 'uchar_math',
- 'uchar_name',
- 'uchar_nfc_inert',
- 'uchar_nfc_quick_check',
- 'uchar_nfd_inert',
- 'uchar_nfd_quick_check',
- 'uchar_nfkc_inert',
- 'uchar_nfkc_quick_check',
- 'uchar_nfkd_inert',
- 'uchar_nfkd_quick_check',
- 'uchar_noncharacter_code_point',
- 'uchar_numeric_type',
- 'uchar_numeric_value',
- 'uchar_pattern_syntax',
- 'uchar_pattern_white_space',
- 'uchar_posix_alnum',
- 'uchar_posix_blank',
- 'uchar_posix_graph',
- 'uchar_posix_print',
- 'uchar_posix_xdigit',
- 'uchar_quotation_mark',
- 'uchar_radical',
- 'uchar_s_term',
- 'uchar_script',
- 'uchar_segment_starter',
- 'uchar_sentence_break',
- 'uchar_simple_case_folding',
- 'uchar_simple_lowercase_mapping',
- 'uchar_simple_titlecase_mapping',
- 'uchar_simple_uppercase_mapping',
- 'uchar_soft_dotted',
- 'uchar_terminal_punctuation',
- 'uchar_titlecase_mapping',
- 'uchar_trail_canonical_combining_class',
- 'uchar_unicode_1_name',
- 'uchar_unified_ideograph',
- 'uchar_uppercase_mapping',
- 'uchar_uppercase',
- 'uchar_variation_selector',
- 'uchar_white_space',
- 'uchar_word_break',
- 'uchar_xid_continue',
- 'uncompress',
- 'usage',
- 'uuid_compare',
- 'uuid_copy',
- 'uuid_generate_random',
- 'uuid_generate_time',
- 'uuid_generate',
- 'uuid_is_null',
- 'uuid_parse',
- 'uuid_unparse_lower',
- 'uuid_unparse_upper',
- 'uuid_unparse',
- 'value_list',
- 'value_listitem',
- 'valuelistitem',
- 'var_keys',
- 'var_values',
- 'wap_isenabled',
- 'wap_maxbuttons',
- 'wap_maxcolumns',
- 'wap_maxhorzpixels',
- 'wap_maxrows',
- 'wap_maxvertpixels',
- 'web_handlefcgirequest',
- 'web_node_content_representation_css',
- 'web_node_content_representation_html',
- 'web_node_content_representation_js',
- 'web_node_content_representation_xhr',
- 'web_node_forpath',
- 'web_nodes_initialize',
- 'web_nodes_normalizeextension',
- 'web_nodes_processcontentnode',
- 'web_nodes_requesthandler',
- 'web_response_nodesentry',
- 'web_router_database',
- 'web_router_initialize',
- 'websocket_handler_timeout',
- 'wexitstatus',
- 'wifcontinued',
- 'wifexited',
- 'wifsignaled',
- 'wifstopped',
- 'wstopsig',
- 'wtermsig',
- 'xml_transform',
- 'xml',
- 'zip_add_dir',
- 'zip_add',
- 'zip_checkcons',
- 'zip_close',
- 'zip_cm_bzip2',
- 'zip_cm_default',
- 'zip_cm_deflate',
- 'zip_cm_deflate64',
- 'zip_cm_implode',
- 'zip_cm_pkware_implode',
- 'zip_cm_reduce_1',
- 'zip_cm_reduce_2',
- 'zip_cm_reduce_3',
- 'zip_cm_reduce_4',
- 'zip_cm_shrink',
- 'zip_cm_store',
- 'zip_create',
- 'zip_delete',
- 'zip_em_3des_112',
- 'zip_em_3des_168',
- 'zip_em_aes_128',
- 'zip_em_aes_192',
- 'zip_em_aes_256',
- 'zip_em_des',
- 'zip_em_none',
- 'zip_em_rc2_old',
- 'zip_em_rc2',
- 'zip_em_rc4',
- 'zip_em_trad_pkware',
- 'zip_em_unknown',
- 'zip_er_changed',
- 'zip_er_close',
- 'zip_er_compnotsupp',
- 'zip_er_crc',
- 'zip_er_deleted',
- 'zip_er_eof',
- 'zip_er_exists',
- 'zip_er_incons',
- 'zip_er_internal',
- 'zip_er_inval',
- 'zip_er_memory',
- 'zip_er_multidisk',
- 'zip_er_noent',
- 'zip_er_nozip',
- 'zip_er_ok',
- 'zip_er_open',
- 'zip_er_read',
- 'zip_er_remove',
- 'zip_er_rename',
- 'zip_er_seek',
- 'zip_er_tmpopen',
- 'zip_er_write',
- 'zip_er_zipclosed',
- 'zip_er_zlib',
- 'zip_error_get_sys_type',
- 'zip_error_get',
- 'zip_error_to_str',
- 'zip_et_none',
- 'zip_et_sys',
- 'zip_et_zlib',
- 'zip_excl',
- 'zip_fclose',
- 'zip_file_error_get',
- 'zip_file_strerror',
- 'zip_fl_compressed',
- 'zip_fl_nocase',
- 'zip_fl_nodir',
- 'zip_fl_unchanged',
- 'zip_fopen_index',
- 'zip_fopen',
- 'zip_fread',
- 'zip_get_archive_comment',
- 'zip_get_file_comment',
- 'zip_get_name',
- 'zip_get_num_files',
- 'zip_name_locate',
- 'zip_open',
- 'zip_rename',
- 'zip_replace',
- 'zip_set_archive_comment',
- 'zip_set_file_comment',
- 'zip_stat_index',
- 'zip_stat',
- 'zip_strerror',
- 'zip_unchange_all',
- 'zip_unchange_archive',
- 'zip_unchange',
- 'zlib_version',
- ),
- 'Lasso 8 Tags': (
- '__char',
- '__sync_timestamp__',
- '_admin_addgroup',
- '_admin_adduser',
- '_admin_defaultconnector',
- '_admin_defaultconnectornames',
- '_admin_defaultdatabase',
- '_admin_defaultfield',
- '_admin_defaultgroup',
- '_admin_defaulthost',
- '_admin_defaulttable',
- '_admin_defaultuser',
- '_admin_deleteconnector',
- '_admin_deletedatabase',
- '_admin_deletefield',
- '_admin_deletegroup',
- '_admin_deletehost',
- '_admin_deletetable',
- '_admin_deleteuser',
- '_admin_duplicategroup',
- '_admin_internaldatabase',
- '_admin_listconnectors',
- '_admin_listdatabases',
- '_admin_listfields',
- '_admin_listgroups',
- '_admin_listhosts',
- '_admin_listtables',
- '_admin_listusers',
- '_admin_refreshconnector',
- '_admin_refreshsecurity',
- '_admin_servicepath',
- '_admin_updateconnector',
- '_admin_updatedatabase',
- '_admin_updatefield',
- '_admin_updategroup',
- '_admin_updatehost',
- '_admin_updatetable',
- '_admin_updateuser',
- '_chartfx_activation_string',
- '_chartfx_getchallengestring',
- '_chop_args',
- '_chop_mimes',
- '_client_addr_old',
- '_client_address_old',
- '_client_ip_old',
- '_database_names',
- '_datasource_reload',
- '_date_current',
- '_date_format',
- '_date_msec',
- '_date_parse',
- '_execution_timelimit',
- '_file_chmod',
- '_initialize',
- '_jdbc_acceptsurl',
- '_jdbc_debug',
- '_jdbc_deletehost',
- '_jdbc_driverclasses',
- '_jdbc_driverinfo',
- '_jdbc_metainfo',
- '_jdbc_propertyinfo',
- '_jdbc_setdriver',
- '_lasso_param',
- '_log_helper',
- '_proc_noparam',
- '_proc_withparam',
- '_recursion_limit',
- '_request_param',
- '_security_binaryexpiration',
- '_security_flushcaches',
- '_security_isserialized',
- '_security_serialexpiration',
- '_srand',
- '_strict_literals',
- '_substring',
- '_xmlrpc_exconverter',
- '_xmlrpc_inconverter',
- '_xmlrpc_xmlinconverter',
- 'abort',
- 'action_addinfo',
- 'action_addrecord',
- 'action_param',
- 'action_params',
- 'action_setfoundcount',
- 'action_setrecordid',
- 'action_settotalcount',
- 'action_statement',
- 'admin_allowedfileroots',
- 'admin_changeuser',
- 'admin_createuser',
- 'admin_currentgroups',
- 'admin_currentuserid',
- 'admin_currentusername',
- 'admin_getpref',
- 'admin_groupassignuser',
- 'admin_grouplistusers',
- 'admin_groupremoveuser',
- 'admin_lassoservicepath',
- 'admin_listgroups',
- 'admin_refreshlicensing',
- 'admin_refreshsecurity',
- 'admin_reloaddatasource',
- 'admin_removepref',
- 'admin_setpref',
- 'admin_userexists',
- 'admin_userlistgroups',
- 'all',
- 'and',
- 'array',
- 'array_iterator',
- 'auth',
- 'auth_admin',
- 'auth_auth',
- 'auth_custom',
- 'auth_group',
- 'auth_prompt',
- 'auth_user',
- 'base64',
- 'bean',
- 'bigint',
- 'bom_utf16be',
- 'bom_utf16le',
- 'bom_utf32be',
- 'bom_utf32le',
- 'bom_utf8',
- 'boolean',
- 'bw',
- 'bytes',
- 'cache',
- 'cache_delete',
- 'cache_empty',
- 'cache_exists',
- 'cache_fetch',
- 'cache_internal',
- 'cache_maintenance',
- 'cache_object',
- 'cache_preferences',
- 'cache_store',
- 'case',
- 'chartfx',
- 'chartfx_records',
- 'chartfx_serve',
- 'checked',
- 'choice_list',
- 'choice_listitem',
- 'choicelistitem',
- 'cipher_decrypt',
- 'cipher_digest',
- 'cipher_encrypt',
- 'cipher_hmac',
- 'cipher_keylength',
- 'cipher_list',
- 'click_text',
- 'client_addr',
- 'client_address',
- 'client_authorization',
- 'client_browser',
- 'client_contentlength',
- 'client_contenttype',
- 'client_cookielist',
- 'client_cookies',
- 'client_encoding',
- 'client_formmethod',
- 'client_getargs',
- 'client_getparams',
- 'client_headers',
- 'client_ip',
- 'client_ipfrominteger',
- 'client_iptointeger',
- 'client_password',
- 'client_postargs',
- 'client_postparams',
- 'client_type',
- 'client_url',
- 'client_username',
- 'cn',
- 'column',
- 'column_name',
- 'column_names',
- 'compare_beginswith',
- 'compare_contains',
- 'compare_endswith',
- 'compare_equalto',
- 'compare_greaterthan',
- 'compare_greaterthanorequals',
- 'compare_greaterthanorequls',
- 'compare_lessthan',
- 'compare_lessthanorequals',
- 'compare_notbeginswith',
- 'compare_notcontains',
- 'compare_notendswith',
- 'compare_notequalto',
- 'compare_notregexp',
- 'compare_regexp',
- 'compare_strictequalto',
- 'compare_strictnotequalto',
- 'compiler_removecacheddoc',
- 'compiler_setdefaultparserflags',
- 'compress',
- 'content_body',
- 'content_encoding',
- 'content_header',
- 'content_type',
- 'cookie',
- 'cookie_set',
- 'curl_ftp_getfile',
- 'curl_ftp_getlisting',
- 'curl_ftp_putfile',
- 'curl_include_url',
- 'currency',
- 'database_changecolumn',
- 'database_changefield',
- 'database_createcolumn',
- 'database_createfield',
- 'database_createtable',
- 'database_fmcontainer',
- 'database_hostinfo',
- 'database_inline',
- 'database_name',
- 'database_nameitem',
- 'database_names',
- 'database_realname',
- 'database_removecolumn',
- 'database_removefield',
- 'database_removetable',
- 'database_repeating',
- 'database_repeating_valueitem',
- 'database_repeatingvalueitem',
- 'database_schemanameitem',
- 'database_schemanames',
- 'database_tablecolumn',
- 'database_tablenameitem',
- 'database_tablenames',
- 'datasource_name',
- 'datasource_register',
- 'date',
- 'date__date_current',
- 'date__date_format',
- 'date__date_msec',
- 'date__date_parse',
- 'date_add',
- 'date_date',
- 'date_difference',
- 'date_duration',
- 'date_format',
- 'date_getcurrentdate',
- 'date_getday',
- 'date_getdayofweek',
- 'date_gethour',
- 'date_getlocaltimezone',
- 'date_getminute',
- 'date_getmonth',
- 'date_getsecond',
- 'date_gettime',
- 'date_getyear',
- 'date_gmttolocal',
- 'date_localtogmt',
- 'date_maximum',
- 'date_minimum',
- 'date_msec',
- 'date_setformat',
- 'date_subtract',
- 'db_layoutnameitem',
- 'db_layoutnames',
- 'db_nameitem',
- 'db_names',
- 'db_tablenameitem',
- 'db_tablenames',
- 'dbi_column_names',
- 'dbi_field_names',
- 'decimal',
- 'decimal_setglobaldefaultprecision',
- 'decode_base64',
- 'decode_bheader',
- 'decode_hex',
- 'decode_html',
- 'decode_json',
- 'decode_qheader',
- 'decode_quotedprintable',
- 'decode_quotedprintablebytes',
- 'decode_url',
- 'decode_xml',
- 'decompress',
- 'decrypt_blowfish',
- 'decrypt_blowfish2',
- 'default',
- 'define_atbegin',
- 'define_atend',
- 'define_constant',
- 'define_prototype',
- 'define_tag',
- 'define_tagp',
- 'define_type',
- 'define_typep',
- 'deserialize',
- 'directory_directorynameitem',
- 'directory_lister',
- 'directory_nameitem',
- 'directorynameitem',
- 'dns_default',
- 'dns_lookup',
- 'dns_response',
- 'duration',
- 'else',
- 'email_batch',
- 'email_compose',
- 'email_digestchallenge',
- 'email_digestresponse',
- 'email_extract',
- 'email_findemails',
- 'email_immediate',
- 'email_merge',
- 'email_mxerror',
- 'email_mxlookup',
- 'email_parse',
- 'email_pop',
- 'email_queue',
- 'email_result',
- 'email_safeemail',
- 'email_send',
- 'email_smtp',
- 'email_status',
- 'email_token',
- 'email_translatebreakstocrlf',
- 'encode_base64',
- 'encode_bheader',
- 'encode_break',
- 'encode_breaks',
- 'encode_crc32',
- 'encode_hex',
- 'encode_html',
- 'encode_htmltoxml',
- 'encode_json',
- 'encode_qheader',
- 'encode_quotedprintable',
- 'encode_quotedprintablebytes',
- 'encode_set',
- 'encode_smart',
- 'encode_sql',
- 'encode_sql92',
- 'encode_stricturl',
- 'encode_url',
- 'encode_xml',
- 'encrypt_blowfish',
- 'encrypt_blowfish2',
- 'encrypt_crammd5',
- 'encrypt_hmac',
- 'encrypt_md5',
- 'eq',
- 'error_adderror',
- 'error_code',
- 'error_code_aborted',
- 'error_code_assert',
- 'error_code_bof',
- 'error_code_connectioninvalid',
- 'error_code_couldnotclosefile',
- 'error_code_couldnotcreateoropenfile',
- 'error_code_couldnotdeletefile',
- 'error_code_couldnotdisposememory',
- 'error_code_couldnotlockmemory',
- 'error_code_couldnotreadfromfile',
- 'error_code_couldnotunlockmemory',
- 'error_code_couldnotwritetofile',
- 'error_code_criterianotmet',
- 'error_code_datasourceerror',
- 'error_code_directoryfull',
- 'error_code_diskfull',
- 'error_code_dividebyzero',
- 'error_code_eof',
- 'error_code_failure',
- 'error_code_fieldrestriction',
- 'error_code_file',
- 'error_code_filealreadyexists',
- 'error_code_filecorrupt',
- 'error_code_fileinvalid',
- 'error_code_fileinvalidaccessmode',
- 'error_code_fileisclosed',
- 'error_code_fileisopen',
- 'error_code_filelocked',
- 'error_code_filenotfound',
- 'error_code_fileunlocked',
- 'error_code_httpfilenotfound',
- 'error_code_illegalinstruction',
- 'error_code_illegaluseoffrozeninstance',
- 'error_code_invaliddatabase',
- 'error_code_invalidfilename',
- 'error_code_invalidmemoryobject',
- 'error_code_invalidparameter',
- 'error_code_invalidpassword',
- 'error_code_invalidpathname',
- 'error_code_invalidusername',
- 'error_code_ioerror',
- 'error_code_loopaborted',
- 'error_code_memory',
- 'error_code_network',
- 'error_code_nilpointer',
- 'error_code_noerr',
- 'error_code_nopermission',
- 'error_code_outofmemory',
- 'error_code_outofstackspace',
- 'error_code_overflow',
- 'error_code_postconditionfailed',
- 'error_code_preconditionfailed',
- 'error_code_resnotfound',
- 'error_code_resource',
- 'error_code_streamreaderror',
- 'error_code_streamwriteerror',
- 'error_code_syntaxerror',
- 'error_code_tagnotfound',
- 'error_code_unknownerror',
- 'error_code_varnotfound',
- 'error_code_volumedoesnotexist',
- 'error_code_webactionnotsupported',
- 'error_code_webadderror',
- 'error_code_webdeleteerror',
- 'error_code_webmodulenotfound',
- 'error_code_webnosuchobject',
- 'error_code_webrepeatingrelatedfield',
- 'error_code_webrequiredfieldmissing',
- 'error_code_webtimeout',
- 'error_code_webupdateerror',
- 'error_columnrestriction',
- 'error_currenterror',
- 'error_databaseconnectionunavailable',
- 'error_databasetimeout',
- 'error_deleteerror',
- 'error_fieldrestriction',
- 'error_filenotfound',
- 'error_invaliddatabase',
- 'error_invalidpassword',
- 'error_invalidusername',
- 'error_modulenotfound',
- 'error_msg',
- 'error_msg_aborted',
- 'error_msg_assert',
- 'error_msg_bof',
- 'error_msg_connectioninvalid',
- 'error_msg_couldnotclosefile',
- 'error_msg_couldnotcreateoropenfile',
- 'error_msg_couldnotdeletefile',
- 'error_msg_couldnotdisposememory',
- 'error_msg_couldnotlockmemory',
- 'error_msg_couldnotreadfromfile',
- 'error_msg_couldnotunlockmemory',
- 'error_msg_couldnotwritetofile',
- 'error_msg_criterianotmet',
- 'error_msg_datasourceerror',
- 'error_msg_directoryfull',
- 'error_msg_diskfull',
- 'error_msg_dividebyzero',
- 'error_msg_eof',
- 'error_msg_failure',
- 'error_msg_fieldrestriction',
- 'error_msg_file',
- 'error_msg_filealreadyexists',
- 'error_msg_filecorrupt',
- 'error_msg_fileinvalid',
- 'error_msg_fileinvalidaccessmode',
- 'error_msg_fileisclosed',
- 'error_msg_fileisopen',
- 'error_msg_filelocked',
- 'error_msg_filenotfound',
- 'error_msg_fileunlocked',
- 'error_msg_httpfilenotfound',
- 'error_msg_illegalinstruction',
- 'error_msg_illegaluseoffrozeninstance',
- 'error_msg_invaliddatabase',
- 'error_msg_invalidfilename',
- 'error_msg_invalidmemoryobject',
- 'error_msg_invalidparameter',
- 'error_msg_invalidpassword',
- 'error_msg_invalidpathname',
- 'error_msg_invalidusername',
- 'error_msg_ioerror',
- 'error_msg_loopaborted',
- 'error_msg_memory',
- 'error_msg_network',
- 'error_msg_nilpointer',
- 'error_msg_noerr',
- 'error_msg_nopermission',
- 'error_msg_outofmemory',
- 'error_msg_outofstackspace',
- 'error_msg_overflow',
- 'error_msg_postconditionfailed',
- 'error_msg_preconditionfailed',
- 'error_msg_resnotfound',
- 'error_msg_resource',
- 'error_msg_streamreaderror',
- 'error_msg_streamwriteerror',
- 'error_msg_syntaxerror',
- 'error_msg_tagnotfound',
- 'error_msg_unknownerror',
- 'error_msg_varnotfound',
- 'error_msg_volumedoesnotexist',
- 'error_msg_webactionnotsupported',
- 'error_msg_webadderror',
- 'error_msg_webdeleteerror',
- 'error_msg_webmodulenotfound',
- 'error_msg_webnosuchobject',
- 'error_msg_webrepeatingrelatedfield',
- 'error_msg_webrequiredfieldmissing',
- 'error_msg_webtimeout',
- 'error_msg_webupdateerror',
- 'error_noerror',
- 'error_nopermission',
- 'error_norecordsfound',
- 'error_outofmemory',
- 'error_pop',
- 'error_push',
- 'error_reqcolumnmissing',
- 'error_reqfieldmissing',
- 'error_requiredcolumnmissing',
- 'error_requiredfieldmissing',
- 'error_reset',
- 'error_seterrorcode',
- 'error_seterrormessage',
- 'error_updateerror',
- 'euro',
- 'event_schedule',
- 'ew',
- 'fail',
- 'fail_if',
- 'false',
- 'field',
- 'field_name',
- 'field_names',
- 'file',
- 'file_autoresolvefullpaths',
- 'file_chmod',
- 'file_control',
- 'file_copy',
- 'file_create',
- 'file_creationdate',
- 'file_currenterror',
- 'file_delete',
- 'file_exists',
- 'file_getlinecount',
- 'file_getsize',
- 'file_isdirectory',
- 'file_listdirectory',
- 'file_moddate',
- 'file_modechar',
- 'file_modeline',
- 'file_move',
- 'file_openread',
- 'file_openreadwrite',
- 'file_openwrite',
- 'file_openwriteappend',
- 'file_openwritetruncate',
- 'file_probeeol',
- 'file_processuploads',
- 'file_read',
- 'file_readline',
- 'file_rename',
- 'file_serve',
- 'file_setsize',
- 'file_stream',
- 'file_streamcopy',
- 'file_uploads',
- 'file_waitread',
- 'file_waittimeout',
- 'file_waitwrite',
- 'file_write',
- 'find_soap_ops',
- 'form_param',
- 'found_count',
- 'ft',
- 'ftp_getfile',
- 'ftp_getlisting',
- 'ftp_putfile',
- 'full',
- 'global',
- 'global_defined',
- 'global_remove',
- 'global_reset',
- 'globals',
- 'gt',
- 'gte',
- 'handle',
- 'handle_error',
- 'header',
- 'html_comment',
- 'http_getfile',
- 'ical_alarm',
- 'ical_attribute',
- 'ical_calendar',
- 'ical_daylight',
- 'ical_event',
- 'ical_freebusy',
- 'ical_item',
- 'ical_journal',
- 'ical_parse',
- 'ical_standard',
- 'ical_timezone',
- 'ical_todo',
- 'if',
- 'if_empty',
- 'if_false',
- 'if_null',
- 'if_true',
- 'image',
- 'image_url',
- 'img',
- 'include',
- 'include_cgi',
- 'include_currentpath',
- 'include_once',
- 'include_raw',
- 'include_url',
- 'inline',
- 'integer',
- 'iterate',
- 'iterator',
- 'java',
- 'java_bean',
- 'json_records',
- 'json_rpccall',
- 'keycolumn_name',
- 'keycolumn_value',
- 'keyfield_name',
- 'keyfield_value',
- 'lasso_comment',
- 'lasso_currentaction',
- 'lasso_datasourceis',
- 'lasso_datasourceis4d',
- 'lasso_datasourceisfilemaker',
- 'lasso_datasourceisfilemaker7',
- 'lasso_datasourceisfilemaker9',
- 'lasso_datasourceisfilemakersa',
- 'lasso_datasourceisjdbc',
- 'lasso_datasourceislassomysql',
- 'lasso_datasourceismysql',
- 'lasso_datasourceisodbc',
- 'lasso_datasourceisopenbase',
- 'lasso_datasourceisoracle',
- 'lasso_datasourceispostgresql',
- 'lasso_datasourceisspotlight',
- 'lasso_datasourceissqlite',
- 'lasso_datasourceissqlserver',
- 'lasso_datasourcemodulename',
- 'lasso_datatype',
- 'lasso_disableondemand',
- 'lasso_errorreporting',
- 'lasso_executiontimelimit',
- 'lasso_parser',
- 'lasso_process',
- 'lasso_sessionid',
- 'lasso_siteid',
- 'lasso_siteisrunning',
- 'lasso_sitename',
- 'lasso_siterestart',
- 'lasso_sitestart',
- 'lasso_sitestop',
- 'lasso_tagexists',
- 'lasso_tagmodulename',
- 'lasso_uniqueid',
- 'lasso_updatecheck',
- 'lasso_uptime',
- 'lasso_version',
- 'lassoapp_create',
- 'lassoapp_dump',
- 'lassoapp_flattendir',
- 'lassoapp_getappdata',
- 'lassoapp_link',
- 'lassoapp_list',
- 'lassoapp_process',
- 'lassoapp_unitize',
- 'layout_name',
- 'ldap',
- 'ldap_scope_base',
- 'ldap_scope_onelevel',
- 'ldap_scope_subtree',
- 'ldml',
- 'ldml_ldml',
- 'library',
- 'library_once',
- 'link',
- 'link_currentaction',
- 'link_currentactionparams',
- 'link_currentactionurl',
- 'link_currentgroup',
- 'link_currentgroupparams',
- 'link_currentgroupurl',
- 'link_currentrecord',
- 'link_currentrecordparams',
- 'link_currentrecordurl',
- 'link_currentsearch',
- 'link_currentsearchparams',
- 'link_currentsearchurl',
- 'link_detail',
- 'link_detailparams',
- 'link_detailurl',
- 'link_firstgroup',
- 'link_firstgroupparams',
- 'link_firstgroupurl',
- 'link_firstrecord',
- 'link_firstrecordparams',
- 'link_firstrecordurl',
- 'link_lastgroup',
- 'link_lastgroupparams',
- 'link_lastgroupurl',
- 'link_lastrecord',
- 'link_lastrecordparams',
- 'link_lastrecordurl',
- 'link_nextgroup',
- 'link_nextgroupparams',
- 'link_nextgroupurl',
- 'link_nextrecord',
- 'link_nextrecordparams',
- 'link_nextrecordurl',
- 'link_params',
- 'link_prevgroup',
- 'link_prevgroupparams',
- 'link_prevgroupurl',
- 'link_prevrecord',
- 'link_prevrecordparams',
- 'link_prevrecordurl',
- 'link_setformat',
- 'link_url',
- 'list',
- 'list_additem',
- 'list_fromlist',
- 'list_fromstring',
- 'list_getitem',
- 'list_itemcount',
- 'list_iterator',
- 'list_removeitem',
- 'list_replaceitem',
- 'list_reverseiterator',
- 'list_tostring',
- 'literal',
- 'ljax_end',
- 'ljax_hastarget',
- 'ljax_include',
- 'ljax_start',
- 'ljax_target',
- 'local',
- 'local_defined',
- 'local_remove',
- 'local_reset',
- 'locale_format',
- 'locals',
- 'log',
- 'log_always',
- 'log_critical',
- 'log_deprecated',
- 'log_destination_console',
- 'log_destination_database',
- 'log_destination_file',
- 'log_detail',
- 'log_level_critical',
- 'log_level_deprecated',
- 'log_level_detail',
- 'log_level_sql',
- 'log_level_warning',
- 'log_setdestination',
- 'log_sql',
- 'log_warning',
- 'logicalop_value',
- 'logicaloperator_value',
- 'loop',
- 'loop_abort',
- 'loop_continue',
- 'loop_count',
- 'lt',
- 'lte',
- 'magick_image',
- 'map',
- 'map_iterator',
- 'match_comparator',
- 'match_notrange',
- 'match_notregexp',
- 'match_range',
- 'match_regexp',
- 'math_abs',
- 'math_acos',
- 'math_add',
- 'math_asin',
- 'math_atan',
- 'math_atan2',
- 'math_ceil',
- 'math_converteuro',
- 'math_cos',
- 'math_div',
- 'math_exp',
- 'math_floor',
- 'math_internal_rand',
- 'math_internal_randmax',
- 'math_internal_srand',
- 'math_ln',
- 'math_log',
- 'math_log10',
- 'math_max',
- 'math_min',
- 'math_mod',
- 'math_mult',
- 'math_pow',
- 'math_random',
- 'math_range',
- 'math_rint',
- 'math_roman',
- 'math_round',
- 'math_sin',
- 'math_sqrt',
- 'math_sub',
- 'math_tan',
- 'maxrecords_value',
- 'memory_session_driver',
- 'mime_type',
- 'minimal',
- 'misc__srand',
- 'misc_randomnumber',
- 'misc_roman',
- 'misc_valid_creditcard',
- 'mysql_session_driver',
- 'named_param',
- 'namespace_current',
- 'namespace_delimiter',
- 'namespace_exists',
- 'namespace_file_fullpathexists',
- 'namespace_global',
- 'namespace_import',
- 'namespace_load',
- 'namespace_page',
- 'namespace_unload',
- 'namespace_using',
- 'neq',
- 'net',
- 'net_connectinprogress',
- 'net_connectok',
- 'net_typessl',
- 'net_typessltcp',
- 'net_typessludp',
- 'net_typetcp',
- 'net_typeudp',
- 'net_waitread',
- 'net_waittimeout',
- 'net_waitwrite',
- 'no_default_output',
- 'none',
- 'noprocess',
- 'not',
- 'nrx',
- 'nslookup',
- 'null',
- 'object',
- 'once',
- 'oneoff',
- 'op_logicalvalue',
- 'operator_logicalvalue',
- 'option',
- 'or',
- 'os_process',
- 'output',
- 'output_none',
- 'pair',
- 'params_up',
- 'pdf_barcode',
- 'pdf_color',
- 'pdf_doc',
- 'pdf_font',
- 'pdf_image',
- 'pdf_list',
- 'pdf_read',
- 'pdf_serve',
- 'pdf_table',
- 'pdf_text',
- 'percent',
- 'portal',
- 'postcondition',
- 'precondition',
- 'prettyprintingnsmap',
- 'prettyprintingtypemap',
- 'priorityqueue',
- 'private',
- 'proc_convert',
- 'proc_convertbody',
- 'proc_convertone',
- 'proc_extract',
- 'proc_extractone',
- 'proc_find',
- 'proc_first',
- 'proc_foreach',
- 'proc_get',
- 'proc_join',
- 'proc_lasso',
- 'proc_last',
- 'proc_map_entry',
- 'proc_null',
- 'proc_regexp',
- 'proc_xml',
- 'proc_xslt',
- 'process',
- 'protect',
- 'queue',
- 'rand',
- 'randomnumber',
- 'raw',
- 'recid_value',
- 'record_count',
- 'recordcount',
- 'recordid_value',
- 'records',
- 'records_array',
- 'records_map',
- 'redirect_url',
- 'reference',
- 'referer',
- 'referer_url',
- 'referrer',
- 'referrer_url',
- 'regexp',
- 'repeating',
- 'repeating_valueitem',
- 'repeatingvalueitem',
- 'repetition',
- 'req_column',
- 'req_field',
- 'required_column',
- 'required_field',
- 'response_fileexists',
- 'response_filepath',
- 'response_localpath',
- 'response_path',
- 'response_realm',
- 'resultset',
- 'resultset_count',
- 'return',
- 'return_value',
- 'reverseiterator',
- 'roman',
- 'row_count',
- 'rows',
- 'rows_array',
- 'run_children',
- 'rx',
- 'schema_name',
- 'scientific',
- 'search_args',
- 'search_arguments',
- 'search_columnitem',
- 'search_fielditem',
- 'search_operatoritem',
- 'search_opitem',
- 'search_valueitem',
- 'searchfielditem',
- 'searchoperatoritem',
- 'searchopitem',
- 'searchvalueitem',
- 'select',
- 'selected',
- 'self',
- 'serialize',
- 'series',
- 'server_date',
- 'server_day',
- 'server_ip',
- 'server_name',
- 'server_port',
- 'server_push',
- 'server_siteisrunning',
- 'server_sitestart',
- 'server_sitestop',
- 'server_time',
- 'session_abort',
- 'session_addoutputfilter',
- 'session_addvar',
- 'session_addvariable',
- 'session_deleteexpired',
- 'session_driver',
- 'session_end',
- 'session_id',
- 'session_removevar',
- 'session_removevariable',
- 'session_result',
- 'session_setdriver',
- 'session_start',
- 'set',
- 'set_iterator',
- 'set_reverseiterator',
- 'shown_count',
- 'shown_first',
- 'shown_last',
- 'site_atbegin',
- 'site_id',
- 'site_name',
- 'site_restart',
- 'skiprecords_value',
- 'sleep',
- 'soap_convertpartstopairs',
- 'soap_definetag',
- 'soap_info',
- 'soap_lastrequest',
- 'soap_lastresponse',
- 'soap_stub',
- 'sort_args',
- 'sort_arguments',
- 'sort_columnitem',
- 'sort_fielditem',
- 'sort_orderitem',
- 'sortcolumnitem',
- 'sortfielditem',
- 'sortorderitem',
- 'sqlite_createdb',
- 'sqlite_session_driver',
- 'sqlite_setsleepmillis',
- 'sqlite_setsleeptries',
- 'srand',
- 'stack',
- 'stock_quote',
- 'string',
- 'string_charfromname',
- 'string_concatenate',
- 'string_countfields',
- 'string_endswith',
- 'string_extract',
- 'string_findposition',
- 'string_findregexp',
- 'string_fordigit',
- 'string_getfield',
- 'string_getunicodeversion',
- 'string_insert',
- 'string_isalpha',
- 'string_isalphanumeric',
- 'string_isdigit',
- 'string_ishexdigit',
- 'string_islower',
- 'string_isnumeric',
- 'string_ispunctuation',
- 'string_isspace',
- 'string_isupper',
- 'string_length',
- 'string_lowercase',
- 'string_remove',
- 'string_removeleading',
- 'string_removetrailing',
- 'string_replace',
- 'string_replaceregexp',
- 'string_todecimal',
- 'string_tointeger',
- 'string_uppercase',
- 'string_validcharset',
- 'table_name',
- 'table_realname',
- 'tag',
- 'tag_name',
- 'tags',
- 'tags_find',
- 'tags_list',
- 'tcp_close',
- 'tcp_open',
- 'tcp_send',
- 'tcp_tcp_close',
- 'tcp_tcp_open',
- 'tcp_tcp_send',
- 'thread_abort',
- 'thread_atomic',
- 'thread_event',
- 'thread_exists',
- 'thread_getcurrentid',
- 'thread_getpriority',
- 'thread_info',
- 'thread_list',
- 'thread_lock',
- 'thread_pipe',
- 'thread_priority_default',
- 'thread_priority_high',
- 'thread_priority_low',
- 'thread_rwlock',
- 'thread_semaphore',
- 'thread_setpriority',
- 'token_value',
- 'total_records',
- 'treemap',
- 'treemap_iterator',
- 'true',
- 'url_rewrite',
- 'valid_creditcard',
- 'valid_date',
- 'valid_email',
- 'valid_url',
- 'value_list',
- 'value_listitem',
- 'valuelistitem',
- 'var',
- 'var_defined',
- 'var_remove',
- 'var_reset',
- 'var_set',
- 'variable',
- 'variable_defined',
- 'variable_set',
- 'variables',
- 'variant_count',
- 'vars',
- 'wap_isenabled',
- 'wap_maxbuttons',
- 'wap_maxcolumns',
- 'wap_maxhorzpixels',
- 'wap_maxrows',
- 'wap_maxvertpixels',
- 'while',
- 'wsdl_extract',
- 'wsdl_getbinding',
- 'wsdl_getbindingforoperation',
- 'wsdl_getbindingoperations',
- 'wsdl_getmessagenamed',
- 'wsdl_getmessageparts',
- 'wsdl_getmessagetriofromporttype',
- 'wsdl_getopbodystyle',
- 'wsdl_getopbodyuse',
- 'wsdl_getoperation',
- 'wsdl_getoplocation',
- 'wsdl_getopmessagetypes',
- 'wsdl_getopsoapaction',
- 'wsdl_getportaddress',
- 'wsdl_getportsforservice',
- 'wsdl_getporttype',
- 'wsdl_getporttypeoperation',
- 'wsdl_getservicedocumentation',
- 'wsdl_getservices',
- 'wsdl_gettargetnamespace',
- 'wsdl_issoapoperation',
- 'wsdl_listoperations',
- 'wsdl_maketest',
- 'xml',
- 'xml_extract',
- 'xml_rpc',
- 'xml_rpccall',
- 'xml_rw',
- 'xml_serve',
- 'xml_transform',
- 'xml_xml',
- 'xml_xmlstream',
- 'xmlstream',
- 'xsd_attribute',
- 'xsd_blankarraybase',
- 'xsd_blankbase',
- 'xsd_buildtype',
- 'xsd_cache',
- 'xsd_checkcardinality',
- 'xsd_continueall',
- 'xsd_continueannotation',
- 'xsd_continueany',
- 'xsd_continueanyattribute',
- 'xsd_continueattribute',
- 'xsd_continueattributegroup',
- 'xsd_continuechoice',
- 'xsd_continuecomplexcontent',
- 'xsd_continuecomplextype',
- 'xsd_continuedocumentation',
- 'xsd_continueextension',
- 'xsd_continuegroup',
- 'xsd_continuekey',
- 'xsd_continuelist',
- 'xsd_continuerestriction',
- 'xsd_continuesequence',
- 'xsd_continuesimplecontent',
- 'xsd_continuesimpletype',
- 'xsd_continueunion',
- 'xsd_deserialize',
- 'xsd_fullyqualifyname',
- 'xsd_generate',
- 'xsd_generateblankfromtype',
- 'xsd_generateblanksimpletype',
- 'xsd_generatetype',
- 'xsd_getschematype',
- 'xsd_issimpletype',
- 'xsd_loadschema',
- 'xsd_lookupnamespaceuri',
- 'xsd_lookuptype',
- 'xsd_processany',
- 'xsd_processattribute',
- 'xsd_processattributegroup',
- 'xsd_processcomplextype',
- 'xsd_processelement',
- 'xsd_processgroup',
- 'xsd_processimport',
- 'xsd_processinclude',
- 'xsd_processschema',
- 'xsd_processsimpletype',
- 'xsd_ref',
- 'xsd_type',
- )
-}
-MEMBERS = {
- 'Member Methods': (
- 'abort',
- 'abs',
- 'accept_charset',
- 'accept',
- 'acceptconnections',
- 'acceptdeserializedelement',
- 'acceptnossl',
- 'acceptpost',
- 'accesskey',
- 'acos',
- 'acosh',
- 'action',
- 'actionparams',
- 'active_tick',
- 'add',
- 'addatend',
- 'addattachment',
- 'addbarcode',
- 'addchapter',
- 'addcheckbox',
- 'addcolumninfo',
- 'addcombobox',
- 'addcomment',
- 'addcomponent',
- 'addcomponents',
- 'addcss',
- 'adddatabasetable',
- 'adddatasource',
- 'adddatasourcedatabase',
- 'adddatasourcehost',
- 'adddir',
- 'adddirpath',
- 'addendjs',
- 'addendjstext',
- 'adderror',
- 'addfavicon',
- 'addfile',
- 'addgroup',
- 'addheader',
- 'addhiddenfield',
- 'addhtmlpart',
- 'addimage',
- 'addjavascript',
- 'addjs',
- 'addjstext',
- 'addlist',
- 'addmathfunctions',
- 'addmember',
- 'addoneheaderline',
- 'addpage',
- 'addparagraph',
- 'addpart',
- 'addpasswordfield',
- 'addphrase',
- 'addpostdispatch',
- 'addpredispatch',
- 'addradiobutton',
- 'addradiogroup',
- 'addresetbutton',
- 'addrow',
- 'addsection',
- 'addselectlist',
- 'addset',
- 'addsubmitbutton',
- 'addsubnode',
- 'addtable',
- 'addtask',
- 'addtext',
- 'addtextarea',
- 'addtextfield',
- 'addtextpart',
- 'addtobuffer',
- 'addtrait',
- 'adduser',
- 'addusertogroup',
- 'addwarning',
- 'addzip',
- 'allocobject',
- 'am',
- 'ampm',
- 'annotate',
- 'answer',
- 'apop',
- 'append',
- 'appendarray',
- 'appendarraybegin',
- 'appendarrayend',
- 'appendbool',
- 'appendbytes',
- 'appendchar',
- 'appendchild',
- 'appendcolon',
- 'appendcomma',
- 'appenddata',
- 'appenddatetime',
- 'appenddbpointer',
- 'appenddecimal',
- 'appenddocument',
- 'appendimagetolist',
- 'appendinteger',
- 'appendnowutc',
- 'appendnull',
- 'appendoid',
- 'appendregex',
- 'appendreplacement',
- 'appendstring',
- 'appendtail',
- 'appendtime',
- 'applyheatcolors',
- 'appmessage',
- 'appname',
- 'appprefix',
- 'appstatus',
- 'arc',
- 'archive',
- 'arguments',
- 'argumentvalue',
- 'asarray',
- 'asarraystring',
- 'asasync',
- 'asbytes',
- 'ascopy',
- 'ascopydeep',
- 'asdecimal',
- 'asgenerator',
- 'asin',
- 'asinh',
- 'asinteger',
- 'askeyedgenerator',
- 'aslazystring',
- 'aslist',
- 'asraw',
- 'asstaticarray',
- 'asstring',
- 'asstringhex',
- 'asstringoct',
- 'asxml',
- 'atan',
- 'atan2',
- 'atanh',
- 'atend',
- 'atends',
- 'atime',
- 'attributecount',
- 'attributes',
- 'attrs',
- 'auth',
- 'authenticate',
- 'authorize',
- 'autocollectbuffer',
- 'average',
- 'back',
- 'basename',
- 'basepaths',
- 'baseuri',
- 'bcc',
- 'beginssl',
- 'beginswith',
- 'begintls',
- 'bestcharset',
- 'bind_blob',
- 'bind_double',
- 'bind_int',
- 'bind_null',
- 'bind_parameter_index',
- 'bind_text',
- 'bind',
- 'bindcount',
- 'bindone',
- 'bindparam',
- 'bitand',
- 'bitclear',
- 'bitflip',
- 'bitformat',
- 'bitnot',
- 'bitor',
- 'bitset',
- 'bitshiftleft',
- 'bitshiftright',
- 'bittest',
- 'bitxor',
- 'blur',
- 'body',
- 'bodybytes',
- 'boundary',
- 'bptoxml',
- 'bptypetostr',
- 'bucketnumber',
- 'buff',
- 'buildquery',
- 'businessdaysbetween',
- 'by',
- 'bytes',
- 'cachedappprefix',
- 'cachedroot',
- 'callboolean',
- 'callbooleanmethod',
- 'callbytemethod',
- 'callcharmethod',
- 'calldoublemethod',
- 'calledname',
- 'callfirst',
- 'callfloat',
- 'callfloatmethod',
- 'callint',
- 'callintmethod',
- 'calllongmethod',
- 'callnonvirtualbooleanmethod',
- 'callnonvirtualbytemethod',
- 'callnonvirtualcharmethod',
- 'callnonvirtualdoublemethod',
- 'callnonvirtualfloatmethod',
- 'callnonvirtualintmethod',
- 'callnonvirtuallongmethod',
- 'callnonvirtualobjectmethod',
- 'callnonvirtualshortmethod',
- 'callnonvirtualvoidmethod',
- 'callobject',
- 'callobjectmethod',
- 'callshortmethod',
- 'callsite_col',
- 'callsite_file',
- 'callsite_line',
- 'callstack',
- 'callstaticboolean',
- 'callstaticbooleanmethod',
- 'callstaticbytemethod',
- 'callstaticcharmethod',
- 'callstaticdoublemethod',
- 'callstaticfloatmethod',
- 'callstaticint',
- 'callstaticintmethod',
- 'callstaticlongmethod',
- 'callstaticobject',
- 'callstaticobjectmethod',
- 'callstaticshortmethod',
- 'callstaticstring',
- 'callstaticvoidmethod',
- 'callstring',
- 'callvoid',
- 'callvoidmethod',
- 'cancel',
- 'cap',
- 'capa',
- 'capabilities',
- 'capi',
- 'cbrt',
- 'cc',
- 'ceil',
- 'chardigitvalue',
- 'charname',
- 'charset',
- 'chartype',
- 'checkdebugging',
- 'checked',
- 'checkuser',
- 'childnodes',
- 'chk',
- 'chmod',
- 'choosecolumntype',
- 'chown',
- 'chunked',
- 'circle',
- 'class',
- 'classid',
- 'clear',
- 'clonenode',
- 'close',
- 'closepath',
- 'closeprepared',
- 'closewrite',
- 'code',
- 'codebase',
- 'codetype',
- 'colmap',
- 'colorspace',
- 'column_blob',
- 'column_count',
- 'column_decltype',
- 'column_double',
- 'column_int64',
- 'column_name',
- 'column_text',
- 'column_type',
- 'command',
- 'comments',
- 'compare',
- 'comparecodepointorder',
- 'componentdelimiter',
- 'components',
- 'composite',
- 'compress',
- 'concat',
- 'condtoint',
- 'configureds',
- 'configuredskeys',
- 'connect',
- 'connection',
- 'connectionhandler',
- 'connhandler',
- 'consume_domain',
- 'consume_label',
- 'consume_message',
- 'consume_rdata',
- 'consume_string',
- 'contains',
- 'content_disposition',
- 'content_transfer_encoding',
- 'content_type',
- 'content',
- 'contentlength',
- 'contents',
- 'contenttype',
- 'continuation',
- 'continuationpacket',
- 'continuationpoint',
- 'continuationstack',
- 'continue',
- 'contrast',
- 'conventionaltop',
- 'convert',
- 'cookie',
- 'cookies',
- 'cookiesarray',
- 'cookiesary',
- 'copyto',
- 'cos',
- 'cosh',
- 'count',
- 'countkeys',
- 'country',
- 'countusersbygroup',
- 'crc',
- 'create',
- 'createattribute',
- 'createattributens',
- 'createcdatasection',
- 'createcomment',
- 'createdocument',
- 'createdocumentfragment',
- 'createdocumenttype',
- 'createelement',
- 'createelementns',
- 'createentityreference',
- 'createindex',
- 'createprocessinginstruction',
- 'createtable',
- 'createtextnode',
- 'criteria',
- 'crop',
- 'csscontent',
- 'curl',
- 'current',
- 'currentfile',
- 'curveto',
- 'd',
- 'data',
- 'databasecolumnnames',
- 'databasecolumns',
- 'databasemap',
- 'databasename',
- 'datasourcecolumnnames',
- 'datasourcecolumns',
- 'datasourcemap',
- 'date',
- 'day',
- 'dayofmonth',
- 'dayofweek',
- 'dayofweekinmonth',
- 'dayofyear',
- 'days',
- 'daysbetween',
- 'db',
- 'dbtablestable',
- 'debug',
- 'declare',
- 'decodebase64',
- 'decodehex',
- 'decodehtml',
- 'decodeqp',
- 'decodeurl',
- 'decodexml',
- 'decompose',
- 'decomposeassignment',
- 'defaultcontentrepresentation',
- 'defer',
- 'deg2rad',
- 'dele',
- 'delete',
- 'deletedata',
- 'deleteglobalref',
- 'deletelocalref',
- 'delim',
- 'depth',
- 'dereferencepointer',
- 'describe',
- 'description',
- 'deserialize',
- 'detach',
- 'detectcharset',
- 'didinclude',
- 'difference',
- 'digit',
- 'dir',
- 'displaycountry',
- 'displaylanguage',
- 'displayname',
- 'displayscript',
- 'displayvariant',
- 'div',
- 'dns_response',
- 'do',
- 'doatbegins',
- 'doatends',
- 'doccomment',
- 'doclose',
- 'doctype',
- 'document',
- 'documentelement',
- 'documentroot',
- 'domainbody',
- 'done',
- 'dosessions',
- 'dowithclose',
- 'dowlocal',
- 'download',
- 'drawtext',
- 'drop',
- 'dropindex',
- 'dsdbtable',
- 'dshoststable',
- 'dsinfo',
- 'dst',
- 'dstable',
- 'dstoffset',
- 'dtdid',
- 'dup',
- 'dup2',
- 'each',
- 'eachbyte',
- 'eachcharacter',
- 'eachchild',
- 'eachcomponent',
- 'eachdir',
- 'eachdirpath',
- 'eachdirpathrecursive',
- 'eachentry',
- 'eachfile',
- 'eachfilename',
- 'eachfilepath',
- 'eachfilepathrecursive',
- 'eachkey',
- 'eachline',
- 'eachlinebreak',
- 'eachmatch',
- 'eachnode',
- 'eachpair',
- 'eachpath',
- 'eachpathrecursive',
- 'eachrow',
- 'eachsub',
- 'eachword',
- 'eachwordbreak',
- 'element',
- 'eligiblepath',
- 'eligiblepaths',
- 'encodebase64',
- 'encodehex',
- 'encodehtml',
- 'encodehtmltoxml',
- 'encodemd5',
- 'encodepassword',
- 'encodeqp',
- 'encodesql',
- 'encodesql92',
- 'encodeurl',
- 'encodevalue',
- 'encodexml',
- 'encoding',
- 'enctype',
- 'end',
- 'endjs',
- 'endssl',
- 'endswith',
- 'endtls',
- 'enhance',
- 'ensurestopped',
- 'entities',
- 'entry',
- 'env',
- 'equals',
- 'era',
- 'erf',
- 'erfc',
- 'err',
- 'errcode',
- 'errmsg',
- 'error',
- 'errors',
- 'errstack',
- 'escape_member',
- 'establisherrorstate',
- 'exceptioncheck',
- 'exceptionclear',
- 'exceptiondescribe',
- 'exceptionoccurred',
- 'exchange',
- 'execinits',
- 'execinstalls',
- 'execute',
- 'executelazy',
- 'executenow',
- 'exists',
- 'exit',
- 'exitcode',
- 'exp',
- 'expire',
- 'expireminutes',
- 'expiresminutes',
- 'expm1',
- 'export16bits',
- 'export32bits',
- 'export64bits',
- 'export8bits',
- 'exportas',
- 'exportbytes',
- 'exportfdf',
- 'exportpointerbits',
- 'exportsigned16bits',
- 'exportsigned32bits',
- 'exportsigned64bits',
- 'exportsigned8bits',
- 'exportstring',
- 'expose',
- 'extendedyear',
- 'extensiondelimiter',
- 'extensions',
- 'extract',
- 'extractfast',
- 'extractfastone',
- 'extractimage',
- 'extractone',
- 'f',
- 'fabs',
- 'fail',
- 'failnoconnectionhandler',
- 'family',
- 'fatalerror',
- 'fcgireq',
- 'fchdir',
- 'fchmod',
- 'fchown',
- 'fd',
- 'features',
- 'fetchdata',
- 'fieldnames',
- 'fieldposition',
- 'fieldstable',
- 'fieldtype',
- 'fieldvalue',
- 'file',
- 'filename',
- 'filenames',
- 'filequeue',
- 'fileuploads',
- 'fileuploadsary',
- 'filterinputcolumn',
- 'finalize',
- 'find',
- 'findall',
- 'findandmodify',
- 'findbucket',
- 'findcase',
- 'findclass',
- 'findcount',
- 'finddescendant',
- 'findfirst',
- 'findinclude',
- 'findinctx',
- 'findindex',
- 'findlast',
- 'findpattern',
- 'findposition',
- 'findsymbols',
- 'first',
- 'firstchild',
- 'firstcomponent',
- 'firstdayofweek',
- 'firstnode',
- 'fixformat',
- 'flags',
- 'fliph',
- 'flipv',
- 'floor',
- 'flush',
- 'foldcase',
- 'foo',
- 'for',
- 'forcedrowid',
- 'foreach',
- 'foreachaccept',
- 'foreachbyte',
- 'foreachcharacter',
- 'foreachchild',
- 'foreachday',
- 'foreachentry',
- 'foreachfile',
- 'foreachfilename',
- 'foreachkey',
- 'foreachline',
- 'foreachlinebreak',
- 'foreachmatch',
- 'foreachnode',
- 'foreachpair',
- 'foreachpathcomponent',
- 'foreachrow',
- 'foreachspool',
- 'foreachsub',
- 'foreachwordbreak',
- 'form',
- 'format',
- 'formatas',
- 'formatcontextelement',
- 'formatcontextelements',
- 'formatnumber',
- 'free',
- 'frexp',
- 'from',
- 'fromname',
- 'fromport',
- 'fromreflectedfield',
- 'fromreflectedmethod',
- 'front',
- 'fsync',
- 'ftpdeletefile',
- 'ftpgetlisting',
- 'ftruncate',
- 'fullpath',
- 'fx',
- 'gamma',
- 'gatewayinterface',
- 'gen',
- 'generatechecksum',
- 'get',
- 'getabswidth',
- 'getalignment',
- 'getappsource',
- 'getarraylength',
- 'getattr',
- 'getattribute',
- 'getattributenamespace',
- 'getattributenode',
- 'getattributenodens',
- 'getattributens',
- 'getbarheight',
- 'getbarmultiplier',
- 'getbarwidth',
- 'getbaseline',
- 'getbold',
- 'getbooleanarrayelements',
- 'getbooleanarrayregion',
- 'getbooleanfield',
- 'getbordercolor',
- 'getborderwidth',
- 'getbytearrayelements',
- 'getbytearrayregion',
- 'getbytefield',
- 'getchararrayelements',
- 'getchararrayregion',
- 'getcharfield',
- 'getclass',
- 'getcode',
- 'getcolor',
- 'getcolumn',
- 'getcolumncount',
- 'getcolumns',
- 'getdatabasebyalias',
- 'getdatabasebyid',
- 'getdatabasebyname',
- 'getdatabasehost',
- 'getdatabasetable',
- 'getdatabasetablebyalias',
- 'getdatabasetablebyid',
- 'getdatabasetablepart',
- 'getdatasource',
- 'getdatasourcedatabase',
- 'getdatasourcedatabasebyid',
- 'getdatasourcehost',
- 'getdatasourceid',
- 'getdatasourcename',
- 'getdefaultstorage',
- 'getdoublearrayelements',
- 'getdoublearrayregion',
- 'getdoublefield',
- 'getelementbyid',
- 'getelementsbytagname',
- 'getelementsbytagnamens',
- 'getencoding',
- 'getface',
- 'getfield',
- 'getfieldid',
- 'getfile',
- 'getfloatarrayelements',
- 'getfloatarrayregion',
- 'getfloatfield',
- 'getfont',
- 'getformat',
- 'getfullfontname',
- 'getgroup',
- 'getgroupid',
- 'getheader',
- 'getheaders',
- 'gethostdatabase',
- 'gethtmlattr',
- 'gethtmlattrstring',
- 'getinclude',
- 'getintarrayelements',
- 'getintarrayregion',
- 'getintfield',
- 'getisocomment',
- 'getitalic',
- 'getlasterror',
- 'getlcapitype',
- 'getlibrary',
- 'getlongarrayelements',
- 'getlongarrayregion',
- 'getlongfield',
- 'getmargins',
- 'getmethodid',
- 'getmode',
- 'getnameditem',
- 'getnameditemns',
- 'getnode',
- 'getnumericvalue',
- 'getobjectarrayelement',
- 'getobjectclass',
- 'getobjectfield',
- 'getpadding',
- 'getpagenumber',
- 'getparts',
- 'getprefs',
- 'getpropertyvalue',
- 'getprowcount',
- 'getpsfontname',
- 'getrange',
- 'getrowcount',
- 'getset',
- 'getshortarrayelements',
- 'getshortarrayregion',
- 'getshortfield',
- 'getsize',
- 'getsortfieldspart',
- 'getspacing',
- 'getstaticbooleanfield',
- 'getstaticbytefield',
- 'getstaticcharfield',
- 'getstaticdoublefield',
- 'getstaticfieldid',
- 'getstaticfloatfield',
- 'getstaticintfield',
- 'getstaticlongfield',
- 'getstaticmethodid',
- 'getstaticobjectfield',
- 'getstaticshortfield',
- 'getstatus',
- 'getstringchars',
- 'getstringlength',
- 'getstyle',
- 'getsupportedencodings',
- 'gettablebyid',
- 'gettext',
- 'gettextalignment',
- 'gettextsize',
- 'gettrigger',
- 'gettype',
- 'getunderline',
- 'getuniquealiasname',
- 'getuser',
- 'getuserbykey',
- 'getuserid',
- 'getversion',
- 'getzipfilebytes',
- 'givenblock',
- 'gmt',
- 'gotconnection',
- 'gotfileupload',
- 'groupby',
- 'groupcolumns',
- 'groupcount',
- 'groupjoin',
- 'handlebreakpointget',
- 'handlebreakpointlist',
- 'handlebreakpointremove',
- 'handlebreakpointset',
- 'handlebreakpointupdate',
- 'handlecontextget',
- 'handlecontextnames',
- 'handlecontinuation',
- 'handledefinitionbody',
- 'handledefinitionhead',
- 'handledefinitionresource',
- 'handledevconnection',
- 'handleevalexpired',
- 'handlefeatureget',
- 'handlefeatureset',
- 'handlelassoappcontent',
- 'handlelassoappresponse',
- 'handlenested',
- 'handlenormalconnection',
- 'handlepop',
- 'handleresource',
- 'handlesource',
- 'handlestackget',
- 'handlestderr',
- 'handlestdin',
- 'handlestdout',
- 'handshake',
- 'hasattribute',
- 'hasattributens',
- 'hasattributes',
- 'hasbinaryproperty',
- 'haschildnodes',
- 'hasexpired',
- 'hasfeature',
- 'hasfield',
- 'hash',
- 'hashtmlattr',
- 'hasmethod',
- 'hastable',
- 'hastrailingcomponent',
- 'hasvalue',
- 'head',
- 'header',
- 'headerbytes',
- 'headers',
- 'headersarray',
- 'headersmap',
- 'height',
- 'histogram',
- 'home',
- 'host',
- 'hostcolumnnames',
- 'hostcolumnnames2',
- 'hostcolumns',
- 'hostcolumns2',
- 'hostdatasource',
- 'hostextra',
- 'hostid',
- 'hostisdynamic',
- 'hostmap',
- 'hostmap2',
- 'hostname',
- 'hostpassword',
- 'hostport',
- 'hostschema',
- 'hosttableencoding',
- 'hosttonet16',
- 'hosttonet32',
- 'hosttonet64',
- 'hostusername',
- 'hour',
- 'hourofampm',
- 'hourofday',
- 'hoursbetween',
- 'href',
- 'hreflang',
- 'htmlcontent',
- 'htmlizestacktrace',
- 'htmlizestacktracelink',
- 'httpaccept',
- 'httpacceptencoding',
- 'httpacceptlanguage',
- 'httpauthorization',
- 'httpcachecontrol',
- 'httpconnection',
- 'httpcookie',
- 'httpequiv',
- 'httphost',
- 'httpreferer',
- 'httpreferrer',
- 'httpuseragent',
- 'hypot',
- 'id',
- 'idealinmemory',
- 'idle',
- 'idmap',
- 'ifempty',
- 'ifkey',
- 'ifnotempty',
- 'ifnotkey',
- 'ignorecase',
- 'ilogb',
- 'imgptr',
- 'implementation',
- 'import16bits',
- 'import32bits',
- 'import64bits',
- 'import8bits',
- 'importas',
- 'importbytes',
- 'importfdf',
- 'importnode',
- 'importpointer',
- 'importstring',
- 'in',
- 'include',
- 'includebytes',
- 'includelibrary',
- 'includelibraryonce',
- 'includeonce',
- 'includes',
- 'includestack',
- 'indaylighttime',
- 'index',
- 'init',
- 'initialize',
- 'initrequest',
- 'inits',
- 'inneroncompare',
- 'input',
- 'inputcolumns',
- 'inputtype',
- 'insert',
- 'insertback',
- 'insertbefore',
- 'insertdata',
- 'insertfirst',
- 'insertfrom',
- 'insertfront',
- 'insertinternal',
- 'insertlast',
- 'insertpage',
- 'install',
- 'installs',
- 'integer',
- 'internalsubset',
- 'interrupt',
- 'intersection',
- 'inttocond',
- 'invoke',
- 'invokeautocollect',
- 'invokeuntil',
- 'invokewhile',
- 'ioctl',
- 'isa',
- 'isalive',
- 'isallof',
- 'isalnum',
- 'isalpha',
- 'isanyof',
- 'isbase',
- 'isblank',
- 'iscntrl',
- 'isdigit',
- 'isdir',
- 'isdirectory',
- 'isempty',
- 'isemptyelement',
- 'isfirststep',
- 'isfullpath',
- 'isgraph',
- 'ishttps',
- 'isidle',
- 'isinstanceof',
- 'islink',
- 'islower',
- 'ismultipart',
- 'isnan',
- 'isnota',
- 'isnotempty',
- 'isnothing',
- 'iso3country',
- 'iso3language',
- 'isopen',
- 'isprint',
- 'ispunct',
- 'issameobject',
- 'isset',
- 'issourcefile',
- 'isspace',
- 'isssl',
- 'issupported',
- 'istitle',
- 'istruetype',
- 'istype',
- 'isualphabetic',
- 'isulowercase',
- 'isupper',
- 'isuuppercase',
- 'isuwhitespace',
- 'isvalid',
- 'iswhitespace',
- 'isxdigit',
- 'isxhr',
- 'item',
- 'j0',
- 'j1',
- 'javascript',
- 'jbarcode',
- 'jcolor',
- 'jfont',
- 'jimage',
- 'jlist',
- 'jn',
- 'jobjectisa',
- 'join',
- 'jread',
- 'jscontent',
- 'jsonfornode',
- 'jsonhtml',
- 'jsonisleaf',
- 'jsonlabel',
- 'jtable',
- 'jtext',
- 'julianday',
- 'kernel',
- 'key',
- 'keycolumns',
- 'keys',
- 'keywords',
- 'kill',
- 'label',
- 'lang',
- 'language',
- 'last_insert_rowid',
- 'last',
- 'lastaccessdate',
- 'lastaccesstime',
- 'lastchild',
- 'lastcomponent',
- 'lasterror',
- 'lastinsertid',
- 'lastnode',
- 'lastpoint',
- 'lasttouched',
- 'lazyvalue',
- 'ldexp',
- 'leaveopen',
- 'left',
- 'length',
- 'lgamma',
- 'line',
- 'linediffers',
- 'linkto',
- 'linktype',
- 'list',
- 'listactivedatasources',
- 'listalldatabases',
- 'listalltables',
- 'listdatabasetables',
- 'listdatasourcedatabases',
- 'listdatasourcehosts',
- 'listdatasources',
- 'listen',
- 'listgroups',
- 'listgroupsbyuser',
- 'listhostdatabases',
- 'listhosts',
- 'listmethods',
- 'listnode',
- 'listusers',
- 'listusersbygroup',
- 'loadcerts',
- 'loaddatasourcehostinfo',
- 'loaddatasourceinfo',
- 'loadlibrary',
- 'localaddress',
- 'localname',
- 'locals',
- 'lock',
- 'log',
- 'log10',
- 'log1p',
- 'logb',
- 'lookupnamespace',
- 'lop',
- 'lowagiefont',
- 'lowercase',
- 'makecolor',
- 'makecolumnlist',
- 'makecolumnmap',
- 'makecookieyumyum',
- 'makefullpath',
- 'makeinheritedcopy',
- 'makenonrelative',
- 'makeurl',
- 'map',
- 'marker',
- 'matches',
- 'matchesstart',
- 'matchposition',
- 'matchstring',
- 'matchtriggers',
- 'max',
- 'maxinmemory',
- 'maxlength',
- 'maxrows',
- 'maxworkers',
- 'maybeslash',
- 'maybevalue',
- 'md5hex',
- 'media',
- 'members',
- 'merge',
- 'meta',
- 'method',
- 'methodname',
- 'millisecond',
- 'millisecondsinday',
- 'mime_boundary',
- 'mime_contenttype',
- 'mime_hdrs',
- 'mime',
- 'mimes',
- 'min',
- 'minute',
- 'minutesbetween',
- 'moddatestr',
- 'mode',
- 'modf',
- 'modificationdate',
- 'modificationtime',
- 'modulate',
- 'monitorenter',
- 'monitorexit',
- 'month',
- 'moveto',
- 'movetoattribute',
- 'movetoattributenamespace',
- 'movetoelement',
- 'movetofirstattribute',
- 'movetonextattribute',
- 'msg',
- 'mtime',
- 'multiple',
- 'n',
- 'name',
- 'named',
- 'namespaceuri',
- 'needinitialization',
- 'net',
- 'nettohost16',
- 'nettohost32',
- 'nettohost64',
- 'new',
- 'newbooleanarray',
- 'newbytearray',
- 'newchararray',
- 'newdoublearray',
- 'newfloatarray',
- 'newglobalref',
- 'newintarray',
- 'newlongarray',
- 'newobject',
- 'newobjectarray',
- 'newshortarray',
- 'newstring',
- 'next',
- 'nextafter',
- 'nextnode',
- 'nextprime',
- 'nextprune',
- 'nextprunedelta',
- 'nextsibling',
- 'nodeforpath',
- 'nodelist',
- 'nodename',
- 'nodetype',
- 'nodevalue',
- 'noop',
- 'normalize',
- 'notationname',
- 'notations',
- 'novaluelists',
- 'numsets',
- 'object',
- 'objects',
- 'objecttype',
- 'onclick',
- 'oncompare',
- 'oncomparestrict',
- 'onconvert',
- 'oncreate',
- 'ondblclick',
- 'onkeydown',
- 'onkeypress',
- 'onkeyup',
- 'onmousedown',
- 'onmousemove',
- 'onmouseout',
- 'onmouseover',
- 'onmouseup',
- 'onreset',
- 'onsubmit',
- 'ontop',
- 'open',
- 'openappend',
- 'openread',
- 'opentruncate',
- 'openwith',
- 'openwrite',
- 'openwriteonly',
- 'orderby',
- 'orderbydescending',
- 'out',
- 'output',
- 'outputencoding',
- 'ownerdocument',
- 'ownerelement',
- 'padleading',
- 'padtrailing',
- 'padzero',
- 'pagecount',
- 'pagerotation',
- 'pagesize',
- 'param',
- 'paramdescs',
- 'params',
- 'parent',
- 'parentdir',
- 'parentnode',
- 'parse_body',
- 'parse_boundary',
- 'parse_charset',
- 'parse_content_disposition',
- 'parse_content_transfer_encoding',
- 'parse_content_type',
- 'parse_hdrs',
- 'parse_mode',
- 'parse_msg',
- 'parse_parts',
- 'parse_rawhdrs',
- 'parse',
- 'parseas',
- 'parsedocument',
- 'parsenumber',
- 'parseoneheaderline',
- 'pass',
- 'path',
- 'pathinfo',
- 'pathtouri',
- 'pathtranslated',
- 'pause',
- 'payload',
- 'pdifference',
- 'perform',
- 'performonce',
- 'perms',
- 'pid',
- 'pixel',
- 'pm',
- 'polldbg',
- 'pollide',
- 'pop_capa',
- 'pop_cmd',
- 'pop_debug',
- 'pop_err',
- 'pop_get',
- 'pop_ids',
- 'pop_index',
- 'pop_log',
- 'pop_mode',
- 'pop_net',
- 'pop_res',
- 'pop_server',
- 'pop_timeout',
- 'pop_token',
- 'pop',
- 'popctx',
- 'popinclude',
- 'populate',
- 'port',
- 'position',
- 'postdispatch',
- 'postparam',
- 'postparams',
- 'postparamsary',
- 'poststring',
- 'pow',
- 'predispatch',
- 'prefix',
- 'preflight',
- 'prepare',
- 'prepared',
- 'pretty',
- 'prev',
- 'previoussibling',
- 'printsimplemsg',
- 'private_compare',
- 'private_find',
- 'private_findlast',
- 'private_merge',
- 'private_rebalanceforinsert',
- 'private_rebalanceforremove',
- 'private_replaceall',
- 'private_replacefirst',
- 'private_rotateleft',
- 'private_rotateright',
- 'private_setrange',
- 'private_split',
- 'probemimetype',
- 'provides',
- 'proxying',
- 'prune',
- 'publicid',
- 'pullhttpheader',
- 'pullmimepost',
- 'pulloneheaderline',
- 'pullpost',
- 'pullrawpost',
- 'pullrawpostchunks',
- 'pullrequest',
- 'pullrequestline',
- 'push',
- 'pushctx',
- 'pushinclude',
- 'qdarray',
- 'qdcount',
- 'queryparam',
- 'queryparams',
- 'queryparamsary',
- 'querystring',
- 'queue_maintenance',
- 'queue_messages',
- 'queue_status',
- 'queue',
- 'quit',
- 'r',
- 'raw',
- 'rawcontent',
- 'rawdiff',
- 'rawheader',
- 'rawheaders',
- 'rawinvokable',
- 'read',
- 'readattributevalue',
- 'readbytes',
- 'readbytesfully',
- 'readdestinations',
- 'readerror',
- 'readidobjects',
- 'readline',
- 'readmessage',
- 'readnumber',
- 'readobject',
- 'readobjecttcp',
- 'readpacket',
- 'readsomebytes',
- 'readstring',
- 'ready',
- 'realdoc',
- 'realpath',
- 'receivefd',
- 'recipients',
- 'recover',
- 'rect',
- 'rectype',
- 'red',
- 'redirectto',
- 'referrals',
- 'refid',
- 'refobj',
- 'refresh',
- 'rel',
- 'remainder',
- 'remoteaddr',
- 'remoteaddress',
- 'remoteport',
- 'remove',
- 'removeall',
- 'removeattribute',
- 'removeattributenode',
- 'removeattributens',
- 'removeback',
- 'removechild',
- 'removedatabasetable',
- 'removedatasource',
- 'removedatasourcedatabase',
- 'removedatasourcehost',
- 'removefield',
- 'removefirst',
- 'removefront',
- 'removegroup',
- 'removelast',
- 'removeleading',
- 'removenameditem',
- 'removenameditemns',
- 'removenode',
- 'removesubnode',
- 'removetrailing',
- 'removeuser',
- 'removeuserfromallgroups',
- 'removeuserfromgroup',
- 'rename',
- 'renderbytes',
- 'renderdocumentbytes',
- 'renderstring',
- 'replace',
- 'replaceall',
- 'replacechild',
- 'replacedata',
- 'replacefirst',
- 'replaceheader',
- 'replacepattern',
- 'representnode',
- 'representnoderesult',
- 'reqid',
- 'requestid',
- 'requestmethod',
- 'requestparams',
- 'requesturi',
- 'requires',
- 'reserve',
- 'reset',
- 'resize',
- 'resolutionh',
- 'resolutionv',
- 'resolvelinks',
- 'resourcedata',
- 'resourceinvokable',
- 'resourcename',
- 'resources',
- 'respond',
- 'restart',
- 'restname',
- 'result',
- 'results',
- 'resume',
- 'retr',
- 'retrieve',
- 'returncolumns',
- 'returntype',
- 'rev',
- 'reverse',
- 'rewind',
- 'right',
- 'rint',
- 'roll',
- 'root',
- 'rootmap',
- 'rotate',
- 'route',
- 'rowsfound',
- 'rset',
- 'rule',
- 'rules',
- 'run',
- 'running',
- 'runonce',
- 's',
- 'sa',
- 'safeexport8bits',
- 'sameas',
- 'save',
- 'savedata',
- 'scalb',
- 'scale',
- 'scanfordatasource',
- 'scantasks',
- 'scanworkers',
- 'schemaname',
- 'scheme',
- 'script',
- 'scriptextensions',
- 'scriptfilename',
- 'scriptname',
- 'scripttype',
- 'scripturi',
- 'scripturl',
- 'scrubkeywords',
- 'search',
- 'searchinbucket',
- 'searchurl',
- 'second',
- 'secondsbetween',
- 'seek',
- 'select',
- 'selected',
- 'selectmany',
- 'self',
- 'send',
- 'sendchunk',
- 'sendfd',
- 'sendfile',
- 'sendpacket',
- 'sendresponse',
- 'separator',
- 'serializationelements',
- 'serialize',
- 'serveraddr',
- 'serveradmin',
- 'servername',
- 'serverport',
- 'serverprotocol',
- 'serversignature',
- 'serversoftware',
- 'sessionsdump',
- 'sessionsmap',
- 'set',
- 'setalignment',
- 'setattr',
- 'setattribute',
- 'setattributenode',
- 'setattributenodens',
- 'setattributens',
- 'setbarheight',
- 'setbarmultiplier',
- 'setbarwidth',
- 'setbaseline',
- 'setbold',
- 'setbooleanarrayregion',
- 'setbooleanfield',
- 'setbordercolor',
- 'setborderwidth',
- 'setbytearrayregion',
- 'setbytefield',
- 'setchararrayregion',
- 'setcharfield',
- 'setcode',
- 'setcolor',
- 'setcolorspace',
- 'setcookie',
- 'setcwd',
- 'setdefaultstorage',
- 'setdestination',
- 'setdoublearrayregion',
- 'setdoublefield',
- 'setencoding',
- 'setface',
- 'setfieldvalue',
- 'setfindpattern',
- 'setfloatarrayregion',
- 'setfloatfield',
- 'setfont',
- 'setformat',
- 'setgeneratechecksum',
- 'setheaders',
- 'sethtmlattr',
- 'setignorecase',
- 'setinput',
- 'setintarrayregion',
- 'setintfield',
- 'setitalic',
- 'setlinewidth',
- 'setlongarrayregion',
- 'setlongfield',
- 'setmarker',
- 'setmaxfilesize',
- 'setmode',
- 'setname',
- 'setnameditem',
- 'setnameditemns',
- 'setobjectarrayelement',
- 'setobjectfield',
- 'setpadding',
- 'setpagenumber',
- 'setpagerange',
- 'setposition',
- 'setrange',
- 'setreplacepattern',
- 'setshortarrayregion',
- 'setshortfield',
- 'setshowchecksum',
- 'setsize',
- 'setspacing',
- 'setstaticbooleanfield',
- 'setstaticbytefield',
- 'setstaticcharfield',
- 'setstaticdoublefield',
- 'setstaticfloatfield',
- 'setstaticintfield',
- 'setstaticlongfield',
- 'setstaticobjectfield',
- 'setstaticshortfield',
- 'setstatus',
- 'settextalignment',
- 'settextsize',
- 'settimezone',
- 'settrait',
- 'setunderline',
- 'sharpen',
- 'shouldabort',
- 'shouldclose',
- 'showchecksum',
- 'showcode39startstop',
- 'showeanguardbars',
- 'shutdownrd',
- 'shutdownrdwr',
- 'shutdownwr',
- 'sin',
- 'sinh',
- 'size',
- 'skip',
- 'skiprows',
- 'sort',
- 'sortcolumns',
- 'source',
- 'sourcecolumn',
- 'sourcefile',
- 'sourceline',
- 'specified',
- 'split',
- 'splitconnection',
- 'splitdebuggingthread',
- 'splitextension',
- 'splittext',
- 'splitthread',
- 'splittoprivatedev',
- 'splituppath',
- 'sql',
- 'sqlite3',
- 'sqrt',
- 'src',
- 'srcpath',
- 'sslerrfail',
- 'stack',
- 'standby',
- 'start',
- 'startone',
- 'startup',
- 'stat',
- 'statement',
- 'statementonly',
- 'stats',
- 'status',
- 'statuscode',
- 'statusmsg',
- 'stdin',
- 'step',
- 'stls',
- 'stop',
- 'stoprunning',
- 'storedata',
- 'stripfirstcomponent',
- 'striplastcomponent',
- 'style',
- 'styletype',
- 'sub',
- 'subject',
- 'subnode',
- 'subnodes',
- 'substringdata',
- 'subtract',
- 'subtraits',
- 'sum',
- 'supportscontentrepresentation',
- 'swapbytes',
- 'systemid',
- 't',
- 'tabindex',
- 'table',
- 'tablecolumnnames',
- 'tablecolumns',
- 'tablehascolumn',
- 'tableizestacktrace',
- 'tableizestacktracelink',
- 'tablemap',
- 'tablename',
- 'tables',
- 'tabs',
- 'tabstr',
- 'tag',
- 'tagname',
- 'take',
- 'tan',
- 'tanh',
- 'target',
- 'tasks',
- 'tb',
- 'tell',
- 'testexitcode',
- 'testlock',
- 'textwidth',
- 'thenby',
- 'thenbydescending',
- 'threadreaddesc',
- 'throw',
- 'thrownew',
- 'time',
- 'timezone',
- 'title',
- 'titlecase',
- 'to',
- 'token',
- 'tolower',
- 'top',
- 'toreflectedfield',
- 'toreflectedmethod',
- 'total_changes',
- 'totitle',
- 'touch',
- 'toupper',
- 'toxmlstring',
- 'trace',
- 'trackingid',
- 'trait',
- 'transform',
- 'trigger',
- 'trim',
- 'trunk',
- 'tryfinderrorfile',
- 'trylock',
- 'tryreadobject',
- 'type',
- 'typename',
- 'uidl',
- 'uncompress',
- 'unescape',
- 'union',
- 'uniqueid',
- 'unlock',
- 'unspool',
- 'up',
- 'update',
- 'updategroup',
- 'upload',
- 'uppercase',
- 'url',
- 'used',
- 'usemap',
- 'user',
- 'usercolumns',
- 'valid',
- 'validate',
- 'validatesessionstable',
- 'value',
- 'values',
- 'valuetype',
- 'variant',
- 'version',
- 'wait',
- 'waitforcompletion',
- 'warnings',
- 'week',
- 'weekofmonth',
- 'weekofyear',
- 'where',
- 'width',
- 'workers',
- 'workinginputcolumns',
- 'workingkeycolumns',
- 'workingkeyfield_name',
- 'workingreturncolumns',
- 'workingsortcolumns',
- 'write',
- 'writebodybytes',
- 'writebytes',
- 'writeheader',
- 'writeheaderbytes',
- 'writeheaderline',
- 'writeid',
- 'writemessage',
- 'writeobject',
- 'writeobjecttcp',
- 'writestring',
- 'wroteheaders',
- 'xhtml',
- 'xmllang',
- 'y0',
- 'y1',
- 'year',
- 'yearwoy',
- 'yn',
- 'z',
- 'zip',
- 'zipfile',
- 'zipfilename',
- 'zipname',
- 'zips',
- 'zoneoffset',
- ),
- 'Lasso 8 Member Tags': (
- 'accept',
- 'add',
- 'addattachment',
- 'addattribute',
- 'addbarcode',
- 'addchapter',
- 'addcheckbox',
- 'addchild',
- 'addcombobox',
- 'addcomment',
- 'addcontent',
- 'addhiddenfield',
- 'addhtmlpart',
- 'addimage',
- 'addjavascript',
- 'addlist',
- 'addnamespace',
- 'addnextsibling',
- 'addpage',
- 'addparagraph',
- 'addparenttype',
- 'addpart',
- 'addpasswordfield',
- 'addphrase',
- 'addprevsibling',
- 'addradiobutton',
- 'addradiogroup',
- 'addresetbutton',
- 'addsection',
- 'addselectlist',
- 'addsibling',
- 'addsubmitbutton',
- 'addtable',
- 'addtext',
- 'addtextarea',
- 'addtextfield',
- 'addtextpart',
- 'alarms',
- 'annotate',
- 'answer',
- 'append',
- 'appendreplacement',
- 'appendtail',
- 'arc',
- 'asasync',
- 'astype',
- 'atbegin',
- 'atbottom',
- 'atend',
- 'atfarleft',
- 'atfarright',
- 'attop',
- 'attributecount',
- 'attributes',
- 'authenticate',
- 'authorize',
- 'backward',
- 'baseuri',
- 'bcc',
- 'beanproperties',
- 'beginswith',
- 'bind',
- 'bitand',
- 'bitclear',
- 'bitflip',
- 'bitformat',
- 'bitnot',
- 'bitor',
- 'bitset',
- 'bitshiftleft',
- 'bitshiftright',
- 'bittest',
- 'bitxor',
- 'blur',
- 'body',
- 'boundary',
- 'bytes',
- 'call',
- 'cancel',
- 'capabilities',
- 'cc',
- 'chardigitvalue',
- 'charname',
- 'charset',
- 'chartype',
- 'children',
- 'circle',
- 'close',
- 'closepath',
- 'closewrite',
- 'code',
- 'colorspace',
- 'command',
- 'comments',
- 'compare',
- 'comparecodepointorder',
- 'compile',
- 'composite',
- 'connect',
- 'contains',
- 'content_disposition',
- 'content_transfer_encoding',
- 'content_type',
- 'contents',
- 'contrast',
- 'convert',
- 'crop',
- 'curveto',
- 'data',
- 'date',
- 'day',
- 'daylights',
- 'dayofweek',
- 'dayofyear',
- 'decrement',
- 'delete',
- 'depth',
- 'describe',
- 'description',
- 'deserialize',
- 'detach',
- 'detachreference',
- 'difference',
- 'digit',
- 'document',
- 'down',
- 'drawtext',
- 'dst',
- 'dump',
- 'endswith',
- 'enhance',
- 'equals',
- 'errors',
- 'eval',
- 'events',
- 'execute',
- 'export16bits',
- 'export32bits',
- 'export64bits',
- 'export8bits',
- 'exportfdf',
- 'exportstring',
- 'extract',
- 'extractone',
- 'fieldnames',
- 'fieldtype',
- 'fieldvalue',
- 'file',
- 'find',
- 'findindex',
- 'findnamespace',
- 'findnamespacebyhref',
- 'findpattern',
- 'findposition',
- 'first',
- 'firstchild',
- 'fliph',
- 'flipv',
- 'flush',
- 'foldcase',
- 'foreach',
- 'format',
- 'forward',
- 'freebusies',
- 'freezetype',
- 'freezevalue',
- 'from',
- 'fulltype',
- 'generatechecksum',
- 'get',
- 'getabswidth',
- 'getalignment',
- 'getattribute',
- 'getattributenamespace',
- 'getbarheight',
- 'getbarmultiplier',
- 'getbarwidth',
- 'getbaseline',
- 'getbordercolor',
- 'getborderwidth',
- 'getcode',
- 'getcolor',
- 'getcolumncount',
- 'getencoding',
- 'getface',
- 'getfont',
- 'getformat',
- 'getfullfontname',
- 'getheaders',
- 'getmargins',
- 'getmethod',
- 'getnumericvalue',
- 'getpadding',
- 'getpagenumber',
- 'getparams',
- 'getproperty',
- 'getpsfontname',
- 'getrange',
- 'getrowcount',
- 'getsize',
- 'getspacing',
- 'getsupportedencodings',
- 'gettextalignment',
- 'gettextsize',
- 'gettype',
- 'gmt',
- 'groupcount',
- 'hasattribute',
- 'haschildren',
- 'hasvalue',
- 'header',
- 'headers',
- 'height',
- 'histogram',
- 'hosttonet16',
- 'hosttonet32',
- 'hour',
- 'id',
- 'ignorecase',
- 'import16bits',
- 'import32bits',
- 'import64bits',
- 'import8bits',
- 'importfdf',
- 'importstring',
- 'increment',
- 'input',
- 'insert',
- 'insertatcurrent',
- 'insertfirst',
- 'insertfrom',
- 'insertlast',
- 'insertpage',
- 'integer',
- 'intersection',
- 'invoke',
- 'isa',
- 'isalnum',
- 'isalpha',
- 'isbase',
- 'iscntrl',
- 'isdigit',
- 'isemptyelement',
- 'islower',
- 'isopen',
- 'isprint',
- 'isspace',
- 'istitle',
- 'istruetype',
- 'isualphabetic',
- 'isulowercase',
- 'isupper',
- 'isuuppercase',
- 'isuwhitespace',
- 'iswhitespace',
- 'iterator',
- 'javascript',
- 'join',
- 'journals',
- 'key',
- 'keys',
- 'last',
- 'lastchild',
- 'lasterror',
- 'left',
- 'length',
- 'line',
- 'listen',
- 'localaddress',
- 'localname',
- 'lock',
- 'lookupnamespace',
- 'lowercase',
- 'marker',
- 'matches',
- 'matchesstart',
- 'matchposition',
- 'matchstring',
- 'merge',
- 'millisecond',
- 'minute',
- 'mode',
- 'modulate',
- 'month',
- 'moveto',
- 'movetoattributenamespace',
- 'movetoelement',
- 'movetofirstattribute',
- 'movetonextattribute',
- 'name',
- 'namespaces',
- 'namespaceuri',
- 'nettohost16',
- 'nettohost32',
- 'newchild',
- 'next',
- 'nextsibling',
- 'nodetype',
- 'open',
- 'output',
- 'padleading',
- 'padtrailing',
- 'pagecount',
- 'pagesize',
- 'paraminfo',
- 'params',
- 'parent',
- 'path',
- 'pixel',
- 'position',
- 'prefix',
- 'previoussibling',
- 'properties',
- 'rawheaders',
- 'read',
- 'readattributevalue',
- 'readerror',
- 'readfrom',
- 'readline',
- 'readlock',
- 'readstring',
- 'readunlock',
- 'recipients',
- 'rect',
- 'refcount',
- 'referrals',
- 'remoteaddress',
- 'remove',
- 'removeall',
- 'removeattribute',
- 'removechild',
- 'removecurrent',
- 'removefirst',
- 'removelast',
- 'removeleading',
- 'removenamespace',
- 'removetrailing',
- 'render',
- 'replace',
- 'replaceall',
- 'replacefirst',
- 'replacepattern',
- 'replacewith',
- 'reserve',
- 'reset',
- 'resolutionh',
- 'resolutionv',
- 'response',
- 'results',
- 'retrieve',
- 'returntype',
- 'reverse',
- 'reverseiterator',
- 'right',
- 'rotate',
- 'run',
- 'save',
- 'scale',
- 'search',
- 'second',
- 'send',
- 'serialize',
- 'set',
- 'setalignment',
- 'setbarheight',
- 'setbarmultiplier',
- 'setbarwidth',
- 'setbaseline',
- 'setblocking',
- 'setbordercolor',
- 'setborderwidth',
- 'setbytes',
- 'setcode',
- 'setcolor',
- 'setcolorspace',
- 'setdatatype',
- 'setencoding',
- 'setface',
- 'setfieldvalue',
- 'setfont',
- 'setformat',
- 'setgeneratechecksum',
- 'setheight',
- 'setlassodata',
- 'setlinewidth',
- 'setmarker',
- 'setmode',
- 'setname',
- 'setpadding',
- 'setpagenumber',
- 'setpagerange',
- 'setposition',
- 'setproperty',
- 'setrange',
- 'setshowchecksum',
- 'setsize',
- 'setspacing',
- 'settemplate',
- 'settemplatestr',
- 'settextalignment',
- 'settextdata',
- 'settextsize',
- 'settype',
- 'setunderline',
- 'setwidth',
- 'setxmldata',
- 'sharpen',
- 'showchecksum',
- 'showcode39startstop',
- 'showeanguardbars',
- 'signal',
- 'signalall',
- 'size',
- 'smooth',
- 'sort',
- 'sortwith',
- 'split',
- 'standards',
- 'steal',
- 'subject',
- 'substring',
- 'subtract',
- 'swapbytes',
- 'textwidth',
- 'time',
- 'timezones',
- 'titlecase',
- 'to',
- 'todos',
- 'tolower',
- 'totitle',
- 'toupper',
- 'transform',
- 'trim',
- 'type',
- 'unescape',
- 'union',
- 'uniqueid',
- 'unlock',
- 'unserialize',
- 'up',
- 'uppercase',
- 'value',
- 'values',
- 'valuetype',
- 'wait',
- 'waskeyword',
- 'week',
- 'width',
- 'write',
- 'writelock',
- 'writeto',
- 'writeunlock',
- 'xmllang',
- 'xmlschematype',
- 'year',
- )
-}
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_lilypond_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_lilypond_builtins.py
deleted file mode 100644
index 013c85c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_lilypond_builtins.py
+++ /dev/null
@@ -1,4932 +0,0 @@
-"""
- pygments.lexers._lilypond_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- LilyPond builtins.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Contents generated by the script lilypond-builtins-generator.ly
-# found in the external/ directory of the source tree.
-
-keywords = [
- "accepts",
- "addlyrics",
- "alias",
- "book",
- "bookpart",
- "chordmode",
- "chords",
- "consists",
- "context",
- "defaultchild",
- "denies",
- "description",
- "drummode",
- "drums",
- "etc",
- "figuremode",
- "figures",
- "header",
- "include",
- "inherit-acceptability",
- "language",
- "layout",
- "lyricmode",
- "lyricsto",
- "midi",
- "name",
- "new",
- "notemode",
- "paper",
- "remove",
- "score",
- "type",
- "version",
- "with",
-]
-
-clefs = [
- "C",
- "F",
- "G",
- "G2",
- "GG",
- "alto",
- "altovarC",
- "baritone",
- "baritonevarC",
- "baritonevarF",
- "bass",
- "blackmensural-c1",
- "blackmensural-c2",
- "blackmensural-c3",
- "blackmensural-c4",
- "blackmensural-c5",
- "french",
- "hufnagel-do-fa",
- "hufnagel-do1",
- "hufnagel-do2",
- "hufnagel-do3",
- "hufnagel-fa1",
- "hufnagel-fa2",
- "kievan-do",
- "medicaea-do1",
- "medicaea-do2",
- "medicaea-do3",
- "medicaea-fa1",
- "medicaea-fa2",
- "mensural-c1",
- "mensural-c2",
- "mensural-c3",
- "mensural-c4",
- "mensural-c5",
- "mensural-f",
- "mensural-g",
- "mezzosoprano",
- "moderntab",
- "neomensural-c1",
- "neomensural-c2",
- "neomensural-c3",
- "neomensural-c4",
- "neomensural-c5",
- "percussion",
- "petrucci-c1",
- "petrucci-c2",
- "petrucci-c3",
- "petrucci-c4",
- "petrucci-c5",
- "petrucci-f",
- "petrucci-f2",
- "petrucci-f3",
- "petrucci-f4",
- "petrucci-f5",
- "petrucci-g",
- "petrucci-g1",
- "petrucci-g2",
- "soprano",
- "subbass",
- "tab",
- "tenor",
- "tenorG",
- "tenorvarC",
- "treble",
- "varC",
- "varbaritone",
- "varpercussion",
- "vaticana-do1",
- "vaticana-do2",
- "vaticana-do3",
- "vaticana-fa1",
- "vaticana-fa2",
- "violin",
-]
-
-scales = [
- "aeolian",
- "dorian",
- "ionian",
- "locrian",
- "lydian",
- "major",
- "minor",
- "mixolydian",
- "phrygian",
-]
-
-repeat_types = [
- "percent",
- "segno",
- "tremolo",
- "unfold",
- "volta",
-]
-
-units = [
- "cm",
- "in",
- "mm",
- "pt",
- "staff-space",
-]
-
-chord_modifiers = [
- "aug",
- "dim",
- "m",
- "maj",
-]
-
-pitch_language_names = [
- "arabic",
- "catalan",
- "català",
- "deutsch",
- "english",
- "espanol",
- "español",
- "français",
- "italiano",
- "nederlands",
- "norsk",
- "portugues",
- "português",
- "suomi",
- "svenska",
- "vlaams",
-]
-
-pitches = [
- "R",
- "a",
- "a-flat",
- "a-flatflat",
- "a-natural",
- "a-sharp",
- "a-sharpsharp",
- "ab",
- "acousticbassdrum",
- "acousticsnare",
- "ad",
- "adb",
- "add",
- "aeh",
- "aes",
- "aeseh",
- "aeses",
- "aess",
- "aesseh",
- "aessess",
- "af",
- "aff",
- "afhb",
- "afhd",
- "agh",
- "agl",
- "ah",
- "aih",
- "ais",
- "aisih",
- "aisis",
- "aiss",
- "aissih",
- "aississ",
- "aqf",
- "aqs",
- "as",
- "asah",
- "asas",
- "aseh",
- "ases",
- "ashb",
- "ashd",
- "ass",
- "asseh",
- "assess",
- "atqb",
- "atqd",
- "atqf",
- "atqs",
- "ax",
- "b",
- "b-flat",
- "b-flatflat",
- "b-natural",
- "b-sharp",
- "b-sharpsharp",
- "bassdrum",
- "bb",
- "bd",
- "bda",
- "bdb",
- "bdd",
- "beh",
- "bes",
- "beseh",
- "beses",
- "bess",
- "bf",
- "bff",
- "bfhb",
- "bfhd",
- "bih",
- "bis",
- "bisih",
- "bisis",
- "boh",
- "bohm",
- "boho",
- "bol",
- "bolm",
- "bolo",
- "bqf",
- "bqs",
- "bs",
- "bshb",
- "bshd",
- "bss",
- "btqb",
- "btqd",
- "btqf",
- "btqs",
- "bx",
- "c",
- "c-flat",
- "c-flatflat",
- "c-natural",
- "c-sharp",
- "c-sharpsharp",
- "cab",
- "cabasa",
- "cb",
- "cd",
- "cdb",
- "cdd",
- "ceh",
- "ces",
- "ceseh",
- "ceses",
- "cess",
- "cesseh",
- "cessess",
- "cf",
- "cff",
- "cfhb",
- "cfhd",
- "cgh",
- "cghm",
- "cgho",
- "cgl",
- "cglm",
- "cglo",
- "chinesecymbal",
- "cih",
- "cis",
- "cisih",
- "cisis",
- "ciss",
- "cissih",
- "cississ",
- "cl",
- "claves",
- "closedhihat",
- "cowbell",
- "cqf",
- "cqs",
- "crashcymbal",
- "crashcymbala",
- "crashcymbalb",
- "cs",
- "cshb",
- "cshd",
- "css",
- "ctqb",
- "ctqd",
- "ctqf",
- "ctqs",
- "cuim",
- "cuio",
- "cx",
- "cymc",
- "cymca",
- "cymcb",
- "cymch",
- "cymr",
- "cymra",
- "cymrb",
- "cyms",
- "d",
- "d-flat",
- "d-flatflat",
- "d-natural",
- "d-sharp",
- "d-sharpsharp",
- "db",
- "dd",
- "ddb",
- "ddd",
- "deh",
- "des",
- "deseh",
- "deses",
- "dess",
- "desseh",
- "dessess",
- "df",
- "dff",
- "dfhb",
- "dfhd",
- "dih",
- "dis",
- "disih",
- "disis",
- "diss",
- "dissih",
- "dississ",
- "do",
- "dob",
- "dobb",
- "dobhb",
- "dobqt",
- "dobsb",
- "dobtqt",
- "docb",
- "docs",
- "dod",
- "dodd",
- "dodsd",
- "dohb",
- "dohk",
- "dok",
- "dokhk",
- "dokk",
- "doqb",
- "doqd",
- "doqs",
- "dos",
- "dosb",
- "dosd",
- "dosqt",
- "doss",
- "dostqt",
- "dotcb",
- "dotcs",
- "dotqb",
- "dotqd",
- "dotqs",
- "dox",
- "dqf",
- "dqs",
- "ds",
- "dshb",
- "dshd",
- "dss",
- "dtqb",
- "dtqd",
- "dtqf",
- "dtqs",
- "dx",
- "e",
- "e-flat",
- "e-flatflat",
- "e-natural",
- "e-sharp",
- "e-sharpsharp",
- "eb",
- "ed",
- "edb",
- "edd",
- "eeh",
- "ees",
- "eeseh",
- "eeses",
- "eess",
- "eesseh",
- "eessess",
- "ef",
- "eff",
- "efhb",
- "efhd",
- "eh",
- "eih",
- "eis",
- "eisih",
- "eisis",
- "eiss",
- "eissih",
- "eississ",
- "electricsnare",
- "eqf",
- "eqs",
- "es",
- "eseh",
- "eses",
- "eshb",
- "eshd",
- "ess",
- "esseh",
- "essess",
- "etqb",
- "etqd",
- "etqf",
- "etqs",
- "ex",
- "f",
- "f-flat",
- "f-flatflat",
- "f-natural",
- "f-sharp",
- "f-sharpsharp",
- "fa",
- "fab",
- "fabb",
- "fabhb",
- "fabqt",
- "fabsb",
- "fabtqt",
- "facb",
- "facs",
- "fad",
- "fadd",
- "fadsd",
- "fahb",
- "fahk",
- "fak",
- "fakhk",
- "fakk",
- "faqb",
- "faqd",
- "faqs",
- "fas",
- "fasb",
- "fasd",
- "fasqt",
- "fass",
- "fastqt",
- "fatcb",
- "fatcs",
- "fatqb",
- "fatqd",
- "fatqs",
- "fax",
- "fb",
- "fd",
- "fdb",
- "fdd",
- "feh",
- "fes",
- "feseh",
- "feses",
- "fess",
- "fesseh",
- "fessess",
- "ff",
- "fff",
- "ffhb",
- "ffhd",
- "fih",
- "fis",
- "fisih",
- "fisis",
- "fiss",
- "fissih",
- "fississ",
- "fqf",
- "fqs",
- "fs",
- "fshb",
- "fshd",
- "fss",
- "ftqb",
- "ftqd",
- "ftqf",
- "ftqs",
- "fx",
- "g",
- "g-flat",
- "g-flatflat",
- "g-natural",
- "g-sharp",
- "g-sharpsharp",
- "gb",
- "gd",
- "gdb",
- "gdd",
- "geh",
- "ges",
- "geseh",
- "geses",
- "gess",
- "gesseh",
- "gessess",
- "gf",
- "gff",
- "gfhb",
- "gfhd",
- "gih",
- "gis",
- "gisih",
- "gisis",
- "giss",
- "gissih",
- "gississ",
- "gqf",
- "gqs",
- "gs",
- "gshb",
- "gshd",
- "gss",
- "gtqb",
- "gtqd",
- "gtqf",
- "gtqs",
- "gui",
- "guil",
- "guiro",
- "guis",
- "gx",
- "h",
- "halfopenhihat",
- "handclap",
- "hc",
- "heh",
- "heseh",
- "heses",
- "hesseh",
- "hessess",
- "hh",
- "hhc",
- "hhho",
- "hho",
- "hhp",
- "hiagogo",
- "hibongo",
- "hiconga",
- "highfloortom",
- "hightom",
- "hih",
- "hihat",
- "himidtom",
- "his",
- "hisidestick",
- "hisih",
- "hisis",
- "hiss",
- "hissih",
- "hississ",
- "hitimbale",
- "hiwoodblock",
- "la",
- "lab",
- "labb",
- "labhb",
- "labqt",
- "labsb",
- "labtqt",
- "lacb",
- "lacs",
- "lad",
- "ladd",
- "ladsd",
- "lahb",
- "lahk",
- "lak",
- "lakhk",
- "lakk",
- "laqb",
- "laqd",
- "laqs",
- "las",
- "lasb",
- "lasd",
- "lasqt",
- "lass",
- "lastqt",
- "latcb",
- "latcs",
- "latqb",
- "latqd",
- "latqs",
- "lax",
- "loagogo",
- "lobongo",
- "loconga",
- "longguiro",
- "longwhistle",
- "losidestick",
- "lotimbale",
- "lowfloortom",
- "lowmidtom",
- "lowoodblock",
- "lowtom",
- "mar",
- "maracas",
- "mi",
- "mib",
- "mibb",
- "mibhb",
- "mibqt",
- "mibsb",
- "mibtqt",
- "micb",
- "mics",
- "mid",
- "midd",
- "midsd",
- "mihb",
- "mihk",
- "mik",
- "mikhk",
- "mikk",
- "miqb",
- "miqd",
- "miqs",
- "mis",
- "misb",
- "misd",
- "misqt",
- "miss",
- "mistqt",
- "mitcb",
- "mitcs",
- "mitqb",
- "mitqd",
- "mitqs",
- "mix",
- "mutecuica",
- "mutehibongo",
- "mutehiconga",
- "mutelobongo",
- "muteloconga",
- "mutetriangle",
- "opencuica",
- "openhibongo",
- "openhiconga",
- "openhihat",
- "openlobongo",
- "openloconga",
- "opentriangle",
- "pedalhihat",
- "r",
- "rb",
- "re",
- "reb",
- "rebb",
- "rebhb",
- "rebqt",
- "rebsb",
- "rebtqt",
- "recb",
- "recs",
- "red",
- "redd",
- "redsd",
- "rehb",
- "rehk",
- "rek",
- "rekhk",
- "rekk",
- "reqb",
- "reqd",
- "reqs",
- "res",
- "resb",
- "resd",
- "resqt",
- "ress",
- "restqt",
- "retcb",
- "retcs",
- "retqb",
- "retqd",
- "retqs",
- "rex",
- "ridebell",
- "ridecymbal",
- "ridecymbala",
- "ridecymbalb",
- "ré",
- "réb",
- "rébb",
- "rébsb",
- "réd",
- "rédd",
- "rédsd",
- "résb",
- "résd",
- "réx",
- "shortguiro",
- "shortwhistle",
- "si",
- "sib",
- "sibb",
- "sibhb",
- "sibqt",
- "sibsb",
- "sibtqt",
- "sicb",
- "sics",
- "sid",
- "sidd",
- "sidestick",
- "sidsd",
- "sihb",
- "sihk",
- "sik",
- "sikhk",
- "sikk",
- "siqb",
- "siqd",
- "siqs",
- "sis",
- "sisb",
- "sisd",
- "sisqt",
- "siss",
- "sistqt",
- "sitcb",
- "sitcs",
- "sitqb",
- "sitqd",
- "sitqs",
- "six",
- "sn",
- "sna",
- "snare",
- "sne",
- "sol",
- "solb",
- "solbb",
- "solbhb",
- "solbqt",
- "solbsb",
- "solbtqt",
- "solcb",
- "solcs",
- "sold",
- "soldd",
- "soldsd",
- "solhb",
- "solhk",
- "solk",
- "solkhk",
- "solkk",
- "solqb",
- "solqd",
- "solqs",
- "sols",
- "solsb",
- "solsd",
- "solsqt",
- "solss",
- "solstqt",
- "soltcb",
- "soltcs",
- "soltqb",
- "soltqd",
- "soltqs",
- "solx",
- "splashcymbal",
- "ss",
- "ssh",
- "ssl",
- "tamb",
- "tambourine",
- "timh",
- "timl",
- "tomfh",
- "tomfl",
- "tomh",
- "toml",
- "tommh",
- "tomml",
- "tri",
- "triangle",
- "trim",
- "trio",
- "tt",
- "vibraslap",
- "vibs",
- "wbh",
- "wbl",
- "whl",
- "whs",
-]
-
-music_functions = [
- "=",
- "absolute",
- "acciaccatura",
- "accidentalStyle",
- "addChordShape",
- "addInstrumentDefinition",
- "addQuote",
- "after",
- "afterGrace",
- "allowPageTurn",
- "allowVoltaHook",
- "alterBroken",
- "alternative",
- "ambitusAfter",
- "appendToTag",
- "applyContext",
- "applyMusic",
- "applyOutput",
- "appoggiatura",
- "assertBeamQuant",
- "assertBeamSlope",
- "autoChange",
- "balloonGrobText",
- "balloonText",
- "bar",
- "barNumberCheck",
- "beamExceptions",
- "bendAfter",
- "bendHold",
- "bendStartLevel",
- "bookOutputName",
- "bookOutputSuffix",
- "breathe",
- "caesura",
- "change",
- "chordRepeats",
- "clef",
- "codaMark",
- "compoundMeter",
- "compressMMRests",
- "crossStaff",
- "cueClef",
- "cueClefUnset",
- "cueDuring",
- "cueDuringWithClef",
- "deadNote",
- "defineBarLine",
- "displayLilyMusic",
- "displayMusic",
- "displayScheme",
- "dropNote",
- "enablePolymeter",
- "endSpanners",
- "eventChords",
- "featherDurations",
- "finger",
- "fixed",
- "footnote",
- "grace",
- "grobdescriptions",
- "harmonicByFret",
- "harmonicByRatio",
- "harmonicNote",
- "harmonicsOn",
- "hide",
- "inStaffSegno",
- "incipit",
- "inherit-acceptability",
- "instrumentSwitch",
- "inversion",
- "invertChords",
- "jump",
- "keepWithTag",
- "key",
- "killCues",
- "label",
- "language",
- "languageRestore",
- "languageSaveAndChange",
- "magnifyMusic",
- "magnifyStaff",
- "makeClusters",
- "makeDefaultStringTuning",
- "mark",
- "markupMap",
- "modalInversion",
- "modalTranspose",
- "musicMap",
- "noPageBreak",
- "noPageTurn",
- "octaveCheck",
- "offset",
- "omit",
- "once",
- "ottava",
- "override",
- "overrideProperty",
- "overrideTimeSignatureSettings",
- "pageBreak",
- "pageTurn",
- "palmMute",
- "palmMuteOn",
- "parallelMusic",
- "parenthesize",
- "partCombine",
- "partCombineDown",
- "partCombineForce",
- "partCombineUp",
- "partial",
- "phrasingSlurDashPattern",
- "pitchedTrill",
- "pointAndClickOff",
- "pointAndClickOn",
- "pointAndClickTypes",
- "preBend",
- "preBendHold",
- "propertyOverride",
- "propertyRevert",
- "propertySet",
- "propertyTweak",
- "propertyUnset",
- "pushToTag",
- "quoteDuring",
- "raiseNote",
- "reduceChords",
- "relative",
- "removeWithTag",
- "repeat",
- "resetRelativeOctave",
- "retrograde",
- "revert",
- "revertTimeSignatureSettings",
- "rightHandFinger",
- "scaleDurations",
- "sectionLabel",
- "segnoMark",
- "set",
- "settingsFrom",
- "shape",
- "shiftDurations",
- "single",
- "skip",
- "slashedGrace",
- "slurDashPattern",
- "staffHighlight",
- "storePredefinedDiagram",
- "stringTuning",
- "styledNoteHeads",
- "tabChordRepeats",
- "tabChordRepetition",
- "tag",
- "tagGroup",
- "tempo",
- "temporary",
- "textEndMark",
- "textMark",
- "tieDashPattern",
- "time",
- "times",
- "tocItem",
- "transpose",
- "transposedCueDuring",
- "transposition",
- "tuplet",
- "tupletSpan",
- "tweak",
- "undo",
- "unfoldRepeats",
- "unfolded",
- "unset",
- "voices",
- "void",
- "volta",
- "vshape",
- "withMusicProperty",
- "xNote",
-]
-
-dynamics = [
- "!",
- "<",
- ">",
- "cr",
- "cresc",
- "decr",
- "decresc",
- "dim",
- "endcr",
- "endcresc",
- "enddecr",
- "enddecresc",
- "enddim",
- "f",
- "ff",
- "fff",
- "ffff",
- "fffff",
- "fp",
- "fz",
- "mf",
- "mp",
- "n",
- "p",
- "pp",
- "ppp",
- "pppp",
- "ppppp",
- "rfz",
- "sf",
- "sff",
- "sfp",
- "sfz",
- "sp",
- "spp",
-]
-
-articulations = [
- "(",
- ")",
- "-",
- "[",
- "]",
- "^",
- "accent",
- "arpeggio",
- "breakDynamicSpan",
- "coda",
- "dashBang",
- "dashDash",
- "dashDot",
- "dashHat",
- "dashLarger",
- "dashPlus",
- "dashUnderscore",
- "downbow",
- "downmordent",
- "downprall",
- "episemFinis",
- "episemInitium",
- "espressivo",
- "fermata",
- "flageolet",
- "glide",
- "glissando",
- "halfopen",
- "harmonic",
- "haydnturn",
- "henzelongfermata",
- "henzeshortfermata",
- "laissezVibrer",
- "lheel",
- "lineprall",
- "longfermata",
- "ltoe",
- "marcato",
- "mordent",
- "noBeam",
- "open",
- "portato",
- "prall",
- "pralldown",
- "prallmordent",
- "prallprall",
- "prallup",
- "repeatTie",
- "reverseturn",
- "rheel",
- "rtoe",
- "segno",
- "shortfermata",
- "signumcongruentiae",
- "slashturn",
- "snappizzicato",
- "sostenutoOff",
- "sostenutoOn",
- "staccatissimo",
- "staccato",
- "startGraceSlur",
- "startGroup",
- "startTextSpan",
- "startTrillSpan",
- "stopGraceSlur",
- "stopGroup",
- "stopTextSpan",
- "stopTrillSpan",
- "stopped",
- "sustainOff",
- "sustainOn",
- "tenuto",
- "thumb",
- "treCorde",
- "trill",
- "turn",
- "unaCorda",
- "upbow",
- "upmordent",
- "upprall",
- "varcoda",
- "verylongfermata",
- "veryshortfermata",
- "vowelTransition",
- "~",
-]
-
-music_commands = [
- "[",
- "]",
- "aikenHeads",
- "aikenHeadsMinor",
- "aikenThinHeads",
- "aikenThinHeadsMinor",
- "allowBreak",
- "arabicStringNumbers",
- "arpeggioArrowDown",
- "arpeggioArrowUp",
- "arpeggioBracket",
- "arpeggioNormal",
- "arpeggioParenthesis",
- "arpeggioParenthesisDashed",
- "autoBeamOff",
- "autoBeamOn",
- "autoBreaksOff",
- "autoBreaksOn",
- "autoLineBreaksOff",
- "autoLineBreaksOn",
- "autoPageBreaksOff",
- "autoPageBreaksOn",
- "balloonLengthOff",
- "balloonLengthOn",
- "bassFigureExtendersOff",
- "bassFigureExtendersOn",
- "bassFigureStaffAlignmentDown",
- "bassFigureStaffAlignmentNeutral",
- "bassFigureStaffAlignmentUp",
- "break",
- "cadenzaOff",
- "cadenzaOn",
- "compressEmptyMeasures",
- "crescHairpin",
- "crescTextCresc",
- "deadNotesOff",
- "deadNotesOn",
- "defaultNoteHeads",
- "defaultTimeSignature",
- "deprecatedcresc",
- "deprecateddim",
- "deprecatedendcresc",
- "deprecatedenddim",
- "dimHairpin",
- "dimTextDecr",
- "dimTextDecresc",
- "dimTextDim",
- "dotsDown",
- "dotsNeutral",
- "dotsUp",
- "dynamicDown",
- "dynamicNeutral",
- "dynamicUp",
- "easyHeadsOff",
- "easyHeadsOn",
- "endSkipNCs",
- "expandEmptyMeasures",
- "fine",
- "frenchChords",
- "funkHeads",
- "funkHeadsMinor",
- "germanChords",
- "harmonicsOff",
- "hideNotes",
- "hideSplitTiedTabNotes",
- "hideStaffSwitch",
- "huge",
- "ignatzekExceptionMusic",
- "improvisationOff",
- "improvisationOn",
- "italianChords",
- "kievanOff",
- "kievanOn",
- "large",
- "markLengthOff",
- "markLengthOn",
- "medianChordGridStyle",
- "melisma",
- "melismaEnd",
- "mergeDifferentlyDottedOff",
- "mergeDifferentlyDottedOn",
- "mergeDifferentlyHeadedOff",
- "mergeDifferentlyHeadedOn",
- "newSpacingSection",
- "noBreak",
- "normalsize",
- "numericTimeSignature",
- "oneVoice",
- "palmMuteOff",
- "partCombineApart",
- "partCombineAutomatic",
- "partCombineChords",
- "partCombineSoloI",
- "partCombineSoloII",
- "partCombineUnisono",
- "phrasingSlurDashed",
- "phrasingSlurDotted",
- "phrasingSlurDown",
- "phrasingSlurHalfDashed",
- "phrasingSlurHalfSolid",
- "phrasingSlurNeutral",
- "phrasingSlurSolid",
- "phrasingSlurUp",
- "predefinedFretboardsOff",
- "predefinedFretboardsOn",
- "romanStringNumbers",
- "sacredHarpHeads",
- "sacredHarpHeadsMinor",
- "section",
- "semiGermanChords",
- "setDefaultDurationToQuarter",
- "shiftOff",
- "shiftOn",
- "shiftOnn",
- "shiftOnnn",
- "showSplitTiedTabNotes",
- "showStaffSwitch",
- "skipNC",
- "skipNCs",
- "slurDashed",
- "slurDotted",
- "slurDown",
- "slurHalfDashed",
- "slurHalfSolid",
- "slurNeutral",
- "slurSolid",
- "slurUp",
- "small",
- "southernHarmonyHeads",
- "southernHarmonyHeadsMinor",
- "startAcciaccaturaMusic",
- "startAppoggiaturaMusic",
- "startGraceMusic",
- "startMeasureCount",
- "startMeasureSpanner",
- "startSlashedGraceMusic",
- "startStaff",
- "stemDown",
- "stemNeutral",
- "stemUp",
- "stopAcciaccaturaMusic",
- "stopAppoggiaturaMusic",
- "stopGraceMusic",
- "stopMeasureCount",
- "stopMeasureSpanner",
- "stopSlashedGraceMusic",
- "stopStaff",
- "stopStaffHighlight",
- "tabFullNotation",
- "teeny",
- "textLengthOff",
- "textLengthOn",
- "textSpannerDown",
- "textSpannerNeutral",
- "textSpannerUp",
- "tieDashed",
- "tieDotted",
- "tieDown",
- "tieHalfDashed",
- "tieHalfSolid",
- "tieNeutral",
- "tieSolid",
- "tieUp",
- "tiny",
- "tupletDown",
- "tupletNeutral",
- "tupletUp",
- "unHideNotes",
- "voiceFour",
- "voiceFourStyle",
- "voiceNeutralStyle",
- "voiceOne",
- "voiceOneStyle",
- "voiceThree",
- "voiceThreeStyle",
- "voiceTwo",
- "voiceTwoStyle",
- "walkerHeads",
- "walkerHeadsMinor",
- "xNotesOff",
- "xNotesOn",
- "|",
- "~",
-]
-
-markup_commands = [
- "abs-fontsize",
- "accidental",
- "align-on-other",
- "arrow-head",
- "auto-footnote",
- "backslashed-digit",
- "beam",
- "bold",
- "box",
- "bracket",
- "caps",
- "center-align",
- "center-column",
- "char",
- "circle",
- "coda",
- "column",
- "column-lines",
- "combine",
- "compound-meter",
- "concat",
- "conditional-trill-markup",
- "customTabClef",
- "dir-column",
- "discant",
- "doubleflat",
- "doublesharp",
- "draw-circle",
- "draw-dashed-line",
- "draw-dotted-line",
- "draw-hline",
- "draw-line",
- "draw-squiggle-line",
- "dynamic",
- "ellipse",
- "epsfile",
- "eyeglasses",
- "fermata",
- "figured-bass",
- "fill-line",
- "fill-with-pattern",
- "filled-box",
- "finger",
- "first-visible",
- "flat",
- "fontCaps",
- "fontsize",
- "footnote",
- "fraction",
- "freeBass",
- "fret-diagram",
- "fret-diagram-terse",
- "fret-diagram-verbose",
- "fromproperty",
- "general-align",
- "halign",
- "harp-pedal",
- "hbracket",
- "hcenter-in",
- "hspace",
- "huge",
- "if",
- "italic",
- "justified-lines",
- "justify",
- "justify-field",
- "justify-line",
- "justify-string",
- "large",
- "larger",
- "left-align",
- "left-brace",
- "left-column",
- "line",
- "lookup",
- "lower",
- "magnify",
- "map-markup-commands",
- "markalphabet",
- "markletter",
- "markup",
- "markuplist",
- "medium",
- "multi-measure-rest-by-number",
- "musicglyph",
- "natural",
- "normal-size-sub",
- "normal-size-super",
- "normal-text",
- "normalsize",
- "note",
- "note-by-number",
- "null",
- "number",
- "on-the-fly",
- "oval",
- "overlay",
- "override",
- "override-lines",
- "overtie",
- "pad-around",
- "pad-markup",
- "pad-to-box",
- "pad-x",
- "page-link",
- "page-ref",
- "parenthesize",
- "path",
- "pattern",
- "polygon",
- "postscript",
- "property-recursive",
- "put-adjacent",
- "raise",
- "replace",
- "rest",
- "rest-by-number",
- "rhythm",
- "right-align",
- "right-brace",
- "right-column",
- "roman",
- "rotate",
- "rounded-box",
- "sans",
- "scale",
- "score",
- "score-lines",
- "segno",
- "semiflat",
- "semisharp",
- "sesquiflat",
- "sesquisharp",
- "sharp",
- "simple",
- "slashed-digit",
- "small",
- "smallCaps",
- "smaller",
- "stdBass",
- "stdBassIV",
- "stdBassV",
- "stdBassVI",
- "stencil",
- "string-lines",
- "strut",
- "sub",
- "super",
- "table",
- "table-of-contents",
- "teeny",
- "text",
- "tie",
- "tied-lyric",
- "tiny",
- "translate",
- "translate-scaled",
- "transparent",
- "triangle",
- "typewriter",
- "underline",
- "undertie",
- "unless",
- "upright",
- "varcoda",
- "vcenter",
- "verbatim-file",
- "vspace",
- "whiteout",
- "with-color",
- "with-dimension",
- "with-dimension-from",
- "with-dimensions",
- "with-dimensions-from",
- "with-link",
- "with-outline",
- "with-string-transformer",
- "with-true-dimension",
- "with-true-dimensions",
- "with-url",
- "woodwind-diagram",
- "wordwrap",
- "wordwrap-field",
- "wordwrap-internal",
- "wordwrap-lines",
- "wordwrap-string",
- "wordwrap-string-internal",
-]
-
-grobs = [
- "Accidental",
- "AccidentalCautionary",
- "AccidentalPlacement",
- "AccidentalSuggestion",
- "Ambitus",
- "AmbitusAccidental",
- "AmbitusLine",
- "AmbitusNoteHead",
- "Arpeggio",
- "BalloonText",
- "BarLine",
- "BarNumber",
- "BassFigure",
- "BassFigureAlignment",
- "BassFigureAlignmentPositioning",
- "BassFigureBracket",
- "BassFigureContinuation",
- "BassFigureLine",
- "Beam",
- "BendAfter",
- "BendSpanner",
- "BreakAlignGroup",
- "BreakAlignment",
- "BreathingSign",
- "CaesuraScript",
- "CenteredBarNumber",
- "CenteredBarNumberLineSpanner",
- "ChordName",
- "ChordSquare",
- "Clef",
- "ClefModifier",
- "ClusterSpanner",
- "ClusterSpannerBeacon",
- "CodaMark",
- "CombineTextScript",
- "ControlPoint",
- "ControlPolygon",
- "CueClef",
- "CueEndClef",
- "Custos",
- "Divisio",
- "DotColumn",
- "Dots",
- "DoublePercentRepeat",
- "DoublePercentRepeatCounter",
- "DoubleRepeatSlash",
- "DurationLine",
- "DynamicLineSpanner",
- "DynamicText",
- "DynamicTextSpanner",
- "Episema",
- "FingerGlideSpanner",
- "Fingering",
- "FingeringColumn",
- "Flag",
- "Footnote",
- "FretBoard",
- "Glissando",
- "GraceSpacing",
- "GridChordName",
- "GridLine",
- "GridPoint",
- "Hairpin",
- "HorizontalBracket",
- "HorizontalBracketText",
- "InstrumentName",
- "InstrumentSwitch",
- "JumpScript",
- "KeyCancellation",
- "KeySignature",
- "KievanLigature",
- "LaissezVibrerTie",
- "LaissezVibrerTieColumn",
- "LedgerLineSpanner",
- "LeftEdge",
- "LigatureBracket",
- "LyricExtender",
- "LyricHyphen",
- "LyricRepeatCount",
- "LyricSpace",
- "LyricText",
- "MeasureCounter",
- "MeasureGrouping",
- "MeasureSpanner",
- "MelodyItem",
- "MensuralLigature",
- "MetronomeMark",
- "MultiMeasureRest",
- "MultiMeasureRestNumber",
- "MultiMeasureRestScript",
- "MultiMeasureRestText",
- "NonMusicalPaperColumn",
- "NoteCollision",
- "NoteColumn",
- "NoteHead",
- "NoteName",
- "NoteSpacing",
- "OttavaBracket",
- "PaperColumn",
- "Parentheses",
- "PercentRepeat",
- "PercentRepeatCounter",
- "PhrasingSlur",
- "PianoPedalBracket",
- "RehearsalMark",
- "RepeatSlash",
- "RepeatTie",
- "RepeatTieColumn",
- "Rest",
- "RestCollision",
- "Script",
- "ScriptColumn",
- "ScriptRow",
- "SectionLabel",
- "SegnoMark",
- "SignumRepetitionis",
- "Slur",
- "SostenutoPedal",
- "SostenutoPedalLineSpanner",
- "SpacingSpanner",
- "SpanBar",
- "SpanBarStub",
- "StaffEllipsis",
- "StaffGrouper",
- "StaffHighlight",
- "StaffSpacing",
- "StaffSymbol",
- "StanzaNumber",
- "Stem",
- "StemStub",
- "StemTremolo",
- "StringNumber",
- "StrokeFinger",
- "SustainPedal",
- "SustainPedalLineSpanner",
- "System",
- "SystemStartBar",
- "SystemStartBrace",
- "SystemStartBracket",
- "SystemStartSquare",
- "TabNoteHead",
- "TextMark",
- "TextScript",
- "TextSpanner",
- "Tie",
- "TieColumn",
- "TimeSignature",
- "TrillPitchAccidental",
- "TrillPitchGroup",
- "TrillPitchHead",
- "TrillPitchParentheses",
- "TrillSpanner",
- "TupletBracket",
- "TupletNumber",
- "UnaCordaPedal",
- "UnaCordaPedalLineSpanner",
- "VaticanaLigature",
- "VerticalAlignment",
- "VerticalAxisGroup",
- "VoiceFollower",
- "VoltaBracket",
- "VoltaBracketSpanner",
- "VowelTransition",
-]
-
-contexts = [
- "ChoirStaff",
- "ChordGrid",
- "ChordGridScore",
- "ChordNames",
- "CueVoice",
- "Devnull",
- "DrumStaff",
- "DrumVoice",
- "Dynamics",
- "FiguredBass",
- "FretBoards",
- "Global",
- "GrandStaff",
- "GregorianTranscriptionLyrics",
- "GregorianTranscriptionStaff",
- "GregorianTranscriptionVoice",
- "InternalGregorianStaff",
- "KievanStaff",
- "KievanVoice",
- "Lyrics",
- "MensuralStaff",
- "MensuralVoice",
- "NoteNames",
- "NullVoice",
- "OneStaff",
- "PetrucciStaff",
- "PetrucciVoice",
- "PianoStaff",
- "RhythmicStaff",
- "Score",
- "Staff",
- "StaffGroup",
- "StandaloneRhythmScore",
- "StandaloneRhythmStaff",
- "StandaloneRhythmVoice",
- "TabStaff",
- "TabVoice",
- "Timing",
- "VaticanaLyrics",
- "VaticanaStaff",
- "VaticanaVoice",
- "Voice",
-]
-
-translators = [
- "Accidental_engraver",
- "Alteration_glyph_engraver",
- "Ambitus_engraver",
- "Arpeggio_engraver",
- "Auto_beam_engraver",
- "Axis_group_engraver",
- "Balloon_engraver",
- "Bar_engraver",
- "Bar_number_engraver",
- "Beam_collision_engraver",
- "Beam_engraver",
- "Beam_performer",
- "Beat_engraver",
- "Beat_performer",
- "Bend_engraver",
- "Bend_spanner_engraver",
- "Break_align_engraver",
- "Breathing_sign_engraver",
- "Caesura_engraver",
- "Centered_bar_number_align_engraver",
- "Chord_name_engraver",
- "Chord_square_engraver",
- "Chord_tremolo_engraver",
- "Clef_engraver",
- "Cluster_spanner_engraver",
- "Collision_engraver",
- "Completion_heads_engraver",
- "Completion_rest_engraver",
- "Concurrent_hairpin_engraver",
- "Control_track_performer",
- "Cue_clef_engraver",
- "Current_chord_text_engraver",
- "Custos_engraver",
- "Divisio_engraver",
- "Dot_column_engraver",
- "Dots_engraver",
- "Double_percent_repeat_engraver",
- "Drum_note_performer",
- "Drum_notes_engraver",
- "Duration_line_engraver",
- "Dynamic_align_engraver",
- "Dynamic_engraver",
- "Dynamic_performer",
- "Episema_engraver",
- "Extender_engraver",
- "Figured_bass_engraver",
- "Figured_bass_position_engraver",
- "Finger_glide_engraver",
- "Fingering_column_engraver",
- "Fingering_engraver",
- "Font_size_engraver",
- "Footnote_engraver",
- "Forbid_line_break_engraver",
- "Fretboard_engraver",
- "Glissando_engraver",
- "Grace_auto_beam_engraver",
- "Grace_beam_engraver",
- "Grace_engraver",
- "Grace_spacing_engraver",
- "Grid_chord_name_engraver",
- "Grid_line_span_engraver",
- "Grid_point_engraver",
- "Grob_pq_engraver",
- "Horizontal_bracket_engraver",
- "Hyphen_engraver",
- "Instrument_name_engraver",
- "Instrument_switch_engraver",
- "Jump_engraver",
- "Keep_alive_together_engraver",
- "Key_engraver",
- "Key_performer",
- "Kievan_ligature_engraver",
- "Laissez_vibrer_engraver",
- "Ledger_line_engraver",
- "Ligature_bracket_engraver",
- "Lyric_engraver",
- "Lyric_performer",
- "Lyric_repeat_count_engraver",
- "Mark_engraver",
- "Mark_performer",
- "Mark_tracking_translator",
- "Measure_counter_engraver",
- "Measure_grouping_engraver",
- "Measure_spanner_engraver",
- "Melody_engraver",
- "Mensural_ligature_engraver",
- "Merge_mmrest_numbers_engraver",
- "Merge_rests_engraver",
- "Metronome_mark_engraver",
- "Midi_control_change_performer",
- "Multi_measure_rest_engraver",
- "New_fingering_engraver",
- "Non_musical_script_column_engraver",
- "Note_head_line_engraver",
- "Note_heads_engraver",
- "Note_name_engraver",
- "Note_performer",
- "Note_spacing_engraver",
- "Ottava_spanner_engraver",
- "Output_property_engraver",
- "Page_turn_engraver",
- "Paper_column_engraver",
- "Parenthesis_engraver",
- "Part_combine_engraver",
- "Percent_repeat_engraver",
- "Phrasing_slur_engraver",
- "Piano_pedal_align_engraver",
- "Piano_pedal_engraver",
- "Piano_pedal_performer",
- "Pitch_squash_engraver",
- "Pitched_trill_engraver",
- "Pure_from_neighbor_engraver",
- "Repeat_acknowledge_engraver",
- "Repeat_tie_engraver",
- "Rest_collision_engraver",
- "Rest_engraver",
- "Rhythmic_column_engraver",
- "Script_column_engraver",
- "Script_engraver",
- "Script_row_engraver",
- "Separating_line_group_engraver",
- "Show_control_points_engraver",
- "Signum_repetitionis_engraver",
- "Skip_typesetting_engraver",
- "Slash_repeat_engraver",
- "Slur_engraver",
- "Slur_performer",
- "Spacing_engraver",
- "Span_arpeggio_engraver",
- "Span_bar_engraver",
- "Span_bar_stub_engraver",
- "Span_stem_engraver",
- "Spanner_break_forbid_engraver",
- "Spanner_tracking_engraver",
- "Staff_collecting_engraver",
- "Staff_highlight_engraver",
- "Staff_performer",
- "Staff_symbol_engraver",
- "Stanza_number_align_engraver",
- "Stanza_number_engraver",
- "Stem_engraver",
- "System_start_delimiter_engraver",
- "Tab_note_heads_engraver",
- "Tab_staff_symbol_engraver",
- "Tab_tie_follow_engraver",
- "Tempo_performer",
- "Text_engraver",
- "Text_mark_engraver",
- "Text_spanner_engraver",
- "Tie_engraver",
- "Tie_performer",
- "Time_signature_engraver",
- "Time_signature_performer",
- "Timing_translator",
- "Trill_spanner_engraver",
- "Tuplet_engraver",
- "Tweak_engraver",
- "Vaticana_ligature_engraver",
- "Vertical_align_engraver",
- "Volta_engraver",
-]
-
-scheme_functions = [
- "!=",
- "*location*",
- "*parser*",
- "Alteration_glyph_engraver",
- "Beat_performer",
- "Bend_spanner_engraver",
- "Breathing_sign_engraver",
- "Centered_bar_number_align_engraver",
- "Chord_name_engraver",
- "Chord_square_engraver",
- "Current_chord_text_engraver",
- "Divisio_engraver",
- "Duration_line_engraver",
- "Finger_glide_engraver",
- "G_",
- "Grid_chord_name_engraver",
- "Lyric_repeat_count_engraver",
- "Measure_counter_engraver",
- "Measure_spanner_engraver",
- "Merge_mmrest_numbers_engraver",
- "Merge_rests_engraver",
- "Show_control_points_engraver",
- "Signum_repetitionis_engraver",
- "Skip_typesetting_engraver",
- "Span_stem_engraver",
- "Spanner_tracking_engraver",
- "Staff_highlight_engraver",
- "Text_mark_engraver",
- "Trill_spanner_engraver",
- "_i",
- "abs-fontsize-markup",
- "accidental->markup",
- "accidental->markup-italian",
- "accidental-interface::calc-alteration",
- "accidental-interface::calc-glyph-name",
- "accidental-invalid?",
- "accidental-markup",
- "add-bar-glyph-print-procedure",
- "add-font",
- "add-grace-property",
- "add-music",
- "add-music-fonts",
- "add-new-clef",
- "add-pango-fonts",
- "add-point",
- "add-quotable",
- "add-score",
- "add-simple-time-signature-style",
- "add-stroke-glyph",
- "add-stroke-straight",
- "add-text",
- "adjust-slash-stencil",
- "align-on-other-markup",
- "aligned-text-stencil-function",
- "alist->hash-table",
- "alist<?",
- "alist?",
- "all-bar-numbers-visible",
- "all-equal?",
- "all-repeat-counts-visible",
- "allow-volta-hook",
- "alteration->text-accidental-markup",
- "alterations-in-key",
- "ambitus-line::calc-gap",
- "ambitus::print",
- "analyse-spanner-states",
- "ancestor-lookup-initialize",
- "angle-0-2pi",
- "angle-0-360",
- "annotate-spacing-spec",
- "annotate-y-interval",
- "any-mmrest-events",
- "apply-durations",
- "apply-group-draw-rule-series",
- "arrow-head-markup",
- "arrow-stencil",
- "arrow-stencil-maker",
- "assemble-stencils",
- "assoc-get",
- "assoc-keys",
- "assoc-values",
- "at-bar-line-substitute-caesura-type",
- "aug-modifier",
- "auto-footnote-markup",
- "average",
- "b",
- "backslashed-digit-markup",
- "bar-line::bar-y-extent",
- "bar-line::calc-blot",
- "bar-line::calc-break-visibility",
- "bar-line::calc-glyph-name",
- "bar-line::calc-glyph-name-for-direction",
- "bar-line::compound-bar-line",
- "bar-line::draw-filled-box",
- "bar-line::widen-bar-extent-on-span",
- "base-length",
- "bass-clarinet-rh-ees-key-stencil",
- "bassoon-bend-info-maker",
- "bassoon-cc-six-key-stencil",
- "bassoon-lh-a-flick-key-stencil",
- "bassoon-lh-c-flick-key-stencil",
- "bassoon-lh-cis-key-stencil",
- "bassoon-lh-d-flick-key-stencil",
- "bassoon-lh-ees-key-stencil",
- "bassoon-lh-he-key-stencil",
- "bassoon-lh-hees-key-stencil",
- "bassoon-lh-lb-key-stencil",
- "bassoon-lh-lbes-key-stencil",
- "bassoon-lh-lc-key-stencil",
- "bassoon-lh-ld-key-stencil",
- "bassoon-lh-lhees-key-stencil",
- "bassoon-lh-thumb-cis-key-stencil",
- "bassoon-lh-whisper-key-stencil",
- "bassoon-midline-rule",
- "bassoon-rh-bes-key-stencil",
- "bassoon-rh-cis-key-stencil",
- "bassoon-rh-f-key-stencil",
- "bassoon-rh-fis-key-stencil",
- "bassoon-rh-gis-key-stencil",
- "bassoon-rh-thumb-bes-key-stencil",
- "bassoon-rh-thumb-e-key-stencil",
- "bassoon-rh-thumb-fis-key-stencil",
- "bassoon-rh-thumb-gis-key-stencil",
- "bassoon-uber-key-stencil",
- "beam-exceptions",
- "beam-markup",
- "beam::align-with-broken-parts",
- "beam::get-kievan-positions",
- "beam::get-kievan-quantized-positions",
- "beam::place-broken-parts-individually",
- "beam::slope-like-broken-parts",
- "beat-grouping-internal",
- "beat-structure",
- "bend-spanner::print",
- "bend::arrow-head-stencil",
- "bend::calc-bend-x-begin",
- "bend::calc-bend-x-end",
- "bend::calc-y-coordinates",
- "bend::draw-curves",
- "bend::make-line-curve-stencil",
- "bend::print",
- "bend::remove-certain-tab-note-heads",
- "bend::target-cautionary",
- "bend::text-stencil",
- "bend::text-string",
- "bezier-head-for-stencil",
- "binary-search",
- "bold-markup",
- "book-first-page",
- "boolean-or-number?",
- "boolean-or-symbol?",
- "bounding-note-heads-pitches",
- "box-grob-stencil",
- "box-markup",
- "box-stencil",
- "bracket-markup",
- "bracketify-stencil",
- "break-alignable-interface::self-alignment-of-anchor",
- "break-alignable-interface::self-alignment-opposite-of-anchor",
- "break-alignment-list",
- "breathe::midi-length",
- "buildflag",
- "cached-file-contents",
- "caesura-script-interface::before-line-breaking",
- "caesura-to-bar-line-or-divisio",
- "caesura-to-divisio",
- "calc-harmonic-pitch",
- "calc-line-thickness",
- "calc-repeat-slash-count",
- "calculate-complex-compound-time",
- "calculate-compound-base-beat",
- "calculate-compound-base-beat-full",
- "calculate-compound-beat-grouping",
- "calculate-compound-measure-length",
- "calculate-time-fraction",
- "call-after-session",
- "caps-markup",
- "car-or-identity",
- "car<",
- "car<=",
- "cdr-or-identity",
- "center-align-markup",
- "center-column-markup",
- "centered-spanner-interface::calc-x-offset",
- "centered-stencil",
- "chain-assoc-get",
- "change-pitches",
- "char-markup",
- "cheap-list?",
- "cheap-markup?",
- "check-beam-quant",
- "check-beam-slope-sign",
- "check-broken-spanner",
- "check-context-path",
- "check-division-alist",
- "check-for-annotation",
- "check-for-replacement",
- "check-grob-path",
- "check-music-path",
- "check-pitch-against-signature",
- "check-quant-callbacks",
- "check-slope-callbacks",
- "chord-name->german-markup",
- "chord-name->italian-markup",
- "chord-square::height",
- "chord-square::print",
- "chord-square::width",
- "circle-markup",
- "circle-stencil",
- "clarinet-lh-R-key-stencil",
- "clarinet-lh-a-key-stencil",
- "clarinet-lh-cis-key-stencil",
- "clarinet-lh-d-key-stencil",
- "clarinet-lh-e-key-stencil",
- "clarinet-lh-ees-key-stencil",
- "clarinet-lh-f-key-stencil",
- "clarinet-lh-fis-key-stencil",
- "clarinet-lh-gis-key-stencil",
- "clarinet-lh-thumb-key-stencil",
- "clarinet-rh-b-key-stencil",
- "clarinet-rh-d-key-stencil",
- "clarinet-rh-e-key-stencil",
- "clarinet-rh-f-key-stencil",
- "clarinet-rh-fis-key-stencil",
- "clarinet-rh-four-key-stencil",
- "clarinet-rh-gis-key-stencil",
- "clarinet-rh-low-c-key-stencil",
- "clarinet-rh-low-cis-key-stencil",
- "clarinet-rh-low-d-key-stencil",
- "clarinet-rh-one-key-stencil",
- "clarinet-rh-three-key-stencil",
- "clarinet-rh-two-key-stencil",
- "clef-transposition-markup",
- "clef::print-modern-tab-if-set",
- "clip-systems-to-region-stencils",
- "clipped-systems-stencils",
- "close-enough?",
- "close-port-rename",
- "coda-markup",
- "collect-book-music-for-book",
- "collect-bookpart-for-book",
- "collect-music-aux",
- "collect-music-for-book",
- "collect-scores-for-book",
- "color?",
- "column-circle-stencil",
- "column-lines-markup-list",
- "column-markup",
- "combine-markup",
- "comparable-note-events",
- "comparator-from-key",
- "compile-all-markup-args",
- "compile-all-markup-expressions",
- "compile-markup-arg",
- "compile-markup-expression",
- "completize-formats",
- "completize-grob-entry",
- "compound-meter-markup",
- "concat-markup",
- "conditional-kern-before",
- "conditional-string-capitalize",
- "conditional-trill-markup-markup",
- "configuration",
- "cons-fret",
- "constante-hairpin",
- "construct-chord-elements",
- "context-defs-from-music",
- "context-mod-from-music",
- "context-spec-music",
- "control-point::calc-offset",
- "control-polygon::calc-text",
- "coord-axis",
- "coord-rotate",
- "coord-rotated",
- "coord-scale",
- "coord-translate",
- "coord-x",
- "coord-y",
- "copy-binary-file",
- "copy-repeat-chord",
- "count-list",
- "create-file-exclusive",
- "create-fretboard",
- "create-glyph-flag",
- "cross-staff-connect",
- "css-color",
- "cue-substitute",
- "current-or-previous-voice-states",
- "customTabClef-markup",
- "cyclic-base-value",
- "debugf",
- "def-grace-function",
- "default-auto-beam-check",
- "default-flag",
- "default-paren-color",
- "define-bar-line",
- "define-event-class",
- "define-event-function",
- "define-fonts",
- "define-grob-property",
- "define-internal-grob-property",
- "define-markup-command",
- "define-markup-command-internal",
- "define-markup-list-command",
- "define-music-function",
- "define-scheme-function",
- "define-session",
- "define-session-public",
- "define-syntax-function",
- "define-syntax-public",
- "define-syntax-rule-public",
- "define-tag-group",
- "define-void-function",
- "degree-first-true",
- "degrees->radians",
- "descend-to-context",
- "determine-frets",
- "determine-split-list",
- "determine-string-fret-finger",
- "dim-modifier",
- "dimension-arrows",
- "dir-basename",
- "dir-column-markup",
- "display-lily-music",
- "display-music",
- "display-scheme-music",
- "dodecaphonic-no-repeat-rule",
- "done?",
- "dot-column-interface::pad-by-one-dot-width",
- "dot-has-color",
- "dot-is-inverted",
- "dot-is-parenthesized",
- "dots::calc-dot-count",
- "dots::calc-dot-stencil",
- "dots::calc-glyph-name",
- "dots::calc-staff-position",
- "doubleflat-markup",
- "doublesharp-markup",
- "draw-circle-markup",
- "draw-dashed-line-markup",
- "draw-dotted-line-markup",
- "draw-hline-markup",
- "draw-line-markup",
- "draw-squiggle-line-markup",
- "dump-zombies",
- "duration",
- "duration-dot-factor",
- "duration-length",
- "duration-line::calc",
- "duration-line::calc-thickness",
- "duration-line::print",
- "duration-log-factor",
- "duration-of-note",
- "duration-or-music?",
- "duration-visual",
- "duration-visual-length",
- "dynamic-markup",
- "dynamic-text-spanner::before-line-breaking",
- "elbowed-hairpin",
- "ellipse-markup",
- "ellipse-radius",
- "ellipse-stencil",
- "empty-music",
- "end-broken-spanner?",
- "entry-greater-than-x?",
- "eps-file->stencil",
- "epsfile-markup",
- "eval-carefully",
- "event-cause",
- "event-chord-notes",
- "event-chord-pitches",
- "event-chord-reduce",
- "event-chord-wrap!",
- "event-class-cons",
- "event-has-articulation?",
- "events",
- "every-nth-bar-number-visible",
- "every-nth-repeat-count-visible",
- "exact-rational?",
- "expand-repeat-chords!",
- "expand-repeat-notes!",
- "extent-combine",
- "extract-alteration",
- "extract-beam-exceptions",
- "extract-music",
- "extract-named-music",
- "extract-typed-music",
- "eyeglasses-markup",
- "fermata-markup",
- "figured-bass-markup",
- "fill-line-markup",
- "fill-with-pattern-markup",
- "filled-box-markup",
- "find-named-props",
- "find-pitch-entry",
- "find-value-to-offset",
- "finger-glide::print",
- "finger-markup",
- "fingering::calc-text",
- "first-assoc",
- "first-bar-number-invisible",
- "first-bar-number-invisible-and-no-parenthesized-bar-numbers",
- "first-bar-number-invisible-save-broken-bars",
- "first-broken-spanner?",
- "first-member",
- "first-visible-markup",
- "flared-hairpin",
- "flat-flag",
- "flat-markup",
- "flatten-alist",
- "flatten-list",
- "flip-stencil",
- "flute-lh-b-key-stencil",
- "flute-lh-bes-key-stencil",
- "flute-lh-gis-key-stencil",
- "flute-lh-gis-rh-bes-key-stencil",
- "flute-rh-b-key-stencil",
- "flute-rh-bes-key-stencil",
- "flute-rh-c-key-stencil",
- "flute-rh-cis-key-stencil",
- "flute-rh-d-key-stencil",
- "flute-rh-dis-key-stencil",
- "flute-rh-ees-key-stencil",
- "flute-rh-gz-key-stencil",
- "fold-some-music",
- "font-children",
- "font-default",
- "font-name-split",
- "font-name-style",
- "font-qualifier",
- "fontCaps-markup",
- "fontsize-markup",
- "footnote-markup",
- "for-some-music",
- "forced-configuration",
- "format",
- "format-bass-figure",
- "format-coda-mark",
- "format-compound-time",
- "format-dal-segno-text",
- "format-dal-segno-text-brief",
- "format-mark-alphabet",
- "format-mark-barnumbers",
- "format-mark-box-alphabet",
- "format-mark-box-barnumbers",
- "format-mark-box-letters",
- "format-mark-box-numbers",
- "format-mark-circle-alphabet",
- "format-mark-circle-barnumbers",
- "format-mark-circle-letters",
- "format-mark-circle-numbers",
- "format-mark-generic",
- "format-mark-letters",
- "format-mark-numbers",
- "format-metronome-markup",
- "format-segno-mark",
- "format-segno-mark-considering-bar-lines",
- "format-sign-with-number",
- "format-time-element",
- "format-time-fraction",
- "format-time-list",
- "format-time-numerator",
- "format-varcoda-mark",
- "fraction->moment",
- "fraction-markup",
- "fraction?",
- "fret->pitch",
- "fret-board::calc-stencil",
- "fret-count",
- "fret-diagram-markup",
- "fret-diagram-terse-markup",
- "fret-diagram-verbose-markup",
- "fret-letter-tablature-format",
- "fret-number-tablature-format",
- "fret-number-tablature-format-banjo",
- "fret-parse-definition-string",
- "fret-parse-marking-list",
- "fret-parse-terse-definition-string",
- "fromproperty-markup",
- "function-chain",
- "g",
- "g-lookup-font",
- "general-align-markup",
- "general-column",
- "generate-bassoon-family-entry",
- "generate-clarinet-family-entry",
- "generate-crop-stencil",
- "generate-flute-family-entry",
- "generate-oboe-family-entry",
- "generate-preview-stencil",
- "generate-saxophone-family-entry",
- "generate-system-stencils",
- "generate-tin-whistle-family-entry",
- "get-bound-note-heads",
- "get-chord-shape",
- "get-current-filename",
- "get-current-suffix",
- "get-fill-space",
- "get-key",
- "get-named-spreadsheet-column",
- "get-next-unique-voice-name",
- "get-numeric-from-key",
- "get-outfile-name",
- "get-postscript-bbox",
- "get-quarter-diffs",
- "get-setting",
- "get-slope-offset",
- "get-span-glyph",
- "get-spreadsheet-column",
- "get-step",
- "get-sub-list",
- "get-top-most-tab-head",
- "get-tweakable-music",
- "get-woodwind-key-list",
- "glissando::calc-tab-extra-dy",
- "glissando::draw-tab-glissando",
- "glyph->stencil",
- "glyph-flag",
- "grace-spacing::calc-shortest-duration",
- "gray-colorize",
- "grid-chord-name::calc-X-offset",
- "grid-chord-name::calc-Y-offset",
- "grid-chord-name::calc-offset-on-axis",
- "grob-interpret-markup",
- "grob-list?",
- "grob-transformer",
- "grob::all-objects",
- "grob::calc-property-by-copy",
- "grob::compose-function",
- "grob::display-objects",
- "grob::has-interface",
- "grob::inherit-parent-property",
- "grob::is-live?",
- "grob::name",
- "grob::objects-from-interface",
- "grob::offset-function",
- "grob::relay-other-property",
- "grob::rhythmic-location",
- "grob::show-skylines-if-debug-skylines-set",
- "grob::unpure-Y-extent-from-stencil",
- "grob::when",
- "group-automate-rule",
- "group-draw-rule",
- "group-extra-offset-rule",
- "gs-cmd-args",
- "gs-safe-run",
- "hairpin::calc-grow-direction",
- "halign-markup",
- "harp-pedal-check",
- "harp-pedal-info",
- "harp-pedal-markup",
- "harp-pedals-parse-string",
- "has-at-least-two?",
- "has-one-or-less?",
- "hash-table->alist",
- "hbracket-markup",
- "hcenter-in-markup",
- "header-to-file",
- "headers-property-alist-chain",
- "hook-stencil",
- "horizontal-slash-interval",
- "hspace-markup",
- "huge-markup",
- "if-markup",
- "ignatzek-chord-names",
- "index-cell",
- "index-or-markup?",
- "index?",
- "insert-markups",
- "internal-set-paper-size",
- "interpret-markup",
- "interpret-markup-list",
- "interval-bound",
- "interval-center",
- "interval-contains?",
- "interval-empty?",
- "interval-end",
- "interval-index",
- "interval-intersection",
- "interval-length",
- "interval-sane?",
- "interval-scale",
- "interval-start",
- "interval-union",
- "interval-widen",
- "invalidate-alterations",
- "inverter-factory",
- "is-absolute?",
- "is-square?",
- "italic-markup",
- "item::extra-spacing-height-including-staff",
- "justified-lines-markup-list",
- "justify-field-markup",
- "justify-line-helper",
- "justify-line-markup",
- "justify-markup",
- "justify-string-markup",
- "key-crawler",
- "key-entry-alteration",
- "key-entry-bar-number",
- "key-entry-end-mom",
- "key-entry-notename",
- "key-entry-octave",
- "key-fill-translate",
- "key-list-or-music?",
- "key-list-or-symbol?",
- "key-list?",
- "key-signature-interface::alteration-position",
- "key-signature-interface::alteration-positions",
- "key?",
- "keyword->make-markup",
- "large-markup",
- "larger-markup",
- "layout-blot-diameter",
- "layout-extract-page-properties",
- "layout-line-thickness",
- "layout-set-absolute-staff-size",
- "layout-set-absolute-staff-size-in-module",
- "layout-set-staff-size",
- "left-align-markup",
- "left-brace-markup",
- "left-column-markup",
- "lexicographic-list-compare?",
- "lh-woodwind-text-stencil",
- "lilypond-all",
- "lilypond-file",
- "lilypond-main",
- "lilypond-version",
- "lilypond-version-outdated?",
- "line-markup",
- "list-all-possible-keys",
- "list-all-possible-keys-verbose",
- "list-element-index",
- "list-insert-separator",
- "list-join",
- "listener->once-listener",
- "little-elliptical-key-stencil",
- "long-midline-stencil",
- "lookup-font",
- "lookup-markup",
- "lookup-markup-command",
- "lookup-markup-command-aux",
- "lookup-markup-list-command",
- "lookup-paper-name",
- "low-bass-clarinet-rh-ees-key-stencil",
- "lower-markup",
- "ly-getcwd",
- "ly-type?",
- "ly:accidental-interface::height",
- "ly:accidental-interface::horizontal-skylines",
- "ly:accidental-interface::print",
- "ly:accidental-interface::remove-tied",
- "ly:accidental-placement::calc-positioning-done",
- "ly:add-context-mod",
- "ly:add-interface",
- "ly:add-listener",
- "ly:add-option",
- "ly:align-interface::align-to-ideal-distances",
- "ly:align-interface::align-to-minimum-distances",
- "ly:all-grob-interfaces",
- "ly:all-options",
- "ly:all-output-backend-commands",
- "ly:all-stencil-commands",
- "ly:all-stencil-expressions",
- "ly:alternative-sequence-iterator::constructor",
- "ly:angle",
- "ly:apply-context-iterator::constructor",
- "ly:arpeggio::brew-chord-bracket",
- "ly:arpeggio::brew-chord-slur",
- "ly:arpeggio::calc-cross-staff",
- "ly:arpeggio::calc-positions",
- "ly:arpeggio::print",
- "ly:arpeggio::pure-height",
- "ly:arpeggio::width",
- "ly:assoc-get",
- "ly:axis-group-interface::add-element",
- "ly:axis-group-interface::adjacent-pure-heights",
- "ly:axis-group-interface::calc-pure-relevant-grobs",
- "ly:axis-group-interface::calc-pure-staff-staff-spacing",
- "ly:axis-group-interface::calc-pure-y-common",
- "ly:axis-group-interface::calc-skylines",
- "ly:axis-group-interface::calc-staff-staff-spacing",
- "ly:axis-group-interface::calc-x-common",
- "ly:axis-group-interface::calc-y-common",
- "ly:axis-group-interface::combine-skylines",
- "ly:axis-group-interface::height",
- "ly:axis-group-interface::pure-height",
- "ly:axis-group-interface::width",
- "ly:balloon-interface::print",
- "ly:balloon-interface::pure-height",
- "ly:balloon-interface::remove-irrelevant-spanner",
- "ly:balloon-interface::width",
- "ly:bar-check-iterator::constructor",
- "ly:bar-line::calc-anchor",
- "ly:bar-line::calc-bar-extent",
- "ly:bar-line::print",
- "ly:basic-progress",
- "ly:beam::calc-beam-segments",
- "ly:beam::calc-beaming",
- "ly:beam::calc-cross-staff",
- "ly:beam::calc-direction",
- "ly:beam::calc-normal-stems",
- "ly:beam::calc-stem-shorten",
- "ly:beam::calc-x-positions",
- "ly:beam::print",
- "ly:beam::pure-rest-collision-callback",
- "ly:beam::quanting",
- "ly:beam::rest-collision-callback",
- "ly:beam::set-stem-lengths",
- "ly:bezier-extent",
- "ly:bezier-extract",
- "ly:book-add-bookpart!",
- "ly:book-add-score!",
- "ly:book-book-parts",
- "ly:book-header",
- "ly:book-paper",
- "ly:book-process",
- "ly:book-process-to-systems",
- "ly:book-scores",
- "ly:book-set-header!",
- "ly:book?",
- "ly:bp",
- "ly:bracket",
- "ly:break-alignable-interface::find-parent",
- "ly:break-alignable-interface::self-align-callback",
- "ly:break-aligned-interface::calc-average-anchor",
- "ly:break-aligned-interface::calc-break-visibility",
- "ly:break-aligned-interface::calc-extent-aligned-anchor",
- "ly:break-aligned-interface::calc-joint-anchor-alignment",
- "ly:break-alignment-interface::calc-positioning-done",
- "ly:break-alignment-interface::find-nonempty-break-align-group",
- "ly:breathing-sign::divisio-maior",
- "ly:breathing-sign::divisio-maxima",
- "ly:breathing-sign::divisio-minima",
- "ly:breathing-sign::finalis",
- "ly:breathing-sign::offset-callback",
- "ly:breathing-sign::set-breath-properties",
- "ly:broadcast",
- "ly:cairo-output-stencil",
- "ly:cairo-output-stencils",
- "ly:calculated-sequential-music::length",
- "ly:calculated-sequential-music::start",
- "ly:camel-case->lisp-identifier",
- "ly:chain-assoc-get",
- "ly:change-iterator::constructor",
- "ly:check-expected-warnings",
- "ly:chord-name::after-line-breaking",
- "ly:clef-modifier::calc-parent-alignment",
- "ly:clef::calc-glyph-name",
- "ly:clef::print",
- "ly:cluster-beacon::height",
- "ly:cluster::calc-cross-staff",
- "ly:cluster::print",
- "ly:cm",
- "ly:command-line-code",
- "ly:command-line-options",
- "ly:connect-dispatchers",
- "ly:context-current-moment",
- "ly:context-def-lookup",
- "ly:context-def-modify",
- "ly:context-def?",
- "ly:context-event-source",
- "ly:context-events-below",
- "ly:context-find",
- "ly:context-grob-definition",
- "ly:context-id",
- "ly:context-matched-pop-property",
- "ly:context-mod-apply!",
- "ly:context-mod?",
- "ly:context-name",
- "ly:context-output-def",
- "ly:context-parent",
- "ly:context-property",
- "ly:context-property-where-defined",
- "ly:context-pushpop-property",
- "ly:context-set-property!",
- "ly:context-specced-music-iterator::constructor",
- "ly:context-unset-property",
- "ly:context?",
- "ly:custos::print",
- "ly:debug",
- "ly:default-scale",
- "ly:dimension?",
- "ly:dir?",
- "ly:directed",
- "ly:disconnect-dispatchers",
- "ly:dispatcher?",
- "ly:dot-column::calc-positioning-done",
- "ly:dots::print",
- "ly:duration->string",
- "ly:duration-compress",
- "ly:duration-dot-count",
- "ly:duration-factor",
- "ly:duration-length",
- "ly:duration-log",
- "ly:duration-scale",
- "ly:duration::less?",
- "ly:duration<?",
- "ly:duration?",
- "ly:effective-prefix",
- "ly:enclosing-bracket::print",
- "ly:enclosing-bracket::width",
- "ly:engraver-announce-end-grob",
- "ly:engraver-make-grob",
- "ly:engraver-make-item",
- "ly:engraver-make-spanner",
- "ly:engraver-make-sticky",
- "ly:error",
- "ly:event-chord-iterator::constructor",
- "ly:event-deep-copy",
- "ly:event-iterator::constructor",
- "ly:event-property",
- "ly:event-set-property!",
- "ly:event-warning",
- "ly:event?",
- "ly:exit",
- "ly:expect-warning",
- "ly:extract-subfont-from-collection",
- "ly:figured-bass-continuation::center-on-figures",
- "ly:figured-bass-continuation::print",
- "ly:find-file",
- "ly:fine-iterator::constructor",
- "ly:fingering-column::calc-positioning-done",
- "ly:flag::calc-x-offset",
- "ly:flag::calc-y-offset",
- "ly:flag::glyph-name",
- "ly:flag::print",
- "ly:flag::pure-calc-y-offset",
- "ly:flag::width",
- "ly:font-config-add-directory",
- "ly:font-config-add-font",
- "ly:font-config-display-fonts",
- "ly:font-config-get-font-file",
- "ly:font-design-size",
- "ly:font-file-name",
- "ly:font-get-glyph",
- "ly:font-glyph-name-to-charcode",
- "ly:font-glyph-name-to-index",
- "ly:font-index-to-charcode",
- "ly:font-magnification",
- "ly:font-metric?",
- "ly:font-name",
- "ly:font-sub-fonts",
- "ly:format",
- "ly:format-output",
- "ly:generic-bound-extent",
- "ly:get-all-function-documentation",
- "ly:get-all-translators",
- "ly:get-cff-offset",
- "ly:get-context-mods",
- "ly:get-font-format",
- "ly:get-option",
- "ly:get-spacing-spec",
- "ly:grace-iterator::constructor",
- "ly:grace-music::start-callback",
- "ly:grid-line-interface::print",
- "ly:grid-line-interface::width",
- "ly:grob-alist-chain",
- "ly:grob-array->list",
- "ly:grob-array-length",
- "ly:grob-array-ref",
- "ly:grob-array?",
- "ly:grob-basic-properties",
- "ly:grob-chain-callback",
- "ly:grob-common-refpoint",
- "ly:grob-common-refpoint-of-array",
- "ly:grob-default-font",
- "ly:grob-extent",
- "ly:grob-get-vertical-axis-group-index",
- "ly:grob-interfaces",
- "ly:grob-layout",
- "ly:grob-list->grob-array",
- "ly:grob-object",
- "ly:grob-original",
- "ly:grob-parent",
- "ly:grob-pq<?",
- "ly:grob-properties?",
- "ly:grob-property",
- "ly:grob-property-data",
- "ly:grob-pure-height",
- "ly:grob-pure-property",
- "ly:grob-relative-coordinate",
- "ly:grob-robust-relative-extent",
- "ly:grob-script-priority-less",
- "ly:grob-set-nested-property!",
- "ly:grob-set-object!",
- "ly:grob-set-parent!",
- "ly:grob-set-property!",
- "ly:grob-spanned-column-rank-interval",
- "ly:grob-staff-position",
- "ly:grob-suicide!",
- "ly:grob-system",
- "ly:grob-translate-axis!",
- "ly:grob-vertical<?",
- "ly:grob-warning",
- "ly:grob::horizontal-skylines-from-element-stencils",
- "ly:grob::horizontal-skylines-from-stencil",
- "ly:grob::pure-horizontal-skylines-from-element-stencils",
- "ly:grob::pure-simple-horizontal-skylines-from-extents",
- "ly:grob::pure-simple-vertical-skylines-from-extents",
- "ly:grob::pure-stencil-height",
- "ly:grob::pure-vertical-skylines-from-element-stencils",
- "ly:grob::simple-horizontal-skylines-from-extents",
- "ly:grob::simple-vertical-skylines-from-extents",
- "ly:grob::stencil-height",
- "ly:grob::stencil-width",
- "ly:grob::vertical-skylines-from-element-stencils",
- "ly:grob::vertical-skylines-from-stencil",
- "ly:grob::x-parent-positioning",
- "ly:grob::y-parent-positioning",
- "ly:grob?",
- "ly:gs-cli",
- "ly:gulp-file",
- "ly:gulp-file-utf8",
- "ly:hairpin::broken-bound-padding",
- "ly:hairpin::print",
- "ly:hairpin::pure-height",
- "ly:hara-kiri-group-spanner::calc-skylines",
- "ly:hara-kiri-group-spanner::force-hara-kiri-callback",
- "ly:hara-kiri-group-spanner::force-hara-kiri-in-y-parent-callback",
- "ly:hara-kiri-group-spanner::pure-height",
- "ly:hara-kiri-group-spanner::y-extent",
- "ly:has-glyph-names?",
- "ly:hash-table-keys",
- "ly:horizontal-bracket-text::calc-direction",
- "ly:horizontal-bracket-text::print",
- "ly:horizontal-bracket::print",
- "ly:horizontal-line-spanner::calc-left-bound-info",
- "ly:horizontal-line-spanner::calc-left-bound-info-and-text",
- "ly:horizontal-line-spanner::calc-right-bound-info",
- "ly:in-event-class?",
- "ly:inch",
- "ly:input-both-locations",
- "ly:input-file-line-char-column",
- "ly:input-location?",
- "ly:input-message",
- "ly:input-warning",
- "ly:interpret-music-expression",
- "ly:intlog2",
- "ly:item-break-dir",
- "ly:item-get-column",
- "ly:item?",
- "ly:iterator?",
- "ly:key-signature-interface::print",
- "ly:kievan-ligature::print",
- "ly:ledger-line-spanner::print",
- "ly:ledger-line-spanner::set-spacing-rods",
- "ly:length",
- "ly:lily-lexer?",
- "ly:lily-parser?",
- "ly:line-interface::line",
- "ly:line-spanner::calc-cross-staff",
- "ly:line-spanner::calc-left-bound-info",
- "ly:line-spanner::calc-left-bound-info-and-text",
- "ly:line-spanner::calc-right-bound-info",
- "ly:line-spanner::print",
- "ly:list->offsets",
- "ly:listened-event-class?",
- "ly:listened-event-types",
- "ly:listener?",
- "ly:load",
- "ly:lyric-combine-music-iterator::constructor",
- "ly:lyric-combine-music::length-callback",
- "ly:lyric-extender::print",
- "ly:lyric-hyphen::print",
- "ly:lyric-hyphen::set-spacing-rods",
- "ly:make-book",
- "ly:make-book-part",
- "ly:make-context-mod",
- "ly:make-dispatcher",
- "ly:make-duration",
- "ly:make-event-class",
- "ly:make-global-context",
- "ly:make-global-translator",
- "ly:make-grob-properties",
- "ly:make-listener",
- "ly:make-moment",
- "ly:make-music",
- "ly:make-music-function",
- "ly:make-music-relative!",
- "ly:make-output-def",
- "ly:make-page-label-marker",
- "ly:make-page-permission-marker",
- "ly:make-pango-description-string",
- "ly:make-paper-outputter",
- "ly:make-pitch",
- "ly:make-prob",
- "ly:make-rotation",
- "ly:make-scale",
- "ly:make-scaling",
- "ly:make-score",
- "ly:make-skyline",
- "ly:make-spring",
- "ly:make-stencil",
- "ly:make-stream-event",
- "ly:make-transform",
- "ly:make-translation",
- "ly:make-unpure-pure-container",
- "ly:measure-grouping::print",
- "ly:measure-spanner::calc-connect-to-neighbors",
- "ly:measure-spanner::print",
- "ly:melody-spanner::calc-neutral-stem-direction",
- "ly:mensural-ligature::brew-ligature-primitive",
- "ly:mensural-ligature::print",
- "ly:message",
- "ly:minimal-breaking",
- "ly:mm",
- "ly:module->alist",
- "ly:module-copy",
- "ly:modules-lookup",
- "ly:moment-add",
- "ly:moment-div",
- "ly:moment-grace",
- "ly:moment-grace-denominator",
- "ly:moment-grace-numerator",
- "ly:moment-main",
- "ly:moment-main-denominator",
- "ly:moment-main-numerator",
- "ly:moment-mod",
- "ly:moment-mul",
- "ly:moment-sub",
- "ly:moment<?",
- "ly:moment?",
- "ly:multi-measure-rest::height",
- "ly:multi-measure-rest::print",
- "ly:multi-measure-rest::set-spacing-rods",
- "ly:multi-measure-rest::set-text-rods",
- "ly:music-compress",
- "ly:music-deep-copy",
- "ly:music-duration-compress",
- "ly:music-duration-length",
- "ly:music-error",
- "ly:music-function-extract",
- "ly:music-function-signature",
- "ly:music-function?",
- "ly:music-iterator::constructor",
- "ly:music-length",
- "ly:music-list?",
- "ly:music-message",
- "ly:music-mutable-properties",
- "ly:music-output?",
- "ly:music-property",
- "ly:music-sequence::cumulative-length-callback",
- "ly:music-sequence::event-chord-length-callback",
- "ly:music-sequence::event-chord-relative-callback",
- "ly:music-sequence::first-start-callback",
- "ly:music-sequence::maximum-length-callback",
- "ly:music-sequence::minimum-start-callback",
- "ly:music-sequence::simultaneous-relative-callback",
- "ly:music-set-property!",
- "ly:music-start",
- "ly:music-transpose",
- "ly:music-warning",
- "ly:music-wrapper-iterator::constructor",
- "ly:music-wrapper::length-callback",
- "ly:music-wrapper::start-callback",
- "ly:music::duration-length-callback",
- "ly:music?",
- "ly:non-fatal-error",
- "ly:note-collision-interface::calc-positioning-done",
- "ly:note-column-accidentals",
- "ly:note-column-dot-column",
- "ly:note-column::calc-main-extent",
- "ly:note-extra-source-file",
- "ly:note-head::calc-stem-attachment",
- "ly:note-head::calc-tab-stem-attachment",
- "ly:note-head::include-ledger-line-height",
- "ly:note-head::print",
- "ly:note-head::stem-attachment",
- "ly:note-head::stem-x-shift",
- "ly:number->string",
- "ly:number-pair->string",
- "ly:one-line-auto-height-breaking",
- "ly:one-line-breaking",
- "ly:one-page-breaking",
- "ly:optimal-breaking",
- "ly:option-usage",
- "ly:otf->cff",
- "ly:otf-font-glyph-info",
- "ly:otf-font-table-data",
- "ly:otf-font?",
- "ly:otf-glyph-count",
- "ly:otf-glyph-list",
- "ly:ottava-bracket::print",
- "ly:output-def-clone",
- "ly:output-def-lookup",
- "ly:output-def-parent",
- "ly:output-def-scope",
- "ly:output-def-set-variable!",
- "ly:output-def?",
- "ly:output-description",
- "ly:output-find-context-def",
- "ly:outputter-close",
- "ly:outputter-dump-stencil",
- "ly:outputter-dump-string",
- "ly:outputter-output-scheme",
- "ly:outputter-port",
- "ly:page-marker?",
- "ly:page-turn-breaking",
- "ly:pango-font-physical-fonts",
- "ly:pango-font?",
- "ly:paper-book-header",
- "ly:paper-book-pages",
- "ly:paper-book-paper",
- "ly:paper-book-performances",
- "ly:paper-book-scopes",
- "ly:paper-book-systems",
- "ly:paper-book?",
- "ly:paper-column::break-align-width",
- "ly:paper-column::print",
- "ly:paper-fonts",
- "ly:paper-get-font",
- "ly:paper-get-number",
- "ly:paper-outputscale",
- "ly:paper-score-paper-systems",
- "ly:paper-system-minimum-distance",
- "ly:paper-system?",
- "ly:parse-file",
- "ly:parse-init",
- "ly:parse-string-expression",
- "ly:parsed-undead-list!",
- "ly:parser-clear-error",
- "ly:parser-clone",
- "ly:parser-define!",
- "ly:parser-error",
- "ly:parser-has-error?",
- "ly:parser-include-string",
- "ly:parser-lookup",
- "ly:parser-output-name",
- "ly:parser-parse-string",
- "ly:parser-set-note-names",
- "ly:part-combine-iterator::constructor",
- "ly:partial-iterator::constructor",
- "ly:partial-iterator::finalization",
- "ly:percent-repeat-interface::beat-slash",
- "ly:percent-repeat-interface::double-percent",
- "ly:percent-repeat-interface::percent",
- "ly:percent-repeat-iterator::constructor",
- "ly:perform-text-replacements",
- "ly:performance-headers",
- "ly:performance-write",
- "ly:piano-pedal-bracket::print",
- "ly:pitch-alteration",
- "ly:pitch-diff",
- "ly:pitch-negate",
- "ly:pitch-notename",
- "ly:pitch-octave",
- "ly:pitch-quartertones",
- "ly:pitch-semitones",
- "ly:pitch-steps",
- "ly:pitch-tones",
- "ly:pitch-transpose",
- "ly:pitch::less?",
- "ly:pitch<?",
- "ly:pitch?",
- "ly:pointer-group-interface::add-grob",
- "ly:pop-property-iterator::constructor",
- "ly:position-on-line?",
- "ly:prob-immutable-properties",
- "ly:prob-mutable-properties",
- "ly:prob-property",
- "ly:prob-property?",
- "ly:prob-set-property!",
- "ly:prob-type?",
- "ly:prob?",
- "ly:programming-error",
- "ly:progress",
- "ly:property-iterator::constructor",
- "ly:property-lookup-stats",
- "ly:property-unset-iterator::constructor",
- "ly:pt",
- "ly:pure-call",
- "ly:pure-from-neighbor-interface::calc-pure-relevant-grobs",
- "ly:push-property-iterator::constructor",
- "ly:quote-iterator::constructor",
- "ly:randomize-rand-seed",
- "ly:register-stencil-expression",
- "ly:register-translator",
- "ly:relative-group-extent",
- "ly:relative-octave-check::relative-callback",
- "ly:relative-octave-music::no-relative-callback",
- "ly:relative-octave-music::relative-callback",
- "ly:rename-file",
- "ly:reset-all-fonts",
- "ly:rest-collision::calc-positioning-done",
- "ly:rest-collision::force-shift-callback-rest",
- "ly:rest::calc-cross-staff",
- "ly:rest::height",
- "ly:rest::print",
- "ly:rest::pure-height",
- "ly:rest::width",
- "ly:rest::y-offset-callback",
- "ly:rhythmic-music-iterator::constructor",
- "ly:round-filled-box",
- "ly:round-polygon",
- "ly:run-translator",
- "ly:score-add-output-def!",
- "ly:score-embedded-format",
- "ly:score-error?",
- "ly:score-header",
- "ly:score-music",
- "ly:score-output-defs",
- "ly:score-set-header!",
- "ly:score?",
- "ly:script-column::before-line-breaking",
- "ly:script-column::row-before-line-breaking",
- "ly:script-interface::calc-cross-staff",
- "ly:script-interface::calc-direction",
- "ly:script-interface::calc-positioning-done",
- "ly:script-interface::print",
- "ly:self-alignment-interface::aligned-on-x-parent",
- "ly:self-alignment-interface::aligned-on-y-parent",
- "ly:self-alignment-interface::centered-on-x-parent",
- "ly:self-alignment-interface::centered-on-y-parent",
- "ly:self-alignment-interface::pure-y-aligned-on-self",
- "ly:self-alignment-interface::x-aligned-on-self",
- "ly:self-alignment-interface::y-aligned-on-self",
- "ly:semi-tie-column::calc-head-direction",
- "ly:semi-tie-column::calc-positioning-done",
- "ly:semi-tie::calc-control-points",
- "ly:separation-item::calc-skylines",
- "ly:sequential-iterator::constructor",
- "ly:set-color-names",
- "ly:set-default-scale",
- "ly:set-grob-creation-callback",
- "ly:set-grob-modification-callback",
- "ly:set-middle-C!",
- "ly:set-option",
- "ly:set-origin!",
- "ly:set-property-cache-callback",
- "ly:side-position-interface::calc-cross-staff",
- "ly:side-position-interface::move-to-extremal-staff",
- "ly:side-position-interface::pure-y-aligned-side",
- "ly:side-position-interface::set-axis!",
- "ly:side-position-interface::x-aligned-side",
- "ly:side-position-interface::y-aligned-side",
- "ly:simple-music-iterator::constructor",
- "ly:simultaneous-music-iterator::constructor",
- "ly:skyline->points",
- "ly:skyline-distance",
- "ly:skyline-empty?",
- "ly:skyline-height",
- "ly:skyline-max-height",
- "ly:skyline-max-height-position",
- "ly:skyline-merge",
- "ly:skyline-pad",
- "ly:skyline-pair?",
- "ly:skyline-touching-point",
- "ly:skyline?",
- "ly:skylines-for-stencil",
- "ly:slur::calc-control-points",
- "ly:slur::calc-cross-staff",
- "ly:slur::calc-direction",
- "ly:slur::height",
- "ly:slur::outside-slur-callback",
- "ly:slur::outside-slur-cross-staff",
- "ly:slur::print",
- "ly:slur::pure-height",
- "ly:slur::pure-outside-slur-callback",
- "ly:smob-protects",
- "ly:solve-spring-rod-problem",
- "ly:source-file?",
- "ly:source-files",
- "ly:spacing-spanner::calc-common-shortest-duration",
- "ly:spacing-spanner::set-springs",
- "ly:span-bar::before-line-breaking",
- "ly:span-bar::calc-anchor",
- "ly:span-bar::calc-glyph-name",
- "ly:span-bar::choose-model-bar-line",
- "ly:span-bar::print",
- "ly:span-bar::width",
- "ly:spanner-bound",
- "ly:spanner-broken-into",
- "ly:spanner-set-bound!",
- "ly:spanner::bounds-width",
- "ly:spanner::calc-normalized-endpoints",
- "ly:spanner::kill-zero-spanned-time",
- "ly:spanner::set-spacing-rods",
- "ly:spanner?",
- "ly:spawn",
- "ly:spring-set-inverse-compress-strength!",
- "ly:spring-set-inverse-stretch-strength!",
- "ly:spring?",
- "ly:staff-symbol-line-thickness",
- "ly:staff-symbol-referencer::callback",
- "ly:staff-symbol-staff-radius",
- "ly:staff-symbol-staff-space",
- "ly:staff-symbol::height",
- "ly:staff-symbol::print",
- "ly:stderr-redirect",
- "ly:stem-tremolo::calc-cross-staff",
- "ly:stem-tremolo::calc-direction",
- "ly:stem-tremolo::calc-shape",
- "ly:stem-tremolo::calc-slope",
- "ly:stem-tremolo::calc-width",
- "ly:stem-tremolo::calc-y-offset",
- "ly:stem-tremolo::print",
- "ly:stem-tremolo::pure-calc-y-offset",
- "ly:stem-tremolo::pure-height",
- "ly:stem-tremolo::width",
- "ly:stem::calc-cross-staff",
- "ly:stem::calc-default-direction",
- "ly:stem::calc-direction",
- "ly:stem::calc-length",
- "ly:stem::calc-positioning-done",
- "ly:stem::calc-stem-begin-position",
- "ly:stem::calc-stem-end-position",
- "ly:stem::calc-stem-info",
- "ly:stem::height",
- "ly:stem::offset-callback",
- "ly:stem::print",
- "ly:stem::pure-calc-length",
- "ly:stem::pure-calc-stem-begin-position",
- "ly:stem::pure-calc-stem-end-position",
- "ly:stem::pure-height",
- "ly:stem::width",
- "ly:stencil-add",
- "ly:stencil-aligned-to",
- "ly:stencil-combine-at-edge",
- "ly:stencil-empty?",
- "ly:stencil-expr",
- "ly:stencil-extent",
- "ly:stencil-in-color",
- "ly:stencil-outline",
- "ly:stencil-rotate",
- "ly:stencil-rotate-absolute",
- "ly:stencil-scale",
- "ly:stencil-stack",
- "ly:stencil-translate",
- "ly:stencil-translate-axis",
- "ly:stencil?",
- "ly:stream-event?",
- "ly:string-percent-encode",
- "ly:string-substitute",
- "ly:sustain-pedal::print",
- "ly:system",
- "ly:system-font-load",
- "ly:system-start-delimiter::print",
- "ly:system::calc-pure-height",
- "ly:system::calc-pure-relevant-grobs",
- "ly:system::footnotes-after-line-breaking",
- "ly:system::footnotes-before-line-breaking",
- "ly:system::get-nonspaceable-staves",
- "ly:system::get-spaceable-staves",
- "ly:system::get-staves",
- "ly:system::get-vertical-alignment",
- "ly:system::height",
- "ly:system::vertical-skyline-elements",
- "ly:text-interface::interpret-markup",
- "ly:text-interface::interpret-string",
- "ly:text-interface::print",
- "ly:tie-column::before-line-breaking",
- "ly:tie-column::calc-positioning-done",
- "ly:tie::calc-control-points",
- "ly:tie::calc-direction",
- "ly:tie::print",
- "ly:time-signature::print",
- "ly:transform->list",
- "ly:transform?",
- "ly:translate-cpp-warning-scheme",
- "ly:translator-context",
- "ly:translator-description",
- "ly:translator-group?",
- "ly:translator-name",
- "ly:translator?",
- "ly:transpose-key-alist",
- "ly:ttf->pfa",
- "ly:ttf-ps-name",
- "ly:tuplet-bracket::calc-connect-to-neighbors",
- "ly:tuplet-bracket::calc-cross-staff",
- "ly:tuplet-bracket::calc-direction",
- "ly:tuplet-bracket::calc-positions",
- "ly:tuplet-bracket::calc-x-positions",
- "ly:tuplet-bracket::print",
- "ly:tuplet-iterator::constructor",
- "ly:tuplet-number::calc-cross-staff",
- "ly:tuplet-number::calc-x-offset",
- "ly:tuplet-number::calc-y-offset",
- "ly:tuplet-number::print",
- "ly:type1->pfa",
- "ly:unit",
- "ly:unpure-call",
- "ly:unpure-pure-container-pure-part",
- "ly:unpure-pure-container-unpure-part",
- "ly:unpure-pure-container?",
- "ly:usage",
- "ly:vaticana-ligature::brew-ligature-primitive",
- "ly:vaticana-ligature::print",
- "ly:verbose-output?",
- "ly:version",
- "ly:version?",
- "ly:volta-bracket-interface::print",
- "ly:volta-bracket::calc-shorten-pair",
- "ly:volta-repeat-iterator::constructor",
- "ly:volta-specced-music-iterator::constructor",
- "ly:vowel-transition::set-spacing-rods",
- "ly:warning",
- "ly:warning-located",
- "ly:wide-char->utf-8",
- "lyric-hyphen::vaticana-style",
- "lyric-text::print",
- "magnification->font-size",
- "magnify-markup",
- "magnifyStaff-is-set?",
- "magstep",
- "maj7-modifier",
- "make-abs-fontsize-markup",
- "make-accidental-dodecaphonic-rule",
- "make-accidental-markup",
- "make-accidental-rule",
- "make-align-on-other-markup",
- "make-apply-context",
- "make-arrow-head-markup",
- "make-articulation",
- "make-auto-footnote-markup",
- "make-autochange-music",
- "make-backslashed-digit-markup",
- "make-beam-markup",
- "make-bezier-sandwich-stencil",
- "make-bold-markup",
- "make-bow-stencil",
- "make-box-markup",
- "make-bracket-bar-line",
- "make-bracket-markup",
- "make-c-time-signature-markup",
- "make-caps-markup",
- "make-center-align-markup",
- "make-center-column-markup",
- "make-central-column-hole-addresses",
- "make-char-markup",
- "make-chord-elements",
- "make-circle-markup",
- "make-circle-stencil",
- "make-clef-set",
- "make-coda-markup",
- "make-colon-bar-line",
- "make-color-handler",
- "make-column-lines-markup-list",
- "make-column-markup",
- "make-combine-markup",
- "make-compound-meter-markup",
- "make-concat-markup",
- "make-conditional-trill-markup-markup",
- "make-connected-line",
- "make-connected-path-stencil",
- "make-cue-clef-set",
- "make-cue-clef-unset",
- "make-customTabClef-markup",
- "make-dashed-bar-line",
- "make-default-fonts-tree",
- "make-dir-column-markup",
- "make-dotted-bar-line",
- "make-doubleflat-markup",
- "make-doublesharp-markup",
- "make-draw-circle-markup",
- "make-draw-dashed-line-markup",
- "make-draw-dotted-line-markup",
- "make-draw-hline-markup",
- "make-draw-line-markup",
- "make-draw-squiggle-line-markup",
- "make-duration-of-length",
- "make-dynamic-markup",
- "make-ellipse-markup",
- "make-ellipse-stencil",
- "make-empty-bar-line",
- "make-engraver",
- "make-epsfile-markup",
- "make-event-chord",
- "make-extended-scale",
- "make-eyeglasses-markup",
- "make-fermata-markup",
- "make-figured-bass-markup",
- "make-fill-line-markup",
- "make-fill-with-pattern-markup",
- "make-filled-box-markup",
- "make-filled-box-stencil",
- "make-finger-markup",
- "make-first-visible-markup",
- "make-flat-markup",
- "make-font-tree-leaf",
- "make-font-tree-node",
- "make-fontCaps-markup",
- "make-fontsize-markup",
- "make-footnote-markup",
- "make-fraction-markup",
- "make-fret-diagram",
- "make-fret-diagram-markup",
- "make-fret-diagram-terse-markup",
- "make-fret-diagram-verbose-markup",
- "make-fromproperty-markup",
- "make-general-align-markup",
- "make-glyph-time-signature-markup",
- "make-grace-music",
- "make-graceless-rhythmic-location",
- "make-grob-property-override",
- "make-grob-property-revert",
- "make-grob-property-set",
- "make-halign-markup",
- "make-harmonic",
- "make-harp-pedal-markup",
- "make-hbracket-markup",
- "make-hcenter-in-markup",
- "make-hspace-markup",
- "make-huge-markup",
- "make-if-markup",
- "make-italic-markup",
- "make-justified-lines-markup-list",
- "make-justify-field-markup",
- "make-justify-line-markup",
- "make-justify-markup",
- "make-justify-string-markup",
- "make-key-alist",
- "make-key-symbols",
- "make-kievan-bar-line",
- "make-large-markup",
- "make-larger-markup",
- "make-left-align-markup",
- "make-left-brace-markup",
- "make-left-column-markup",
- "make-left-hand-key-addresses",
- "make-line-markup",
- "make-line-stencil",
- "make-lookup-markup",
- "make-lower-markup",
- "make-lyric-event",
- "make-lyric-repeat-count-formatter",
- "make-magnify-markup",
- "make-map-markup-commands-markup-list",
- "make-markalphabet-markup",
- "make-markletter-markup",
- "make-markup",
- "make-medium-markup",
- "make-modal-inverter",
- "make-modal-transposer",
- "make-multi-measure-rest",
- "make-multi-measure-rest-by-number-markup",
- "make-music",
- "make-musicglyph-markup",
- "make-name-keylist",
- "make-named-spreadsheet",
- "make-natural-markup",
- "make-no-bar-line",
- "make-non-relative-music",
- "make-normal-size-sub-markup",
- "make-normal-size-super-markup",
- "make-normal-text-markup",
- "make-normalsize-markup",
- "make-note-by-number-markup",
- "make-note-markup",
- "make-null-markup",
- "make-number-keylist",
- "make-number-markup",
- "make-on-the-fly-markup",
- "make-oval-markup",
- "make-oval-stencil",
- "make-overlay-markup",
- "make-override-lines-markup-list",
- "make-override-markup",
- "make-overtie-markup",
- "make-pad-around-markup",
- "make-pad-markup-markup",
- "make-pad-to-box-markup",
- "make-pad-x-markup",
- "make-page-link-markup",
- "make-page-ref-markup",
- "make-pango-font-tree",
- "make-parenthesis-stencil",
- "make-parenthesize-markup",
- "make-part-combine-context-changes",
- "make-part-combine-marks",
- "make-partial-ellipse-stencil",
- "make-path-markup",
- "make-path-stencil",
- "make-pattern-markup",
- "make-percent-set",
- "make-performer",
- "make-polygon-markup",
- "make-postscript-markup",
- "make-property-recursive-markup",
- "make-property-set",
- "make-property-unset",
- "make-put-adjacent-markup",
- "make-raise-markup",
- "make-relative",
- "make-relative::to-relative-callback",
- "make-repeat",
- "make-replace-markup",
- "make-rest-by-number-markup",
- "make-rest-markup",
- "make-rhythm-markup",
- "make-rhythmic-location",
- "make-right-align-markup",
- "make-right-brace-markup",
- "make-right-column-markup",
- "make-right-hand-key-addresses",
- "make-roman-markup",
- "make-rotate-markup",
- "make-rounded-box-markup",
- "make-sans-markup",
- "make-scale",
- "make-scale-markup",
- "make-score-lines-markup-list",
- "make-score-markup",
- "make-segno-bar-line",
- "make-segno-markup",
- "make-semiflat-markup",
- "make-semisharp-markup",
- "make-semitone->pitch",
- "make-sequential-music",
- "make-sesquiflat-markup",
- "make-sesquisharp-markup",
- "make-session-variable",
- "make-setting",
- "make-sharp-markup",
- "make-short-bar-line",
- "make-simple-bar-line",
- "make-simple-markup",
- "make-simultaneous-music",
- "make-skip-music",
- "make-skipped",
- "make-slashed-digit-markup",
- "make-small-markup",
- "make-smallCaps-markup",
- "make-smaller-markup",
- "make-spacer-bar-line",
- "make-span-event",
- "make-split-state",
- "make-spreadsheet",
- "make-stem-span!",
- "make-stem-spans!",
- "make-stencil-boxer",
- "make-stencil-circler",
- "make-stencil-markup",
- "make-string-lines-markup-list",
- "make-strut-markup",
- "make-sub-markup",
- "make-super-markup",
- "make-symbol-alist",
- "make-tab-heads-transparent",
- "make-table-markup-list",
- "make-teeny-markup",
- "make-text-markup",
- "make-thick-bar-line",
- "make-tick-bar-line",
- "make-tie-markup",
- "make-tie-stencil",
- "make-tied-lyric-markup",
- "make-tilted-portion",
- "make-time-signature-set",
- "make-tiny-markup",
- "make-tmpfile",
- "make-translate-markup",
- "make-translate-scaled-markup",
- "make-translator",
- "make-translator-component",
- "make-translator-internal",
- "make-transparent-box-stencil",
- "make-transparent-markup",
- "make-tremolo-set",
- "make-triangle-markup",
- "make-type-checker",
- "make-typewriter-markup",
- "make-underline-markup",
- "make-undertie-markup",
- "make-unfolded-set",
- "make-unless-markup",
- "make-upright-markup",
- "make-varcoda-markup",
- "make-vcenter-markup",
- "make-verbatim-file-markup",
- "make-voice-props-override",
- "make-voice-props-revert",
- "make-voice-props-set",
- "make-voice-states",
- "make-volta-set",
- "make-vspace-markup",
- "make-whiteout-markup",
- "make-with-color-markup",
- "make-with-dimension-from-markup",
- "make-with-dimension-markup",
- "make-with-dimensions-from-markup",
- "make-with-dimensions-markup",
- "make-with-link-markup",
- "make-with-outline-markup",
- "make-with-string-transformer-markup",
- "make-with-true-dimension-markup",
- "make-with-true-dimensions-markup",
- "make-with-url-markup",
- "make-woodwind-diagram-markup",
- "make-wordwrap-field-markup",
- "make-wordwrap-internal-markup-list",
- "make-wordwrap-lines-markup-list",
- "make-wordwrap-markup",
- "make-wordwrap-string-internal-markup-list",
- "make-wordwrap-string-markup",
- "map-alist-keys",
- "map-alist-vals",
- "map-markup-commands-markup-list",
- "map-selected-alist-keys",
- "map-some-music",
- "markalphabet-markup",
- "marked-up-headfoot",
- "marked-up-title",
- "markgeneric-string",
- "markletter-markup",
- "markup",
- "markup->string",
- "markup-argument-list-error",
- "markup-argument-list?",
- "markup-command-list?",
- "markup-command-signature",
- "markup-default-to-string-method",
- "markup-expression->make-markup",
- "markup-function-as-string-method",
- "markup-function-category",
- "markup-function-properties",
- "markup-function?",
- "markup-join",
- "markup-lambda",
- "markup-lambda-listify",
- "markup-lambda-worker",
- "markup-list-function?",
- "markup-list-lambda",
- "markup-list?",
- "markup-thrower-typecheck",
- "markup-typecheck?",
- "markup?",
- "match-predicate",
- "measure-counter::text",
- "medium-markup",
- "mensural-flag",
- "merge-details",
- "metronome-markup",
- "middle-broken-spanner?",
- "midi-program",
- "midline-stencil",
- "minor-modifier",
- "mkdir-if-not-exist",
- "mm-rest-child-list",
- "mmrest-of-length",
- "modern-straight-flag",
- "modified-font-metric-font-scaling",
- "modulo-bar-number-visible",
- "moment",
- "moment->fraction",
- "moment-min",
- "moment-pair?",
- "moment<=?",
- "move-chord-note",
- "multi-fork",
- "multi-measure-rest-by-number-markup",
- "music->make-music",
- "music-check-error",
- "music-clone",
- "music-filter",
- "music-invert",
- "music-is-of-type?",
- "music-map",
- "music-pitches",
- "music-property-description",
- "music-selective-filter",
- "music-selective-map",
- "music-separator?",
- "music-type-predicate",
- "musicglyph-markup",
- "n-true-entries",
- "narrow-glyph?",
- "natural-chord-alteration",
- "natural-markup",
- "negate-extent",
- "neo-modern-accidental-rule",
- "no-flag",
- "normal-flag",
- "normal-size-sub-markup",
- "normal-size-super-markup",
- "normal-text-markup",
- "normalize-fraction",
- "normalsize-markup",
- "not-first-broken-spanner?",
- "not-last-broken-spanner?",
- "note-by-number-markup",
- "note-events",
- "note-head::brew-ez-stencil",
- "note-head::calc-duration-log",
- "note-head::calc-glyph-name",
- "note-head::calc-kievan-duration-log",
- "note-markup",
- "note-name->german-markup",
- "note-name->markup",
- "note-name->string",
- "note-name-markup",
- "note-names-language",
- "note-to-cluster",
- "notes-to-clusters",
- "null-markup",
- "number->octal-string",
- "number-column-stencil",
- "number-format",
- "number-list?",
- "number-markup",
- "number-or-grob?",
- "number-or-pair?",
- "number-or-string?",
- "number-pair-list?",
- "number-pair?",
- "numbered-footnotes",
- "numerify",
- "object-type",
- "object-type-name",
- "oboe-lh-I-key-stencil",
- "oboe-lh-II-key-stencil",
- "oboe-lh-III-key-stencil",
- "oboe-lh-b-key-stencil",
- "oboe-lh-bes-key-stencil",
- "oboe-lh-cis-key-stencil",
- "oboe-lh-d-key-stencil",
- "oboe-lh-ees-key-stencil",
- "oboe-lh-ees-lh-bes-key-stencil",
- "oboe-lh-f-key-stencil",
- "oboe-lh-gis-key-stencil",
- "oboe-lh-gis-lh-low-b-key-stencil",
- "oboe-lh-low-b-key-stencil",
- "oboe-lh-octave-key-stencil",
- "oboe-rh-a-key-stencil",
- "oboe-rh-banana-key-stencil",
- "oboe-rh-c-key-stencil",
- "oboe-rh-c-rh-ees-key-stencil",
- "oboe-rh-cis-key-stencil",
- "oboe-rh-d-key-stencil",
- "oboe-rh-ees-key-stencil",
- "oboe-rh-f-key-stencil",
- "oboe-rh-gis-key-stencil",
- "octave-woodwind-text-stencil",
- "offset-add",
- "offset-flip-y",
- "offset-fret",
- "offset-multiple-types",
- "offset-scale",
- "offsetter",
- "old-straight-flag",
- "on-the-fly-markup",
- "only-if-beamed",
- "ordered-cons",
- "other-axis",
- "output-module?",
- "output-scopes",
- "outputproperty-compatibility",
- "oval-markup",
- "oval-stencil",
- "overlay-markup",
- "override-head-style",
- "override-lines-markup-list",
- "override-markup",
- "override-property-setting",
- "override-time-signature-setting",
- "overtie-markup",
- "pad-around-markup",
- "pad-markup-markup",
- "pad-to-box-markup",
- "pad-x-markup",
- "page-link-markup",
- "page-ref-markup",
- "pair-map",
- "pango-font-name",
- "pango-pf-file-name",
- "pango-pf-font-name",
- "pango-pf-fontindex",
- "paper-variable",
- "parentheses-interface::calc-angled-bracket-stencils",
- "parentheses-interface::calc-parenthesis-stencils",
- "parentheses-interface::print",
- "parentheses-interface::y-extent",
- "parenthesize-elements",
- "parenthesize-markup",
- "parenthesize-stencil",
- "parse-and-check-version",
- "parse-lily-version",
- "parse-terse-string",
- "path-markup",
- "pattern-markup",
- "percussion?",
- "performance-name-from-headers",
- "piccolo-rh-x-key-stencil",
- "pitch-alteration-semitones",
- "pitch-invert",
- "pitch-of-note",
- "pitch-step",
- "polar->rectangular",
- "polygon-markup",
- "position-true-endpoint",
- "postprocess-output",
- "postscript->pdf",
- "postscript->png",
- "postscript->ps",
- "postscript-markup",
- "precompute-music-length",
- "prepend-alist-chain",
- "prepend-props",
- "pretty-printable?",
- "previous-span-state",
- "previous-voice-state",
- "print-book-with",
- "print-book-with-defaults",
- "print-book-with-defaults-as-systems",
- "print-circled-text-callback",
- "print-keys",
- "print-keys-verbose",
- "process-fill-value",
- "property-recursive-markup",
- "pure-chain-offset-callback",
- "pure-from-neighbor-interface::account-for-span-bar",
- "pure-from-neighbor-interface::extra-spacing-height",
- "pure-from-neighbor-interface::extra-spacing-height-at-beginning-of-line",
- "pure-from-neighbor-interface::extra-spacing-height-including-staff",
- "pure-from-neighbor-interface::pure-height",
- "put-adjacent-markup",
- "quarterdiff->string",
- "quote-substitute",
- "raise-markup",
- "randomize-rand-seed",
- "ratio->fret",
- "ratio->pitch",
- "rational-or-procedure?",
- "read-lily-expression",
- "read-lily-expression-internal",
- "recent-enough?",
- "recompute-music-length",
- "recording-group-emulate",
- "regexp-split",
- "relevant-book-systems",
- "relevant-dump-systems",
- "remove-grace-property",
- "remove-step",
- "remove-whitespace",
- "repeat-tie::handle-tab-note-head",
- "replace-markup",
- "replace-step",
- "replicate-modify",
- "reset-stencil-colors",
- "rest-by-number-markup",
- "rest-markup",
- "retrieve-glyph-flag",
- "retrograde-music",
- "return-1",
- "reverse-interval",
- "revert-fontSize",
- "revert-head-style",
- "revert-property-setting",
- "revert-props",
- "revert-time-signature-setting",
- "rgb-color",
- "rh-woodwind-text-stencil",
- "rhythm-markup",
- "rhythmic-location->file-string",
- "rhythmic-location->string",
- "rhythmic-location-bar-number",
- "rhythmic-location-measure-position",
- "rhythmic-location<=?",
- "rhythmic-location<?",
- "rhythmic-location=?",
- "rhythmic-location>=?",
- "rhythmic-location>?",
- "rhythmic-location?",
- "rich-bassoon-uber-key-stencil",
- "rich-e-stencil",
- "rich-group-draw-rule",
- "rich-group-extra-offset-rule",
- "rich-path-stencil",
- "rich-pe-stencil",
- "right-align-markup",
- "right-brace-markup",
- "right-column-markup",
- "ring-column-circle-stencil",
- "robust-bar-number-function",
- "roman-markup",
- "rotate-markup",
- "rounded-box-markup",
- "rounded-box-stencil",
- "sans-markup",
- "sans-serif-stencil",
- "saxophone-lh-T-key-stencil",
- "saxophone-lh-b-cis-key-stencil",
- "saxophone-lh-b-key-stencil",
- "saxophone-lh-bes-key-stencil",
- "saxophone-lh-cis-key-stencil",
- "saxophone-lh-d-key-stencil",
- "saxophone-lh-ees-key-stencil",
- "saxophone-lh-f-key-stencil",
- "saxophone-lh-front-f-key-stencil",
- "saxophone-lh-gis-key-stencil",
- "saxophone-lh-low-a-key-stencil",
- "saxophone-lh-low-bes-key-stencil",
- "saxophone-name-passerelle",
- "saxophone-rh-bes-key-stencil",
- "saxophone-rh-c-key-stencil",
- "saxophone-rh-e-key-stencil",
- "saxophone-rh-ees-key-stencil",
- "saxophone-rh-fis-key-stencil",
- "saxophone-rh-high-fis-key-stencil",
- "saxophone-rh-low-c-key-stencil",
- "saxophone-rh-side-key-stencil",
- "scale->factor",
- "scale-beam-thickness",
- "scale-by-font-size",
- "scale-fontSize",
- "scale-layout",
- "scale-markup",
- "scale-props",
- "scale?",
- "scheme?",
- "scm->string",
- "score-lines-markup-list",
- "score-markup",
- "scorify-music",
- "script-interface::calc-x-offset",
- "script-or-side-position-cross-staff",
- "search-executable",
- "seconds->moment",
- "segno-markup",
- "select-head-glyph",
- "select-option",
- "self-alignment-interface::self-aligned-on-breakable",
- "self-evaluating?",
- "semi-tie::calc-cross-staff",
- "semiflat-markup",
- "semisharp-markup",
- "sequential-music-to-chord-exceptions",
- "sesquiflat-markup",
- "sesquisharp-markup",
- "session-replay",
- "session-save",
- "session-start-record",
- "session-terminate",
- "set-accidental-style",
- "set-bar-number-visibility",
- "set-counter-text!",
- "set-default-paper-size",
- "set-global-fonts",
- "set-global-staff-size",
- "set-mus-properties!",
- "set-output-property",
- "set-paper-dimension-variables",
- "set-paper-dimensions",
- "set-paper-size",
- "sharp-markup",
- "shift-duration-log",
- "shift-octave",
- "shift-one-duration-log",
- "shift-right-at-line-begin",
- "shift-semitone->pitch",
- "short-glyph?",
- "sign",
- "silence-events",
- "simple-markup",
- "simple-stencil-alist",
- "skip->rest",
- "skip-as-needed",
- "skip-of-length",
- "skip-of-moment-span",
- "skyline-pair-and-non-empty?",
- "skyline-pair::empty?",
- "slashed-digit-internal",
- "slashed-digit-markup",
- "slashify",
- "small-markup",
- "smallCaps-markup",
- "smaller-markup",
- "space-lines",
- "span-bar::compound-bar-line",
- "span-state",
- "split-at-predicate",
- "split-index",
- "split-list",
- "split-list-by-separator",
- "stack-lines",
- "stack-stencil-line",
- "stack-stencils",
- "stack-stencils-padding-list",
- "stack-thirds",
- "staff-ellipsis::calc-y-extent",
- "staff-ellipsis::print",
- "staff-highlight::height",
- "staff-highlight::print",
- "staff-highlight::width",
- "staff-magnification-is-changing?",
- "staff-symbol-line-count",
- "staff-symbol-line-positions",
- "staff-symbol-line-span",
- "staff-symbol-y-extent-from-line-positions",
- "staff-symbol::calc-widened-extent",
- "standard-e-stencil",
- "standard-path-stencil",
- "stderr",
- "stem-connectable?",
- "stem-is-root?",
- "stem-span-stencil",
- "stem-stub::do-calculations",
- "stem-stub::extra-spacing-height",
- "stem-stub::pure-height",
- "stem-stub::width",
- "stem-tremolo::calc-tab-width",
- "stem::calc-duration-log",
- "stem::kievan-offset-callback",
- "stencil-fretboard-extent",
- "stencil-fretboard-offset",
- "stencil-markup",
- "stencil-true-extent",
- "stencil-whiteout",
- "stencil-whiteout-box",
- "stencil-whiteout-outline",
- "stencil-with-color",
- "sticky-grob-interface::inherit-property",
- "straight-flag",
- "string->string-list",
- "string-encode-integer",
- "string-endswith",
- "string-lines-markup-list",
- "string-number::calc-text",
- "string-or-music?",
- "string-or-pair?",
- "string-or-symbol?",
- "string-regexp-substitute",
- "string-startswith",
- "string-thickness",
- "strip-string-annotation",
- "stroke-finger::calc-text",
- "strut-markup",
- "style-note-heads",
- "sub-markup",
- "subtract-base-fret",
- "suggest-convert-ly-message",
- "super-markup",
- "sus-modifier",
- "symbol-concatenate",
- "symbol-footnotes",
- "symbol-key-alist?",
- "symbol-key<?",
- "symbol-list-or-music?",
- "symbol-list-or-symbol?",
- "symbol-list?",
- "symbol<?",
- "symlink-if-not-exist",
- "symlink-or-copy-if-not-exist",
- "symmetric-interval",
- "synced?",
- "system-start-text::calc-x-offset",
- "system-start-text::calc-y-offset",
- "system-start-text::print",
- "tab-note-head::calc-glyph-name",
- "tab-note-head::print",
- "tab-note-head::print-custom-fret-label",
- "tab-note-head::whiteout-if-style-set",
- "tablature-position-on-lines",
- "table-markup-list",
- "tabvoice::draw-double-stem-for-half-notes",
- "tabvoice::make-double-stem-width-for-half-notes",
- "tag-group-get",
- "tags-keep-predicate",
- "tags-remove-predicate",
- "teaching-accidental-rule",
- "teeny-markup",
- "text-fill-translate",
- "text-mark-interface::calc-break-visibility",
- "text-mark-interface::calc-self-alignment-X",
- "text-markup",
- "tie-markup",
- "tie::handle-tab-note-head",
- "tied-lyric-markup",
- "tiny-markup",
- "translate-draw-instructions",
- "translate-key-instruction",
- "translate-markup",
- "translate-scaled-markup",
- "translator-property-description",
- "transparent-markup",
- "transposer-factory",
- "triangle-markup",
- "trill-pitch-group::pure-height",
- "true-entry?",
- "tuning",
- "tuplet-number::append-note-wrapper",
- "tuplet-number::calc-denominator-text",
- "tuplet-number::calc-direction",
- "tuplet-number::calc-fraction-text",
- "tuplet-number::fraction-with-notes",
- "tuplet-number::non-default-fraction-with-notes",
- "tuplet-number::non-default-tuplet-denominator-text",
- "tuplet-number::non-default-tuplet-fraction-text",
- "type-name",
- "typewriter-markup",
- "unbroken-or-first-broken-spanner?",
- "unbroken-or-last-broken-spanner?",
- "unbroken-spanner?",
- "underline-markup",
- "undertie-markup",
- "unfold-repeats",
- "unfold-repeats-fully",
- "uniform-draw-instructions",
- "uniform-extra-offset-rule",
- "uniq-list",
- "uniqued-alist",
- "unity-if-multimeasure",
- "universal-color",
- "unless-markup",
- "update-possb-list",
- "upper-key-stencil",
- "upright-markup",
- "value-for-spanner-piece",
- "varcoda-markup",
- "variable-column-circle-stencil",
- "vcenter-markup",
- "vector-for-each",
- "verbatim-file-markup",
- "version-not-seen-message",
- "voice-states",
- "voicify-chord",
- "voicify-list",
- "voicify-music",
- "void?",
- "volta-bracket-interface::pure-height",
- "volta-bracket::calc-hook-visibility",
- "volta-spec-music",
- "vspace-markup",
- "whiteout-markup",
- "with-color-markup",
- "with-dimension-from-markup",
- "with-dimension-markup",
- "with-dimensions-from-markup",
- "with-dimensions-markup",
- "with-link-markup",
- "with-outline-markup",
- "with-string-transformer-markup",
- "with-true-dimension-markup",
- "with-true-dimensions-markup",
- "with-url-markup",
- "woodwind-diagram-markup",
- "wordwrap-field-markup",
- "wordwrap-internal-markup-list",
- "wordwrap-lines-markup-list",
- "wordwrap-markup",
- "wordwrap-stencils",
- "wordwrap-string-internal-markup-list",
- "wordwrap-string-markup",
- "write-lilypond-book-aux-files",
- "write-me",
- "write-performances-midis",
- "x11-color",
-]
-
-context_properties = [
- "aDueText",
- "accidentalGrouping",
- "additionalBassStrings",
- "additionalPitchPrefix",
- "alignAboveContext",
- "alignBelowContext",
- "alterationGlyphs",
- "alternativeNumber",
- "alternativeNumberingStyle",
- "alternativeRestores",
- "associatedVoice",
- "associatedVoiceContext",
- "associatedVoiceType",
- "autoAccidentals",
- "autoBeamCheck",
- "autoBeaming",
- "autoCautionaries",
- "barCheckLastFail",
- "barCheckSynchronize",
- "barExtraVelocity",
- "barNumberFormatter",
- "barNumberVisibility",
- "baseMoment",
- "beamExceptions",
- "beamHalfMeasure",
- "beamMelismaBusy",
- "beatExtraVelocity",
- "beatStructure",
- "breathMarkDefinitions",
- "breathMarkType",
- "busyGrobs",
- "caesuraType",
- "caesuraTypeTransform",
- "centerBarNumbers",
- "chordChanges",
- "chordNameExceptions",
- "chordNameFunction",
- "chordNameLowercaseMinor",
- "chordNameSeparator",
- "chordNoteNamer",
- "chordPrefixSpacer",
- "chordRootNamer",
- "clefGlyph",
- "clefPosition",
- "clefTransposition",
- "clefTranspositionFormatter",
- "clefTranspositionStyle",
- "codaMarkCount",
- "codaMarkFormatter",
- "completionBusy",
- "completionFactor",
- "completionUnit",
- "connectArpeggios",
- "countPercentRepeats",
- "createKeyOnClefChange",
- "createSpacing",
- "crescendoSpanner",
- "crescendoText",
- "cueClefGlyph",
- "cueClefPosition",
- "cueClefTransposition",
- "cueClefTranspositionFormatter",
- "cueClefTranspositionStyle",
- "currentBarLine",
- "currentBarNumber",
- "currentChordCause",
- "currentChordText",
- "currentCommandColumn",
- "currentMusicalColumn",
- "currentPerformanceMarkEvent",
- "currentRehearsalMarkEvent",
- "dalSegnoTextFormatter",
- "decrescendoSpanner",
- "decrescendoText",
- "defaultStrings",
- "doubleRepeatBarType",
- "doubleRepeatSegnoBarType",
- "doubleSlurs",
- "drumPitchTable",
- "drumStyleTable",
- "dynamicAbsoluteVolumeFunction",
- "endAtSkip",
- "endRepeatBarType",
- "endRepeatSegnoBarType",
- "explicitClefVisibility",
- "explicitCueClefVisibility",
- "explicitKeySignatureVisibility",
- "extendersOverRests",
- "extraNatural",
- "figuredBassAlterationDirection",
- "figuredBassCenterContinuations",
- "figuredBassFormatter",
- "figuredBassLargeNumberAlignment",
- "figuredBassPlusDirection",
- "figuredBassPlusStrokedAlist",
- "finalFineTextVisibility",
- "finalizations",
- "fineBarType",
- "fineSegnoBarType",
- "fineStartRepeatSegnoBarType",
- "fineText",
- "fingeringOrientations",
- "firstClef",
- "followVoice",
- "fontSize",
- "forbidBreak",
- "forbidBreakBetweenBarLines",
- "forceBreak",
- "forceClef",
- "fretLabels",
- "glissandoMap",
- "graceSettings",
- "gridInterval",
- "handleNegativeFrets",
- "harmonicAccidentals",
- "harmonicDots",
- "hasAxisGroup",
- "hasStaffSpacing",
- "highStringOne",
- "ignoreBarChecks",
- "ignoreBarNumberChecks",
- "ignoreFiguredBassRest",
- "ignoreMelismata",
- "implicitBassFigures",
- "includeGraceNotes",
- "initialTimeSignatureVisibility",
- "instrumentCueName",
- "instrumentEqualizer",
- "instrumentName",
- "instrumentTransposition",
- "internalBarNumber",
- "keepAliveInterfaces",
- "keyAlterationOrder",
- "keyAlterations",
- "lastChord",
- "lastKeyAlterations",
- "localAlterations",
- "lyricMelismaAlignment",
- "lyricRepeatCountFormatter",
- "magnifyStaffValue",
- "majorSevenSymbol",
- "maximumFretStretch",
- "measureBarType",
- "measureLength",
- "measurePosition",
- "measureStartNow",
- "melismaBusy",
- "melismaBusyProperties",
- "metronomeMarkFormatter",
- "middleCClefPosition",
- "middleCCuePosition",
- "middleCOffset",
- "middleCPosition",
- "midiBalance",
- "midiChannelMapping",
- "midiChorusLevel",
- "midiExpression",
- "midiInstrument",
- "midiMaximumVolume",
- "midiMergeUnisons",
- "midiMinimumVolume",
- "midiPanPosition",
- "midiReverbLevel",
- "midiSkipOffset",
- "minimumFret",
- "minimumPageTurnLength",
- "minimumRepeatLengthForPageTurn",
- "minorChordModifier",
- "noChordSymbol",
- "noteNameFunction",
- "noteNameSeparator",
- "noteToFretFunction",
- "nullAccidentals",
- "ottavaStartNow",
- "ottavation",
- "ottavationMarkups",
- "output",
- "partCombineForced",
- "partCombineTextsOnNote",
- "partialBusy",
- "pedalSostenutoStrings",
- "pedalSostenutoStyle",
- "pedalSustainStrings",
- "pedalSustainStyle",
- "pedalUnaCordaStrings",
- "pedalUnaCordaStyle",
- "predefinedDiagramTable",
- "printAccidentalNames",
- "printKeyCancellation",
- "printNotesLanguage",
- "printOctaveNames",
- "printPartCombineTexts",
- "proportionalNotationDuration",
- "quotedCueEventTypes",
- "quotedEventTypes",
- "rehearsalMark",
- "rehearsalMarkFormatter",
- "repeatCommands",
- "repeatCountVisibility",
- "restCompletionBusy",
- "restNumberThreshold",
- "restrainOpenStrings",
- "rootSystem",
- "scriptDefinitions",
- "searchForVoice",
- "sectionBarType",
- "segnoBarType",
- "segnoMarkCount",
- "segnoMarkFormatter",
- "segnoStyle",
- "shapeNoteStyles",
- "shortInstrumentName",
- "shortVocalName",
- "skipBars",
- "skipTypesetting",
- "slashChordSeparator",
- "slurMelismaBusy",
- "soloIIText",
- "soloText",
- "squashedPosition",
- "staffLineLayoutFunction",
- "stanza",
- "startAtNoteColumn",
- "startAtSkip",
- "startRepeatBarType",
- "startRepeatSegnoBarType",
- "stavesFound",
- "stemLeftBeamCount",
- "stemRightBeamCount",
- "strictBeatBeaming",
- "stringFretFingerList",
- "stringNumberOrientations",
- "stringOneTopmost",
- "stringTunings",
- "strokeFingerOrientations",
- "subdivideBeams",
- "suggestAccidentals",
- "supportNonIntegerFret",
- "suspendMelodyDecisions",
- "suspendRestMerging",
- "systemStartDelimiter",
- "systemStartDelimiterHierarchy",
- "tabStaffLineLayoutFunction",
- "tablatureFormat",
- "tempoHideNote",
- "tempoWholesPerMinute",
- "tieMelismaBusy",
- "tieWaitForNote",
- "timeSignatureFraction",
- "timeSignatureSettings",
- "timing",
- "tonic",
- "topLevelAlignment",
- "tupletFullLength",
- "tupletFullLengthNote",
- "tupletSpannerDuration",
- "underlyingRepeatBarType",
- "useBassFigureExtenders",
- "vocalName",
- "voltaSpannerDuration",
- "whichBar",
-]
-
-grob_properties = [
- "X-align-on-main-noteheads",
- "X-attachment",
- "X-common",
- "X-extent",
- "X-offset",
- "X-positions",
- "Y-attachment",
- "Y-common",
- "Y-extent",
- "Y-offset",
- "accidental-grob",
- "accidental-grobs",
- "add-cauda",
- "add-join",
- "add-stem",
- "add-stem-support",
- "adjacent-pure-heights",
- "adjacent-spanners",
- "after-line-breaking",
- "align-dir",
- "all-elements",
- "allow-loose-spacing",
- "allow-span-bar",
- "alteration",
- "alteration-alist",
- "alteration-glyph-name-alist",
- "annotation",
- "annotation-balloon",
- "annotation-line",
- "arpeggio-direction",
- "arrow-length",
- "arrow-width",
- "ascendens",
- "auctum",
- "auto-knee-gap",
- "automatically-numbered",
- "average-spacing-wishes",
- "avoid-note-head",
- "avoid-scripts",
- "avoid-slur",
- "axes",
- "axis-group-parent-X",
- "axis-group-parent-Y",
- "bar-extent",
- "bars",
- "base-shortest-duration",
- "baseline-skip",
- "beam",
- "beam-segments",
- "beam-thickness",
- "beam-width",
- "beamed-stem-shorten",
- "beaming",
- "beamlet-default-length",
- "beamlet-max-length-proportion",
- "before-line-breaking",
- "begin-of-line-visible",
- "bend-me",
- "between-cols",
- "bezier",
- "bound-alignment-interfaces",
- "bound-details",
- "bound-padding",
- "bound-prefatory-paddings",
- "bounded-by-me",
- "bracket",
- "bracket-flare",
- "bracket-text",
- "bracket-visibility",
- "break-align-anchor",
- "break-align-anchor-alignment",
- "break-align-orders",
- "break-align-symbol",
- "break-align-symbols",
- "break-alignment",
- "break-overshoot",
- "break-visibility",
- "breakable",
- "broken-bound-padding",
- "c0-position",
- "cause",
- "cavum",
- "chord-dots-limit",
- "chord-names",
- "circled-tip",
- "clef-alignments",
- "clip-edges",
- "collapse-height",
- "collision-interfaces",
- "collision-voice-only",
- "color",
- "columns",
- "common-shortest-duration",
- "concaveness",
- "concurrent-hairpins",
- "conditional-elements",
- "connect-to-neighbor",
- "context-info",
- "control-points",
- "count-from",
- "covered-grobs",
- "cross-staff",
- "damping",
- "dash-definition",
- "dash-fraction",
- "dash-period",
- "dashed-edge",
- "default-direction",
- "default-staff-staff-spacing",
- "delta-position",
- "deminutum",
- "descendens",
- "details",
- "digit-names",
- "direction",
- "direction-source",
- "display-cautionary",
- "dot",
- "dot-count",
- "dot-negative-kern",
- "dot-placement-list",
- "dot-stencil",
- "dots",
- "double-stem-separation",
- "duration-log",
- "eccentricity",
- "edge-height",
- "edge-text",
- "elements",
- "encompass-objects",
- "endpoint-alignments",
- "expand-limit",
- "extra-dy",
- "extra-offset",
- "extra-spacing-height",
- "extra-spacing-width",
- "extroversion",
- "fa-merge-direction",
- "fa-styles",
- "figures",
- "filled",
- "flag",
- "flag-count",
- "flag-style",
- "flat-positions",
- "flexa-height",
- "flexa-interval",
- "flexa-width",
- "font",
- "font-encoding",
- "font-family",
- "font-features",
- "font-name",
- "font-series",
- "font-shape",
- "font-size",
- "footnote",
- "footnote-music",
- "footnote-stencil",
- "footnote-text",
- "footnotes-after-line-breaking",
- "footnotes-before-line-breaking",
- "force-hshift",
- "forced",
- "forced-spacing",
- "fraction",
- "french-beaming",
- "french-beaming-stem-adjustment",
- "fret-diagram-details",
- "full-length-padding",
- "full-length-to-extent",
- "full-measure-extra-space",
- "full-size-change",
- "gap",
- "gap-count",
- "glissando-index",
- "glissando-skip",
- "glyph",
- "glyph-left",
- "glyph-name",
- "glyph-right",
- "grace-spacing",
- "graphical",
- "grow-direction",
- "hair-thickness",
- "harp-pedal-details",
- "has-span-bar",
- "head-direction",
- "head-width",
- "heads",
- "height",
- "height-limit",
- "hide-tied-accidental-after-break",
- "horizon-padding",
- "horizontal-shift",
- "horizontal-skylines",
- "id",
- "ideal-distances",
- "ignore-ambitus",
- "ignore-collision",
- "implicit",
- "important-column-ranks",
- "in-note-direction",
- "in-note-padding",
- "in-note-stencil",
- "inclinatum",
- "index",
- "inspect-quants",
- "interfaces",
- "items-worth-living",
- "keep-alive-with",
- "keep-inside-line",
- "kern",
- "knee",
- "knee-spacing-correction",
- "knee-to-beam",
- "labels",
- "layer",
- "least-squares-dy",
- "ledger-extra",
- "ledger-line-thickness",
- "ledger-positions",
- "ledger-positions-function",
- "left-bound-info",
- "left-items",
- "left-neighbor",
- "left-number-text",
- "left-padding",
- "length",
- "length-fraction",
- "ligature-flexa",
- "line-break-penalty",
- "line-break-permission",
- "line-break-system-details",
- "line-count",
- "line-positions",
- "line-thickness",
- "linea",
- "long-text",
- "main-extent",
- "make-dead-when",
- "max-beam-connect",
- "max-symbol-separation",
- "maximum-gap",
- "maybe-loose",
- "measure-count",
- "measure-division",
- "measure-division-chord-placement-alist",
- "measure-division-lines-alist",
- "measure-length",
- "melody-spanner",
- "merge-differently-dotted",
- "merge-differently-headed",
- "meta",
- "minimum-X-extent",
- "minimum-Y-extent",
- "minimum-distance",
- "minimum-distances",
- "minimum-length",
- "minimum-length-after-break",
- "minimum-length-fraction",
- "minimum-space",
- "minimum-translations-alist",
- "neighbors",
- "neutral-direction",
- "neutral-position",
- "next",
- "no-ledgers",
- "no-stem-extend",
- "non-break-align-symbols",
- "non-default",
- "non-musical",
- "nonstaff-nonstaff-spacing",
- "nonstaff-relatedstaff-spacing",
- "nonstaff-unrelatedstaff-spacing",
- "normal-stems",
- "normalized-endpoints",
- "note-collision",
- "note-collision-threshold",
- "note-columns",
- "note-head",
- "note-heads",
- "note-names",
- "number-range-separator",
- "number-type",
- "numbering-assertion-function",
- "oriscus",
- "output-attributes",
- "outside-staff-horizontal-padding",
- "outside-staff-padding",
- "outside-staff-placement-directive",
- "outside-staff-priority",
- "packed-spacing",
- "padding",
- "padding-pairs",
- "page-break-penalty",
- "page-break-permission",
- "page-number",
- "page-turn-penalty",
- "page-turn-permission",
- "parent-alignment-X",
- "parent-alignment-Y",
- "parenthesis-friends",
- "parenthesis-id",
- "parenthesized",
- "pedal-text",
- "pes-or-flexa",
- "positioning-done",
- "positions",
- "prefer-dotted-right",
- "prefix-set",
- "primitive",
- "protrusion",
- "pure-Y-common",
- "pure-Y-extent",
- "pure-Y-offset-in-progress",
- "pure-relevant-grobs",
- "pure-relevant-items",
- "pure-relevant-spanners",
- "quantize-position",
- "quantized-positions",
- "quilisma",
- "rank-on-page",
- "ratio",
- "remove-empty",
- "remove-first",
- "remove-layer",
- "replacement-alist",
- "rest",
- "rest-collision",
- "restore-first",
- "rests",
- "rhythmic-location",
- "right-bound-info",
- "right-items",
- "right-neighbor",
- "right-number-text",
- "right-padding",
- "rotation",
- "round-up-exceptions",
- "round-up-to-longer-rest",
- "rounded",
- "same-direction-correction",
- "script-column",
- "script-priority",
- "script-stencil",
- "scripts",
- "segno-kern",
- "self-alignment-X",
- "self-alignment-Y",
- "shape",
- "sharp-positions",
- "shorten",
- "shorten-pair",
- "shortest-duration-space",
- "shortest-playing-duration",
- "shortest-starter-duration",
- "show-control-points",
- "show-horizontal-skylines",
- "show-vertical-skylines",
- "side-axis",
- "side-relative-direction",
- "side-support-elements",
- "size",
- "skip-quanting",
- "skyline-horizontal-padding",
- "skyline-vertical-padding",
- "slash-negative-kern",
- "slope",
- "slur",
- "slur-padding",
- "snap-radius",
- "space-alist",
- "space-increment",
- "space-to-barline",
- "spacing",
- "spacing-increment",
- "spacing-pair",
- "spacing-wishes",
- "span-start",
- "spanner-broken",
- "spanner-id",
- "spanner-placement",
- "springs-and-rods",
- "stacking-dir",
- "staff-affinity",
- "staff-grouper",
- "staff-padding",
- "staff-position",
- "staff-space",
- "staff-staff-spacing",
- "staff-symbol",
- "staffgroup-staff-spacing",
- "stem",
- "stem-attachment",
- "stem-begin-position",
- "stem-info",
- "stem-spacing-correction",
- "stemlet-length",
- "stems",
- "stencil",
- "stencils",
- "sticky-host",
- "strict-grace-spacing",
- "strict-note-spacing",
- "stroke-style",
- "stropha",
- "style",
- "system-Y-offset",
- "text",
- "text-alignment-X",
- "text-alignment-Y",
- "text-direction",
- "thick-thickness",
- "thickness",
- "tie",
- "tie-configuration",
- "ties",
- "to-barline",
- "toward-stem-shift",
- "toward-stem-shift-in-column",
- "transparent",
- "tremolo-flag",
- "tuplet-number",
- "tuplet-slur",
- "tuplet-start",
- "tuplets",
- "uniform-stretching",
- "usable-duration-logs",
- "use-skylines",
- "used",
- "vertical-alignment",
- "vertical-skyline-elements",
- "vertical-skylines",
- "virga",
- "visible-over-note-heads",
- "voiced-position",
- "when",
- "whiteout",
- "whiteout-style",
- "widened-extent",
- "width",
- "woodwind-diagram-details",
- "word-space",
- "x-offset",
- "zigzag-length",
- "zigzag-width",
-]
-
-paper_variables = [
- "auto-first-page-number",
- "basic-distance",
- "binding-offset",
- "blank-last-page-penalty",
- "blank-page-penalty",
- "bookTitleMarkup",
- "bottom-margin",
- "check-consistency",
- "evenFooterMarkup",
- "evenHeaderMarkup",
- "first-page-number",
- "footnote-separator-markup",
- "horizontal-shift",
- "indent",
- "inner-margin",
- "last-bottom-spacing",
- "left-margin",
- "line-width",
- "markup-markup-spacing",
- "markup-system-spacing",
- "max-systems-per-page",
- "min-systems-per-page",
- "minimum-distance",
- "oddFooterMarkup",
- "oddHeaderMarkup",
- "outer-margin",
- "padding",
- "page-breaking",
- "page-breaking-system-system-spacing",
- "page-count",
- "page-number-type",
- "page-spacing-weight",
- "paper-height",
- "paper-width",
- "print-all-headers",
- "print-first-page-number",
- "ragged-bottom",
- "ragged-last",
- "ragged-last-bottom",
- "ragged-right",
- "right-margin",
- "score-markup-spacing",
- "score-system-spacing",
- "scoreTitleMarkup",
- "short-indent",
- "stretchability",
- "system-count",
- "system-separator-markup",
- "system-system-spacing",
- "systems-per-page",
- "top-margin",
- "top-markup-spacing",
- "top-system-spacing",
- "two-sided",
-]
-
-header_variables = [
- "arranger",
- "composer",
- "copyright",
- "dedication",
- "doctitle",
- "instrument",
- "lsrtags",
- "meter",
- "opus",
- "piece",
- "poet",
- "subsubtitle",
- "subtitle",
- "tagline",
- "texidoc",
- "title",
-]
-
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_lua_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_lua_builtins.py
deleted file mode 100644
index 8170d5e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_lua_builtins.py
+++ /dev/null
@@ -1,285 +0,0 @@
-"""
- pygments.lexers._lua_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This file contains the names and modules of Lua functions.
- It is able to re-generate itself, but for adding new functions you
- probably have to add some callbacks (see function module_callbacks).
-
- Do not edit the MODULES dict by hand.
-
- Run with `python -I` to regenerate.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-MODULES = {'basic': ('_G',
- '_VERSION',
- 'assert',
- 'collectgarbage',
- 'dofile',
- 'error',
- 'getmetatable',
- 'ipairs',
- 'load',
- 'loadfile',
- 'next',
- 'pairs',
- 'pcall',
- 'print',
- 'rawequal',
- 'rawget',
- 'rawlen',
- 'rawset',
- 'select',
- 'setmetatable',
- 'tonumber',
- 'tostring',
- 'type',
- 'warn',
- 'xpcall'),
- 'bit32': ('bit32.arshift',
- 'bit32.band',
- 'bit32.bnot',
- 'bit32.bor',
- 'bit32.btest',
- 'bit32.bxor',
- 'bit32.extract',
- 'bit32.lrotate',
- 'bit32.lshift',
- 'bit32.replace',
- 'bit32.rrotate',
- 'bit32.rshift'),
- 'coroutine': ('coroutine.close',
- 'coroutine.create',
- 'coroutine.isyieldable',
- 'coroutine.resume',
- 'coroutine.running',
- 'coroutine.status',
- 'coroutine.wrap',
- 'coroutine.yield'),
- 'debug': ('debug.debug',
- 'debug.gethook',
- 'debug.getinfo',
- 'debug.getlocal',
- 'debug.getmetatable',
- 'debug.getregistry',
- 'debug.getupvalue',
- 'debug.getuservalue',
- 'debug.sethook',
- 'debug.setlocal',
- 'debug.setmetatable',
- 'debug.setupvalue',
- 'debug.setuservalue',
- 'debug.traceback',
- 'debug.upvalueid',
- 'debug.upvaluejoin'),
- 'io': ('io.close',
- 'io.flush',
- 'io.input',
- 'io.lines',
- 'io.open',
- 'io.output',
- 'io.popen',
- 'io.read',
- 'io.stderr',
- 'io.stdin',
- 'io.stdout',
- 'io.tmpfile',
- 'io.type',
- 'io.write'),
- 'math': ('math.abs',
- 'math.acos',
- 'math.asin',
- 'math.atan',
- 'math.atan2',
- 'math.ceil',
- 'math.cos',
- 'math.cosh',
- 'math.deg',
- 'math.exp',
- 'math.floor',
- 'math.fmod',
- 'math.frexp',
- 'math.huge',
- 'math.ldexp',
- 'math.log',
- 'math.max',
- 'math.maxinteger',
- 'math.min',
- 'math.mininteger',
- 'math.modf',
- 'math.pi',
- 'math.pow',
- 'math.rad',
- 'math.random',
- 'math.randomseed',
- 'math.sin',
- 'math.sinh',
- 'math.sqrt',
- 'math.tan',
- 'math.tanh',
- 'math.tointeger',
- 'math.type',
- 'math.ult'),
- 'modules': ('package.config',
- 'package.cpath',
- 'package.loaded',
- 'package.loadlib',
- 'package.path',
- 'package.preload',
- 'package.searchers',
- 'package.searchpath',
- 'require'),
- 'os': ('os.clock',
- 'os.date',
- 'os.difftime',
- 'os.execute',
- 'os.exit',
- 'os.getenv',
- 'os.remove',
- 'os.rename',
- 'os.setlocale',
- 'os.time',
- 'os.tmpname'),
- 'string': ('string.byte',
- 'string.char',
- 'string.dump',
- 'string.find',
- 'string.format',
- 'string.gmatch',
- 'string.gsub',
- 'string.len',
- 'string.lower',
- 'string.match',
- 'string.pack',
- 'string.packsize',
- 'string.rep',
- 'string.reverse',
- 'string.sub',
- 'string.unpack',
- 'string.upper'),
- 'table': ('table.concat',
- 'table.insert',
- 'table.move',
- 'table.pack',
- 'table.remove',
- 'table.sort',
- 'table.unpack'),
- 'utf8': ('utf8.char',
- 'utf8.charpattern',
- 'utf8.codepoint',
- 'utf8.codes',
- 'utf8.len',
- 'utf8.offset')}
-
-if __name__ == '__main__': # pragma: no cover
- import re
- from urllib.request import urlopen
- import pprint
-
- # you can't generally find out what module a function belongs to if you
- # have only its name. Because of this, here are some callback functions
-    # that recognize if a given function belongs to a specific module
- def module_callbacks():
- def is_in_coroutine_module(name):
- return name.startswith('coroutine.')
-
- def is_in_modules_module(name):
- if name in ['require', 'module'] or name.startswith('package'):
- return True
- else:
- return False
-
- def is_in_string_module(name):
- return name.startswith('string.')
-
- def is_in_table_module(name):
- return name.startswith('table.')
-
- def is_in_math_module(name):
- return name.startswith('math')
-
- def is_in_io_module(name):
- return name.startswith('io.')
-
- def is_in_os_module(name):
- return name.startswith('os.')
-
- def is_in_debug_module(name):
- return name.startswith('debug.')
-
- return {'coroutine': is_in_coroutine_module,
- 'modules': is_in_modules_module,
- 'string': is_in_string_module,
- 'table': is_in_table_module,
- 'math': is_in_math_module,
- 'io': is_in_io_module,
- 'os': is_in_os_module,
- 'debug': is_in_debug_module}
-
-
-
- def get_newest_version():
- f = urlopen('http://www.lua.org/manual/')
- r = re.compile(r'^<A HREF="(\d\.\d)/">(Lua )?\1</A>')
- for line in f:
- m = r.match(line.decode('iso-8859-1'))
- if m is not None:
- return m.groups()[0]
-
- def get_lua_functions(version):
- f = urlopen('http://www.lua.org/manual/%s/' % version)
- r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
- functions = []
- for line in f:
- m = r.match(line.decode('iso-8859-1'))
- if m is not None:
- functions.append(m.groups()[0])
- return functions
-
- def get_function_module(name):
- for mod, cb in module_callbacks().items():
- if cb(name):
- return mod
- if '.' in name:
- return name.split('.')[0]
- else:
- return 'basic'
-
- def regenerate(filename, modules):
- with open(filename, encoding='utf-8') as fp:
- content = fp.read()
-
- header = content[:content.find('MODULES = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
-
- with open(filename, 'w', encoding='utf-8') as fp:
- fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
- fp.write(footer)
-
- def run():
- version = get_newest_version()
- functions = set()
- for v in ('5.2', version):
- print('> Downloading function index for Lua %s' % v)
- f = get_lua_functions(v)
- print('> %d functions found, %d new:' %
- (len(f), len(set(f) - functions)))
- functions |= set(f)
-
- functions = sorted(functions)
-
- modules = {}
- for full_function_name in functions:
- print('>> %s' % full_function_name)
- m = get_function_module(full_function_name)
- modules.setdefault(m, []).append(full_function_name)
- modules = {k: tuple(v) for k, v in modules.items()}
-
- regenerate(__file__, modules)
-
- run()
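
For context, a minimal sketch of how the MODULES table generated above could be consumed (hypothetical helper names, not actual Pygments/LuaLexer code; assumes the pygments package is importable):

# Hypothetical illustration: flatten the generated MODULES table into a
# reverse index so a tool can ask, for any dotted name, whether it is a
# known Lua builtin and which module provides it.
from pygments.lexers._lua_builtins import MODULES

BUILTIN_TO_MODULE = {
    name: module
    for module, names in MODULES.items()
    for name in names
}

def classify(name):
    """Return the owning module of a builtin, or None if unknown."""
    return BUILTIN_TO_MODULE.get(name)

assert classify('string.format') == 'string'
assert classify('require') == 'modules'
assert classify('not.a.builtin') is None
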
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_mapping.py b/venv/lib/python3.11/site-packages/pygments/lexers/_mapping.py
deleted file mode 100644
index aaec802..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_mapping.py
+++ /dev/null
@@ -1,580 +0,0 @@
-# Automatically generated by scripts/gen_mapfiles.py.
-# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead.
-
-LEXERS = {
- 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
- 'AMDGPULexer': ('pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()),
- 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()),
- 'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
- 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
- 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
- 'AdaLexer': ('pygments.lexers.ada', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
- 'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
- 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
- 'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
- 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
- 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)),
- 'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
- 'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
- 'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
- 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()),
- 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
- 'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
- 'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
- 'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
- 'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
- 'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
- 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
- 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
- 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
- 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
- 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
- 'ArrowLexer': ('pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
- 'ArturoLexer': ('pygments.lexers.arturo', 'Arturo', ('arturo', 'art'), ('*.art',), ()),
- 'AscLexer': ('pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature', 'application/pem-certificate-chain')),
- 'Asn1Lexer': ('pygments.lexers.asn1', 'ASN.1', ('asn1',), ('*.asn1',), ()),
- 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
- 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
- 'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
- 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
- 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
- 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
- 'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
- 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
- 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
- 'BQNLexer': ('pygments.lexers.bqn', 'BQN', ('bqn',), ('*.bqn',), ()),
- 'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
- 'BareLexer': ('pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
- 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
- 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
- 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
- 'BddLexer': ('pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)),
- 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
- 'BerryLexer': ('pygments.lexers.berry', 'Berry', ('berry', 'be'), ('*.be',), ('text/x-berry', 'application/x-berry')),
- 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)),
- 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
- 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
- 'BlueprintLexer': ('pygments.lexers.blueprint', 'Blueprint', ('blueprint',), ('*.blp',), ('text/x-blueprint',)),
- 'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
- 'BoaLexer': ('pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()),
- 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
- 'BoogieLexer': ('pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()),
- 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
- 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
- 'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
- 'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc', '*.x[bp]m'), ('text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap')),
- 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
- 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
- 'CPSALexer': ('pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()),
- 'CSSUL4Lexer': ('pygments.lexers.ul4', 'CSS+UL4', ('css+ul4',), ('*.cssul4',), ()),
- 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
- 'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#', 'cs'), ('*.cs',), ('text/x-csharp',)),
- 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
- 'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
- 'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
- 'CapnProtoLexer': ('pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
- 'CarbonLexer': ('pygments.lexers.carbon', 'Carbon', ('carbon',), ('*.carbon',), ('text/x-carbon',)),
- 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
- 'CddlLexer': ('pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)),
- 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
- 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
- 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
- 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
- 'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
- 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
- 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
- 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
- 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
- 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
- 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
- 'CleanLexer': ('pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()),
- 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj', '*.cljc'), ('text/x-clojure', 'application/x-clojure')),
- 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
- 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
- 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
- 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
- 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
- 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
- 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
- 'Comal80Lexer': ('pygments.lexers.comal', 'COMAL-80', ('comal', 'comal80'), ('*.cml', '*.comal'), ()),
- 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
- 'ComponentPascalLexer': ('pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)),
- 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
- 'CplintLexer': ('pygments.lexers.cplint', 'cplint', ('cplint',), ('*.ecl', '*.prolog', '*.pro', '*.pl', '*.P', '*.lpad', '*.cpl'), ('text/x-cplint',)),
- 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP', '*.tpp'), ('text/x-c++hdr', 'text/x-c++src')),
- 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
- 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
- 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
- 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
- 'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
- 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
- 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
- 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
- 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), ('*.css.j2', '*.css.jinja2'), ('text/css+django', 'text/css+jinja')),
- 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)),
- 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
- 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
- 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
- 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
- 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
- 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
- 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
- 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
- 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
- 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
- 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
- 'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
- 'DaxLexer': ('pygments.lexers.dax', 'Dax', ('dax',), ('*.dax',), ()),
- 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
- 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
- 'DesktopLexer': ('pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ()),
- 'DevicetreeLexer': ('pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
- 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
- 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
- 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
- 'DnsZoneLexer': ('pygments.lexers.dns', 'Zone', ('zone',), ('*.zone',), ('text/dns',)),
- 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
- 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
- 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
- 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
- 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
- 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
- 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
- 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
- 'EarlGreyLexer': ('pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)),
- 'EasytrieveLexer': ('pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)),
- 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
- 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
- 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
- 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)),
- 'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
- 'ElpiLexer': ('pygments.lexers.elpi', 'Elpi', ('elpi',), ('*.elpi',), ('text/x-elpi',)),
- 'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
- 'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
- 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
- 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
- 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
- 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
- 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
- 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
- 'ExeclineLexer': ('pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
- 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
- 'FSharpLexer': ('pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi', '*.fsx'), ('text/x-fsharp',)),
- 'FStarLexer': ('pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)),
- 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
- 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
- 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
- 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
- 'FennelLexer': ('pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()),
- 'FiftLexer': ('pygments.lexers.fift', 'Fift', ('fift', 'fif'), ('*.fif',), ()),
- 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
- 'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
- 'FloScriptLexer': ('pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()),
- 'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
- 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
- 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
- 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
- 'FreeFemLexer': ('pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)),
- 'FuncLexer': ('pygments.lexers.func', 'FunC', ('func', 'fc'), ('*.fc', '*.func'), ()),
- 'FutharkLexer': ('pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)),
- 'GAPConsoleLexer': ('pygments.lexers.algebra', 'GAP session', ('gap-console', 'gap-repl'), ('*.tst',), ()),
- 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
- 'GDScriptLexer': ('pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
- 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
- 'GSQLLexer': ('pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()),
- 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
- 'GcodeLexer': ('pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()),
- 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
- 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
- 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
- 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
- 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
- 'GoLexer': ('pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
- 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
- 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
- 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
- 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
- 'GraphQLLexer': ('pygments.lexers.graphql', 'GraphQL', ('graphql',), ('*.graphql',), ()),
- 'GraphvizLexer': ('pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')),
- 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1-9]', '*.man', '*.1p', '*.3pm'), ('application/x-troff', 'text/troff')),
- 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
- 'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
- 'HTMLUL4Lexer': ('pygments.lexers.ul4', 'HTML+UL4', ('html+ul4',), ('*.htmlul4',), ()),
- 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
- 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
- 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
- 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
- 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
- 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
- 'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
- 'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), ('*Spec.hs',), ()),
- 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), ('*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2'), ('text/html+django', 'text/html+jinja')),
- 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
- 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
- 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
- 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
- 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
- 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
- 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
- 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
- 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
- 'IconLexer': ('pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
- 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
- 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
- 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
- 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
- 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
- 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '.editorconfig'), ('text/x-ini', 'text/inf')),
- 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
- 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
- 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
- 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
- 'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
- 'JMESPathLexer': ('pygments.lexers.jmespath', 'JMESPath', ('jmespath', 'jp'), ('*.jp',), ()),
- 'JSLTLexer': ('pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
- 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
- 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
- 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
- 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), ('*.js.j2', '*.js.jinja2'), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
- 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
- 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
- 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
- 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
- 'JavascriptUL4Lexer': ('pygments.lexers.ul4', 'Javascript+UL4', ('js+ul4',), ('*.jsul4',), ()),
- 'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
- 'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
- 'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', (), (), ()),
- 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
- 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', '*.jsonl', '*.ndjson', 'Pipfile.lock'), ('application/json', 'application/json-object', 'application/x-ndjson', 'application/jsonl', 'application/json-seq')),
- 'JsonnetLexer': ('pygments.lexers.jsonnet', 'Jsonnet', ('jsonnet',), ('*.jsonnet', '*.libsonnet'), ()),
- 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
- 'JsxLexer': ('pygments.lexers.jsx', 'JSX', ('jsx', 'react'), ('*.jsx', '*.react'), ('text/jsx', 'text/typescript-jsx')),
- 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
- 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
- 'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
- 'KLexer': ('pygments.lexers.q', 'K', ('k',), ('*.k',), ()),
- 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
- 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
- 'KernelLogLexer': ('pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()),
- 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
- 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)),
- 'KuinLexer': ('pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()),
- 'KustoLexer': ('pygments.lexers.kusto', 'Kusto', ('kql', 'kusto'), ('*.kql', '*.kusto', '.csl'), ()),
- 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
- 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
- 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
- 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
- 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
- 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
- 'LdaprcLexer': ('pygments.lexers.ldap', 'LDAP configuration file', ('ldapconf', 'ldaprc'), ('.ldaprc', 'ldaprc', 'ldap.conf'), ('text/x-ldapconf',)),
- 'LdifLexer': ('pygments.lexers.ldap', 'LDIF', ('ldif',), ('*.ldif',), ('text/x-ldif',)),
- 'Lean3Lexer': ('pygments.lexers.lean', 'Lean', ('lean', 'lean3'), ('*.lean',), ('text/x-lean', 'text/x-lean3')),
- 'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
- 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
- 'LilyPondLexer': ('pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()),
- 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
- 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
- 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)),
- 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)),
- 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)),
- 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)),
- 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)),
- 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
- 'LlvmMirBodyLexer': ('pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()),
- 'LlvmMirLexer': ('pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()),
- 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
- 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
- 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
- 'MCFunctionLexer': ('pygments.lexers.minecraft', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)),
- 'MCSchemaLexer': ('pygments.lexers.minecraft', 'MCSchema', ('mcschema',), ('*.mcschema',), ('text/mcschema',)),
- 'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
- 'MIPSLexer': ('pygments.lexers.mips', 'MIPS', ('mips',), ('*.mips', '*.MIPS'), ()),
- 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
- 'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
- 'Macaulay2Lexer': ('pygments.lexers.macaulay2', 'Macaulay2', ('macaulay2',), ('*.m2',), ()),
- 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
- 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
- 'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
- 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
- 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
- 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
- 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
- 'MarkdownLexer': ('pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
- 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
- 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
- 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
- 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
- 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
- 'MaximaLexer': ('pygments.lexers.maxima', 'Maxima', ('maxima', 'macsyma'), ('*.mac', '*.max'), ()),
- 'MesonLexer': ('pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)),
- 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
- 'MiniScriptLexer': ('pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
- 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
- 'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
- 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
- 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
- 'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
- 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
- 'MoselLexer': ('pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()),
- 'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
- 'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
- 'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
- 'MozPreprocPercentLexer': ('pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
- 'MozPreprocXulLexer': ('pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
- 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
- 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
- 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
- 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
- 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
- 'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
- 'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
- 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
- 'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
- 'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
- 'NCLLexer': ('pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
- 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
- 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM', '*.nasm'), ('text/x-nasm',)),
- 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
- 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
- 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
- 'NestedTextLexer': ('pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()),
- 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
- 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
- 'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
- 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
- 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
- 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
- 'NodeConsoleLexer': ('pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)),
- 'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
- 'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
- 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
- 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
- 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
- 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
- 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
- 'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
- 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
- 'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
- 'OmgIdlLexer': ('pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()),
- 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
- 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
- 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
- 'OpenScadLexer': ('pygments.lexers.openscad', 'OpenSCAD', ('openscad',), ('*.scad',), ('application/x-openscad',)),
- 'OutputLexer': ('pygments.lexers.special', 'Text output', ('output',), (), ()),
- 'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
- 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
- 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
- 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
- 'PegLexer': ('pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
- 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
- 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
- 'PhixLexer': ('pygments.lexers.phix', 'Phix', ('phix',), ('*.exw',), ('text/x-phix',)),
- 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
- 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
- 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
- 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
- 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
- 'PointlessLexer': ('pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
- 'PonyLexer': ('pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
- 'PortugolLexer': ('pygments.lexers.pascal', 'Portugol', ('portugol',), ('*.alg', '*.portugol'), ()),
- 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
- 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
- 'PostgresExplainLexer': ('pygments.lexers.sql', 'PostgreSQL EXPLAIN dialect', ('postgres-explain',), ('*.explain',), ('text/x-postgresql-explain',)),
- 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
- 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
- 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
- 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()),
- 'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
- 'ProcfileLexer': ('pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
- 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
- 'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
- 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
- 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
- 'PrqlLexer': ('pygments.lexers.prql', 'PRQL', ('prql',), ('*.prql',), ('application/prql', 'application/x-prql')),
- 'PsyshConsoleLexer': ('pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()),
- 'PtxLexer': ('pygments.lexers.ptx', 'PTX', ('ptx',), ('*.ptx',), ('text/x-ptx',)),
- 'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
- 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
- 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
- 'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
- 'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
- 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
- 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
- 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
- 'PythonUL4Lexer': ('pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()),
- 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
- 'QLexer': ('pygments.lexers.q', 'Q', ('q',), ('*.q',), ()),
- 'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
- 'QlikLexer': ('pygments.lexers.qlik', 'Qlik', ('qlik', 'qlikview', 'qliksense', 'qlikscript'), ('*.qvs', '*.qvw'), ()),
- 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
- 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
- 'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()),
- 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
- 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
- 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
- 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
- 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
- 'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
- 'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
- 'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
- 'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
- 'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
- 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)),
- 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
- 'ReasonLexer': ('pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)),
- 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
- 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
- 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
- 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
- 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
- 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
- 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
- 'RideLexer': ('pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)),
- 'RitaLexer': ('pygments.lexers.rita', 'Rita', ('rita',), ('*.rita',), ('text/rita',)),
- 'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
- 'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
- 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot', '*.resource'), ('text/x-robotframework',)),
- 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
- 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
- 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
- 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()),
- 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
- 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('ruby', 'rb', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile'), ('text/x-ruby', 'application/x-ruby')),
- 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')),
- 'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
- 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
- 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
- 'SNBTLexer': ('pygments.lexers.minecraft', 'SNBT', ('snbt',), ('*.snbt',), ('text/snbt',)),
- 'SarlLexer': ('pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)),
- 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
- 'SaviLexer': ('pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()),
- 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
- 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
- 'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
- 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
- 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
- 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
- 'SedLexer': ('pygments.lexers.textedit', 'Sed', ('sed', 'gsed', 'ssed'), ('*.sed', '*.[gs]sed'), ('text/x-sed',)),
- 'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
- 'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
- 'SieveLexer': ('pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()),
- 'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
- 'SingularityLexer': ('pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()),
- 'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()),
- 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
- 'SlurmBashLexer': ('pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()),
- 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
- 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
- 'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
- 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
- 'SmithyLexer': ('pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()),
- 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
- 'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
- 'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
- 'SophiaLexer': ('pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()),
- 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
- 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
- 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
- 'SpiceLexer': ('pygments.lexers.spice', 'Spice', ('spice', 'spicelang'), ('*.spice',), ('text/x-spice',)),
- 'SqlJinjaLexer': ('pygments.lexers.templates', 'SQL+Jinja', ('sql+jinja',), ('*.sql', '*.sql.j2', '*.sql.jinja2'), ()),
- 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
- 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
- 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
- 'SrcinfoLexer': ('pygments.lexers.srcinfo', 'Srcinfo', ('srcinfo',), ('.SRCINFO',), ()),
- 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
- 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
- 'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
- 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
- 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
- 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
- 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
- 'SystemdLexer': ('pygments.lexers.configs', 'Systemd', ('systemd',), ('*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ()),
- 'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
- 'TNTLexer': ('pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
- 'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ('application/toml',)),
- 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
- 'TalLexer': ('pygments.lexers.tal', 'Tal', ('tal', 'uxntal'), ('*.tal',), ('text/x-uxntal',)),
- 'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
- 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
- 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
- 'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
- 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
- 'TealLexer': ('pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()),
- 'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)),
- 'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
- 'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
- 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf', 'hcl'), ('*.tf', '*.hcl'), ('application/x-tf', 'application/x-terraform')),
- 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
- 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
- 'ThingsDBLexer': ('pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
- 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
- 'TiddlyWiki5Lexer': ('pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
- 'TlbLexer': ('pygments.lexers.tlb', 'Tl-b', ('tlb',), ('*.tlb',), ()),
- 'TlsLexer': ('pygments.lexers.tls', 'TLS Presentation Language', ('tls',), (), ()),
- 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
- 'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
- 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
- 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
- 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
- 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
- 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')),
- 'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
- 'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
- 'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
- 'UL4Lexer': ('pygments.lexers.ul4', 'UL4', ('ul4',), ('*.ul4',), ()),
- 'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
- 'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
- 'UnixConfigLexer': ('pygments.lexers.configs', 'Unix/Linux config files', ('unixconfig', 'linuxconfig'), (), ()),
- 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
- 'UrlEncodedLexer': ('pygments.lexers.html', 'urlencoded', ('urlencoded',), (), ('application/x-www-form-urlencoded',)),
- 'UsdLexer': ('pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()),
- 'VBScriptLexer': ('pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()),
- 'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
- 'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
- 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
- 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
- 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
- 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
- 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
- 'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
- 'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
- 'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
- 'VerifpalLexer': ('pygments.lexers.verifpal', 'Verifpal', ('verifpal',), ('*.vp',), ('text/x-verifpal',)),
- 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
- 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
- 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
- 'VisualPrologGrammarLexer': ('pygments.lexers.vip', 'Visual Prolog Grammar', ('visualprologgrammar',), ('*.vipgrm',), ()),
- 'VisualPrologLexer': ('pygments.lexers.vip', 'Visual Prolog', ('visualprolog',), ('*.pro', '*.cl', '*.i', '*.pack', '*.ph'), ()),
- 'VyperLexer': ('pygments.lexers.vyper', 'Vyper', ('vyper',), ('*.vy',), ()),
- 'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
- 'WatLexer': ('pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
- 'WebIDLLexer': ('pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
- 'WgslLexer': ('pygments.lexers.wgsl', 'WebGPU Shading Language', ('wgsl',), ('*.wgsl',), ('text/wgsl',)),
- 'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
- 'WikitextLexer': ('pygments.lexers.markup', 'Wikitext', ('wikitext', 'mediawiki'), (), ('text/x-wiki',)),
- 'WoWTocLexer': ('pygments.lexers.wowtoc', 'World of Warcraft TOC', ('wowtoc',), ('*.toc',), ()),
- 'WrenLexer': ('pygments.lexers.wren', 'Wren', ('wren',), ('*.wren',), ()),
- 'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
- 'XMLUL4Lexer': ('pygments.lexers.ul4', 'XML+UL4', ('xml+ul4',), ('*.xmlul4',), ()),
- 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
- 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), ('*.xml.j2', '*.xml.jinja2'), ('application/xml+django', 'application/xml+jinja')),
- 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)),
- 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
- 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
- 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
- 'XorgLexer': ('pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()),
- 'XppLexer': ('pygments.lexers.dotnet', 'X++', ('xpp', 'x++'), ('*.xpp',), ()),
- 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
- 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
- 'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
- 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2'), ('text/x-yaml+jinja', 'text/x-sls')),
- 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
- 'YangLexer': ('pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)),
- 'YaraLexer': ('pygments.lexers.yara', 'YARA', ('yara', 'yar'), ('*.yar',), ('text/x-yara',)),
- 'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
- 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
- 'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
- 'apdlexer': ('pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()),
-}
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_mql_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_mql_builtins.py
deleted file mode 100644
index 3b89f1d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_mql_builtins.py
+++ /dev/null
@@ -1,1171 +0,0 @@
-"""
- pygments.lexers._mql_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtins for the MqlLexer.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-types = (
- 'AccountBalance',
- 'AccountCompany',
- 'AccountCredit',
- 'AccountCurrency',
- 'AccountEquity',
- 'AccountFreeMarginCheck',
- 'AccountFreeMarginMode',
- 'AccountFreeMargin',
- 'AccountInfoDouble',
- 'AccountInfoInteger',
- 'AccountInfoString',
- 'AccountLeverage',
- 'AccountMargin',
- 'AccountName',
- 'AccountNumber',
- 'AccountProfit',
- 'AccountServer',
- 'AccountStopoutLevel',
- 'AccountStopoutMode',
- 'Alert',
- 'ArrayBsearch',
- 'ArrayCompare',
- 'ArrayCopyRates',
- 'ArrayCopySeries',
- 'ArrayCopy',
- 'ArrayDimension',
- 'ArrayFill',
- 'ArrayFree',
- 'ArrayGetAsSeries',
- 'ArrayInitialize',
- 'ArrayIsDynamic',
- 'ArrayIsSeries',
- 'ArrayMaximum',
- 'ArrayMinimum',
- 'ArrayRange',
- 'ArrayResize',
- 'ArraySetAsSeries',
- 'ArraySize',
- 'ArraySort',
- 'CharArrayToString',
- 'CharToString',
- 'CharToStr',
- 'CheckPointer',
- 'ColorToARGB',
- 'ColorToString',
- 'Comment',
- 'CopyClose',
- 'CopyHigh',
- 'CopyLow',
- 'CopyOpen',
- 'CopyRates',
- 'CopyRealVolume',
- 'CopySpread',
- 'CopyTickVolume',
- 'CopyTime',
- 'DayOfWeek',
- 'DayOfYear',
- 'Day',
- 'DebugBreak',
- 'Digits',
- 'DoubleToString',
- 'DoubleToStr',
- 'EnumToString',
- 'EventChartCustom',
- 'EventKillTimer',
- 'EventSetMillisecondTimer',
- 'EventSetTimer',
- 'ExpertRemove',
- 'FileClose',
- 'FileCopy',
- 'FileDelete',
- 'FileFindClose',
- 'FileFindFirst',
- 'FileFindNext',
- 'FileFlush',
- 'FileGetInteger',
- 'FileIsEnding',
- 'FileIsExist',
- 'FileIsLineEnding',
- 'FileMove',
- 'FileOpenHistory',
- 'FileOpen',
- 'FileReadArray',
- 'FileReadBool',
- 'FileReadDatetime',
- 'FileReadDouble',
- 'FileReadFloat',
- 'FileReadInteger',
- 'FileReadLong',
- 'FileReadNumber',
- 'FileReadString',
- 'FileReadStruct',
- 'FileSeek',
- 'FileSize',
- 'FileTell',
- 'FileWriteArray',
- 'FileWriteDouble',
- 'FileWriteFloat',
- 'FileWriteInteger',
- 'FileWriteLong',
- 'FileWriteString',
- 'FileWriteStruct',
- 'FileWrite',
- 'FolderClean',
- 'FolderCreate',
- 'FolderDelete',
- 'GetLastError',
- 'GetPointer',
- 'GetTickCount',
- 'GlobalVariableCheck',
- 'GlobalVariableDel',
- 'GlobalVariableGet',
- 'GlobalVariableName',
- 'GlobalVariableSetOnCondition',
- 'GlobalVariableSet',
- 'GlobalVariableTemp',
- 'GlobalVariableTime',
- 'GlobalVariablesDeleteAll',
- 'GlobalVariablesFlush',
- 'GlobalVariablesTotal',
- 'HideTestIndicators',
- 'Hour',
- 'IndicatorBuffers',
- 'IndicatorCounted',
- 'IndicatorDigits',
- 'IndicatorSetDouble',
- 'IndicatorSetInteger',
- 'IndicatorSetString',
- 'IndicatorShortName',
- 'IntegerToString',
- 'IsConnected',
- 'IsDemo',
- 'IsDllsAllowed',
- 'IsExpertEnabled',
- 'IsLibrariesAllowed',
- 'IsOptimization',
- 'IsStopped',
- 'IsTesting',
- 'IsTradeAllowed',
- 'IsTradeContextBusy',
- 'IsVisualMode',
- 'MQLInfoInteger',
- 'MQLInfoString',
- 'MarketInfo',
- 'MathAbs',
- 'MathArccos',
- 'MathArcsin',
- 'MathArctan',
- 'MathCeil',
- 'MathCos',
- 'MathExp',
- 'MathFloor',
- 'MathIsValidNumber',
- 'MathLog',
- 'MathMax',
- 'MathMin',
- 'MathMod',
- 'MathPow',
- 'MathRand',
- 'MathRound',
- 'MathSin',
- 'MathSqrt',
- 'MathSrand',
- 'MathTan',
- 'MessageBox',
- 'Minute',
- 'Month',
- 'NormalizeDouble',
- 'ObjectCreate',
- 'ObjectDelete',
- 'ObjectDescription',
- 'ObjectFind',
- 'ObjectGetDouble',
- 'ObjectGetFiboDescription',
- 'ObjectGetInteger',
- 'ObjectGetShiftByValue',
- 'ObjectGetString',
- 'ObjectGetTimeByValue',
- 'ObjectGetValueByShift',
- 'ObjectGetValueByTime',
- 'ObjectGet',
- 'ObjectMove',
- 'ObjectName',
- 'ObjectSetDouble',
- 'ObjectSetFiboDescription',
- 'ObjectSetInteger',
- 'ObjectSetString',
- 'ObjectSetText',
- 'ObjectSet',
- 'ObjectType',
- 'ObjectsDeleteAll',
- 'ObjectsTotal',
- 'OrderCloseBy',
- 'OrderClosePrice',
- 'OrderCloseTime',
- 'OrderClose',
- 'OrderComment',
- 'OrderCommission',
- 'OrderDelete',
- 'OrderExpiration',
- 'OrderLots',
- 'OrderMagicNumber',
- 'OrderModify',
- 'OrderOpenPrice',
- 'OrderOpenTime',
- 'OrderPrint',
- 'OrderProfit',
- 'OrderSelect',
- 'OrderSend',
- 'OrderStopLoss',
- 'OrderSwap',
- 'OrderSymbol',
- 'OrderTakeProfit',
- 'OrderTicket',
- 'OrderType',
- 'OrdersHistoryTotal',
- 'OrdersTotal',
- 'PeriodSeconds',
- 'Period',
- 'PlaySound',
- 'Point',
- 'PrintFormat',
- 'Print',
- 'RefreshRates',
- 'ResetLastError',
- 'ResourceCreate',
- 'ResourceFree',
- 'ResourceReadImage',
- 'ResourceSave',
- 'Seconds',
- 'SendFTP',
- 'SendMail',
- 'SendNotification',
- 'SeriesInfoInteger',
- 'SetIndexArrow',
- 'SetIndexBuffer',
- 'SetIndexDrawBegin',
- 'SetIndexEmptyValue',
- 'SetIndexLabel',
- 'SetIndexShift',
- 'SetIndexStyle',
- 'SetLevelStyle',
- 'SetLevelValue',
- 'ShortArrayToString',
- 'ShortToString',
- 'Sleep',
- 'StrToDouble',
- 'StrToInteger',
- 'StrToTime',
- 'StringAdd',
- 'StringBufferLen',
- 'StringCompare',
- 'StringConcatenate',
- 'StringFill',
- 'StringFind',
- 'StringFormat',
- 'StringGetCharacter',
- 'StringGetChar',
- 'StringInit',
- 'StringLen',
- 'StringReplace',
- 'StringSetCharacter',
- 'StringSetChar',
- 'StringSplit',
- 'StringSubstr',
- 'StringToCharArray',
- 'StringToColor',
- 'StringToDouble',
- 'StringToInteger',
- 'StringToLower',
- 'StringToShortArray',
- 'StringToTime',
- 'StringToUpper',
- 'StringTrimLeft',
- 'StringTrimRight',
- 'StructToTime',
- 'SymbolInfoDouble',
- 'SymbolInfoInteger',
- 'SymbolInfoSessionQuote',
- 'SymbolInfoSessionTrade',
- 'SymbolInfoString',
- 'SymbolInfoTick',
- 'SymbolIsSynchronized',
- 'SymbolName',
- 'SymbolSelect',
- 'SymbolsTotal',
- 'Symbol',
- 'TerminalClose',
- 'TerminalCompany',
- 'TerminalName',
- 'TerminalPath',
- 'TesterStatistics',
- 'TextGetSize',
- 'TextOut',
- 'TextSetFont',
- 'TimeCurrent',
- 'TimeDayOfWeek',
- 'TimeDayOfYear',
- 'TimeDaylightSavings',
- 'TimeDay',
- 'TimeGMTOffset',
- 'TimeGMT',
- 'TimeHour',
- 'TimeLocal',
- 'TimeMinute',
- 'TimeMonth',
- 'TimeSeconds',
- 'TimeToString',
- 'TimeToStruct',
- 'TimeToStr',
- 'TimeTradeServer',
- 'TimeYear',
- 'UninitializeReason',
- 'WindowBarsPerChart',
- 'WindowExpertName',
- 'WindowFind',
- 'WindowFirstVisibleBar',
- 'WindowHandle',
- 'WindowIsVisible',
- 'WindowOnDropped',
- 'WindowPriceMax',
- 'WindowPriceMin',
- 'WindowPriceOnDropped',
- 'WindowRedraw',
- 'WindowScreenShot',
- 'WindowTimeOnDropped',
- 'WindowXOnDropped',
- 'WindowYOnDropped',
- 'WindowsTotal',
- 'Year',
- 'ZeroMemory',
- 'iAC',
- 'iADX',
- 'iAD',
- 'iAO',
- 'iATR',
- 'iAlligator',
- 'iBWMFI',
- 'iBandsOnArray',
- 'iBands',
- 'iBarShift',
- 'iBars',
- 'iBearsPower',
- 'iBullsPower',
- 'iCCIOnArray',
- 'iCCI',
- 'iClose',
- 'iCustom',
- 'iDeMarker',
- 'iEnvelopesOnArray',
- 'iEnvelopes',
- 'iForce',
- 'iFractals',
- 'iGator',
- 'iHighest',
- 'iHigh',
- 'iIchimoku',
- 'iLowest',
- 'iLow',
- 'iMACD',
- 'iMAOnArray',
- 'iMA',
- 'iMFI',
- 'iMomentumOnArray',
- 'iMomentum',
- 'iOBV',
- 'iOpen',
- 'iOsMA',
- 'iRSIOnArray',
- 'iRSI',
- 'iRVI',
- 'iSAR',
- 'iStdDevOnArray',
- 'iStdDev',
- 'iStochastic',
- 'iTime',
- 'iVolume',
- 'iWPR',
-)
-
-constants = (
- 'ACCOUNT_BALANCE',
- 'ACCOUNT_COMPANY',
- 'ACCOUNT_CREDIT',
- 'ACCOUNT_CURRENCY',
- 'ACCOUNT_EQUITY',
- 'ACCOUNT_FREEMARGIN',
- 'ACCOUNT_LEVERAGE',
- 'ACCOUNT_LIMIT_ORDERS',
- 'ACCOUNT_LOGIN',
- 'ACCOUNT_MARGIN',
- 'ACCOUNT_MARGIN_LEVEL',
- 'ACCOUNT_MARGIN_SO_CALL',
- 'ACCOUNT_MARGIN_SO_MODE',
- 'ACCOUNT_MARGIN_SO_SO',
- 'ACCOUNT_NAME',
- 'ACCOUNT_PROFIT',
- 'ACCOUNT_SERVER',
- 'ACCOUNT_STOPOUT_MODE_MONEY',
- 'ACCOUNT_STOPOUT_MODE_PERCENT',
- 'ACCOUNT_TRADE_ALLOWED',
- 'ACCOUNT_TRADE_EXPERT',
- 'ACCOUNT_TRADE_MODE',
- 'ACCOUNT_TRADE_MODE_CONTEST',
- 'ACCOUNT_TRADE_MODE_DEMO',
- 'ACCOUNT_TRADE_MODE_REAL',
- 'ALIGN_CENTER',
- 'ALIGN_LEFT',
- 'ALIGN_RIGHT',
- 'ANCHOR_BOTTOM',
- 'ANCHOR_CENTER',
- 'ANCHOR_LEFT',
- 'ANCHOR_LEFT_LOWER',
- 'ANCHOR_LEFT_UPPER',
- 'ANCHOR_LOWER',
- 'ANCHOR_RIGHT',
- 'ANCHOR_RIGHT_LOWER',
- 'ANCHOR_RIGHT_UPPER',
- 'ANCHOR_TOP',
- 'ANCHOR_UPPER',
- 'BORDER_FLAT',
- 'BORDER_RAISED',
- 'BORDER_SUNKEN',
- 'CHARTEVENT_CHART_CHANGE',
- 'CHARTEVENT_CLICK',
- 'CHARTEVENT_CUSTOM',
- 'CHARTEVENT_CUSTOM_LAST',
- 'CHARTEVENT_KEYDOWN',
- 'CHARTEVENT_MOUSE_MOVE',
- 'CHARTEVENT_OBJECT_CHANGE',
- 'CHARTEVENT_OBJECT_CLICK',
- 'CHARTEVENT_OBJECT_CREATE',
- 'CHARTEVENT_OBJECT_DELETE',
- 'CHARTEVENT_OBJECT_DRAG',
- 'CHARTEVENT_OBJECT_ENDEDIT',
- 'CHARTS_MAX',
- 'CHART_AUTOSCROLL',
- 'CHART_BARS',
- 'CHART_BEGIN',
- 'CHART_BRING_TO_TOP',
- 'CHART_CANDLES',
- 'CHART_COLOR_ASK',
- 'CHART_COLOR_BACKGROUND',
- 'CHART_COLOR_BID',
- 'CHART_COLOR_CANDLE_BEAR',
- 'CHART_COLOR_CANDLE_BULL',
- 'CHART_COLOR_CHART_DOWN',
- 'CHART_COLOR_CHART_LINE',
- 'CHART_COLOR_CHART_UP',
- 'CHART_COLOR_FOREGROUND',
- 'CHART_COLOR_GRID',
- 'CHART_COLOR_LAST',
- 'CHART_COLOR_STOP_LEVEL',
- 'CHART_COLOR_VOLUME',
- 'CHART_COMMENT',
- 'CHART_CURRENT_POS',
- 'CHART_DRAG_TRADE_LEVELS',
- 'CHART_END',
- 'CHART_EVENT_MOUSE_MOVE',
- 'CHART_EVENT_OBJECT_CREATE',
- 'CHART_EVENT_OBJECT_DELETE',
- 'CHART_FIRST_VISIBLE_BAR',
- 'CHART_FIXED_MAX',
- 'CHART_FIXED_MIN',
- 'CHART_FIXED_POSITION',
- 'CHART_FOREGROUND',
- 'CHART_HEIGHT_IN_PIXELS',
- 'CHART_IS_OBJECT',
- 'CHART_LINE',
- 'CHART_MODE',
- 'CHART_MOUSE_SCROLL',
- 'CHART_POINTS_PER_BAR',
- 'CHART_PRICE_MAX',
- 'CHART_PRICE_MIN',
- 'CHART_SCALEFIX',
- 'CHART_SCALEFIX_11',
- 'CHART_SCALE',
- 'CHART_SCALE_PT_PER_BAR',
- 'CHART_SHIFT',
- 'CHART_SHIFT_SIZE',
- 'CHART_SHOW_ASK_LINE',
- 'CHART_SHOW_BID_LINE',
- 'CHART_SHOW_DATE_SCALE',
- 'CHART_SHOW_GRID',
- 'CHART_SHOW_LAST_LINE',
- 'CHART_SHOW_OBJECT_DESCR',
- 'CHART_SHOW_OHLC',
- 'CHART_SHOW_PERIOD_SEP',
- 'CHART_SHOW_PRICE_SCALE',
- 'CHART_SHOW_TRADE_LEVELS',
- 'CHART_SHOW_VOLUMES',
- 'CHART_VISIBLE_BARS',
- 'CHART_VOLUME_HIDE',
- 'CHART_VOLUME_REAL',
- 'CHART_VOLUME_TICK',
- 'CHART_WIDTH_IN_BARS',
- 'CHART_WIDTH_IN_PIXELS',
- 'CHART_WINDOWS_TOTAL',
- 'CHART_WINDOW_HANDLE',
- 'CHART_WINDOW_IS_VISIBLE',
- 'CHART_WINDOW_YDISTANCE',
- 'CHAR_MAX',
- 'CHAR_MIN',
- 'CLR_NONE',
- 'CORNER_LEFT_LOWER',
- 'CORNER_LEFT_UPPER',
- 'CORNER_RIGHT_LOWER',
- 'CORNER_RIGHT_UPPER',
- 'CP_ACP',
- 'CP_MACCP',
- 'CP_OEMCP',
- 'CP_SYMBOL',
- 'CP_THREAD_ACP',
- 'CP_UTF7',
- 'CP_UTF8',
- 'DBL_DIG',
- 'DBL_EPSILON',
- 'DBL_MANT_DIG',
- 'DBL_MAX',
- 'DBL_MAX_10_EXP',
- 'DBL_MAX_EXP',
- 'DBL_MIN',
- 'DBL_MIN_10_EXP',
- 'DBL_MIN_EXP',
- 'DRAW_ARROW',
- 'DRAW_FILLING',
- 'DRAW_HISTOGRAM',
- 'DRAW_LINE',
- 'DRAW_NONE',
- 'DRAW_SECTION',
- 'DRAW_ZIGZAG',
- 'EMPTY',
- 'EMPTY_VALUE',
- 'ERR_ACCOUNT_DISABLED',
- 'ERR_BROKER_BUSY',
- 'ERR_COMMON_ERROR',
- 'ERR_INVALID_ACCOUNT',
- 'ERR_INVALID_PRICE',
- 'ERR_INVALID_STOPS',
- 'ERR_INVALID_TRADE_PARAMETERS',
- 'ERR_INVALID_TRADE_VOLUME',
- 'ERR_LONG_POSITIONS_ONLY_ALLOWED',
- 'ERR_MALFUNCTIONAL_TRADE',
- 'ERR_MARKET_CLOSED',
- 'ERR_NOT_ENOUGH_MONEY',
- 'ERR_NOT_ENOUGH_RIGHTS',
- 'ERR_NO_CONNECTION',
- 'ERR_NO_ERROR',
- 'ERR_NO_RESULT',
- 'ERR_OFF_QUOTES',
- 'ERR_OLD_VERSION',
- 'ERR_ORDER_LOCKED',
- 'ERR_PRICE_CHANGED',
- 'ERR_REQUOTE',
- 'ERR_SERVER_BUSY',
- 'ERR_TOO_FREQUENT_REQUESTS',
- 'ERR_TOO_MANY_REQUESTS',
- 'ERR_TRADE_CONTEXT_BUSY',
- 'ERR_TRADE_DISABLED',
- 'ERR_TRADE_EXPIRATION_DENIED',
- 'ERR_TRADE_HEDGE_PROHIBITED',
- 'ERR_TRADE_MODIFY_DENIED',
- 'ERR_TRADE_PROHIBITED_BY_FIFO',
- 'ERR_TRADE_TIMEOUT',
- 'ERR_TRADE_TOO_MANY_ORDERS',
- 'FILE_ACCESS_DATE',
- 'FILE_ANSI',
- 'FILE_BIN',
- 'FILE_COMMON',
- 'FILE_CREATE_DATE',
- 'FILE_CSV',
- 'FILE_END',
- 'FILE_EXISTS',
- 'FILE_IS_ANSI',
- 'FILE_IS_BINARY',
- 'FILE_IS_COMMON',
- 'FILE_IS_CSV',
- 'FILE_IS_READABLE',
- 'FILE_IS_TEXT',
- 'FILE_IS_WRITABLE',
- 'FILE_LINE_END',
- 'FILE_MODIFY_DATE',
- 'FILE_POSITION',
- 'FILE_READ',
- 'FILE_REWRITE',
- 'FILE_SHARE_READ',
- 'FILE_SHARE_WRITE',
- 'FILE_SIZE',
- 'FILE_TXT',
- 'FILE_UNICODE',
- 'FILE_WRITE',
- 'FLT_DIG',
- 'FLT_EPSILON',
- 'FLT_MANT_DIG',
- 'FLT_MAX',
- 'FLT_MAX_10_EXP',
- 'FLT_MAX_EXP',
- 'FLT_MIN',
- 'FLT_MIN_10_EXP',
- 'FLT_MIN_EXP',
- 'FRIDAY',
- 'GANN_DOWN_TREND',
- 'GANN_UP_TREND',
- 'IDABORT',
- 'IDCANCEL',
- 'IDCONTINUE',
- 'IDIGNORE',
- 'IDNO',
- 'IDOK',
- 'IDRETRY',
- 'IDTRYAGAIN',
- 'IDYES',
- 'INDICATOR_CALCULATIONS',
- 'INDICATOR_COLOR_INDEX',
- 'INDICATOR_DATA',
- 'INDICATOR_DIGITS',
- 'INDICATOR_HEIGHT',
- 'INDICATOR_LEVELCOLOR',
- 'INDICATOR_LEVELSTYLE',
- 'INDICATOR_LEVELS',
- 'INDICATOR_LEVELTEXT',
- 'INDICATOR_LEVELVALUE',
- 'INDICATOR_LEVELWIDTH',
- 'INDICATOR_MAXIMUM',
- 'INDICATOR_MINIMUM',
- 'INDICATOR_SHORTNAME',
- 'INT_MAX',
- 'INT_MIN',
- 'INVALID_HANDLE',
- 'IS_DEBUG_MODE',
- 'IS_PROFILE_MODE',
- 'LICENSE_DEMO',
- 'LICENSE_FREE',
- 'LICENSE_FULL',
- 'LICENSE_TIME',
- 'LONG_MAX',
- 'LONG_MIN',
- 'MB_ABORTRETRYIGNORE',
- 'MB_CANCELTRYCONTINUE',
- 'MB_DEFBUTTON1',
- 'MB_DEFBUTTON2',
- 'MB_DEFBUTTON3',
- 'MB_DEFBUTTON4',
- 'MB_ICONASTERISK',
- 'MB_ICONERROR',
- 'MB_ICONEXCLAMATION',
- 'MB_ICONHAND',
- 'MB_ICONINFORMATION',
- 'MB_ICONQUESTION',
- 'MB_ICONSTOP',
- 'MB_ICONWARNING',
- 'MB_OKCANCEL',
- 'MB_OK',
- 'MB_RETRYCANCEL',
- 'MB_YESNOCANCEL',
- 'MB_YESNO',
- 'MODE_ASK',
- 'MODE_BID',
- 'MODE_CHINKOUSPAN',
- 'MODE_CLOSE',
- 'MODE_DIGITS',
- 'MODE_EMA',
- 'MODE_EXPIRATION',
- 'MODE_FREEZELEVEL',
- 'MODE_GATORJAW',
- 'MODE_GATORLIPS',
- 'MODE_GATORTEETH',
- 'MODE_HIGH',
- 'MODE_KIJUNSEN',
- 'MODE_LOTSIZE',
- 'MODE_LOTSTEP',
- 'MODE_LOWER',
- 'MODE_LOW',
- 'MODE_LWMA',
- 'MODE_MAIN',
- 'MODE_MARGINCALCMODE',
- 'MODE_MARGINHEDGED',
- 'MODE_MARGININIT',
- 'MODE_MARGINMAINTENANCE',
- 'MODE_MARGINREQUIRED',
- 'MODE_MAXLOT',
- 'MODE_MINLOT',
- 'MODE_MINUSDI',
- 'MODE_OPEN',
- 'MODE_PLUSDI',
- 'MODE_POINT',
- 'MODE_PROFITCALCMODE',
- 'MODE_SENKOUSPANA',
- 'MODE_SENKOUSPANB',
- 'MODE_SIGNAL',
- 'MODE_SMA',
- 'MODE_SMMA',
- 'MODE_SPREAD',
- 'MODE_STARTING',
- 'MODE_STOPLEVEL',
- 'MODE_SWAPLONG',
- 'MODE_SWAPSHORT',
- 'MODE_SWAPTYPE',
- 'MODE_TENKANSEN',
- 'MODE_TICKSIZE',
- 'MODE_TICKVALUE',
- 'MODE_TIME',
- 'MODE_TRADEALLOWED',
- 'MODE_UPPER',
- 'MODE_VOLUME',
- 'MONDAY',
- 'MQL_DEBUG',
- 'MQL_DLLS_ALLOWED',
- 'MQL_FRAME_MODE',
- 'MQL_LICENSE_TYPE',
- 'MQL_OPTIMIZATION',
- 'MQL_PROFILER',
- 'MQL_PROGRAM_NAME',
- 'MQL_PROGRAM_PATH',
- 'MQL_PROGRAM_TYPE',
- 'MQL_TESTER',
- 'MQL_TRADE_ALLOWED',
- 'MQL_VISUAL_MODE',
- 'M_1_PI',
- 'M_2_PI',
- 'M_2_SQRTPI',
- 'M_E',
- 'M_LN2',
- 'M_LN10',
- 'M_LOG2E',
- 'M_LOG10E',
- 'M_PI',
- 'M_PI_2',
- 'M_PI_4',
- 'M_SQRT1_2',
- 'M_SQRT2',
- 'NULL',
- 'OBJPROP_ALIGN',
- 'OBJPROP_ANCHOR',
- 'OBJPROP_ANGLE',
- 'OBJPROP_ARROWCODE',
- 'OBJPROP_BACK',
- 'OBJPROP_BGCOLOR',
- 'OBJPROP_BMPFILE',
- 'OBJPROP_BORDER_COLOR',
- 'OBJPROP_BORDER_TYPE',
- 'OBJPROP_CHART_ID',
- 'OBJPROP_CHART_SCALE',
- 'OBJPROP_COLOR',
- 'OBJPROP_CORNER',
- 'OBJPROP_CREATETIME',
- 'OBJPROP_DATE_SCALE',
- 'OBJPROP_DEVIATION',
- 'OBJPROP_DRAWLINES',
- 'OBJPROP_ELLIPSE',
- 'OBJPROP_FIBOLEVELS',
- 'OBJPROP_FILL',
- 'OBJPROP_FIRSTLEVEL',
- 'OBJPROP_FONTSIZE',
- 'OBJPROP_FONT',
- 'OBJPROP_HIDDEN',
- 'OBJPROP_LEVELCOLOR',
- 'OBJPROP_LEVELSTYLE',
- 'OBJPROP_LEVELS',
- 'OBJPROP_LEVELTEXT',
- 'OBJPROP_LEVELVALUE',
- 'OBJPROP_LEVELWIDTH',
- 'OBJPROP_NAME',
- 'OBJPROP_PERIOD',
- 'OBJPROP_PRICE1',
- 'OBJPROP_PRICE2',
- 'OBJPROP_PRICE3',
- 'OBJPROP_PRICE',
- 'OBJPROP_PRICE_SCALE',
- 'OBJPROP_RAY',
- 'OBJPROP_RAY_RIGHT',
- 'OBJPROP_READONLY',
- 'OBJPROP_SCALE',
- 'OBJPROP_SELECTABLE',
- 'OBJPROP_SELECTED',
- 'OBJPROP_STATE',
- 'OBJPROP_STYLE',
- 'OBJPROP_SYMBOL',
- 'OBJPROP_TEXT',
- 'OBJPROP_TIME1',
- 'OBJPROP_TIME2',
- 'OBJPROP_TIME3',
- 'OBJPROP_TIMEFRAMES',
- 'OBJPROP_TIME',
- 'OBJPROP_TOOLTIP',
- 'OBJPROP_TYPE',
- 'OBJPROP_WIDTH',
- 'OBJPROP_XDISTANCE',
- 'OBJPROP_XOFFSET',
- 'OBJPROP_XSIZE',
- 'OBJPROP_YDISTANCE',
- 'OBJPROP_YOFFSET',
- 'OBJPROP_YSIZE',
- 'OBJPROP_ZORDER',
- 'OBJ_ALL_PERIODS',
- 'OBJ_ARROW',
- 'OBJ_ARROW_BUY',
- 'OBJ_ARROW_CHECK',
- 'OBJ_ARROW_DOWN',
- 'OBJ_ARROW_LEFT_PRICE',
- 'OBJ_ARROW_RIGHT_PRICE',
- 'OBJ_ARROW_SELL',
- 'OBJ_ARROW_STOP',
- 'OBJ_ARROW_THUMB_DOWN',
- 'OBJ_ARROW_THUMB_UP',
- 'OBJ_ARROW_UP',
- 'OBJ_BITMAP',
- 'OBJ_BITMAP_LABEL',
- 'OBJ_BUTTON',
- 'OBJ_CHANNEL',
- 'OBJ_CYCLES',
- 'OBJ_EDIT',
- 'OBJ_ELLIPSE',
- 'OBJ_EVENT',
- 'OBJ_EXPANSION',
- 'OBJ_FIBOARC',
- 'OBJ_FIBOCHANNEL',
- 'OBJ_FIBOFAN',
- 'OBJ_FIBOTIMES',
- 'OBJ_FIBO',
- 'OBJ_GANNFAN',
- 'OBJ_GANNGRID',
- 'OBJ_GANNLINE',
- 'OBJ_HLINE',
- 'OBJ_LABEL',
- 'OBJ_NO_PERIODS',
- 'OBJ_PERIOD_D1',
- 'OBJ_PERIOD_H1',
- 'OBJ_PERIOD_H4',
- 'OBJ_PERIOD_M1',
- 'OBJ_PERIOD_M5',
- 'OBJ_PERIOD_M15',
- 'OBJ_PERIOD_M30',
- 'OBJ_PERIOD_MN1',
- 'OBJ_PERIOD_W1',
- 'OBJ_PITCHFORK',
- 'OBJ_RECTANGLE',
- 'OBJ_RECTANGLE_LABEL',
- 'OBJ_REGRESSION',
- 'OBJ_STDDEVCHANNEL',
- 'OBJ_TEXT',
- 'OBJ_TRENDBYANGLE',
- 'OBJ_TREND',
- 'OBJ_TRIANGLE',
- 'OBJ_VLINE',
- 'OP_BUYLIMIT',
- 'OP_BUYSTOP',
- 'OP_BUY',
- 'OP_SELLLIMIT',
- 'OP_SELLSTOP',
- 'OP_SELL',
- 'PERIOD_CURRENT',
- 'PERIOD_D1',
- 'PERIOD_H1',
- 'PERIOD_H2',
- 'PERIOD_H3',
- 'PERIOD_H4',
- 'PERIOD_H6',
- 'PERIOD_H8',
- 'PERIOD_H12',
- 'PERIOD_M1',
- 'PERIOD_M2',
- 'PERIOD_M3',
- 'PERIOD_M4',
- 'PERIOD_M5',
- 'PERIOD_M6',
- 'PERIOD_M10',
- 'PERIOD_M12',
- 'PERIOD_M15',
- 'PERIOD_M20',
- 'PERIOD_M30',
- 'PERIOD_MN1',
- 'PERIOD_W1',
- 'POINTER_AUTOMATIC',
- 'POINTER_DYNAMIC',
- 'POINTER_INVALID',
- 'PRICE_CLOSE',
- 'PRICE_HIGH',
- 'PRICE_LOW',
- 'PRICE_MEDIAN',
- 'PRICE_OPEN',
- 'PRICE_TYPICAL',
- 'PRICE_WEIGHTED',
- 'PROGRAM_EXPERT',
- 'PROGRAM_INDICATOR',
- 'PROGRAM_SCRIPT',
- 'REASON_ACCOUNT',
- 'REASON_CHARTCHANGE',
- 'REASON_CHARTCLOSE',
- 'REASON_CLOSE',
- 'REASON_INITFAILED',
- 'REASON_PARAMETERS',
- 'REASON_PROGRAM',
- 'REASON_RECOMPILE',
- 'REASON_REMOVE',
- 'REASON_TEMPLATE',
- 'SATURDAY',
- 'SEEK_CUR',
- 'SEEK_END',
- 'SEEK_SET',
- 'SERIES_BARS_COUNT',
- 'SERIES_FIRSTDATE',
- 'SERIES_LASTBAR_DATE',
- 'SERIES_SERVER_FIRSTDATE',
- 'SERIES_SYNCHRONIZED',
- 'SERIES_TERMINAL_FIRSTDATE',
- 'SHORT_MAX',
- 'SHORT_MIN',
- 'STAT_BALANCEDD_PERCENT',
- 'STAT_BALANCEMIN',
- 'STAT_BALANCE_DDREL_PERCENT',
- 'STAT_BALANCE_DD',
- 'STAT_BALANCE_DD_RELATIVE',
- 'STAT_CONLOSSMAX',
- 'STAT_CONLOSSMAX_TRADES',
- 'STAT_CONPROFITMAX',
- 'STAT_CONPROFITMAX_TRADES',
- 'STAT_CUSTOM_ONTESTER',
- 'STAT_DEALS',
- 'STAT_EQUITYDD_PERCENT',
- 'STAT_EQUITYMIN',
- 'STAT_EQUITY_DDREL_PERCENT',
- 'STAT_EQUITY_DD',
- 'STAT_EQUITY_DD_RELATIVE',
- 'STAT_EXPECTED_PAYOFF',
- 'STAT_GROSS_LOSS',
- 'STAT_GROSS_PROFIT',
- 'STAT_INITIAL_DEPOSIT',
- 'STAT_LONG_TRADES',
- 'STAT_LOSSTRADES_AVGCON',
- 'STAT_LOSS_TRADES',
- 'STAT_MAX_CONLOSSES',
- 'STAT_MAX_CONLOSS_TRADES',
- 'STAT_MAX_CONPROFIT_TRADES',
- 'STAT_MAX_CONWINS',
- 'STAT_MAX_LOSSTRADE',
- 'STAT_MAX_PROFITTRADE',
- 'STAT_MIN_MARGINLEVEL',
- 'STAT_PROFITTRADES_AVGCON',
- 'STAT_PROFIT',
- 'STAT_PROFIT_FACTOR',
- 'STAT_PROFIT_LONGTRADES',
- 'STAT_PROFIT_SHORTTRADES',
- 'STAT_PROFIT_TRADES',
- 'STAT_RECOVERY_FACTOR',
- 'STAT_SHARPE_RATIO',
- 'STAT_SHORT_TRADES',
- 'STAT_TRADES',
- 'STAT_WITHDRAWAL',
- 'STO_CLOSECLOSE',
- 'STO_LOWHIGH',
- 'STYLE_DASHDOTDOT',
- 'STYLE_DASHDOT',
- 'STYLE_DASH',
- 'STYLE_DOT',
- 'STYLE_SOLID',
- 'SUNDAY',
- 'SYMBOL_ARROWDOWN',
- 'SYMBOL_ARROWUP',
- 'SYMBOL_CHECKSIGN',
- 'SYMBOL_LEFTPRICE',
- 'SYMBOL_RIGHTPRICE',
- 'SYMBOL_STOPSIGN',
- 'SYMBOL_THUMBSDOWN',
- 'SYMBOL_THUMBSUP',
- 'TERMINAL_BUILD',
- 'TERMINAL_CODEPAGE',
- 'TERMINAL_COMMONDATA_PATH',
- 'TERMINAL_COMPANY',
- 'TERMINAL_CONNECTED',
- 'TERMINAL_CPU_CORES',
- 'TERMINAL_DATA_PATH',
- 'TERMINAL_DISK_SPACE',
- 'TERMINAL_DLLS_ALLOWED',
- 'TERMINAL_EMAIL_ENABLED',
- 'TERMINAL_FTP_ENABLED',
- 'TERMINAL_LANGUAGE',
- 'TERMINAL_MAXBARS',
- 'TERMINAL_MEMORY_AVAILABLE',
- 'TERMINAL_MEMORY_PHYSICAL',
- 'TERMINAL_MEMORY_TOTAL',
- 'TERMINAL_MEMORY_USED',
- 'TERMINAL_NAME',
- 'TERMINAL_OPENCL_SUPPORT',
- 'TERMINAL_PATH',
- 'TERMINAL_TRADE_ALLOWED',
- 'TERMINAL_X64',
- 'THURSDAY',
- 'TRADE_ACTION_DEAL',
- 'TRADE_ACTION_MODIFY',
- 'TRADE_ACTION_PENDING',
- 'TRADE_ACTION_REMOVE',
- 'TRADE_ACTION_SLTP',
- 'TUESDAY',
- 'UCHAR_MAX',
- 'UINT_MAX',
- 'ULONG_MAX',
- 'USHORT_MAX',
- 'VOLUME_REAL',
- 'VOLUME_TICK',
- 'WEDNESDAY',
- 'WHOLE_ARRAY',
- 'WRONG_VALUE',
- 'clrNONE',
- '__DATETIME__',
- '__DATE__',
- '__FILE__',
- '__FUNCSIG__',
- '__FUNCTION__',
- '__LINE__',
- '__MQL4BUILD__',
- '__MQLBUILD__',
- '__PATH__',
-)
-
-colors = (
- 'AliceBlue',
- 'AntiqueWhite',
- 'Aquamarine',
- 'Aqua',
- 'Beige',
- 'Bisque',
- 'Black',
- 'BlanchedAlmond',
- 'BlueViolet',
- 'Blue',
- 'Brown',
- 'BurlyWood',
- 'CadetBlue',
- 'Chartreuse',
- 'Chocolate',
- 'Coral',
- 'CornflowerBlue',
- 'Cornsilk',
- 'Crimson',
- 'DarkBlue',
- 'DarkGoldenrod',
- 'DarkGray',
- 'DarkGreen',
- 'DarkKhaki',
- 'DarkOliveGreen',
- 'DarkOrange',
- 'DarkOrchid',
- 'DarkSalmon',
- 'DarkSeaGreen',
- 'DarkSlateBlue',
- 'DarkSlateGray',
- 'DarkTurquoise',
- 'DarkViolet',
- 'DeepPink',
- 'DeepSkyBlue',
- 'DimGray',
- 'DodgerBlue',
- 'FireBrick',
- 'ForestGreen',
- 'Gainsboro',
- 'Goldenrod',
- 'Gold',
- 'Gray',
- 'GreenYellow',
- 'Green',
- 'Honeydew',
- 'HotPink',
- 'IndianRed',
- 'Indigo',
- 'Ivory',
- 'Khaki',
- 'LavenderBlush',
- 'Lavender',
- 'LawnGreen',
- 'LemonChiffon',
- 'LightBlue',
- 'LightCoral',
- 'LightCyan',
- 'LightGoldenrod',
- 'LightGray',
- 'LightGreen',
- 'LightPink',
- 'LightSalmon',
- 'LightSeaGreen',
- 'LightSkyBlue',
- 'LightSlateGray',
- 'LightSteelBlue',
- 'LightYellow',
- 'LimeGreen',
- 'Lime',
- 'Linen',
- 'Magenta',
- 'Maroon',
- 'MediumAquamarine',
- 'MediumBlue',
- 'MediumOrchid',
- 'MediumPurple',
- 'MediumSeaGreen',
- 'MediumSlateBlue',
- 'MediumSpringGreen',
- 'MediumTurquoise',
- 'MediumVioletRed',
- 'MidnightBlue',
- 'MintCream',
- 'MistyRose',
- 'Moccasin',
- 'NavajoWhite',
- 'Navy',
- 'OldLace',
- 'OliveDrab',
- 'Olive',
- 'OrangeRed',
- 'Orange',
- 'Orchid',
- 'PaleGoldenrod',
- 'PaleGreen',
- 'PaleTurquoise',
- 'PaleVioletRed',
- 'PapayaWhip',
- 'PeachPuff',
- 'Peru',
- 'Pink',
- 'Plum',
- 'PowderBlue',
- 'Purple',
- 'Red',
- 'RosyBrown',
- 'RoyalBlue',
- 'SaddleBrown',
- 'Salmon',
- 'SandyBrown',
- 'SeaGreen',
- 'Seashell',
- 'Sienna',
- 'Silver',
- 'SkyBlue',
- 'SlateBlue',
- 'SlateGray',
- 'Snow',
- 'SpringGreen',
- 'SteelBlue',
- 'Tan',
- 'Teal',
- 'Thistle',
- 'Tomato',
- 'Turquoise',
- 'Violet',
- 'Wheat',
- 'WhiteSmoke',
- 'White',
- 'YellowGreen',
- 'Yellow',
-)
-
-keywords = (
- 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
- '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
- 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
- 'Volume',
-)
-c_types = (
- 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
- 'color', 'long', 'ulong', 'datetime', 'float', 'double',
- 'string',
-)
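
The tuples above (plus the long constants tuple whose name is defined earlier in the file, outside this excerpt) are plain data; the MQL lexer that consumes them is not part of this diff. As a rough, non-authoritative sketch of how such tuples are typically wired into a lexer, assuming a Pygments installation that still ships pygments.lexers._mql_builtins with the names shown above, a minimal lexer built on the words() helper could look like this (DemoMqlLexer is an illustrative name, not the real MqlLexer):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexer import RegexLexer, words
from pygments.lexers._mql_builtins import c_types, colors, keywords
from pygments.token import Keyword, Name, Text

class DemoMqlLexer(RegexLexer):
    """Illustrative only: token rules built from the builtin data tuples."""
    name = 'DemoMQL'
    tokens = {
        'root': [
            (words(c_types, suffix=r'\b'), Keyword.Type),    # 'int', 'double', ...
            (words(keywords, suffix=r'\b'), Keyword),         # 'input', 'Digits', ...
            (words(colors, suffix=r'\b'), Name.Constant),     # 'AliceBlue', ...
            (r'\s+', Text),
            (r'\S+', Text),   # fall through for anything not in the tuples
        ],
    }

print(highlight('input int Digits = 5;', DemoMqlLexer(), TerminalFormatter()))
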
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_mysql_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_mysql_builtins.py
deleted file mode 100644
index d25ed9c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_mysql_builtins.py
+++ /dev/null
@@ -1,1335 +0,0 @@
-"""
- pygments.lexers._mysql_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Self-updating data files for the MySQL lexer.
-
- Run with `python -I` to update.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-MYSQL_CONSTANTS = (
- 'false',
- 'null',
- 'true',
- 'unknown',
-)
-
-
-# At this time, no easily-parsed, definitive list of data types
-# has been found in the MySQL source code or documentation. (The
-# `sql/sql_yacc.yy` file is definitive but is difficult to parse.)
-# Therefore these types are currently maintained manually.
-#
-# Some words in this list -- like "long", "national", "precision",
-# and "varying" -- appear to only occur in combination with other
-# data type keywords. Therefore they are included as separate words
-# even though they do not naturally occur in syntax separately.
-#
-# This list is also used to strip data types out of the list of
-# MySQL keywords, which is automatically updated later in the file.
-#
-MYSQL_DATATYPES = (
- # Numeric data types
- 'bigint',
- 'bit',
- 'bool',
- 'boolean',
- 'dec',
- 'decimal',
- 'double',
- 'fixed',
- 'float',
- 'float4',
- 'float8',
- 'int',
- 'int1',
- 'int2',
- 'int3',
- 'int4',
- 'int8',
- 'integer',
- 'mediumint',
- 'middleint',
- 'numeric',
- 'precision',
- 'real',
- 'serial',
- 'smallint',
- 'tinyint',
-
- # Date and time data types
- 'date',
- 'datetime',
- 'time',
- 'timestamp',
- 'year',
-
- # String data types
- 'binary',
- 'blob',
- 'char',
- 'enum',
- 'long',
- 'longblob',
- 'longtext',
- 'mediumblob',
- 'mediumtext',
- 'national',
- 'nchar',
- 'nvarchar',
- 'set',
- 'text',
- 'tinyblob',
- 'tinytext',
- 'varbinary',
- 'varchar',
- 'varcharacter',
- 'varying',
-
- # Spatial data types
- 'geometry',
- 'geometrycollection',
- 'linestring',
- 'multilinestring',
- 'multipoint',
- 'multipolygon',
- 'point',
- 'polygon',
-
- # JSON data types
- 'json',
-)
-
-# Everything below this line is auto-generated from the MySQL source code.
-# Run this file in Python and it will update itself.
-# -----------------------------------------------------------------------------
-
-MYSQL_FUNCTIONS = (
- 'abs',
- 'acos',
- 'adddate',
- 'addtime',
- 'aes_decrypt',
- 'aes_encrypt',
- 'any_value',
- 'asin',
- 'atan',
- 'atan2',
- 'benchmark',
- 'bin',
- 'bin_to_uuid',
- 'bit_and',
- 'bit_count',
- 'bit_length',
- 'bit_or',
- 'bit_xor',
- 'can_access_column',
- 'can_access_database',
- 'can_access_event',
- 'can_access_resource_group',
- 'can_access_routine',
- 'can_access_table',
- 'can_access_trigger',
- 'can_access_user',
- 'can_access_view',
- 'cast',
- 'ceil',
- 'ceiling',
- 'char_length',
- 'character_length',
- 'coercibility',
- 'compress',
- 'concat',
- 'concat_ws',
- 'connection_id',
- 'conv',
- 'convert_cpu_id_mask',
- 'convert_interval_to_user_interval',
- 'convert_tz',
- 'cos',
- 'cot',
- 'count',
- 'crc32',
- 'curdate',
- 'current_role',
- 'curtime',
- 'date_add',
- 'date_format',
- 'date_sub',
- 'datediff',
- 'dayname',
- 'dayofmonth',
- 'dayofweek',
- 'dayofyear',
- 'degrees',
- 'elt',
- 'exp',
- 'export_set',
- 'extract',
- 'extractvalue',
- 'field',
- 'find_in_set',
- 'floor',
- 'format_bytes',
- 'format_pico_time',
- 'found_rows',
- 'from_base64',
- 'from_days',
- 'from_unixtime',
- 'get_dd_column_privileges',
- 'get_dd_create_options',
- 'get_dd_index_private_data',
- 'get_dd_index_sub_part_length',
- 'get_dd_property_key_value',
- 'get_dd_schema_options',
- 'get_dd_tablespace_private_data',
- 'get_lock',
- 'greatest',
- 'group_concat',
- 'gtid_subset',
- 'gtid_subtract',
- 'hex',
- 'icu_version',
- 'ifnull',
- 'inet6_aton',
- 'inet6_ntoa',
- 'inet_aton',
- 'inet_ntoa',
- 'instr',
- 'internal_auto_increment',
- 'internal_avg_row_length',
- 'internal_check_time',
- 'internal_checksum',
- 'internal_data_free',
- 'internal_data_length',
- 'internal_dd_char_length',
- 'internal_get_comment_or_error',
- 'internal_get_dd_column_extra',
- 'internal_get_enabled_role_json',
- 'internal_get_hostname',
- 'internal_get_mandatory_roles_json',
- 'internal_get_partition_nodegroup',
- 'internal_get_username',
- 'internal_get_view_warning_or_error',
- 'internal_index_column_cardinality',
- 'internal_index_length',
- 'internal_is_enabled_role',
- 'internal_is_mandatory_role',
- 'internal_keys_disabled',
- 'internal_max_data_length',
- 'internal_table_rows',
- 'internal_tablespace_autoextend_size',
- 'internal_tablespace_data_free',
- 'internal_tablespace_extent_size',
- 'internal_tablespace_extra',
- 'internal_tablespace_free_extents',
- 'internal_tablespace_id',
- 'internal_tablespace_initial_size',
- 'internal_tablespace_logfile_group_name',
- 'internal_tablespace_logfile_group_number',
- 'internal_tablespace_maximum_size',
- 'internal_tablespace_row_format',
- 'internal_tablespace_status',
- 'internal_tablespace_total_extents',
- 'internal_tablespace_type',
- 'internal_tablespace_version',
- 'internal_update_time',
- 'is_free_lock',
- 'is_ipv4',
- 'is_ipv4_compat',
- 'is_ipv4_mapped',
- 'is_ipv6',
- 'is_used_lock',
- 'is_uuid',
- 'is_visible_dd_object',
- 'isnull',
- 'json_array',
- 'json_array_append',
- 'json_array_insert',
- 'json_arrayagg',
- 'json_contains',
- 'json_contains_path',
- 'json_depth',
- 'json_extract',
- 'json_insert',
- 'json_keys',
- 'json_length',
- 'json_merge',
- 'json_merge_patch',
- 'json_merge_preserve',
- 'json_object',
- 'json_objectagg',
- 'json_overlaps',
- 'json_pretty',
- 'json_quote',
- 'json_remove',
- 'json_replace',
- 'json_schema_valid',
- 'json_schema_validation_report',
- 'json_search',
- 'json_set',
- 'json_storage_free',
- 'json_storage_size',
- 'json_type',
- 'json_unquote',
- 'json_valid',
- 'last_day',
- 'last_insert_id',
- 'lcase',
- 'least',
- 'length',
- 'like_range_max',
- 'like_range_min',
- 'ln',
- 'load_file',
- 'locate',
- 'log',
- 'log10',
- 'log2',
- 'lower',
- 'lpad',
- 'ltrim',
- 'make_set',
- 'makedate',
- 'maketime',
- 'master_pos_wait',
- 'max',
- 'mbrcontains',
- 'mbrcoveredby',
- 'mbrcovers',
- 'mbrdisjoint',
- 'mbrequals',
- 'mbrintersects',
- 'mbroverlaps',
- 'mbrtouches',
- 'mbrwithin',
- 'md5',
- 'mid',
- 'min',
- 'monthname',
- 'name_const',
- 'now',
- 'nullif',
- 'oct',
- 'octet_length',
- 'ord',
- 'period_add',
- 'period_diff',
- 'pi',
- 'position',
- 'pow',
- 'power',
- 'ps_current_thread_id',
- 'ps_thread_id',
- 'quote',
- 'radians',
- 'rand',
- 'random_bytes',
- 'regexp_instr',
- 'regexp_like',
- 'regexp_replace',
- 'regexp_substr',
- 'release_all_locks',
- 'release_lock',
- 'remove_dd_property_key',
- 'reverse',
- 'roles_graphml',
- 'round',
- 'rpad',
- 'rtrim',
- 'sec_to_time',
- 'session_user',
- 'sha',
- 'sha1',
- 'sha2',
- 'sign',
- 'sin',
- 'sleep',
- 'soundex',
- 'source_pos_wait',
- 'space',
- 'sqrt',
- 'st_area',
- 'st_asbinary',
- 'st_asgeojson',
- 'st_astext',
- 'st_aswkb',
- 'st_aswkt',
- 'st_buffer',
- 'st_buffer_strategy',
- 'st_centroid',
- 'st_collect',
- 'st_contains',
- 'st_convexhull',
- 'st_crosses',
- 'st_difference',
- 'st_dimension',
- 'st_disjoint',
- 'st_distance',
- 'st_distance_sphere',
- 'st_endpoint',
- 'st_envelope',
- 'st_equals',
- 'st_exteriorring',
- 'st_frechetdistance',
- 'st_geohash',
- 'st_geomcollfromtext',
- 'st_geomcollfromtxt',
- 'st_geomcollfromwkb',
- 'st_geometrycollectionfromtext',
- 'st_geometrycollectionfromwkb',
- 'st_geometryfromtext',
- 'st_geometryfromwkb',
- 'st_geometryn',
- 'st_geometrytype',
- 'st_geomfromgeojson',
- 'st_geomfromtext',
- 'st_geomfromwkb',
- 'st_hausdorffdistance',
- 'st_interiorringn',
- 'st_intersection',
- 'st_intersects',
- 'st_isclosed',
- 'st_isempty',
- 'st_issimple',
- 'st_isvalid',
- 'st_latfromgeohash',
- 'st_latitude',
- 'st_length',
- 'st_linefromtext',
- 'st_linefromwkb',
- 'st_lineinterpolatepoint',
- 'st_lineinterpolatepoints',
- 'st_linestringfromtext',
- 'st_linestringfromwkb',
- 'st_longfromgeohash',
- 'st_longitude',
- 'st_makeenvelope',
- 'st_mlinefromtext',
- 'st_mlinefromwkb',
- 'st_mpointfromtext',
- 'st_mpointfromwkb',
- 'st_mpolyfromtext',
- 'st_mpolyfromwkb',
- 'st_multilinestringfromtext',
- 'st_multilinestringfromwkb',
- 'st_multipointfromtext',
- 'st_multipointfromwkb',
- 'st_multipolygonfromtext',
- 'st_multipolygonfromwkb',
- 'st_numgeometries',
- 'st_numinteriorring',
- 'st_numinteriorrings',
- 'st_numpoints',
- 'st_overlaps',
- 'st_pointatdistance',
- 'st_pointfromgeohash',
- 'st_pointfromtext',
- 'st_pointfromwkb',
- 'st_pointn',
- 'st_polyfromtext',
- 'st_polyfromwkb',
- 'st_polygonfromtext',
- 'st_polygonfromwkb',
- 'st_simplify',
- 'st_srid',
- 'st_startpoint',
- 'st_swapxy',
- 'st_symdifference',
- 'st_touches',
- 'st_transform',
- 'st_union',
- 'st_validate',
- 'st_within',
- 'st_x',
- 'st_y',
- 'statement_digest',
- 'statement_digest_text',
- 'std',
- 'stddev',
- 'stddev_pop',
- 'stddev_samp',
- 'str_to_date',
- 'strcmp',
- 'subdate',
- 'substr',
- 'substring',
- 'substring_index',
- 'subtime',
- 'sum',
- 'sysdate',
- 'system_user',
- 'tan',
- 'time_format',
- 'time_to_sec',
- 'timediff',
- 'to_base64',
- 'to_days',
- 'to_seconds',
- 'trim',
- 'ucase',
- 'uncompress',
- 'uncompressed_length',
- 'unhex',
- 'unix_timestamp',
- 'updatexml',
- 'upper',
- 'uuid',
- 'uuid_short',
- 'uuid_to_bin',
- 'validate_password_strength',
- 'var_pop',
- 'var_samp',
- 'variance',
- 'version',
- 'wait_for_executed_gtid_set',
- 'wait_until_sql_thread_after_gtids',
- 'weekday',
- 'weekofyear',
- 'yearweek',
-)
-
-
-MYSQL_OPTIMIZER_HINTS = (
- 'bka',
- 'bnl',
- 'derived_condition_pushdown',
- 'dupsweedout',
- 'firstmatch',
- 'group_index',
- 'hash_join',
- 'index',
- 'index_merge',
- 'intoexists',
- 'join_fixed_order',
- 'join_index',
- 'join_order',
- 'join_prefix',
- 'join_suffix',
- 'loosescan',
- 'materialization',
- 'max_execution_time',
- 'merge',
- 'mrr',
- 'no_bka',
- 'no_bnl',
- 'no_derived_condition_pushdown',
- 'no_group_index',
- 'no_hash_join',
- 'no_icp',
- 'no_index',
- 'no_index_merge',
- 'no_join_index',
- 'no_merge',
- 'no_mrr',
- 'no_order_index',
- 'no_range_optimization',
- 'no_semijoin',
- 'no_skip_scan',
- 'order_index',
- 'qb_name',
- 'resource_group',
- 'semijoin',
- 'set_var',
- 'skip_scan',
- 'subquery',
-)
-
-
-MYSQL_KEYWORDS = (
- 'accessible',
- 'account',
- 'action',
- 'active',
- 'add',
- 'admin',
- 'after',
- 'against',
- 'aggregate',
- 'algorithm',
- 'all',
- 'alter',
- 'always',
- 'analyze',
- 'and',
- 'any',
- 'array',
- 'as',
- 'asc',
- 'ascii',
- 'asensitive',
- 'assign_gtids_to_anonymous_transactions',
- 'at',
- 'attribute',
- 'authentication',
- 'auto_increment',
- 'autoextend_size',
- 'avg',
- 'avg_row_length',
- 'backup',
- 'before',
- 'begin',
- 'between',
- 'binlog',
- 'block',
- 'both',
- 'btree',
- 'buckets',
- 'by',
- 'byte',
- 'cache',
- 'call',
- 'cascade',
- 'cascaded',
- 'case',
- 'catalog_name',
- 'chain',
- 'challenge_response',
- 'change',
- 'changed',
- 'channel',
- 'character',
- 'charset',
- 'check',
- 'checksum',
- 'cipher',
- 'class_origin',
- 'client',
- 'clone',
- 'close',
- 'coalesce',
- 'code',
- 'collate',
- 'collation',
- 'column',
- 'column_format',
- 'column_name',
- 'columns',
- 'comment',
- 'commit',
- 'committed',
- 'compact',
- 'completion',
- 'component',
- 'compressed',
- 'compression',
- 'concurrent',
- 'condition',
- 'connection',
- 'consistent',
- 'constraint',
- 'constraint_catalog',
- 'constraint_name',
- 'constraint_schema',
- 'contains',
- 'context',
- 'continue',
- 'convert',
- 'cpu',
- 'create',
- 'cross',
- 'cube',
- 'cume_dist',
- 'current',
- 'current_date',
- 'current_time',
- 'current_timestamp',
- 'current_user',
- 'cursor',
- 'cursor_name',
- 'data',
- 'database',
- 'databases',
- 'datafile',
- 'day',
- 'day_hour',
- 'day_microsecond',
- 'day_minute',
- 'day_second',
- 'deallocate',
- 'declare',
- 'default',
- 'default_auth',
- 'definer',
- 'definition',
- 'delay_key_write',
- 'delayed',
- 'delete',
- 'dense_rank',
- 'desc',
- 'describe',
- 'description',
- 'deterministic',
- 'diagnostics',
- 'directory',
- 'disable',
- 'discard',
- 'disk',
- 'distinct',
- 'distinctrow',
- 'div',
- 'do',
- 'drop',
- 'dual',
- 'dumpfile',
- 'duplicate',
- 'dynamic',
- 'each',
- 'else',
- 'elseif',
- 'empty',
- 'enable',
- 'enclosed',
- 'encryption',
- 'end',
- 'ends',
- 'enforced',
- 'engine',
- 'engine_attribute',
- 'engines',
- 'error',
- 'errors',
- 'escape',
- 'escaped',
- 'event',
- 'events',
- 'every',
- 'except',
- 'exchange',
- 'exclude',
- 'execute',
- 'exists',
- 'exit',
- 'expansion',
- 'expire',
- 'explain',
- 'export',
- 'extended',
- 'extent_size',
- 'factor',
- 'failed_login_attempts',
- 'false',
- 'fast',
- 'faults',
- 'fetch',
- 'fields',
- 'file',
- 'file_block_size',
- 'filter',
- 'finish',
- 'first',
- 'first_value',
- 'flush',
- 'following',
- 'follows',
- 'for',
- 'force',
- 'foreign',
- 'format',
- 'found',
- 'from',
- 'full',
- 'fulltext',
- 'function',
- 'general',
- 'generated',
- 'geomcollection',
- 'get',
- 'get_format',
- 'get_master_public_key',
- 'get_source_public_key',
- 'global',
- 'grant',
- 'grants',
- 'group',
- 'group_replication',
- 'grouping',
- 'groups',
- 'gtid_only',
- 'handler',
- 'hash',
- 'having',
- 'help',
- 'high_priority',
- 'histogram',
- 'history',
- 'host',
- 'hosts',
- 'hour',
- 'hour_microsecond',
- 'hour_minute',
- 'hour_second',
- 'identified',
- 'if',
- 'ignore',
- 'ignore_server_ids',
- 'import',
- 'in',
- 'inactive',
- 'index',
- 'indexes',
- 'infile',
- 'initial',
- 'initial_size',
- 'initiate',
- 'inner',
- 'inout',
- 'insensitive',
- 'insert',
- 'insert_method',
- 'install',
- 'instance',
- 'interval',
- 'into',
- 'invisible',
- 'invoker',
- 'io',
- 'io_after_gtids',
- 'io_before_gtids',
- 'io_thread',
- 'ipc',
- 'is',
- 'isolation',
- 'issuer',
- 'iterate',
- 'join',
- 'json_table',
- 'json_value',
- 'key',
- 'key_block_size',
- 'keyring',
- 'keys',
- 'kill',
- 'lag',
- 'language',
- 'last',
- 'last_value',
- 'lateral',
- 'lead',
- 'leading',
- 'leave',
- 'leaves',
- 'left',
- 'less',
- 'level',
- 'like',
- 'limit',
- 'linear',
- 'lines',
- 'list',
- 'load',
- 'local',
- 'localtime',
- 'localtimestamp',
- 'lock',
- 'locked',
- 'locks',
- 'logfile',
- 'logs',
- 'loop',
- 'low_priority',
- 'master',
- 'master_auto_position',
- 'master_bind',
- 'master_compression_algorithms',
- 'master_connect_retry',
- 'master_delay',
- 'master_heartbeat_period',
- 'master_host',
- 'master_log_file',
- 'master_log_pos',
- 'master_password',
- 'master_port',
- 'master_public_key_path',
- 'master_retry_count',
- 'master_ssl',
- 'master_ssl_ca',
- 'master_ssl_capath',
- 'master_ssl_cert',
- 'master_ssl_cipher',
- 'master_ssl_crl',
- 'master_ssl_crlpath',
- 'master_ssl_key',
- 'master_ssl_verify_server_cert',
- 'master_tls_ciphersuites',
- 'master_tls_version',
- 'master_user',
- 'master_zstd_compression_level',
- 'match',
- 'max_connections_per_hour',
- 'max_queries_per_hour',
- 'max_rows',
- 'max_size',
- 'max_updates_per_hour',
- 'max_user_connections',
- 'maxvalue',
- 'medium',
- 'member',
- 'memory',
- 'merge',
- 'message_text',
- 'microsecond',
- 'migrate',
- 'min_rows',
- 'minute',
- 'minute_microsecond',
- 'minute_second',
- 'mod',
- 'mode',
- 'modifies',
- 'modify',
- 'month',
- 'mutex',
- 'mysql_errno',
- 'name',
- 'names',
- 'natural',
- 'ndb',
- 'ndbcluster',
- 'nested',
- 'network_namespace',
- 'never',
- 'new',
- 'next',
- 'no',
- 'no_wait',
- 'no_write_to_binlog',
- 'nodegroup',
- 'none',
- 'not',
- 'nowait',
- 'nth_value',
- 'ntile',
- 'null',
- 'nulls',
- 'number',
- 'of',
- 'off',
- 'offset',
- 'oj',
- 'old',
- 'on',
- 'one',
- 'only',
- 'open',
- 'optimize',
- 'optimizer_costs',
- 'option',
- 'optional',
- 'optionally',
- 'options',
- 'or',
- 'order',
- 'ordinality',
- 'organization',
- 'others',
- 'out',
- 'outer',
- 'outfile',
- 'over',
- 'owner',
- 'pack_keys',
- 'page',
- 'parser',
- 'partial',
- 'partition',
- 'partitioning',
- 'partitions',
- 'password',
- 'password_lock_time',
- 'path',
- 'percent_rank',
- 'persist',
- 'persist_only',
- 'phase',
- 'plugin',
- 'plugin_dir',
- 'plugins',
- 'port',
- 'precedes',
- 'preceding',
- 'prepare',
- 'preserve',
- 'prev',
- 'primary',
- 'privilege_checks_user',
- 'privileges',
- 'procedure',
- 'process',
- 'processlist',
- 'profile',
- 'profiles',
- 'proxy',
- 'purge',
- 'quarter',
- 'query',
- 'quick',
- 'random',
- 'range',
- 'rank',
- 'read',
- 'read_only',
- 'read_write',
- 'reads',
- 'rebuild',
- 'recover',
- 'recursive',
- 'redo_buffer_size',
- 'redundant',
- 'reference',
- 'references',
- 'regexp',
- 'registration',
- 'relay',
- 'relay_log_file',
- 'relay_log_pos',
- 'relay_thread',
- 'relaylog',
- 'release',
- 'reload',
- 'remove',
- 'rename',
- 'reorganize',
- 'repair',
- 'repeat',
- 'repeatable',
- 'replace',
- 'replica',
- 'replicas',
- 'replicate_do_db',
- 'replicate_do_table',
- 'replicate_ignore_db',
- 'replicate_ignore_table',
- 'replicate_rewrite_db',
- 'replicate_wild_do_table',
- 'replicate_wild_ignore_table',
- 'replication',
- 'require',
- 'require_row_format',
- 'require_table_primary_key_check',
- 'reset',
- 'resignal',
- 'resource',
- 'respect',
- 'restart',
- 'restore',
- 'restrict',
- 'resume',
- 'retain',
- 'return',
- 'returned_sqlstate',
- 'returning',
- 'returns',
- 'reuse',
- 'reverse',
- 'revoke',
- 'right',
- 'rlike',
- 'role',
- 'rollback',
- 'rollup',
- 'rotate',
- 'routine',
- 'row',
- 'row_count',
- 'row_format',
- 'row_number',
- 'rows',
- 'rtree',
- 'savepoint',
- 'schedule',
- 'schema',
- 'schema_name',
- 'schemas',
- 'second',
- 'second_microsecond',
- 'secondary',
- 'secondary_engine',
- 'secondary_engine_attribute',
- 'secondary_load',
- 'secondary_unload',
- 'security',
- 'select',
- 'sensitive',
- 'separator',
- 'serializable',
- 'server',
- 'session',
- 'share',
- 'show',
- 'shutdown',
- 'signal',
- 'signed',
- 'simple',
- 'skip',
- 'slave',
- 'slow',
- 'snapshot',
- 'socket',
- 'some',
- 'soname',
- 'sounds',
- 'source',
- 'source_auto_position',
- 'source_bind',
- 'source_compression_algorithms',
- 'source_connect_retry',
- 'source_connection_auto_failover',
- 'source_delay',
- 'source_heartbeat_period',
- 'source_host',
- 'source_log_file',
- 'source_log_pos',
- 'source_password',
- 'source_port',
- 'source_public_key_path',
- 'source_retry_count',
- 'source_ssl',
- 'source_ssl_ca',
- 'source_ssl_capath',
- 'source_ssl_cert',
- 'source_ssl_cipher',
- 'source_ssl_crl',
- 'source_ssl_crlpath',
- 'source_ssl_key',
- 'source_ssl_verify_server_cert',
- 'source_tls_ciphersuites',
- 'source_tls_version',
- 'source_user',
- 'source_zstd_compression_level',
- 'spatial',
- 'specific',
- 'sql',
- 'sql_after_gtids',
- 'sql_after_mts_gaps',
- 'sql_before_gtids',
- 'sql_big_result',
- 'sql_buffer_result',
- 'sql_calc_found_rows',
- 'sql_no_cache',
- 'sql_small_result',
- 'sql_thread',
- 'sql_tsi_day',
- 'sql_tsi_hour',
- 'sql_tsi_minute',
- 'sql_tsi_month',
- 'sql_tsi_quarter',
- 'sql_tsi_second',
- 'sql_tsi_week',
- 'sql_tsi_year',
- 'sqlexception',
- 'sqlstate',
- 'sqlwarning',
- 'srid',
- 'ssl',
- 'stacked',
- 'start',
- 'starting',
- 'starts',
- 'stats_auto_recalc',
- 'stats_persistent',
- 'stats_sample_pages',
- 'status',
- 'stop',
- 'storage',
- 'stored',
- 'straight_join',
- 'stream',
- 'string',
- 'subclass_origin',
- 'subject',
- 'subpartition',
- 'subpartitions',
- 'super',
- 'suspend',
- 'swaps',
- 'switches',
- 'system',
- 'table',
- 'table_checksum',
- 'table_name',
- 'tables',
- 'tablespace',
- 'temporary',
- 'temptable',
- 'terminated',
- 'than',
- 'then',
- 'thread_priority',
- 'ties',
- 'timestampadd',
- 'timestampdiff',
- 'tls',
- 'to',
- 'trailing',
- 'transaction',
- 'trigger',
- 'triggers',
- 'true',
- 'truncate',
- 'type',
- 'types',
- 'unbounded',
- 'uncommitted',
- 'undefined',
- 'undo',
- 'undo_buffer_size',
- 'undofile',
- 'unicode',
- 'uninstall',
- 'union',
- 'unique',
- 'unknown',
- 'unlock',
- 'unregister',
- 'unsigned',
- 'until',
- 'update',
- 'upgrade',
- 'usage',
- 'use',
- 'use_frm',
- 'user',
- 'user_resources',
- 'using',
- 'utc_date',
- 'utc_time',
- 'utc_timestamp',
- 'validation',
- 'value',
- 'values',
- 'variables',
- 'vcpu',
- 'view',
- 'virtual',
- 'visible',
- 'wait',
- 'warnings',
- 'week',
- 'weight_string',
- 'when',
- 'where',
- 'while',
- 'window',
- 'with',
- 'without',
- 'work',
- 'wrapper',
- 'write',
- 'x509',
- 'xa',
- 'xid',
- 'xml',
- 'xor',
- 'year_month',
- 'zerofill',
- 'zone',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import re
- from urllib.request import urlopen
-
- from pygments.util import format_lines
-
- # MySQL source code
- SOURCE_URL = 'https://github.com/mysql/mysql-server/raw/8.0'
- LEX_URL = SOURCE_URL + '/sql/lex.h'
- ITEM_CREATE_URL = SOURCE_URL + '/sql/item_create.cc'
-
-
- def update_myself():
- # Pull content from lex.h.
- lex_file = urlopen(LEX_URL).read().decode('utf8', errors='ignore')
- keywords = parse_lex_keywords(lex_file)
- functions = parse_lex_functions(lex_file)
- optimizer_hints = parse_lex_optimizer_hints(lex_file)
-
- # Parse content in item_create.cc.
- item_create_file = urlopen(ITEM_CREATE_URL).read().decode('utf8', errors='ignore')
- functions.update(parse_item_create_functions(item_create_file))
-
- # Remove data types from the set of keywords.
- keywords -= set(MYSQL_DATATYPES)
-
- update_content('MYSQL_FUNCTIONS', tuple(sorted(functions)))
- update_content('MYSQL_KEYWORDS', tuple(sorted(keywords)))
- update_content('MYSQL_OPTIMIZER_HINTS', tuple(sorted(optimizer_hints)))
-
-
- def parse_lex_keywords(f):
- """Parse keywords in lex.h."""
-
- results = set()
- for m in re.finditer(r'{SYM(?:_HK)?\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
- results.add(m.group('keyword').lower())
-
- if not results:
- raise ValueError('No keywords found')
-
- return results
-
-
- def parse_lex_optimizer_hints(f):
- """Parse optimizer hints in lex.h."""
-
- results = set()
- for m in re.finditer(r'{SYM_H\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
- results.add(m.group('keyword').lower())
-
- if not results:
- raise ValueError('No optimizer hints found')
-
- return results
-
-
- def parse_lex_functions(f):
- """Parse MySQL function names from lex.h."""
-
- results = set()
- for m in re.finditer(r'{SYM_FN?\("(?P<function>[a-z0-9_]+)",', f, flags=re.I):
- results.add(m.group('function').lower())
-
- if not results:
- raise ValueError('No lex functions found')
-
- return results
-
-
- def parse_item_create_functions(f):
- """Parse MySQL function names from item_create.cc."""
-
- results = set()
- for m in re.finditer(r'{"(?P<function>[^"]+?)",\s*SQL_F[^(]+?\(', f, flags=re.I):
- results.add(m.group('function').lower())
-
- if not results:
- raise ValueError('No item_create functions found')
-
- return results
-
-
- def update_content(field_name, content):
- """Overwrite this file with content parsed from MySQL's source code."""
-
- with open(__file__, encoding="utf-8") as f:
- data = f.read()
-
- # Line to start/end inserting
- re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % field_name, re.M | re.S)
- m = re_match.search(data)
- if not m:
- raise ValueError('Could not find an existing definition for %s' % field_name)
-
- new_block = format_lines(field_name, content)
- data = data[:m.start()] + new_block + data[m.end():]
-
- with open(__file__, 'w', encoding='utf-8', newline='\n') as f:
- f.write(data)
-
- update_myself()
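
For a quick sense of what the self-update code above extracts, here is a hedged, standalone sketch that applies the same SYM()-style pattern used by parse_lex_keywords() to a small fabricated stand-in for sql/lex.h; the fragment and variable names are invented for the demo, and no network access is involved:

import re

# Invented stand-in for a few lines of MySQL's sql/lex.h (demo data only).
fake_lex_h = '''
  {SYM("select", SELECT_SYM)},
  {SYM("straight_join", STRAIGHT_JOIN)},
  {SYM_HK("bka", BKA_HINT)},
  {SYM_FN("now", FUNC_NOW)},
'''

# The same pattern parse_lex_keywords() uses: SYM(...) and SYM_HK(...) entries.
keyword_re = re.compile(r'{SYM(?:_HK)?\("(?P<keyword>[a-z0-9_]+)",', re.I)
keywords = sorted(m.group('keyword').lower() for m in keyword_re.finditer(fake_lex_h))
print(keywords)  # ['bka', 'select', 'straight_join'] -- SYM_FN entries are handled by parse_lex_functions() instead

The deleted update_myself() above then subtracts MYSQL_DATATYPES from the parsed keyword set and rewrites this module in place via format_lines(), which is why everything below the "auto-generated" marker is regenerated rather than edited by hand.
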
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_openedge_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_openedge_builtins.py
deleted file mode 100644
index 7fdfb41..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_openedge_builtins.py
+++ /dev/null
@@ -1,2600 +0,0 @@
-"""
- pygments.lexers._openedge_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the OpenEdgeLexer.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-OPENEDGEKEYWORDS = (
- 'ABS',
- 'ABSO',
- 'ABSOL',
- 'ABSOLU',
- 'ABSOLUT',
- 'ABSOLUTE',
- 'ABSTRACT',
- 'ACCELERATOR',
- 'ACCUM',
- 'ACCUMU',
- 'ACCUMUL',
- 'ACCUMULA',
- 'ACCUMULAT',
- 'ACCUMULATE',
- 'ACTIVE-FORM',
- 'ACTIVE-WINDOW',
- 'ADD',
- 'ADD-BUFFER',
- 'ADD-CALC-COLUMN',
- 'ADD-COLUMNS-FROM',
- 'ADD-EVENTS-PROCEDURE',
- 'ADD-FIELDS-FROM',
- 'ADD-FIRST',
- 'ADD-INDEX-FIELD',
- 'ADD-LAST',
- 'ADD-LIKE-COLUMN',
- 'ADD-LIKE-FIELD',
- 'ADD-LIKE-INDEX',
- 'ADD-NEW-FIELD',
- 'ADD-NEW-INDEX',
- 'ADD-SCHEMA-LOCATION',
- 'ADD-SUPER-PROCEDURE',
- 'ADM-DATA',
- 'ADVISE',
- 'ALERT-BOX',
- 'ALIAS',
- 'ALL',
- 'ALLOW-COLUMN-SEARCHING',
- 'ALLOW-REPLICATION',
- 'ALTER',
- 'ALWAYS-ON-TOP',
- 'AMBIG',
- 'AMBIGU',
- 'AMBIGUO',
- 'AMBIGUOU',
- 'AMBIGUOUS',
- 'ANALYZ',
- 'ANALYZE',
- 'AND',
- 'ANSI-ONLY',
- 'ANY',
- 'ANYWHERE',
- 'APPEND',
- 'APPL-ALERT',
- 'APPL-ALERT-',
- 'APPL-ALERT-B',
- 'APPL-ALERT-BO',
- 'APPL-ALERT-BOX',
- 'APPL-ALERT-BOXE',
- 'APPL-ALERT-BOXES',
- 'APPL-CONTEXT-ID',
- 'APPLICATION',
- 'APPLY',
- 'APPSERVER-INFO',
- 'APPSERVER-PASSWORD',
- 'APPSERVER-USERID',
- 'ARRAY-MESSAGE',
- 'AS',
- 'ASC',
- 'ASCE',
- 'ASCEN',
- 'ASCEND',
- 'ASCENDI',
- 'ASCENDIN',
- 'ASCENDING',
- 'ASK-OVERWRITE',
- 'ASSEMBLY',
- 'ASSIGN',
- 'ASYNC-REQUEST-COUNT',
- 'ASYNC-REQUEST-HANDLE',
- 'ASYNCHRONOUS',
- 'AT',
- 'ATTACHED-PAIRLIST',
- 'ATTR',
- 'ATTR-SPACE',
- 'ATTRI',
- 'ATTRIB',
- 'ATTRIBU',
- 'ATTRIBUT',
- 'AUDIT-CONTROL',
- 'AUDIT-ENABLED',
- 'AUDIT-EVENT-CONTEXT',
- 'AUDIT-POLICY',
- 'AUTHENTICATION-FAILED',
- 'AUTHORIZATION',
- 'AUTO-COMP',
- 'AUTO-COMPL',
- 'AUTO-COMPLE',
- 'AUTO-COMPLET',
- 'AUTO-COMPLETI',
- 'AUTO-COMPLETIO',
- 'AUTO-COMPLETION',
- 'AUTO-END-KEY',
- 'AUTO-ENDKEY',
- 'AUTO-GO',
- 'AUTO-IND',
- 'AUTO-INDE',
- 'AUTO-INDEN',
- 'AUTO-INDENT',
- 'AUTO-RESIZE',
- 'AUTO-RET',
- 'AUTO-RETU',
- 'AUTO-RETUR',
- 'AUTO-RETURN',
- 'AUTO-SYNCHRONIZE',
- 'AUTO-Z',
- 'AUTO-ZA',
- 'AUTO-ZAP',
- 'AUTOMATIC',
- 'AVAIL',
- 'AVAILA',
- 'AVAILAB',
- 'AVAILABL',
- 'AVAILABLE',
- 'AVAILABLE-FORMATS',
- 'AVE',
- 'AVER',
- 'AVERA',
- 'AVERAG',
- 'AVERAGE',
- 'AVG',
- 'BACK',
- 'BACKG',
- 'BACKGR',
- 'BACKGRO',
- 'BACKGROU',
- 'BACKGROUN',
- 'BACKGROUND',
- 'BACKWARD',
- 'BACKWARDS',
- 'BASE64-DECODE',
- 'BASE64-ENCODE',
- 'BASE-ADE',
- 'BASE-KEY',
- 'BATCH',
- 'BATCH-',
- 'BATCH-M',
- 'BATCH-MO',
- 'BATCH-MOD',
- 'BATCH-MODE',
- 'BATCH-SIZE',
- 'BEFORE-H',
- 'BEFORE-HI',
- 'BEFORE-HID',
- 'BEFORE-HIDE',
- 'BEGIN-EVENT-GROUP',
- 'BEGINS',
- 'BELL',
- 'BETWEEN',
- 'BGC',
- 'BGCO',
- 'BGCOL',
- 'BGCOLO',
- 'BGCOLOR',
- 'BIG-ENDIAN',
- 'BINARY',
- 'BIND',
- 'BIND-WHERE',
- 'BLANK',
- 'BLOCK-ITERATION-DISPLAY',
- 'BLOCK-LEVEL',
- 'BORDER-B',
- 'BORDER-BO',
- 'BORDER-BOT',
- 'BORDER-BOTT',
- 'BORDER-BOTTO',
- 'BORDER-BOTTOM-CHARS',
- 'BORDER-BOTTOM-P',
- 'BORDER-BOTTOM-PI',
- 'BORDER-BOTTOM-PIX',
- 'BORDER-BOTTOM-PIXE',
- 'BORDER-BOTTOM-PIXEL',
- 'BORDER-BOTTOM-PIXELS',
- 'BORDER-L',
- 'BORDER-LE',
- 'BORDER-LEF',
- 'BORDER-LEFT',
- 'BORDER-LEFT-',
- 'BORDER-LEFT-C',
- 'BORDER-LEFT-CH',
- 'BORDER-LEFT-CHA',
- 'BORDER-LEFT-CHAR',
- 'BORDER-LEFT-CHARS',
- 'BORDER-LEFT-P',
- 'BORDER-LEFT-PI',
- 'BORDER-LEFT-PIX',
- 'BORDER-LEFT-PIXE',
- 'BORDER-LEFT-PIXEL',
- 'BORDER-LEFT-PIXELS',
- 'BORDER-R',
- 'BORDER-RI',
- 'BORDER-RIG',
- 'BORDER-RIGH',
- 'BORDER-RIGHT',
- 'BORDER-RIGHT-',
- 'BORDER-RIGHT-C',
- 'BORDER-RIGHT-CH',
- 'BORDER-RIGHT-CHA',
- 'BORDER-RIGHT-CHAR',
- 'BORDER-RIGHT-CHARS',
- 'BORDER-RIGHT-P',
- 'BORDER-RIGHT-PI',
- 'BORDER-RIGHT-PIX',
- 'BORDER-RIGHT-PIXE',
- 'BORDER-RIGHT-PIXEL',
- 'BORDER-RIGHT-PIXELS',
- 'BORDER-T',
- 'BORDER-TO',
- 'BORDER-TOP',
- 'BORDER-TOP-',
- 'BORDER-TOP-C',
- 'BORDER-TOP-CH',
- 'BORDER-TOP-CHA',
- 'BORDER-TOP-CHAR',
- 'BORDER-TOP-CHARS',
- 'BORDER-TOP-P',
- 'BORDER-TOP-PI',
- 'BORDER-TOP-PIX',
- 'BORDER-TOP-PIXE',
- 'BORDER-TOP-PIXEL',
- 'BORDER-TOP-PIXELS',
- 'BOX',
- 'BOX-SELECT',
- 'BOX-SELECTA',
- 'BOX-SELECTAB',
- 'BOX-SELECTABL',
- 'BOX-SELECTABLE',
- 'BREAK',
- 'BROWSE',
- 'BUFFER',
- 'BUFFER-CHARS',
- 'BUFFER-COMPARE',
- 'BUFFER-COPY',
- 'BUFFER-CREATE',
- 'BUFFER-DELETE',
- 'BUFFER-FIELD',
- 'BUFFER-HANDLE',
- 'BUFFER-LINES',
- 'BUFFER-NAME',
- 'BUFFER-PARTITION-ID',
- 'BUFFER-RELEASE',
- 'BUFFER-VALUE',
- 'BUTTON',
- 'BUTTONS',
- 'BY',
- 'BY-POINTER',
- 'BY-VARIANT-POINTER',
- 'CACHE',
- 'CACHE-SIZE',
- 'CALL',
- 'CALL-NAME',
- 'CALL-TYPE',
- 'CAN-CREATE',
- 'CAN-DELETE',
- 'CAN-DO',
- 'CAN-DO-DOMAIN-SUPPORT',
- 'CAN-FIND',
- 'CAN-QUERY',
- 'CAN-READ',
- 'CAN-SET',
- 'CAN-WRITE',
- 'CANCEL-BREAK',
- 'CANCEL-BUTTON',
- 'CAPS',
- 'CAREFUL-PAINT',
- 'CASE',
- 'CASE-SEN',
- 'CASE-SENS',
- 'CASE-SENSI',
- 'CASE-SENSIT',
- 'CASE-SENSITI',
- 'CASE-SENSITIV',
- 'CASE-SENSITIVE',
- 'CAST',
- 'CATCH',
- 'CDECL',
- 'CENTER',
- 'CENTERE',
- 'CENTERED',
- 'CHAINED',
- 'CHARACTER',
- 'CHARACTER_LENGTH',
- 'CHARSET',
- 'CHECK',
- 'CHECKED',
- 'CHOOSE',
- 'CHR',
- 'CLASS',
- 'CLASS-TYPE',
- 'CLEAR',
- 'CLEAR-APPL-CONTEXT',
- 'CLEAR-LOG',
- 'CLEAR-SELECT',
- 'CLEAR-SELECTI',
- 'CLEAR-SELECTIO',
- 'CLEAR-SELECTION',
- 'CLEAR-SORT-ARROW',
- 'CLEAR-SORT-ARROWS',
- 'CLIENT-CONNECTION-ID',
- 'CLIENT-PRINCIPAL',
- 'CLIENT-TTY',
- 'CLIENT-TYPE',
- 'CLIENT-WORKSTATION',
- 'CLIPBOARD',
- 'CLOSE',
- 'CLOSE-LOG',
- 'CODE',
- 'CODEBASE-LOCATOR',
- 'CODEPAGE',
- 'CODEPAGE-CONVERT',
- 'COL',
- 'COL-OF',
- 'COLLATE',
- 'COLON',
- 'COLON-ALIGN',
- 'COLON-ALIGNE',
- 'COLON-ALIGNED',
- 'COLOR',
- 'COLOR-TABLE',
- 'COLU',
- 'COLUM',
- 'COLUMN',
- 'COLUMN-BGCOLOR',
- 'COLUMN-DCOLOR',
- 'COLUMN-FGCOLOR',
- 'COLUMN-FONT',
- 'COLUMN-LAB',
- 'COLUMN-LABE',
- 'COLUMN-LABEL',
- 'COLUMN-MOVABLE',
- 'COLUMN-OF',
- 'COLUMN-PFCOLOR',
- 'COLUMN-READ-ONLY',
- 'COLUMN-RESIZABLE',
- 'COLUMN-SCROLLING',
- 'COLUMNS',
- 'COM-HANDLE',
- 'COM-SELF',
- 'COMBO-BOX',
- 'COMMAND',
- 'COMPARES',
- 'COMPILE',
- 'COMPILER',
- 'COMPLETE',
- 'CONFIG-NAME',
- 'CONNECT',
- 'CONNECTED',
- 'CONSTRUCTOR',
- 'CONTAINS',
- 'CONTENTS',
- 'CONTEXT',
- 'CONTEXT-HELP',
- 'CONTEXT-HELP-FILE',
- 'CONTEXT-HELP-ID',
- 'CONTEXT-POPUP',
- 'CONTROL',
- 'CONTROL-BOX',
- 'CONTROL-FRAME',
- 'CONVERT',
- 'CONVERT-3D-COLORS',
- 'CONVERT-TO-OFFS',
- 'CONVERT-TO-OFFSE',
- 'CONVERT-TO-OFFSET',
- 'COPY-DATASET',
- 'COPY-LOB',
- 'COPY-SAX-ATTRIBUTES',
- 'COPY-TEMP-TABLE',
- 'COUNT',
- 'COUNT-OF',
- 'CPCASE',
- 'CPCOLL',
- 'CPINTERNAL',
- 'CPLOG',
- 'CPPRINT',
- 'CPRCODEIN',
- 'CPRCODEOUT',
- 'CPSTREAM',
- 'CPTERM',
- 'CRC-VALUE',
- 'CREATE',
- 'CREATE-LIKE',
- 'CREATE-LIKE-SEQUENTIAL',
- 'CREATE-NODE-NAMESPACE',
- 'CREATE-RESULT-LIST-ENTRY',
- 'CREATE-TEST-FILE',
- 'CURRENT',
- 'CURRENT-CHANGED',
- 'CURRENT-COLUMN',
- 'CURRENT-ENV',
- 'CURRENT-ENVI',
- 'CURRENT-ENVIR',
- 'CURRENT-ENVIRO',
- 'CURRENT-ENVIRON',
- 'CURRENT-ENVIRONM',
- 'CURRENT-ENVIRONME',
- 'CURRENT-ENVIRONMEN',
- 'CURRENT-ENVIRONMENT',
- 'CURRENT-ITERATION',
- 'CURRENT-LANG',
- 'CURRENT-LANGU',
- 'CURRENT-LANGUA',
- 'CURRENT-LANGUAG',
- 'CURRENT-LANGUAGE',
- 'CURRENT-QUERY',
- 'CURRENT-REQUEST-INFO',
- 'CURRENT-RESPONSE-INFO',
- 'CURRENT-RESULT-ROW',
- 'CURRENT-ROW-MODIFIED',
- 'CURRENT-VALUE',
- 'CURRENT-WINDOW',
- 'CURRENT_DATE',
- 'CURS',
- 'CURSO',
- 'CURSOR',
- 'CURSOR-CHAR',
- 'CURSOR-LINE',
- 'CURSOR-OFFSET',
- 'DATA-BIND',
- 'DATA-ENTRY-RET',
- 'DATA-ENTRY-RETU',
- 'DATA-ENTRY-RETUR',
- 'DATA-ENTRY-RETURN',
- 'DATA-REL',
- 'DATA-RELA',
- 'DATA-RELAT',
- 'DATA-RELATI',
- 'DATA-RELATIO',
- 'DATA-RELATION',
- 'DATA-SOURCE',
- 'DATA-SOURCE-COMPLETE-MAP',
- 'DATA-SOURCE-MODIFIED',
- 'DATA-SOURCE-ROWID',
- 'DATA-T',
- 'DATA-TY',
- 'DATA-TYP',
- 'DATA-TYPE',
- 'DATABASE',
- 'DATASERVERS',
- 'DATASET',
- 'DATASET-HANDLE',
- 'DATE',
- 'DATE-F',
- 'DATE-FO',
- 'DATE-FOR',
- 'DATE-FORM',
- 'DATE-FORMA',
- 'DATE-FORMAT',
- 'DAY',
- 'DB-CONTEXT',
- 'DB-REFERENCES',
- 'DBCODEPAGE',
- 'DBCOLLATION',
- 'DBNAME',
- 'DBPARAM',
- 'DBREST',
- 'DBRESTR',
- 'DBRESTRI',
- 'DBRESTRIC',
- 'DBRESTRICT',
- 'DBRESTRICTI',
- 'DBRESTRICTIO',
- 'DBRESTRICTION',
- 'DBRESTRICTIONS',
- 'DBTASKID',
- 'DBTYPE',
- 'DBVERS',
- 'DBVERSI',
- 'DBVERSIO',
- 'DBVERSION',
- 'DCOLOR',
- 'DDE',
- 'DDE-ERROR',
- 'DDE-I',
- 'DDE-ID',
- 'DDE-ITEM',
- 'DDE-NAME',
- 'DDE-TOPIC',
- 'DEBLANK',
- 'DEBU',
- 'DEBUG',
- 'DEBUG-ALERT',
- 'DEBUG-LIST',
- 'DEBUGGER',
- 'DECIMAL',
- 'DECIMALS',
- 'DECLARE',
- 'DECLARE-NAMESPACE',
- 'DECRYPT',
- 'DEFAULT',
- 'DEFAULT-B',
- 'DEFAULT-BU',
- 'DEFAULT-BUFFER-HANDLE',
- 'DEFAULT-BUT',
- 'DEFAULT-BUTT',
- 'DEFAULT-BUTTO',
- 'DEFAULT-BUTTON',
- 'DEFAULT-COMMIT',
- 'DEFAULT-EX',
- 'DEFAULT-EXT',
- 'DEFAULT-EXTE',
- 'DEFAULT-EXTEN',
- 'DEFAULT-EXTENS',
- 'DEFAULT-EXTENSI',
- 'DEFAULT-EXTENSIO',
- 'DEFAULT-EXTENSION',
- 'DEFAULT-NOXL',
- 'DEFAULT-NOXLA',
- 'DEFAULT-NOXLAT',
- 'DEFAULT-NOXLATE',
- 'DEFAULT-VALUE',
- 'DEFAULT-WINDOW',
- 'DEFINE',
- 'DEFINE-USER-EVENT-MANAGER',
- 'DEFINED',
- 'DEL',
- 'DELE',
- 'DELEGATE',
- 'DELET',
- 'DELETE PROCEDURE',
- 'DELETE',
- 'DELETE-CHAR',
- 'DELETE-CHARA',
- 'DELETE-CHARAC',
- 'DELETE-CHARACT',
- 'DELETE-CHARACTE',
- 'DELETE-CHARACTER',
- 'DELETE-CURRENT-ROW',
- 'DELETE-LINE',
- 'DELETE-RESULT-LIST-ENTRY',
- 'DELETE-SELECTED-ROW',
- 'DELETE-SELECTED-ROWS',
- 'DELIMITER',
- 'DESC',
- 'DESCE',
- 'DESCEN',
- 'DESCEND',
- 'DESCENDI',
- 'DESCENDIN',
- 'DESCENDING',
- 'DESELECT-FOCUSED-ROW',
- 'DESELECT-ROWS',
- 'DESELECT-SELECTED-ROW',
- 'DESELECTION',
- 'DESTRUCTOR',
- 'DIALOG-BOX',
- 'DICT',
- 'DICTI',
- 'DICTIO',
- 'DICTION',
- 'DICTIONA',
- 'DICTIONAR',
- 'DICTIONARY',
- 'DIR',
- 'DISABLE',
- 'DISABLE-AUTO-ZAP',
- 'DISABLE-DUMP-TRIGGERS',
- 'DISABLE-LOAD-TRIGGERS',
- 'DISABLED',
- 'DISCON',
- 'DISCONN',
- 'DISCONNE',
- 'DISCONNEC',
- 'DISCONNECT',
- 'DISP',
- 'DISPL',
- 'DISPLA',
- 'DISPLAY',
- 'DISPLAY-MESSAGE',
- 'DISPLAY-T',
- 'DISPLAY-TY',
- 'DISPLAY-TYP',
- 'DISPLAY-TYPE',
- 'DISTINCT',
- 'DO',
- 'DOMAIN-DESCRIPTION',
- 'DOMAIN-NAME',
- 'DOMAIN-TYPE',
- 'DOS',
- 'DOUBLE',
- 'DOWN',
- 'DRAG-ENABLED',
- 'DROP',
- 'DROP-DOWN',
- 'DROP-DOWN-LIST',
- 'DROP-FILE-NOTIFY',
- 'DROP-TARGET',
- 'DS-CLOSE-CURSOR',
- 'DSLOG-MANAGER',
- 'DUMP',
- 'DYNAMIC',
- 'DYNAMIC-ENUM',
- 'DYNAMIC-FUNCTION',
- 'DYNAMIC-INVOKE',
- 'EACH',
- 'ECHO',
- 'EDGE',
- 'EDGE-',
- 'EDGE-C',
- 'EDGE-CH',
- 'EDGE-CHA',
- 'EDGE-CHAR',
- 'EDGE-CHARS',
- 'EDGE-P',
- 'EDGE-PI',
- 'EDGE-PIX',
- 'EDGE-PIXE',
- 'EDGE-PIXEL',
- 'EDGE-PIXELS',
- 'EDIT-CAN-PASTE',
- 'EDIT-CAN-UNDO',
- 'EDIT-CLEAR',
- 'EDIT-COPY',
- 'EDIT-CUT',
- 'EDIT-PASTE',
- 'EDIT-UNDO',
- 'EDITING',
- 'EDITOR',
- 'ELSE',
- 'EMPTY',
- 'EMPTY-TEMP-TABLE',
- 'ENABLE',
- 'ENABLED-FIELDS',
- 'ENCODE',
- 'ENCRYPT',
- 'ENCRYPT-AUDIT-MAC-KEY',
- 'ENCRYPTION-SALT',
- 'END',
- 'END-DOCUMENT',
- 'END-ELEMENT',
- 'END-EVENT-GROUP',
- 'END-FILE-DROP',
- 'END-KEY',
- 'END-MOVE',
- 'END-RESIZE',
- 'END-ROW-RESIZE',
- 'END-USER-PROMPT',
- 'ENDKEY',
- 'ENTERED',
- 'ENTITY-EXPANSION-LIMIT',
- 'ENTRY',
- 'ENUM',
- 'EQ',
- 'ERROR',
- 'ERROR-COL',
- 'ERROR-COLU',
- 'ERROR-COLUM',
- 'ERROR-COLUMN',
- 'ERROR-ROW',
- 'ERROR-STACK-TRACE',
- 'ERROR-STAT',
- 'ERROR-STATU',
- 'ERROR-STATUS',
- 'ESCAPE',
- 'ETIME',
- 'EVENT',
- 'EVENT-GROUP-ID',
- 'EVENT-PROCEDURE',
- 'EVENT-PROCEDURE-CONTEXT',
- 'EVENT-T',
- 'EVENT-TY',
- 'EVENT-TYP',
- 'EVENT-TYPE',
- 'EVENTS',
- 'EXCEPT',
- 'EXCLUSIVE',
- 'EXCLUSIVE-',
- 'EXCLUSIVE-ID',
- 'EXCLUSIVE-L',
- 'EXCLUSIVE-LO',
- 'EXCLUSIVE-LOC',
- 'EXCLUSIVE-LOCK',
- 'EXCLUSIVE-WEB-USER',
- 'EXECUTE',
- 'EXISTS',
- 'EXP',
- 'EXPAND',
- 'EXPANDABLE',
- 'EXPLICIT',
- 'EXPORT',
- 'EXPORT-PRINCIPAL',
- 'EXTENDED',
- 'EXTENT',
- 'EXTERNAL',
- 'FALSE',
- 'FETCH',
- 'FETCH-SELECTED-ROW',
- 'FGC',
- 'FGCO',
- 'FGCOL',
- 'FGCOLO',
- 'FGCOLOR',
- 'FIELD',
- 'FIELDS',
- 'FILE',
- 'FILE-CREATE-DATE',
- 'FILE-CREATE-TIME',
- 'FILE-INFO',
- 'FILE-INFOR',
- 'FILE-INFORM',
- 'FILE-INFORMA',
- 'FILE-INFORMAT',
- 'FILE-INFORMATI',
- 'FILE-INFORMATIO',
- 'FILE-INFORMATION',
- 'FILE-MOD-DATE',
- 'FILE-MOD-TIME',
- 'FILE-NAME',
- 'FILE-OFF',
- 'FILE-OFFS',
- 'FILE-OFFSE',
- 'FILE-OFFSET',
- 'FILE-SIZE',
- 'FILE-TYPE',
- 'FILENAME',
- 'FILL',
- 'FILL-IN',
- 'FILLED',
- 'FILTERS',
- 'FINAL',
- 'FINALLY',
- 'FIND',
- 'FIND-BY-ROWID',
- 'FIND-CASE-SENSITIVE',
- 'FIND-CURRENT',
- 'FIND-FIRST',
- 'FIND-GLOBAL',
- 'FIND-LAST',
- 'FIND-NEXT-OCCURRENCE',
- 'FIND-PREV-OCCURRENCE',
- 'FIND-SELECT',
- 'FIND-UNIQUE',
- 'FIND-WRAP-AROUND',
- 'FINDER',
- 'FIRST',
- 'FIRST-ASYNCH-REQUEST',
- 'FIRST-CHILD',
- 'FIRST-COLUMN',
- 'FIRST-FORM',
- 'FIRST-OBJECT',
- 'FIRST-OF',
- 'FIRST-PROC',
- 'FIRST-PROCE',
- 'FIRST-PROCED',
- 'FIRST-PROCEDU',
- 'FIRST-PROCEDUR',
- 'FIRST-PROCEDURE',
- 'FIRST-SERVER',
- 'FIRST-TAB-I',
- 'FIRST-TAB-IT',
- 'FIRST-TAB-ITE',
- 'FIRST-TAB-ITEM',
- 'FIT-LAST-COLUMN',
- 'FIXED-ONLY',
- 'FLAT-BUTTON',
- 'FLOAT',
- 'FOCUS',
- 'FOCUSED-ROW',
- 'FOCUSED-ROW-SELECTED',
- 'FONT',
- 'FONT-TABLE',
- 'FOR',
- 'FORCE-FILE',
- 'FORE',
- 'FOREG',
- 'FOREGR',
- 'FOREGRO',
- 'FOREGROU',
- 'FOREGROUN',
- 'FOREGROUND',
- 'FORM INPUT',
- 'FORM',
- 'FORM-LONG-INPUT',
- 'FORMA',
- 'FORMAT',
- 'FORMATTE',
- 'FORMATTED',
- 'FORWARD',
- 'FORWARDS',
- 'FRAGMEN',
- 'FRAGMENT',
- 'FRAM',
- 'FRAME',
- 'FRAME-COL',
- 'FRAME-DB',
- 'FRAME-DOWN',
- 'FRAME-FIELD',
- 'FRAME-FILE',
- 'FRAME-INDE',
- 'FRAME-INDEX',
- 'FRAME-LINE',
- 'FRAME-NAME',
- 'FRAME-ROW',
- 'FRAME-SPA',
- 'FRAME-SPAC',
- 'FRAME-SPACI',
- 'FRAME-SPACIN',
- 'FRAME-SPACING',
- 'FRAME-VAL',
- 'FRAME-VALU',
- 'FRAME-VALUE',
- 'FRAME-X',
- 'FRAME-Y',
- 'FREQUENCY',
- 'FROM',
- 'FROM-C',
- 'FROM-CH',
- 'FROM-CHA',
- 'FROM-CHAR',
- 'FROM-CHARS',
- 'FROM-CUR',
- 'FROM-CURR',
- 'FROM-CURRE',
- 'FROM-CURREN',
- 'FROM-CURRENT',
- 'FROM-P',
- 'FROM-PI',
- 'FROM-PIX',
- 'FROM-PIXE',
- 'FROM-PIXEL',
- 'FROM-PIXELS',
- 'FULL-HEIGHT',
- 'FULL-HEIGHT-',
- 'FULL-HEIGHT-C',
- 'FULL-HEIGHT-CH',
- 'FULL-HEIGHT-CHA',
- 'FULL-HEIGHT-CHAR',
- 'FULL-HEIGHT-CHARS',
- 'FULL-HEIGHT-P',
- 'FULL-HEIGHT-PI',
- 'FULL-HEIGHT-PIX',
- 'FULL-HEIGHT-PIXE',
- 'FULL-HEIGHT-PIXEL',
- 'FULL-HEIGHT-PIXELS',
- 'FULL-PATHN',
- 'FULL-PATHNA',
- 'FULL-PATHNAM',
- 'FULL-PATHNAME',
- 'FULL-WIDTH',
- 'FULL-WIDTH-',
- 'FULL-WIDTH-C',
- 'FULL-WIDTH-CH',
- 'FULL-WIDTH-CHA',
- 'FULL-WIDTH-CHAR',
- 'FULL-WIDTH-CHARS',
- 'FULL-WIDTH-P',
- 'FULL-WIDTH-PI',
- 'FULL-WIDTH-PIX',
- 'FULL-WIDTH-PIXE',
- 'FULL-WIDTH-PIXEL',
- 'FULL-WIDTH-PIXELS',
- 'FUNCTION',
- 'FUNCTION-CALL-TYPE',
- 'GATEWAY',
- 'GATEWAYS',
- 'GE',
- 'GENERATE-MD5',
- 'GENERATE-PBE-KEY',
- 'GENERATE-PBE-SALT',
- 'GENERATE-RANDOM-KEY',
- 'GENERATE-UUID',
- 'GET',
- 'GET-ATTR-CALL-TYPE',
- 'GET-ATTRIBUTE-NODE',
- 'GET-BINARY-DATA',
- 'GET-BLUE',
- 'GET-BLUE-',
- 'GET-BLUE-V',
- 'GET-BLUE-VA',
- 'GET-BLUE-VAL',
- 'GET-BLUE-VALU',
- 'GET-BLUE-VALUE',
- 'GET-BROWSE-COLUMN',
- 'GET-BUFFER-HANDLE',
- 'GET-BYTE',
- 'GET-CALLBACK-PROC-CONTEXT',
- 'GET-CALLBACK-PROC-NAME',
- 'GET-CGI-LIST',
- 'GET-CGI-LONG-VALUE',
- 'GET-CGI-VALUE',
- 'GET-CLASS',
- 'GET-CODEPAGES',
- 'GET-COLLATIONS',
- 'GET-CONFIG-VALUE',
- 'GET-CURRENT',
- 'GET-DOUBLE',
- 'GET-DROPPED-FILE',
- 'GET-DYNAMIC',
- 'GET-ERROR-COLUMN',
- 'GET-ERROR-ROW',
- 'GET-FILE',
- 'GET-FILE-NAME',
- 'GET-FILE-OFFSE',
- 'GET-FILE-OFFSET',
- 'GET-FIRST',
- 'GET-FLOAT',
- 'GET-GREEN',
- 'GET-GREEN-',
- 'GET-GREEN-V',
- 'GET-GREEN-VA',
- 'GET-GREEN-VAL',
- 'GET-GREEN-VALU',
- 'GET-GREEN-VALUE',
- 'GET-INDEX-BY-NAMESPACE-NAME',
- 'GET-INDEX-BY-QNAME',
- 'GET-INT64',
- 'GET-ITERATION',
- 'GET-KEY-VAL',
- 'GET-KEY-VALU',
- 'GET-KEY-VALUE',
- 'GET-LAST',
- 'GET-LOCALNAME-BY-INDEX',
- 'GET-LONG',
- 'GET-MESSAGE',
- 'GET-NEXT',
- 'GET-NUMBER',
- 'GET-POINTER-VALUE',
- 'GET-PREV',
- 'GET-PRINTERS',
- 'GET-PROPERTY',
- 'GET-QNAME-BY-INDEX',
- 'GET-RED',
- 'GET-RED-',
- 'GET-RED-V',
- 'GET-RED-VA',
- 'GET-RED-VAL',
- 'GET-RED-VALU',
- 'GET-RED-VALUE',
- 'GET-REPOSITIONED-ROW',
- 'GET-RGB-VALUE',
- 'GET-SELECTED',
- 'GET-SELECTED-',
- 'GET-SELECTED-W',
- 'GET-SELECTED-WI',
- 'GET-SELECTED-WID',
- 'GET-SELECTED-WIDG',
- 'GET-SELECTED-WIDGE',
- 'GET-SELECTED-WIDGET',
- 'GET-SHORT',
- 'GET-SIGNATURE',
- 'GET-SIZE',
- 'GET-STRING',
- 'GET-TAB-ITEM',
- 'GET-TEXT-HEIGHT',
- 'GET-TEXT-HEIGHT-',
- 'GET-TEXT-HEIGHT-C',
- 'GET-TEXT-HEIGHT-CH',
- 'GET-TEXT-HEIGHT-CHA',
- 'GET-TEXT-HEIGHT-CHAR',
- 'GET-TEXT-HEIGHT-CHARS',
- 'GET-TEXT-HEIGHT-P',
- 'GET-TEXT-HEIGHT-PI',
- 'GET-TEXT-HEIGHT-PIX',
- 'GET-TEXT-HEIGHT-PIXE',
- 'GET-TEXT-HEIGHT-PIXEL',
- 'GET-TEXT-HEIGHT-PIXELS',
- 'GET-TEXT-WIDTH',
- 'GET-TEXT-WIDTH-',
- 'GET-TEXT-WIDTH-C',
- 'GET-TEXT-WIDTH-CH',
- 'GET-TEXT-WIDTH-CHA',
- 'GET-TEXT-WIDTH-CHAR',
- 'GET-TEXT-WIDTH-CHARS',
- 'GET-TEXT-WIDTH-P',
- 'GET-TEXT-WIDTH-PI',
- 'GET-TEXT-WIDTH-PIX',
- 'GET-TEXT-WIDTH-PIXE',
- 'GET-TEXT-WIDTH-PIXEL',
- 'GET-TEXT-WIDTH-PIXELS',
- 'GET-TYPE-BY-INDEX',
- 'GET-TYPE-BY-NAMESPACE-NAME',
- 'GET-TYPE-BY-QNAME',
- 'GET-UNSIGNED-LONG',
- 'GET-UNSIGNED-SHORT',
- 'GET-URI-BY-INDEX',
- 'GET-VALUE-BY-INDEX',
- 'GET-VALUE-BY-NAMESPACE-NAME',
- 'GET-VALUE-BY-QNAME',
- 'GET-WAIT-STATE',
- 'GETBYTE',
- 'GLOBAL',
- 'GO-ON',
- 'GO-PEND',
- 'GO-PENDI',
- 'GO-PENDIN',
- 'GO-PENDING',
- 'GRANT',
- 'GRAPHIC-E',
- 'GRAPHIC-ED',
- 'GRAPHIC-EDG',
- 'GRAPHIC-EDGE',
- 'GRID-FACTOR-H',
- 'GRID-FACTOR-HO',
- 'GRID-FACTOR-HOR',
- 'GRID-FACTOR-HORI',
- 'GRID-FACTOR-HORIZ',
- 'GRID-FACTOR-HORIZO',
- 'GRID-FACTOR-HORIZON',
- 'GRID-FACTOR-HORIZONT',
- 'GRID-FACTOR-HORIZONTA',
- 'GRID-FACTOR-HORIZONTAL',
- 'GRID-FACTOR-V',
- 'GRID-FACTOR-VE',
- 'GRID-FACTOR-VER',
- 'GRID-FACTOR-VERT',
- 'GRID-FACTOR-VERTI',
- 'GRID-FACTOR-VERTIC',
- 'GRID-FACTOR-VERTICA',
- 'GRID-FACTOR-VERTICAL',
- 'GRID-SNAP',
- 'GRID-UNIT-HEIGHT',
- 'GRID-UNIT-HEIGHT-',
- 'GRID-UNIT-HEIGHT-C',
- 'GRID-UNIT-HEIGHT-CH',
- 'GRID-UNIT-HEIGHT-CHA',
- 'GRID-UNIT-HEIGHT-CHARS',
- 'GRID-UNIT-HEIGHT-P',
- 'GRID-UNIT-HEIGHT-PI',
- 'GRID-UNIT-HEIGHT-PIX',
- 'GRID-UNIT-HEIGHT-PIXE',
- 'GRID-UNIT-HEIGHT-PIXEL',
- 'GRID-UNIT-HEIGHT-PIXELS',
- 'GRID-UNIT-WIDTH',
- 'GRID-UNIT-WIDTH-',
- 'GRID-UNIT-WIDTH-C',
- 'GRID-UNIT-WIDTH-CH',
- 'GRID-UNIT-WIDTH-CHA',
- 'GRID-UNIT-WIDTH-CHAR',
- 'GRID-UNIT-WIDTH-CHARS',
- 'GRID-UNIT-WIDTH-P',
- 'GRID-UNIT-WIDTH-PI',
- 'GRID-UNIT-WIDTH-PIX',
- 'GRID-UNIT-WIDTH-PIXE',
- 'GRID-UNIT-WIDTH-PIXEL',
- 'GRID-UNIT-WIDTH-PIXELS',
- 'GRID-VISIBLE',
- 'GROUP',
- 'GT',
- 'GUID',
- 'HANDLE',
- 'HANDLER',
- 'HAS-RECORDS',
- 'HAVING',
- 'HEADER',
- 'HEIGHT',
- 'HEIGHT-',
- 'HEIGHT-C',
- 'HEIGHT-CH',
- 'HEIGHT-CHA',
- 'HEIGHT-CHAR',
- 'HEIGHT-CHARS',
- 'HEIGHT-P',
- 'HEIGHT-PI',
- 'HEIGHT-PIX',
- 'HEIGHT-PIXE',
- 'HEIGHT-PIXEL',
- 'HEIGHT-PIXELS',
- 'HELP',
- 'HEX-DECODE',
- 'HEX-ENCODE',
- 'HIDDEN',
- 'HIDE',
- 'HORI',
- 'HORIZ',
- 'HORIZO',
- 'HORIZON',
- 'HORIZONT',
- 'HORIZONTA',
- 'HORIZONTAL',
- 'HOST-BYTE-ORDER',
- 'HTML-CHARSET',
- 'HTML-END-OF-LINE',
- 'HTML-END-OF-PAGE',
- 'HTML-FRAME-BEGIN',
- 'HTML-FRAME-END',
- 'HTML-HEADER-BEGIN',
- 'HTML-HEADER-END',
- 'HTML-TITLE-BEGIN',
- 'HTML-TITLE-END',
- 'HWND',
- 'ICON',
- 'IF',
- 'IMAGE',
- 'IMAGE-DOWN',
- 'IMAGE-INSENSITIVE',
- 'IMAGE-SIZE',
- 'IMAGE-SIZE-C',
- 'IMAGE-SIZE-CH',
- 'IMAGE-SIZE-CHA',
- 'IMAGE-SIZE-CHAR',
- 'IMAGE-SIZE-CHARS',
- 'IMAGE-SIZE-P',
- 'IMAGE-SIZE-PI',
- 'IMAGE-SIZE-PIX',
- 'IMAGE-SIZE-PIXE',
- 'IMAGE-SIZE-PIXEL',
- 'IMAGE-SIZE-PIXELS',
- 'IMAGE-UP',
- 'IMMEDIATE-DISPLAY',
- 'IMPLEMENTS',
- 'IMPORT',
- 'IMPORT-PRINCIPAL',
- 'IN',
- 'IN-HANDLE',
- 'INCREMENT-EXCLUSIVE-ID',
- 'INDEX',
- 'INDEX-HINT',
- 'INDEX-INFORMATION',
- 'INDEXED-REPOSITION',
- 'INDICATOR',
- 'INFO',
- 'INFOR',
- 'INFORM',
- 'INFORMA',
- 'INFORMAT',
- 'INFORMATI',
- 'INFORMATIO',
- 'INFORMATION',
- 'INHERIT-BGC',
- 'INHERIT-BGCO',
- 'INHERIT-BGCOL',
- 'INHERIT-BGCOLO',
- 'INHERIT-BGCOLOR',
- 'INHERIT-FGC',
- 'INHERIT-FGCO',
- 'INHERIT-FGCOL',
- 'INHERIT-FGCOLO',
- 'INHERIT-FGCOLOR',
- 'INHERITS',
- 'INIT',
- 'INITI',
- 'INITIA',
- 'INITIAL',
- 'INITIAL-DIR',
- 'INITIAL-FILTER',
- 'INITIALIZE-DOCUMENT-TYPE',
- 'INITIATE',
- 'INNER-CHARS',
- 'INNER-LINES',
- 'INPUT',
- 'INPUT-O',
- 'INPUT-OU',
- 'INPUT-OUT',
- 'INPUT-OUTP',
- 'INPUT-OUTPU',
- 'INPUT-OUTPUT',
- 'INPUT-VALUE',
- 'INSERT',
- 'INSERT-ATTRIBUTE',
- 'INSERT-B',
- 'INSERT-BA',
- 'INSERT-BAC',
- 'INSERT-BACK',
- 'INSERT-BACKT',
- 'INSERT-BACKTA',
- 'INSERT-BACKTAB',
- 'INSERT-FILE',
- 'INSERT-ROW',
- 'INSERT-STRING',
- 'INSERT-T',
- 'INSERT-TA',
- 'INSERT-TAB',
- 'INT64',
- 'INT',
- 'INTEGER',
- 'INTERFACE',
- 'INTERNAL-ENTRIES',
- 'INTO',
- 'INVOKE',
- 'IS',
- 'IS-ATTR',
- 'IS-ATTR-',
- 'IS-ATTR-S',
- 'IS-ATTR-SP',
- 'IS-ATTR-SPA',
- 'IS-ATTR-SPAC',
- 'IS-ATTR-SPACE',
- 'IS-CLASS',
- 'IS-JSON',
- 'IS-LEAD-BYTE',
- 'IS-OPEN',
- 'IS-PARAMETER-SET',
- 'IS-PARTITIONED',
- 'IS-ROW-SELECTED',
- 'IS-SELECTED',
- 'IS-XML',
- 'ITEM',
- 'ITEMS-PER-ROW',
- 'JOIN',
- 'JOIN-BY-SQLDB',
- 'KBLABEL',
- 'KEEP-CONNECTION-OPEN',
- 'KEEP-FRAME-Z',
- 'KEEP-FRAME-Z-',
- 'KEEP-FRAME-Z-O',
- 'KEEP-FRAME-Z-OR',
- 'KEEP-FRAME-Z-ORD',
- 'KEEP-FRAME-Z-ORDE',
- 'KEEP-FRAME-Z-ORDER',
- 'KEEP-MESSAGES',
- 'KEEP-SECURITY-CACHE',
- 'KEEP-TAB-ORDER',
- 'KEY',
- 'KEY-CODE',
- 'KEY-FUNC',
- 'KEY-FUNCT',
- 'KEY-FUNCTI',
- 'KEY-FUNCTIO',
- 'KEY-FUNCTION',
- 'KEY-LABEL',
- 'KEYCODE',
- 'KEYFUNC',
- 'KEYFUNCT',
- 'KEYFUNCTI',
- 'KEYFUNCTIO',
- 'KEYFUNCTION',
- 'KEYLABEL',
- 'KEYS',
- 'KEYWORD',
- 'KEYWORD-ALL',
- 'LABEL',
- 'LABEL-BGC',
- 'LABEL-BGCO',
- 'LABEL-BGCOL',
- 'LABEL-BGCOLO',
- 'LABEL-BGCOLOR',
- 'LABEL-DC',
- 'LABEL-DCO',
- 'LABEL-DCOL',
- 'LABEL-DCOLO',
- 'LABEL-DCOLOR',
- 'LABEL-FGC',
- 'LABEL-FGCO',
- 'LABEL-FGCOL',
- 'LABEL-FGCOLO',
- 'LABEL-FGCOLOR',
- 'LABEL-FONT',
- 'LABEL-PFC',
- 'LABEL-PFCO',
- 'LABEL-PFCOL',
- 'LABEL-PFCOLO',
- 'LABEL-PFCOLOR',
- 'LABELS',
- 'LABELS-HAVE-COLONS',
- 'LANDSCAPE',
- 'LANGUAGE',
- 'LANGUAGES',
- 'LARGE',
- 'LARGE-TO-SMALL',
- 'LAST',
- 'LAST-ASYNCH-REQUEST',
- 'LAST-BATCH',
- 'LAST-CHILD',
- 'LAST-EVEN',
- 'LAST-EVENT',
- 'LAST-FORM',
- 'LAST-KEY',
- 'LAST-OBJECT',
- 'LAST-OF',
- 'LAST-PROCE',
- 'LAST-PROCED',
- 'LAST-PROCEDU',
- 'LAST-PROCEDUR',
- 'LAST-PROCEDURE',
- 'LAST-SERVER',
- 'LAST-TAB-I',
- 'LAST-TAB-IT',
- 'LAST-TAB-ITE',
- 'LAST-TAB-ITEM',
- 'LASTKEY',
- 'LC',
- 'LDBNAME',
- 'LE',
- 'LEAVE',
- 'LEFT-ALIGN',
- 'LEFT-ALIGNE',
- 'LEFT-ALIGNED',
- 'LEFT-TRIM',
- 'LENGTH',
- 'LIBRARY',
- 'LIKE',
- 'LIKE-SEQUENTIAL',
- 'LINE',
- 'LINE-COUNT',
- 'LINE-COUNTE',
- 'LINE-COUNTER',
- 'LIST-EVENTS',
- 'LIST-ITEM-PAIRS',
- 'LIST-ITEMS',
- 'LIST-PROPERTY-NAMES',
- 'LIST-QUERY-ATTRS',
- 'LIST-SET-ATTRS',
- 'LIST-WIDGETS',
- 'LISTI',
- 'LISTIN',
- 'LISTING',
- 'LITERAL-QUESTION',
- 'LITTLE-ENDIAN',
- 'LOAD',
- 'LOAD-DOMAINS',
- 'LOAD-ICON',
- 'LOAD-IMAGE',
- 'LOAD-IMAGE-DOWN',
- 'LOAD-IMAGE-INSENSITIVE',
- 'LOAD-IMAGE-UP',
- 'LOAD-MOUSE-P',
- 'LOAD-MOUSE-PO',
- 'LOAD-MOUSE-POI',
- 'LOAD-MOUSE-POIN',
- 'LOAD-MOUSE-POINT',
- 'LOAD-MOUSE-POINTE',
- 'LOAD-MOUSE-POINTER',
- 'LOAD-PICTURE',
- 'LOAD-SMALL-ICON',
- 'LOCAL-NAME',
- 'LOCAL-VERSION-INFO',
- 'LOCATOR-COLUMN-NUMBER',
- 'LOCATOR-LINE-NUMBER',
- 'LOCATOR-PUBLIC-ID',
- 'LOCATOR-SYSTEM-ID',
- 'LOCATOR-TYPE',
- 'LOCK-REGISTRATION',
- 'LOCKED',
- 'LOG',
- 'LOG-AUDIT-EVENT',
- 'LOG-MANAGER',
- 'LOGICAL',
- 'LOGIN-EXPIRATION-TIMESTAMP',
- 'LOGIN-HOST',
- 'LOGIN-STATE',
- 'LOGOUT',
- 'LONGCHAR',
- 'LOOKAHEAD',
- 'LOOKUP',
- 'LT',
- 'MACHINE-CLASS',
- 'MANDATORY',
- 'MANUAL-HIGHLIGHT',
- 'MAP',
- 'MARGIN-EXTRA',
- 'MARGIN-HEIGHT',
- 'MARGIN-HEIGHT-',
- 'MARGIN-HEIGHT-C',
- 'MARGIN-HEIGHT-CH',
- 'MARGIN-HEIGHT-CHA',
- 'MARGIN-HEIGHT-CHAR',
- 'MARGIN-HEIGHT-CHARS',
- 'MARGIN-HEIGHT-P',
- 'MARGIN-HEIGHT-PI',
- 'MARGIN-HEIGHT-PIX',
- 'MARGIN-HEIGHT-PIXE',
- 'MARGIN-HEIGHT-PIXEL',
- 'MARGIN-HEIGHT-PIXELS',
- 'MARGIN-WIDTH',
- 'MARGIN-WIDTH-',
- 'MARGIN-WIDTH-C',
- 'MARGIN-WIDTH-CH',
- 'MARGIN-WIDTH-CHA',
- 'MARGIN-WIDTH-CHAR',
- 'MARGIN-WIDTH-CHARS',
- 'MARGIN-WIDTH-P',
- 'MARGIN-WIDTH-PI',
- 'MARGIN-WIDTH-PIX',
- 'MARGIN-WIDTH-PIXE',
- 'MARGIN-WIDTH-PIXEL',
- 'MARGIN-WIDTH-PIXELS',
- 'MARK-NEW',
- 'MARK-ROW-STATE',
- 'MATCHES',
- 'MAX',
- 'MAX-BUTTON',
- 'MAX-CHARS',
- 'MAX-DATA-GUESS',
- 'MAX-HEIGHT',
- 'MAX-HEIGHT-C',
- 'MAX-HEIGHT-CH',
- 'MAX-HEIGHT-CHA',
- 'MAX-HEIGHT-CHAR',
- 'MAX-HEIGHT-CHARS',
- 'MAX-HEIGHT-P',
- 'MAX-HEIGHT-PI',
- 'MAX-HEIGHT-PIX',
- 'MAX-HEIGHT-PIXE',
- 'MAX-HEIGHT-PIXEL',
- 'MAX-HEIGHT-PIXELS',
- 'MAX-ROWS',
- 'MAX-SIZE',
- 'MAX-VAL',
- 'MAX-VALU',
- 'MAX-VALUE',
- 'MAX-WIDTH',
- 'MAX-WIDTH-',
- 'MAX-WIDTH-C',
- 'MAX-WIDTH-CH',
- 'MAX-WIDTH-CHA',
- 'MAX-WIDTH-CHAR',
- 'MAX-WIDTH-CHARS',
- 'MAX-WIDTH-P',
- 'MAX-WIDTH-PI',
- 'MAX-WIDTH-PIX',
- 'MAX-WIDTH-PIXE',
- 'MAX-WIDTH-PIXEL',
- 'MAX-WIDTH-PIXELS',
- 'MAXI',
- 'MAXIM',
- 'MAXIMIZE',
- 'MAXIMU',
- 'MAXIMUM',
- 'MAXIMUM-LEVEL',
- 'MD5-DIGEST',
- 'MEMBER',
- 'MEMPTR-TO-NODE-VALUE',
- 'MENU',
- 'MENU-BAR',
- 'MENU-ITEM',
- 'MENU-K',
- 'MENU-KE',
- 'MENU-KEY',
- 'MENU-M',
- 'MENU-MO',
- 'MENU-MOU',
- 'MENU-MOUS',
- 'MENU-MOUSE',
- 'MENUBAR',
- 'MERGE-BY-FIELD',
- 'MESSAGE',
- 'MESSAGE-AREA',
- 'MESSAGE-AREA-FONT',
- 'MESSAGE-LINES',
- 'METHOD',
- 'MIN',
- 'MIN-BUTTON',
- 'MIN-COLUMN-WIDTH-C',
- 'MIN-COLUMN-WIDTH-CH',
- 'MIN-COLUMN-WIDTH-CHA',
- 'MIN-COLUMN-WIDTH-CHAR',
- 'MIN-COLUMN-WIDTH-CHARS',
- 'MIN-COLUMN-WIDTH-P',
- 'MIN-COLUMN-WIDTH-PI',
- 'MIN-COLUMN-WIDTH-PIX',
- 'MIN-COLUMN-WIDTH-PIXE',
- 'MIN-COLUMN-WIDTH-PIXEL',
- 'MIN-COLUMN-WIDTH-PIXELS',
- 'MIN-HEIGHT',
- 'MIN-HEIGHT-',
- 'MIN-HEIGHT-C',
- 'MIN-HEIGHT-CH',
- 'MIN-HEIGHT-CHA',
- 'MIN-HEIGHT-CHAR',
- 'MIN-HEIGHT-CHARS',
- 'MIN-HEIGHT-P',
- 'MIN-HEIGHT-PI',
- 'MIN-HEIGHT-PIX',
- 'MIN-HEIGHT-PIXE',
- 'MIN-HEIGHT-PIXEL',
- 'MIN-HEIGHT-PIXELS',
- 'MIN-SIZE',
- 'MIN-VAL',
- 'MIN-VALU',
- 'MIN-VALUE',
- 'MIN-WIDTH',
- 'MIN-WIDTH-',
- 'MIN-WIDTH-C',
- 'MIN-WIDTH-CH',
- 'MIN-WIDTH-CHA',
- 'MIN-WIDTH-CHAR',
- 'MIN-WIDTH-CHARS',
- 'MIN-WIDTH-P',
- 'MIN-WIDTH-PI',
- 'MIN-WIDTH-PIX',
- 'MIN-WIDTH-PIXE',
- 'MIN-WIDTH-PIXEL',
- 'MIN-WIDTH-PIXELS',
- 'MINI',
- 'MINIM',
- 'MINIMU',
- 'MINIMUM',
- 'MOD',
- 'MODIFIED',
- 'MODU',
- 'MODUL',
- 'MODULO',
- 'MONTH',
- 'MOUSE',
- 'MOUSE-P',
- 'MOUSE-PO',
- 'MOUSE-POI',
- 'MOUSE-POIN',
- 'MOUSE-POINT',
- 'MOUSE-POINTE',
- 'MOUSE-POINTER',
- 'MOVABLE',
- 'MOVE-AFTER',
- 'MOVE-AFTER-',
- 'MOVE-AFTER-T',
- 'MOVE-AFTER-TA',
- 'MOVE-AFTER-TAB',
- 'MOVE-AFTER-TAB-',
- 'MOVE-AFTER-TAB-I',
- 'MOVE-AFTER-TAB-IT',
- 'MOVE-AFTER-TAB-ITE',
- 'MOVE-AFTER-TAB-ITEM',
- 'MOVE-BEFOR',
- 'MOVE-BEFORE',
- 'MOVE-BEFORE-',
- 'MOVE-BEFORE-T',
- 'MOVE-BEFORE-TA',
- 'MOVE-BEFORE-TAB',
- 'MOVE-BEFORE-TAB-',
- 'MOVE-BEFORE-TAB-I',
- 'MOVE-BEFORE-TAB-IT',
- 'MOVE-BEFORE-TAB-ITE',
- 'MOVE-BEFORE-TAB-ITEM',
- 'MOVE-COL',
- 'MOVE-COLU',
- 'MOVE-COLUM',
- 'MOVE-COLUMN',
- 'MOVE-TO-B',
- 'MOVE-TO-BO',
- 'MOVE-TO-BOT',
- 'MOVE-TO-BOTT',
- 'MOVE-TO-BOTTO',
- 'MOVE-TO-BOTTOM',
- 'MOVE-TO-EOF',
- 'MOVE-TO-T',
- 'MOVE-TO-TO',
- 'MOVE-TO-TOP',
- 'MPE',
- 'MTIME',
- 'MULTI-COMPILE',
- 'MULTIPLE',
- 'MULTIPLE-KEY',
- 'MULTITASKING-INTERVAL',
- 'MUST-EXIST',
- 'NAME',
- 'NAMESPACE-PREFIX',
- 'NAMESPACE-URI',
- 'NATIVE',
- 'NE',
- 'NEEDS-APPSERVER-PROMPT',
- 'NEEDS-PROMPT',
- 'NEW',
- 'NEW-INSTANCE',
- 'NEW-ROW',
- 'NEXT',
- 'NEXT-COLUMN',
- 'NEXT-PROMPT',
- 'NEXT-ROWID',
- 'NEXT-SIBLING',
- 'NEXT-TAB-I',
- 'NEXT-TAB-IT',
- 'NEXT-TAB-ITE',
- 'NEXT-TAB-ITEM',
- 'NEXT-VALUE',
- 'NO',
- 'NO-APPLY',
- 'NO-ARRAY-MESSAGE',
- 'NO-ASSIGN',
- 'NO-ATTR',
- 'NO-ATTR-',
- 'NO-ATTR-L',
- 'NO-ATTR-LI',
- 'NO-ATTR-LIS',
- 'NO-ATTR-LIST',
- 'NO-ATTR-S',
- 'NO-ATTR-SP',
- 'NO-ATTR-SPA',
- 'NO-ATTR-SPAC',
- 'NO-ATTR-SPACE',
- 'NO-AUTO-VALIDATE',
- 'NO-BIND-WHERE',
- 'NO-BOX',
- 'NO-CONSOLE',
- 'NO-CONVERT',
- 'NO-CONVERT-3D-COLORS',
- 'NO-CURRENT-VALUE',
- 'NO-DEBUG',
- 'NO-DRAG',
- 'NO-ECHO',
- 'NO-EMPTY-SPACE',
- 'NO-ERROR',
- 'NO-F',
- 'NO-FI',
- 'NO-FIL',
- 'NO-FILL',
- 'NO-FOCUS',
- 'NO-HELP',
- 'NO-HIDE',
- 'NO-INDEX-HINT',
- 'NO-INHERIT-BGC',
- 'NO-INHERIT-BGCO',
- 'NO-INHERIT-BGCOLOR',
- 'NO-INHERIT-FGC',
- 'NO-INHERIT-FGCO',
- 'NO-INHERIT-FGCOL',
- 'NO-INHERIT-FGCOLO',
- 'NO-INHERIT-FGCOLOR',
- 'NO-JOIN-BY-SQLDB',
- 'NO-LABE',
- 'NO-LABELS',
- 'NO-LOBS',
- 'NO-LOCK',
- 'NO-LOOKAHEAD',
- 'NO-MAP',
- 'NO-MES',
- 'NO-MESS',
- 'NO-MESSA',
- 'NO-MESSAG',
- 'NO-MESSAGE',
- 'NO-PAUSE',
- 'NO-PREFE',
- 'NO-PREFET',
- 'NO-PREFETC',
- 'NO-PREFETCH',
- 'NO-ROW-MARKERS',
- 'NO-SCROLLBAR-VERTICAL',
- 'NO-SEPARATE-CONNECTION',
- 'NO-SEPARATORS',
- 'NO-TAB-STOP',
- 'NO-UND',
- 'NO-UNDE',
- 'NO-UNDER',
- 'NO-UNDERL',
- 'NO-UNDERLI',
- 'NO-UNDERLIN',
- 'NO-UNDERLINE',
- 'NO-UNDO',
- 'NO-VAL',
- 'NO-VALI',
- 'NO-VALID',
- 'NO-VALIDA',
- 'NO-VALIDAT',
- 'NO-VALIDATE',
- 'NO-WAIT',
- 'NO-WORD-WRAP',
- 'NODE-VALUE-TO-MEMPTR',
- 'NONAMESPACE-SCHEMA-LOCATION',
- 'NONE',
- 'NORMALIZE',
- 'NOT',
- 'NOT-ACTIVE',
- 'NOW',
- 'NULL',
- 'NUM-ALI',
- 'NUM-ALIA',
- 'NUM-ALIAS',
- 'NUM-ALIASE',
- 'NUM-ALIASES',
- 'NUM-BUFFERS',
- 'NUM-BUT',
- 'NUM-BUTT',
- 'NUM-BUTTO',
- 'NUM-BUTTON',
- 'NUM-BUTTONS',
- 'NUM-COL',
- 'NUM-COLU',
- 'NUM-COLUM',
- 'NUM-COLUMN',
- 'NUM-COLUMNS',
- 'NUM-COPIES',
- 'NUM-DBS',
- 'NUM-DROPPED-FILES',
- 'NUM-ENTRIES',
- 'NUM-FIELDS',
- 'NUM-FORMATS',
- 'NUM-ITEMS',
- 'NUM-ITERATIONS',
- 'NUM-LINES',
- 'NUM-LOCKED-COL',
- 'NUM-LOCKED-COLU',
- 'NUM-LOCKED-COLUM',
- 'NUM-LOCKED-COLUMN',
- 'NUM-LOCKED-COLUMNS',
- 'NUM-MESSAGES',
- 'NUM-PARAMETERS',
- 'NUM-REFERENCES',
- 'NUM-REPLACED',
- 'NUM-RESULTS',
- 'NUM-SELECTED',
- 'NUM-SELECTED-',
- 'NUM-SELECTED-ROWS',
- 'NUM-SELECTED-W',
- 'NUM-SELECTED-WI',
- 'NUM-SELECTED-WID',
- 'NUM-SELECTED-WIDG',
- 'NUM-SELECTED-WIDGE',
- 'NUM-SELECTED-WIDGET',
- 'NUM-SELECTED-WIDGETS',
- 'NUM-TABS',
- 'NUM-TO-RETAIN',
- 'NUM-VISIBLE-COLUMNS',
- 'NUMERIC',
- 'NUMERIC-F',
- 'NUMERIC-FO',
- 'NUMERIC-FOR',
- 'NUMERIC-FORM',
- 'NUMERIC-FORMA',
- 'NUMERIC-FORMAT',
- 'OCTET-LENGTH',
- 'OF',
- 'OFF',
- 'OK',
- 'OK-CANCEL',
- 'OLD',
- 'ON',
- 'ON-FRAME',
- 'ON-FRAME-',
- 'ON-FRAME-B',
- 'ON-FRAME-BO',
- 'ON-FRAME-BOR',
- 'ON-FRAME-BORD',
- 'ON-FRAME-BORDE',
- 'ON-FRAME-BORDER',
- 'OPEN',
- 'OPSYS',
- 'OPTION',
- 'OR',
- 'ORDERED-JOIN',
- 'ORDINAL',
- 'OS-APPEND',
- 'OS-COMMAND',
- 'OS-COPY',
- 'OS-CREATE-DIR',
- 'OS-DELETE',
- 'OS-DIR',
- 'OS-DRIVE',
- 'OS-DRIVES',
- 'OS-ERROR',
- 'OS-GETENV',
- 'OS-RENAME',
- 'OTHERWISE',
- 'OUTPUT',
- 'OVERLAY',
- 'OVERRIDE',
- 'OWNER',
- 'PAGE',
- 'PAGE-BOT',
- 'PAGE-BOTT',
- 'PAGE-BOTTO',
- 'PAGE-BOTTOM',
- 'PAGE-NUM',
- 'PAGE-NUMB',
- 'PAGE-NUMBE',
- 'PAGE-NUMBER',
- 'PAGE-SIZE',
- 'PAGE-TOP',
- 'PAGE-WID',
- 'PAGE-WIDT',
- 'PAGE-WIDTH',
- 'PAGED',
- 'PARAM',
- 'PARAME',
- 'PARAMET',
- 'PARAMETE',
- 'PARAMETER',
- 'PARENT',
- 'PARSE-STATUS',
- 'PARTIAL-KEY',
- 'PASCAL',
- 'PASSWORD-FIELD',
- 'PATHNAME',
- 'PAUSE',
- 'PBE-HASH-ALG',
- 'PBE-HASH-ALGO',
- 'PBE-HASH-ALGOR',
- 'PBE-HASH-ALGORI',
- 'PBE-HASH-ALGORIT',
- 'PBE-HASH-ALGORITH',
- 'PBE-HASH-ALGORITHM',
- 'PBE-KEY-ROUNDS',
- 'PDBNAME',
- 'PERSIST',
- 'PERSISTE',
- 'PERSISTEN',
- 'PERSISTENT',
- 'PERSISTENT-CACHE-DISABLED',
- 'PFC',
- 'PFCO',
- 'PFCOL',
- 'PFCOLO',
- 'PFCOLOR',
- 'PIXELS',
- 'PIXELS-PER-COL',
- 'PIXELS-PER-COLU',
- 'PIXELS-PER-COLUM',
- 'PIXELS-PER-COLUMN',
- 'PIXELS-PER-ROW',
- 'POPUP-M',
- 'POPUP-ME',
- 'POPUP-MEN',
- 'POPUP-MENU',
- 'POPUP-O',
- 'POPUP-ON',
- 'POPUP-ONL',
- 'POPUP-ONLY',
- 'PORTRAIT',
- 'POSITION',
- 'PRECISION',
- 'PREFER-DATASET',
- 'PREPARE-STRING',
- 'PREPARED',
- 'PREPROC',
- 'PREPROCE',
- 'PREPROCES',
- 'PREPROCESS',
- 'PRESEL',
- 'PRESELE',
- 'PRESELEC',
- 'PRESELECT',
- 'PREV',
- 'PREV-COLUMN',
- 'PREV-SIBLING',
- 'PREV-TAB-I',
- 'PREV-TAB-IT',
- 'PREV-TAB-ITE',
- 'PREV-TAB-ITEM',
- 'PRIMARY',
- 'PRINTER',
- 'PRINTER-CONTROL-HANDLE',
- 'PRINTER-HDC',
- 'PRINTER-NAME',
- 'PRINTER-PORT',
- 'PRINTER-SETUP',
- 'PRIVATE',
- 'PRIVATE-D',
- 'PRIVATE-DA',
- 'PRIVATE-DAT',
- 'PRIVATE-DATA',
- 'PRIVILEGES',
- 'PROC-HA',
- 'PROC-HAN',
- 'PROC-HAND',
- 'PROC-HANDL',
- 'PROC-HANDLE',
- 'PROC-ST',
- 'PROC-STA',
- 'PROC-STAT',
- 'PROC-STATU',
- 'PROC-STATUS',
- 'PROC-TEXT',
- 'PROC-TEXT-BUFFER',
- 'PROCE',
- 'PROCED',
- 'PROCEDU',
- 'PROCEDUR',
- 'PROCEDURE',
- 'PROCEDURE-CALL-TYPE',
- 'PROCEDURE-TYPE',
- 'PROCESS',
- 'PROFILER',
- 'PROGRAM-NAME',
- 'PROGRESS',
- 'PROGRESS-S',
- 'PROGRESS-SO',
- 'PROGRESS-SOU',
- 'PROGRESS-SOUR',
- 'PROGRESS-SOURC',
- 'PROGRESS-SOURCE',
- 'PROMPT',
- 'PROMPT-F',
- 'PROMPT-FO',
- 'PROMPT-FOR',
- 'PROMSGS',
- 'PROPATH',
- 'PROPERTY',
- 'PROTECTED',
- 'PROVERS',
- 'PROVERSI',
- 'PROVERSIO',
- 'PROVERSION',
- 'PROXY',
- 'PROXY-PASSWORD',
- 'PROXY-USERID',
- 'PUBLIC',
- 'PUBLIC-ID',
- 'PUBLISH',
- 'PUBLISHED-EVENTS',
- 'PUT',
- 'PUT-BYTE',
- 'PUT-DOUBLE',
- 'PUT-FLOAT',
- 'PUT-INT64',
- 'PUT-KEY-VAL',
- 'PUT-KEY-VALU',
- 'PUT-KEY-VALUE',
- 'PUT-LONG',
- 'PUT-SHORT',
- 'PUT-STRING',
- 'PUT-UNSIGNED-LONG',
- 'PUTBYTE',
- 'QUERY',
- 'QUERY-CLOSE',
- 'QUERY-OFF-END',
- 'QUERY-OPEN',
- 'QUERY-PREPARE',
- 'QUERY-TUNING',
- 'QUESTION',
- 'QUIT',
- 'QUOTER',
- 'R-INDEX',
- 'RADIO-BUTTONS',
- 'RADIO-SET',
- 'RANDOM',
- 'RAW',
- 'RAW-TRANSFER',
- 'RCODE-INFO',
- 'RCODE-INFOR',
- 'RCODE-INFORM',
- 'RCODE-INFORMA',
- 'RCODE-INFORMAT',
- 'RCODE-INFORMATI',
- 'RCODE-INFORMATIO',
- 'RCODE-INFORMATION',
- 'READ-AVAILABLE',
- 'READ-EXACT-NUM',
- 'READ-FILE',
- 'READ-JSON',
- 'READ-ONLY',
- 'READ-XML',
- 'READ-XMLSCHEMA',
- 'READKEY',
- 'REAL',
- 'RECID',
- 'RECORD-LENGTH',
- 'RECT',
- 'RECTA',
- 'RECTAN',
- 'RECTANG',
- 'RECTANGL',
- 'RECTANGLE',
- 'RECURSIVE',
- 'REFERENCE-ONLY',
- 'REFRESH',
- 'REFRESH-AUDIT-POLICY',
- 'REFRESHABLE',
- 'REGISTER-DOMAIN',
- 'RELEASE',
- 'REMOTE',
- 'REMOVE-EVENTS-PROCEDURE',
- 'REMOVE-SUPER-PROCEDURE',
- 'REPEAT',
- 'REPLACE',
- 'REPLACE-SELECTION-TEXT',
- 'REPOSITION',
- 'REPOSITION-BACKWARD',
- 'REPOSITION-FORWARD',
- 'REPOSITION-MODE',
- 'REPOSITION-TO-ROW',
- 'REPOSITION-TO-ROWID',
- 'REQUEST',
- 'REQUEST-INFO',
- 'RESET',
- 'RESIZA',
- 'RESIZAB',
- 'RESIZABL',
- 'RESIZABLE',
- 'RESIZE',
- 'RESPONSE-INFO',
- 'RESTART-ROW',
- 'RESTART-ROWID',
- 'RETAIN',
- 'RETAIN-SHAPE',
- 'RETRY',
- 'RETRY-CANCEL',
- 'RETURN',
- 'RETURN-ALIGN',
- 'RETURN-ALIGNE',
- 'RETURN-INS',
- 'RETURN-INSE',
- 'RETURN-INSER',
- 'RETURN-INSERT',
- 'RETURN-INSERTE',
- 'RETURN-INSERTED',
- 'RETURN-TO-START-DI',
- 'RETURN-TO-START-DIR',
- 'RETURN-VAL',
- 'RETURN-VALU',
- 'RETURN-VALUE',
- 'RETURN-VALUE-DATA-TYPE',
- 'RETURNS',
- 'REVERSE-FROM',
- 'REVERT',
- 'REVOKE',
- 'RGB-VALUE',
- 'RIGHT-ALIGNED',
- 'RIGHT-TRIM',
- 'ROLES',
- 'ROUND',
- 'ROUTINE-LEVEL',
- 'ROW',
- 'ROW-HEIGHT-CHARS',
- 'ROW-HEIGHT-PIXELS',
- 'ROW-MARKERS',
- 'ROW-OF',
- 'ROW-RESIZABLE',
- 'ROWID',
- 'RULE',
- 'RUN',
- 'RUN-PROCEDURE',
- 'SAVE CACHE',
- 'SAVE',
- 'SAVE-AS',
- 'SAVE-FILE',
- 'SAX-COMPLE',
- 'SAX-COMPLET',
- 'SAX-COMPLETE',
- 'SAX-PARSE',
- 'SAX-PARSE-FIRST',
- 'SAX-PARSE-NEXT',
- 'SAX-PARSER-ERROR',
- 'SAX-RUNNING',
- 'SAX-UNINITIALIZED',
- 'SAX-WRITE-BEGIN',
- 'SAX-WRITE-COMPLETE',
- 'SAX-WRITE-CONTENT',
- 'SAX-WRITE-ELEMENT',
- 'SAX-WRITE-ERROR',
- 'SAX-WRITE-IDLE',
- 'SAX-WRITE-TAG',
- 'SAX-WRITER',
- 'SCHEMA',
- 'SCHEMA-LOCATION',
- 'SCHEMA-MARSHAL',
- 'SCHEMA-PATH',
- 'SCREEN',
- 'SCREEN-IO',
- 'SCREEN-LINES',
- 'SCREEN-VAL',
- 'SCREEN-VALU',
- 'SCREEN-VALUE',
- 'SCROLL',
- 'SCROLL-BARS',
- 'SCROLL-DELTA',
- 'SCROLL-OFFSET',
- 'SCROLL-TO-CURRENT-ROW',
- 'SCROLL-TO-I',
- 'SCROLL-TO-IT',
- 'SCROLL-TO-ITE',
- 'SCROLL-TO-ITEM',
- 'SCROLL-TO-SELECTED-ROW',
- 'SCROLLABLE',
- 'SCROLLBAR-H',
- 'SCROLLBAR-HO',
- 'SCROLLBAR-HOR',
- 'SCROLLBAR-HORI',
- 'SCROLLBAR-HORIZ',
- 'SCROLLBAR-HORIZO',
- 'SCROLLBAR-HORIZON',
- 'SCROLLBAR-HORIZONT',
- 'SCROLLBAR-HORIZONTA',
- 'SCROLLBAR-HORIZONTAL',
- 'SCROLLBAR-V',
- 'SCROLLBAR-VE',
- 'SCROLLBAR-VER',
- 'SCROLLBAR-VERT',
- 'SCROLLBAR-VERTI',
- 'SCROLLBAR-VERTIC',
- 'SCROLLBAR-VERTICA',
- 'SCROLLBAR-VERTICAL',
- 'SCROLLED-ROW-POS',
- 'SCROLLED-ROW-POSI',
- 'SCROLLED-ROW-POSIT',
- 'SCROLLED-ROW-POSITI',
- 'SCROLLED-ROW-POSITIO',
- 'SCROLLED-ROW-POSITION',
- 'SCROLLING',
- 'SDBNAME',
- 'SEAL',
- 'SEAL-TIMESTAMP',
- 'SEARCH',
- 'SEARCH-SELF',
- 'SEARCH-TARGET',
- 'SECTION',
- 'SECURITY-POLICY',
- 'SEEK',
- 'SELECT',
- 'SELECT-ALL',
- 'SELECT-FOCUSED-ROW',
- 'SELECT-NEXT-ROW',
- 'SELECT-PREV-ROW',
- 'SELECT-ROW',
- 'SELECTABLE',
- 'SELECTED',
- 'SELECTION',
- 'SELECTION-END',
- 'SELECTION-LIST',
- 'SELECTION-START',
- 'SELECTION-TEXT',
- 'SELF',
- 'SEND',
- 'SEND-SQL-STATEMENT',
- 'SENSITIVE',
- 'SEPARATE-CONNECTION',
- 'SEPARATOR-FGCOLOR',
- 'SEPARATORS',
- 'SERIALIZABLE',
- 'SERIALIZE-HIDDEN',
- 'SERIALIZE-NAME',
- 'SERVER',
- 'SERVER-CONNECTION-BOUND',
- 'SERVER-CONNECTION-BOUND-REQUEST',
- 'SERVER-CONNECTION-CONTEXT',
- 'SERVER-CONNECTION-ID',
- 'SERVER-OPERATING-MODE',
- 'SESSION',
- 'SESSION-ID',
- 'SET',
- 'SET-APPL-CONTEXT',
- 'SET-ATTR-CALL-TYPE',
- 'SET-ATTRIBUTE-NODE',
- 'SET-BLUE',
- 'SET-BLUE-',
- 'SET-BLUE-V',
- 'SET-BLUE-VA',
- 'SET-BLUE-VAL',
- 'SET-BLUE-VALU',
- 'SET-BLUE-VALUE',
- 'SET-BREAK',
- 'SET-BUFFERS',
- 'SET-CALLBACK',
- 'SET-CLIENT',
- 'SET-COMMIT',
- 'SET-CONTENTS',
- 'SET-CURRENT-VALUE',
- 'SET-DB-CLIENT',
- 'SET-DYNAMIC',
- 'SET-EVENT-MANAGER-OPTION',
- 'SET-GREEN',
- 'SET-GREEN-',
- 'SET-GREEN-V',
- 'SET-GREEN-VA',
- 'SET-GREEN-VAL',
- 'SET-GREEN-VALU',
- 'SET-GREEN-VALUE',
- 'SET-INPUT-SOURCE',
- 'SET-OPTION',
- 'SET-OUTPUT-DESTINATION',
- 'SET-PARAMETER',
- 'SET-POINTER-VALUE',
- 'SET-PROPERTY',
- 'SET-RED',
- 'SET-RED-',
- 'SET-RED-V',
- 'SET-RED-VA',
- 'SET-RED-VAL',
- 'SET-RED-VALU',
- 'SET-RED-VALUE',
- 'SET-REPOSITIONED-ROW',
- 'SET-RGB-VALUE',
- 'SET-ROLLBACK',
- 'SET-SELECTION',
- 'SET-SIZE',
- 'SET-SORT-ARROW',
- 'SET-WAIT-STATE',
- 'SETUSER',
- 'SETUSERI',
- 'SETUSERID',
- 'SHA1-DIGEST',
- 'SHARE',
- 'SHARE-',
- 'SHARE-L',
- 'SHARE-LO',
- 'SHARE-LOC',
- 'SHARE-LOCK',
- 'SHARED',
- 'SHOW-IN-TASKBAR',
- 'SHOW-STAT',
- 'SHOW-STATS',
- 'SIDE-LAB',
- 'SIDE-LABE',
- 'SIDE-LABEL',
- 'SIDE-LABEL-H',
- 'SIDE-LABEL-HA',
- 'SIDE-LABEL-HAN',
- 'SIDE-LABEL-HAND',
- 'SIDE-LABEL-HANDL',
- 'SIDE-LABEL-HANDLE',
- 'SIDE-LABELS',
- 'SIGNATURE',
- 'SILENT',
- 'SIMPLE',
- 'SINGLE',
- 'SINGLE-RUN',
- 'SINGLETON',
- 'SIZE',
- 'SIZE-C',
- 'SIZE-CH',
- 'SIZE-CHA',
- 'SIZE-CHAR',
- 'SIZE-CHARS',
- 'SIZE-P',
- 'SIZE-PI',
- 'SIZE-PIX',
- 'SIZE-PIXE',
- 'SIZE-PIXEL',
- 'SIZE-PIXELS',
- 'SKIP',
- 'SKIP-DELETED-RECORD',
- 'SLIDER',
- 'SMALL-ICON',
- 'SMALL-TITLE',
- 'SMALLINT',
- 'SOME',
- 'SORT',
- 'SORT-ASCENDING',
- 'SORT-NUMBER',
- 'SOURCE',
- 'SOURCE-PROCEDURE',
- 'SPACE',
- 'SQL',
- 'SQRT',
- 'SSL-SERVER-NAME',
- 'STANDALONE',
- 'START',
- 'START-DOCUMENT',
- 'START-ELEMENT',
- 'START-MOVE',
- 'START-RESIZE',
- 'START-ROW-RESIZE',
- 'STATE-DETAIL',
- 'STATIC',
- 'STATUS',
- 'STATUS-AREA',
- 'STATUS-AREA-FONT',
- 'STDCALL',
- 'STOP',
- 'STOP-AFTER',
- 'STOP-PARSING',
- 'STOPPE',
- 'STOPPED',
- 'STORED-PROC',
- 'STORED-PROCE',
- 'STORED-PROCED',
- 'STORED-PROCEDU',
- 'STORED-PROCEDUR',
- 'STORED-PROCEDURE',
- 'STREAM',
- 'STREAM-HANDLE',
- 'STREAM-IO',
- 'STRETCH-TO-FIT',
- 'STRICT',
- 'STRICT-ENTITY-RESOLUTION',
- 'STRING',
- 'STRING-VALUE',
- 'STRING-XREF',
- 'SUB-AVE',
- 'SUB-AVER',
- 'SUB-AVERA',
- 'SUB-AVERAG',
- 'SUB-AVERAGE',
- 'SUB-COUNT',
- 'SUB-MAXIMUM',
- 'SUB-MENU',
- 'SUB-MIN',
- 'SUB-MINIMUM',
- 'SUB-TOTAL',
- 'SUBSCRIBE',
- 'SUBST',
- 'SUBSTI',
- 'SUBSTIT',
- 'SUBSTITU',
- 'SUBSTITUT',
- 'SUBSTITUTE',
- 'SUBSTR',
- 'SUBSTRI',
- 'SUBSTRIN',
- 'SUBSTRING',
- 'SUBTYPE',
- 'SUM',
- 'SUM-MAX',
- 'SUM-MAXI',
- 'SUM-MAXIM',
- 'SUM-MAXIMU',
- 'SUPER',
- 'SUPER-PROCEDURES',
- 'SUPPRESS-NAMESPACE-PROCESSING',
- 'SUPPRESS-W',
- 'SUPPRESS-WA',
- 'SUPPRESS-WAR',
- 'SUPPRESS-WARN',
- 'SUPPRESS-WARNI',
- 'SUPPRESS-WARNIN',
- 'SUPPRESS-WARNING',
- 'SUPPRESS-WARNINGS',
- 'SYMMETRIC-ENCRYPTION-ALGORITHM',
- 'SYMMETRIC-ENCRYPTION-IV',
- 'SYMMETRIC-ENCRYPTION-KEY',
- 'SYMMETRIC-SUPPORT',
- 'SYSTEM-ALERT',
- 'SYSTEM-ALERT-',
- 'SYSTEM-ALERT-B',
- 'SYSTEM-ALERT-BO',
- 'SYSTEM-ALERT-BOX',
- 'SYSTEM-ALERT-BOXE',
- 'SYSTEM-ALERT-BOXES',
- 'SYSTEM-DIALOG',
- 'SYSTEM-HELP',
- 'SYSTEM-ID',
- 'TAB-POSITION',
- 'TAB-STOP',
- 'TABLE',
- 'TABLE-HANDLE',
- 'TABLE-NUMBER',
- 'TABLE-SCAN',
- 'TARGET',
- 'TARGET-PROCEDURE',
- 'TEMP-DIR',
- 'TEMP-DIRE',
- 'TEMP-DIREC',
- 'TEMP-DIRECT',
- 'TEMP-DIRECTO',
- 'TEMP-DIRECTOR',
- 'TEMP-DIRECTORY',
- 'TEMP-TABLE',
- 'TEMP-TABLE-PREPARE',
- 'TERM',
- 'TERMI',
- 'TERMIN',
- 'TERMINA',
- 'TERMINAL',
- 'TERMINATE',
- 'TEXT',
- 'TEXT-CURSOR',
- 'TEXT-SEG-GROW',
- 'TEXT-SELECTED',
- 'THEN',
- 'THIS-OBJECT',
- 'THIS-PROCEDURE',
- 'THREAD-SAFE',
- 'THREE-D',
- 'THROUGH',
- 'THROW',
- 'THRU',
- 'TIC-MARKS',
- 'TIME',
- 'TIME-SOURCE',
- 'TITLE',
- 'TITLE-BGC',
- 'TITLE-BGCO',
- 'TITLE-BGCOL',
- 'TITLE-BGCOLO',
- 'TITLE-BGCOLOR',
- 'TITLE-DC',
- 'TITLE-DCO',
- 'TITLE-DCOL',
- 'TITLE-DCOLO',
- 'TITLE-DCOLOR',
- 'TITLE-FGC',
- 'TITLE-FGCO',
- 'TITLE-FGCOL',
- 'TITLE-FGCOLO',
- 'TITLE-FGCOLOR',
- 'TITLE-FO',
- 'TITLE-FON',
- 'TITLE-FONT',
- 'TO',
- 'TO-ROWID',
- 'TODAY',
- 'TOGGLE-BOX',
- 'TOOLTIP',
- 'TOOLTIPS',
- 'TOP-NAV-QUERY',
- 'TOP-ONLY',
- 'TOPIC',
- 'TOTAL',
- 'TRAILING',
- 'TRANS',
- 'TRANS-INIT-PROCEDURE',
- 'TRANSACTION',
- 'TRANSACTION-MODE',
- 'TRANSPARENT',
- 'TRIGGER',
- 'TRIGGERS',
- 'TRIM',
- 'TRUE',
- 'TRUNC',
- 'TRUNCA',
- 'TRUNCAT',
- 'TRUNCATE',
- 'TYPE',
- 'TYPE-OF',
- 'UNBOX',
- 'UNBUFF',
- 'UNBUFFE',
- 'UNBUFFER',
- 'UNBUFFERE',
- 'UNBUFFERED',
- 'UNDERL',
- 'UNDERLI',
- 'UNDERLIN',
- 'UNDERLINE',
- 'UNDO',
- 'UNFORM',
- 'UNFORMA',
- 'UNFORMAT',
- 'UNFORMATT',
- 'UNFORMATTE',
- 'UNFORMATTED',
- 'UNION',
- 'UNIQUE',
- 'UNIQUE-ID',
- 'UNIQUE-MATCH',
- 'UNIX',
- 'UNLESS-HIDDEN',
- 'UNLOAD',
- 'UNSIGNED-LONG',
- 'UNSUBSCRIBE',
- 'UP',
- 'UPDATE',
- 'UPDATE-ATTRIBUTE',
- 'URL',
- 'URL-DECODE',
- 'URL-ENCODE',
- 'URL-PASSWORD',
- 'URL-USERID',
- 'USE',
- 'USE-DICT-EXPS',
- 'USE-FILENAME',
- 'USE-INDEX',
- 'USE-REVVIDEO',
- 'USE-TEXT',
- 'USE-UNDERLINE',
- 'USE-WIDGET-POOL',
- 'USER',
- 'USER-ID',
- 'USERID',
- 'USING',
- 'V6DISPLAY',
- 'V6FRAME',
- 'VALID-EVENT',
- 'VALID-HANDLE',
- 'VALID-OBJECT',
- 'VALIDATE',
- 'VALIDATE-EXPRESSION',
- 'VALIDATE-MESSAGE',
- 'VALIDATE-SEAL',
- 'VALIDATION-ENABLED',
- 'VALUE',
- 'VALUE-CHANGED',
- 'VALUES',
- 'VAR',
- 'VARI',
- 'VARIA',
- 'VARIAB',
- 'VARIABL',
- 'VARIABLE',
- 'VERBOSE',
- 'VERSION',
- 'VERT',
- 'VERTI',
- 'VERTIC',
- 'VERTICA',
- 'VERTICAL',
- 'VIEW',
- 'VIEW-AS',
- 'VIEW-FIRST-COLUMN-ON-REOPEN',
- 'VIRTUAL-HEIGHT',
- 'VIRTUAL-HEIGHT-',
- 'VIRTUAL-HEIGHT-C',
- 'VIRTUAL-HEIGHT-CH',
- 'VIRTUAL-HEIGHT-CHA',
- 'VIRTUAL-HEIGHT-CHAR',
- 'VIRTUAL-HEIGHT-CHARS',
- 'VIRTUAL-HEIGHT-P',
- 'VIRTUAL-HEIGHT-PI',
- 'VIRTUAL-HEIGHT-PIX',
- 'VIRTUAL-HEIGHT-PIXE',
- 'VIRTUAL-HEIGHT-PIXEL',
- 'VIRTUAL-HEIGHT-PIXELS',
- 'VIRTUAL-WIDTH',
- 'VIRTUAL-WIDTH-',
- 'VIRTUAL-WIDTH-C',
- 'VIRTUAL-WIDTH-CH',
- 'VIRTUAL-WIDTH-CHA',
- 'VIRTUAL-WIDTH-CHAR',
- 'VIRTUAL-WIDTH-CHARS',
- 'VIRTUAL-WIDTH-P',
- 'VIRTUAL-WIDTH-PI',
- 'VIRTUAL-WIDTH-PIX',
- 'VIRTUAL-WIDTH-PIXE',
- 'VIRTUAL-WIDTH-PIXEL',
- 'VIRTUAL-WIDTH-PIXELS',
- 'VISIBLE',
- 'VOID',
- 'WAIT',
- 'WAIT-FOR',
- 'WARNING',
- 'WEB-CONTEXT',
- 'WEEKDAY',
- 'WHEN',
- 'WHERE',
- 'WHILE',
- 'WIDGET',
- 'WIDGET-E',
- 'WIDGET-EN',
- 'WIDGET-ENT',
- 'WIDGET-ENTE',
- 'WIDGET-ENTER',
- 'WIDGET-ID',
- 'WIDGET-L',
- 'WIDGET-LE',
- 'WIDGET-LEA',
- 'WIDGET-LEAV',
- 'WIDGET-LEAVE',
- 'WIDGET-POOL',
- 'WIDTH',
- 'WIDTH-',
- 'WIDTH-C',
- 'WIDTH-CH',
- 'WIDTH-CHA',
- 'WIDTH-CHAR',
- 'WIDTH-CHARS',
- 'WIDTH-P',
- 'WIDTH-PI',
- 'WIDTH-PIX',
- 'WIDTH-PIXE',
- 'WIDTH-PIXEL',
- 'WIDTH-PIXELS',
- 'WINDOW',
- 'WINDOW-MAXIM',
- 'WINDOW-MAXIMI',
- 'WINDOW-MAXIMIZ',
- 'WINDOW-MAXIMIZE',
- 'WINDOW-MAXIMIZED',
- 'WINDOW-MINIM',
- 'WINDOW-MINIMI',
- 'WINDOW-MINIMIZ',
- 'WINDOW-MINIMIZE',
- 'WINDOW-MINIMIZED',
- 'WINDOW-NAME',
- 'WINDOW-NORMAL',
- 'WINDOW-STA',
- 'WINDOW-STAT',
- 'WINDOW-STATE',
- 'WINDOW-SYSTEM',
- 'WITH',
- 'WORD-INDEX',
- 'WORD-WRAP',
- 'WORK-AREA-HEIGHT-PIXELS',
- 'WORK-AREA-WIDTH-PIXELS',
- 'WORK-AREA-X',
- 'WORK-AREA-Y',
- 'WORK-TAB',
- 'WORK-TABL',
- 'WORK-TABLE',
- 'WORKFILE',
- 'WRITE',
- 'WRITE-CDATA',
- 'WRITE-CHARACTERS',
- 'WRITE-COMMENT',
- 'WRITE-DATA-ELEMENT',
- 'WRITE-EMPTY-ELEMENT',
- 'WRITE-ENTITY-REF',
- 'WRITE-EXTERNAL-DTD',
- 'WRITE-FRAGMENT',
- 'WRITE-JSON',
- 'WRITE-MESSAGE',
- 'WRITE-PROCESSING-INSTRUCTION',
- 'WRITE-STATUS',
- 'WRITE-XML',
- 'WRITE-XMLSCHEMA',
- 'X',
- 'X-OF',
- 'XCODE',
- 'XML-DATA-TYPE',
- 'XML-ENTITY-EXPANSION-LIMIT',
- 'XML-NODE-TYPE',
- 'XML-SCHEMA-PATH',
- 'XML-STRICT-ENTITY-RESOLUTION',
- 'XML-SUPPRESS-NAMESPACE-PROCESSING',
- 'XREF',
- 'XREF-XML',
- 'Y',
- 'Y-OF',
- 'YEAR',
- 'YEAR-OFFSET',
- 'YES',
- 'YES-NO',
- 'YES-NO-CANCEL'
-)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_php_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_php_builtins.py
deleted file mode 100644
index 5366e75..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_php_builtins.py
+++ /dev/null
@@ -1,3325 +0,0 @@
-"""
- pygments.lexers._php_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file loads the function names and their modules from the
- php webpage and generates itself.
-
- Run with `python -I` to regenerate.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-MODULES = {'APCu': ('apcu_add',
- 'apcu_cache_info',
- 'apcu_cas',
- 'apcu_clear_cache',
- 'apcu_dec',
- 'apcu_delete',
- 'apcu_enabled',
- 'apcu_entry',
- 'apcu_exists',
- 'apcu_fetch',
- 'apcu_inc',
- 'apcu_key_info',
- 'apcu_sma_info',
- 'apcu_store'),
- 'Aliases and deprecated Mysqli': ('mysqli_connect',
- 'mysqli_execute',
- 'mysqli_get_client_stats',
- 'mysqli_get_links_stats',
- 'mysqli_report'),
- 'Apache': ('apache_child_terminate',
- 'apache_get_modules',
- 'apache_get_version',
- 'apache_getenv',
- 'apache_lookup_uri',
- 'apache_note',
- 'apache_request_headers',
- 'apache_response_headers',
- 'apache_setenv',
- 'getallheaders',
- 'virtual'),
- 'Array': ('array_change_key_case',
- 'array_chunk',
- 'array_column',
- 'array_combine',
- 'array_count_values',
- 'array_diff_assoc',
- 'array_diff_key',
- 'array_diff_uassoc',
- 'array_diff_ukey',
- 'array_diff',
- 'array_fill_keys',
- 'array_fill',
- 'array_filter',
- 'array_flip',
- 'array_intersect_assoc',
- 'array_intersect_key',
- 'array_intersect_uassoc',
- 'array_intersect_ukey',
- 'array_intersect',
- 'array_is_list',
- 'array_key_exists',
- 'array_key_first',
- 'array_key_last',
- 'array_keys',
- 'array_map',
- 'array_merge_recursive',
- 'array_merge',
- 'array_multisort',
- 'array_pad',
- 'array_pop',
- 'array_product',
- 'array_push',
- 'array_rand',
- 'array_reduce',
- 'array_replace_recursive',
- 'array_replace',
- 'array_reverse',
- 'array_search',
- 'array_shift',
- 'array_slice',
- 'array_splice',
- 'array_sum',
- 'array_udiff_assoc',
- 'array_udiff_uassoc',
- 'array_udiff',
- 'array_uintersect_assoc',
- 'array_uintersect_uassoc',
- 'array_uintersect',
- 'array_unique',
- 'array_unshift',
- 'array_values',
- 'array_walk_recursive',
- 'array_walk',
- 'array',
- 'arsort',
- 'asort',
- 'compact',
- 'count',
- 'current',
- 'each',
- 'end',
- 'extract',
- 'in_array',
- 'key_exists',
- 'key',
- 'krsort',
- 'ksort',
- 'list',
- 'natcasesort',
- 'natsort',
- 'next',
- 'pos',
- 'prev',
- 'range',
- 'reset',
- 'rsort',
- 'shuffle',
- 'sizeof',
- 'sort',
- 'uasort',
- 'uksort',
- 'usort'),
- 'BC Math': ('bcadd',
- 'bccomp',
- 'bcdiv',
- 'bcmod',
- 'bcmul',
- 'bcpow',
- 'bcpowmod',
- 'bcscale',
- 'bcsqrt',
- 'bcsub'),
- 'Bzip2': ('bzclose',
- 'bzcompress',
- 'bzdecompress',
- 'bzerrno',
- 'bzerror',
- 'bzerrstr',
- 'bzflush',
- 'bzopen',
- 'bzread',
- 'bzwrite'),
- 'COM': ('com_create_guid',
- 'com_event_sink',
- 'com_get_active_object',
- 'com_load_typelib',
- 'com_message_pump',
- 'com_print_typeinfo',
- 'variant_abs',
- 'variant_add',
- 'variant_and',
- 'variant_cast',
- 'variant_cat',
- 'variant_cmp',
- 'variant_date_from_timestamp',
- 'variant_date_to_timestamp',
- 'variant_div',
- 'variant_eqv',
- 'variant_fix',
- 'variant_get_type',
- 'variant_idiv',
- 'variant_imp',
- 'variant_int',
- 'variant_mod',
- 'variant_mul',
- 'variant_neg',
- 'variant_not',
- 'variant_or',
- 'variant_pow',
- 'variant_round',
- 'variant_set_type',
- 'variant_set',
- 'variant_sub',
- 'variant_xor'),
- 'CSPRNG': ('random_bytes', 'random_int'),
- 'CUBRID': ('cubrid_bind',
- 'cubrid_close_prepare',
- 'cubrid_close_request',
- 'cubrid_col_get',
- 'cubrid_col_size',
- 'cubrid_column_names',
- 'cubrid_column_types',
- 'cubrid_commit',
- 'cubrid_connect_with_url',
- 'cubrid_connect',
- 'cubrid_current_oid',
- 'cubrid_disconnect',
- 'cubrid_drop',
- 'cubrid_error_code_facility',
- 'cubrid_error_code',
- 'cubrid_error_msg',
- 'cubrid_execute',
- 'cubrid_fetch',
- 'cubrid_free_result',
- 'cubrid_get_autocommit',
- 'cubrid_get_charset',
- 'cubrid_get_class_name',
- 'cubrid_get_client_info',
- 'cubrid_get_db_parameter',
- 'cubrid_get_query_timeout',
- 'cubrid_get_server_info',
- 'cubrid_get',
- 'cubrid_insert_id',
- 'cubrid_is_instance',
- 'cubrid_lob_close',
- 'cubrid_lob_export',
- 'cubrid_lob_get',
- 'cubrid_lob_send',
- 'cubrid_lob_size',
- 'cubrid_lob2_bind',
- 'cubrid_lob2_close',
- 'cubrid_lob2_export',
- 'cubrid_lob2_import',
- 'cubrid_lob2_new',
- 'cubrid_lob2_read',
- 'cubrid_lob2_seek64',
- 'cubrid_lob2_seek',
- 'cubrid_lob2_size64',
- 'cubrid_lob2_size',
- 'cubrid_lob2_tell64',
- 'cubrid_lob2_tell',
- 'cubrid_lob2_write',
- 'cubrid_lock_read',
- 'cubrid_lock_write',
- 'cubrid_move_cursor',
- 'cubrid_next_result',
- 'cubrid_num_cols',
- 'cubrid_num_rows',
- 'cubrid_pconnect_with_url',
- 'cubrid_pconnect',
- 'cubrid_prepare',
- 'cubrid_put',
- 'cubrid_rollback',
- 'cubrid_schema',
- 'cubrid_seq_drop',
- 'cubrid_seq_insert',
- 'cubrid_seq_put',
- 'cubrid_set_add',
- 'cubrid_set_autocommit',
- 'cubrid_set_db_parameter',
- 'cubrid_set_drop',
- 'cubrid_set_query_timeout',
- 'cubrid_version'),
- 'Calendar': ('cal_days_in_month',
- 'cal_from_jd',
- 'cal_info',
- 'cal_to_jd',
- 'easter_date',
- 'easter_days',
- 'frenchtojd',
- 'gregoriantojd',
- 'jddayofweek',
- 'jdmonthname',
- 'jdtofrench',
- 'jdtogregorian',
- 'jdtojewish',
- 'jdtojulian',
- 'jdtounix',
- 'jewishtojd',
- 'juliantojd',
- 'unixtojd'),
- 'Classes/Object': ('__autoload',
- 'class_alias',
- 'class_exists',
- 'enum_exists',
- 'get_called_class',
- 'get_class_methods',
- 'get_class_vars',
- 'get_class',
- 'get_declared_classes',
- 'get_declared_interfaces',
- 'get_declared_traits',
- 'get_mangled_object_vars',
- 'get_object_vars',
- 'get_parent_class',
- 'interface_exists',
- 'is_a',
- 'is_subclass_of',
- 'method_exists',
- 'property_exists',
- 'trait_exists'),
- 'Ctype': ('ctype_alnum',
- 'ctype_alpha',
- 'ctype_cntrl',
- 'ctype_digit',
- 'ctype_graph',
- 'ctype_lower',
- 'ctype_print',
- 'ctype_punct',
- 'ctype_space',
- 'ctype_upper',
- 'ctype_xdigit'),
- 'DBA': ('dba_close',
- 'dba_delete',
- 'dba_exists',
- 'dba_fetch',
- 'dba_firstkey',
- 'dba_handlers',
- 'dba_insert',
- 'dba_key_split',
- 'dba_list',
- 'dba_nextkey',
- 'dba_open',
- 'dba_optimize',
- 'dba_popen',
- 'dba_replace',
- 'dba_sync'),
- 'DOM': ('dom_import_simplexml',),
- 'Date/Time': ('checkdate',
- 'date_add',
- 'date_create_from_format',
- 'date_create_immutable_from_format',
- 'date_create_immutable',
- 'date_create',
- 'date_date_set',
- 'date_default_timezone_get',
- 'date_default_timezone_set',
- 'date_diff',
- 'date_format',
- 'date_get_last_errors',
- 'date_interval_create_from_date_string',
- 'date_interval_format',
- 'date_isodate_set',
- 'date_modify',
- 'date_offset_get',
- 'date_parse_from_format',
- 'date_parse',
- 'date_sub',
- 'date_sun_info',
- 'date_sunrise',
- 'date_sunset',
- 'date_time_set',
- 'date_timestamp_get',
- 'date_timestamp_set',
- 'date_timezone_get',
- 'date_timezone_set',
- 'date',
- 'getdate',
- 'gettimeofday',
- 'gmdate',
- 'gmmktime',
- 'gmstrftime',
- 'idate',
- 'localtime',
- 'microtime',
- 'mktime',
- 'strftime',
- 'strptime',
- 'strtotime',
- 'time',
- 'timezone_abbreviations_list',
- 'timezone_identifiers_list',
- 'timezone_location_get',
- 'timezone_name_from_abbr',
- 'timezone_name_get',
- 'timezone_offset_get',
- 'timezone_open',
- 'timezone_transitions_get',
- 'timezone_version_get'),
- 'Direct IO': ('dio_close',
- 'dio_fcntl',
- 'dio_open',
- 'dio_read',
- 'dio_seek',
- 'dio_stat',
- 'dio_tcsetattr',
- 'dio_truncate',
- 'dio_write'),
- 'Directory': ('chdir',
- 'chroot',
- 'closedir',
- 'dir',
- 'getcwd',
- 'opendir',
- 'readdir',
- 'rewinddir',
- 'scandir'),
- 'Eio': ('eio_busy',
- 'eio_cancel',
- 'eio_chmod',
- 'eio_chown',
- 'eio_close',
- 'eio_custom',
- 'eio_dup2',
- 'eio_event_loop',
- 'eio_fallocate',
- 'eio_fchmod',
- 'eio_fchown',
- 'eio_fdatasync',
- 'eio_fstat',
- 'eio_fstatvfs',
- 'eio_fsync',
- 'eio_ftruncate',
- 'eio_futime',
- 'eio_get_event_stream',
- 'eio_get_last_error',
- 'eio_grp_add',
- 'eio_grp_cancel',
- 'eio_grp_limit',
- 'eio_grp',
- 'eio_init',
- 'eio_link',
- 'eio_lstat',
- 'eio_mkdir',
- 'eio_mknod',
- 'eio_nop',
- 'eio_npending',
- 'eio_nready',
- 'eio_nreqs',
- 'eio_nthreads',
- 'eio_open',
- 'eio_poll',
- 'eio_read',
- 'eio_readahead',
- 'eio_readdir',
- 'eio_readlink',
- 'eio_realpath',
- 'eio_rename',
- 'eio_rmdir',
- 'eio_seek',
- 'eio_sendfile',
- 'eio_set_max_idle',
- 'eio_set_max_parallel',
- 'eio_set_max_poll_reqs',
- 'eio_set_max_poll_time',
- 'eio_set_min_parallel',
- 'eio_stat',
- 'eio_statvfs',
- 'eio_symlink',
- 'eio_sync_file_range',
- 'eio_sync',
- 'eio_syncfs',
- 'eio_truncate',
- 'eio_unlink',
- 'eio_utime',
- 'eio_write'),
- 'Enchant': ('enchant_broker_describe',
- 'enchant_broker_dict_exists',
- 'enchant_broker_free_dict',
- 'enchant_broker_free',
- 'enchant_broker_get_dict_path',
- 'enchant_broker_get_error',
- 'enchant_broker_init',
- 'enchant_broker_list_dicts',
- 'enchant_broker_request_dict',
- 'enchant_broker_request_pwl_dict',
- 'enchant_broker_set_dict_path',
- 'enchant_broker_set_ordering',
- 'enchant_dict_add_to_personal',
- 'enchant_dict_add_to_session',
- 'enchant_dict_add',
- 'enchant_dict_check',
- 'enchant_dict_describe',
- 'enchant_dict_get_error',
- 'enchant_dict_is_added',
- 'enchant_dict_is_in_session',
- 'enchant_dict_quick_check',
- 'enchant_dict_store_replacement',
- 'enchant_dict_suggest'),
- 'Error Handling': ('debug_backtrace',
- 'debug_print_backtrace',
- 'error_clear_last',
- 'error_get_last',
- 'error_log',
- 'error_reporting',
- 'restore_error_handler',
- 'restore_exception_handler',
- 'set_error_handler',
- 'set_exception_handler',
- 'trigger_error',
- 'user_error'),
- 'Exif': ('exif_imagetype',
- 'exif_read_data',
- 'exif_tagname',
- 'exif_thumbnail',
- 'read_exif_data'),
- 'Expect': ('expect_expectl', 'expect_popen'),
- 'FDF': ('fdf_add_doc_javascript',
- 'fdf_add_template',
- 'fdf_close',
- 'fdf_create',
- 'fdf_enum_values',
- 'fdf_errno',
- 'fdf_error',
- 'fdf_get_ap',
- 'fdf_get_attachment',
- 'fdf_get_encoding',
- 'fdf_get_file',
- 'fdf_get_flags',
- 'fdf_get_opt',
- 'fdf_get_status',
- 'fdf_get_value',
- 'fdf_get_version',
- 'fdf_header',
- 'fdf_next_field_name',
- 'fdf_open_string',
- 'fdf_open',
- 'fdf_remove_item',
- 'fdf_save_string',
- 'fdf_save',
- 'fdf_set_ap',
- 'fdf_set_encoding',
- 'fdf_set_file',
- 'fdf_set_flags',
- 'fdf_set_javascript_action',
- 'fdf_set_on_import_javascript',
- 'fdf_set_opt',
- 'fdf_set_status',
- 'fdf_set_submit_form_action',
- 'fdf_set_target_frame',
- 'fdf_set_value',
- 'fdf_set_version'),
- 'FPM': ('fastcgi_finish_request',),
- 'FTP': ('ftp_alloc',
- 'ftp_append',
- 'ftp_cdup',
- 'ftp_chdir',
- 'ftp_chmod',
- 'ftp_close',
- 'ftp_connect',
- 'ftp_delete',
- 'ftp_exec',
- 'ftp_fget',
- 'ftp_fput',
- 'ftp_get_option',
- 'ftp_get',
- 'ftp_login',
- 'ftp_mdtm',
- 'ftp_mkdir',
- 'ftp_mlsd',
- 'ftp_nb_continue',
- 'ftp_nb_fget',
- 'ftp_nb_fput',
- 'ftp_nb_get',
- 'ftp_nb_put',
- 'ftp_nlist',
- 'ftp_pasv',
- 'ftp_put',
- 'ftp_pwd',
- 'ftp_quit',
- 'ftp_raw',
- 'ftp_rawlist',
- 'ftp_rename',
- 'ftp_rmdir',
- 'ftp_set_option',
- 'ftp_site',
- 'ftp_size',
- 'ftp_ssl_connect',
- 'ftp_systype'),
- 'Fann': ('fann_cascadetrain_on_data',
- 'fann_cascadetrain_on_file',
- 'fann_clear_scaling_params',
- 'fann_copy',
- 'fann_create_from_file',
- 'fann_create_shortcut_array',
- 'fann_create_shortcut',
- 'fann_create_sparse_array',
- 'fann_create_sparse',
- 'fann_create_standard_array',
- 'fann_create_standard',
- 'fann_create_train_from_callback',
- 'fann_create_train',
- 'fann_descale_input',
- 'fann_descale_output',
- 'fann_descale_train',
- 'fann_destroy_train',
- 'fann_destroy',
- 'fann_duplicate_train_data',
- 'fann_get_activation_function',
- 'fann_get_activation_steepness',
- 'fann_get_bias_array',
- 'fann_get_bit_fail_limit',
- 'fann_get_bit_fail',
- 'fann_get_cascade_activation_functions_count',
- 'fann_get_cascade_activation_functions',
- 'fann_get_cascade_activation_steepnesses_count',
- 'fann_get_cascade_activation_steepnesses',
- 'fann_get_cascade_candidate_change_fraction',
- 'fann_get_cascade_candidate_limit',
- 'fann_get_cascade_candidate_stagnation_epochs',
- 'fann_get_cascade_max_cand_epochs',
- 'fann_get_cascade_max_out_epochs',
- 'fann_get_cascade_min_cand_epochs',
- 'fann_get_cascade_min_out_epochs',
- 'fann_get_cascade_num_candidate_groups',
- 'fann_get_cascade_num_candidates',
- 'fann_get_cascade_output_change_fraction',
- 'fann_get_cascade_output_stagnation_epochs',
- 'fann_get_cascade_weight_multiplier',
- 'fann_get_connection_array',
- 'fann_get_connection_rate',
- 'fann_get_errno',
- 'fann_get_errstr',
- 'fann_get_layer_array',
- 'fann_get_learning_momentum',
- 'fann_get_learning_rate',
- 'fann_get_MSE',
- 'fann_get_network_type',
- 'fann_get_num_input',
- 'fann_get_num_layers',
- 'fann_get_num_output',
- 'fann_get_quickprop_decay',
- 'fann_get_quickprop_mu',
- 'fann_get_rprop_decrease_factor',
- 'fann_get_rprop_delta_max',
- 'fann_get_rprop_delta_min',
- 'fann_get_rprop_delta_zero',
- 'fann_get_rprop_increase_factor',
- 'fann_get_sarprop_step_error_shift',
- 'fann_get_sarprop_step_error_threshold_factor',
- 'fann_get_sarprop_temperature',
- 'fann_get_sarprop_weight_decay_shift',
- 'fann_get_total_connections',
- 'fann_get_total_neurons',
- 'fann_get_train_error_function',
- 'fann_get_train_stop_function',
- 'fann_get_training_algorithm',
- 'fann_init_weights',
- 'fann_length_train_data',
- 'fann_merge_train_data',
- 'fann_num_input_train_data',
- 'fann_num_output_train_data',
- 'fann_print_error',
- 'fann_randomize_weights',
- 'fann_read_train_from_file',
- 'fann_reset_errno',
- 'fann_reset_errstr',
- 'fann_reset_MSE',
- 'fann_run',
- 'fann_save_train',
- 'fann_save',
- 'fann_scale_input_train_data',
- 'fann_scale_input',
- 'fann_scale_output_train_data',
- 'fann_scale_output',
- 'fann_scale_train_data',
- 'fann_scale_train',
- 'fann_set_activation_function_hidden',
- 'fann_set_activation_function_layer',
- 'fann_set_activation_function_output',
- 'fann_set_activation_function',
- 'fann_set_activation_steepness_hidden',
- 'fann_set_activation_steepness_layer',
- 'fann_set_activation_steepness_output',
- 'fann_set_activation_steepness',
- 'fann_set_bit_fail_limit',
- 'fann_set_callback',
- 'fann_set_cascade_activation_functions',
- 'fann_set_cascade_activation_steepnesses',
- 'fann_set_cascade_candidate_change_fraction',
- 'fann_set_cascade_candidate_limit',
- 'fann_set_cascade_candidate_stagnation_epochs',
- 'fann_set_cascade_max_cand_epochs',
- 'fann_set_cascade_max_out_epochs',
- 'fann_set_cascade_min_cand_epochs',
- 'fann_set_cascade_min_out_epochs',
- 'fann_set_cascade_num_candidate_groups',
- 'fann_set_cascade_output_change_fraction',
- 'fann_set_cascade_output_stagnation_epochs',
- 'fann_set_cascade_weight_multiplier',
- 'fann_set_error_log',
- 'fann_set_input_scaling_params',
- 'fann_set_learning_momentum',
- 'fann_set_learning_rate',
- 'fann_set_output_scaling_params',
- 'fann_set_quickprop_decay',
- 'fann_set_quickprop_mu',
- 'fann_set_rprop_decrease_factor',
- 'fann_set_rprop_delta_max',
- 'fann_set_rprop_delta_min',
- 'fann_set_rprop_delta_zero',
- 'fann_set_rprop_increase_factor',
- 'fann_set_sarprop_step_error_shift',
- 'fann_set_sarprop_step_error_threshold_factor',
- 'fann_set_sarprop_temperature',
- 'fann_set_sarprop_weight_decay_shift',
- 'fann_set_scaling_params',
- 'fann_set_train_error_function',
- 'fann_set_train_stop_function',
- 'fann_set_training_algorithm',
- 'fann_set_weight_array',
- 'fann_set_weight',
- 'fann_shuffle_train_data',
- 'fann_subset_train_data',
- 'fann_test_data',
- 'fann_test',
- 'fann_train_epoch',
- 'fann_train_on_data',
- 'fann_train_on_file',
- 'fann_train'),
- 'Fileinfo': ('finfo_buffer',
- 'finfo_close',
- 'finfo_file',
- 'finfo_open',
- 'finfo_set_flags',
- 'mime_content_type'),
- 'Filesystem': ('basename',
- 'chgrp',
- 'chmod',
- 'chown',
- 'clearstatcache',
- 'copy',
- 'dirname',
- 'disk_free_space',
- 'disk_total_space',
- 'diskfreespace',
- 'fclose',
- 'fdatasync',
- 'feof',
- 'fflush',
- 'fgetc',
- 'fgetcsv',
- 'fgets',
- 'fgetss',
- 'file_exists',
- 'file_get_contents',
- 'file_put_contents',
- 'file',
- 'fileatime',
- 'filectime',
- 'filegroup',
- 'fileinode',
- 'filemtime',
- 'fileowner',
- 'fileperms',
- 'filesize',
- 'filetype',
- 'flock',
- 'fnmatch',
- 'fopen',
- 'fpassthru',
- 'fputcsv',
- 'fputs',
- 'fread',
- 'fscanf',
- 'fseek',
- 'fstat',
- 'fsync',
- 'ftell',
- 'ftruncate',
- 'fwrite',
- 'glob',
- 'is_dir',
- 'is_executable',
- 'is_file',
- 'is_link',
- 'is_readable',
- 'is_uploaded_file',
- 'is_writable',
- 'is_writeable',
- 'lchgrp',
- 'lchown',
- 'link',
- 'linkinfo',
- 'lstat',
- 'mkdir',
- 'move_uploaded_file',
- 'parse_ini_file',
- 'parse_ini_string',
- 'pathinfo',
- 'pclose',
- 'popen',
- 'readfile',
- 'readlink',
- 'realpath_cache_get',
- 'realpath_cache_size',
- 'realpath',
- 'rename',
- 'rewind',
- 'rmdir',
- 'set_file_buffer',
- 'stat',
- 'symlink',
- 'tempnam',
- 'tmpfile',
- 'touch',
- 'umask',
- 'unlink'),
- 'Filter': ('filter_has_var',
- 'filter_id',
- 'filter_input_array',
- 'filter_input',
- 'filter_list',
- 'filter_var_array',
- 'filter_var'),
- 'Firebird/InterBase': ('fbird_add_user',
- 'fbird_affected_rows',
- 'fbird_backup',
- 'fbird_blob_add',
- 'fbird_blob_cancel',
- 'fbird_blob_close',
- 'fbird_blob_create',
- 'fbird_blob_echo',
- 'fbird_blob_get',
- 'fbird_blob_import',
- 'fbird_blob_info',
- 'fbird_blob_open',
- 'fbird_close',
- 'fbird_commit_ret',
- 'fbird_commit',
- 'fbird_connect',
- 'fbird_db_info',
- 'fbird_delete_user',
- 'fbird_drop_db',
- 'fbird_errcode',
- 'fbird_errmsg',
- 'fbird_execute',
- 'fbird_fetch_assoc',
- 'fbird_fetch_object',
- 'fbird_fetch_row',
- 'fbird_field_info',
- 'fbird_free_event_handler',
- 'fbird_free_query',
- 'fbird_free_result',
- 'fbird_gen_id',
- 'fbird_maintain_db',
- 'fbird_modify_user',
- 'fbird_name_result',
- 'fbird_num_fields',
- 'fbird_num_params',
- 'fbird_param_info',
- 'fbird_pconnect',
- 'fbird_prepare',
- 'fbird_query',
- 'fbird_restore',
- 'fbird_rollback_ret',
- 'fbird_rollback',
- 'fbird_server_info',
- 'fbird_service_attach',
- 'fbird_service_detach',
- 'fbird_set_event_handler',
- 'fbird_trans',
- 'fbird_wait_event',
- 'ibase_add_user',
- 'ibase_affected_rows',
- 'ibase_backup',
- 'ibase_blob_add',
- 'ibase_blob_cancel',
- 'ibase_blob_close',
- 'ibase_blob_create',
- 'ibase_blob_echo',
- 'ibase_blob_get',
- 'ibase_blob_import',
- 'ibase_blob_info',
- 'ibase_blob_open',
- 'ibase_close',
- 'ibase_commit_ret',
- 'ibase_commit',
- 'ibase_connect',
- 'ibase_db_info',
- 'ibase_delete_user',
- 'ibase_drop_db',
- 'ibase_errcode',
- 'ibase_errmsg',
- 'ibase_execute',
- 'ibase_fetch_assoc',
- 'ibase_fetch_object',
- 'ibase_fetch_row',
- 'ibase_field_info',
- 'ibase_free_event_handler',
- 'ibase_free_query',
- 'ibase_free_result',
- 'ibase_gen_id',
- 'ibase_maintain_db',
- 'ibase_modify_user',
- 'ibase_name_result',
- 'ibase_num_fields',
- 'ibase_num_params',
- 'ibase_param_info',
- 'ibase_pconnect',
- 'ibase_prepare',
- 'ibase_query',
- 'ibase_restore',
- 'ibase_rollback_ret',
- 'ibase_rollback',
- 'ibase_server_info',
- 'ibase_service_attach',
- 'ibase_service_detach',
- 'ibase_set_event_handler',
- 'ibase_trans',
- 'ibase_wait_event'),
- 'Function handling': ('call_user_func_array',
- 'call_user_func',
- 'create_function',
- 'forward_static_call_array',
- 'forward_static_call',
- 'func_get_arg',
- 'func_get_args',
- 'func_num_args',
- 'function_exists',
- 'get_defined_functions',
- 'register_shutdown_function',
- 'register_tick_function',
- 'unregister_tick_function'),
- 'GD and Image': ('gd_info',
- 'getimagesize',
- 'getimagesizefromstring',
- 'image_type_to_extension',
- 'image_type_to_mime_type',
- 'image2wbmp',
- 'imageaffine',
- 'imageaffinematrixconcat',
- 'imageaffinematrixget',
- 'imagealphablending',
- 'imageantialias',
- 'imagearc',
- 'imageavif',
- 'imagebmp',
- 'imagechar',
- 'imagecharup',
- 'imagecolorallocate',
- 'imagecolorallocatealpha',
- 'imagecolorat',
- 'imagecolorclosest',
- 'imagecolorclosestalpha',
- 'imagecolorclosesthwb',
- 'imagecolordeallocate',
- 'imagecolorexact',
- 'imagecolorexactalpha',
- 'imagecolormatch',
- 'imagecolorresolve',
- 'imagecolorresolvealpha',
- 'imagecolorset',
- 'imagecolorsforindex',
- 'imagecolorstotal',
- 'imagecolortransparent',
- 'imageconvolution',
- 'imagecopy',
- 'imagecopymerge',
- 'imagecopymergegray',
- 'imagecopyresampled',
- 'imagecopyresized',
- 'imagecreate',
- 'imagecreatefromavif',
- 'imagecreatefrombmp',
- 'imagecreatefromgd2',
- 'imagecreatefromgd2part',
- 'imagecreatefromgd',
- 'imagecreatefromgif',
- 'imagecreatefromjpeg',
- 'imagecreatefrompng',
- 'imagecreatefromstring',
- 'imagecreatefromtga',
- 'imagecreatefromwbmp',
- 'imagecreatefromwebp',
- 'imagecreatefromxbm',
- 'imagecreatefromxpm',
- 'imagecreatetruecolor',
- 'imagecrop',
- 'imagecropauto',
- 'imagedashedline',
- 'imagedestroy',
- 'imageellipse',
- 'imagefill',
- 'imagefilledarc',
- 'imagefilledellipse',
- 'imagefilledpolygon',
- 'imagefilledrectangle',
- 'imagefilltoborder',
- 'imagefilter',
- 'imageflip',
- 'imagefontheight',
- 'imagefontwidth',
- 'imageftbbox',
- 'imagefttext',
- 'imagegammacorrect',
- 'imagegd2',
- 'imagegd',
- 'imagegetclip',
- 'imagegetinterpolation',
- 'imagegif',
- 'imagegrabscreen',
- 'imagegrabwindow',
- 'imageinterlace',
- 'imageistruecolor',
- 'imagejpeg',
- 'imagelayereffect',
- 'imageline',
- 'imageloadfont',
- 'imageopenpolygon',
- 'imagepalettecopy',
- 'imagepalettetotruecolor',
- 'imagepng',
- 'imagepolygon',
- 'imagerectangle',
- 'imageresolution',
- 'imagerotate',
- 'imagesavealpha',
- 'imagescale',
- 'imagesetbrush',
- 'imagesetclip',
- 'imagesetinterpolation',
- 'imagesetpixel',
- 'imagesetstyle',
- 'imagesetthickness',
- 'imagesettile',
- 'imagestring',
- 'imagestringup',
- 'imagesx',
- 'imagesy',
- 'imagetruecolortopalette',
- 'imagettfbbox',
- 'imagettftext',
- 'imagetypes',
- 'imagewbmp',
- 'imagewebp',
- 'imagexbm',
- 'iptcembed',
- 'iptcparse',
- 'jpeg2wbmp',
- 'png2wbmp'),
- 'GMP': ('gmp_abs',
- 'gmp_add',
- 'gmp_and',
- 'gmp_binomial',
- 'gmp_clrbit',
- 'gmp_cmp',
- 'gmp_com',
- 'gmp_div_q',
- 'gmp_div_qr',
- 'gmp_div_r',
- 'gmp_div',
- 'gmp_divexact',
- 'gmp_export',
- 'gmp_fact',
- 'gmp_gcd',
- 'gmp_gcdext',
- 'gmp_hamdist',
- 'gmp_import',
- 'gmp_init',
- 'gmp_intval',
- 'gmp_invert',
- 'gmp_jacobi',
- 'gmp_kronecker',
- 'gmp_lcm',
- 'gmp_legendre',
- 'gmp_mod',
- 'gmp_mul',
- 'gmp_neg',
- 'gmp_nextprime',
- 'gmp_or',
- 'gmp_perfect_power',
- 'gmp_perfect_square',
- 'gmp_popcount',
- 'gmp_pow',
- 'gmp_powm',
- 'gmp_prob_prime',
- 'gmp_random_bits',
- 'gmp_random_range',
- 'gmp_random_seed',
- 'gmp_random',
- 'gmp_root',
- 'gmp_rootrem',
- 'gmp_scan0',
- 'gmp_scan1',
- 'gmp_setbit',
- 'gmp_sign',
- 'gmp_sqrt',
- 'gmp_sqrtrem',
- 'gmp_strval',
- 'gmp_sub',
- 'gmp_testbit',
- 'gmp_xor'),
- 'GeoIP': ('geoip_asnum_by_name',
- 'geoip_continent_code_by_name',
- 'geoip_country_code_by_name',
- 'geoip_country_code3_by_name',
- 'geoip_country_name_by_name',
- 'geoip_database_info',
- 'geoip_db_avail',
- 'geoip_db_filename',
- 'geoip_db_get_all_info',
- 'geoip_domain_by_name',
- 'geoip_id_by_name',
- 'geoip_isp_by_name',
- 'geoip_netspeedcell_by_name',
- 'geoip_org_by_name',
- 'geoip_record_by_name',
- 'geoip_region_by_name',
- 'geoip_region_name_by_code',
- 'geoip_setup_custom_directory',
- 'geoip_time_zone_by_country_and_region'),
- 'Gettext': ('bind_textdomain_codeset',
- 'bindtextdomain',
- 'dcgettext',
- 'dcngettext',
- 'dgettext',
- 'dngettext',
- 'gettext',
- 'ngettext',
- 'textdomain'),
- 'GnuPG': ('gnupg_adddecryptkey',
- 'gnupg_addencryptkey',
- 'gnupg_addsignkey',
- 'gnupg_cleardecryptkeys',
- 'gnupg_clearencryptkeys',
- 'gnupg_clearsignkeys',
- 'gnupg_decrypt',
- 'gnupg_decryptverify',
- 'gnupg_encrypt',
- 'gnupg_encryptsign',
- 'gnupg_export',
- 'gnupg_getengineinfo',
- 'gnupg_geterror',
- 'gnupg_geterrorinfo',
- 'gnupg_getprotocol',
- 'gnupg_import',
- 'gnupg_init',
- 'gnupg_keyinfo',
- 'gnupg_setarmor',
- 'gnupg_seterrormode',
- 'gnupg_setsignmode',
- 'gnupg_sign',
- 'gnupg_verify'),
- 'Grapheme': ('grapheme_extract',
- 'grapheme_stripos',
- 'grapheme_stristr',
- 'grapheme_strlen',
- 'grapheme_strpos',
- 'grapheme_strripos',
- 'grapheme_strrpos',
- 'grapheme_strstr',
- 'grapheme_substr'),
- 'Hash': ('hash_algos',
- 'hash_copy',
- 'hash_equals',
- 'hash_file',
- 'hash_final',
- 'hash_hkdf',
- 'hash_hmac_algos',
- 'hash_hmac_file',
- 'hash_hmac',
- 'hash_init',
- 'hash_pbkdf2',
- 'hash_update_file',
- 'hash_update_stream',
- 'hash_update',
- 'hash'),
- 'IBM DB2': ('db2_autocommit',
- 'db2_bind_param',
- 'db2_client_info',
- 'db2_close',
- 'db2_column_privileges',
- 'db2_columns',
- 'db2_commit',
- 'db2_conn_error',
- 'db2_conn_errormsg',
- 'db2_connect',
- 'db2_cursor_type',
- 'db2_escape_string',
- 'db2_exec',
- 'db2_execute',
- 'db2_fetch_array',
- 'db2_fetch_assoc',
- 'db2_fetch_both',
- 'db2_fetch_object',
- 'db2_fetch_row',
- 'db2_field_display_size',
- 'db2_field_name',
- 'db2_field_num',
- 'db2_field_precision',
- 'db2_field_scale',
- 'db2_field_type',
- 'db2_field_width',
- 'db2_foreign_keys',
- 'db2_free_result',
- 'db2_free_stmt',
- 'db2_get_option',
- 'db2_last_insert_id',
- 'db2_lob_read',
- 'db2_next_result',
- 'db2_num_fields',
- 'db2_num_rows',
- 'db2_pclose',
- 'db2_pconnect',
- 'db2_prepare',
- 'db2_primary_keys',
- 'db2_procedure_columns',
- 'db2_procedures',
- 'db2_result',
- 'db2_rollback',
- 'db2_server_info',
- 'db2_set_option',
- 'db2_special_columns',
- 'db2_statistics',
- 'db2_stmt_error',
- 'db2_stmt_errormsg',
- 'db2_table_privileges',
- 'db2_tables'),
- 'IDN': ('idn_to_ascii', 'idn_to_utf8'),
- 'IMAP': ('imap_8bit',
- 'imap_alerts',
- 'imap_append',
- 'imap_base64',
- 'imap_binary',
- 'imap_body',
- 'imap_bodystruct',
- 'imap_check',
- 'imap_clearflag_full',
- 'imap_close',
- 'imap_create',
- 'imap_createmailbox',
- 'imap_delete',
- 'imap_deletemailbox',
- 'imap_errors',
- 'imap_expunge',
- 'imap_fetch_overview',
- 'imap_fetchbody',
- 'imap_fetchheader',
- 'imap_fetchmime',
- 'imap_fetchstructure',
- 'imap_fetchtext',
- 'imap_gc',
- 'imap_get_quota',
- 'imap_get_quotaroot',
- 'imap_getacl',
- 'imap_getmailboxes',
- 'imap_getsubscribed',
- 'imap_header',
- 'imap_headerinfo',
- 'imap_headers',
- 'imap_last_error',
- 'imap_list',
- 'imap_listmailbox',
- 'imap_listscan',
- 'imap_listsubscribed',
- 'imap_lsub',
- 'imap_mail_compose',
- 'imap_mail_copy',
- 'imap_mail_move',
- 'imap_mail',
- 'imap_mailboxmsginfo',
- 'imap_mime_header_decode',
- 'imap_msgno',
- 'imap_mutf7_to_utf8',
- 'imap_num_msg',
- 'imap_num_recent',
- 'imap_open',
- 'imap_ping',
- 'imap_qprint',
- 'imap_rename',
- 'imap_renamemailbox',
- 'imap_reopen',
- 'imap_rfc822_parse_adrlist',
- 'imap_rfc822_parse_headers',
- 'imap_rfc822_write_address',
- 'imap_savebody',
- 'imap_scan',
- 'imap_scanmailbox',
- 'imap_search',
- 'imap_set_quota',
- 'imap_setacl',
- 'imap_setflag_full',
- 'imap_sort',
- 'imap_status',
- 'imap_subscribe',
- 'imap_thread',
- 'imap_timeout',
- 'imap_uid',
- 'imap_undelete',
- 'imap_unsubscribe',
- 'imap_utf7_decode',
- 'imap_utf7_encode',
- 'imap_utf8_to_mutf7',
- 'imap_utf8'),
- 'Igbinary': ('igbinary_serialize', 'igbinary_unserialize'),
- 'Inotify': ('inotify_add_watch',
- 'inotify_init',
- 'inotify_queue_len',
- 'inotify_read',
- 'inotify_rm_watch'),
- 'JSON': ('json_decode',
- 'json_encode',
- 'json_last_error_msg',
- 'json_last_error'),
- 'LDAP': ('ldap_8859_to_t61',
- 'ldap_add_ext',
- 'ldap_add',
- 'ldap_bind_ext',
- 'ldap_bind',
- 'ldap_close',
- 'ldap_compare',
- 'ldap_connect',
- 'ldap_control_paged_result_response',
- 'ldap_control_paged_result',
- 'ldap_count_entries',
- 'ldap_count_references',
- 'ldap_delete_ext',
- 'ldap_delete',
- 'ldap_dn2ufn',
- 'ldap_err2str',
- 'ldap_errno',
- 'ldap_error',
- 'ldap_escape',
- 'ldap_exop_passwd',
- 'ldap_exop_refresh',
- 'ldap_exop_whoami',
- 'ldap_exop',
- 'ldap_explode_dn',
- 'ldap_first_attribute',
- 'ldap_first_entry',
- 'ldap_first_reference',
- 'ldap_free_result',
- 'ldap_get_attributes',
- 'ldap_get_dn',
- 'ldap_get_entries',
- 'ldap_get_option',
- 'ldap_get_values_len',
- 'ldap_get_values',
- 'ldap_list',
- 'ldap_mod_add_ext',
- 'ldap_mod_add',
- 'ldap_mod_del_ext',
- 'ldap_mod_del',
- 'ldap_mod_replace_ext',
- 'ldap_mod_replace',
- 'ldap_modify_batch',
- 'ldap_modify',
- 'ldap_next_attribute',
- 'ldap_next_entry',
- 'ldap_next_reference',
- 'ldap_parse_exop',
- 'ldap_parse_reference',
- 'ldap_parse_result',
- 'ldap_read',
- 'ldap_rename_ext',
- 'ldap_rename',
- 'ldap_sasl_bind',
- 'ldap_search',
- 'ldap_set_option',
- 'ldap_set_rebind_proc',
- 'ldap_sort',
- 'ldap_start_tls',
- 'ldap_t61_to_8859',
- 'ldap_unbind'),
- 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
- 'Mail': ('ezmlm_hash', 'mail'),
- 'Mailparse': ('mailparse_determine_best_xfer_encoding',
- 'mailparse_msg_create',
- 'mailparse_msg_extract_part_file',
- 'mailparse_msg_extract_part',
- 'mailparse_msg_extract_whole_part_file',
- 'mailparse_msg_free',
- 'mailparse_msg_get_part_data',
- 'mailparse_msg_get_part',
- 'mailparse_msg_get_structure',
- 'mailparse_msg_parse_file',
- 'mailparse_msg_parse',
- 'mailparse_rfc822_parse_addresses',
- 'mailparse_stream_encode',
- 'mailparse_uudecode_all'),
- 'Math': ('abs',
- 'acos',
- 'acosh',
- 'asin',
- 'asinh',
- 'atan2',
- 'atan',
- 'atanh',
- 'base_convert',
- 'bindec',
- 'ceil',
- 'cos',
- 'cosh',
- 'decbin',
- 'dechex',
- 'decoct',
- 'deg2rad',
- 'exp',
- 'expm1',
- 'fdiv',
- 'floor',
- 'fmod',
- 'getrandmax',
- 'hexdec',
- 'hypot',
- 'intdiv',
- 'is_finite',
- 'is_infinite',
- 'is_nan',
- 'lcg_value',
- 'log10',
- 'log1p',
- 'log',
- 'max',
- 'min',
- 'mt_getrandmax',
- 'mt_rand',
- 'mt_srand',
- 'octdec',
- 'pi',
- 'pow',
- 'rad2deg',
- 'rand',
- 'round',
- 'sin',
- 'sinh',
- 'sqrt',
- 'srand',
- 'tan',
- 'tanh'),
- 'Mcrypt': ('mcrypt_create_iv',
- 'mcrypt_decrypt',
- 'mcrypt_enc_get_algorithms_name',
- 'mcrypt_enc_get_block_size',
- 'mcrypt_enc_get_iv_size',
- 'mcrypt_enc_get_key_size',
- 'mcrypt_enc_get_modes_name',
- 'mcrypt_enc_get_supported_key_sizes',
- 'mcrypt_enc_is_block_algorithm_mode',
- 'mcrypt_enc_is_block_algorithm',
- 'mcrypt_enc_is_block_mode',
- 'mcrypt_enc_self_test',
- 'mcrypt_encrypt',
- 'mcrypt_generic_deinit',
- 'mcrypt_generic_init',
- 'mcrypt_generic',
- 'mcrypt_get_block_size',
- 'mcrypt_get_cipher_name',
- 'mcrypt_get_iv_size',
- 'mcrypt_get_key_size',
- 'mcrypt_list_algorithms',
- 'mcrypt_list_modes',
- 'mcrypt_module_close',
- 'mcrypt_module_get_algo_block_size',
- 'mcrypt_module_get_algo_key_size',
- 'mcrypt_module_get_supported_key_sizes',
- 'mcrypt_module_is_block_algorithm_mode',
- 'mcrypt_module_is_block_algorithm',
- 'mcrypt_module_is_block_mode',
- 'mcrypt_module_open',
- 'mcrypt_module_self_test',
- 'mdecrypt_generic'),
- 'Memcache': ('memcache_debug',),
- 'Mhash': ('mhash_count',
- 'mhash_get_block_size',
- 'mhash_get_hash_name',
- 'mhash_keygen_s2k',
- 'mhash'),
- 'Misc.': ('connection_aborted',
- 'connection_status',
- 'constant',
- 'define',
- 'defined',
- 'die',
- 'eval',
- 'exit',
- 'get_browser',
- '__halt_compiler',
- 'highlight_file',
- 'highlight_string',
- 'hrtime',
- 'ignore_user_abort',
- 'pack',
- 'php_strip_whitespace',
- 'sapi_windows_cp_conv',
- 'sapi_windows_cp_get',
- 'sapi_windows_cp_is_utf8',
- 'sapi_windows_cp_set',
- 'sapi_windows_generate_ctrl_event',
- 'sapi_windows_set_ctrl_handler',
- 'sapi_windows_vt100_support',
- 'show_source',
- 'sleep',
- 'sys_getloadavg',
- 'time_nanosleep',
- 'time_sleep_until',
- 'uniqid',
- 'unpack',
- 'usleep'),
- 'Multibyte String': ('mb_check_encoding',
- 'mb_chr',
- 'mb_convert_case',
- 'mb_convert_encoding',
- 'mb_convert_kana',
- 'mb_convert_variables',
- 'mb_decode_mimeheader',
- 'mb_decode_numericentity',
- 'mb_detect_encoding',
- 'mb_detect_order',
- 'mb_encode_mimeheader',
- 'mb_encode_numericentity',
- 'mb_encoding_aliases',
- 'mb_ereg_match',
- 'mb_ereg_replace_callback',
- 'mb_ereg_replace',
- 'mb_ereg_search_getpos',
- 'mb_ereg_search_getregs',
- 'mb_ereg_search_init',
- 'mb_ereg_search_pos',
- 'mb_ereg_search_regs',
- 'mb_ereg_search_setpos',
- 'mb_ereg_search',
- 'mb_ereg',
- 'mb_eregi_replace',
- 'mb_eregi',
- 'mb_get_info',
- 'mb_http_input',
- 'mb_http_output',
- 'mb_internal_encoding',
- 'mb_language',
- 'mb_list_encodings',
- 'mb_ord',
- 'mb_output_handler',
- 'mb_parse_str',
- 'mb_preferred_mime_name',
- 'mb_regex_encoding',
- 'mb_regex_set_options',
- 'mb_scrub',
- 'mb_send_mail',
- 'mb_split',
- 'mb_str_split',
- 'mb_strcut',
- 'mb_strimwidth',
- 'mb_stripos',
- 'mb_stristr',
- 'mb_strlen',
- 'mb_strpos',
- 'mb_strrchr',
- 'mb_strrichr',
- 'mb_strripos',
- 'mb_strrpos',
- 'mb_strstr',
- 'mb_strtolower',
- 'mb_strtoupper',
- 'mb_strwidth',
- 'mb_substitute_character',
- 'mb_substr_count',
- 'mb_substr'),
- 'MySQL': ('mysql_affected_rows',
- 'mysql_client_encoding',
- 'mysql_close',
- 'mysql_connect',
- 'mysql_create_db',
- 'mysql_data_seek',
- 'mysql_db_name',
- 'mysql_db_query',
- 'mysql_drop_db',
- 'mysql_errno',
- 'mysql_error',
- 'mysql_escape_string',
- 'mysql_fetch_array',
- 'mysql_fetch_assoc',
- 'mysql_fetch_field',
- 'mysql_fetch_lengths',
- 'mysql_fetch_object',
- 'mysql_fetch_row',
- 'mysql_field_flags',
- 'mysql_field_len',
- 'mysql_field_name',
- 'mysql_field_seek',
- 'mysql_field_table',
- 'mysql_field_type',
- 'mysql_free_result',
- 'mysql_get_client_info',
- 'mysql_get_host_info',
- 'mysql_get_proto_info',
- 'mysql_get_server_info',
- 'mysql_info',
- 'mysql_insert_id',
- 'mysql_list_dbs',
- 'mysql_list_fields',
- 'mysql_list_processes',
- 'mysql_list_tables',
- 'mysql_num_fields',
- 'mysql_num_rows',
- 'mysql_pconnect',
- 'mysql_ping',
- 'mysql_query',
- 'mysql_real_escape_string',
- 'mysql_result',
- 'mysql_select_db',
- 'mysql_set_charset',
- 'mysql_stat',
- 'mysql_tablename',
- 'mysql_thread_id',
- 'mysql_unbuffered_query'),
- 'Mysql_xdevapi': ('expression', 'getSession'),
- 'Network': ('checkdnsrr',
- 'closelog',
- 'dns_check_record',
- 'dns_get_mx',
- 'dns_get_record',
- 'fsockopen',
- 'gethostbyaddr',
- 'gethostbyname',
- 'gethostbynamel',
- 'gethostname',
- 'getmxrr',
- 'getprotobyname',
- 'getprotobynumber',
- 'getservbyname',
- 'getservbyport',
- 'header_register_callback',
- 'header_remove',
- 'header',
- 'headers_list',
- 'headers_sent',
- 'http_response_code',
- 'inet_ntop',
- 'inet_pton',
- 'ip2long',
- 'long2ip',
- 'net_get_interfaces',
- 'openlog',
- 'pfsockopen',
- 'setcookie',
- 'setrawcookie',
- 'socket_get_status',
- 'socket_set_blocking',
- 'socket_set_timeout',
- 'syslog'),
- 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
- 'OCI8': ('oci_bind_array_by_name',
- 'oci_bind_by_name',
- 'oci_cancel',
- 'oci_client_version',
- 'oci_close',
- 'oci_commit',
- 'oci_connect',
- 'oci_define_by_name',
- 'oci_error',
- 'oci_execute',
- 'oci_fetch_all',
- 'oci_fetch_array',
- 'oci_fetch_assoc',
- 'oci_fetch_object',
- 'oci_fetch_row',
- 'oci_fetch',
- 'oci_field_is_null',
- 'oci_field_name',
- 'oci_field_precision',
- 'oci_field_scale',
- 'oci_field_size',
- 'oci_field_type_raw',
- 'oci_field_type',
- 'oci_free_descriptor',
- 'oci_free_statement',
- 'oci_get_implicit_resultset',
- 'oci_lob_copy',
- 'oci_lob_is_equal',
- 'oci_new_collection',
- 'oci_new_connect',
- 'oci_new_cursor',
- 'oci_new_descriptor',
- 'oci_num_fields',
- 'oci_num_rows',
- 'oci_parse',
- 'oci_password_change',
- 'oci_pconnect',
- 'oci_register_taf_callback',
- 'oci_result',
- 'oci_rollback',
- 'oci_server_version',
- 'oci_set_action',
- 'oci_set_call_timeout',
- 'oci_set_client_identifier',
- 'oci_set_client_info',
- 'oci_set_db_operation',
- 'oci_set_edition',
- 'oci_set_module_name',
- 'oci_set_prefetch_lob',
- 'oci_set_prefetch',
- 'oci_statement_type',
- 'oci_unregister_taf_callback'),
- 'ODBC': ('odbc_autocommit',
- 'odbc_binmode',
- 'odbc_close_all',
- 'odbc_close',
- 'odbc_columnprivileges',
- 'odbc_columns',
- 'odbc_commit',
- 'odbc_connect',
- 'odbc_cursor',
- 'odbc_data_source',
- 'odbc_do',
- 'odbc_error',
- 'odbc_errormsg',
- 'odbc_exec',
- 'odbc_execute',
- 'odbc_fetch_array',
- 'odbc_fetch_into',
- 'odbc_fetch_object',
- 'odbc_fetch_row',
- 'odbc_field_len',
- 'odbc_field_name',
- 'odbc_field_num',
- 'odbc_field_precision',
- 'odbc_field_scale',
- 'odbc_field_type',
- 'odbc_foreignkeys',
- 'odbc_free_result',
- 'odbc_gettypeinfo',
- 'odbc_longreadlen',
- 'odbc_next_result',
- 'odbc_num_fields',
- 'odbc_num_rows',
- 'odbc_pconnect',
- 'odbc_prepare',
- 'odbc_primarykeys',
- 'odbc_procedurecolumns',
- 'odbc_procedures',
- 'odbc_result_all',
- 'odbc_result',
- 'odbc_rollback',
- 'odbc_setoption',
- 'odbc_specialcolumns',
- 'odbc_statistics',
- 'odbc_tableprivileges',
- 'odbc_tables'),
- 'OPcache': ('opcache_compile_file',
- 'opcache_get_configuration',
- 'opcache_get_status',
- 'opcache_invalidate',
- 'opcache_is_script_cached',
- 'opcache_reset'),
- 'OpenAL': ('openal_buffer_create',
- 'openal_buffer_data',
- 'openal_buffer_destroy',
- 'openal_buffer_get',
- 'openal_buffer_loadwav',
- 'openal_context_create',
- 'openal_context_current',
- 'openal_context_destroy',
- 'openal_context_process',
- 'openal_context_suspend',
- 'openal_device_close',
- 'openal_device_open',
- 'openal_listener_get',
- 'openal_listener_set',
- 'openal_source_create',
- 'openal_source_destroy',
- 'openal_source_get',
- 'openal_source_pause',
- 'openal_source_play',
- 'openal_source_rewind',
- 'openal_source_set',
- 'openal_source_stop',
- 'openal_stream'),
- 'OpenSSL': ('openssl_cipher_iv_length',
- 'openssl_cms_decrypt',
- 'openssl_cms_encrypt',
- 'openssl_cms_read',
- 'openssl_cms_sign',
- 'openssl_cms_verify',
- 'openssl_csr_export_to_file',
- 'openssl_csr_export',
- 'openssl_csr_get_public_key',
- 'openssl_csr_get_subject',
- 'openssl_csr_new',
- 'openssl_csr_sign',
- 'openssl_decrypt',
- 'openssl_dh_compute_key',
- 'openssl_digest',
- 'openssl_encrypt',
- 'openssl_error_string',
- 'openssl_free_key',
- 'openssl_get_cert_locations',
- 'openssl_get_cipher_methods',
- 'openssl_get_curve_names',
- 'openssl_get_md_methods',
- 'openssl_get_privatekey',
- 'openssl_get_publickey',
- 'openssl_open',
- 'openssl_pbkdf2',
- 'openssl_pkcs12_export_to_file',
- 'openssl_pkcs12_export',
- 'openssl_pkcs12_read',
- 'openssl_pkcs7_decrypt',
- 'openssl_pkcs7_encrypt',
- 'openssl_pkcs7_read',
- 'openssl_pkcs7_sign',
- 'openssl_pkcs7_verify',
- 'openssl_pkey_derive',
- 'openssl_pkey_export_to_file',
- 'openssl_pkey_export',
- 'openssl_pkey_free',
- 'openssl_pkey_get_details',
- 'openssl_pkey_get_private',
- 'openssl_pkey_get_public',
- 'openssl_pkey_new',
- 'openssl_private_decrypt',
- 'openssl_private_encrypt',
- 'openssl_public_decrypt',
- 'openssl_public_encrypt',
- 'openssl_random_pseudo_bytes',
- 'openssl_seal',
- 'openssl_sign',
- 'openssl_spki_export_challenge',
- 'openssl_spki_export',
- 'openssl_spki_new',
- 'openssl_spki_verify',
- 'openssl_verify',
- 'openssl_x509_check_private_key',
- 'openssl_x509_checkpurpose',
- 'openssl_x509_export_to_file',
- 'openssl_x509_export',
- 'openssl_x509_fingerprint',
- 'openssl_x509_free',
- 'openssl_x509_parse',
- 'openssl_x509_read',
- 'openssl_x509_verify'),
- 'Output Control': ('flush',
- 'ob_clean',
- 'ob_end_clean',
- 'ob_end_flush',
- 'ob_flush',
- 'ob_get_clean',
- 'ob_get_contents',
- 'ob_get_flush',
- 'ob_get_length',
- 'ob_get_level',
- 'ob_get_status',
- 'ob_gzhandler',
- 'ob_implicit_flush',
- 'ob_list_handlers',
- 'ob_start',
- 'output_add_rewrite_var',
- 'output_reset_rewrite_vars'),
- 'PCNTL': ('pcntl_alarm',
- 'pcntl_async_signals',
- 'pcntl_errno',
- 'pcntl_exec',
- 'pcntl_fork',
- 'pcntl_get_last_error',
- 'pcntl_getpriority',
- 'pcntl_setpriority',
- 'pcntl_signal_dispatch',
- 'pcntl_signal_get_handler',
- 'pcntl_signal',
- 'pcntl_sigprocmask',
- 'pcntl_sigtimedwait',
- 'pcntl_sigwaitinfo',
- 'pcntl_strerror',
- 'pcntl_wait',
- 'pcntl_waitpid',
- 'pcntl_wexitstatus',
- 'pcntl_wifexited',
- 'pcntl_wifsignaled',
- 'pcntl_wifstopped',
- 'pcntl_wstopsig',
- 'pcntl_wtermsig'),
- 'PCRE': ('preg_filter',
- 'preg_grep',
- 'preg_last_error_msg',
- 'preg_last_error',
- 'preg_match_all',
- 'preg_match',
- 'preg_quote',
- 'preg_replace_callback_array',
- 'preg_replace_callback',
- 'preg_replace',
- 'preg_split'),
- 'PHP Options/Info': ('assert_options',
- 'assert',
- 'cli_get_process_title',
- 'cli_set_process_title',
- 'dl',
- 'extension_loaded',
- 'gc_collect_cycles',
- 'gc_disable',
- 'gc_enable',
- 'gc_enabled',
- 'gc_mem_caches',
- 'gc_status',
- 'get_cfg_var',
- 'get_current_user',
- 'get_defined_constants',
- 'get_extension_funcs',
- 'get_include_path',
- 'get_included_files',
- 'get_loaded_extensions',
- 'get_magic_quotes_gpc',
- 'get_magic_quotes_runtime',
- 'get_required_files',
- 'get_resources',
- 'getenv',
- 'getlastmod',
- 'getmygid',
- 'getmyinode',
- 'getmypid',
- 'getmyuid',
- 'getopt',
- 'getrusage',
- 'ini_alter',
- 'ini_get_all',
- 'ini_get',
- 'ini_restore',
- 'ini_set',
- 'memory_get_peak_usage',
- 'memory_get_usage',
- 'php_ini_loaded_file',
- 'php_ini_scanned_files',
- 'php_sapi_name',
- 'php_uname',
- 'phpcredits',
- 'phpinfo',
- 'phpversion',
- 'putenv',
- 'restore_include_path',
- 'set_include_path',
- 'set_time_limit',
- 'sys_get_temp_dir',
- 'version_compare',
- 'zend_thread_id',
- 'zend_version'),
- 'POSIX': ('posix_access',
- 'posix_ctermid',
- 'posix_errno',
- 'posix_get_last_error',
- 'posix_getcwd',
- 'posix_getegid',
- 'posix_geteuid',
- 'posix_getgid',
- 'posix_getgrgid',
- 'posix_getgrnam',
- 'posix_getgroups',
- 'posix_getlogin',
- 'posix_getpgid',
- 'posix_getpgrp',
- 'posix_getpid',
- 'posix_getppid',
- 'posix_getpwnam',
- 'posix_getpwuid',
- 'posix_getrlimit',
- 'posix_getsid',
- 'posix_getuid',
- 'posix_initgroups',
- 'posix_isatty',
- 'posix_kill',
- 'posix_mkfifo',
- 'posix_mknod',
- 'posix_setegid',
- 'posix_seteuid',
- 'posix_setgid',
- 'posix_setpgid',
- 'posix_setrlimit',
- 'posix_setsid',
- 'posix_setuid',
- 'posix_strerror',
- 'posix_times',
- 'posix_ttyname',
- 'posix_uname'),
- 'PS': ('ps_add_bookmark',
- 'ps_add_launchlink',
- 'ps_add_locallink',
- 'ps_add_note',
- 'ps_add_pdflink',
- 'ps_add_weblink',
- 'ps_arc',
- 'ps_arcn',
- 'ps_begin_page',
- 'ps_begin_pattern',
- 'ps_begin_template',
- 'ps_circle',
- 'ps_clip',
- 'ps_close_image',
- 'ps_close',
- 'ps_closepath_stroke',
- 'ps_closepath',
- 'ps_continue_text',
- 'ps_curveto',
- 'ps_delete',
- 'ps_end_page',
- 'ps_end_pattern',
- 'ps_end_template',
- 'ps_fill_stroke',
- 'ps_fill',
- 'ps_findfont',
- 'ps_get_buffer',
- 'ps_get_parameter',
- 'ps_get_value',
- 'ps_hyphenate',
- 'ps_include_file',
- 'ps_lineto',
- 'ps_makespotcolor',
- 'ps_moveto',
- 'ps_new',
- 'ps_open_file',
- 'ps_open_image_file',
- 'ps_open_image',
- 'ps_open_memory_image',
- 'ps_place_image',
- 'ps_rect',
- 'ps_restore',
- 'ps_rotate',
- 'ps_save',
- 'ps_scale',
- 'ps_set_border_color',
- 'ps_set_border_dash',
- 'ps_set_border_style',
- 'ps_set_info',
- 'ps_set_parameter',
- 'ps_set_text_pos',
- 'ps_set_value',
- 'ps_setcolor',
- 'ps_setdash',
- 'ps_setflat',
- 'ps_setfont',
- 'ps_setgray',
- 'ps_setlinecap',
- 'ps_setlinejoin',
- 'ps_setlinewidth',
- 'ps_setmiterlimit',
- 'ps_setoverprintmode',
- 'ps_setpolydash',
- 'ps_shading_pattern',
- 'ps_shading',
- 'ps_shfill',
- 'ps_show_boxed',
- 'ps_show_xy2',
- 'ps_show_xy',
- 'ps_show2',
- 'ps_show',
- 'ps_string_geometry',
- 'ps_stringwidth',
- 'ps_stroke',
- 'ps_symbol_name',
- 'ps_symbol_width',
- 'ps_symbol',
- 'ps_translate'),
- 'Password Hashing': ('password_algos',
- 'password_get_info',
- 'password_hash',
- 'password_needs_rehash',
- 'password_verify'),
- 'PostgreSQL': ('pg_affected_rows',
- 'pg_cancel_query',
- 'pg_client_encoding',
- 'pg_close',
- 'pg_connect_poll',
- 'pg_connect',
- 'pg_connection_busy',
- 'pg_connection_reset',
- 'pg_connection_status',
- 'pg_consume_input',
- 'pg_convert',
- 'pg_copy_from',
- 'pg_copy_to',
- 'pg_dbname',
- 'pg_delete',
- 'pg_end_copy',
- 'pg_escape_bytea',
- 'pg_escape_identifier',
- 'pg_escape_literal',
- 'pg_escape_string',
- 'pg_execute',
- 'pg_fetch_all_columns',
- 'pg_fetch_all',
- 'pg_fetch_array',
- 'pg_fetch_assoc',
- 'pg_fetch_object',
- 'pg_fetch_result',
- 'pg_fetch_row',
- 'pg_field_is_null',
- 'pg_field_name',
- 'pg_field_num',
- 'pg_field_prtlen',
- 'pg_field_size',
- 'pg_field_table',
- 'pg_field_type_oid',
- 'pg_field_type',
- 'pg_flush',
- 'pg_free_result',
- 'pg_get_notify',
- 'pg_get_pid',
- 'pg_get_result',
- 'pg_host',
- 'pg_insert',
- 'pg_last_error',
- 'pg_last_notice',
- 'pg_last_oid',
- 'pg_lo_close',
- 'pg_lo_create',
- 'pg_lo_export',
- 'pg_lo_import',
- 'pg_lo_open',
- 'pg_lo_read_all',
- 'pg_lo_read',
- 'pg_lo_seek',
- 'pg_lo_tell',
- 'pg_lo_truncate',
- 'pg_lo_unlink',
- 'pg_lo_write',
- 'pg_meta_data',
- 'pg_num_fields',
- 'pg_num_rows',
- 'pg_options',
- 'pg_parameter_status',
- 'pg_pconnect',
- 'pg_ping',
- 'pg_port',
- 'pg_prepare',
- 'pg_put_line',
- 'pg_query_params',
- 'pg_query',
- 'pg_result_error_field',
- 'pg_result_error',
- 'pg_result_seek',
- 'pg_result_status',
- 'pg_select',
- 'pg_send_execute',
- 'pg_send_prepare',
- 'pg_send_query_params',
- 'pg_send_query',
- 'pg_set_client_encoding',
- 'pg_set_error_verbosity',
- 'pg_socket',
- 'pg_trace',
- 'pg_transaction_status',
- 'pg_tty',
- 'pg_unescape_bytea',
- 'pg_untrace',
- 'pg_update',
- 'pg_version'),
- 'Program execution': ('escapeshellarg',
- 'escapeshellcmd',
- 'exec',
- 'passthru',
- 'proc_close',
- 'proc_get_status',
- 'proc_nice',
- 'proc_open',
- 'proc_terminate',
- 'shell_exec',
- 'system'),
- 'Pspell': ('pspell_add_to_personal',
- 'pspell_add_to_session',
- 'pspell_check',
- 'pspell_clear_session',
- 'pspell_config_create',
- 'pspell_config_data_dir',
- 'pspell_config_dict_dir',
- 'pspell_config_ignore',
- 'pspell_config_mode',
- 'pspell_config_personal',
- 'pspell_config_repl',
- 'pspell_config_runtogether',
- 'pspell_config_save_repl',
- 'pspell_new_config',
- 'pspell_new_personal',
- 'pspell_new',
- 'pspell_save_wordlist',
- 'pspell_store_replacement',
- 'pspell_suggest'),
- 'RRD': ('rrd_create',
- 'rrd_error',
- 'rrd_fetch',
- 'rrd_first',
- 'rrd_graph',
- 'rrd_info',
- 'rrd_last',
- 'rrd_lastupdate',
- 'rrd_restore',
- 'rrd_tune',
- 'rrd_update',
- 'rrd_version',
- 'rrd_xport',
- 'rrdc_disconnect'),
- 'Radius': ('radius_acct_open',
- 'radius_add_server',
- 'radius_auth_open',
- 'radius_close',
- 'radius_config',
- 'radius_create_request',
- 'radius_cvt_addr',
- 'radius_cvt_int',
- 'radius_cvt_string',
- 'radius_demangle_mppe_key',
- 'radius_demangle',
- 'radius_get_attr',
- 'radius_get_tagged_attr_data',
- 'radius_get_tagged_attr_tag',
- 'radius_get_vendor_attr',
- 'radius_put_addr',
- 'radius_put_attr',
- 'radius_put_int',
- 'radius_put_string',
- 'radius_put_vendor_addr',
- 'radius_put_vendor_attr',
- 'radius_put_vendor_int',
- 'radius_put_vendor_string',
- 'radius_request_authenticator',
- 'radius_salt_encrypt_attr',
- 'radius_send_request',
- 'radius_server_secret',
- 'radius_strerror'),
- 'Rar': ('rar_wrapper_cache_stats',),
- 'Readline': ('readline_add_history',
- 'readline_callback_handler_install',
- 'readline_callback_handler_remove',
- 'readline_callback_read_char',
- 'readline_clear_history',
- 'readline_completion_function',
- 'readline_info',
- 'readline_list_history',
- 'readline_on_new_line',
- 'readline_read_history',
- 'readline_redisplay',
- 'readline_write_history',
- 'readline'),
- 'Recode': ('recode_file', 'recode_string', 'recode'),
- 'RpmInfo': ('rpmaddtag', 'rpmdbinfo', 'rpmdbsearch', 'rpminfo', 'rpmvercmp'),
- 'SNMP': ('snmp_get_quick_print',
- 'snmp_get_valueretrieval',
- 'snmp_read_mib',
- 'snmp_set_enum_print',
- 'snmp_set_oid_numeric_print',
- 'snmp_set_oid_output_format',
- 'snmp_set_quick_print',
- 'snmp_set_valueretrieval',
- 'snmp2_get',
- 'snmp2_getnext',
- 'snmp2_real_walk',
- 'snmp2_set',
- 'snmp2_walk',
- 'snmp3_get',
- 'snmp3_getnext',
- 'snmp3_real_walk',
- 'snmp3_set',
- 'snmp3_walk',
- 'snmpget',
- 'snmpgetnext',
- 'snmprealwalk',
- 'snmpset',
- 'snmpwalk',
- 'snmpwalkoid'),
- 'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
- 'SPL': ('class_implements',
- 'class_parents',
- 'class_uses',
- 'iterator_apply',
- 'iterator_count',
- 'iterator_to_array',
- 'spl_autoload_call',
- 'spl_autoload_extensions',
- 'spl_autoload_functions',
- 'spl_autoload_register',
- 'spl_autoload_unregister',
- 'spl_autoload',
- 'spl_classes',
- 'spl_object_hash',
- 'spl_object_id'),
- 'SQLSRV': ('sqlsrv_begin_transaction',
- 'sqlsrv_cancel',
- 'sqlsrv_client_info',
- 'sqlsrv_close',
- 'sqlsrv_commit',
- 'sqlsrv_configure',
- 'sqlsrv_connect',
- 'sqlsrv_errors',
- 'sqlsrv_execute',
- 'sqlsrv_fetch_array',
- 'sqlsrv_fetch_object',
- 'sqlsrv_fetch',
- 'sqlsrv_field_metadata',
- 'sqlsrv_free_stmt',
- 'sqlsrv_get_config',
- 'sqlsrv_get_field',
- 'sqlsrv_has_rows',
- 'sqlsrv_next_result',
- 'sqlsrv_num_fields',
- 'sqlsrv_num_rows',
- 'sqlsrv_prepare',
- 'sqlsrv_query',
- 'sqlsrv_rollback',
- 'sqlsrv_rows_affected',
- 'sqlsrv_send_stream_data',
- 'sqlsrv_server_info'),
- 'SSH2': ('ssh2_auth_agent',
- 'ssh2_auth_hostbased_file',
- 'ssh2_auth_none',
- 'ssh2_auth_password',
- 'ssh2_auth_pubkey_file',
- 'ssh2_connect',
- 'ssh2_disconnect',
- 'ssh2_exec',
- 'ssh2_fetch_stream',
- 'ssh2_fingerprint',
- 'ssh2_forward_accept',
- 'ssh2_forward_listen',
- 'ssh2_methods_negotiated',
- 'ssh2_poll',
- 'ssh2_publickey_add',
- 'ssh2_publickey_init',
- 'ssh2_publickey_list',
- 'ssh2_publickey_remove',
- 'ssh2_scp_recv',
- 'ssh2_scp_send',
- 'ssh2_send_eof',
- 'ssh2_sftp_chmod',
- 'ssh2_sftp_lstat',
- 'ssh2_sftp_mkdir',
- 'ssh2_sftp_readlink',
- 'ssh2_sftp_realpath',
- 'ssh2_sftp_rename',
- 'ssh2_sftp_rmdir',
- 'ssh2_sftp_stat',
- 'ssh2_sftp_symlink',
- 'ssh2_sftp_unlink',
- 'ssh2_sftp',
- 'ssh2_shell',
- 'ssh2_tunnel'),
- 'SVN': ('svn_add',
- 'svn_auth_get_parameter',
- 'svn_auth_set_parameter',
- 'svn_blame',
- 'svn_cat',
- 'svn_checkout',
- 'svn_cleanup',
- 'svn_client_version',
- 'svn_commit',
- 'svn_delete',
- 'svn_diff',
- 'svn_export',
- 'svn_fs_abort_txn',
- 'svn_fs_apply_text',
- 'svn_fs_begin_txn2',
- 'svn_fs_change_node_prop',
- 'svn_fs_check_path',
- 'svn_fs_contents_changed',
- 'svn_fs_copy',
- 'svn_fs_delete',
- 'svn_fs_dir_entries',
- 'svn_fs_file_contents',
- 'svn_fs_file_length',
- 'svn_fs_is_dir',
- 'svn_fs_is_file',
- 'svn_fs_make_dir',
- 'svn_fs_make_file',
- 'svn_fs_node_created_rev',
- 'svn_fs_node_prop',
- 'svn_fs_props_changed',
- 'svn_fs_revision_prop',
- 'svn_fs_revision_root',
- 'svn_fs_txn_root',
- 'svn_fs_youngest_rev',
- 'svn_import',
- 'svn_log',
- 'svn_ls',
- 'svn_mkdir',
- 'svn_repos_create',
- 'svn_repos_fs_begin_txn_for_commit',
- 'svn_repos_fs_commit_txn',
- 'svn_repos_fs',
- 'svn_repos_hotcopy',
- 'svn_repos_open',
- 'svn_repos_recover',
- 'svn_revert',
- 'svn_status',
- 'svn_update'),
- 'Scoutapm': ('scoutapm_get_calls', 'scoutapm_list_instrumented_functions'),
- 'Seaslog': ('seaslog_get_author', 'seaslog_get_version'),
- 'Semaphore': ('ftok',
- 'msg_get_queue',
- 'msg_queue_exists',
- 'msg_receive',
- 'msg_remove_queue',
- 'msg_send',
- 'msg_set_queue',
- 'msg_stat_queue',
- 'sem_acquire',
- 'sem_get',
- 'sem_release',
- 'sem_remove',
- 'shm_attach',
- 'shm_detach',
- 'shm_get_var',
- 'shm_has_var',
- 'shm_put_var',
- 'shm_remove_var',
- 'shm_remove'),
- 'Session': ('session_abort',
- 'session_cache_expire',
- 'session_cache_limiter',
- 'session_commit',
- 'session_create_id',
- 'session_decode',
- 'session_destroy',
- 'session_encode',
- 'session_gc',
- 'session_get_cookie_params',
- 'session_id',
- 'session_module_name',
- 'session_name',
- 'session_regenerate_id',
- 'session_register_shutdown',
- 'session_reset',
- 'session_save_path',
- 'session_set_cookie_params',
- 'session_set_save_handler',
- 'session_start',
- 'session_status',
- 'session_unset',
- 'session_write_close'),
- 'Shared Memory': ('shmop_close',
- 'shmop_delete',
- 'shmop_open',
- 'shmop_read',
- 'shmop_size',
- 'shmop_write'),
- 'SimpleXML': ('simplexml_import_dom',
- 'simplexml_load_file',
- 'simplexml_load_string'),
- 'Socket': ('socket_accept',
- 'socket_addrinfo_bind',
- 'socket_addrinfo_connect',
- 'socket_addrinfo_explain',
- 'socket_addrinfo_lookup',
- 'socket_bind',
- 'socket_clear_error',
- 'socket_close',
- 'socket_cmsg_space',
- 'socket_connect',
- 'socket_create_listen',
- 'socket_create_pair',
- 'socket_create',
- 'socket_export_stream',
- 'socket_get_option',
- 'socket_getopt',
- 'socket_getpeername',
- 'socket_getsockname',
- 'socket_import_stream',
- 'socket_last_error',
- 'socket_listen',
- 'socket_read',
- 'socket_recv',
- 'socket_recvfrom',
- 'socket_recvmsg',
- 'socket_select',
- 'socket_send',
- 'socket_sendmsg',
- 'socket_sendto',
- 'socket_set_block',
- 'socket_set_nonblock',
- 'socket_set_option',
- 'socket_setopt',
- 'socket_shutdown',
- 'socket_strerror',
- 'socket_write',
- 'socket_wsaprotocol_info_export',
- 'socket_wsaprotocol_info_import',
- 'socket_wsaprotocol_info_release'),
- 'Sodium': ('sodium_add',
- 'sodium_base642bin',
- 'sodium_bin2base64',
- 'sodium_bin2hex',
- 'sodium_compare',
- 'sodium_crypto_aead_aes256gcm_decrypt',
- 'sodium_crypto_aead_aes256gcm_encrypt',
- 'sodium_crypto_aead_aes256gcm_is_available',
- 'sodium_crypto_aead_aes256gcm_keygen',
- 'sodium_crypto_aead_chacha20poly1305_decrypt',
- 'sodium_crypto_aead_chacha20poly1305_encrypt',
- 'sodium_crypto_aead_chacha20poly1305_ietf_decrypt',
- 'sodium_crypto_aead_chacha20poly1305_ietf_encrypt',
- 'sodium_crypto_aead_chacha20poly1305_ietf_keygen',
- 'sodium_crypto_aead_chacha20poly1305_keygen',
- 'sodium_crypto_aead_xchacha20poly1305_ietf_decrypt',
- 'sodium_crypto_aead_xchacha20poly1305_ietf_encrypt',
- 'sodium_crypto_aead_xchacha20poly1305_ietf_keygen',
- 'sodium_crypto_auth_keygen',
- 'sodium_crypto_auth_verify',
- 'sodium_crypto_auth',
- 'sodium_crypto_box_keypair_from_secretkey_and_publickey',
- 'sodium_crypto_box_keypair',
- 'sodium_crypto_box_open',
- 'sodium_crypto_box_publickey_from_secretkey',
- 'sodium_crypto_box_publickey',
- 'sodium_crypto_box_seal_open',
- 'sodium_crypto_box_seal',
- 'sodium_crypto_box_secretkey',
- 'sodium_crypto_box_seed_keypair',
- 'sodium_crypto_box',
- 'sodium_crypto_generichash_final',
- 'sodium_crypto_generichash_init',
- 'sodium_crypto_generichash_keygen',
- 'sodium_crypto_generichash_update',
- 'sodium_crypto_generichash',
- 'sodium_crypto_kdf_derive_from_key',
- 'sodium_crypto_kdf_keygen',
- 'sodium_crypto_kx_client_session_keys',
- 'sodium_crypto_kx_keypair',
- 'sodium_crypto_kx_publickey',
- 'sodium_crypto_kx_secretkey',
- 'sodium_crypto_kx_seed_keypair',
- 'sodium_crypto_kx_server_session_keys',
- 'sodium_crypto_pwhash_scryptsalsa208sha256_str_verify',
- 'sodium_crypto_pwhash_scryptsalsa208sha256_str',
- 'sodium_crypto_pwhash_scryptsalsa208sha256',
- 'sodium_crypto_pwhash_str_needs_rehash',
- 'sodium_crypto_pwhash_str_verify',
- 'sodium_crypto_pwhash_str',
- 'sodium_crypto_pwhash',
- 'sodium_crypto_scalarmult_base',
- 'sodium_crypto_scalarmult',
- 'sodium_crypto_secretbox_keygen',
- 'sodium_crypto_secretbox_open',
- 'sodium_crypto_secretbox',
- 'sodium_crypto_secretstream_xchacha20poly1305_init_pull',
- 'sodium_crypto_secretstream_xchacha20poly1305_init_push',
- 'sodium_crypto_secretstream_xchacha20poly1305_keygen',
- 'sodium_crypto_secretstream_xchacha20poly1305_pull',
- 'sodium_crypto_secretstream_xchacha20poly1305_push',
- 'sodium_crypto_secretstream_xchacha20poly1305_rekey',
- 'sodium_crypto_shorthash_keygen',
- 'sodium_crypto_shorthash',
- 'sodium_crypto_sign_detached',
- 'sodium_crypto_sign_ed25519_pk_to_curve25519',
- 'sodium_crypto_sign_ed25519_sk_to_curve25519',
- 'sodium_crypto_sign_keypair_from_secretkey_and_publickey',
- 'sodium_crypto_sign_keypair',
- 'sodium_crypto_sign_open',
- 'sodium_crypto_sign_publickey_from_secretkey',
- 'sodium_crypto_sign_publickey',
- 'sodium_crypto_sign_secretkey',
- 'sodium_crypto_sign_seed_keypair',
- 'sodium_crypto_sign_verify_detached',
- 'sodium_crypto_sign',
- 'sodium_crypto_stream_keygen',
- 'sodium_crypto_stream_xor',
- 'sodium_crypto_stream',
- 'sodium_hex2bin',
- 'sodium_increment',
- 'sodium_memcmp',
- 'sodium_memzero',
- 'sodium_pad',
- 'sodium_unpad'),
- 'Solr': ('solr_get_version',),
- 'Stomp': ('stomp_connect_error', 'stomp_version'),
- 'Stream': ('stream_bucket_append',
- 'stream_bucket_make_writeable',
- 'stream_bucket_new',
- 'stream_bucket_prepend',
- 'stream_context_create',
- 'stream_context_get_default',
- 'stream_context_get_options',
- 'stream_context_get_params',
- 'stream_context_set_default',
- 'stream_context_set_option',
- 'stream_context_set_params',
- 'stream_copy_to_stream',
- 'stream_filter_append',
- 'stream_filter_prepend',
- 'stream_filter_register',
- 'stream_filter_remove',
- 'stream_get_contents',
- 'stream_get_filters',
- 'stream_get_line',
- 'stream_get_meta_data',
- 'stream_get_transports',
- 'stream_get_wrappers',
- 'stream_is_local',
- 'stream_isatty',
- 'stream_notification_callback',
- 'stream_register_wrapper',
- 'stream_resolve_include_path',
- 'stream_select',
- 'stream_set_blocking',
- 'stream_set_chunk_size',
- 'stream_set_read_buffer',
- 'stream_set_timeout',
- 'stream_set_write_buffer',
- 'stream_socket_accept',
- 'stream_socket_client',
- 'stream_socket_enable_crypto',
- 'stream_socket_get_name',
- 'stream_socket_pair',
- 'stream_socket_recvfrom',
- 'stream_socket_sendto',
- 'stream_socket_server',
- 'stream_socket_shutdown',
- 'stream_supports_lock',
- 'stream_wrapper_register',
- 'stream_wrapper_restore',
- 'stream_wrapper_unregister'),
- 'String': ('addcslashes',
- 'addslashes',
- 'bin2hex',
- 'chop',
- 'chr',
- 'chunk_split',
- 'convert_cyr_string',
- 'convert_uudecode',
- 'convert_uuencode',
- 'count_chars',
- 'crc32',
- 'crypt',
- 'echo',
- 'explode',
- 'fprintf',
- 'get_html_translation_table',
- 'hebrev',
- 'hebrevc',
- 'hex2bin',
- 'html_entity_decode',
- 'htmlentities',
- 'htmlspecialchars_decode',
- 'htmlspecialchars',
- 'implode',
- 'join',
- 'lcfirst',
- 'levenshtein',
- 'localeconv',
- 'ltrim',
- 'md5_file',
- 'md5',
- 'metaphone',
- 'money_format',
- 'nl_langinfo',
- 'nl2br',
- 'number_format',
- 'ord',
- 'parse_str',
- 'print',
- 'printf',
- 'quoted_printable_decode',
- 'quoted_printable_encode',
- 'quotemeta',
- 'rtrim',
- 'setlocale',
- 'sha1_file',
- 'sha1',
- 'similar_text',
- 'soundex',
- 'sprintf',
- 'sscanf',
- 'str_contains',
- 'str_ends_with',
- 'str_getcsv',
- 'str_ireplace',
- 'str_pad',
- 'str_repeat',
- 'str_replace',
- 'str_rot13',
- 'str_shuffle',
- 'str_split',
- 'str_starts_with',
- 'str_word_count',
- 'strcasecmp',
- 'strchr',
- 'strcmp',
- 'strcoll',
- 'strcspn',
- 'strip_tags',
- 'stripcslashes',
- 'stripos',
- 'stripslashes',
- 'stristr',
- 'strlen',
- 'strnatcasecmp',
- 'strnatcmp',
- 'strncasecmp',
- 'strncmp',
- 'strpbrk',
- 'strpos',
- 'strrchr',
- 'strrev',
- 'strripos',
- 'strrpos',
- 'strspn',
- 'strstr',
- 'strtok',
- 'strtolower',
- 'strtoupper',
- 'strtr',
- 'substr_compare',
- 'substr_count',
- 'substr_replace',
- 'substr',
- 'trim',
- 'ucfirst',
- 'ucwords',
- 'vfprintf',
- 'vprintf',
- 'vsprintf',
- 'wordwrap'),
- 'Swoole': ('swoole_async_dns_lookup',
- 'swoole_async_read',
- 'swoole_async_readfile',
- 'swoole_async_set',
- 'swoole_async_write',
- 'swoole_async_writefile',
- 'swoole_clear_error',
- 'swoole_client_select',
- 'swoole_cpu_num',
- 'swoole_errno',
- 'swoole_error_log',
- 'swoole_event_add',
- 'swoole_event_defer',
- 'swoole_event_del',
- 'swoole_event_exit',
- 'swoole_event_set',
- 'swoole_event_wait',
- 'swoole_event_write',
- 'swoole_get_local_ip',
- 'swoole_last_error',
- 'swoole_load_module',
- 'swoole_select',
- 'swoole_set_process_name',
- 'swoole_strerror',
- 'swoole_timer_after',
- 'swoole_timer_exists',
- 'swoole_timer_tick',
- 'swoole_version'),
- 'TCP': ('tcpwrap_check',),
- 'Taint': ('is_tainted', 'taint', 'untaint'),
- 'Tidy': ('ob_tidyhandler',
- 'tidy_access_count',
- 'tidy_config_count',
- 'tidy_error_count',
- 'tidy_get_output',
- 'tidy_warning_count'),
- 'Tokenizer': ('token_get_all', 'token_name'),
- 'Trader': ('trader_acos',
- 'trader_ad',
- 'trader_add',
- 'trader_adosc',
- 'trader_adx',
- 'trader_adxr',
- 'trader_apo',
- 'trader_aroon',
- 'trader_aroonosc',
- 'trader_asin',
- 'trader_atan',
- 'trader_atr',
- 'trader_avgprice',
- 'trader_bbands',
- 'trader_beta',
- 'trader_bop',
- 'trader_cci',
- 'trader_cdl2crows',
- 'trader_cdl3blackcrows',
- 'trader_cdl3inside',
- 'trader_cdl3linestrike',
- 'trader_cdl3outside',
- 'trader_cdl3starsinsouth',
- 'trader_cdl3whitesoldiers',
- 'trader_cdlabandonedbaby',
- 'trader_cdladvanceblock',
- 'trader_cdlbelthold',
- 'trader_cdlbreakaway',
- 'trader_cdlclosingmarubozu',
- 'trader_cdlconcealbabyswall',
- 'trader_cdlcounterattack',
- 'trader_cdldarkcloudcover',
- 'trader_cdldoji',
- 'trader_cdldojistar',
- 'trader_cdldragonflydoji',
- 'trader_cdlengulfing',
- 'trader_cdleveningdojistar',
- 'trader_cdleveningstar',
- 'trader_cdlgapsidesidewhite',
- 'trader_cdlgravestonedoji',
- 'trader_cdlhammer',
- 'trader_cdlhangingman',
- 'trader_cdlharami',
- 'trader_cdlharamicross',
- 'trader_cdlhighwave',
- 'trader_cdlhikkake',
- 'trader_cdlhikkakemod',
- 'trader_cdlhomingpigeon',
- 'trader_cdlidentical3crows',
- 'trader_cdlinneck',
- 'trader_cdlinvertedhammer',
- 'trader_cdlkicking',
- 'trader_cdlkickingbylength',
- 'trader_cdlladderbottom',
- 'trader_cdllongleggeddoji',
- 'trader_cdllongline',
- 'trader_cdlmarubozu',
- 'trader_cdlmatchinglow',
- 'trader_cdlmathold',
- 'trader_cdlmorningdojistar',
- 'trader_cdlmorningstar',
- 'trader_cdlonneck',
- 'trader_cdlpiercing',
- 'trader_cdlrickshawman',
- 'trader_cdlrisefall3methods',
- 'trader_cdlseparatinglines',
- 'trader_cdlshootingstar',
- 'trader_cdlshortline',
- 'trader_cdlspinningtop',
- 'trader_cdlstalledpattern',
- 'trader_cdlsticksandwich',
- 'trader_cdltakuri',
- 'trader_cdltasukigap',
- 'trader_cdlthrusting',
- 'trader_cdltristar',
- 'trader_cdlunique3river',
- 'trader_cdlupsidegap2crows',
- 'trader_cdlxsidegap3methods',
- 'trader_ceil',
- 'trader_cmo',
- 'trader_correl',
- 'trader_cos',
- 'trader_cosh',
- 'trader_dema',
- 'trader_div',
- 'trader_dx',
- 'trader_ema',
- 'trader_errno',
- 'trader_exp',
- 'trader_floor',
- 'trader_get_compat',
- 'trader_get_unstable_period',
- 'trader_ht_dcperiod',
- 'trader_ht_dcphase',
- 'trader_ht_phasor',
- 'trader_ht_sine',
- 'trader_ht_trendline',
- 'trader_ht_trendmode',
- 'trader_kama',
- 'trader_linearreg_angle',
- 'trader_linearreg_intercept',
- 'trader_linearreg_slope',
- 'trader_linearreg',
- 'trader_ln',
- 'trader_log10',
- 'trader_ma',
- 'trader_macd',
- 'trader_macdext',
- 'trader_macdfix',
- 'trader_mama',
- 'trader_mavp',
- 'trader_max',
- 'trader_maxindex',
- 'trader_medprice',
- 'trader_mfi',
- 'trader_midpoint',
- 'trader_midprice',
- 'trader_min',
- 'trader_minindex',
- 'trader_minmax',
- 'trader_minmaxindex',
- 'trader_minus_di',
- 'trader_minus_dm',
- 'trader_mom',
- 'trader_mult',
- 'trader_natr',
- 'trader_obv',
- 'trader_plus_di',
- 'trader_plus_dm',
- 'trader_ppo',
- 'trader_roc',
- 'trader_rocp',
- 'trader_rocr100',
- 'trader_rocr',
- 'trader_rsi',
- 'trader_sar',
- 'trader_sarext',
- 'trader_set_compat',
- 'trader_set_unstable_period',
- 'trader_sin',
- 'trader_sinh',
- 'trader_sma',
- 'trader_sqrt',
- 'trader_stddev',
- 'trader_stoch',
- 'trader_stochf',
- 'trader_stochrsi',
- 'trader_sub',
- 'trader_sum',
- 'trader_t3',
- 'trader_tan',
- 'trader_tanh',
- 'trader_tema',
- 'trader_trange',
- 'trader_trima',
- 'trader_trix',
- 'trader_tsf',
- 'trader_typprice',
- 'trader_ultosc',
- 'trader_var',
- 'trader_wclprice',
- 'trader_willr',
- 'trader_wma'),
- 'URL': ('base64_decode',
- 'base64_encode',
- 'get_headers',
- 'get_meta_tags',
- 'http_build_query',
- 'parse_url',
- 'rawurldecode',
- 'rawurlencode',
- 'urldecode',
- 'urlencode'),
- 'Uopz': ('uopz_add_function',
- 'uopz_allow_exit',
- 'uopz_backup',
- 'uopz_compose',
- 'uopz_copy',
- 'uopz_del_function',
- 'uopz_delete',
- 'uopz_extend',
- 'uopz_flags',
- 'uopz_function',
- 'uopz_get_exit_status',
- 'uopz_get_hook',
- 'uopz_get_mock',
- 'uopz_get_property',
- 'uopz_get_return',
- 'uopz_get_static',
- 'uopz_implement',
- 'uopz_overload',
- 'uopz_redefine',
- 'uopz_rename',
- 'uopz_restore',
- 'uopz_set_hook',
- 'uopz_set_mock',
- 'uopz_set_property',
- 'uopz_set_return',
- 'uopz_set_static',
- 'uopz_undefine',
- 'uopz_unset_hook',
- 'uopz_unset_mock',
- 'uopz_unset_return'),
- 'Variable handling': ('boolval',
- 'debug_zval_dump',
- 'doubleval',
- 'empty',
- 'floatval',
- 'get_debug_type',
- 'get_defined_vars',
- 'get_resource_id',
- 'get_resource_type',
- 'gettype',
- 'intval',
- 'is_array',
- 'is_bool',
- 'is_callable',
- 'is_countable',
- 'is_double',
- 'is_float',
- 'is_int',
- 'is_integer',
- 'is_iterable',
- 'is_long',
- 'is_null',
- 'is_numeric',
- 'is_object',
- 'is_real',
- 'is_resource',
- 'is_scalar',
- 'is_string',
- 'isset',
- 'print_r',
- 'serialize',
- 'settype',
- 'strval',
- 'unserialize',
- 'unset',
- 'var_dump',
- 'var_export'),
- 'WDDX': ('wddx_add_vars',
- 'wddx_deserialize',
- 'wddx_packet_end',
- 'wddx_packet_start',
- 'wddx_serialize_value',
- 'wddx_serialize_vars'),
- 'WinCache': ('wincache_fcache_fileinfo',
- 'wincache_fcache_meminfo',
- 'wincache_lock',
- 'wincache_ocache_fileinfo',
- 'wincache_ocache_meminfo',
- 'wincache_refresh_if_changed',
- 'wincache_rplist_fileinfo',
- 'wincache_rplist_meminfo',
- 'wincache_scache_info',
- 'wincache_scache_meminfo',
- 'wincache_ucache_add',
- 'wincache_ucache_cas',
- 'wincache_ucache_clear',
- 'wincache_ucache_dec',
- 'wincache_ucache_delete',
- 'wincache_ucache_exists',
- 'wincache_ucache_get',
- 'wincache_ucache_inc',
- 'wincache_ucache_info',
- 'wincache_ucache_meminfo',
- 'wincache_ucache_set',
- 'wincache_unlock'),
- 'XML Parser': ('utf8_decode',
- 'utf8_encode',
- 'xml_error_string',
- 'xml_get_current_byte_index',
- 'xml_get_current_column_number',
- 'xml_get_current_line_number',
- 'xml_get_error_code',
- 'xml_parse_into_struct',
- 'xml_parse',
- 'xml_parser_create_ns',
- 'xml_parser_create',
- 'xml_parser_free',
- 'xml_parser_get_option',
- 'xml_parser_set_option',
- 'xml_set_character_data_handler',
- 'xml_set_default_handler',
- 'xml_set_element_handler',
- 'xml_set_end_namespace_decl_handler',
- 'xml_set_external_entity_ref_handler',
- 'xml_set_notation_decl_handler',
- 'xml_set_object',
- 'xml_set_processing_instruction_handler',
- 'xml_set_start_namespace_decl_handler',
- 'xml_set_unparsed_entity_decl_handler'),
- 'XML-RPC': ('xmlrpc_decode_request',
- 'xmlrpc_decode',
- 'xmlrpc_encode_request',
- 'xmlrpc_encode',
- 'xmlrpc_get_type',
- 'xmlrpc_is_fault',
- 'xmlrpc_parse_method_descriptions',
- 'xmlrpc_server_add_introspection_data',
- 'xmlrpc_server_call_method',
- 'xmlrpc_server_create',
- 'xmlrpc_server_destroy',
- 'xmlrpc_server_register_introspection_callback',
- 'xmlrpc_server_register_method',
- 'xmlrpc_set_type'),
- 'Xhprof': ('xhprof_disable',
- 'xhprof_enable',
- 'xhprof_sample_disable',
- 'xhprof_sample_enable'),
- 'YAZ': ('yaz_addinfo',
- 'yaz_ccl_conf',
- 'yaz_ccl_parse',
- 'yaz_close',
- 'yaz_connect',
- 'yaz_database',
- 'yaz_element',
- 'yaz_errno',
- 'yaz_error',
- 'yaz_es_result',
- 'yaz_es',
- 'yaz_get_option',
- 'yaz_hits',
- 'yaz_itemorder',
- 'yaz_present',
- 'yaz_range',
- 'yaz_record',
- 'yaz_scan_result',
- 'yaz_scan',
- 'yaz_schema',
- 'yaz_search',
- 'yaz_set_option',
- 'yaz_sort',
- 'yaz_syntax',
- 'yaz_wait'),
- 'Yaml': ('yaml_emit_file',
- 'yaml_emit',
- 'yaml_parse_file',
- 'yaml_parse_url',
- 'yaml_parse'),
- 'Zip': ('zip_close',
- 'zip_entry_close',
- 'zip_entry_compressedsize',
- 'zip_entry_compressionmethod',
- 'zip_entry_filesize',
- 'zip_entry_name',
- 'zip_entry_open',
- 'zip_entry_read',
- 'zip_open',
- 'zip_read'),
- 'Zlib': ('deflate_add',
- 'deflate_init',
- 'gzclose',
- 'gzcompress',
- 'gzdecode',
- 'gzdeflate',
- 'gzencode',
- 'gzeof',
- 'gzfile',
- 'gzgetc',
- 'gzgets',
- 'gzgetss',
- 'gzinflate',
- 'gzopen',
- 'gzpassthru',
- 'gzputs',
- 'gzread',
- 'gzrewind',
- 'gzseek',
- 'gztell',
- 'gzuncompress',
- 'gzwrite',
- 'inflate_add',
- 'inflate_get_read_len',
- 'inflate_get_status',
- 'inflate_init',
- 'readgzfile',
- 'zlib_decode',
- 'zlib_encode',
- 'zlib_get_coding_type'),
- 'ZooKeeper': ('zookeeper_dispatch',),
- 'cURL': ('curl_close',
- 'curl_copy_handle',
- 'curl_errno',
- 'curl_error',
- 'curl_escape',
- 'curl_exec',
- 'curl_file_create',
- 'curl_getinfo',
- 'curl_init',
- 'curl_multi_add_handle',
- 'curl_multi_close',
- 'curl_multi_errno',
- 'curl_multi_exec',
- 'curl_multi_getcontent',
- 'curl_multi_info_read',
- 'curl_multi_init',
- 'curl_multi_remove_handle',
- 'curl_multi_select',
- 'curl_multi_setopt',
- 'curl_multi_strerror',
- 'curl_pause',
- 'curl_reset',
- 'curl_setopt_array',
- 'curl_setopt',
- 'curl_share_close',
- 'curl_share_errno',
- 'curl_share_init',
- 'curl_share_setopt',
- 'curl_share_strerror',
- 'curl_strerror',
- 'curl_unescape',
- 'curl_version'),
- 'dBase': ('dbase_add_record',
- 'dbase_close',
- 'dbase_create',
- 'dbase_delete_record',
- 'dbase_get_header_info',
- 'dbase_get_record_with_names',
- 'dbase_get_record',
- 'dbase_numfields',
- 'dbase_numrecords',
- 'dbase_open',
- 'dbase_pack',
- 'dbase_replace_record'),
- 'iconv': ('iconv_get_encoding',
- 'iconv_mime_decode_headers',
- 'iconv_mime_decode',
- 'iconv_mime_encode',
- 'iconv_set_encoding',
- 'iconv_strlen',
- 'iconv_strpos',
- 'iconv_strrpos',
- 'iconv_substr',
- 'iconv',
- 'ob_iconv_handler'),
- 'intl': ('intl_error_name',
- 'intl_get_error_code',
- 'intl_get_error_message',
- 'intl_is_failure'),
- 'libxml': ('libxml_clear_errors',
- 'libxml_disable_entity_loader',
- 'libxml_get_errors',
- 'libxml_get_last_error',
- 'libxml_set_external_entity_loader',
- 'libxml_set_streams_context',
- 'libxml_use_internal_errors'),
- 'mqseries': ('mqseries_back',
- 'mqseries_begin',
- 'mqseries_close',
- 'mqseries_cmit',
- 'mqseries_conn',
- 'mqseries_connx',
- 'mqseries_disc',
- 'mqseries_get',
- 'mqseries_inq',
- 'mqseries_open',
- 'mqseries_put1',
- 'mqseries_put',
- 'mqseries_set',
- 'mqseries_strerror'),
- 'phpdbg': ('phpdbg_break_file',
- 'phpdbg_break_function',
- 'phpdbg_break_method',
- 'phpdbg_break_next',
- 'phpdbg_clear',
- 'phpdbg_color',
- 'phpdbg_end_oplog',
- 'phpdbg_exec',
- 'phpdbg_get_executable',
- 'phpdbg_prompt',
- 'phpdbg_start_oplog'),
- 'runkit7': ('runkit7_constant_add',
- 'runkit7_constant_redefine',
- 'runkit7_constant_remove',
- 'runkit7_function_add',
- 'runkit7_function_copy',
- 'runkit7_function_redefine',
- 'runkit7_function_remove',
- 'runkit7_function_rename',
- 'runkit7_import',
- 'runkit7_method_add',
- 'runkit7_method_copy',
- 'runkit7_method_redefine',
- 'runkit7_method_remove',
- 'runkit7_method_rename',
- 'runkit7_object_id',
- 'runkit7_superglobals',
- 'runkit7_zval_inspect'),
- 'ssdeep': ('ssdeep_fuzzy_compare',
- 'ssdeep_fuzzy_hash_filename',
- 'ssdeep_fuzzy_hash'),
- 'var_representation': ('var_representation',),
- 'win32service': ('win32_continue_service',
- 'win32_create_service',
- 'win32_delete_service',
- 'win32_get_last_control_message',
- 'win32_pause_service',
- 'win32_query_service_status',
- 'win32_send_custom_control',
- 'win32_set_service_exit_code',
- 'win32_set_service_exit_mode',
- 'win32_set_service_status',
- 'win32_start_service_ctrl_dispatcher',
- 'win32_start_service',
- 'win32_stop_service'),
- 'xattr': ('xattr_get',
- 'xattr_list',
- 'xattr_remove',
- 'xattr_set',
- 'xattr_supported'),
- 'xdiff': ('xdiff_file_bdiff_size',
- 'xdiff_file_bdiff',
- 'xdiff_file_bpatch',
- 'xdiff_file_diff_binary',
- 'xdiff_file_diff',
- 'xdiff_file_merge3',
- 'xdiff_file_patch_binary',
- 'xdiff_file_patch',
- 'xdiff_file_rabdiff',
- 'xdiff_string_bdiff_size',
- 'xdiff_string_bdiff',
- 'xdiff_string_bpatch',
- 'xdiff_string_diff_binary',
- 'xdiff_string_diff',
- 'xdiff_string_merge3',
- 'xdiff_string_patch_binary',
- 'xdiff_string_patch',
- 'xdiff_string_rabdiff')}
-
-if __name__ == '__main__': # pragma: no cover
- import glob
- import os
- import pprint
- import re
- import shutil
- import tarfile
- from urllib.request import urlretrieve
-
- PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
- PHP_MANUAL_DIR = './php-chunked-xhtml/'
- PHP_REFERENCE_GLOB = 'ref.*'
- PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
- PHP_MODULE_RE = '<title>(.*?) Functions</title>'
-
- def get_php_functions():
- function_re = re.compile(PHP_FUNCTION_RE)
- module_re = re.compile(PHP_MODULE_RE)
- modules = {}
-
- for file in get_php_references():
- module = ''
- with open(file, encoding='utf-8') as f:
- for line in f:
- if not module:
- search = module_re.search(line)
- if search:
- module = search.group(1)
- modules[module] = []
-
- elif 'href="function.' in line:
- for match in function_re.finditer(line):
- fn = match.group(1)
- if '»' not in fn and '«' not in fn and \
- '::' not in fn and '\\' not in fn and \
- fn not in modules[module]:
- modules[module].append(fn)
-
- if module:
- # These are dummy manual pages, not actual functions
- if module == 'Filesystem':
- modules[module].remove('delete')
-
- if not modules[module]:
- del modules[module]
-
- for key in modules:
- modules[key] = tuple(modules[key])
- return modules
-
- def get_php_references():
- download = urlretrieve(PHP_MANUAL_URL)
- with tarfile.open(download[0]) as tar:
- tar.extractall()
- yield from glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB))
- os.remove(download[0])
-
- def regenerate(filename, modules):
- with open(filename, encoding='utf-8') as fp:
- content = fp.read()
-
- header = content[:content.find('MODULES = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- with open(filename, 'w', encoding='utf-8') as fp:
- fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
- fp.write(footer)
-
- def run():
- print('>> Downloading Function Index')
- modules = get_php_functions()
- total = sum(len(v) for v in modules.values())
- print('%d functions found' % total)
- regenerate(__file__, modules)
- shutil.rmtree(PHP_MANUAL_DIR)
-
- run()
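
For context on how a generated table like MODULES gets consumed: below is a minimal sketch (my own, not code from the deleted files) of flattening the mapping into the kind of lookup set a highlighter needs when deciding whether an identifier is a builtin PHP function. It assumes a Pygments installation that still provides pygments.lexers._php_builtins; the helper name is invented.

    # Illustrative sketch only -- flatten the MODULES mapping into one lookup set.
    from pygments.lexers._php_builtins import MODULES

    PHP_BUILTIN_FUNCTIONS = {fn for names in MODULES.values() for fn in names}

    def is_php_builtin(name: str) -> bool:
        # True when the PHP manual documents `name` as a builtin function.
        return name in PHP_BUILTIN_FUNCTIONS

    assert is_php_builtin('mb_strlen')
    assert not is_php_builtin('my_custom_helper')
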
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_postgres_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_postgres_builtins.py
deleted file mode 100644
index ecc2a7e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_postgres_builtins.py
+++ /dev/null
@@ -1,739 +0,0 @@
-"""
- pygments.lexers._postgres_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Self-updating data files for PostgreSQL lexer.
-
- Run with `python -I` to update itself.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Autogenerated: please edit them if you like wasting your time.
-
-KEYWORDS = (
- 'ABORT',
- 'ABSOLUTE',
- 'ACCESS',
- 'ACTION',
- 'ADD',
- 'ADMIN',
- 'AFTER',
- 'AGGREGATE',
- 'ALL',
- 'ALSO',
- 'ALTER',
- 'ALWAYS',
- 'ANALYSE',
- 'ANALYZE',
- 'AND',
- 'ANY',
- 'ARRAY',
- 'AS',
- 'ASC',
- 'ASENSITIVE',
- 'ASSERTION',
- 'ASSIGNMENT',
- 'ASYMMETRIC',
- 'AT',
- 'ATOMIC',
- 'ATTACH',
- 'ATTRIBUTE',
- 'AUTHORIZATION',
- 'BACKWARD',
- 'BEFORE',
- 'BEGIN',
- 'BETWEEN',
- 'BIGINT',
- 'BINARY',
- 'BIT',
- 'BOOLEAN',
- 'BOTH',
- 'BREADTH',
- 'BY',
- 'CACHE',
- 'CALL',
- 'CALLED',
- 'CASCADE',
- 'CASCADED',
- 'CASE',
- 'CAST',
- 'CATALOG',
- 'CHAIN',
- 'CHAR',
- 'CHARACTER',
- 'CHARACTERISTICS',
- 'CHECK',
- 'CHECKPOINT',
- 'CLASS',
- 'CLOSE',
- 'CLUSTER',
- 'COALESCE',
- 'COLLATE',
- 'COLLATION',
- 'COLUMN',
- 'COLUMNS',
- 'COMMENT',
- 'COMMENTS',
- 'COMMIT',
- 'COMMITTED',
- 'COMPRESSION',
- 'CONCURRENTLY',
- 'CONFIGURATION',
- 'CONFLICT',
- 'CONNECTION',
- 'CONSTRAINT',
- 'CONSTRAINTS',
- 'CONTENT',
- 'CONTINUE',
- 'CONVERSION',
- 'COPY',
- 'COST',
- 'CREATE',
- 'CROSS',
- 'CSV',
- 'CUBE',
- 'CURRENT',
- 'CURRENT_CATALOG',
- 'CURRENT_DATE',
- 'CURRENT_ROLE',
- 'CURRENT_SCHEMA',
- 'CURRENT_TIME',
- 'CURRENT_TIMESTAMP',
- 'CURRENT_USER',
- 'CURSOR',
- 'CYCLE',
- 'DATA',
- 'DATABASE',
- 'DAY',
- 'DEALLOCATE',
- 'DEC',
- 'DECIMAL',
- 'DECLARE',
- 'DEFAULT',
- 'DEFAULTS',
- 'DEFERRABLE',
- 'DEFERRED',
- 'DEFINER',
- 'DELETE',
- 'DELIMITER',
- 'DELIMITERS',
- 'DEPENDS',
- 'DEPTH',
- 'DESC',
- 'DETACH',
- 'DICTIONARY',
- 'DISABLE',
- 'DISCARD',
- 'DISTINCT',
- 'DO',
- 'DOCUMENT',
- 'DOMAIN',
- 'DOUBLE',
- 'DROP',
- 'EACH',
- 'ELSE',
- 'ENABLE',
- 'ENCODING',
- 'ENCRYPTED',
- 'END',
- 'ENUM',
- 'ESCAPE',
- 'EVENT',
- 'EXCEPT',
- 'EXCLUDE',
- 'EXCLUDING',
- 'EXCLUSIVE',
- 'EXECUTE',
- 'EXISTS',
- 'EXPLAIN',
- 'EXPRESSION',
- 'EXTENSION',
- 'EXTERNAL',
- 'EXTRACT',
- 'FALSE',
- 'FAMILY',
- 'FETCH',
- 'FILTER',
- 'FINALIZE',
- 'FIRST',
- 'FLOAT',
- 'FOLLOWING',
- 'FOR',
- 'FORCE',
- 'FOREIGN',
- 'FORWARD',
- 'FREEZE',
- 'FROM',
- 'FULL',
- 'FUNCTION',
- 'FUNCTIONS',
- 'GENERATED',
- 'GLOBAL',
- 'GRANT',
- 'GRANTED',
- 'GREATEST',
- 'GROUP',
- 'GROUPING',
- 'GROUPS',
- 'HANDLER',
- 'HAVING',
- 'HEADER',
- 'HOLD',
- 'HOUR',
- 'IDENTITY',
- 'IF',
- 'ILIKE',
- 'IMMEDIATE',
- 'IMMUTABLE',
- 'IMPLICIT',
- 'IMPORT',
- 'IN',
- 'INCLUDE',
- 'INCLUDING',
- 'INCREMENT',
- 'INDEX',
- 'INDEXES',
- 'INHERIT',
- 'INHERITS',
- 'INITIALLY',
- 'INLINE',
- 'INNER',
- 'INOUT',
- 'INPUT',
- 'INSENSITIVE',
- 'INSERT',
- 'INSTEAD',
- 'INT',
- 'INTEGER',
- 'INTERSECT',
- 'INTERVAL',
- 'INTO',
- 'INVOKER',
- 'IS',
- 'ISNULL',
- 'ISOLATION',
- 'JOIN',
- 'KEY',
- 'LABEL',
- 'LANGUAGE',
- 'LARGE',
- 'LAST',
- 'LATERAL',
- 'LEADING',
- 'LEAKPROOF',
- 'LEAST',
- 'LEFT',
- 'LEVEL',
- 'LIKE',
- 'LIMIT',
- 'LISTEN',
- 'LOAD',
- 'LOCAL',
- 'LOCALTIME',
- 'LOCALTIMESTAMP',
- 'LOCATION',
- 'LOCK',
- 'LOCKED',
- 'LOGGED',
- 'MAPPING',
- 'MATCH',
- 'MATERIALIZED',
- 'MAXVALUE',
- 'METHOD',
- 'MINUTE',
- 'MINVALUE',
- 'MODE',
- 'MONTH',
- 'MOVE',
- 'NAME',
- 'NAMES',
- 'NATIONAL',
- 'NATURAL',
- 'NCHAR',
- 'NEW',
- 'NEXT',
- 'NFC',
- 'NFD',
- 'NFKC',
- 'NFKD',
- 'NO',
- 'NONE',
- 'NORMALIZE',
- 'NORMALIZED',
- 'NOT',
- 'NOTHING',
- 'NOTIFY',
- 'NOTNULL',
- 'NOWAIT',
- 'NULL',
- 'NULLIF',
- 'NULLS',
- 'NUMERIC',
- 'OBJECT',
- 'OF',
- 'OFF',
- 'OFFSET',
- 'OIDS',
- 'OLD',
- 'ON',
- 'ONLY',
- 'OPERATOR',
- 'OPTION',
- 'OPTIONS',
- 'OR',
- 'ORDER',
- 'ORDINALITY',
- 'OTHERS',
- 'OUT',
- 'OUTER',
- 'OVER',
- 'OVERLAPS',
- 'OVERLAY',
- 'OVERRIDING',
- 'OWNED',
- 'OWNER',
- 'PARALLEL',
- 'PARSER',
- 'PARTIAL',
- 'PARTITION',
- 'PASSING',
- 'PASSWORD',
- 'PLACING',
- 'PLANS',
- 'POLICY',
- 'POSITION',
- 'PRECEDING',
- 'PRECISION',
- 'PREPARE',
- 'PREPARED',
- 'PRESERVE',
- 'PRIMARY',
- 'PRIOR',
- 'PRIVILEGES',
- 'PROCEDURAL',
- 'PROCEDURE',
- 'PROCEDURES',
- 'PROGRAM',
- 'PUBLICATION',
- 'QUOTE',
- 'RANGE',
- 'READ',
- 'REAL',
- 'REASSIGN',
- 'RECHECK',
- 'RECURSIVE',
- 'REF',
- 'REFERENCES',
- 'REFERENCING',
- 'REFRESH',
- 'REINDEX',
- 'RELATIVE',
- 'RELEASE',
- 'RENAME',
- 'REPEATABLE',
- 'REPLACE',
- 'REPLICA',
- 'RESET',
- 'RESTART',
- 'RESTRICT',
- 'RETURN',
- 'RETURNING',
- 'RETURNS',
- 'REVOKE',
- 'RIGHT',
- 'ROLE',
- 'ROLLBACK',
- 'ROLLUP',
- 'ROUTINE',
- 'ROUTINES',
- 'ROW',
- 'ROWS',
- 'RULE',
- 'SAVEPOINT',
- 'SCHEMA',
- 'SCHEMAS',
- 'SCROLL',
- 'SEARCH',
- 'SECOND',
- 'SECURITY',
- 'SELECT',
- 'SEQUENCE',
- 'SEQUENCES',
- 'SERIALIZABLE',
- 'SERVER',
- 'SESSION',
- 'SESSION_USER',
- 'SET',
- 'SETOF',
- 'SETS',
- 'SHARE',
- 'SHOW',
- 'SIMILAR',
- 'SIMPLE',
- 'SKIP',
- 'SMALLINT',
- 'SNAPSHOT',
- 'SOME',
- 'SQL',
- 'STABLE',
- 'STANDALONE',
- 'START',
- 'STATEMENT',
- 'STATISTICS',
- 'STDIN',
- 'STDOUT',
- 'STORAGE',
- 'STORED',
- 'STRICT',
- 'STRIP',
- 'SUBSCRIPTION',
- 'SUBSTRING',
- 'SUPPORT',
- 'SYMMETRIC',
- 'SYSID',
- 'SYSTEM',
- 'TABLE',
- 'TABLES',
- 'TABLESAMPLE',
- 'TABLESPACE',
- 'TEMP',
- 'TEMPLATE',
- 'TEMPORARY',
- 'TEXT',
- 'THEN',
- 'TIES',
- 'TIME',
- 'TIMESTAMP',
- 'TO',
- 'TRAILING',
- 'TRANSACTION',
- 'TRANSFORM',
- 'TREAT',
- 'TRIGGER',
- 'TRIM',
- 'TRUE',
- 'TRUNCATE',
- 'TRUSTED',
- 'TYPE',
- 'TYPES',
- 'UESCAPE',
- 'UNBOUNDED',
- 'UNCOMMITTED',
- 'UNENCRYPTED',
- 'UNION',
- 'UNIQUE',
- 'UNKNOWN',
- 'UNLISTEN',
- 'UNLOGGED',
- 'UNTIL',
- 'UPDATE',
- 'USER',
- 'USING',
- 'VACUUM',
- 'VALID',
- 'VALIDATE',
- 'VALIDATOR',
- 'VALUE',
- 'VALUES',
- 'VARCHAR',
- 'VARIADIC',
- 'VARYING',
- 'VERBOSE',
- 'VERSION',
- 'VIEW',
- 'VIEWS',
- 'VOLATILE',
- 'WHEN',
- 'WHERE',
- 'WHITESPACE',
- 'WINDOW',
- 'WITH',
- 'WITHIN',
- 'WITHOUT',
- 'WORK',
- 'WRAPPER',
- 'WRITE',
- 'XML',
- 'XMLATTRIBUTES',
- 'XMLCONCAT',
- 'XMLELEMENT',
- 'XMLEXISTS',
- 'XMLFOREST',
- 'XMLNAMESPACES',
- 'XMLPARSE',
- 'XMLPI',
- 'XMLROOT',
- 'XMLSERIALIZE',
- 'XMLTABLE',
- 'YEAR',
- 'YES',
- 'ZONE',
-)
-
-DATATYPES = (
- 'bigint',
- 'bigserial',
- 'bit',
- 'bit varying',
- 'bool',
- 'boolean',
- 'box',
- 'bytea',
- 'char',
- 'character',
- 'character varying',
- 'cidr',
- 'circle',
- 'date',
- 'decimal',
- 'double precision',
- 'float4',
- 'float8',
- 'inet',
- 'int',
- 'int2',
- 'int4',
- 'int8',
- 'integer',
- 'interval',
- 'json',
- 'jsonb',
- 'line',
- 'lseg',
- 'macaddr',
- 'macaddr8',
- 'money',
- 'numeric',
- 'path',
- 'pg_lsn',
- 'pg_snapshot',
- 'point',
- 'polygon',
- 'real',
- 'serial',
- 'serial2',
- 'serial4',
- 'serial8',
- 'smallint',
- 'smallserial',
- 'text',
- 'time',
- 'timestamp',
- 'timestamptz',
- 'timetz',
- 'tsquery',
- 'tsvector',
- 'txid_snapshot',
- 'uuid',
- 'varbit',
- 'varchar',
- 'with time zone',
- 'without time zone',
- 'xml',
-)
-
-PSEUDO_TYPES = (
- 'any',
- 'anyarray',
- 'anycompatible',
- 'anycompatiblearray',
- 'anycompatiblemultirange',
- 'anycompatiblenonarray',
- 'anycompatiblerange',
- 'anyelement',
- 'anyenum',
- 'anymultirange',
- 'anynonarray',
- 'anyrange',
- 'cstring',
- 'event_trigger',
- 'fdw_handler',
- 'index_am_handler',
- 'internal',
- 'language_handler',
- 'pg_ddl_command',
- 'record',
- 'table_am_handler',
- 'trigger',
- 'tsm_handler',
- 'unknown',
- 'void',
-)
-
-# Remove pseudo-types that also appear in KEYWORDS (e.g. 'trigger')
-PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))))
-
-PLPGSQL_KEYWORDS = (
- 'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
- 'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
- 'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
-)
-
-# Most of these keywords are from ExplainNode function
-# in src/backend/commands/explain.c
-
-EXPLAIN_KEYWORDS = (
- 'Aggregate',
- 'Append',
- 'Bitmap Heap Scan',
- 'Bitmap Index Scan',
- 'BitmapAnd',
- 'BitmapOr',
- 'CTE Scan',
- 'Custom Scan',
- 'Delete',
- 'Foreign Scan',
- 'Function Scan',
- 'Gather Merge',
- 'Gather',
- 'Group',
- 'GroupAggregate',
- 'Hash Join',
- 'Hash',
- 'HashAggregate',
- 'Incremental Sort',
- 'Index Only Scan',
- 'Index Scan',
- 'Insert',
- 'Limit',
- 'LockRows',
- 'Materialize',
- 'Memoize',
- 'Merge Append',
- 'Merge Join',
- 'Merge',
- 'MixedAggregate',
- 'Named Tuplestore Scan',
- 'Nested Loop',
- 'ProjectSet',
- 'Recursive Union',
- 'Result',
- 'Sample Scan',
- 'Seq Scan',
- 'SetOp',
- 'Sort',
- 'SubPlan',
- 'Subquery Scan',
- 'Table Function Scan',
- 'Tid Range Scan',
- 'Tid Scan',
- 'Unique',
- 'Update',
- 'Values Scan',
- 'WindowAgg',
- 'WorkTable Scan',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import re
- from urllib.request import urlopen
-
- from pygments.util import format_lines
-
- # One man's constant is another man's variable.
- SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
- KEYWORDS_URL = SOURCE_URL + '/src/include/parser/kwlist.h'
- DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
-
- def update_myself():
- content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore')
- data_file = list(content.splitlines())
- datatypes = parse_datatypes(data_file)
- pseudos = parse_pseudos(data_file)
-
- content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore')
- keywords = parse_keywords(content)
-
- update_consts(__file__, 'DATATYPES', datatypes)
- update_consts(__file__, 'PSEUDO_TYPES', pseudos)
- update_consts(__file__, 'KEYWORDS', keywords)
-
- def parse_keywords(f):
- kw = []
- for m in re.finditer(r'PG_KEYWORD\("(.+?)"', f):
- kw.append(m.group(1).upper())
-
- if not kw:
- raise ValueError('no keyword found')
-
- kw.sort()
- return kw
-
- def parse_datatypes(f):
- dt = set()
- for line in f:
- if '<sect1' in line:
- break
- if '<entry><type>' not in line:
- continue
-
- # Parse a string such as
- # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
- # into types "time" and "without time zone"
-
- # remove all the tags
- line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
- line = re.sub("<[^>]+>", "", line)
-
- # Drop the parts containing parentheses
- for tmp in [t for tmp in line.split('[')
- for t in tmp.split(']') if "(" not in t]:
- for t in tmp.split(','):
- t = t.strip()
- if not t: continue
- dt.add(" ".join(t.split()))
-
- dt = list(dt)
- dt.sort()
- return dt
-
- def parse_pseudos(f):
- dt = []
- re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
- re_entry = re.compile(r'\s*<entry><type>(.+?)</type></entry>')
- re_end = re.compile(r'\s*</table>')
-
- f = iter(f)
- for line in f:
- if re_start.match(line) is not None:
- break
- else:
- raise ValueError('pseudo datatypes table not found')
-
- for line in f:
- m = re_entry.match(line)
- if m is not None:
- dt.append(m.group(1))
-
- if re_end.match(line) is not None:
- break
- else:
- raise ValueError('end of pseudo datatypes table not found')
-
- if not dt:
- raise ValueError('pseudo datatypes not found')
-
- dt.sort()
- return dt
-
- def update_consts(filename, constname, content):
- with open(filename, encoding='utf-8') as f:
- data = f.read()
-
- # Line to start/end inserting
- re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
- m = re_match.search(data)
- if not m:
- raise ValueError('Could not find existing definition for %s' %
- (constname,))
-
- new_block = format_lines(constname, content)
- data = data[:m.start()] + new_block + data[m.end():]
-
- with open(filename, 'w', encoding='utf-8', newline='\n') as f:
- f.write(data)
-
- update_myself()
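
The regeneration code above hinges on the PG_KEYWORD regex used by parse_keywords; here is a small, self-contained sketch of that extraction step. The two macro lines are illustrative examples, not copied from the real kwlist.h.

    # Illustrative sketch only -- sample input stands in for PostgreSQL's kwlist.h.
    import re

    sample = '''
    PG_KEYWORD("abort", ABORT_P, UNRESERVED_KEYWORD, BARE_LABEL)
    PG_KEYWORD("between", BETWEEN, COL_NAME_KEYWORD, BARE_LABEL)
    '''

    keywords = sorted(m.group(1).upper()
                      for m in re.finditer(r'PG_KEYWORD\("(.+?)"', sample))
    print(keywords)  # ['ABORT', 'BETWEEN']
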
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_qlik_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_qlik_builtins.py
deleted file mode 100644
index 697c124..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_qlik_builtins.py
+++ /dev/null
@@ -1,666 +0,0 @@
-"""
- pygments.lexers._qlik_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Qlik builtins.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# operators
-# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/Operators/operators.htm
-OPERATORS_LIST = {
- "words": [
- # Bit operators
- "bitnot",
- "bitand",
- "bitor",
- "bitxor",
- # Logical operators
- "and",
- "or",
- "not",
- "xor",
- # Relational operators
- "precedes",
- "follows",
- # String operators
- "like",
- ],
- "symbols": [
- # Bit operators
- ">>",
- "<<",
- # Logical operators
- # Numeric operators
- "+",
- "-",
- "/",
- "*",
- # Relational operators
- "<",
- "<=",
- ">",
- ">=",
- "=",
- "<>",
- # String operators
- "&",
- ],
-}
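
A minimal sketch (not the deleted Qlik lexer itself) of one way a consumer could turn OPERATORS_LIST into match patterns: word operators need word boundaries, symbol operators only need escaping with longer alternatives tried first. The WORD_OP_RE / SYMBOL_OP_RE names are invented, and the import assumes a Pygments installation that still provides this module.

    # Illustrative sketch only.
    import re
    from pygments.lexers._qlik_builtins import OPERATORS_LIST

    # 'and' should never match inside 'band'; '<=' should win over '<'.
    WORD_OP_RE = r"\b(?:%s)\b" % "|".join(OPERATORS_LIST["words"])
    SYMBOL_OP_RE = "|".join(re.escape(sym) for sym in
                            sorted(OPERATORS_LIST["symbols"], key=len, reverse=True))

    assert re.fullmatch(WORD_OP_RE, "bitand")
    assert re.match(SYMBOL_OP_RE, "<=").group(0) == "<="
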
-
-# SCRIPT STATEMENTS
-# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/
-STATEMENT_LIST = [
- # control statements
- "for",
- "each",
- "in",
- "next",
- "do",
- "while",
- "until",
- "unless",
- "loop",
- "return",
- "switch",
- "case",
- "default",
- "if",
- "else",
- "endif",
- "then",
- "end",
- "exit",
- "script",
- "switch",
- # prefixes
- "Add",
- "Buffer",
- "Concatenate",
- "Crosstable",
- "First",
- "Generic",
- "Hierarchy",
- "HierarchyBelongsTo",
- "Inner",
- "IntervalMatch",
- "Join",
- "Keep",
- "Left",
- "Mapping",
- "Merge",
- "NoConcatenate",
- "Outer",
- "Partial reload",
- "Replace",
- "Right",
- "Sample",
- "Semantic",
- "Unless",
- "When",
- # regular statements
- "Alias", # alias ... as ...
- "as",
- "AutoNumber",
- "Binary",
- "Comment field", # comment fields ... using ...
- "Comment fields", # comment field ... with ...
- "using",
- "with",
- "Comment table", # comment table ... with ...
- "Comment tables", # comment tables ... using ...
- "Connect",
- "ODBC", # ODBC CONNECT TO ...
- "OLEBD", # OLEDB CONNECT TO ...
- "CUSTOM", # CUSTOM CONNECT TO ...
- "LIB", # LIB CONNECT TO ...
- "Declare",
- "Derive",
- "From",
- "explicit",
- "implicit",
- "Direct Query",
- "dimension",
- "measure",
- "Directory",
- "Disconnect",
- "Drop field",
- "Drop fields",
- "Drop table",
- "Drop tables",
- "Execute",
- "FlushLog",
- "Force",
- "capitalization",
- "case upper",
- "case lower",
- "case mixed",
- "Load",
- "distinct",
- "from",
- "inline",
- "resident",
- "from_field",
- "autogenerate",
- "extension",
- "where",
- "group by",
- "order by",
- "asc",
- "desc",
- "Let",
- "Loosen Table",
- "Map",
- "NullAsNull",
- "NullAsValue",
- "Qualify",
- "Rem",
- "Rename field",
- "Rename fields",
- "Rename table",
- "Rename tables",
- "Search",
- "include",
- "exclude",
- "Section",
- "access",
- "application",
- "Select",
- "Set",
- "Sleep",
- "SQL",
- "SQLColumns",
- "SQLTables",
- "SQLTypes",
- "Star",
- "Store",
- "Tag",
- "Trace",
- "Unmap",
- "Unqualify",
- "Untag",
- # Qualifiers
- "total",
-]
-
-# Script functions
-# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/functions-in-scripts-chart-expressions.htm
-SCRIPT_FUNCTIONS = [
- # Basic aggregation functions in the data load script
- "FirstSortedValue",
- "Max",
- "Min",
- "Mode",
- "Only",
- "Sum",
- # Counter aggregation functions in the data load script
- "Count",
- "MissingCount",
- "NullCount",
- "NumericCount",
- "TextCount",
- # Financial aggregation functions in the data load script
- "IRR",
- "XIRR",
- "NPV",
- "XNPV",
- # Statistical aggregation functions in the data load script
- "Avg",
- "Correl",
- "Fractile",
- "FractileExc",
- "Kurtosis",
- "LINEST_B" "LINEST_df",
- "LINEST_f",
- "LINEST_m",
- "LINEST_r2",
- "LINEST_seb",
- "LINEST_sem",
- "LINEST_sey",
- "LINEST_ssreg",
- "Linest_ssresid",
- "Median",
- "Skew",
- "Stdev",
- "Sterr",
- "STEYX",
- # Statistical test functions
- "Chi2Test_chi2",
- "Chi2Test_df",
- "Chi2Test_p",
- # Two independent samples t-tests
- "ttest_conf",
- "ttest_df",
- "ttest_dif",
- "ttest_lower",
- "ttest_sig",
- "ttest_sterr",
- "ttest_t",
- "ttest_upper",
- # Two independent weighted samples t-tests
- "ttestw_conf",
- "ttestw_df",
- "ttestw_dif",
- "ttestw_lower",
- "ttestw_sig",
- "ttestw_sterr",
- "ttestw_t",
- "ttestw_upper",
- # One sample t-tests
- "ttest1_conf",
- "ttest1_df",
- "ttest1_dif",
- "ttest1_lower",
- "ttest1_sig",
- "ttest1_sterr",
- "ttest1_t",
- "ttest1_upper",
- # One weighted sample t-tests
- "ttest1w_conf",
- "ttest1w_df",
- "ttest1w_dif",
- "ttest1w_lower",
- "ttest1w_sig",
- "ttest1w_sterr",
- "ttest1w_t",
- "ttest1w_upper",
- # One column format functions
- "ztest_conf",
- "ztest_dif",
- "ztest_sig",
- "ztest_sterr",
- "ztest_z",
- "ztest_lower",
- "ztest_upper",
- # Weighted two-column format functions
- "ztestw_conf",
- "ztestw_dif",
- "ztestw_lower",
- "ztestw_sig",
- "ztestw_sterr",
- "ztestw_upper",
- "ztestw_z",
- # String aggregation functions in the data load script
- "Concat",
- "FirstValue",
- "LastValue",
- "MaxString",
- "MinString",
- # Synthetic dimension functions
- "ValueList",
- "ValueLoop",
- # Color functions
- "ARGB",
- "HSL",
- "RGB",
- "Color",
- "Colormix1",
- "Colormix2",
- "SysColor",
- "ColorMapHue",
- "ColorMapJet",
- "black",
- "blue",
- "brown",
- "cyan",
- "darkgray",
- "green",
- "lightblue",
- "lightcyan",
- "lightgray",
- "lightgreen",
- "lightmagenta",
- "lightred",
- "magenta",
- "red",
- "white",
- "yellow",
- # Conditional functions
- "alt",
- "class",
- "coalesce",
- "if",
- "match",
- "mixmatch",
- "pick",
- "wildmatch",
- # Counter functions
- "autonumber",
- "autonumberhash128",
- "autonumberhash256",
- "IterNo",
- "RecNo",
- "RowNo",
- # Integer expressions of time
- "second",
- "minute",
- "hour",
- "day",
- "week",
- "month",
- "year",
- "weekyear",
- "weekday",
- # Timestamp functions
- "now",
- "today",
- "LocalTime",
- # Make functions
- "makedate",
- "makeweekdate",
- "maketime",
- # Other date functions
- "AddMonths",
- "AddYears",
- "yeartodate",
- # Timezone functions
- "timezone",
- "GMT",
- "UTC",
- "daylightsaving",
- "converttolocaltime",
- # Set time functions
- "setdateyear",
- "setdateyearmonth",
- # In... functions
- "inyear",
- "inyeartodate",
- "inquarter",
- "inquartertodate",
- "inmonth",
- "inmonthtodate",
- "inmonths",
- "inmonthstodate",
- "inweek",
- "inweektodate",
- "inlunarweek",
- "inlunarweektodate",
- "inday",
- "indaytotime",
- # Start ... end functions
- "yearstart",
- "yearend",
- "yearname",
- "quarterstart",
- "quarterend",
- "quartername",
- "monthstart",
- "monthend",
- "monthname",
- "monthsstart",
- "monthsend",
- "monthsname",
- "weekstart",
- "weekend",
- "weekname",
- "lunarweekstart",
- "lunarweekend",
- "lunarweekname",
- "daystart",
- "dayend",
- "dayname",
- # Day numbering functions
- "age",
- "networkdays",
- "firstworkdate",
- "lastworkdate",
- "daynumberofyear",
- "daynumberofquarter",
- # Exponential and logarithmic
- "exp",
- "log",
- "log10",
- "pow",
- "sqr",
- "sqrt",
- # Count functions
- "GetAlternativeCount",
- "GetExcludedCount",
- "GetNotSelectedCount",
- "GetPossibleCount",
- "GetSelectedCount",
- # Field and selection functions
- "GetCurrentSelections",
- "GetFieldSelections",
- "GetObjectDimension",
- "GetObjectField",
- "GetObjectMeasure",
- # File functions
- "Attribute",
- "ConnectString",
- "FileBaseName",
- "FileDir",
- "FileExtension",
- "FileName",
- "FilePath",
- "FileSize",
- "FileTime",
- "GetFolderPath",
- "QvdCreateTime",
- "QvdFieldName",
- "QvdNoOfFields",
- "QvdNoOfRecords",
- "QvdTableName",
- # Financial functions
- "FV",
- "nPer",
- "Pmt",
- "PV",
- "Rate",
- # Formatting functions
- "ApplyCodepage",
- "Date",
- "Dual",
- "Interval",
- "Money",
- "Num",
- "Time",
- "Timestamp",
- # General numeric functions
- "bitcount",
- "div",
- "fabs",
- "fact",
- "frac",
- "sign",
- # Combination and permutation functions
- "combin",
- "permut",
- # Modulo functions
- "fmod",
- "mod",
- # Parity functions
- "even",
- "odd",
- # Rounding functions
- "ceil",
- "floor",
- "round",
- # Geospatial functions
- "GeoAggrGeometry",
- "GeoBoundingBox",
- "GeoCountVertex",
- "GeoInvProjectGeometry",
- "GeoProjectGeometry",
- "GeoReduceGeometry",
- "GeoGetBoundingBox",
- "GeoGetPolygonCenter",
- "GeoMakePoint",
- "GeoProject",
- # Interpretation functions
- "Date#",
- "Interval#",
- "Money#",
- "Num#",
- "Text",
- "Time#",
- "Timestamp#",
- # Field functions
- "FieldIndex",
- "FieldValue",
- "FieldValueCount",
- # Inter-record functions in the data load script
- "Exists",
- "LookUp",
- "Peek",
- "Previous",
- # Logical functions
- "IsNum",
- "IsText",
- # Mapping functions
- "ApplyMap",
- "MapSubstring",
- # Mathematical functions
- "e",
- "false",
- "pi",
- "rand",
- "true",
- # NULL functions
- "EmptyIsNull",
- "IsNull",
- "Null",
- # Basic range functions
- "RangeMax",
- "RangeMaxString",
- "RangeMin",
- "RangeMinString",
- "RangeMode",
- "RangeOnly",
- "RangeSum",
- # Counter range functions
- "RangeCount",
- "RangeMissingCount",
- "RangeNullCount",
- "RangeNumericCount",
- "RangeTextCount",
- # Statistical range functions
- "RangeAvg",
- "RangeCorrel",
- "RangeFractile",
- "RangeKurtosis",
- "RangeSkew",
- "RangeStdev",
- # Financial range functions
- "RangeIRR",
- "RangeNPV",
- "RangeXIRR",
- "RangeXNPV",
- # Statistical distribution
- "CHIDIST",
- "CHIINV",
- "NORMDIST",
- "NORMINV",
- "TDIST",
- "TINV",
- "FDIST",
- "FINV",
- # String functions
- "Capitalize",
- "Chr",
- "Evaluate",
- "FindOneOf",
- "Hash128",
- "Hash160",
- "Hash256",
- "Index",
- "KeepChar",
- "Left",
- "Len",
- "LevenshteinDist",
- "Lower",
- "LTrim",
- "Mid",
- "Ord",
- "PurgeChar",
- "Repeat",
- "Replace",
- "Right",
- "RTrim",
- "SubField",
- "SubStringCount",
- "TextBetween",
- "Trim",
- "Upper",
- # System functions
- "Author",
- "ClientPlatform",
- "ComputerName",
- "DocumentName",
- "DocumentPath",
- "DocumentTitle",
- "EngineVersion",
- "GetCollationLocale",
- "GetObjectField",
- "GetRegistryString",
- "IsPartialReload",
- "OSUser",
- "ProductVersion",
- "ReloadTime",
- "StateName",
- # Table functions
- "FieldName",
- "FieldNumber",
- "NoOfFields",
- "NoOfRows",
- "NoOfTables",
- "TableName",
- "TableNumber",
-]
-
-# System variables and constants
-# see https://help.qlik.com/en-US/sense/August2021/Subsystems/Hub/Content/Sense_Hub/Scripting/work-with-variables-in-data-load-editor.htm
-CONSTANT_LIST = [
- # System Variables
- "floppy",
- "cd",
- "include",
- "must_include",
- "hideprefix",
- "hidesuffix",
- "qvpath",
- "qvroot",
- "QvWorkPath",
- "QvWorkRoot",
- "StripComments",
- "Verbatim",
- "OpenUrlTimeout",
- "WinPath",
- "WinRoot",
- "CollationLocale",
- "CreateSearchIndexOnReload",
- # value handling variables
- "NullDisplay",
- "NullInterpret",
- "NullValue",
- "OtherSymbol",
- # Currency formatting
- "MoneyDecimalSep",
- "MoneyFormat",
- "MoneyThousandSep",
- # Number formatting
- "DecimalSep",
- "ThousandSep",
- "NumericalAbbreviation",
- # Time formatting
- "DateFormat",
- "TimeFormat",
- "TimestampFormat",
- "MonthNames",
- "LongMonthNames",
- "DayNames",
- "LongDayNames",
- "FirstWeekDay",
- "BrokenWeeks",
- "ReferenceDay",
- "FirstMonthOfYear",
- # Error variables
- "errormode",
- "scripterror",
- "scripterrorcount",
- "scripterrorlist",
- # Other
- "null",
-]
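The word lists in this module, including SCRIPT_FUNCTIONS and CONSTANT_LIST above, are plain data; a Pygments lexer turns them into token rules, typically with the pygments.lexer.words() helper. A minimal, hypothetical sketch of that pattern follows; MiniQlikLexer and its token rules are illustrative assumptions only, not the actual pygments.lexers.qlik implementation:

    # Illustrative sketch; not the real QlikLexer from pygments.lexers.qlik.
    import re

    from pygments.lexer import RegexLexer, words
    from pygments.lexers._qlik_builtins import CONSTANT_LIST, SCRIPT_FUNCTIONS
    from pygments.token import Name, Text

    class MiniQlikLexer(RegexLexer):
        """Toy lexer showing how the word lists above are usually consumed."""
        name = 'MiniQlik'
        flags = re.IGNORECASE  # Qlik script names are case-insensitive

        tokens = {
            'root': [
                # Built-in script functions such as Sum, Count, RangeAvg, ...
                # (word-boundary handling is simplified for brevity)
                (words(SCRIPT_FUNCTIONS), Name.Builtin),
                # System variables such as NullValue, DateFormat, ...
                (words(CONSTANT_LIST, prefix=r'\b', suffix=r'\b'), Name.Constant),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }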
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_scheme_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_scheme_builtins.py
deleted file mode 100644
index 8f2de9d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_scheme_builtins.py
+++ /dev/null
@@ -1,1609 +0,0 @@
-"""
- pygments.lexers._scheme_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Scheme builtins.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Autogenerated by external/scheme-builtins-generator.scm
-# using Guile 3.0.5.130-5a1e7.
-
-scheme_keywords = {
- "*unspecified*",
- "...",
- "=>",
- "@",
- "@@",
- "_",
- "add-to-load-path",
- "and",
- "begin",
- "begin-deprecated",
- "case",
- "case-lambda",
- "case-lambda*",
- "cond",
- "cond-expand",
- "current-filename",
- "current-source-location",
- "debug-set!",
- "define",
- "define*",
- "define-inlinable",
- "define-library",
- "define-macro",
- "define-module",
- "define-once",
- "define-option-interface",
- "define-private",
- "define-public",
- "define-record-type",
- "define-syntax",
- "define-syntax-parameter",
- "define-syntax-rule",
- "define-values",
- "defmacro",
- "defmacro-public",
- "delay",
- "do",
- "else",
- "eval-when",
- "export",
- "export!",
- "export-syntax",
- "false-if-exception",
- "identifier-syntax",
- "if",
- "import",
- "include",
- "include-ci",
- "include-from-path",
- "include-library-declarations",
- "lambda",
- "lambda*",
- "let",
- "let*",
- "let*-values",
- "let-syntax",
- "let-values",
- "letrec",
- "letrec*",
- "letrec-syntax",
- "library",
- "load",
- "match",
- "match-lambda",
- "match-lambda*",
- "match-let",
- "match-let*",
- "match-letrec",
- "or",
- "parameterize",
- "print-set!",
- "quasiquote",
- "quasisyntax",
- "quote",
- "quote-syntax",
- "re-export",
- "re-export-syntax",
- "read-set!",
- "require-extension",
- "set!",
- "start-stack",
- "syntax",
- "syntax-case",
- "syntax-error",
- "syntax-parameterize",
- "syntax-rules",
- "unless",
- "unquote",
- "unquote-splicing",
- "unsyntax",
- "unsyntax-splicing",
- "use-modules",
- "when",
- "while",
- "with-ellipsis",
- "with-fluids",
- "with-syntax",
- "λ",
-}
-
-scheme_builtins = {
- "$sc-dispatch",
- "%char-set-dump",
- "%get-pre-modules-obarray",
- "%get-stack-size",
- "%global-site-dir",
- "%init-rdelim-builtins",
- "%init-rw-builtins",
- "%library-dir",
- "%load-announce",
- "%load-hook",
- "%make-void-port",
- "%package-data-dir",
- "%port-property",
- "%print-module",
- "%resolve-variable",
- "%search-load-path",
- "%set-port-property!",
- "%site-ccache-dir",
- "%site-dir",
- "%start-stack",
- "%string-dump",
- "%symbol-dump",
- "%warn-auto-compilation-enabled",
- "*",
- "+",
- "-",
- "->bool",
- "->char-set",
- "/",
- "1+",
- "1-",
- "<",
- "<=",
- "=",
- ">",
- ">=",
- "abort-to-prompt",
- "abort-to-prompt*",
- "abs",
- "absolute-file-name?",
- "accept",
- "access?",
- "acons",
- "acos",
- "acosh",
- "add-hook!",
- "addrinfo:addr",
- "addrinfo:canonname",
- "addrinfo:fam",
- "addrinfo:flags",
- "addrinfo:protocol",
- "addrinfo:socktype",
- "adjust-port-revealed!",
- "alarm",
- "alist-cons",
- "alist-copy",
- "alist-delete",
- "alist-delete!",
- "allocate-struct",
- "and-map",
- "and=>",
- "angle",
- "any",
- "append",
- "append!",
- "append-map",
- "append-map!",
- "append-reverse",
- "append-reverse!",
- "apply",
- "array->list",
- "array-cell-ref",
- "array-cell-set!",
- "array-contents",
- "array-copy!",
- "array-copy-in-order!",
- "array-dimensions",
- "array-equal?",
- "array-fill!",
- "array-for-each",
- "array-in-bounds?",
- "array-index-map!",
- "array-length",
- "array-map!",
- "array-map-in-order!",
- "array-rank",
- "array-ref",
- "array-set!",
- "array-shape",
- "array-slice",
- "array-slice-for-each",
- "array-slice-for-each-in-order",
- "array-type",
- "array-type-code",
- "array?",
- "ash",
- "asin",
- "asinh",
- "assert-load-verbosity",
- "assoc",
- "assoc-ref",
- "assoc-remove!",
- "assoc-set!",
- "assq",
- "assq-ref",
- "assq-remove!",
- "assq-set!",
- "assv",
- "assv-ref",
- "assv-remove!",
- "assv-set!",
- "atan",
- "atanh",
- "autoload-done!",
- "autoload-done-or-in-progress?",
- "autoload-in-progress!",
- "backtrace",
- "basename",
- "batch-mode?",
- "beautify-user-module!",
- "bind",
- "bind-textdomain-codeset",
- "bindtextdomain",
- "bit-count",
- "bit-count*",
- "bit-extract",
- "bit-invert!",
- "bit-position",
- "bit-set*!",
- "bitvector",
- "bitvector->list",
- "bitvector-bit-clear?",
- "bitvector-bit-set?",
- "bitvector-clear-all-bits!",
- "bitvector-clear-bit!",
- "bitvector-clear-bits!",
- "bitvector-count",
- "bitvector-count-bits",
- "bitvector-fill!",
- "bitvector-flip-all-bits!",
- "bitvector-length",
- "bitvector-position",
- "bitvector-ref",
- "bitvector-set!",
- "bitvector-set-all-bits!",
- "bitvector-set-bit!",
- "bitvector-set-bits!",
- "bitvector?",
- "boolean?",
- "bound-identifier=?",
- "break",
- "break!",
- "caaaar",
- "caaadr",
- "caaar",
- "caadar",
- "caaddr",
- "caadr",
- "caar",
- "cadaar",
- "cadadr",
- "cadar",
- "caddar",
- "cadddr",
- "caddr",
- "cadr",
- "call-with-blocked-asyncs",
- "call-with-current-continuation",
- "call-with-deferred-observers",
- "call-with-include-port",
- "call-with-input-file",
- "call-with-input-string",
- "call-with-module-autoload-lock",
- "call-with-output-file",
- "call-with-output-string",
- "call-with-port",
- "call-with-prompt",
- "call-with-unblocked-asyncs",
- "call-with-values",
- "call/cc",
- "canonicalize-path",
- "car",
- "car+cdr",
- "catch",
- "cdaaar",
- "cdaadr",
- "cdaar",
- "cdadar",
- "cdaddr",
- "cdadr",
- "cdar",
- "cddaar",
- "cddadr",
- "cddar",
- "cdddar",
- "cddddr",
- "cdddr",
- "cddr",
- "cdr",
- "ceiling",
- "ceiling-quotient",
- "ceiling-remainder",
- "ceiling/",
- "centered-quotient",
- "centered-remainder",
- "centered/",
- "char->integer",
- "char-alphabetic?",
- "char-ci<=?",
- "char-ci<?",
- "char-ci=?",
- "char-ci>=?",
- "char-ci>?",
- "char-downcase",
- "char-general-category",
- "char-is-both?",
- "char-lower-case?",
- "char-numeric?",
- "char-ready?",
- "char-set",
- "char-set->list",
- "char-set->string",
- "char-set-adjoin",
- "char-set-adjoin!",
- "char-set-any",
- "char-set-complement",
- "char-set-complement!",
- "char-set-contains?",
- "char-set-copy",
- "char-set-count",
- "char-set-cursor",
- "char-set-cursor-next",
- "char-set-delete",
- "char-set-delete!",
- "char-set-diff+intersection",
- "char-set-diff+intersection!",
- "char-set-difference",
- "char-set-difference!",
- "char-set-every",
- "char-set-filter",
- "char-set-filter!",
- "char-set-fold",
- "char-set-for-each",
- "char-set-hash",
- "char-set-intersection",
- "char-set-intersection!",
- "char-set-map",
- "char-set-ref",
- "char-set-size",
- "char-set-unfold",
- "char-set-unfold!",
- "char-set-union",
- "char-set-union!",
- "char-set-xor",
- "char-set-xor!",
- "char-set<=",
- "char-set=",
- "char-set?",
- "char-titlecase",
- "char-upcase",
- "char-upper-case?",
- "char-whitespace?",
- "char<=?",
- "char<?",
- "char=?",
- "char>=?",
- "char>?",
- "char?",
- "chdir",
- "chmod",
- "chown",
- "chroot",
- "circular-list",
- "circular-list?",
- "close",
- "close-fdes",
- "close-input-port",
- "close-output-port",
- "close-port",
- "closedir",
- "command-line",
- "complex?",
- "compose",
- "concatenate",
- "concatenate!",
- "cond-expand-provide",
- "connect",
- "cons",
- "cons*",
- "cons-source",
- "const",
- "convert-assignment",
- "copy-file",
- "copy-random-state",
- "copy-tree",
- "cos",
- "cosh",
- "count",
- "crypt",
- "ctermid",
- "current-dynamic-state",
- "current-error-port",
- "current-input-port",
- "current-language",
- "current-load-port",
- "current-module",
- "current-output-port",
- "current-time",
- "current-warning-port",
- "datum->random-state",
- "datum->syntax",
- "debug-disable",
- "debug-enable",
- "debug-options",
- "debug-options-interface",
- "default-duplicate-binding-handler",
- "default-duplicate-binding-procedures",
- "default-prompt-tag",
- "define!",
- "define-module*",
- "defined?",
- "delete",
- "delete!",
- "delete-duplicates",
- "delete-duplicates!",
- "delete-file",
- "delete1!",
- "delq",
- "delq!",
- "delq1!",
- "delv",
- "delv!",
- "delv1!",
- "denominator",
- "directory-stream?",
- "dirname",
- "display",
- "display-application",
- "display-backtrace",
- "display-error",
- "dotted-list?",
- "doubly-weak-hash-table?",
- "drain-input",
- "drop",
- "drop-right",
- "drop-right!",
- "drop-while",
- "dup",
- "dup->fdes",
- "dup->inport",
- "dup->outport",
- "dup->port",
- "dup2",
- "duplicate-port",
- "dynamic-call",
- "dynamic-func",
- "dynamic-link",
- "dynamic-object?",
- "dynamic-pointer",
- "dynamic-state?",
- "dynamic-unlink",
- "dynamic-wind",
- "effective-version",
- "eighth",
- "end-of-char-set?",
- "endgrent",
- "endhostent",
- "endnetent",
- "endprotoent",
- "endpwent",
- "endservent",
- "ensure-batch-mode!",
- "environ",
- "eof-object?",
- "eq?",
- "equal?",
- "eqv?",
- "error",
- "euclidean-quotient",
- "euclidean-remainder",
- "euclidean/",
- "eval",
- "eval-string",
- "even?",
- "every",
- "exact->inexact",
- "exact-integer-sqrt",
- "exact-integer?",
- "exact?",
- "exception-accessor",
- "exception-args",
- "exception-kind",
- "exception-predicate",
- "exception-type?",
- "exception?",
- "execl",
- "execle",
- "execlp",
- "exit",
- "exp",
- "expt",
- "f32vector",
- "f32vector->list",
- "f32vector-length",
- "f32vector-ref",
- "f32vector-set!",
- "f32vector?",
- "f64vector",
- "f64vector->list",
- "f64vector-length",
- "f64vector-ref",
- "f64vector-set!",
- "f64vector?",
- "fcntl",
- "fdes->inport",
- "fdes->outport",
- "fdes->ports",
- "fdopen",
- "fifth",
- "file-encoding",
- "file-exists?",
- "file-is-directory?",
- "file-name-separator?",
- "file-port?",
- "file-position",
- "file-set-position",
- "fileno",
- "filter",
- "filter!",
- "filter-map",
- "find",
- "find-tail",
- "finite?",
- "first",
- "flock",
- "floor",
- "floor-quotient",
- "floor-remainder",
- "floor/",
- "fluid->parameter",
- "fluid-bound?",
- "fluid-ref",
- "fluid-ref*",
- "fluid-set!",
- "fluid-thread-local?",
- "fluid-unset!",
- "fluid?",
- "flush-all-ports",
- "fold",
- "fold-right",
- "for-each",
- "force",
- "force-output",
- "format",
- "fourth",
- "frame-address",
- "frame-arguments",
- "frame-dynamic-link",
- "frame-instruction-pointer",
- "frame-previous",
- "frame-procedure-name",
- "frame-return-address",
- "frame-source",
- "frame-stack-pointer",
- "frame?",
- "free-identifier=?",
- "fsync",
- "ftell",
- "gai-strerror",
- "gc",
- "gc-disable",
- "gc-dump",
- "gc-enable",
- "gc-run-time",
- "gc-stats",
- "gcd",
- "generate-temporaries",
- "gensym",
- "get-internal-real-time",
- "get-internal-run-time",
- "get-output-string",
- "get-print-state",
- "getaddrinfo",
- "getaffinity",
- "getcwd",
- "getegid",
- "getenv",
- "geteuid",
- "getgid",
- "getgr",
- "getgrent",
- "getgrgid",
- "getgrnam",
- "getgroups",
- "gethost",
- "gethostbyaddr",
- "gethostbyname",
- "gethostent",
- "gethostname",
- "getitimer",
- "getlogin",
- "getnet",
- "getnetbyaddr",
- "getnetbyname",
- "getnetent",
- "getpass",
- "getpeername",
- "getpgrp",
- "getpid",
- "getppid",
- "getpriority",
- "getproto",
- "getprotobyname",
- "getprotobynumber",
- "getprotoent",
- "getpw",
- "getpwent",
- "getpwnam",
- "getpwuid",
- "getrlimit",
- "getserv",
- "getservbyname",
- "getservbyport",
- "getservent",
- "getsid",
- "getsockname",
- "getsockopt",
- "gettext",
- "gettimeofday",
- "getuid",
- "gmtime",
- "group:gid",
- "group:mem",
- "group:name",
- "group:passwd",
- "hash",
- "hash-clear!",
- "hash-count",
- "hash-create-handle!",
- "hash-fold",
- "hash-for-each",
- "hash-for-each-handle",
- "hash-get-handle",
- "hash-map->list",
- "hash-ref",
- "hash-remove!",
- "hash-set!",
- "hash-table?",
- "hashq",
- "hashq-create-handle!",
- "hashq-get-handle",
- "hashq-ref",
- "hashq-remove!",
- "hashq-set!",
- "hashv",
- "hashv-create-handle!",
- "hashv-get-handle",
- "hashv-ref",
- "hashv-remove!",
- "hashv-set!",
- "hashx-create-handle!",
- "hashx-get-handle",
- "hashx-ref",
- "hashx-remove!",
- "hashx-set!",
- "hook->list",
- "hook-empty?",
- "hook?",
- "hostent:addr-list",
- "hostent:addrtype",
- "hostent:aliases",
- "hostent:length",
- "hostent:name",
- "identifier?",
- "identity",
- "imag-part",
- "in-vicinity",
- "include-deprecated-features",
- "inet-lnaof",
- "inet-makeaddr",
- "inet-netof",
- "inet-ntop",
- "inet-pton",
- "inexact->exact",
- "inexact?",
- "inf",
- "inf?",
- "inherit-print-state",
- "input-port?",
- "install-r6rs!",
- "install-r7rs!",
- "integer->char",
- "integer-expt",
- "integer-length",
- "integer?",
- "interaction-environment",
- "iota",
- "isatty?",
- "issue-deprecation-warning",
- "keyword->symbol",
- "keyword-like-symbol->keyword",
- "keyword?",
- "kill",
- "kw-arg-ref",
- "last",
- "last-pair",
- "lcm",
- "length",
- "length+",
- "link",
- "list",
- "list->array",
- "list->bitvector",
- "list->char-set",
- "list->char-set!",
- "list->f32vector",
- "list->f64vector",
- "list->s16vector",
- "list->s32vector",
- "list->s64vector",
- "list->s8vector",
- "list->string",
- "list->symbol",
- "list->typed-array",
- "list->u16vector",
- "list->u32vector",
- "list->u64vector",
- "list->u8vector",
- "list->vector",
- "list-cdr-ref",
- "list-cdr-set!",
- "list-copy",
- "list-head",
- "list-index",
- "list-ref",
- "list-set!",
- "list-tabulate",
- "list-tail",
- "list=",
- "list?",
- "listen",
- "load-compiled",
- "load-extension",
- "load-from-path",
- "load-in-vicinity",
- "load-user-init",
- "local-define",
- "local-define-module",
- "local-ref",
- "local-ref-module",
- "local-remove",
- "local-set!",
- "localtime",
- "log",
- "log10",
- "logand",
- "logbit?",
- "logcount",
- "logior",
- "lognot",
- "logtest",
- "logxor",
- "lookup-duplicates-handlers",
- "lset-adjoin",
- "lset-diff+intersection",
- "lset-diff+intersection!",
- "lset-difference",
- "lset-difference!",
- "lset-intersection",
- "lset-intersection!",
- "lset-union",
- "lset-union!",
- "lset-xor",
- "lset-xor!",
- "lset<=",
- "lset=",
- "lstat",
- "macro-binding",
- "macro-name",
- "macro-transformer",
- "macro-type",
- "macro?",
- "macroexpand",
- "macroexpanded?",
- "magnitude",
- "major-version",
- "make-array",
- "make-autoload-interface",
- "make-bitvector",
- "make-doubly-weak-hash-table",
- "make-exception",
- "make-exception-from-throw",
- "make-exception-type",
- "make-f32vector",
- "make-f64vector",
- "make-fluid",
- "make-fresh-user-module",
- "make-generalized-vector",
- "make-guardian",
- "make-hash-table",
- "make-hook",
- "make-list",
- "make-module",
- "make-modules-in",
- "make-mutable-parameter",
- "make-object-property",
- "make-parameter",
- "make-polar",
- "make-procedure-with-setter",
- "make-promise",
- "make-prompt-tag",
- "make-record-type",
- "make-rectangular",
- "make-regexp",
- "make-s16vector",
- "make-s32vector",
- "make-s64vector",
- "make-s8vector",
- "make-shared-array",
- "make-socket-address",
- "make-soft-port",
- "make-srfi-4-vector",
- "make-stack",
- "make-string",
- "make-struct-layout",
- "make-struct/no-tail",
- "make-struct/simple",
- "make-symbol",
- "make-syntax-transformer",
- "make-thread-local-fluid",
- "make-typed-array",
- "make-u16vector",
- "make-u32vector",
- "make-u64vector",
- "make-u8vector",
- "make-unbound-fluid",
- "make-undefined-variable",
- "make-variable",
- "make-variable-transformer",
- "make-vector",
- "make-vtable",
- "make-weak-key-hash-table",
- "make-weak-value-hash-table",
- "map",
- "map!",
- "map-in-order",
- "max",
- "member",
- "memoize-expression",
- "memoized-typecode",
- "memq",
- "memv",
- "merge",
- "merge!",
- "micro-version",
- "min",
- "minor-version",
- "mkdir",
- "mkdtemp",
- "mknod",
- "mkstemp",
- "mkstemp!",
- "mktime",
- "module-add!",
- "module-autoload!",
- "module-binder",
- "module-bound?",
- "module-call-observers",
- "module-clear!",
- "module-constructor",
- "module-declarative?",
- "module-defer-observers",
- "module-define!",
- "module-define-submodule!",
- "module-defined?",
- "module-duplicates-handlers",
- "module-ensure-local-variable!",
- "module-export!",
- "module-export-all!",
- "module-filename",
- "module-for-each",
- "module-generate-unique-id!",
- "module-gensym",
- "module-import-interface",
- "module-import-obarray",
- "module-kind",
- "module-local-variable",
- "module-locally-bound?",
- "module-make-local-var!",
- "module-map",
- "module-modified",
- "module-name",
- "module-next-unique-id",
- "module-obarray",
- "module-obarray-get-handle",
- "module-obarray-ref",
- "module-obarray-remove!",
- "module-obarray-set!",
- "module-observe",
- "module-observe-weak",
- "module-observers",
- "module-public-interface",
- "module-re-export!",
- "module-ref",
- "module-ref-submodule",
- "module-remove!",
- "module-replace!",
- "module-replacements",
- "module-reverse-lookup",
- "module-search",
- "module-set!",
- "module-submodule-binder",
- "module-submodules",
- "module-symbol-binding",
- "module-symbol-interned?",
- "module-symbol-local-binding",
- "module-symbol-locally-interned?",
- "module-transformer",
- "module-unobserve",
- "module-use!",
- "module-use-interfaces!",
- "module-uses",
- "module-variable",
- "module-version",
- "module-weak-observers",
- "module?",
- "modulo",
- "modulo-expt",
- "move->fdes",
- "nan",
- "nan?",
- "negate",
- "negative?",
- "nested-define!",
- "nested-define-module!",
- "nested-ref",
- "nested-ref-module",
- "nested-remove!",
- "nested-set!",
- "netent:addrtype",
- "netent:aliases",
- "netent:name",
- "netent:net",
- "newline",
- "ngettext",
- "nice",
- "nil?",
- "ninth",
- "noop",
- "not",
- "not-pair?",
- "null-environment",
- "null-list?",
- "null?",
- "number->string",
- "number?",
- "numerator",
- "object->string",
- "object-address",
- "object-properties",
- "object-property",
- "odd?",
- "open",
- "open-fdes",
- "open-file",
- "open-input-file",
- "open-input-string",
- "open-io-file",
- "open-output-file",
- "open-output-string",
- "opendir",
- "or-map",
- "output-port?",
- "pair-fold",
- "pair-fold-right",
- "pair-for-each",
- "pair?",
- "parameter-converter",
- "parameter-fluid",
- "parameter?",
- "parse-path",
- "parse-path-with-ellipsis",
- "partition",
- "partition!",
- "passwd:dir",
- "passwd:gecos",
- "passwd:gid",
- "passwd:name",
- "passwd:passwd",
- "passwd:shell",
- "passwd:uid",
- "pause",
- "peek",
- "peek-char",
- "pipe",
- "pk",
- "port->fdes",
- "port-closed?",
- "port-column",
- "port-conversion-strategy",
- "port-encoding",
- "port-filename",
- "port-for-each",
- "port-line",
- "port-mode",
- "port-revealed",
- "port-with-print-state",
- "port?",
- "positive?",
- "primitive-_exit",
- "primitive-eval",
- "primitive-exit",
- "primitive-fork",
- "primitive-load",
- "primitive-load-path",
- "primitive-move->fdes",
- "primitive-read",
- "print-disable",
- "print-enable",
- "print-exception",
- "print-options",
- "print-options-interface",
- "procedure",
- "procedure-documentation",
- "procedure-minimum-arity",
- "procedure-name",
- "procedure-properties",
- "procedure-property",
- "procedure-source",
- "procedure-with-setter?",
- "procedure?",
- "process-use-modules",
- "program-arguments",
- "promise?",
- "proper-list?",
- "protoent:aliases",
- "protoent:name",
- "protoent:proto",
- "provide",
- "provided?",
- "purify-module!",
- "putenv",
- "quit",
- "quotient",
- "raise",
- "raise-exception",
- "random",
- "random-state->datum",
- "random-state-from-platform",
- "random:exp",
- "random:hollow-sphere!",
- "random:normal",
- "random:normal-vector!",
- "random:solid-sphere!",
- "random:uniform",
- "rational?",
- "rationalize",
- "read",
- "read-char",
- "read-disable",
- "read-enable",
- "read-hash-extend",
- "read-hash-procedure",
- "read-hash-procedures",
- "read-options",
- "read-options-interface",
- "read-syntax",
- "readdir",
- "readlink",
- "real-part",
- "real?",
- "record-accessor",
- "record-constructor",
- "record-modifier",
- "record-predicate",
- "record-type-constructor",
- "record-type-descriptor",
- "record-type-extensible?",
- "record-type-fields",
- "record-type-has-parent?",
- "record-type-mutable-fields",
- "record-type-name",
- "record-type-opaque?",
- "record-type-parent",
- "record-type-parents",
- "record-type-properties",
- "record-type-uid",
- "record-type?",
- "record?",
- "recv!",
- "recvfrom!",
- "redirect-port",
- "reduce",
- "reduce-right",
- "regexp-exec",
- "regexp?",
- "release-port-handle",
- "reload-module",
- "remainder",
- "remove",
- "remove!",
- "remove-hook!",
- "rename-file",
- "repl-reader",
- "reset-hook!",
- "resolve-interface",
- "resolve-module",
- "resolve-r6rs-interface",
- "restore-signals",
- "restricted-vector-sort!",
- "reverse",
- "reverse!",
- "reverse-list->string",
- "rewinddir",
- "rmdir",
- "round",
- "round-ash",
- "round-quotient",
- "round-remainder",
- "round/",
- "run-hook",
- "s16vector",
- "s16vector->list",
- "s16vector-length",
- "s16vector-ref",
- "s16vector-set!",
- "s16vector?",
- "s32vector",
- "s32vector->list",
- "s32vector-length",
- "s32vector-ref",
- "s32vector-set!",
- "s32vector?",
- "s64vector",
- "s64vector->list",
- "s64vector-length",
- "s64vector-ref",
- "s64vector-set!",
- "s64vector?",
- "s8vector",
- "s8vector->list",
- "s8vector-length",
- "s8vector-ref",
- "s8vector-set!",
- "s8vector?",
- "save-module-excursion",
- "scheme-report-environment",
- "scm-error",
- "search-path",
- "second",
- "seed->random-state",
- "seek",
- "select",
- "self-evaluating?",
- "send",
- "sendfile",
- "sendto",
- "servent:aliases",
- "servent:name",
- "servent:port",
- "servent:proto",
- "set-autoloaded!",
- "set-car!",
- "set-cdr!",
- "set-current-dynamic-state",
- "set-current-error-port",
- "set-current-input-port",
- "set-current-module",
- "set-current-output-port",
- "set-exception-printer!",
- "set-module-binder!",
- "set-module-declarative?!",
- "set-module-duplicates-handlers!",
- "set-module-filename!",
- "set-module-kind!",
- "set-module-name!",
- "set-module-next-unique-id!",
- "set-module-obarray!",
- "set-module-observers!",
- "set-module-public-interface!",
- "set-module-submodule-binder!",
- "set-module-submodules!",
- "set-module-transformer!",
- "set-module-uses!",
- "set-module-version!",
- "set-object-properties!",
- "set-object-property!",
- "set-port-column!",
- "set-port-conversion-strategy!",
- "set-port-encoding!",
- "set-port-filename!",
- "set-port-line!",
- "set-port-revealed!",
- "set-procedure-minimum-arity!",
- "set-procedure-properties!",
- "set-procedure-property!",
- "set-program-arguments",
- "set-source-properties!",
- "set-source-property!",
- "set-struct-vtable-name!",
- "set-symbol-property!",
- "set-tm:gmtoff",
- "set-tm:hour",
- "set-tm:isdst",
- "set-tm:mday",
- "set-tm:min",
- "set-tm:mon",
- "set-tm:sec",
- "set-tm:wday",
- "set-tm:yday",
- "set-tm:year",
- "set-tm:zone",
- "setaffinity",
- "setegid",
- "setenv",
- "seteuid",
- "setgid",
- "setgr",
- "setgrent",
- "setgroups",
- "sethost",
- "sethostent",
- "sethostname",
- "setitimer",
- "setlocale",
- "setnet",
- "setnetent",
- "setpgid",
- "setpriority",
- "setproto",
- "setprotoent",
- "setpw",
- "setpwent",
- "setrlimit",
- "setserv",
- "setservent",
- "setsid",
- "setsockopt",
- "setter",
- "setuid",
- "setvbuf",
- "seventh",
- "shared-array-increments",
- "shared-array-offset",
- "shared-array-root",
- "shutdown",
- "sigaction",
- "simple-exceptions",
- "simple-format",
- "sin",
- "sinh",
- "sixth",
- "sleep",
- "sloppy-assoc",
- "sloppy-assq",
- "sloppy-assv",
- "sockaddr:addr",
- "sockaddr:fam",
- "sockaddr:flowinfo",
- "sockaddr:path",
- "sockaddr:port",
- "sockaddr:scopeid",
- "socket",
- "socketpair",
- "sort",
- "sort!",
- "sort-list",
- "sort-list!",
- "sorted?",
- "source-properties",
- "source-property",
- "span",
- "span!",
- "split-at",
- "split-at!",
- "sqrt",
- "stable-sort",
- "stable-sort!",
- "stack-id",
- "stack-length",
- "stack-ref",
- "stack?",
- "stat",
- "stat:atime",
- "stat:atimensec",
- "stat:blksize",
- "stat:blocks",
- "stat:ctime",
- "stat:ctimensec",
- "stat:dev",
- "stat:gid",
- "stat:ino",
- "stat:mode",
- "stat:mtime",
- "stat:mtimensec",
- "stat:nlink",
- "stat:perms",
- "stat:rdev",
- "stat:size",
- "stat:type",
- "stat:uid",
- "status:exit-val",
- "status:stop-sig",
- "status:term-sig",
- "strerror",
- "strftime",
- "string",
- "string->char-set",
- "string->char-set!",
- "string->list",
- "string->number",
- "string->symbol",
- "string-any",
- "string-any-c-code",
- "string-append",
- "string-append/shared",
- "string-bytes-per-char",
- "string-capitalize",
- "string-capitalize!",
- "string-ci->symbol",
- "string-ci<",
- "string-ci<=",
- "string-ci<=?",
- "string-ci<>",
- "string-ci<?",
- "string-ci=",
- "string-ci=?",
- "string-ci>",
- "string-ci>=",
- "string-ci>=?",
- "string-ci>?",
- "string-compare",
- "string-compare-ci",
- "string-concatenate",
- "string-concatenate-reverse",
- "string-concatenate-reverse/shared",
- "string-concatenate/shared",
- "string-contains",
- "string-contains-ci",
- "string-copy",
- "string-copy!",
- "string-count",
- "string-delete",
- "string-downcase",
- "string-downcase!",
- "string-drop",
- "string-drop-right",
- "string-every",
- "string-every-c-code",
- "string-fill!",
- "string-filter",
- "string-fold",
- "string-fold-right",
- "string-for-each",
- "string-for-each-index",
- "string-hash",
- "string-hash-ci",
- "string-index",
- "string-index-right",
- "string-join",
- "string-length",
- "string-map",
- "string-map!",
- "string-normalize-nfc",
- "string-normalize-nfd",
- "string-normalize-nfkc",
- "string-normalize-nfkd",
- "string-null?",
- "string-pad",
- "string-pad-right",
- "string-prefix-ci?",
- "string-prefix-length",
- "string-prefix-length-ci",
- "string-prefix?",
- "string-ref",
- "string-replace",
- "string-reverse",
- "string-reverse!",
- "string-rindex",
- "string-set!",
- "string-skip",
- "string-skip-right",
- "string-split",
- "string-suffix-ci?",
- "string-suffix-length",
- "string-suffix-length-ci",
- "string-suffix?",
- "string-tabulate",
- "string-take",
- "string-take-right",
- "string-titlecase",
- "string-titlecase!",
- "string-tokenize",
- "string-trim",
- "string-trim-both",
- "string-trim-right",
- "string-unfold",
- "string-unfold-right",
- "string-upcase",
- "string-upcase!",
- "string-utf8-length",
- "string-xcopy!",
- "string<",
- "string<=",
- "string<=?",
- "string<>",
- "string<?",
- "string=",
- "string=?",
- "string>",
- "string>=",
- "string>=?",
- "string>?",
- "string?",
- "strptime",
- "struct-layout",
- "struct-ref",
- "struct-ref/unboxed",
- "struct-set!",
- "struct-set!/unboxed",
- "struct-vtable",
- "struct-vtable-name",
- "struct-vtable?",
- "struct?",
- "substring",
- "substring-fill!",
- "substring-move!",
- "substring/copy",
- "substring/read-only",
- "substring/shared",
- "supports-source-properties?",
- "symbol",
- "symbol->keyword",
- "symbol->string",
- "symbol-append",
- "symbol-fref",
- "symbol-fset!",
- "symbol-hash",
- "symbol-interned?",
- "symbol-pref",
- "symbol-prefix-proc",
- "symbol-property",
- "symbol-property-remove!",
- "symbol-pset!",
- "symbol?",
- "symlink",
- "sync",
- "syntax->datum",
- "syntax-source",
- "syntax-violation",
- "system",
- "system*",
- "system-async-mark",
- "system-error-errno",
- "system-file-name-convention",
- "take",
- "take!",
- "take-right",
- "take-while",
- "take-while!",
- "tan",
- "tanh",
- "tcgetpgrp",
- "tcsetpgrp",
- "tenth",
- "textdomain",
- "third",
- "throw",
- "thunk?",
- "times",
- "tm:gmtoff",
- "tm:hour",
- "tm:isdst",
- "tm:mday",
- "tm:min",
- "tm:mon",
- "tm:sec",
- "tm:wday",
- "tm:yday",
- "tm:year",
- "tm:zone",
- "tmpfile",
- "tmpnam",
- "tms:clock",
- "tms:cstime",
- "tms:cutime",
- "tms:stime",
- "tms:utime",
- "transpose-array",
- "truncate",
- "truncate-file",
- "truncate-quotient",
- "truncate-remainder",
- "truncate/",
- "try-load-module",
- "try-module-autoload",
- "ttyname",
- "typed-array?",
- "tzset",
- "u16vector",
- "u16vector->list",
- "u16vector-length",
- "u16vector-ref",
- "u16vector-set!",
- "u16vector?",
- "u32vector",
- "u32vector->list",
- "u32vector-length",
- "u32vector-ref",
- "u32vector-set!",
- "u32vector?",
- "u64vector",
- "u64vector->list",
- "u64vector-length",
- "u64vector-ref",
- "u64vector-set!",
- "u64vector?",
- "u8vector",
- "u8vector->list",
- "u8vector-length",
- "u8vector-ref",
- "u8vector-set!",
- "u8vector?",
- "ucs-range->char-set",
- "ucs-range->char-set!",
- "umask",
- "uname",
- "unfold",
- "unfold-right",
- "unmemoize-expression",
- "unread-char",
- "unread-string",
- "unsetenv",
- "unspecified?",
- "unzip1",
- "unzip2",
- "unzip3",
- "unzip4",
- "unzip5",
- "use-srfis",
- "user-modules-declarative?",
- "using-readline?",
- "usleep",
- "utime",
- "utsname:machine",
- "utsname:nodename",
- "utsname:release",
- "utsname:sysname",
- "utsname:version",
- "values",
- "variable-bound?",
- "variable-ref",
- "variable-set!",
- "variable-unset!",
- "variable?",
- "vector",
- "vector->list",
- "vector-copy",
- "vector-fill!",
- "vector-length",
- "vector-move-left!",
- "vector-move-right!",
- "vector-ref",
- "vector-set!",
- "vector?",
- "version",
- "version-matches?",
- "waitpid",
- "warn",
- "weak-key-hash-table?",
- "weak-value-hash-table?",
- "with-continuation-barrier",
- "with-dynamic-state",
- "with-error-to-file",
- "with-error-to-port",
- "with-error-to-string",
- "with-exception-handler",
- "with-fluid*",
- "with-fluids*",
- "with-input-from-file",
- "with-input-from-port",
- "with-input-from-string",
- "with-output-to-file",
- "with-output-to-port",
- "with-output-to-string",
- "with-throw-handler",
- "write",
- "write-char",
- "xcons",
- "xsubstring",
- "zero?",
- "zip",
-}
-
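scheme_keywords and scheme_builtins above are Python sets, which makes them natural for membership tests: a highlighter can look each bare identifier up, giving keywords precedence over builtins. A small sketch of that lookup follows; classify_symbol and the token choices are illustrative assumptions, not the code of Pygments' Scheme lexer:

    # Illustrative sketch; not the actual pygments.lexers.scheme implementation.
    from pygments.lexers._scheme_builtins import scheme_builtins, scheme_keywords
    from pygments.token import Keyword, Name

    def classify_symbol(name):
        """Map a bare Scheme identifier to a highlighting token.

        Keywords are checked first, so a name appearing in both sets
        would still come out as Keyword.
        """
        if name in scheme_keywords:
            return Keyword
        if name in scheme_builtins:
            return Name.Builtin
        return Name.Variable

    # classify_symbol("define")  -> Keyword
    # classify_symbol("map")     -> Name.Builtin
    # classify_symbol("my-proc") -> Name.Variable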
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_scilab_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_scilab_builtins.py
deleted file mode 100644
index f2adba8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_scilab_builtins.py
+++ /dev/null
@@ -1,3093 +0,0 @@
-"""
- pygments.lexers._scilab_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the ScilabLexer.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Autogenerated
-
-commands_kw = (
- 'abort',
- 'apropos',
- 'break',
- 'case',
- 'catch',
- 'continue',
- 'do',
- 'else',
- 'elseif',
- 'end',
- 'endfunction',
- 'for',
- 'function',
- 'help',
- 'if',
- 'pause',
- 'quit',
- 'select',
- 'then',
- 'try',
- 'while',
-)
-
-functions_kw = (
- '!!_invoke_',
- '%H5Object_e',
- '%H5Object_fieldnames',
- '%H5Object_p',
- '%XMLAttr_6',
- '%XMLAttr_e',
- '%XMLAttr_i_XMLElem',
- '%XMLAttr_length',
- '%XMLAttr_p',
- '%XMLAttr_size',
- '%XMLDoc_6',
- '%XMLDoc_e',
- '%XMLDoc_i_XMLList',
- '%XMLDoc_p',
- '%XMLElem_6',
- '%XMLElem_e',
- '%XMLElem_i_XMLDoc',
- '%XMLElem_i_XMLElem',
- '%XMLElem_i_XMLList',
- '%XMLElem_p',
- '%XMLList_6',
- '%XMLList_e',
- '%XMLList_i_XMLElem',
- '%XMLList_i_XMLList',
- '%XMLList_length',
- '%XMLList_p',
- '%XMLList_size',
- '%XMLNs_6',
- '%XMLNs_e',
- '%XMLNs_i_XMLElem',
- '%XMLNs_p',
- '%XMLSet_6',
- '%XMLSet_e',
- '%XMLSet_length',
- '%XMLSet_p',
- '%XMLSet_size',
- '%XMLValid_p',
- '%_EClass_6',
- '%_EClass_e',
- '%_EClass_p',
- '%_EObj_0',
- '%_EObj_1__EObj',
- '%_EObj_1_b',
- '%_EObj_1_c',
- '%_EObj_1_i',
- '%_EObj_1_s',
- '%_EObj_2__EObj',
- '%_EObj_2_b',
- '%_EObj_2_c',
- '%_EObj_2_i',
- '%_EObj_2_s',
- '%_EObj_3__EObj',
- '%_EObj_3_b',
- '%_EObj_3_c',
- '%_EObj_3_i',
- '%_EObj_3_s',
- '%_EObj_4__EObj',
- '%_EObj_4_b',
- '%_EObj_4_c',
- '%_EObj_4_i',
- '%_EObj_4_s',
- '%_EObj_5',
- '%_EObj_6',
- '%_EObj_a__EObj',
- '%_EObj_a_b',
- '%_EObj_a_c',
- '%_EObj_a_i',
- '%_EObj_a_s',
- '%_EObj_d__EObj',
- '%_EObj_d_b',
- '%_EObj_d_c',
- '%_EObj_d_i',
- '%_EObj_d_s',
- '%_EObj_disp',
- '%_EObj_e',
- '%_EObj_g__EObj',
- '%_EObj_g_b',
- '%_EObj_g_c',
- '%_EObj_g_i',
- '%_EObj_g_s',
- '%_EObj_h__EObj',
- '%_EObj_h_b',
- '%_EObj_h_c',
- '%_EObj_h_i',
- '%_EObj_h_s',
- '%_EObj_i__EObj',
- '%_EObj_j__EObj',
- '%_EObj_j_b',
- '%_EObj_j_c',
- '%_EObj_j_i',
- '%_EObj_j_s',
- '%_EObj_k__EObj',
- '%_EObj_k_b',
- '%_EObj_k_c',
- '%_EObj_k_i',
- '%_EObj_k_s',
- '%_EObj_l__EObj',
- '%_EObj_l_b',
- '%_EObj_l_c',
- '%_EObj_l_i',
- '%_EObj_l_s',
- '%_EObj_m__EObj',
- '%_EObj_m_b',
- '%_EObj_m_c',
- '%_EObj_m_i',
- '%_EObj_m_s',
- '%_EObj_n__EObj',
- '%_EObj_n_b',
- '%_EObj_n_c',
- '%_EObj_n_i',
- '%_EObj_n_s',
- '%_EObj_o__EObj',
- '%_EObj_o_b',
- '%_EObj_o_c',
- '%_EObj_o_i',
- '%_EObj_o_s',
- '%_EObj_p',
- '%_EObj_p__EObj',
- '%_EObj_p_b',
- '%_EObj_p_c',
- '%_EObj_p_i',
- '%_EObj_p_s',
- '%_EObj_q__EObj',
- '%_EObj_q_b',
- '%_EObj_q_c',
- '%_EObj_q_i',
- '%_EObj_q_s',
- '%_EObj_r__EObj',
- '%_EObj_r_b',
- '%_EObj_r_c',
- '%_EObj_r_i',
- '%_EObj_r_s',
- '%_EObj_s__EObj',
- '%_EObj_s_b',
- '%_EObj_s_c',
- '%_EObj_s_i',
- '%_EObj_s_s',
- '%_EObj_t',
- '%_EObj_x__EObj',
- '%_EObj_x_b',
- '%_EObj_x_c',
- '%_EObj_x_i',
- '%_EObj_x_s',
- '%_EObj_y__EObj',
- '%_EObj_y_b',
- '%_EObj_y_c',
- '%_EObj_y_i',
- '%_EObj_y_s',
- '%_EObj_z__EObj',
- '%_EObj_z_b',
- '%_EObj_z_c',
- '%_EObj_z_i',
- '%_EObj_z_s',
- '%_eigs',
- '%_load',
- '%b_1__EObj',
- '%b_2__EObj',
- '%b_3__EObj',
- '%b_4__EObj',
- '%b_a__EObj',
- '%b_d__EObj',
- '%b_g__EObj',
- '%b_h__EObj',
- '%b_i_XMLList',
- '%b_i__EObj',
- '%b_j__EObj',
- '%b_k__EObj',
- '%b_l__EObj',
- '%b_m__EObj',
- '%b_n__EObj',
- '%b_o__EObj',
- '%b_p__EObj',
- '%b_q__EObj',
- '%b_r__EObj',
- '%b_s__EObj',
- '%b_x__EObj',
- '%b_y__EObj',
- '%b_z__EObj',
- '%c_1__EObj',
- '%c_2__EObj',
- '%c_3__EObj',
- '%c_4__EObj',
- '%c_a__EObj',
- '%c_d__EObj',
- '%c_g__EObj',
- '%c_h__EObj',
- '%c_i_XMLAttr',
- '%c_i_XMLDoc',
- '%c_i_XMLElem',
- '%c_i_XMLList',
- '%c_i__EObj',
- '%c_j__EObj',
- '%c_k__EObj',
- '%c_l__EObj',
- '%c_m__EObj',
- '%c_n__EObj',
- '%c_o__EObj',
- '%c_p__EObj',
- '%c_q__EObj',
- '%c_r__EObj',
- '%c_s__EObj',
- '%c_x__EObj',
- '%c_y__EObj',
- '%c_z__EObj',
- '%ce_i_XMLList',
- '%fptr_i_XMLList',
- '%h_i_XMLList',
- '%hm_i_XMLList',
- '%i_1__EObj',
- '%i_2__EObj',
- '%i_3__EObj',
- '%i_4__EObj',
- '%i_a__EObj',
- '%i_abs',
- '%i_cumprod',
- '%i_cumsum',
- '%i_d__EObj',
- '%i_diag',
- '%i_g__EObj',
- '%i_h__EObj',
- '%i_i_XMLList',
- '%i_i__EObj',
- '%i_j__EObj',
- '%i_k__EObj',
- '%i_l__EObj',
- '%i_m__EObj',
- '%i_matrix',
- '%i_max',
- '%i_maxi',
- '%i_min',
- '%i_mini',
- '%i_mput',
- '%i_n__EObj',
- '%i_o__EObj',
- '%i_p',
- '%i_p__EObj',
- '%i_prod',
- '%i_q__EObj',
- '%i_r__EObj',
- '%i_s__EObj',
- '%i_sum',
- '%i_tril',
- '%i_triu',
- '%i_x__EObj',
- '%i_y__EObj',
- '%i_z__EObj',
- '%ip_i_XMLList',
- '%l_i_XMLList',
- '%l_i__EObj',
- '%lss_i_XMLList',
- '%mc_i_XMLList',
- '%msp_full',
- '%msp_i_XMLList',
- '%msp_spget',
- '%p_i_XMLList',
- '%ptr_i_XMLList',
- '%r_i_XMLList',
- '%s_1__EObj',
- '%s_2__EObj',
- '%s_3__EObj',
- '%s_4__EObj',
- '%s_a__EObj',
- '%s_d__EObj',
- '%s_g__EObj',
- '%s_h__EObj',
- '%s_i_XMLList',
- '%s_i__EObj',
- '%s_j__EObj',
- '%s_k__EObj',
- '%s_l__EObj',
- '%s_m__EObj',
- '%s_n__EObj',
- '%s_o__EObj',
- '%s_p__EObj',
- '%s_q__EObj',
- '%s_r__EObj',
- '%s_s__EObj',
- '%s_x__EObj',
- '%s_y__EObj',
- '%s_z__EObj',
- '%sp_i_XMLList',
- '%spb_i_XMLList',
- '%st_i_XMLList',
- 'Calendar',
- 'ClipBoard',
- 'Matplot',
- 'Matplot1',
- 'PlaySound',
- 'TCL_DeleteInterp',
- 'TCL_DoOneEvent',
- 'TCL_EvalFile',
- 'TCL_EvalStr',
- 'TCL_ExistArray',
- 'TCL_ExistInterp',
- 'TCL_ExistVar',
- 'TCL_GetVar',
- 'TCL_GetVersion',
- 'TCL_SetVar',
- 'TCL_UnsetVar',
- 'TCL_UpVar',
- '_',
- '_code2str',
- '_d',
- '_str2code',
- 'about',
- 'abs',
- 'acos',
- 'addModulePreferences',
- 'addcolor',
- 'addf',
- 'addhistory',
- 'addinter',
- 'addlocalizationdomain',
- 'amell',
- 'and',
- 'argn',
- 'arl2_ius',
- 'ascii',
- 'asin',
- 'atan',
- 'backslash',
- 'balanc',
- 'banner',
- 'base2dec',
- 'basename',
- 'bdiag',
- 'beep',
- 'besselh',
- 'besseli',
- 'besselj',
- 'besselk',
- 'bessely',
- 'beta',
- 'bezout',
- 'bfinit',
- 'blkfc1i',
- 'blkslvi',
- 'bool2s',
- 'browsehistory',
- 'browsevar',
- 'bsplin3val',
- 'buildDoc',
- 'buildouttb',
- 'bvode',
- 'c_link',
- 'call',
- 'callblk',
- 'captions',
- 'cd',
- 'cdfbet',
- 'cdfbin',
- 'cdfchi',
- 'cdfchn',
- 'cdff',
- 'cdffnc',
- 'cdfgam',
- 'cdfnbn',
- 'cdfnor',
- 'cdfpoi',
- 'cdft',
- 'ceil',
- 'champ',
- 'champ1',
- 'chdir',
- 'chol',
- 'clc',
- 'clean',
- 'clear',
- 'clearfun',
- 'clearglobal',
- 'closeEditor',
- 'closeEditvar',
- 'closeXcos',
- 'code2str',
- 'coeff',
- 'color',
- 'comp',
- 'completion',
- 'conj',
- 'contour2di',
- 'contr',
- 'conv2',
- 'convstr',
- 'copy',
- 'copyfile',
- 'corr',
- 'cos',
- 'coserror',
- 'createdir',
- 'cshep2d',
- 'csvDefault',
- 'csvIsnum',
- 'csvRead',
- 'csvStringToDouble',
- 'csvTextScan',
- 'csvWrite',
- 'ctree2',
- 'ctree3',
- 'ctree4',
- 'cumprod',
- 'cumsum',
- 'curblock',
- 'curblockc',
- 'daskr',
- 'dasrt',
- 'dassl',
- 'data2sig',
- 'datatipCreate',
- 'datatipManagerMode',
- 'datatipMove',
- 'datatipRemove',
- 'datatipSetDisplay',
- 'datatipSetInterp',
- 'datatipSetOrientation',
- 'datatipSetStyle',
- 'datatipToggle',
- 'dawson',
- 'dct',
- 'debug',
- 'dec2base',
- 'deff',
- 'definedfields',
- 'degree',
- 'delbpt',
- 'delete',
- 'deletefile',
- 'delip',
- 'delmenu',
- 'det',
- 'dgettext',
- 'dhinf',
- 'diag',
- 'diary',
- 'diffobjs',
- 'disp',
- 'dispbpt',
- 'displayhistory',
- 'disposefftwlibrary',
- 'dlgamma',
- 'dnaupd',
- 'dneupd',
- 'double',
- 'drawaxis',
- 'drawlater',
- 'drawnow',
- 'driver',
- 'dsaupd',
- 'dsearch',
- 'dseupd',
- 'dst',
- 'duplicate',
- 'editvar',
- 'emptystr',
- 'end_scicosim',
- 'ereduc',
- 'erf',
- 'erfc',
- 'erfcx',
- 'erfi',
- 'errcatch',
- 'errclear',
- 'error',
- 'eval_cshep2d',
- 'exec',
- 'execstr',
- 'exists',
- 'exit',
- 'exp',
- 'expm',
- 'exportUI',
- 'export_to_hdf5',
- 'eye',
- 'fadj2sp',
- 'fec',
- 'feval',
- 'fft',
- 'fftw',
- 'fftw_flags',
- 'fftw_forget_wisdom',
- 'fftwlibraryisloaded',
- 'figure',
- 'file',
- 'filebrowser',
- 'fileext',
- 'fileinfo',
- 'fileparts',
- 'filesep',
- 'find',
- 'findBD',
- 'findfiles',
- 'fire_closing_finished',
- 'floor',
- 'format',
- 'fort',
- 'fprintfMat',
- 'freq',
- 'frexp',
- 'fromc',
- 'fromjava',
- 'fscanfMat',
- 'fsolve',
- 'fstair',
- 'full',
- 'fullpath',
- 'funcprot',
- 'funptr',
- 'gamma',
- 'gammaln',
- 'geom3d',
- 'get',
- 'getURL',
- 'get_absolute_file_path',
- 'get_fftw_wisdom',
- 'getblocklabel',
- 'getcallbackobject',
- 'getdate',
- 'getdebuginfo',
- 'getdefaultlanguage',
- 'getdrives',
- 'getdynlibext',
- 'getenv',
- 'getfield',
- 'gethistory',
- 'gethistoryfile',
- 'getinstalledlookandfeels',
- 'getio',
- 'getlanguage',
- 'getlongpathname',
- 'getlookandfeel',
- 'getmd5',
- 'getmemory',
- 'getmodules',
- 'getos',
- 'getpid',
- 'getrelativefilename',
- 'getscicosvars',
- 'getscilabmode',
- 'getshortpathname',
- 'gettext',
- 'getvariablesonstack',
- 'getversion',
- 'glist',
- 'global',
- 'glue',
- 'grand',
- 'graphicfunction',
- 'grayplot',
- 'grep',
- 'gsort',
- 'gstacksize',
- 'h5attr',
- 'h5close',
- 'h5cp',
- 'h5dataset',
- 'h5dump',
- 'h5exists',
- 'h5flush',
- 'h5get',
- 'h5group',
- 'h5isArray',
- 'h5isAttr',
- 'h5isCompound',
- 'h5isFile',
- 'h5isGroup',
- 'h5isList',
- 'h5isRef',
- 'h5isSet',
- 'h5isSpace',
- 'h5isType',
- 'h5isVlen',
- 'h5label',
- 'h5ln',
- 'h5ls',
- 'h5mount',
- 'h5mv',
- 'h5open',
- 'h5read',
- 'h5readattr',
- 'h5rm',
- 'h5umount',
- 'h5write',
- 'h5writeattr',
- 'havewindow',
- 'helpbrowser',
- 'hess',
- 'hinf',
- 'historymanager',
- 'historysize',
- 'host',
- 'htmlDump',
- 'htmlRead',
- 'htmlReadStr',
- 'htmlWrite',
- 'iconvert',
- 'ieee',
- 'ilib_verbose',
- 'imag',
- 'impl',
- 'import_from_hdf5',
- 'imult',
- 'inpnvi',
- 'int',
- 'int16',
- 'int2d',
- 'int32',
- 'int3d',
- 'int8',
- 'interp',
- 'interp2d',
- 'interp3d',
- 'intg',
- 'intppty',
- 'inttype',
- 'inv',
- 'invoke_lu',
- 'is_handle_valid',
- 'is_hdf5_file',
- 'isalphanum',
- 'isascii',
- 'isdef',
- 'isdigit',
- 'isdir',
- 'isequal',
- 'isequalbitwise',
- 'iserror',
- 'isfile',
- 'isglobal',
- 'isletter',
- 'isnum',
- 'isreal',
- 'iswaitingforinput',
- 'jallowClassReloading',
- 'jarray',
- 'jautoTranspose',
- 'jautoUnwrap',
- 'javaclasspath',
- 'javalibrarypath',
- 'jcast',
- 'jcompile',
- 'jconvMatrixMethod',
- 'jcreatejar',
- 'jdeff',
- 'jdisableTrace',
- 'jenableTrace',
- 'jexists',
- 'jgetclassname',
- 'jgetfield',
- 'jgetfields',
- 'jgetinfo',
- 'jgetmethods',
- 'jimport',
- 'jinvoke',
- 'jinvoke_db',
- 'jnewInstance',
- 'jremove',
- 'jsetfield',
- 'junwrap',
- 'junwraprem',
- 'jwrap',
- 'jwrapinfloat',
- 'kron',
- 'lasterror',
- 'ldiv',
- 'ldivf',
- 'legendre',
- 'length',
- 'lib',
- 'librarieslist',
- 'libraryinfo',
- 'light',
- 'linear_interpn',
- 'lines',
- 'link',
- 'linmeq',
- 'list',
- 'listvar_in_hdf5',
- 'load',
- 'loadGui',
- 'loadScicos',
- 'loadXcos',
- 'loadfftwlibrary',
- 'loadhistory',
- 'log',
- 'log1p',
- 'lsq',
- 'lsq_splin',
- 'lsqrsolve',
- 'lsslist',
- 'lstcat',
- 'lstsize',
- 'ltitr',
- 'lu',
- 'ludel',
- 'lufact',
- 'luget',
- 'lusolve',
- 'macr2lst',
- 'macr2tree',
- 'matfile_close',
- 'matfile_listvar',
- 'matfile_open',
- 'matfile_varreadnext',
- 'matfile_varwrite',
- 'matrix',
- 'max',
- 'maxfiles',
- 'mclearerr',
- 'mclose',
- 'meof',
- 'merror',
- 'messagebox',
- 'mfprintf',
- 'mfscanf',
- 'mget',
- 'mgeti',
- 'mgetl',
- 'mgetstr',
- 'min',
- 'mlist',
- 'mode',
- 'model2blk',
- 'mopen',
- 'move',
- 'movefile',
- 'mprintf',
- 'mput',
- 'mputl',
- 'mputstr',
- 'mscanf',
- 'mseek',
- 'msprintf',
- 'msscanf',
- 'mtell',
- 'mtlb_mode',
- 'mtlb_sparse',
- 'mucomp',
- 'mulf',
- 'name2rgb',
- 'nearfloat',
- 'newaxes',
- 'newest',
- 'newfun',
- 'nnz',
- 'norm',
- 'notify',
- 'number_properties',
- 'ode',
- 'odedc',
- 'ones',
- 'openged',
- 'opentk',
- 'optim',
- 'or',
- 'ordmmd',
- 'parallel_concurrency',
- 'parallel_run',
- 'param3d',
- 'param3d1',
- 'part',
- 'pathconvert',
- 'pathsep',
- 'phase_simulation',
- 'plot2d',
- 'plot2d1',
- 'plot2d2',
- 'plot2d3',
- 'plot2d4',
- 'plot3d',
- 'plot3d1',
- 'plotbrowser',
- 'pointer_xproperty',
- 'poly',
- 'ppol',
- 'pppdiv',
- 'predef',
- 'preferences',
- 'print',
- 'printf',
- 'printfigure',
- 'printsetupbox',
- 'prod',
- 'progressionbar',
- 'prompt',
- 'pwd',
- 'qld',
- 'qp_solve',
- 'qr',
- 'raise_window',
- 'rand',
- 'rankqr',
- 'rat',
- 'rcond',
- 'rdivf',
- 'read',
- 'read4b',
- 'read_csv',
- 'readb',
- 'readgateway',
- 'readmps',
- 'real',
- 'realtime',
- 'realtimeinit',
- 'regexp',
- 'relocate_handle',
- 'remez',
- 'removeModulePreferences',
- 'removedir',
- 'removelinehistory',
- 'res_with_prec',
- 'resethistory',
- 'residu',
- 'resume',
- 'return',
- 'ricc',
- 'rlist',
- 'roots',
- 'rotate_axes',
- 'round',
- 'rpem',
- 'rtitr',
- 'rubberbox',
- 'save',
- 'saveGui',
- 'saveafterncommands',
- 'saveconsecutivecommands',
- 'savehistory',
- 'schur',
- 'sci_haltscicos',
- 'sci_tree2',
- 'sci_tree3',
- 'sci_tree4',
- 'sciargs',
- 'scicos_debug',
- 'scicos_debug_count',
- 'scicos_time',
- 'scicosim',
- 'scinotes',
- 'sctree',
- 'semidef',
- 'set',
- 'set_blockerror',
- 'set_fftw_wisdom',
- 'set_xproperty',
- 'setbpt',
- 'setdefaultlanguage',
- 'setenv',
- 'setfield',
- 'sethistoryfile',
- 'setlanguage',
- 'setlookandfeel',
- 'setmenu',
- 'sfact',
- 'sfinit',
- 'show_window',
- 'sident',
- 'sig2data',
- 'sign',
- 'simp',
- 'simp_mode',
- 'sin',
- 'size',
- 'slash',
- 'sleep',
- 'sorder',
- 'sparse',
- 'spchol',
- 'spcompack',
- 'spec',
- 'spget',
- 'splin',
- 'splin2d',
- 'splin3d',
- 'splitURL',
- 'spones',
- 'sprintf',
- 'sqrt',
- 'stacksize',
- 'str2code',
- 'strcat',
- 'strchr',
- 'strcmp',
- 'strcspn',
- 'strindex',
- 'string',
- 'stringbox',
- 'stripblanks',
- 'strncpy',
- 'strrchr',
- 'strrev',
- 'strsplit',
- 'strspn',
- 'strstr',
- 'strsubst',
- 'strtod',
- 'strtok',
- 'subf',
- 'sum',
- 'svd',
- 'swap_handles',
- 'symfcti',
- 'syredi',
- 'system_getproperty',
- 'system_setproperty',
- 'ta2lpd',
- 'tan',
- 'taucs_chdel',
- 'taucs_chfact',
- 'taucs_chget',
- 'taucs_chinfo',
- 'taucs_chsolve',
- 'tempname',
- 'testmatrix',
- 'timer',
- 'tlist',
- 'tohome',
- 'tokens',
- 'toolbar',
- 'toprint',
- 'tr_zer',
- 'tril',
- 'triu',
- 'type',
- 'typename',
- 'uiDisplayTree',
- 'uicontextmenu',
- 'uicontrol',
- 'uigetcolor',
- 'uigetdir',
- 'uigetfile',
- 'uigetfont',
- 'uimenu',
- 'uint16',
- 'uint32',
- 'uint8',
- 'uipopup',
- 'uiputfile',
- 'uiwait',
- 'ulink',
- 'umf_ludel',
- 'umf_lufact',
- 'umf_luget',
- 'umf_luinfo',
- 'umf_lusolve',
- 'umfpack',
- 'unglue',
- 'unix',
- 'unsetmenu',
- 'unzoom',
- 'updatebrowsevar',
- 'usecanvas',
- 'useeditor',
- 'user',
- 'var2vec',
- 'varn',
- 'vec2var',
- 'waitbar',
- 'warnBlockByUID',
- 'warning',
- 'what',
- 'where',
- 'whereis',
- 'who',
- 'winsid',
- 'with_module',
- 'writb',
- 'write',
- 'write4b',
- 'write_csv',
- 'x_choose',
- 'x_choose_modeless',
- 'x_dialog',
- 'x_mdialog',
- 'xarc',
- 'xarcs',
- 'xarrows',
- 'xchange',
- 'xchoicesi',
- 'xclick',
- 'xcos',
- 'xcosAddToolsMenu',
- 'xcosConfigureXmlFile',
- 'xcosDiagramToScilab',
- 'xcosPalCategoryAdd',
- 'xcosPalDelete',
- 'xcosPalDisable',
- 'xcosPalEnable',
- 'xcosPalGenerateIcon',
- 'xcosPalGet',
- 'xcosPalLoad',
- 'xcosPalMove',
- 'xcosSimulationStarted',
- 'xcosUpdateBlock',
- 'xdel',
- 'xend',
- 'xfarc',
- 'xfarcs',
- 'xfpoly',
- 'xfpolys',
- 'xfrect',
- 'xget',
- 'xgetmouse',
- 'xgraduate',
- 'xgrid',
- 'xinit',
- 'xlfont',
- 'xls_open',
- 'xls_read',
- 'xmlAddNs',
- 'xmlAppend',
- 'xmlAsNumber',
- 'xmlAsText',
- 'xmlDTD',
- 'xmlDelete',
- 'xmlDocument',
- 'xmlDump',
- 'xmlElement',
- 'xmlFormat',
- 'xmlGetNsByHref',
- 'xmlGetNsByPrefix',
- 'xmlGetOpenDocs',
- 'xmlIsValidObject',
- 'xmlName',
- 'xmlNs',
- 'xmlRead',
- 'xmlReadStr',
- 'xmlRelaxNG',
- 'xmlRemove',
- 'xmlSchema',
- 'xmlSetAttributes',
- 'xmlValidate',
- 'xmlWrite',
- 'xmlXPath',
- 'xname',
- 'xpause',
- 'xpoly',
- 'xpolys',
- 'xrect',
- 'xrects',
- 'xs2bmp',
- 'xs2emf',
- 'xs2eps',
- 'xs2gif',
- 'xs2jpg',
- 'xs2pdf',
- 'xs2png',
- 'xs2ppm',
- 'xs2ps',
- 'xs2svg',
- 'xsegs',
- 'xset',
- 'xstring',
- 'xstringb',
- 'xtitle',
- 'zeros',
- 'znaupd',
- 'zneupd',
- 'zoom_rect',
-)
-
-macros_kw = (
- '!_deff_wrapper',
- '%0_i_st',
- '%3d_i_h',
- '%Block_xcosUpdateBlock',
- '%TNELDER_p',
- '%TNELDER_string',
- '%TNMPLOT_p',
- '%TNMPLOT_string',
- '%TOPTIM_p',
- '%TOPTIM_string',
- '%TSIMPLEX_p',
- '%TSIMPLEX_string',
- '%_EVoid_p',
- '%_gsort',
- '%_listvarinfile',
- '%_rlist',
- '%_save',
- '%_sodload',
- '%_strsplit',
- '%_unwrap',
- '%ar_p',
- '%asn',
- '%b_a_b',
- '%b_a_s',
- '%b_c_s',
- '%b_c_spb',
- '%b_cumprod',
- '%b_cumsum',
- '%b_d_s',
- '%b_diag',
- '%b_e',
- '%b_f_s',
- '%b_f_spb',
- '%b_g_s',
- '%b_g_spb',
- '%b_grand',
- '%b_h_s',
- '%b_h_spb',
- '%b_i_b',
- '%b_i_ce',
- '%b_i_h',
- '%b_i_hm',
- '%b_i_s',
- '%b_i_sp',
- '%b_i_spb',
- '%b_i_st',
- '%b_iconvert',
- '%b_l_b',
- '%b_l_s',
- '%b_m_b',
- '%b_m_s',
- '%b_matrix',
- '%b_n_hm',
- '%b_o_hm',
- '%b_p_s',
- '%b_prod',
- '%b_r_b',
- '%b_r_s',
- '%b_s_b',
- '%b_s_s',
- '%b_string',
- '%b_sum',
- '%b_tril',
- '%b_triu',
- '%b_x_b',
- '%b_x_s',
- '%bicg',
- '%bicgstab',
- '%c_a_c',
- '%c_b_c',
- '%c_b_s',
- '%c_diag',
- '%c_dsearch',
- '%c_e',
- '%c_eye',
- '%c_f_s',
- '%c_grand',
- '%c_i_c',
- '%c_i_ce',
- '%c_i_h',
- '%c_i_hm',
- '%c_i_lss',
- '%c_i_r',
- '%c_i_s',
- '%c_i_st',
- '%c_matrix',
- '%c_n_l',
- '%c_n_st',
- '%c_o_l',
- '%c_o_st',
- '%c_ones',
- '%c_rand',
- '%c_tril',
- '%c_triu',
- '%cblock_c_cblock',
- '%cblock_c_s',
- '%cblock_e',
- '%cblock_f_cblock',
- '%cblock_p',
- '%cblock_size',
- '%ce_6',
- '%ce_c_ce',
- '%ce_e',
- '%ce_f_ce',
- '%ce_i_ce',
- '%ce_i_s',
- '%ce_i_st',
- '%ce_matrix',
- '%ce_p',
- '%ce_size',
- '%ce_string',
- '%ce_t',
- '%cgs',
- '%champdat_i_h',
- '%choose',
- '%diagram_xcos',
- '%dir_p',
- '%fptr_i_st',
- '%grand_perm',
- '%grayplot_i_h',
- '%h_i_st',
- '%hmS_k_hmS_generic',
- '%hm_1_hm',
- '%hm_1_s',
- '%hm_2_hm',
- '%hm_2_s',
- '%hm_3_hm',
- '%hm_3_s',
- '%hm_4_hm',
- '%hm_4_s',
- '%hm_5',
- '%hm_a_hm',
- '%hm_a_r',
- '%hm_a_s',
- '%hm_abs',
- '%hm_and',
- '%hm_bool2s',
- '%hm_c_hm',
- '%hm_ceil',
- '%hm_conj',
- '%hm_cos',
- '%hm_cumprod',
- '%hm_cumsum',
- '%hm_d_hm',
- '%hm_d_s',
- '%hm_degree',
- '%hm_dsearch',
- '%hm_e',
- '%hm_exp',
- '%hm_eye',
- '%hm_f_hm',
- '%hm_find',
- '%hm_floor',
- '%hm_g_hm',
- '%hm_grand',
- '%hm_gsort',
- '%hm_h_hm',
- '%hm_i_b',
- '%hm_i_ce',
- '%hm_i_h',
- '%hm_i_hm',
- '%hm_i_i',
- '%hm_i_p',
- '%hm_i_r',
- '%hm_i_s',
- '%hm_i_st',
- '%hm_iconvert',
- '%hm_imag',
- '%hm_int',
- '%hm_isnan',
- '%hm_isreal',
- '%hm_j_hm',
- '%hm_j_s',
- '%hm_k_hm',
- '%hm_k_s',
- '%hm_log',
- '%hm_m_p',
- '%hm_m_r',
- '%hm_m_s',
- '%hm_matrix',
- '%hm_max',
- '%hm_mean',
- '%hm_median',
- '%hm_min',
- '%hm_n_b',
- '%hm_n_c',
- '%hm_n_hm',
- '%hm_n_i',
- '%hm_n_p',
- '%hm_n_s',
- '%hm_o_b',
- '%hm_o_c',
- '%hm_o_hm',
- '%hm_o_i',
- '%hm_o_p',
- '%hm_o_s',
- '%hm_ones',
- '%hm_or',
- '%hm_p',
- '%hm_prod',
- '%hm_q_hm',
- '%hm_r_s',
- '%hm_rand',
- '%hm_real',
- '%hm_round',
- '%hm_s',
- '%hm_s_hm',
- '%hm_s_r',
- '%hm_s_s',
- '%hm_sign',
- '%hm_sin',
- '%hm_size',
- '%hm_sqrt',
- '%hm_stdev',
- '%hm_string',
- '%hm_sum',
- '%hm_x_hm',
- '%hm_x_p',
- '%hm_x_s',
- '%hm_zeros',
- '%i_1_s',
- '%i_2_s',
- '%i_3_s',
- '%i_4_s',
- '%i_Matplot',
- '%i_a_i',
- '%i_a_s',
- '%i_and',
- '%i_ascii',
- '%i_b_s',
- '%i_bezout',
- '%i_champ',
- '%i_champ1',
- '%i_contour',
- '%i_contour2d',
- '%i_d_i',
- '%i_d_s',
- '%i_dsearch',
- '%i_e',
- '%i_fft',
- '%i_g_i',
- '%i_gcd',
- '%i_grand',
- '%i_h_i',
- '%i_i_ce',
- '%i_i_h',
- '%i_i_hm',
- '%i_i_i',
- '%i_i_s',
- '%i_i_st',
- '%i_j_i',
- '%i_j_s',
- '%i_l_s',
- '%i_lcm',
- '%i_length',
- '%i_m_i',
- '%i_m_s',
- '%i_mfprintf',
- '%i_mprintf',
- '%i_msprintf',
- '%i_n_s',
- '%i_o_s',
- '%i_or',
- '%i_p_i',
- '%i_p_s',
- '%i_plot2d',
- '%i_plot2d1',
- '%i_plot2d2',
- '%i_q_s',
- '%i_r_i',
- '%i_r_s',
- '%i_round',
- '%i_s_i',
- '%i_s_s',
- '%i_sign',
- '%i_string',
- '%i_x_i',
- '%i_x_s',
- '%ip_a_s',
- '%ip_i_st',
- '%ip_m_s',
- '%ip_n_ip',
- '%ip_o_ip',
- '%ip_p',
- '%ip_part',
- '%ip_s_s',
- '%ip_string',
- '%k',
- '%l_i_h',
- '%l_i_s',
- '%l_i_st',
- '%l_isequal',
- '%l_n_c',
- '%l_n_l',
- '%l_n_m',
- '%l_n_p',
- '%l_n_s',
- '%l_n_st',
- '%l_o_c',
- '%l_o_l',
- '%l_o_m',
- '%l_o_p',
- '%l_o_s',
- '%l_o_st',
- '%lss_a_lss',
- '%lss_a_p',
- '%lss_a_r',
- '%lss_a_s',
- '%lss_c_lss',
- '%lss_c_p',
- '%lss_c_r',
- '%lss_c_s',
- '%lss_e',
- '%lss_eye',
- '%lss_f_lss',
- '%lss_f_p',
- '%lss_f_r',
- '%lss_f_s',
- '%lss_i_ce',
- '%lss_i_lss',
- '%lss_i_p',
- '%lss_i_r',
- '%lss_i_s',
- '%lss_i_st',
- '%lss_inv',
- '%lss_l_lss',
- '%lss_l_p',
- '%lss_l_r',
- '%lss_l_s',
- '%lss_m_lss',
- '%lss_m_p',
- '%lss_m_r',
- '%lss_m_s',
- '%lss_n_lss',
- '%lss_n_p',
- '%lss_n_r',
- '%lss_n_s',
- '%lss_norm',
- '%lss_o_lss',
- '%lss_o_p',
- '%lss_o_r',
- '%lss_o_s',
- '%lss_ones',
- '%lss_r_lss',
- '%lss_r_p',
- '%lss_r_r',
- '%lss_r_s',
- '%lss_rand',
- '%lss_s',
- '%lss_s_lss',
- '%lss_s_p',
- '%lss_s_r',
- '%lss_s_s',
- '%lss_size',
- '%lss_t',
- '%lss_v_lss',
- '%lss_v_p',
- '%lss_v_r',
- '%lss_v_s',
- '%lt_i_s',
- '%m_n_l',
- '%m_o_l',
- '%mc_i_h',
- '%mc_i_s',
- '%mc_i_st',
- '%mc_n_st',
- '%mc_o_st',
- '%mc_string',
- '%mps_p',
- '%mps_string',
- '%msp_a_s',
- '%msp_abs',
- '%msp_e',
- '%msp_find',
- '%msp_i_s',
- '%msp_i_st',
- '%msp_length',
- '%msp_m_s',
- '%msp_maxi',
- '%msp_n_msp',
- '%msp_nnz',
- '%msp_o_msp',
- '%msp_p',
- '%msp_sparse',
- '%msp_spones',
- '%msp_t',
- '%p_a_lss',
- '%p_a_r',
- '%p_c_lss',
- '%p_c_r',
- '%p_cumprod',
- '%p_cumsum',
- '%p_d_p',
- '%p_d_r',
- '%p_d_s',
- '%p_det',
- '%p_e',
- '%p_f_lss',
- '%p_f_r',
- '%p_grand',
- '%p_i_ce',
- '%p_i_h',
- '%p_i_hm',
- '%p_i_lss',
- '%p_i_p',
- '%p_i_r',
- '%p_i_s',
- '%p_i_st',
- '%p_inv',
- '%p_j_s',
- '%p_k_p',
- '%p_k_r',
- '%p_k_s',
- '%p_l_lss',
- '%p_l_p',
- '%p_l_r',
- '%p_l_s',
- '%p_m_hm',
- '%p_m_lss',
- '%p_m_r',
- '%p_matrix',
- '%p_n_l',
- '%p_n_lss',
- '%p_n_r',
- '%p_o_l',
- '%p_o_lss',
- '%p_o_r',
- '%p_o_sp',
- '%p_p_s',
- '%p_part',
- '%p_prod',
- '%p_q_p',
- '%p_q_r',
- '%p_q_s',
- '%p_r_lss',
- '%p_r_p',
- '%p_r_r',
- '%p_r_s',
- '%p_s_lss',
- '%p_s_r',
- '%p_simp',
- '%p_string',
- '%p_sum',
- '%p_v_lss',
- '%p_v_p',
- '%p_v_r',
- '%p_v_s',
- '%p_x_hm',
- '%p_x_r',
- '%p_y_p',
- '%p_y_r',
- '%p_y_s',
- '%p_z_p',
- '%p_z_r',
- '%p_z_s',
- '%pcg',
- '%plist_p',
- '%plist_string',
- '%r_0',
- '%r_a_hm',
- '%r_a_lss',
- '%r_a_p',
- '%r_a_r',
- '%r_a_s',
- '%r_c_lss',
- '%r_c_p',
- '%r_c_r',
- '%r_c_s',
- '%r_clean',
- '%r_cumprod',
- '%r_cumsum',
- '%r_d_p',
- '%r_d_r',
- '%r_d_s',
- '%r_det',
- '%r_diag',
- '%r_e',
- '%r_eye',
- '%r_f_lss',
- '%r_f_p',
- '%r_f_r',
- '%r_f_s',
- '%r_i_ce',
- '%r_i_hm',
- '%r_i_lss',
- '%r_i_p',
- '%r_i_r',
- '%r_i_s',
- '%r_i_st',
- '%r_inv',
- '%r_j_s',
- '%r_k_p',
- '%r_k_r',
- '%r_k_s',
- '%r_l_lss',
- '%r_l_p',
- '%r_l_r',
- '%r_l_s',
- '%r_m_hm',
- '%r_m_lss',
- '%r_m_p',
- '%r_m_r',
- '%r_m_s',
- '%r_matrix',
- '%r_n_lss',
- '%r_n_p',
- '%r_n_r',
- '%r_n_s',
- '%r_norm',
- '%r_o_lss',
- '%r_o_p',
- '%r_o_r',
- '%r_o_s',
- '%r_ones',
- '%r_p',
- '%r_p_s',
- '%r_prod',
- '%r_q_p',
- '%r_q_r',
- '%r_q_s',
- '%r_r_lss',
- '%r_r_p',
- '%r_r_r',
- '%r_r_s',
- '%r_rand',
- '%r_s',
- '%r_s_hm',
- '%r_s_lss',
- '%r_s_p',
- '%r_s_r',
- '%r_s_s',
- '%r_simp',
- '%r_size',
- '%r_string',
- '%r_sum',
- '%r_t',
- '%r_tril',
- '%r_triu',
- '%r_v_lss',
- '%r_v_p',
- '%r_v_r',
- '%r_v_s',
- '%r_varn',
- '%r_x_p',
- '%r_x_r',
- '%r_x_s',
- '%r_y_p',
- '%r_y_r',
- '%r_y_s',
- '%r_z_p',
- '%r_z_r',
- '%r_z_s',
- '%s_1_hm',
- '%s_1_i',
- '%s_2_hm',
- '%s_2_i',
- '%s_3_hm',
- '%s_3_i',
- '%s_4_hm',
- '%s_4_i',
- '%s_5',
- '%s_a_b',
- '%s_a_hm',
- '%s_a_i',
- '%s_a_ip',
- '%s_a_lss',
- '%s_a_msp',
- '%s_a_r',
- '%s_a_sp',
- '%s_and',
- '%s_b_i',
- '%s_b_s',
- '%s_bezout',
- '%s_c_b',
- '%s_c_cblock',
- '%s_c_lss',
- '%s_c_r',
- '%s_c_sp',
- '%s_d_b',
- '%s_d_i',
- '%s_d_p',
- '%s_d_r',
- '%s_d_sp',
- '%s_e',
- '%s_f_b',
- '%s_f_cblock',
- '%s_f_lss',
- '%s_f_r',
- '%s_f_sp',
- '%s_g_b',
- '%s_g_s',
- '%s_gcd',
- '%s_grand',
- '%s_h_b',
- '%s_h_s',
- '%s_i_b',
- '%s_i_c',
- '%s_i_ce',
- '%s_i_h',
- '%s_i_hm',
- '%s_i_i',
- '%s_i_lss',
- '%s_i_p',
- '%s_i_r',
- '%s_i_s',
- '%s_i_sp',
- '%s_i_spb',
- '%s_i_st',
- '%s_j_i',
- '%s_k_hm',
- '%s_k_p',
- '%s_k_r',
- '%s_k_sp',
- '%s_l_b',
- '%s_l_hm',
- '%s_l_i',
- '%s_l_lss',
- '%s_l_p',
- '%s_l_r',
- '%s_l_s',
- '%s_l_sp',
- '%s_lcm',
- '%s_m_b',
- '%s_m_hm',
- '%s_m_i',
- '%s_m_ip',
- '%s_m_lss',
- '%s_m_msp',
- '%s_m_r',
- '%s_matrix',
- '%s_n_hm',
- '%s_n_i',
- '%s_n_l',
- '%s_n_lss',
- '%s_n_r',
- '%s_n_st',
- '%s_o_hm',
- '%s_o_i',
- '%s_o_l',
- '%s_o_lss',
- '%s_o_r',
- '%s_o_st',
- '%s_or',
- '%s_p_b',
- '%s_p_i',
- '%s_pow',
- '%s_q_hm',
- '%s_q_i',
- '%s_q_p',
- '%s_q_r',
- '%s_q_sp',
- '%s_r_b',
- '%s_r_i',
- '%s_r_lss',
- '%s_r_p',
- '%s_r_r',
- '%s_r_s',
- '%s_r_sp',
- '%s_s_b',
- '%s_s_hm',
- '%s_s_i',
- '%s_s_ip',
- '%s_s_lss',
- '%s_s_r',
- '%s_s_sp',
- '%s_simp',
- '%s_v_lss',
- '%s_v_p',
- '%s_v_r',
- '%s_v_s',
- '%s_x_b',
- '%s_x_hm',
- '%s_x_i',
- '%s_x_r',
- '%s_y_p',
- '%s_y_r',
- '%s_y_sp',
- '%s_z_p',
- '%s_z_r',
- '%s_z_sp',
- '%sn',
- '%sp_a_s',
- '%sp_a_sp',
- '%sp_and',
- '%sp_c_s',
- '%sp_ceil',
- '%sp_conj',
- '%sp_cos',
- '%sp_cumprod',
- '%sp_cumsum',
- '%sp_d_s',
- '%sp_d_sp',
- '%sp_det',
- '%sp_diag',
- '%sp_e',
- '%sp_exp',
- '%sp_f_s',
- '%sp_floor',
- '%sp_grand',
- '%sp_gsort',
- '%sp_i_ce',
- '%sp_i_h',
- '%sp_i_s',
- '%sp_i_sp',
- '%sp_i_st',
- '%sp_int',
- '%sp_inv',
- '%sp_k_s',
- '%sp_k_sp',
- '%sp_l_s',
- '%sp_l_sp',
- '%sp_length',
- '%sp_max',
- '%sp_min',
- '%sp_norm',
- '%sp_or',
- '%sp_p_s',
- '%sp_prod',
- '%sp_q_s',
- '%sp_q_sp',
- '%sp_r_s',
- '%sp_r_sp',
- '%sp_round',
- '%sp_s_s',
- '%sp_s_sp',
- '%sp_sin',
- '%sp_sqrt',
- '%sp_string',
- '%sp_sum',
- '%sp_tril',
- '%sp_triu',
- '%sp_y_s',
- '%sp_y_sp',
- '%sp_z_s',
- '%sp_z_sp',
- '%spb_and',
- '%spb_c_b',
- '%spb_cumprod',
- '%spb_cumsum',
- '%spb_diag',
- '%spb_e',
- '%spb_f_b',
- '%spb_g_b',
- '%spb_g_spb',
- '%spb_h_b',
- '%spb_h_spb',
- '%spb_i_b',
- '%spb_i_ce',
- '%spb_i_h',
- '%spb_i_st',
- '%spb_or',
- '%spb_prod',
- '%spb_sum',
- '%spb_tril',
- '%spb_triu',
- '%st_6',
- '%st_c_st',
- '%st_e',
- '%st_f_st',
- '%st_i_b',
- '%st_i_c',
- '%st_i_fptr',
- '%st_i_h',
- '%st_i_i',
- '%st_i_ip',
- '%st_i_lss',
- '%st_i_msp',
- '%st_i_p',
- '%st_i_r',
- '%st_i_s',
- '%st_i_sp',
- '%st_i_spb',
- '%st_i_st',
- '%st_matrix',
- '%st_n_c',
- '%st_n_l',
- '%st_n_mc',
- '%st_n_p',
- '%st_n_s',
- '%st_o_c',
- '%st_o_l',
- '%st_o_mc',
- '%st_o_p',
- '%st_o_s',
- '%st_o_tl',
- '%st_p',
- '%st_size',
- '%st_string',
- '%st_t',
- '%ticks_i_h',
- '%xls_e',
- '%xls_p',
- '%xlssheet_e',
- '%xlssheet_p',
- '%xlssheet_size',
- '%xlssheet_string',
- 'DominationRank',
- 'G_make',
- 'IsAScalar',
- 'NDcost',
- 'OS_Version',
- 'PlotSparse',
- 'ReadHBSparse',
- 'TCL_CreateSlave',
- 'abcd',
- 'abinv',
- 'accept_func_default',
- 'accept_func_vfsa',
- 'acf',
- 'acosd',
- 'acosh',
- 'acoshm',
- 'acosm',
- 'acot',
- 'acotd',
- 'acoth',
- 'acsc',
- 'acscd',
- 'acsch',
- 'add_demo',
- 'add_help_chapter',
- 'add_module_help_chapter',
- 'add_param',
- 'add_profiling',
- 'adj2sp',
- 'aff2ab',
- 'ana_style',
- 'analpf',
- 'analyze',
- 'aplat',
- 'arhnk',
- 'arl2',
- 'arma2p',
- 'arma2ss',
- 'armac',
- 'armax',
- 'armax1',
- 'arobasestring2strings',
- 'arsimul',
- 'ascii2string',
- 'asciimat',
- 'asec',
- 'asecd',
- 'asech',
- 'asind',
- 'asinh',
- 'asinhm',
- 'asinm',
- 'assert_checkalmostequal',
- 'assert_checkequal',
- 'assert_checkerror',
- 'assert_checkfalse',
- 'assert_checkfilesequal',
- 'assert_checktrue',
- 'assert_comparecomplex',
- 'assert_computedigits',
- 'assert_cond2reltol',
- 'assert_cond2reqdigits',
- 'assert_generror',
- 'atand',
- 'atanh',
- 'atanhm',
- 'atanm',
- 'atomsAutoload',
- 'atomsAutoloadAdd',
- 'atomsAutoloadDel',
- 'atomsAutoloadList',
- 'atomsCategoryList',
- 'atomsCheckModule',
- 'atomsDepTreeShow',
- 'atomsGetConfig',
- 'atomsGetInstalled',
- 'atomsGetInstalledPath',
- 'atomsGetLoaded',
- 'atomsGetLoadedPath',
- 'atomsInstall',
- 'atomsIsInstalled',
- 'atomsIsLoaded',
- 'atomsList',
- 'atomsLoad',
- 'atomsQuit',
- 'atomsRemove',
- 'atomsRepositoryAdd',
- 'atomsRepositoryDel',
- 'atomsRepositoryList',
- 'atomsRestoreConfig',
- 'atomsSaveConfig',
- 'atomsSearch',
- 'atomsSetConfig',
- 'atomsShow',
- 'atomsSystemInit',
- 'atomsSystemUpdate',
- 'atomsTest',
- 'atomsUpdate',
- 'atomsVersion',
- 'augment',
- 'auread',
- 'auwrite',
- 'balreal',
- 'bench_run',
- 'bilin',
- 'bilt',
- 'bin2dec',
- 'binomial',
- 'bitand',
- 'bitcmp',
- 'bitget',
- 'bitor',
- 'bitset',
- 'bitxor',
- 'black',
- 'blanks',
- 'bloc2exp',
- 'bloc2ss',
- 'block_parameter_error',
- 'bode',
- 'bode_asymp',
- 'bstap',
- 'buttmag',
- 'bvodeS',
- 'bytecode',
- 'bytecodewalk',
- 'cainv',
- 'calendar',
- 'calerf',
- 'calfrq',
- 'canon',
- 'casc',
- 'cat',
- 'cat_code',
- 'cb_m2sci_gui',
- 'ccontrg',
- 'cell',
- 'cell2mat',
- 'cellstr',
- 'center',
- 'cepstrum',
- 'cfspec',
- 'char',
- 'chart',
- 'cheb1mag',
- 'cheb2mag',
- 'check_gateways',
- 'check_modules_xml',
- 'check_versions',
- 'chepol',
- 'chfact',
- 'chsolve',
- 'classmarkov',
- 'clean_help',
- 'clock',
- 'cls2dls',
- 'cmb_lin',
- 'cmndred',
- 'cmoment',
- 'coding_ga_binary',
- 'coding_ga_identity',
- 'coff',
- 'coffg',
- 'colcomp',
- 'colcompr',
- 'colinout',
- 'colregul',
- 'companion',
- 'complex',
- 'compute_initial_temp',
- 'cond',
- 'cond2sp',
- 'condestsp',
- 'configure_msifort',
- 'configure_msvc',
- 'conjgrad',
- 'cont_frm',
- 'cont_mat',
- 'contrss',
- 'conv',
- 'convert_to_float',
- 'convertindex',
- 'convol',
- 'convol2d',
- 'copfac',
- 'correl',
- 'cosd',
- 'cosh',
- 'coshm',
- 'cosm',
- 'cotd',
- 'cotg',
- 'coth',
- 'cothm',
- 'cov',
- 'covar',
- 'createXConfiguration',
- 'createfun',
- 'createstruct',
- 'cross',
- 'crossover_ga_binary',
- 'crossover_ga_default',
- 'csc',
- 'cscd',
- 'csch',
- 'csgn',
- 'csim',
- 'cspect',
- 'ctr_gram',
- 'czt',
- 'dae',
- 'daeoptions',
- 'damp',
- 'datafit',
- 'date',
- 'datenum',
- 'datevec',
- 'dbphi',
- 'dcf',
- 'ddp',
- 'dec2bin',
- 'dec2hex',
- 'dec2oct',
- 'del_help_chapter',
- 'del_module_help_chapter',
- 'demo_begin',
- 'demo_choose',
- 'demo_compiler',
- 'demo_end',
- 'demo_file_choice',
- 'demo_folder_choice',
- 'demo_function_choice',
- 'demo_gui',
- 'demo_run',
- 'demo_viewCode',
- 'denom',
- 'derivat',
- 'derivative',
- 'des2ss',
- 'des2tf',
- 'detectmsifort64tools',
- 'detectmsvc64tools',
- 'determ',
- 'detr',
- 'detrend',
- 'devtools_run_builder',
- 'dhnorm',
- 'diff',
- 'diophant',
- 'dir',
- 'dirname',
- 'dispfiles',
- 'dllinfo',
- 'dscr',
- 'dsimul',
- 'dt_ility',
- 'dtsi',
- 'edit',
- 'edit_error',
- 'editor',
- 'eigenmarkov',
- 'eigs',
- 'ell1mag',
- 'enlarge_shape',
- 'entropy',
- 'eomday',
- 'epred',
- 'eqfir',
- 'eqiir',
- 'equil',
- 'equil1',
- 'erfinv',
- 'etime',
- 'eval',
- 'evans',
- 'evstr',
- 'example_run',
- 'expression2code',
- 'extract_help_examples',
- 'factor',
- 'factorial',
- 'factors',
- 'faurre',
- 'ffilt',
- 'fft2',
- 'fftshift',
- 'fieldnames',
- 'filt_sinc',
- 'filter',
- 'findABCD',
- 'findAC',
- 'findBDK',
- 'findR',
- 'find_freq',
- 'find_links',
- 'find_scicos_version',
- 'findm',
- 'findmsifortcompiler',
- 'findmsvccompiler',
- 'findx0BD',
- 'firstnonsingleton',
- 'fix',
- 'fixedpointgcd',
- 'flipdim',
- 'flts',
- 'fminsearch',
- 'formatBlackTip',
- 'formatBodeMagTip',
- 'formatBodePhaseTip',
- 'formatGainplotTip',
- 'formatHallModuleTip',
- 'formatHallPhaseTip',
- 'formatNicholsGainTip',
- 'formatNicholsPhaseTip',
- 'formatNyquistTip',
- 'formatPhaseplotTip',
- 'formatSgridDampingTip',
- 'formatSgridFreqTip',
- 'formatZgridDampingTip',
- 'formatZgridFreqTip',
- 'format_txt',
- 'fourplan',
- 'frep2tf',
- 'freson',
- 'frfit',
- 'frmag',
- 'fseek_origin',
- 'fsfirlin',
- 'fspec',
- 'fspecg',
- 'fstabst',
- 'ftest',
- 'ftuneq',
- 'fullfile',
- 'fullrf',
- 'fullrfk',
- 'fun2string',
- 'g_margin',
- 'gainplot',
- 'gamitg',
- 'gcare',
- 'gcd',
- 'gencompilationflags_unix',
- 'generateBlockImage',
- 'generateBlockImages',
- 'generic_i_ce',
- 'generic_i_h',
- 'generic_i_hm',
- 'generic_i_s',
- 'generic_i_st',
- 'genlib',
- 'genmarkov',
- 'geomean',
- 'getDiagramVersion',
- 'getModelicaPath',
- 'getPreferencesValue',
- 'get_file_path',
- 'get_function_path',
- 'get_param',
- 'get_profile',
- 'get_scicos_version',
- 'getd',
- 'getscilabkeywords',
- 'getshell',
- 'gettklib',
- 'gfare',
- 'gfrancis',
- 'givens',
- 'glever',
- 'gmres',
- 'group',
- 'gschur',
- 'gspec',
- 'gtild',
- 'h2norm',
- 'h_cl',
- 'h_inf',
- 'h_inf_st',
- 'h_norm',
- 'hallchart',
- 'halt',
- 'hank',
- 'hankelsv',
- 'harmean',
- 'haveacompiler',
- 'head_comments',
- 'help_from_sci',
- 'help_skeleton',
- 'hermit',
- 'hex2dec',
- 'hilb',
- 'hilbert',
- 'histc',
- 'horner',
- 'householder',
- 'hrmt',
- 'htrianr',
- 'hypermat',
- 'idct',
- 'idst',
- 'ifft',
- 'ifftshift',
- 'iir',
- 'iirgroup',
- 'iirlp',
- 'iirmod',
- 'ilib_build',
- 'ilib_build_jar',
- 'ilib_compile',
- 'ilib_for_link',
- 'ilib_gen_Make',
- 'ilib_gen_Make_unix',
- 'ilib_gen_cleaner',
- 'ilib_gen_gateway',
- 'ilib_gen_loader',
- 'ilib_include_flag',
- 'ilib_mex_build',
- 'im_inv',
- 'importScicosDiagram',
- 'importScicosPal',
- 'importXcosDiagram',
- 'imrep2ss',
- 'ind2sub',
- 'inistate',
- 'init_ga_default',
- 'init_param',
- 'initial_scicos_tables',
- 'input',
- 'instruction2code',
- 'intc',
- 'intdec',
- 'integrate',
- 'interp1',
- 'interpln',
- 'intersect',
- 'intl',
- 'intsplin',
- 'inttrap',
- 'inv_coeff',
- 'invr',
- 'invrs',
- 'invsyslin',
- 'iqr',
- 'isLeapYear',
- 'is_absolute_path',
- 'is_param',
- 'iscell',
- 'iscellstr',
- 'iscolumn',
- 'isempty',
- 'isfield',
- 'isinf',
- 'ismatrix',
- 'isnan',
- 'isrow',
- 'isscalar',
- 'issparse',
- 'issquare',
- 'isstruct',
- 'isvector',
- 'jmat',
- 'justify',
- 'kalm',
- 'karmarkar',
- 'kernel',
- 'kpure',
- 'krac2',
- 'kroneck',
- 'lattn',
- 'lattp',
- 'launchtest',
- 'lcf',
- 'lcm',
- 'lcmdiag',
- 'leastsq',
- 'leqe',
- 'leqr',
- 'lev',
- 'levin',
- 'lex_sort',
- 'lft',
- 'lin',
- 'lin2mu',
- 'lincos',
- 'lindquist',
- 'linf',
- 'linfn',
- 'linsolve',
- 'linspace',
- 'list2vec',
- 'list_param',
- 'listfiles',
- 'listfunctions',
- 'listvarinfile',
- 'lmisolver',
- 'lmitool',
- 'loadXcosLibs',
- 'loadmatfile',
- 'loadwave',
- 'log10',
- 'log2',
- 'logm',
- 'logspace',
- 'lqe',
- 'lqg',
- 'lqg2stan',
- 'lqg_ltr',
- 'lqr',
- 'ls',
- 'lyap',
- 'm2sci_gui',
- 'm_circle',
- 'macglov',
- 'macrovar',
- 'mad',
- 'makecell',
- 'manedit',
- 'mapsound',
- 'markp2ss',
- 'matfile2sci',
- 'mdelete',
- 'mean',
- 'meanf',
- 'median',
- 'members',
- 'mese',
- 'meshgrid',
- 'mfft',
- 'mfile2sci',
- 'minreal',
- 'minss',
- 'mkdir',
- 'modulo',
- 'moment',
- 'mrfit',
- 'msd',
- 'mstr2sci',
- 'mtlb',
- 'mtlb_0',
- 'mtlb_a',
- 'mtlb_all',
- 'mtlb_any',
- 'mtlb_axes',
- 'mtlb_axis',
- 'mtlb_beta',
- 'mtlb_box',
- 'mtlb_choices',
- 'mtlb_close',
- 'mtlb_colordef',
- 'mtlb_cond',
- 'mtlb_cov',
- 'mtlb_cumprod',
- 'mtlb_cumsum',
- 'mtlb_dec2hex',
- 'mtlb_delete',
- 'mtlb_diag',
- 'mtlb_diff',
- 'mtlb_dir',
- 'mtlb_double',
- 'mtlb_e',
- 'mtlb_echo',
- 'mtlb_error',
- 'mtlb_eval',
- 'mtlb_exist',
- 'mtlb_eye',
- 'mtlb_false',
- 'mtlb_fft',
- 'mtlb_fftshift',
- 'mtlb_filter',
- 'mtlb_find',
- 'mtlb_findstr',
- 'mtlb_fliplr',
- 'mtlb_fopen',
- 'mtlb_format',
- 'mtlb_fprintf',
- 'mtlb_fread',
- 'mtlb_fscanf',
- 'mtlb_full',
- 'mtlb_fwrite',
- 'mtlb_get',
- 'mtlb_grid',
- 'mtlb_hold',
- 'mtlb_i',
- 'mtlb_ifft',
- 'mtlb_image',
- 'mtlb_imp',
- 'mtlb_int16',
- 'mtlb_int32',
- 'mtlb_int8',
- 'mtlb_is',
- 'mtlb_isa',
- 'mtlb_isfield',
- 'mtlb_isletter',
- 'mtlb_isspace',
- 'mtlb_l',
- 'mtlb_legendre',
- 'mtlb_linspace',
- 'mtlb_logic',
- 'mtlb_logical',
- 'mtlb_loglog',
- 'mtlb_lower',
- 'mtlb_max',
- 'mtlb_mean',
- 'mtlb_median',
- 'mtlb_mesh',
- 'mtlb_meshdom',
- 'mtlb_min',
- 'mtlb_more',
- 'mtlb_num2str',
- 'mtlb_ones',
- 'mtlb_pcolor',
- 'mtlb_plot',
- 'mtlb_prod',
- 'mtlb_qr',
- 'mtlb_qz',
- 'mtlb_rand',
- 'mtlb_randn',
- 'mtlb_rcond',
- 'mtlb_realmax',
- 'mtlb_realmin',
- 'mtlb_s',
- 'mtlb_semilogx',
- 'mtlb_semilogy',
- 'mtlb_setstr',
- 'mtlb_size',
- 'mtlb_sort',
- 'mtlb_sortrows',
- 'mtlb_sprintf',
- 'mtlb_sscanf',
- 'mtlb_std',
- 'mtlb_strcmp',
- 'mtlb_strcmpi',
- 'mtlb_strfind',
- 'mtlb_strrep',
- 'mtlb_subplot',
- 'mtlb_sum',
- 'mtlb_t',
- 'mtlb_toeplitz',
- 'mtlb_tril',
- 'mtlb_triu',
- 'mtlb_true',
- 'mtlb_type',
- 'mtlb_uint16',
- 'mtlb_uint32',
- 'mtlb_uint8',
- 'mtlb_upper',
- 'mtlb_var',
- 'mtlb_zeros',
- 'mu2lin',
- 'mutation_ga_binary',
- 'mutation_ga_default',
- 'mvcorrel',
- 'mvvacov',
- 'nancumsum',
- 'nand2mean',
- 'nanmax',
- 'nanmean',
- 'nanmeanf',
- 'nanmedian',
- 'nanmin',
- 'nanreglin',
- 'nanstdev',
- 'nansum',
- 'narsimul',
- 'ndgrid',
- 'ndims',
- 'nehari',
- 'neigh_func_csa',
- 'neigh_func_default',
- 'neigh_func_fsa',
- 'neigh_func_vfsa',
- 'neldermead_cget',
- 'neldermead_configure',
- 'neldermead_costf',
- 'neldermead_defaultoutput',
- 'neldermead_destroy',
- 'neldermead_function',
- 'neldermead_get',
- 'neldermead_log',
- 'neldermead_new',
- 'neldermead_restart',
- 'neldermead_search',
- 'neldermead_updatesimp',
- 'nextpow2',
- 'nfreq',
- 'nicholschart',
- 'nlev',
- 'nmplot_cget',
- 'nmplot_configure',
- 'nmplot_contour',
- 'nmplot_destroy',
- 'nmplot_function',
- 'nmplot_get',
- 'nmplot_historyplot',
- 'nmplot_log',
- 'nmplot_new',
- 'nmplot_outputcmd',
- 'nmplot_restart',
- 'nmplot_search',
- 'nmplot_simplexhistory',
- 'noisegen',
- 'nonreg_test_run',
- 'now',
- 'nthroot',
- 'null',
- 'num2cell',
- 'numderivative',
- 'numdiff',
- 'numer',
- 'nyquist',
- 'nyquistfrequencybounds',
- 'obs_gram',
- 'obscont',
- 'observer',
- 'obsv_mat',
- 'obsvss',
- 'oct2dec',
- 'odeoptions',
- 'optim_ga',
- 'optim_moga',
- 'optim_nsga',
- 'optim_nsga2',
- 'optim_sa',
- 'optimbase_cget',
- 'optimbase_checkbounds',
- 'optimbase_checkcostfun',
- 'optimbase_checkx0',
- 'optimbase_configure',
- 'optimbase_destroy',
- 'optimbase_function',
- 'optimbase_get',
- 'optimbase_hasbounds',
- 'optimbase_hasconstraints',
- 'optimbase_hasnlcons',
- 'optimbase_histget',
- 'optimbase_histset',
- 'optimbase_incriter',
- 'optimbase_isfeasible',
- 'optimbase_isinbounds',
- 'optimbase_isinnonlincons',
- 'optimbase_log',
- 'optimbase_logshutdown',
- 'optimbase_logstartup',
- 'optimbase_new',
- 'optimbase_outputcmd',
- 'optimbase_outstruct',
- 'optimbase_proj2bnds',
- 'optimbase_set',
- 'optimbase_stoplog',
- 'optimbase_terminate',
- 'optimget',
- 'optimplotfunccount',
- 'optimplotfval',
- 'optimplotx',
- 'optimset',
- 'optimsimplex_center',
- 'optimsimplex_check',
- 'optimsimplex_compsomefv',
- 'optimsimplex_computefv',
- 'optimsimplex_deltafv',
- 'optimsimplex_deltafvmax',
- 'optimsimplex_destroy',
- 'optimsimplex_dirmat',
- 'optimsimplex_fvmean',
- 'optimsimplex_fvstdev',
- 'optimsimplex_fvvariance',
- 'optimsimplex_getall',
- 'optimsimplex_getallfv',
- 'optimsimplex_getallx',
- 'optimsimplex_getfv',
- 'optimsimplex_getn',
- 'optimsimplex_getnbve',
- 'optimsimplex_getve',
- 'optimsimplex_getx',
- 'optimsimplex_gradientfv',
- 'optimsimplex_log',
- 'optimsimplex_new',
- 'optimsimplex_reflect',
- 'optimsimplex_setall',
- 'optimsimplex_setallfv',
- 'optimsimplex_setallx',
- 'optimsimplex_setfv',
- 'optimsimplex_setn',
- 'optimsimplex_setnbve',
- 'optimsimplex_setve',
- 'optimsimplex_setx',
- 'optimsimplex_shrink',
- 'optimsimplex_size',
- 'optimsimplex_sort',
- 'optimsimplex_xbar',
- 'orth',
- 'output_ga_default',
- 'output_moga_default',
- 'output_nsga2_default',
- 'output_nsga_default',
- 'p_margin',
- 'pack',
- 'pareto_filter',
- 'parrot',
- 'pbig',
- 'pca',
- 'pcg',
- 'pdiv',
- 'pen2ea',
- 'pencan',
- 'pencost',
- 'penlaur',
- 'perctl',
- 'perl',
- 'perms',
- 'permute',
- 'pertrans',
- 'pfactors',
- 'pfss',
- 'phasemag',
- 'phaseplot',
- 'phc',
- 'pinv',
- 'playsnd',
- 'plotprofile',
- 'plzr',
- 'pmodulo',
- 'pol2des',
- 'pol2str',
- 'polar',
- 'polfact',
- 'prbs_a',
- 'prettyprint',
- 'primes',
- 'princomp',
- 'profile',
- 'proj',
- 'projsl',
- 'projspec',
- 'psmall',
- 'pspect',
- 'qmr',
- 'qpsolve',
- 'quart',
- 'quaskro',
- 'rafiter',
- 'randpencil',
- 'range',
- 'rank',
- 'readxls',
- 'recompilefunction',
- 'recons',
- 'reglin',
- 'regress',
- 'remezb',
- 'remove_param',
- 'remove_profiling',
- 'repfreq',
- 'replace_Ix_by_Fx',
- 'repmat',
- 'reset_profiling',
- 'resize_matrix',
- 'returntoscilab',
- 'rhs2code',
- 'ric_desc',
- 'riccati',
- 'rmdir',
- 'routh_t',
- 'rowcomp',
- 'rowcompr',
- 'rowinout',
- 'rowregul',
- 'rowshuff',
- 'rref',
- 'sample',
- 'samplef',
- 'samwr',
- 'savematfile',
- 'savewave',
- 'scanf',
- 'sci2exp',
- 'sciGUI_init',
- 'sci_sparse',
- 'scicos_getvalue',
- 'scicos_simulate',
- 'scicos_workspace_init',
- 'scisptdemo',
- 'scitest',
- 'sdiff',
- 'sec',
- 'secd',
- 'sech',
- 'selection_ga_elitist',
- 'selection_ga_random',
- 'sensi',
- 'setPreferencesValue',
- 'set_param',
- 'setdiff',
- 'sgrid',
- 'show_margins',
- 'show_pca',
- 'showprofile',
- 'signm',
- 'sinc',
- 'sincd',
- 'sind',
- 'sinh',
- 'sinhm',
- 'sinm',
- 'sm2des',
- 'sm2ss',
- 'smga',
- 'smooth',
- 'solve',
- 'sound',
- 'soundsec',
- 'sp2adj',
- 'spaninter',
- 'spanplus',
- 'spantwo',
- 'specfact',
- 'speye',
- 'sprand',
- 'spzeros',
- 'sqroot',
- 'sqrtm',
- 'squarewave',
- 'squeeze',
- 'srfaur',
- 'srkf',
- 'ss2des',
- 'ss2ss',
- 'ss2tf',
- 'sskf',
- 'ssprint',
- 'ssrand',
- 'st_deviation',
- 'st_i_generic',
- 'st_ility',
- 'stabil',
- 'statgain',
- 'stdev',
- 'stdevf',
- 'steadycos',
- 'strange',
- 'strcmpi',
- 'struct',
- 'sub2ind',
- 'sva',
- 'svplot',
- 'sylm',
- 'sylv',
- 'sysconv',
- 'sysdiag',
- 'sysfact',
- 'syslin',
- 'syssize',
- 'system',
- 'systmat',
- 'tabul',
- 'tand',
- 'tanh',
- 'tanhm',
- 'tanm',
- 'tbx_build_blocks',
- 'tbx_build_cleaner',
- 'tbx_build_gateway',
- 'tbx_build_gateway_clean',
- 'tbx_build_gateway_loader',
- 'tbx_build_help',
- 'tbx_build_help_loader',
- 'tbx_build_loader',
- 'tbx_build_localization',
- 'tbx_build_macros',
- 'tbx_build_pal_loader',
- 'tbx_build_src',
- 'tbx_builder',
- 'tbx_builder_gateway',
- 'tbx_builder_gateway_lang',
- 'tbx_builder_help',
- 'tbx_builder_help_lang',
- 'tbx_builder_macros',
- 'tbx_builder_src',
- 'tbx_builder_src_lang',
- 'tbx_generate_pofile',
- 'temp_law_csa',
- 'temp_law_default',
- 'temp_law_fsa',
- 'temp_law_huang',
- 'temp_law_vfsa',
- 'test_clean',
- 'test_on_columns',
- 'test_run',
- 'test_run_level',
- 'testexamples',
- 'tf2des',
- 'tf2ss',
- 'thrownan',
- 'tic',
- 'time_id',
- 'toc',
- 'toeplitz',
- 'tokenpos',
- 'toolboxes',
- 'trace',
- 'trans',
- 'translatepaths',
- 'tree2code',
- 'trfmod',
- 'trianfml',
- 'trimmean',
- 'trisolve',
- 'trzeros',
- 'typeof',
- 'ui_observer',
- 'union',
- 'unique',
- 'unit_test_run',
- 'unix_g',
- 'unix_s',
- 'unix_w',
- 'unix_x',
- 'unobs',
- 'unpack',
- 'unwrap',
- 'variance',
- 'variancef',
- 'vec2list',
- 'vectorfind',
- 'ver',
- 'warnobsolete',
- 'wavread',
- 'wavwrite',
- 'wcenter',
- 'weekday',
- 'wfir',
- 'wfir_gui',
- 'whereami',
- 'who_user',
- 'whos',
- 'wiener',
- 'wigner',
- 'window',
- 'winlist',
- 'with_javasci',
- 'with_macros_source',
- 'with_modelica_compiler',
- 'with_tk',
- 'xcorr',
- 'xcosBlockEval',
- 'xcosBlockInterface',
- 'xcosCodeGeneration',
- 'xcosConfigureModelica',
- 'xcosPal',
- 'xcosPalAdd',
- 'xcosPalAddBlock',
- 'xcosPalExport',
- 'xcosPalGenerateAllIcons',
- 'xcosShowBlockWarning',
- 'xcosValidateBlockSet',
- 'xcosValidateCompareBlock',
- 'xcos_compile',
- 'xcos_debug_gui',
- 'xcos_run',
- 'xcos_simulate',
- 'xcov',
- 'xmltochm',
- 'xmltoformat',
- 'xmltohtml',
- 'xmltojar',
- 'xmltopdf',
- 'xmltops',
- 'xmltoweb',
- 'yulewalk',
- 'zeropen',
- 'zgrid',
- 'zpbutt',
- 'zpch1',
- 'zpch2',
- 'zpell',
-)
-
-variables_kw = (
- '$',
- '%F',
- '%T',
- '%e',
- '%eps',
- '%f',
- '%fftw',
- '%gui',
- '%i',
- '%inf',
- '%io',
- '%modalWarning',
- '%nan',
- '%pi',
- '%s',
- '%t',
- '%tk',
- '%toolboxes',
- '%toolboxes_dir',
- '%z',
- 'PWD',
- 'SCI',
- 'SCIHOME',
- 'TMPDIR',
- 'arnoldilib',
- 'assertlib',
- 'atomslib',
- 'cacsdlib',
- 'compatibility_functilib',
- 'corelib',
- 'data_structureslib',
- 'demo_toolslib',
- 'development_toolslib',
- 'differential_equationlib',
- 'dynamic_linklib',
- 'elementary_functionslib',
- 'enull',
- 'evoid',
- 'external_objectslib',
- 'fd',
- 'fileiolib',
- 'functionslib',
- 'genetic_algorithmslib',
- 'helptoolslib',
- 'home',
- 'integerlib',
- 'interpolationlib',
- 'iolib',
- 'jnull',
- 'jvoid',
- 'linear_algebralib',
- 'm2scilib',
- 'matiolib',
- 'modules_managerlib',
- 'neldermeadlib',
- 'optimbaselib',
- 'optimizationlib',
- 'optimsimplexlib',
- 'output_streamlib',
- 'overloadinglib',
- 'parameterslib',
- 'polynomialslib',
- 'preferenceslib',
- 'randliblib',
- 'scicos_autolib',
- 'scicos_utilslib',
- 'scinoteslib',
- 'signal_processinglib',
- 'simulated_annealinglib',
- 'soundlib',
- 'sparselib',
- 'special_functionslib',
- 'spreadsheetlib',
- 'statisticslib',
- 'stringlib',
- 'tclscilib',
- 'timelib',
- 'umfpacklib',
- 'xcoslib',
-)
-
-
-if __name__ == '__main__':  # pragma: no cover
-    import subprocess
-    from pygments.util import format_lines, duplicates_removed
-
-    mapping = {'variables': 'builtin'}
-
-    def extract_completion(var_type):
-        s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
-                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                             text=True)
-        output = s.communicate('''\
-fd = mopen("/dev/stderr", "wt");
-mputl(strcat(completion("", "%s"), "||"), fd);
-mclose(fd)\n''' % var_type)
-        if '||' not in output[1]:
-            raise Exception(output[0])
-        # Invalid DISPLAY causes this to be output:
-        text = output[1].strip()
-        if text.startswith('Error: unable to open display \n'):
-            text = text[len('Error: unable to open display \n'):]
-        return text.split('||')
-
-    new_data = {}
-    seen = set()  # only keep first type for a given word
-    for t in ('functions', 'commands', 'macros', 'variables'):
-        new_data[t] = duplicates_removed(extract_completion(t), seen)
-        seen.update(set(new_data[t]))
-
-    with open(__file__, encoding='utf-8') as f:
-        content = f.read()
-
-    header = content[:content.find('# Autogenerated')]
-    footer = content[content.find("if __name__ == '__main__':"):]
-
-    with open(__file__, 'w', encoding='utf-8') as f:
-        f.write(header)
-        f.write('# Autogenerated\n\n')
-        for k, v in sorted(new_data.items()):
-            f.write(format_lines(k + '_kw', v) + '\n\n')
-        f.write(footer)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_sourcemod_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_sourcemod_builtins.py
deleted file mode 100644
index 02d3e77..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_sourcemod_builtins.py
+++ /dev/null
@@ -1,1151 +0,0 @@
-"""
- pygments.lexers._sourcemod_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of SourceMod functions.
-
- Do not edit the FUNCTIONS list by hand.
-
- Run with `python -I` to regenerate.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-FUNCTIONS = (
- 'OnEntityCreated',
- 'OnEntityDestroyed',
- 'OnGetGameDescription',
- 'OnLevelInit',
- 'SDKHook',
- 'SDKHookEx',
- 'SDKUnhook',
- 'SDKHooks_TakeDamage',
- 'SDKHooks_DropWeapon',
- 'TopMenuHandler',
- 'CreateTopMenu',
- 'LoadTopMenuConfig',
- 'AddToTopMenu',
- 'GetTopMenuInfoString',
- 'GetTopMenuObjName',
- 'RemoveFromTopMenu',
- 'DisplayTopMenu',
- 'DisplayTopMenuCategory',
- 'FindTopMenuCategory',
- 'SetTopMenuTitleCaching',
- 'OnAdminMenuCreated',
- 'OnAdminMenuReady',
- 'GetAdminTopMenu',
- 'AddTargetsToMenu',
- 'AddTargetsToMenu2',
- 'RedisplayAdminMenu',
- 'TEHook',
- 'AddTempEntHook',
- 'RemoveTempEntHook',
- 'TE_Start',
- 'TE_IsValidProp',
- 'TE_WriteNum',
- 'TE_ReadNum',
- 'TE_WriteFloat',
- 'TE_ReadFloat',
- 'TE_WriteVector',
- 'TE_ReadVector',
- 'TE_WriteAngles',
- 'TE_WriteFloatArray',
- 'TE_Send',
- 'TE_WriteEncodedEnt',
- 'TE_SendToAll',
- 'TE_SendToClient',
- 'CreateKeyValues',
- 'KvSetString',
- 'KvSetNum',
- 'KvSetUInt64',
- 'KvSetFloat',
- 'KvSetColor',
- 'KvSetVector',
- 'KvGetString',
- 'KvGetNum',
- 'KvGetFloat',
- 'KvGetColor',
- 'KvGetUInt64',
- 'KvGetVector',
- 'KvJumpToKey',
- 'KvJumpToKeySymbol',
- 'KvGotoFirstSubKey',
- 'KvGotoNextKey',
- 'KvSavePosition',
- 'KvDeleteKey',
- 'KvDeleteThis',
- 'KvGoBack',
- 'KvRewind',
- 'KvGetSectionName',
- 'KvSetSectionName',
- 'KvGetDataType',
- 'KeyValuesToFile',
- 'FileToKeyValues',
- 'StringToKeyValues',
- 'KvSetEscapeSequences',
- 'KvNodesInStack',
- 'KvCopySubkeys',
- 'KvFindKeyById',
- 'KvGetNameSymbol',
- 'KvGetSectionSymbol',
- 'TE_SetupSparks',
- 'TE_SetupSmoke',
- 'TE_SetupDust',
- 'TE_SetupMuzzleFlash',
- 'TE_SetupMetalSparks',
- 'TE_SetupEnergySplash',
- 'TE_SetupArmorRicochet',
- 'TE_SetupGlowSprite',
- 'TE_SetupExplosion',
- 'TE_SetupBloodSprite',
- 'TE_SetupBeamRingPoint',
- 'TE_SetupBeamPoints',
- 'TE_SetupBeamLaser',
- 'TE_SetupBeamRing',
- 'TE_SetupBeamFollow',
- 'HookEvent',
- 'HookEventEx',
- 'UnhookEvent',
- 'CreateEvent',
- 'FireEvent',
- 'CancelCreatedEvent',
- 'GetEventBool',
- 'SetEventBool',
- 'GetEventInt',
- 'SetEventInt',
- 'GetEventFloat',
- 'SetEventFloat',
- 'GetEventString',
- 'SetEventString',
- 'GetEventName',
- 'SetEventBroadcast',
- 'GetUserMessageType',
- 'GetUserMessageId',
- 'GetUserMessageName',
- 'StartMessage',
- 'StartMessageEx',
- 'EndMessage',
- 'MsgHook',
- 'MsgPostHook',
- 'HookUserMessage',
- 'UnhookUserMessage',
- 'StartMessageAll',
- 'StartMessageOne',
- 'InactivateClient',
- 'ReconnectClient',
- 'GetMaxEntities',
- 'GetEntityCount',
- 'IsValidEntity',
- 'IsValidEdict',
- 'IsEntNetworkable',
- 'CreateEdict',
- 'RemoveEdict',
- 'GetEdictFlags',
- 'SetEdictFlags',
- 'GetEdictClassname',
- 'GetEntityNetClass',
- 'ChangeEdictState',
- 'GetEntData',
- 'SetEntData',
- 'GetEntDataFloat',
- 'SetEntDataFloat',
- 'GetEntDataEnt2',
- 'SetEntDataEnt2',
- 'GetEntDataVector',
- 'SetEntDataVector',
- 'GetEntDataString',
- 'SetEntDataString',
- 'FindSendPropOffs',
- 'FindSendPropInfo',
- 'FindDataMapOffs',
- 'FindDataMapInfo',
- 'GetEntSendPropOffs',
- 'GetEntProp',
- 'SetEntProp',
- 'GetEntPropFloat',
- 'SetEntPropFloat',
- 'GetEntPropEnt',
- 'SetEntPropEnt',
- 'GetEntPropVector',
- 'SetEntPropVector',
- 'GetEntPropString',
- 'SetEntPropString',
- 'GetEntPropArraySize',
- 'GetEntDataArray',
- 'SetEntDataArray',
- 'GetEntityAddress',
- 'GetEntityClassname',
- 'float',
- 'FloatMul',
- 'FloatDiv',
- 'FloatAdd',
- 'FloatSub',
- 'FloatFraction',
- 'RoundToZero',
- 'RoundToCeil',
- 'RoundToFloor',
- 'RoundToNearest',
- 'FloatCompare',
- 'SquareRoot',
- 'Pow',
- 'Exponential',
- 'Logarithm',
- 'Sine',
- 'Cosine',
- 'Tangent',
- 'FloatAbs',
- 'ArcTangent',
- 'ArcCosine',
- 'ArcSine',
- 'ArcTangent2',
- 'RoundFloat',
- 'operator%',
- 'DegToRad',
- 'RadToDeg',
- 'GetURandomInt',
- 'GetURandomFloat',
- 'SetURandomSeed',
- 'SetURandomSeedSimple',
- 'RemovePlayerItem',
- 'GivePlayerItem',
- 'GetPlayerWeaponSlot',
- 'IgniteEntity',
- 'ExtinguishEntity',
- 'TeleportEntity',
- 'ForcePlayerSuicide',
- 'SlapPlayer',
- 'FindEntityByClassname',
- 'GetClientEyeAngles',
- 'CreateEntityByName',
- 'DispatchSpawn',
- 'DispatchKeyValue',
- 'DispatchKeyValueFloat',
- 'DispatchKeyValueVector',
- 'GetClientAimTarget',
- 'GetTeamCount',
- 'GetTeamName',
- 'GetTeamScore',
- 'SetTeamScore',
- 'GetTeamClientCount',
- 'SetEntityModel',
- 'GetPlayerDecalFile',
- 'GetPlayerJingleFile',
- 'GetServerNetStats',
- 'EquipPlayerWeapon',
- 'ActivateEntity',
- 'SetClientInfo',
- 'GivePlayerAmmo',
- 'SetClientListeningFlags',
- 'GetClientListeningFlags',
- 'SetListenOverride',
- 'GetListenOverride',
- 'IsClientMuted',
- 'TR_GetPointContents',
- 'TR_GetPointContentsEnt',
- 'TR_TraceRay',
- 'TR_TraceHull',
- 'TR_TraceRayFilter',
- 'TR_TraceHullFilter',
- 'TR_TraceRayEx',
- 'TR_TraceHullEx',
- 'TR_TraceRayFilterEx',
- 'TR_TraceHullFilterEx',
- 'TR_GetFraction',
- 'TR_GetEndPosition',
- 'TR_GetEntityIndex',
- 'TR_DidHit',
- 'TR_GetHitGroup',
- 'TR_GetPlaneNormal',
- 'TR_PointOutsideWorld',
- 'SortIntegers',
- 'SortFloats',
- 'SortStrings',
- 'SortFunc1D',
- 'SortCustom1D',
- 'SortCustom2D',
- 'SortADTArray',
- 'SortFuncADTArray',
- 'SortADTArrayCustom',
- 'CompileRegex',
- 'MatchRegex',
- 'GetRegexSubString',
- 'SimpleRegexMatch',
- 'TF2_GetPlayerClass',
- 'TF2_SetPlayerClass',
- 'TF2_RemoveWeaponSlot',
- 'TF2_RemoveAllWeapons',
- 'TF2_IsPlayerInCondition',
- 'TF2_GetObjectType',
- 'TF2_GetObjectMode',
- 'NominateMap',
- 'RemoveNominationByMap',
- 'RemoveNominationByOwner',
- 'GetExcludeMapList',
- 'GetNominatedMapList',
- 'CanMapChooserStartVote',
- 'InitiateMapChooserVote',
- 'HasEndOfMapVoteFinished',
- 'EndOfMapVoteEnabled',
- 'OnNominationRemoved',
- 'OnMapVoteStarted',
- 'CreateTimer',
- 'KillTimer',
- 'TriggerTimer',
- 'GetTickedTime',
- 'GetMapTimeLeft',
- 'GetMapTimeLimit',
- 'ExtendMapTimeLimit',
- 'GetTickInterval',
- 'OnMapTimeLeftChanged',
- 'IsServerProcessing',
- 'CreateDataTimer',
- 'ByteCountToCells',
- 'CreateArray',
- 'ClearArray',
- 'CloneArray',
- 'ResizeArray',
- 'GetArraySize',
- 'PushArrayCell',
- 'PushArrayString',
- 'PushArrayArray',
- 'GetArrayCell',
- 'GetArrayString',
- 'GetArrayArray',
- 'SetArrayCell',
- 'SetArrayString',
- 'SetArrayArray',
- 'ShiftArrayUp',
- 'RemoveFromArray',
- 'SwapArrayItems',
- 'FindStringInArray',
- 'FindValueInArray',
- 'ProcessTargetString',
- 'ReplyToTargetError',
- 'MultiTargetFilter',
- 'AddMultiTargetFilter',
- 'RemoveMultiTargetFilter',
- 'OnBanClient',
- 'OnBanIdentity',
- 'OnRemoveBan',
- 'BanClient',
- 'BanIdentity',
- 'RemoveBan',
- 'CreateTrie',
- 'SetTrieValue',
- 'SetTrieArray',
- 'SetTrieString',
- 'GetTrieValue',
- 'GetTrieArray',
- 'GetTrieString',
- 'RemoveFromTrie',
- 'ClearTrie',
- 'GetTrieSize',
- 'GetFunctionByName',
- 'CreateGlobalForward',
- 'CreateForward',
- 'GetForwardFunctionCount',
- 'AddToForward',
- 'RemoveFromForward',
- 'RemoveAllFromForward',
- 'Call_StartForward',
- 'Call_StartFunction',
- 'Call_PushCell',
- 'Call_PushCellRef',
- 'Call_PushFloat',
- 'Call_PushFloatRef',
- 'Call_PushArray',
- 'Call_PushArrayEx',
- 'Call_PushString',
- 'Call_PushStringEx',
- 'Call_Finish',
- 'Call_Cancel',
- 'NativeCall',
- 'CreateNative',
- 'ThrowNativeError',
- 'GetNativeStringLength',
- 'GetNativeString',
- 'SetNativeString',
- 'GetNativeCell',
- 'GetNativeCellRef',
- 'SetNativeCellRef',
- 'GetNativeArray',
- 'SetNativeArray',
- 'FormatNativeString',
- 'RequestFrameCallback',
- 'RequestFrame',
- 'OnRebuildAdminCache',
- 'DumpAdminCache',
- 'AddCommandOverride',
- 'GetCommandOverride',
- 'UnsetCommandOverride',
- 'CreateAdmGroup',
- 'FindAdmGroup',
- 'SetAdmGroupAddFlag',
- 'GetAdmGroupAddFlag',
- 'GetAdmGroupAddFlags',
- 'SetAdmGroupImmuneFrom',
- 'GetAdmGroupImmuneCount',
- 'GetAdmGroupImmuneFrom',
- 'AddAdmGroupCmdOverride',
- 'GetAdmGroupCmdOverride',
- 'RegisterAuthIdentType',
- 'CreateAdmin',
- 'GetAdminUsername',
- 'BindAdminIdentity',
- 'SetAdminFlag',
- 'GetAdminFlag',
- 'GetAdminFlags',
- 'AdminInheritGroup',
- 'GetAdminGroupCount',
- 'GetAdminGroup',
- 'SetAdminPassword',
- 'GetAdminPassword',
- 'FindAdminByIdentity',
- 'RemoveAdmin',
- 'FlagBitsToBitArray',
- 'FlagBitArrayToBits',
- 'FlagArrayToBits',
- 'FlagBitsToArray',
- 'FindFlagByName',
- 'FindFlagByChar',
- 'FindFlagChar',
- 'ReadFlagString',
- 'CanAdminTarget',
- 'CreateAuthMethod',
- 'SetAdmGroupImmunityLevel',
- 'GetAdmGroupImmunityLevel',
- 'SetAdminImmunityLevel',
- 'GetAdminImmunityLevel',
- 'FlagToBit',
- 'BitToFlag',
- 'ServerCommand',
- 'ServerCommandEx',
- 'InsertServerCommand',
- 'ServerExecute',
- 'ClientCommand',
- 'FakeClientCommand',
- 'FakeClientCommandEx',
- 'PrintToServer',
- 'PrintToConsole',
- 'ReplyToCommand',
- 'GetCmdReplySource',
- 'SetCmdReplySource',
- 'IsChatTrigger',
- 'ShowActivity2',
- 'ShowActivity',
- 'ShowActivityEx',
- 'FormatActivitySource',
- 'SrvCmd',
- 'RegServerCmd',
- 'ConCmd',
- 'RegConsoleCmd',
- 'RegAdminCmd',
- 'GetCmdArgs',
- 'GetCmdArg',
- 'GetCmdArgString',
- 'CreateConVar',
- 'FindConVar',
- 'ConVarChanged',
- 'HookConVarChange',
- 'UnhookConVarChange',
- 'GetConVarBool',
- 'SetConVarBool',
- 'GetConVarInt',
- 'SetConVarInt',
- 'GetConVarFloat',
- 'SetConVarFloat',
- 'GetConVarString',
- 'SetConVarString',
- 'ResetConVar',
- 'GetConVarDefault',
- 'GetConVarFlags',
- 'SetConVarFlags',
- 'GetConVarBounds',
- 'SetConVarBounds',
- 'GetConVarName',
- 'QueryClientConVar',
- 'GetCommandIterator',
- 'ReadCommandIterator',
- 'CheckCommandAccess',
- 'CheckAccess',
- 'IsValidConVarChar',
- 'GetCommandFlags',
- 'SetCommandFlags',
- 'FindFirstConCommand',
- 'FindNextConCommand',
- 'SendConVarValue',
- 'AddServerTag',
- 'RemoveServerTag',
- 'CommandListener',
- 'AddCommandListener',
- 'RemoveCommandListener',
- 'CommandExists',
- 'OnClientSayCommand',
- 'OnClientSayCommand_Post',
- 'TF2_IgnitePlayer',
- 'TF2_RespawnPlayer',
- 'TF2_RegeneratePlayer',
- 'TF2_AddCondition',
- 'TF2_RemoveCondition',
- 'TF2_SetPlayerPowerPlay',
- 'TF2_DisguisePlayer',
- 'TF2_RemovePlayerDisguise',
- 'TF2_StunPlayer',
- 'TF2_MakeBleed',
- 'TF2_GetClass',
- 'TF2_CalcIsAttackCritical',
- 'TF2_OnIsHolidayActive',
- 'TF2_IsHolidayActive',
- 'TF2_IsPlayerInDuel',
- 'TF2_RemoveWearable',
- 'TF2_OnConditionAdded',
- 'TF2_OnConditionRemoved',
- 'TF2_OnWaitingForPlayersStart',
- 'TF2_OnWaitingForPlayersEnd',
- 'TF2_OnPlayerTeleport',
- 'SQL_Connect',
- 'SQL_DefConnect',
- 'SQL_ConnectCustom',
- 'SQLite_UseDatabase',
- 'SQL_CheckConfig',
- 'SQL_GetDriver',
- 'SQL_ReadDriver',
- 'SQL_GetDriverIdent',
- 'SQL_GetDriverProduct',
- 'SQL_SetCharset',
- 'SQL_GetAffectedRows',
- 'SQL_GetInsertId',
- 'SQL_GetError',
- 'SQL_EscapeString',
- 'SQL_QuoteString',
- 'SQL_FastQuery',
- 'SQL_Query',
- 'SQL_PrepareQuery',
- 'SQL_FetchMoreResults',
- 'SQL_HasResultSet',
- 'SQL_GetRowCount',
- 'SQL_GetFieldCount',
- 'SQL_FieldNumToName',
- 'SQL_FieldNameToNum',
- 'SQL_FetchRow',
- 'SQL_MoreRows',
- 'SQL_Rewind',
- 'SQL_FetchString',
- 'SQL_FetchFloat',
- 'SQL_FetchInt',
- 'SQL_IsFieldNull',
- 'SQL_FetchSize',
- 'SQL_BindParamInt',
- 'SQL_BindParamFloat',
- 'SQL_BindParamString',
- 'SQL_Execute',
- 'SQL_LockDatabase',
- 'SQL_UnlockDatabase',
- 'SQLTCallback',
- 'SQL_IsSameConnection',
- 'SQL_TConnect',
- 'SQL_TQuery',
- 'SQL_CreateTransaction',
- 'SQL_AddQuery',
- 'SQLTxnSuccess',
- 'SQLTxnFailure',
- 'SQL_ExecuteTransaction',
- 'CloseHandle',
- 'CloneHandle',
- 'MenuHandler',
- 'CreateMenu',
- 'DisplayMenu',
- 'DisplayMenuAtItem',
- 'AddMenuItem',
- 'InsertMenuItem',
- 'RemoveMenuItem',
- 'RemoveAllMenuItems',
- 'GetMenuItem',
- 'GetMenuSelectionPosition',
- 'GetMenuItemCount',
- 'SetMenuPagination',
- 'GetMenuPagination',
- 'GetMenuStyle',
- 'SetMenuTitle',
- 'GetMenuTitle',
- 'CreatePanelFromMenu',
- 'GetMenuExitButton',
- 'SetMenuExitButton',
- 'GetMenuExitBackButton',
- 'SetMenuExitBackButton',
- 'SetMenuNoVoteButton',
- 'CancelMenu',
- 'GetMenuOptionFlags',
- 'SetMenuOptionFlags',
- 'IsVoteInProgress',
- 'CancelVote',
- 'VoteMenu',
- 'VoteMenuToAll',
- 'VoteHandler',
- 'SetVoteResultCallback',
- 'CheckVoteDelay',
- 'IsClientInVotePool',
- 'RedrawClientVoteMenu',
- 'GetMenuStyleHandle',
- 'CreatePanel',
- 'CreateMenuEx',
- 'GetClientMenu',
- 'CancelClientMenu',
- 'GetMaxPageItems',
- 'GetPanelStyle',
- 'SetPanelTitle',
- 'DrawPanelItem',
- 'DrawPanelText',
- 'CanPanelDrawFlags',
- 'SetPanelKeys',
- 'SendPanelToClient',
- 'GetPanelTextRemaining',
- 'GetPanelCurrentKey',
- 'SetPanelCurrentKey',
- 'RedrawMenuItem',
- 'InternalShowMenu',
- 'GetMenuVoteInfo',
- 'IsNewVoteAllowed',
- 'PrefetchSound',
- 'EmitAmbientSound',
- 'FadeClientVolume',
- 'StopSound',
- 'EmitSound',
- 'EmitSentence',
- 'GetDistGainFromSoundLevel',
- 'AmbientSHook',
- 'NormalSHook',
- 'AddAmbientSoundHook',
- 'AddNormalSoundHook',
- 'RemoveAmbientSoundHook',
- 'RemoveNormalSoundHook',
- 'EmitSoundToClient',
- 'EmitSoundToAll',
- 'ATTN_TO_SNDLEVEL',
- 'GetGameSoundParams',
- 'EmitGameSound',
- 'EmitAmbientGameSound',
- 'EmitGameSoundToClient',
- 'EmitGameSoundToAll',
- 'PrecacheScriptSound',
- 'strlen',
- 'StrContains',
- 'strcmp',
- 'strncmp',
- 'StrEqual',
- 'strcopy',
- 'Format',
- 'FormatEx',
- 'VFormat',
- 'StringToInt',
- 'StringToIntEx',
- 'IntToString',
- 'StringToFloat',
- 'StringToFloatEx',
- 'FloatToString',
- 'BreakString',
- 'TrimString',
- 'SplitString',
- 'ReplaceString',
- 'ReplaceStringEx',
- 'GetCharBytes',
- 'IsCharAlpha',
- 'IsCharNumeric',
- 'IsCharSpace',
- 'IsCharMB',
- 'IsCharUpper',
- 'IsCharLower',
- 'StripQuotes',
- 'CharToUpper',
- 'CharToLower',
- 'FindCharInString',
- 'StrCat',
- 'ExplodeString',
- 'ImplodeStrings',
- 'GetVectorLength',
- 'GetVectorDistance',
- 'GetVectorDotProduct',
- 'GetVectorCrossProduct',
- 'NormalizeVector',
- 'GetAngleVectors',
- 'GetVectorAngles',
- 'GetVectorVectors',
- 'AddVectors',
- 'SubtractVectors',
- 'ScaleVector',
- 'NegateVector',
- 'MakeVectorFromPoints',
- 'BaseComm_IsClientGagged',
- 'BaseComm_IsClientMuted',
- 'BaseComm_SetClientGag',
- 'BaseComm_SetClientMute',
- 'FormatUserLogText',
- 'FindPluginByFile',
- 'FindTarget',
- 'AcceptEntityInput',
- 'SetVariantBool',
- 'SetVariantString',
- 'SetVariantInt',
- 'SetVariantFloat',
- 'SetVariantVector3D',
- 'SetVariantPosVector3D',
- 'SetVariantColor',
- 'SetVariantEntity',
- 'GameRules_GetProp',
- 'GameRules_SetProp',
- 'GameRules_GetPropFloat',
- 'GameRules_SetPropFloat',
- 'GameRules_GetPropEnt',
- 'GameRules_SetPropEnt',
- 'GameRules_GetPropVector',
- 'GameRules_SetPropVector',
- 'GameRules_GetPropString',
- 'GameRules_SetPropString',
- 'GameRules_GetRoundState',
- 'OnClientConnect',
- 'OnClientConnected',
- 'OnClientPutInServer',
- 'OnClientDisconnect',
- 'OnClientDisconnect_Post',
- 'OnClientCommand',
- 'OnClientSettingsChanged',
- 'OnClientAuthorized',
- 'OnClientPreAdminCheck',
- 'OnClientPostAdminFilter',
- 'OnClientPostAdminCheck',
- 'GetMaxClients',
- 'GetMaxHumanPlayers',
- 'GetClientCount',
- 'GetClientName',
- 'GetClientIP',
- 'GetClientAuthString',
- 'GetClientAuthId',
- 'GetSteamAccountID',
- 'GetClientUserId',
- 'IsClientConnected',
- 'IsClientInGame',
- 'IsClientInKickQueue',
- 'IsClientAuthorized',
- 'IsFakeClient',
- 'IsClientSourceTV',
- 'IsClientReplay',
- 'IsClientObserver',
- 'IsPlayerAlive',
- 'GetClientInfo',
- 'GetClientTeam',
- 'SetUserAdmin',
- 'GetUserAdmin',
- 'AddUserFlags',
- 'RemoveUserFlags',
- 'SetUserFlagBits',
- 'GetUserFlagBits',
- 'CanUserTarget',
- 'RunAdminCacheChecks',
- 'NotifyPostAdminCheck',
- 'CreateFakeClient',
- 'SetFakeClientConVar',
- 'GetClientHealth',
- 'GetClientModel',
- 'GetClientWeapon',
- 'GetClientMaxs',
- 'GetClientMins',
- 'GetClientAbsAngles',
- 'GetClientAbsOrigin',
- 'GetClientArmor',
- 'GetClientDeaths',
- 'GetClientFrags',
- 'GetClientDataRate',
- 'IsClientTimingOut',
- 'GetClientTime',
- 'GetClientLatency',
- 'GetClientAvgLatency',
- 'GetClientAvgLoss',
- 'GetClientAvgChoke',
- 'GetClientAvgData',
- 'GetClientAvgPackets',
- 'GetClientOfUserId',
- 'KickClient',
- 'KickClientEx',
- 'ChangeClientTeam',
- 'GetClientSerial',
- 'GetClientFromSerial',
- 'FindStringTable',
- 'GetNumStringTables',
- 'GetStringTableNumStrings',
- 'GetStringTableMaxStrings',
- 'GetStringTableName',
- 'FindStringIndex',
- 'ReadStringTable',
- 'GetStringTableDataLength',
- 'GetStringTableData',
- 'SetStringTableData',
- 'AddToStringTable',
- 'LockStringTables',
- 'AddFileToDownloadsTable',
- 'GetEntityFlags',
- 'SetEntityFlags',
- 'GetEntityMoveType',
- 'SetEntityMoveType',
- 'GetEntityRenderMode',
- 'SetEntityRenderMode',
- 'GetEntityRenderFx',
- 'SetEntityRenderFx',
- 'SetEntityRenderColor',
- 'GetEntityGravity',
- 'SetEntityGravity',
- 'SetEntityHealth',
- 'GetClientButtons',
- 'EntityOutput',
- 'HookEntityOutput',
- 'UnhookEntityOutput',
- 'HookSingleEntityOutput',
- 'UnhookSingleEntityOutput',
- 'SMC_CreateParser',
- 'SMC_ParseFile',
- 'SMC_GetErrorString',
- 'SMC_ParseStart',
- 'SMC_SetParseStart',
- 'SMC_ParseEnd',
- 'SMC_SetParseEnd',
- 'SMC_NewSection',
- 'SMC_KeyValue',
- 'SMC_EndSection',
- 'SMC_SetReaders',
- 'SMC_RawLine',
- 'SMC_SetRawLine',
- 'BfWriteBool',
- 'BfWriteByte',
- 'BfWriteChar',
- 'BfWriteShort',
- 'BfWriteWord',
- 'BfWriteNum',
- 'BfWriteFloat',
- 'BfWriteString',
- 'BfWriteEntity',
- 'BfWriteAngle',
- 'BfWriteCoord',
- 'BfWriteVecCoord',
- 'BfWriteVecNormal',
- 'BfWriteAngles',
- 'BfReadBool',
- 'BfReadByte',
- 'BfReadChar',
- 'BfReadShort',
- 'BfReadWord',
- 'BfReadNum',
- 'BfReadFloat',
- 'BfReadString',
- 'BfReadEntity',
- 'BfReadAngle',
- 'BfReadCoord',
- 'BfReadVecCoord',
- 'BfReadVecNormal',
- 'BfReadAngles',
- 'BfGetNumBytesLeft',
- 'CreateProfiler',
- 'StartProfiling',
- 'StopProfiling',
- 'GetProfilerTime',
- 'OnPluginStart',
- 'AskPluginLoad2',
- 'OnPluginEnd',
- 'OnPluginPauseChange',
- 'OnGameFrame',
- 'OnMapStart',
- 'OnMapEnd',
- 'OnConfigsExecuted',
- 'OnAutoConfigsBuffered',
- 'OnAllPluginsLoaded',
- 'GetMyHandle',
- 'GetPluginIterator',
- 'MorePlugins',
- 'ReadPlugin',
- 'GetPluginStatus',
- 'GetPluginFilename',
- 'IsPluginDebugging',
- 'GetPluginInfo',
- 'FindPluginByNumber',
- 'SetFailState',
- 'ThrowError',
- 'GetTime',
- 'FormatTime',
- 'LoadGameConfigFile',
- 'GameConfGetOffset',
- 'GameConfGetKeyValue',
- 'GameConfGetAddress',
- 'GetSysTickCount',
- 'AutoExecConfig',
- 'RegPluginLibrary',
- 'LibraryExists',
- 'GetExtensionFileStatus',
- 'OnLibraryAdded',
- 'OnLibraryRemoved',
- 'ReadMapList',
- 'SetMapListCompatBind',
- 'OnClientFloodCheck',
- 'OnClientFloodResult',
- 'CanTestFeatures',
- 'GetFeatureStatus',
- 'RequireFeature',
- 'LoadFromAddress',
- 'StoreToAddress',
- 'CreateStack',
- 'PushStackCell',
- 'PushStackString',
- 'PushStackArray',
- 'PopStackCell',
- 'PopStackString',
- 'PopStackArray',
- 'IsStackEmpty',
- 'PopStack',
- 'OnPlayerRunCmd',
- 'BuildPath',
- 'OpenDirectory',
- 'ReadDirEntry',
- 'OpenFile',
- 'DeleteFile',
- 'ReadFileLine',
- 'ReadFile',
- 'ReadFileString',
- 'WriteFile',
- 'WriteFileString',
- 'WriteFileLine',
- 'ReadFileCell',
- 'WriteFileCell',
- 'IsEndOfFile',
- 'FileSeek',
- 'FilePosition',
- 'FileExists',
- 'RenameFile',
- 'DirExists',
- 'FileSize',
- 'FlushFile',
- 'RemoveDir',
- 'CreateDirectory',
- 'GetFileTime',
- 'LogToOpenFile',
- 'LogToOpenFileEx',
- 'PbReadInt',
- 'PbReadFloat',
- 'PbReadBool',
- 'PbReadString',
- 'PbReadColor',
- 'PbReadAngle',
- 'PbReadVector',
- 'PbReadVector2D',
- 'PbGetRepeatedFieldCount',
- 'PbSetInt',
- 'PbSetFloat',
- 'PbSetBool',
- 'PbSetString',
- 'PbSetColor',
- 'PbSetAngle',
- 'PbSetVector',
- 'PbSetVector2D',
- 'PbAddInt',
- 'PbAddFloat',
- 'PbAddBool',
- 'PbAddString',
- 'PbAddColor',
- 'PbAddAngle',
- 'PbAddVector',
- 'PbAddVector2D',
- 'PbRemoveRepeatedFieldValue',
- 'PbReadMessage',
- 'PbReadRepeatedMessage',
- 'PbAddMessage',
- 'SetNextMap',
- 'GetNextMap',
- 'ForceChangeLevel',
- 'GetMapHistorySize',
- 'GetMapHistory',
- 'GeoipCode2',
- 'GeoipCode3',
- 'GeoipCountry',
- 'MarkNativeAsOptional',
- 'RegClientCookie',
- 'FindClientCookie',
- 'SetClientCookie',
- 'GetClientCookie',
- 'SetAuthIdCookie',
- 'AreClientCookiesCached',
- 'OnClientCookiesCached',
- 'CookieMenuHandler',
- 'SetCookiePrefabMenu',
- 'SetCookieMenuItem',
- 'ShowCookieMenu',
- 'GetCookieIterator',
- 'ReadCookieIterator',
- 'GetCookieAccess',
- 'GetClientCookieTime',
- 'LoadTranslations',
- 'SetGlobalTransTarget',
- 'GetClientLanguage',
- 'GetServerLanguage',
- 'GetLanguageCount',
- 'GetLanguageInfo',
- 'SetClientLanguage',
- 'GetLanguageByCode',
- 'GetLanguageByName',
- 'CS_OnBuyCommand',
- 'CS_OnCSWeaponDrop',
- 'CS_OnGetWeaponPrice',
- 'CS_OnTerminateRound',
- 'CS_RespawnPlayer',
- 'CS_SwitchTeam',
- 'CS_DropWeapon',
- 'CS_TerminateRound',
- 'CS_GetTranslatedWeaponAlias',
- 'CS_GetWeaponPrice',
- 'CS_GetClientClanTag',
- 'CS_SetClientClanTag',
- 'CS_GetTeamScore',
- 'CS_SetTeamScore',
- 'CS_GetMVPCount',
- 'CS_SetMVPCount',
- 'CS_GetClientContributionScore',
- 'CS_SetClientContributionScore',
- 'CS_GetClientAssists',
- 'CS_SetClientAssists',
- 'CS_AliasToWeaponID',
- 'CS_WeaponIDToAlias',
- 'CS_IsValidWeaponID',
- 'CS_UpdateClientModel',
- 'LogToGame',
- 'SetRandomSeed',
- 'GetRandomFloat',
- 'GetRandomInt',
- 'IsMapValid',
- 'IsDedicatedServer',
- 'GetEngineTime',
- 'GetGameTime',
- 'GetGameTickCount',
- 'GetGameDescription',
- 'GetGameFolderName',
- 'GetCurrentMap',
- 'PrecacheModel',
- 'PrecacheSentenceFile',
- 'PrecacheDecal',
- 'PrecacheGeneric',
- 'IsModelPrecached',
- 'IsDecalPrecached',
- 'IsGenericPrecached',
- 'PrecacheSound',
- 'IsSoundPrecached',
- 'CreateDialog',
- 'GetEngineVersion',
- 'PrintToChat',
- 'PrintToChatAll',
- 'PrintCenterText',
- 'PrintCenterTextAll',
- 'PrintHintText',
- 'PrintHintTextToAll',
- 'ShowVGUIPanel',
- 'CreateHudSynchronizer',
- 'SetHudTextParams',
- 'SetHudTextParamsEx',
- 'ShowSyncHudText',
- 'ClearSyncHud',
- 'ShowHudText',
- 'ShowMOTDPanel',
- 'DisplayAskConnectBox',
- 'EntIndexToEntRef',
- 'EntRefToEntIndex',
- 'MakeCompatEntRef',
- 'SetClientViewEntity',
- 'SetLightStyle',
- 'GetClientEyePosition',
- 'CreateDataPack',
- 'WritePackCell',
- 'WritePackFloat',
- 'WritePackString',
- 'ReadPackCell',
- 'ReadPackFloat',
- 'ReadPackString',
- 'ResetPack',
- 'GetPackPosition',
- 'SetPackPosition',
- 'IsPackReadable',
- 'LogMessage',
- 'LogToFile',
- 'LogToFileEx',
- 'LogAction',
- 'LogError',
- 'OnLogAction',
- 'GameLogHook',
- 'AddGameLogHook',
- 'RemoveGameLogHook',
- 'FindTeamByName',
- 'StartPrepSDKCall',
- 'PrepSDKCall_SetVirtual',
- 'PrepSDKCall_SetSignature',
- 'PrepSDKCall_SetAddress',
- 'PrepSDKCall_SetFromConf',
- 'PrepSDKCall_SetReturnInfo',
- 'PrepSDKCall_AddParameter',
- 'EndPrepSDKCall',
- 'SDKCall',
- 'GetPlayerResourceEntity',
-)
-
-
-if __name__ == '__main__':  # pragma: no cover
-    import re
-    from urllib.request import FancyURLopener
-
-    from pygments.util import format_lines
-
-    class Opener(FancyURLopener):
-        version = 'Mozilla/5.0 (Pygments Sourcemod Builtins Update)'
-
-    opener = Opener()
-
-    def get_version():
-        f = opener.open('http://docs.sourcemod.net/api/index.php')
-        r = re.compile(r'SourceMod v\.<b>([\d\.]+(?:-\w+)?)</td>')
-        for line in f:
-            m = r.search(line.decode())
-            if m is not None:
-                return m.groups()[0]
-        raise ValueError('No version in api docs')
-
-    def get_sm_functions():
-        f = opener.open('http://docs.sourcemod.net/api/SMfuncs.js')
-        r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
-        functions = []
-        for line in f:
-            m = r.match(line.decode())
-            if m is not None:
-                functions.append(m.groups()[0])
-        return functions
-
-    def regenerate(filename, natives):
-        with open(filename, encoding='utf-8') as fp:
-            content = fp.read()
-
-        header = content[:content.find('FUNCTIONS = (')]
-        footer = content[content.find("if __name__ == '__main__':")-1:]
-
-        with open(filename, 'w', encoding='utf-8') as fp:
-            fp.write(header)
-            fp.write(format_lines('FUNCTIONS', natives))
-            fp.write('\n\n' + footer)
-
-    def run():
-        version = get_version()
-        print('> Downloading function index for SourceMod %s' % version)
-        functions = get_sm_functions()
-        print('> %d functions found:' % len(functions))
-
-        functionlist = []
-        for full_function_name in functions:
-            print('>> %s' % full_function_name)
-            functionlist.append(full_function_name)
-
-        regenerate(__file__, functionlist)
-
-    run()
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_stan_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_stan_builtins.py
deleted file mode 100644
index 23f7fa6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_stan_builtins.py
+++ /dev/null
@@ -1,648 +0,0 @@
-"""
- pygments.lexers._stan_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of functions for Stan used by
- ``pygments.lexers.math.StanLexer``. This is for Stan language version 2.29.0.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-KEYWORDS = (
- 'break',
- 'continue',
- 'else',
- 'for',
- 'if',
- 'in',
- 'print',
- 'reject',
- 'return',
- 'while',
-)
-
-TYPES = (
- 'cholesky_factor_corr',
- 'cholesky_factor_cov',
- 'corr_matrix',
- 'cov_matrix',
- 'int',
- 'matrix',
- 'ordered',
- 'positive_ordered',
- 'real',
- 'row_vector',
- 'simplex',
- 'unit_vector',
- 'vector',
- 'void',
- 'array',
- 'complex'
-)
-
-FUNCTIONS = (
- 'abs',
- 'acos',
- 'acosh',
- 'add_diag',
- 'algebra_solver',
- 'algebra_solver_newton',
- 'append_array',
- 'append_col',
- 'append_row',
- 'arg',
- 'asin',
- 'asinh',
- 'atan',
- 'atan2',
- 'atanh',
- 'bernoulli_cdf',
- 'bernoulli_lccdf',
- 'bernoulli_lcdf',
- 'bernoulli_logit_glm_lpmf',
- 'bernoulli_logit_glm_lupmf',
- 'bernoulli_logit_glm_rng',
- 'bernoulli_logit_lpmf',
- 'bernoulli_logit_lupmf',
- 'bernoulli_logit_rng',
- 'bernoulli_lpmf',
- 'bernoulli_lupmf',
- 'bernoulli_rng',
- 'bessel_first_kind',
- 'bessel_second_kind',
- 'beta',
- 'beta_binomial_cdf',
- 'beta_binomial_lccdf',
- 'beta_binomial_lcdf',
- 'beta_binomial_lpmf',
- 'beta_binomial_lupmf',
- 'beta_binomial_rng',
- 'beta_cdf',
- 'beta_lccdf',
- 'beta_lcdf',
- 'beta_lpdf',
- 'beta_lupdf',
- 'beta_proportion_lccdf',
- 'beta_proportion_lcdf',
- 'beta_proportion_rng',
- 'beta_rng',
- 'binary_log_loss',
- 'binomial_cdf',
- 'binomial_coefficient_log',
- 'binomial_lccdf',
- 'binomial_lcdf',
- 'binomial_logit_lpmf',
- 'binomial_logit_lupmf',
- 'binomial_lpmf',
- 'binomial_lupmf',
- 'binomial_rng',
- 'block',
- 'categorical_logit_glm_lpmf',
- 'categorical_logit_glm_lupmf',
- 'categorical_logit_lpmf',
- 'categorical_logit_lupmf',
- 'categorical_logit_rng',
- 'categorical_lpmf',
- 'categorical_lupmf',
- 'categorical_rng',
- 'cauchy_cdf',
- 'cauchy_lccdf',
- 'cauchy_lcdf',
- 'cauchy_lpdf',
- 'cauchy_lupdf',
- 'cauchy_rng',
- 'cbrt',
- 'ceil',
- 'chi_square_cdf',
- 'chi_square_lccdf',
- 'chi_square_lcdf',
- 'chi_square_lpdf',
- 'chi_square_lupdf',
- 'chi_square_rng',
- 'chol2inv',
- 'cholesky_decompose',
- 'choose',
- 'col',
- 'cols',
- 'columns_dot_product',
- 'columns_dot_self',
- 'conj',
- 'cos',
- 'cosh',
- 'cov_exp_quad',
- 'crossprod',
- 'csr_extract_u',
- 'csr_extract_v',
- 'csr_extract_w',
- 'csr_matrix_times_vector',
- 'csr_to_dense_matrix',
- 'cumulative_sum',
- 'dae',
- 'dae_tol',
- 'determinant',
- 'diag_matrix',
- 'diag_post_multiply',
- 'diag_pre_multiply',
- 'diagonal',
- 'digamma',
- 'dims',
- 'dirichlet_lpdf',
- 'dirichlet_lupdf',
- 'dirichlet_rng',
- 'discrete_range_cdf',
- 'discrete_range_lccdf',
- 'discrete_range_lcdf',
- 'discrete_range_lpmf',
- 'discrete_range_lupmf',
- 'discrete_range_rng',
- 'distance',
- 'dot_product',
- 'dot_self',
- 'double_exponential_cdf',
- 'double_exponential_lccdf',
- 'double_exponential_lcdf',
- 'double_exponential_lpdf',
- 'double_exponential_lupdf',
- 'double_exponential_rng',
- 'e',
- 'eigenvalues_sym',
- 'eigenvectors_sym',
- 'erf',
- 'erfc',
- 'exp',
- 'exp2',
- 'exp_mod_normal_cdf',
- 'exp_mod_normal_lccdf',
- 'exp_mod_normal_lcdf',
- 'exp_mod_normal_lpdf',
- 'exp_mod_normal_lupdf',
- 'exp_mod_normal_rng',
- 'expm1',
- 'exponential_cdf',
- 'exponential_lccdf',
- 'exponential_lcdf',
- 'exponential_lpdf',
- 'exponential_lupdf',
- 'exponential_rng',
- 'fabs',
- 'falling_factorial',
- 'fdim',
- 'floor',
- 'fma',
- 'fmax',
- 'fmin',
- 'fmod',
- 'frechet_cdf',
- 'frechet_lccdf',
- 'frechet_lcdf',
- 'frechet_lpdf',
- 'frechet_lupdf',
- 'frechet_rng',
- 'gamma_cdf',
- 'gamma_lccdf',
- 'gamma_lcdf',
- 'gamma_lpdf',
- 'gamma_lupdf',
- 'gamma_p',
- 'gamma_q',
- 'gamma_rng',
- 'gaussian_dlm_obs_lpdf',
- 'gaussian_dlm_obs_lupdf',
- 'generalized_inverse',
- 'get_imag',
- 'get_lp',
- 'get_real',
- 'gumbel_cdf',
- 'gumbel_lccdf',
- 'gumbel_lcdf',
- 'gumbel_lpdf',
- 'gumbel_lupdf',
- 'gumbel_rng',
- 'head',
- 'hmm_hidden_state_prob',
- 'hmm_latent_rng',
- 'hmm_marginal',
- 'hypergeometric_lpmf',
- 'hypergeometric_lupmf',
- 'hypergeometric_rng',
- 'hypot',
- 'identity_matrix',
- 'inc_beta',
- 'int_step',
- 'integrate_1d',
- 'integrate_ode',
- 'integrate_ode_adams',
- 'integrate_ode_bdf',
- 'integrate_ode_rk45',
- 'inv',
- 'inv_chi_square_cdf',
- 'inv_chi_square_lccdf',
- 'inv_chi_square_lcdf',
- 'inv_chi_square_lpdf',
- 'inv_chi_square_lupdf',
- 'inv_chi_square_rng',
- 'inv_cloglog',
- 'inv_erfc',
- 'inv_gamma_cdf',
- 'inv_gamma_lccdf',
- 'inv_gamma_lcdf',
- 'inv_gamma_lpdf',
- 'inv_gamma_lupdf',
- 'inv_gamma_rng',
- 'inv_logit',
- 'inv_Phi',
- 'inv_sqrt',
- 'inv_square',
- 'inv_wishart_lpdf',
- 'inv_wishart_lupdf',
- 'inv_wishart_rng',
- 'inverse',
- 'inverse_spd',
- 'is_inf',
- 'is_nan',
- 'lambert_w0',
- 'lambert_wm1',
- 'lbeta',
- 'lchoose',
- 'ldexp',
- 'lgamma',
- 'linspaced_array',
- 'linspaced_int_array',
- 'linspaced_row_vector',
- 'linspaced_vector',
- 'lkj_corr_cholesky_lpdf',
- 'lkj_corr_cholesky_lupdf',
- 'lkj_corr_cholesky_rng',
- 'lkj_corr_lpdf',
- 'lkj_corr_lupdf',
- 'lkj_corr_rng',
- 'lmgamma',
- 'lmultiply',
- 'log',
- 'log10',
- 'log1m',
- 'log1m_exp',
- 'log1m_inv_logit',
- 'log1p',
- 'log1p_exp',
- 'log2',
- 'log_determinant',
- 'log_diff_exp',
- 'log_falling_factorial',
- 'log_inv_logit',
- 'log_inv_logit_diff',
- 'log_mix',
- 'log_modified_bessel_first_kind',
- 'log_rising_factorial',
- 'log_softmax',
- 'log_sum_exp',
- 'logistic_cdf',
- 'logistic_lccdf',
- 'logistic_lcdf',
- 'logistic_lpdf',
- 'logistic_lupdf',
- 'logistic_rng',
- 'logit',
- 'loglogistic_cdf',
- 'loglogistic_lpdf',
- 'loglogistic_rng',
- 'lognormal_cdf',
- 'lognormal_lccdf',
- 'lognormal_lcdf',
- 'lognormal_lpdf',
- 'lognormal_lupdf',
- 'lognormal_rng',
- 'machine_precision',
- 'map_rect',
- 'matrix_exp',
- 'matrix_exp_multiply',
- 'matrix_power',
- 'max',
- 'mdivide_left_spd',
- 'mdivide_left_tri_low',
- 'mdivide_right_spd',
- 'mdivide_right_tri_low',
- 'mean',
- 'min',
- 'modified_bessel_first_kind',
- 'modified_bessel_second_kind',
- 'multi_gp_cholesky_lpdf',
- 'multi_gp_cholesky_lupdf',
- 'multi_gp_lpdf',
- 'multi_gp_lupdf',
- 'multi_normal_cholesky_lpdf',
- 'multi_normal_cholesky_lupdf',
- 'multi_normal_cholesky_rng',
- 'multi_normal_lpdf',
- 'multi_normal_lupdf',
- 'multi_normal_prec_lpdf',
- 'multi_normal_prec_lupdf',
- 'multi_normal_rng',
- 'multi_student_t_lpdf',
- 'multi_student_t_lupdf',
- 'multi_student_t_rng',
- 'multinomial_logit_lpmf',
- 'multinomial_logit_lupmf',
- 'multinomial_logit_rng',
- 'multinomial_lpmf',
- 'multinomial_lupmf',
- 'multinomial_rng',
- 'multiply_log',
- 'multiply_lower_tri_self_transpose',
- 'neg_binomial_2_cdf',
- 'neg_binomial_2_lccdf',
- 'neg_binomial_2_lcdf',
- 'neg_binomial_2_log_glm_lpmf',
- 'neg_binomial_2_log_glm_lupmf',
- 'neg_binomial_2_log_lpmf',
- 'neg_binomial_2_log_lupmf',
- 'neg_binomial_2_log_rng',
- 'neg_binomial_2_lpmf',
- 'neg_binomial_2_lupmf',
- 'neg_binomial_2_rng',
- 'neg_binomial_cdf',
- 'neg_binomial_lccdf',
- 'neg_binomial_lcdf',
- 'neg_binomial_lpmf',
- 'neg_binomial_lupmf',
- 'neg_binomial_rng',
- 'negative_infinity',
- 'norm',
- 'normal_cdf',
- 'normal_id_glm_lpdf',
- 'normal_id_glm_lupdf',
- 'normal_lccdf',
- 'normal_lcdf',
- 'normal_lpdf',
- 'normal_lupdf',
- 'normal_rng',
- 'not_a_number',
- 'num_elements',
- 'ode_adams',
- 'ode_adams_tol',
- 'ode_adjoint_tol_ctl',
- 'ode_bdf',
- 'ode_bdf_tol',
- 'ode_ckrk',
- 'ode_ckrk_tol',
- 'ode_rk45',
- 'ode_rk45_tol',
- 'one_hot_array',
- 'one_hot_int_array',
- 'one_hot_row_vector',
- 'one_hot_vector',
- 'ones_array',
- 'ones_int_array',
- 'ones_row_vector',
- 'ones_vector',
- 'ordered_logistic_glm_lpmf',
- 'ordered_logistic_glm_lupmf',
- 'ordered_logistic_lpmf',
- 'ordered_logistic_lupmf',
- 'ordered_logistic_rng',
- 'ordered_probit_lpmf',
- 'ordered_probit_lupmf',
- 'ordered_probit_rng',
- 'owens_t',
- 'pareto_cdf',
- 'pareto_lccdf',
- 'pareto_lcdf',
- 'pareto_lpdf',
- 'pareto_lupdf',
- 'pareto_rng',
- 'pareto_type_2_cdf',
- 'pareto_type_2_lccdf',
- 'pareto_type_2_lcdf',
- 'pareto_type_2_lpdf',
- 'pareto_type_2_lupdf',
- 'pareto_type_2_rng',
- 'Phi',
- 'Phi_approx',
- 'pi',
- 'poisson_cdf',
- 'poisson_lccdf',
- 'poisson_lcdf',
- 'poisson_log_glm_lpmf',
- 'poisson_log_glm_lupmf',
- 'poisson_log_lpmf',
- 'poisson_log_lupmf',
- 'poisson_log_rng',
- 'poisson_lpmf',
- 'poisson_lupmf',
- 'poisson_rng',
- 'polar',
- 'positive_infinity',
- 'pow',
- 'print',
- 'prod',
- 'proj',
- 'qr_Q',
- 'qr_R',
- 'qr_thin_Q',
- 'qr_thin_R',
- 'quad_form',
- 'quad_form_diag',
- 'quad_form_sym',
- 'quantile',
- 'rank',
- 'rayleigh_cdf',
- 'rayleigh_lccdf',
- 'rayleigh_lcdf',
- 'rayleigh_lpdf',
- 'rayleigh_lupdf',
- 'rayleigh_rng',
- 'reduce_sum',
- 'reject',
- 'rep_array',
- 'rep_matrix',
- 'rep_row_vector',
- 'rep_vector',
- 'reverse',
- 'rising_factorial',
- 'round',
- 'row',
- 'rows',
- 'rows_dot_product',
- 'rows_dot_self',
- 'scale_matrix_exp_multiply',
- 'scaled_inv_chi_square_cdf',
- 'scaled_inv_chi_square_lccdf',
- 'scaled_inv_chi_square_lcdf',
- 'scaled_inv_chi_square_lpdf',
- 'scaled_inv_chi_square_lupdf',
- 'scaled_inv_chi_square_rng',
- 'sd',
- 'segment',
- 'sin',
- 'singular_values',
- 'sinh',
- 'size',
- 'skew_double_exponential_cdf',
- 'skew_double_exponential_lccdf',
- 'skew_double_exponential_lcdf',
- 'skew_double_exponential_lpdf',
- 'skew_double_exponential_lupdf',
- 'skew_double_exponential_rng',
- 'skew_normal_cdf',
- 'skew_normal_lccdf',
- 'skew_normal_lcdf',
- 'skew_normal_lpdf',
- 'skew_normal_lupdf',
- 'skew_normal_rng',
- 'softmax',
- 'sort_asc',
- 'sort_desc',
- 'sort_indices_asc',
- 'sort_indices_desc',
- 'sqrt',
- 'sqrt2',
- 'square',
- 'squared_distance',
- 'std_normal_cdf',
- 'std_normal_lccdf',
- 'std_normal_lcdf',
- 'std_normal_lpdf',
- 'std_normal_lupdf',
- 'std_normal_rng',
- 'step',
- 'student_t_cdf',
- 'student_t_lccdf',
- 'student_t_lcdf',
- 'student_t_lpdf',
- 'student_t_lupdf',
- 'student_t_rng',
- 'sub_col',
- 'sub_row',
- 'sum',
- 'svd_U',
- 'svd_V',
- 'symmetrize_from_lower_tri',
- 'tail',
- 'tan',
- 'tanh',
- 'target',
- 'tcrossprod',
- 'tgamma',
- 'to_array_1d',
- 'to_array_2d',
- 'to_complex',
- 'to_matrix',
- 'to_row_vector',
- 'to_vector',
- 'trace',
- 'trace_gen_quad_form',
- 'trace_quad_form',
- 'trigamma',
- 'trunc',
- 'uniform_cdf',
- 'uniform_lccdf',
- 'uniform_lcdf',
- 'uniform_lpdf',
- 'uniform_lupdf',
- 'uniform_rng',
- 'uniform_simplex',
- 'variance',
- 'von_mises_cdf',
- 'von_mises_lccdf',
- 'von_mises_lcdf',
- 'von_mises_lpdf',
- 'von_mises_lupdf',
- 'von_mises_rng',
- 'weibull_cdf',
- 'weibull_lccdf',
- 'weibull_lcdf',
- 'weibull_lpdf',
- 'weibull_lupdf',
- 'weibull_rng',
- 'wiener_lpdf',
- 'wiener_lupdf',
- 'wishart_lpdf',
- 'wishart_lupdf',
- 'wishart_rng',
- 'zeros_array',
- 'zeros_int_array',
- 'zeros_row_vector'
-)
-
-DISTRIBUTIONS = (
- 'bernoulli',
- 'bernoulli_logit',
- 'bernoulli_logit_glm',
- 'beta',
- 'beta_binomial',
- 'binomial',
- 'binomial_logit',
- 'categorical',
- 'categorical_logit',
- 'categorical_logit_glm',
- 'cauchy',
- 'chi_square',
- 'dirichlet',
- 'discrete_range',
- 'double_exponential',
- 'exp_mod_normal',
- 'exponential',
- 'frechet',
- 'gamma',
- 'gaussian_dlm_obs',
- 'gumbel',
- 'hypergeometric',
- 'inv_chi_square',
- 'inv_gamma',
- 'inv_wishart',
- 'lkj_corr',
- 'lkj_corr_cholesky',
- 'logistic',
- 'loglogistic',
- 'lognormal',
- 'multi_gp',
- 'multi_gp_cholesky',
- 'multi_normal',
- 'multi_normal_cholesky',
- 'multi_normal_prec',
- 'multi_student_t',
- 'multinomial',
- 'multinomial_logit',
- 'neg_binomial',
- 'neg_binomial_2',
- 'neg_binomial_2_log',
- 'neg_binomial_2_log_glm',
- 'normal',
- 'normal_id_glm',
- 'ordered_logistic',
- 'ordered_logistic_glm',
- 'ordered_probit',
- 'pareto',
- 'pareto_type_2',
- 'poisson',
- 'poisson_log',
- 'poisson_log_glm',
- 'rayleigh',
- 'scaled_inv_chi_square',
- 'skew_double_exponential',
- 'skew_normal',
- 'std_normal',
- 'student_t',
- 'uniform',
- 'von_mises',
- 'weibull',
- 'wiener',
- 'wishart',
-)
-
-RESERVED = (
- 'repeat',
- 'until',
- 'then',
- 'true',
- 'false',
- 'var',
- 'struct',
- 'typedef',
- 'export',
- 'auto',
- 'extern',
- 'var',
- 'static',
-)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_stata_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_stata_builtins.py
deleted file mode 100644
index 16251cf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_stata_builtins.py
+++ /dev/null
@@ -1,457 +0,0 @@
-"""
- pygments.lexers._stata_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtins for Stata
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-builtins_special = (
- "if", "in", "using", "replace", "by", "gen", "generate"
-)
-
-builtins_base = (
- "if", "else", "in", "foreach", "for", "forv", "forva",
- "forval", "forvalu", "forvalue", "forvalues", "by", "bys",
- "bysort", "quietly", "qui", "about", "ac",
- "ac_7", "acprplot", "acprplot_7", "adjust", "ado", "adopath",
- "adoupdate", "alpha", "ameans", "an", "ano", "anov", "anova",
- "anova_estat", "anova_terms", "anovadef", "aorder", "ap", "app",
- "appe", "appen", "append", "arch", "arch_dr", "arch_estat",
- "arch_p", "archlm", "areg", "areg_p", "args", "arima",
- "arima_dr", "arima_estat", "arima_p", "as", "asmprobit",
- "asmprobit_estat", "asmprobit_lf", "asmprobit_mfx__dlg",
- "asmprobit_p", "ass", "asse", "asser", "assert", "avplot",
- "avplot_7", "avplots", "avplots_7", "bcskew0", "bgodfrey",
- "binreg", "bip0_lf", "biplot", "bipp_lf", "bipr_lf",
- "bipr_p", "biprobit", "bitest", "bitesti", "bitowt", "blogit",
- "bmemsize", "boot", "bootsamp", "bootstrap", "bootstrap_8",
- "boxco_l", "boxco_p", "boxcox", "boxcox_6", "boxcox_p",
- "bprobit", "br", "break", "brier", "bro", "brow", "brows",
- "browse", "brr", "brrstat", "bs", "bs_7", "bsampl_w",
- "bsample", "bsample_7", "bsqreg", "bstat", "bstat_7", "bstat_8",
- "bstrap", "bstrap_7", "ca", "ca_estat", "ca_p", "cabiplot",
- "camat", "canon", "canon_8", "canon_8_p", "canon_estat",
- "canon_p", "cap", "caprojection", "capt", "captu", "captur",
- "capture", "cat", "cc", "cchart", "cchart_7", "cci",
- "cd", "censobs_table", "centile", "cf", "char", "chdir",
- "checkdlgfiles", "checkestimationsample", "checkhlpfiles",
- "checksum", "chelp", "ci", "cii", "cl", "class", "classutil",
- "clear", "cli", "clis", "clist", "clo", "clog", "clog_lf",
- "clog_p", "clogi", "clogi_sw", "clogit", "clogit_lf",
- "clogit_p", "clogitp", "clogl_sw", "cloglog", "clonevar",
- "clslistarray", "cluster", "cluster_measures", "cluster_stop",
- "cluster_tree", "cluster_tree_8", "clustermat", "cmdlog",
- "cnr", "cnre", "cnreg", "cnreg_p", "cnreg_sw", "cnsreg",
- "codebook", "collaps4", "collapse", "colormult_nb",
- "colormult_nw", "compare", "compress", "conf", "confi",
- "confir", "confirm", "conren", "cons", "const", "constr",
- "constra", "constrai", "constrain", "constraint", "continue",
- "contract", "copy", "copyright", "copysource", "cor", "corc",
- "corr", "corr2data", "corr_anti", "corr_kmo", "corr_smc",
- "corre", "correl", "correla", "correlat", "correlate",
- "corrgram", "cou", "coun", "count", "cox", "cox_p", "cox_sw",
- "coxbase", "coxhaz", "coxvar", "cprplot", "cprplot_7",
- "crc", "cret", "cretu", "cretur", "creturn", "cross", "cs",
- "cscript", "cscript_log", "csi", "ct", "ct_is", "ctset",
- "ctst_5", "ctst_st", "cttost", "cumsp", "cumsp_7", "cumul",
- "cusum", "cusum_7", "cutil", "d", "datasig", "datasign",
- "datasigna", "datasignat", "datasignatu", "datasignatur",
- "datasignature", "datetof", "db", "dbeta", "de", "dec",
- "deco", "decod", "decode", "deff", "des", "desc", "descr",
- "descri", "describ", "describe", "destring", "dfbeta",
- "dfgls", "dfuller", "di", "di_g", "dir", "dirstats", "dis",
- "discard", "disp", "disp_res", "disp_s", "displ", "displa",
- "display", "distinct", "do", "doe", "doed", "doedi",
- "doedit", "dotplot", "dotplot_7", "dprobit", "drawnorm",
- "drop", "ds", "ds_util", "dstdize", "duplicates", "durbina",
- "dwstat", "dydx", "e", "ed", "edi", "edit", "egen",
- "eivreg", "emdef", "end", "en", "enc", "enco", "encod", "encode",
- "eq", "erase", "ereg", "ereg_lf", "ereg_p", "ereg_sw",
- "ereghet", "ereghet_glf", "ereghet_glf_sh", "ereghet_gp",
- "ereghet_ilf", "ereghet_ilf_sh", "ereghet_ip", "eret",
- "eretu", "eretur", "ereturn", "err", "erro", "error", "est",
- "est_cfexist", "est_cfname", "est_clickable", "est_expand",
- "est_hold", "est_table", "est_unhold", "est_unholdok",
- "estat", "estat_default", "estat_summ", "estat_vce_only",
- "esti", "estimates", "etodow", "etof", "etomdy", "ex",
- "exi", "exit", "expand", "expandcl", "fac", "fact", "facto",
- "factor", "factor_estat", "factor_p", "factor_pca_rotated",
- "factor_rotate", "factormat", "fcast", "fcast_compute",
- "fcast_graph", "fdades", "fdadesc", "fdadescr", "fdadescri",
- "fdadescrib", "fdadescribe", "fdasav", "fdasave", "fdause",
- "fh_st", "open", "read", "close",
- "file", "filefilter", "fillin", "find_hlp_file", "findfile",
- "findit", "findit_7", "fit", "fl", "fli", "flis", "flist",
- "for5_0", "form", "forma", "format", "fpredict", "frac_154",
- "frac_adj", "frac_chk", "frac_cox", "frac_ddp", "frac_dis",
- "frac_dv", "frac_in", "frac_mun", "frac_pp", "frac_pq",
- "frac_pv", "frac_wgt", "frac_xo", "fracgen", "fracplot",
- "fracplot_7", "fracpoly", "fracpred", "fron_ex", "fron_hn",
- "fron_p", "fron_tn", "fron_tn2", "frontier", "ftodate", "ftoe",
- "ftomdy", "ftowdate", "g", "gamhet_glf", "gamhet_gp",
- "gamhet_ilf", "gamhet_ip", "gamma", "gamma_d2", "gamma_p",
- "gamma_sw", "gammahet", "gdi_hexagon", "gdi_spokes", "ge",
- "gen", "gene", "gener", "genera", "generat", "generate",
- "genrank", "genstd", "genvmean", "gettoken", "gl", "gladder",
- "gladder_7", "glim_l01", "glim_l02", "glim_l03", "glim_l04",
- "glim_l05", "glim_l06", "glim_l07", "glim_l08", "glim_l09",
- "glim_l10", "glim_l11", "glim_l12", "glim_lf", "glim_mu",
- "glim_nw1", "glim_nw2", "glim_nw3", "glim_p", "glim_v1",
- "glim_v2", "glim_v3", "glim_v4", "glim_v5", "glim_v6",
- "glim_v7", "glm", "glm_6", "glm_p", "glm_sw", "glmpred", "glo",
- "glob", "globa", "global", "glogit", "glogit_8", "glogit_p",
- "gmeans", "gnbre_lf", "gnbreg", "gnbreg_5", "gnbreg_p",
- "gomp_lf", "gompe_sw", "gomper_p", "gompertz", "gompertzhet",
- "gomphet_glf", "gomphet_glf_sh", "gomphet_gp", "gomphet_ilf",
- "gomphet_ilf_sh", "gomphet_ip", "gphdot", "gphpen",
- "gphprint", "gprefs", "gprobi_p", "gprobit", "gprobit_8", "gr",
- "gr7", "gr_copy", "gr_current", "gr_db", "gr_describe",
- "gr_dir", "gr_draw", "gr_draw_replay", "gr_drop", "gr_edit",
- "gr_editviewopts", "gr_example", "gr_example2", "gr_export",
- "gr_print", "gr_qscheme", "gr_query", "gr_read", "gr_rename",
- "gr_replay", "gr_save", "gr_set", "gr_setscheme", "gr_table",
- "gr_undo", "gr_use", "graph", "graph7", "grebar", "greigen",
- "greigen_7", "greigen_8", "grmeanby", "grmeanby_7",
- "gs_fileinfo", "gs_filetype", "gs_graphinfo", "gs_stat",
- "gsort", "gwood", "h", "hadimvo", "hareg", "hausman",
- "haver", "he", "heck_d2", "heckma_p", "heckman", "heckp_lf",
- "heckpr_p", "heckprob", "hel", "help", "hereg", "hetpr_lf",
- "hetpr_p", "hetprob", "hettest", "hexdump", "hilite",
- "hist", "hist_7", "histogram", "hlogit", "hlu", "hmeans",
- "hotel", "hotelling", "hprobit", "hreg", "hsearch", "icd9",
- "icd9_ff", "icd9p", "iis", "impute", "imtest", "inbase",
- "include", "inf", "infi", "infil", "infile", "infix", "inp",
- "inpu", "input", "ins", "insheet", "insp", "inspe",
- "inspec", "inspect", "integ", "inten", "intreg", "intreg_7",
- "intreg_p", "intrg2_ll", "intrg_ll", "intrg_ll2", "ipolate",
- "iqreg", "ir", "irf", "irf_create", "irfm", "iri", "is_svy",
- "is_svysum", "isid", "istdize", "ivprob_1_lf", "ivprob_lf",
- "ivprobit", "ivprobit_p", "ivreg", "ivreg_footnote",
- "ivtob_1_lf", "ivtob_lf", "ivtobit", "ivtobit_p", "jackknife",
- "jacknife", "jknife", "jknife_6", "jknife_8", "jkstat",
- "joinby", "kalarma1", "kap", "kap_3", "kapmeier", "kappa",
- "kapwgt", "kdensity", "kdensity_7", "keep", "ksm", "ksmirnov",
- "ktau", "kwallis", "l", "la", "lab", "labe", "label",
- "labelbook", "ladder", "levels", "levelsof", "leverage",
- "lfit", "lfit_p", "li", "lincom", "line", "linktest",
- "lis", "list", "lloghet_glf", "lloghet_glf_sh", "lloghet_gp",
- "lloghet_ilf", "lloghet_ilf_sh", "lloghet_ip", "llogi_sw",
- "llogis_p", "llogist", "llogistic", "llogistichet",
- "lnorm_lf", "lnorm_sw", "lnorma_p", "lnormal", "lnormalhet",
- "lnormhet_glf", "lnormhet_glf_sh", "lnormhet_gp",
- "lnormhet_ilf", "lnormhet_ilf_sh", "lnormhet_ip", "lnskew0",
- "loadingplot", "loc", "loca", "local", "log", "logi",
- "logis_lf", "logistic", "logistic_p", "logit", "logit_estat",
- "logit_p", "loglogs", "logrank", "loneway", "lookfor",
- "lookup", "lowess", "lowess_7", "lpredict", "lrecomp", "lroc",
- "lroc_7", "lrtest", "ls", "lsens", "lsens_7", "lsens_x",
- "lstat", "ltable", "ltable_7", "ltriang", "lv", "lvr2plot",
- "lvr2plot_7", "m", "ma", "mac", "macr", "macro", "makecns",
- "man", "manova", "manova_estat", "manova_p", "manovatest",
- "mantel", "mark", "markin", "markout", "marksample", "mat",
- "mat_capp", "mat_order", "mat_put_rr", "mat_rapp", "mata",
- "mata_clear", "mata_describe", "mata_drop", "mata_matdescribe",
- "mata_matsave", "mata_matuse", "mata_memory", "mata_mlib",
- "mata_mosave", "mata_rename", "mata_which", "matalabel",
- "matcproc", "matlist", "matname", "matr", "matri",
- "matrix", "matrix_input__dlg", "matstrik", "mcc", "mcci",
- "md0_", "md1_", "md1debug_", "md2_", "md2debug_", "mds",
- "mds_estat", "mds_p", "mdsconfig", "mdslong", "mdsmat",
- "mdsshepard", "mdytoe", "mdytof", "me_derd", "mean",
- "means", "median", "memory", "memsize", "meqparse", "mer",
- "merg", "merge", "mfp", "mfx", "mhelp", "mhodds", "minbound",
- "mixed_ll", "mixed_ll_reparm", "mkassert", "mkdir",
- "mkmat", "mkspline", "ml", "ml_5", "ml_adjs", "ml_bhhhs",
- "ml_c_d", "ml_check", "ml_clear", "ml_cnt", "ml_debug",
- "ml_defd", "ml_e0", "ml_e0_bfgs", "ml_e0_cycle", "ml_e0_dfp",
- "ml_e0i", "ml_e1", "ml_e1_bfgs", "ml_e1_bhhh", "ml_e1_cycle",
- "ml_e1_dfp", "ml_e2", "ml_e2_cycle", "ml_ebfg0", "ml_ebfr0",
- "ml_ebfr1", "ml_ebh0q", "ml_ebhh0", "ml_ebhr0", "ml_ebr0i",
- "ml_ecr0i", "ml_edfp0", "ml_edfr0", "ml_edfr1", "ml_edr0i",
- "ml_eds", "ml_eer0i", "ml_egr0i", "ml_elf", "ml_elf_bfgs",
- "ml_elf_bhhh", "ml_elf_cycle", "ml_elf_dfp", "ml_elfi",
- "ml_elfs", "ml_enr0i", "ml_enrr0", "ml_erdu0", "ml_erdu0_bfgs",
- "ml_erdu0_bhhh", "ml_erdu0_bhhhq", "ml_erdu0_cycle",
- "ml_erdu0_dfp", "ml_erdu0_nrbfgs", "ml_exde", "ml_footnote",
- "ml_geqnr", "ml_grad0", "ml_graph", "ml_hbhhh", "ml_hd0",
- "ml_hold", "ml_init", "ml_inv", "ml_log", "ml_max",
- "ml_mlout", "ml_mlout_8", "ml_model", "ml_nb0", "ml_opt",
- "ml_p", "ml_plot", "ml_query", "ml_rdgrd", "ml_repor",
- "ml_s_e", "ml_score", "ml_searc", "ml_technique", "ml_unhold",
- "mleval", "mlf_", "mlmatbysum", "mlmatsum", "mlog", "mlogi",
- "mlogit", "mlogit_footnote", "mlogit_p", "mlopts", "mlsum",
- "mlvecsum", "mnl0_", "mor", "more", "mov", "move", "mprobit",
- "mprobit_lf", "mprobit_p", "mrdu0_", "mrdu1_", "mvdecode",
- "mvencode", "mvreg", "mvreg_estat", "n", "nbreg",
- "nbreg_al", "nbreg_lf", "nbreg_p", "nbreg_sw", "nestreg", "net",
- "newey", "newey_7", "newey_p", "news", "nl", "nl_7", "nl_9",
- "nl_9_p", "nl_p", "nl_p_7", "nlcom", "nlcom_p", "nlexp2",
- "nlexp2_7", "nlexp2a", "nlexp2a_7", "nlexp3", "nlexp3_7",
- "nlgom3", "nlgom3_7", "nlgom4", "nlgom4_7", "nlinit", "nllog3",
- "nllog3_7", "nllog4", "nllog4_7", "nlog_rd", "nlogit",
- "nlogit_p", "nlogitgen", "nlogittree", "nlpred", "no",
- "nobreak", "noi", "nois", "noisi", "noisil", "noisily", "note",
- "notes", "notes_dlg", "nptrend", "numlabel", "numlist", "odbc",
- "old_ver", "olo", "olog", "ologi", "ologi_sw", "ologit",
- "ologit_p", "ologitp", "on", "one", "onew", "onewa", "oneway",
- "op_colnm", "op_comp", "op_diff", "op_inv", "op_str", "opr",
- "opro", "oprob", "oprob_sw", "oprobi", "oprobi_p", "oprobit",
- "oprobitp", "opts_exclusive", "order", "orthog", "orthpoly",
- "ou", "out", "outf", "outfi", "outfil", "outfile", "outs",
- "outsh", "outshe", "outshee", "outsheet", "ovtest", "pac",
- "pac_7", "palette", "parse", "parse_dissim", "pause", "pca",
- "pca_8", "pca_display", "pca_estat", "pca_p", "pca_rotate",
- "pcamat", "pchart", "pchart_7", "pchi", "pchi_7", "pcorr",
- "pctile", "pentium", "pergram", "pergram_7", "permute",
- "permute_8", "personal", "peto_st", "pkcollapse", "pkcross",
- "pkequiv", "pkexamine", "pkexamine_7", "pkshape", "pksumm",
- "pksumm_7", "pl", "plo", "plot", "plugin", "pnorm",
- "pnorm_7", "poisgof", "poiss_lf", "poiss_sw", "poisso_p",
- "poisson", "poisson_estat", "post", "postclose", "postfile",
- "postutil", "pperron", "pr", "prais", "prais_e", "prais_e2",
- "prais_p", "predict", "predictnl", "preserve", "print",
- "pro", "prob", "probi", "probit", "probit_estat", "probit_p",
- "proc_time", "procoverlay", "procrustes", "procrustes_estat",
- "procrustes_p", "profiler", "prog", "progr", "progra",
- "program", "prop", "proportion", "prtest", "prtesti", "pwcorr",
- "pwd", "q", "s", "qby", "qbys", "qchi", "qchi_7", "qladder",
- "qladder_7", "qnorm", "qnorm_7", "qqplot", "qqplot_7", "qreg",
- "qreg_c", "qreg_p", "qreg_sw", "qu", "quadchk", "quantile",
- "quantile_7", "que", "quer", "query", "range", "ranksum",
- "ratio", "rchart", "rchart_7", "rcof", "recast", "reclink",
- "recode", "reg", "reg3", "reg3_p", "regdw", "regr", "regre",
- "regre_p2", "regres", "regres_p", "regress", "regress_estat",
- "regriv_p", "remap", "ren", "rena", "renam", "rename",
- "renpfix", "repeat", "replace", "report", "reshape",
- "restore", "ret", "retu", "retur", "return", "rm", "rmdir",
- "robvar", "roccomp", "roccomp_7", "roccomp_8", "rocf_lf",
- "rocfit", "rocfit_8", "rocgold", "rocplot", "rocplot_7",
- "roctab", "roctab_7", "rolling", "rologit", "rologit_p",
- "rot", "rota", "rotat", "rotate", "rotatemat", "rreg",
- "rreg_p", "ru", "run", "runtest", "rvfplot", "rvfplot_7",
- "rvpplot", "rvpplot_7", "sa", "safesum", "sample",
- "sampsi", "sav", "save", "savedresults", "saveold", "sc",
- "sca", "scal", "scala", "scalar", "scatter", "scm_mine",
- "sco", "scob_lf", "scob_p", "scobi_sw", "scobit", "scor",
- "score", "scoreplot", "scoreplot_help", "scree", "screeplot",
- "screeplot_help", "sdtest", "sdtesti", "se", "search",
- "separate", "seperate", "serrbar", "serrbar_7", "serset", "set",
- "set_defaults", "sfrancia", "sh", "she", "shel", "shell",
- "shewhart", "shewhart_7", "signestimationsample", "signrank",
- "signtest", "simul", "simul_7", "simulate", "simulate_8",
- "sktest", "sleep", "slogit", "slogit_d2", "slogit_p", "smooth",
- "snapspan", "so", "sor", "sort", "spearman", "spikeplot",
- "spikeplot_7", "spikeplt", "spline_x", "split", "sqreg",
- "sqreg_p", "sret", "sretu", "sretur", "sreturn", "ssc", "st",
- "st_ct", "st_hc", "st_hcd", "st_hcd_sh", "st_is", "st_issys",
- "st_note", "st_promo", "st_set", "st_show", "st_smpl",
- "st_subid", "stack", "statsby", "statsby_8", "stbase", "stci",
- "stci_7", "stcox", "stcox_estat", "stcox_fr", "stcox_fr_ll",
- "stcox_p", "stcox_sw", "stcoxkm", "stcoxkm_7", "stcstat",
- "stcurv", "stcurve", "stcurve_7", "stdes", "stem", "stepwise",
- "stereg", "stfill", "stgen", "stir", "stjoin", "stmc", "stmh",
- "stphplot", "stphplot_7", "stphtest", "stphtest_7",
- "stptime", "strate", "strate_7", "streg", "streg_sw", "streset",
- "sts", "sts_7", "stset", "stsplit", "stsum", "sttocc",
- "sttoct", "stvary", "stweib", "su", "suest", "suest_8",
- "sum", "summ", "summa", "summar", "summari", "summariz",
- "summarize", "sunflower", "sureg", "survcurv", "survsum",
- "svar", "svar_p", "svmat", "svy", "svy_disp", "svy_dreg",
- "svy_est", "svy_est_7", "svy_estat", "svy_get", "svy_gnbreg_p",
- "svy_head", "svy_header", "svy_heckman_p", "svy_heckprob_p",
- "svy_intreg_p", "svy_ivreg_p", "svy_logistic_p", "svy_logit_p",
- "svy_mlogit_p", "svy_nbreg_p", "svy_ologit_p", "svy_oprobit_p",
- "svy_poisson_p", "svy_probit_p", "svy_regress_p", "svy_sub",
- "svy_sub_7", "svy_x", "svy_x_7", "svy_x_p", "svydes",
- "svydes_8", "svygen", "svygnbreg", "svyheckman", "svyheckprob",
- "svyintreg", "svyintreg_7", "svyintrg", "svyivreg", "svylc",
- "svylog_p", "svylogit", "svymarkout", "svymarkout_8",
- "svymean", "svymlog", "svymlogit", "svynbreg", "svyolog",
- "svyologit", "svyoprob", "svyoprobit", "svyopts",
- "svypois", "svypois_7", "svypoisson", "svyprobit", "svyprobt",
- "svyprop", "svyprop_7", "svyratio", "svyreg", "svyreg_p",
- "svyregress", "svyset", "svyset_7", "svyset_8", "svytab",
- "svytab_7", "svytest", "svytotal", "sw", "sw_8", "swcnreg",
- "swcox", "swereg", "swilk", "swlogis", "swlogit",
- "swologit", "swoprbt", "swpois", "swprobit", "swqreg",
- "swtobit", "swweib", "symmetry", "symmi", "symplot",
- "symplot_7", "syntax", "sysdescribe", "sysdir", "sysuse",
- "szroeter", "ta", "tab", "tab1", "tab2", "tab_or", "tabd",
- "tabdi", "tabdis", "tabdisp", "tabi", "table", "tabodds",
- "tabodds_7", "tabstat", "tabu", "tabul", "tabula", "tabulat",
- "tabulate", "te", "tempfile", "tempname", "tempvar", "tes",
- "test", "testnl", "testparm", "teststd", "tetrachoric",
- "time_it", "timer", "tis", "tob", "tobi", "tobit", "tobit_p",
- "tobit_sw", "token", "tokeni", "tokeniz", "tokenize",
- "tostring", "total", "translate", "translator", "transmap",
- "treat_ll", "treatr_p", "treatreg", "trim", "trnb_cons",
- "trnb_mean", "trpoiss_d2", "trunc_ll", "truncr_p", "truncreg",
- "tsappend", "tset", "tsfill", "tsline", "tsline_ex",
- "tsreport", "tsrevar", "tsrline", "tsset", "tssmooth",
- "tsunab", "ttest", "ttesti", "tut_chk", "tut_wait", "tutorial",
- "tw", "tware_st", "two", "twoway", "twoway__fpfit_serset",
- "twoway__function_gen", "twoway__histogram_gen",
- "twoway__ipoint_serset", "twoway__ipoints_serset",
- "twoway__kdensity_gen", "twoway__lfit_serset",
- "twoway__normgen_gen", "twoway__pci_serset",
- "twoway__qfit_serset", "twoway__scatteri_serset",
- "twoway__sunflower_gen", "twoway_ksm_serset", "ty", "typ",
- "type", "typeof", "u", "unab", "unabbrev", "unabcmd",
- "update", "us", "use", "uselabel", "var", "var_mkcompanion",
- "var_p", "varbasic", "varfcast", "vargranger", "varirf",
- "varirf_add", "varirf_cgraph", "varirf_create", "varirf_ctable",
- "varirf_describe", "varirf_dir", "varirf_drop", "varirf_erase",
- "varirf_graph", "varirf_ograph", "varirf_rename", "varirf_set",
- "varirf_table", "varlist", "varlmar", "varnorm", "varsoc",
- "varstable", "varstable_w", "varstable_w2", "varwle",
- "vce", "vec", "vec_fevd", "vec_mkphi", "vec_p", "vec_p_w",
- "vecirf_create", "veclmar", "veclmar_w", "vecnorm",
- "vecnorm_w", "vecrank", "vecstable", "verinst", "vers",
- "versi", "versio", "version", "view", "viewsource", "vif",
- "vwls", "wdatetof", "webdescribe", "webseek", "webuse",
- "weib1_lf", "weib2_lf", "weib_lf", "weib_lf0", "weibhet_glf",
- "weibhet_glf_sh", "weibhet_glfa", "weibhet_glfa_sh",
- "weibhet_gp", "weibhet_ilf", "weibhet_ilf_sh", "weibhet_ilfa",
- "weibhet_ilfa_sh", "weibhet_ip", "weibu_sw", "weibul_p",
- "weibull", "weibull_c", "weibull_s", "weibullhet",
- "wh", "whelp", "whi", "which", "whil", "while", "wilc_st",
- "wilcoxon", "win", "wind", "windo", "window", "winexec",
- "wntestb", "wntestb_7", "wntestq", "xchart", "xchart_7",
- "xcorr", "xcorr_7", "xi", "xi_6", "xmlsav", "xmlsave",
- "xmluse", "xpose", "xsh", "xshe", "xshel", "xshell",
- "xt_iis", "xt_tis", "xtab_p", "xtabond", "xtbin_p",
- "xtclog", "xtcloglog", "xtcloglog_8", "xtcloglog_d2",
- "xtcloglog_pa_p", "xtcloglog_re_p", "xtcnt_p", "xtcorr",
- "xtdata", "xtdes", "xtfront_p", "xtfrontier", "xtgee",
- "xtgee_elink", "xtgee_estat", "xtgee_makeivar", "xtgee_p",
- "xtgee_plink", "xtgls", "xtgls_p", "xthaus", "xthausman",
- "xtht_p", "xthtaylor", "xtile", "xtint_p", "xtintreg",
- "xtintreg_8", "xtintreg_d2", "xtintreg_p", "xtivp_1",
- "xtivp_2", "xtivreg", "xtline", "xtline_ex", "xtlogit",
- "xtlogit_8", "xtlogit_d2", "xtlogit_fe_p", "xtlogit_pa_p",
- "xtlogit_re_p", "xtmixed", "xtmixed_estat", "xtmixed_p",
- "xtnb_fe", "xtnb_lf", "xtnbreg", "xtnbreg_pa_p",
- "xtnbreg_refe_p", "xtpcse", "xtpcse_p", "xtpois", "xtpoisson",
- "xtpoisson_d2", "xtpoisson_pa_p", "xtpoisson_refe_p", "xtpred",
- "xtprobit", "xtprobit_8", "xtprobit_d2", "xtprobit_re_p",
- "xtps_fe", "xtps_lf", "xtps_ren", "xtps_ren_8", "xtrar_p",
- "xtrc", "xtrc_p", "xtrchh", "xtrefe_p", "xtreg", "xtreg_be",
- "xtreg_fe", "xtreg_ml", "xtreg_pa_p", "xtreg_re",
- "xtregar", "xtrere_p", "xtset", "xtsf_ll", "xtsf_llti",
- "xtsum", "xttab", "xttest0", "xttobit", "xttobit_8",
- "xttobit_p", "xttrans", "yx", "yxview__barlike_draw",
- "yxview_area_draw", "yxview_bar_draw", "yxview_dot_draw",
- "yxview_dropline_draw", "yxview_function_draw",
- "yxview_iarrow_draw", "yxview_ilabels_draw",
- "yxview_normal_draw", "yxview_pcarrow_draw",
- "yxview_pcbarrow_draw", "yxview_pccapsym_draw",
- "yxview_pcscatter_draw", "yxview_pcspike_draw",
- "yxview_rarea_draw", "yxview_rbar_draw", "yxview_rbarm_draw",
- "yxview_rcap_draw", "yxview_rcapsym_draw",
- "yxview_rconnected_draw", "yxview_rline_draw",
- "yxview_rscatter_draw", "yxview_rspike_draw",
- "yxview_spike_draw", "yxview_sunflower_draw", "zap_s", "zinb",
- "zinb_llf", "zinb_plf", "zip", "zip_llf", "zip_p", "zip_plf",
- "zt_ct_5", "zt_hc_5", "zt_hcd_5", "zt_is_5", "zt_iss_5",
- "zt_sho_5", "zt_smp_5", "ztbase_5", "ztcox_5", "ztdes_5",
- "ztereg_5", "ztfill_5", "ztgen_5", "ztir_5", "ztjoin_5", "ztnb",
- "ztnb_p", "ztp", "ztp_p", "zts_5", "ztset_5", "ztspli_5",
- "ztsum_5", "zttoct_5", "ztvary_5", "ztweib_5"
-)
-
-
-
-builtins_functions = (
- "abbrev", "abs", "acos", "acosh", "asin", "asinh", "atan",
- "atan2", "atanh", "autocode", "betaden", "binomial",
- "binomialp", "binomialtail", "binormal", "bofd",
- "byteorder", "c", "_caller", "cauchy", "cauchyden",
- "cauchytail", "Cdhms", "ceil", "char", "chi2", "chi2den",
- "chi2tail", "Chms", "chop", "cholesky", "clip", "Clock",
- "clock", "cloglog", "Cmdyhms", "Cofc", "cofC", "Cofd", "cofd",
- "coleqnumb", "collatorlocale", "collatorversion",
- "colnfreeparms", "colnumb", "colsof", "comb", "cond", "corr",
- "cos", "cosh", "daily", "date", "day", "det", "dgammapda",
- "dgammapdada", "dgammapdadx", "dgammapdx", "dgammapdxdx",
- "dhms", "diag", "diag0cnt", "digamma", "dofb", "dofC", "dofc",
- "dofh", "dofm", "dofq", "dofw", "dofy", "dow", "doy",
- "dunnettprob", "e", "el", "esample", "epsdouble", "epsfloat",
- "exp", "expm1", "exponential", "exponentialden",
- "exponentialtail", "F", "Fden", "fileexists", "fileread",
- "filereaderror", "filewrite", "float", "floor", "fmtwidth",
- "frval", "_frval", "Ftail", "gammaden", "gammap", "gammaptail",
- "get", "hadamard", "halfyear", "halfyearly", "has_eprop", "hh",
- "hhC", "hms", "hofd", "hours", "hypergeometric",
- "hypergeometricp", "I", "ibeta", "ibetatail", "igaussian",
- "igaussianden", "igaussiantail", "indexnot", "inlist",
- "inrange", "int", "inv", "invbinomial", "invbinomialtail",
- "invcauchy", "invcauchytail", "invchi2", "invchi2tail",
- "invcloglog", "invdunnettprob", "invexponential",
- "invexponentialtail", "invF", "invFtail", "invgammap",
- "invgammaptail", "invibeta", "invibetatail", "invigaussian",
- "invigaussiantail", "invlaplace", "invlaplacetail",
- "invlogisticp", "invlogisticsp", "invlogisticmsp",
- "invlogistictailp", "invlogistictailsp", "invlogistictailmsp",
- "invlogit", "invnbinomial", "invnbinomialtail", "invnchi2",
- "invnchi2tail", "invnF", "invnFtail", "invnibeta",
- "invnormal", "invnt", "invnttail", "invpoisson",
- "invpoissontail", "invsym", "invt", "invttail", "invtukeyprob",
- "invweibullabp", "invweibullabgp", "invweibullphabp",
- "invweibullphabgp", "invweibullphtailabp",
- "invweibullphtailabgp", "invweibulltailabp",
- "invweibulltailabgp", "irecode", "issymmetric", "J", "laplace",
- "laplaceden", "laplacetail", "ln", "ln1m", "ln1p", "lncauchyden",
- "lnfactorial", "lngamma", "lnigammaden", "lnigaussianden",
- "lniwishartden", "lnlaplaceden", "lnmvnormalden", "lnnormal",
- "lnnormalden", "lnnormaldenxs", "lnnormaldenxms", "lnwishartden",
- "log", "log10", "log1m", "log1p", "logisticx", "logisticsx",
- "logisticmsx", "logisticdenx", "logisticdensx", "logisticdenmsx",
- "logistictailx", "logistictailsx", "logistictailmsx", "logit",
- "matmissing", "matrix", "matuniform", "max", "maxbyte",
- "maxdouble", "maxfloat", "maxint", "maxlong", "mdy", "mdyhms",
- "mi", "min", "minbyte", "mindouble", "minfloat", "minint",
- "minlong", "minutes", "missing", "mm", "mmC", "mod", "mofd",
- "month", "monthly", "mreldif", "msofhours", "msofminutes",
- "msofseconds", "nbetaden", "nbinomial", "nbinomialp",
- "nbinomialtail", "nchi2", "nchi2den", "nchi2tail", "nF",
- "nFden", "nFtail", "nibeta", "normal", "normalden",
- "normaldenxs", "normaldenxms", "npnchi2", "npnF", "npnt",
- "nt", "ntden", "nttail", "nullmat", "plural", "plurals1",
- "poisson", "poissonp", "poissontail", "qofd", "quarter",
- "quarterly", "r", "rbeta", "rbinomial", "rcauchy", "rchi2",
- "recode", "real", "regexm", "regexr", "regexs", "reldif",
- "replay", "return", "rexponential", "rgamma", "rhypergeometric",
- "rigaussian", "rlaplace", "rlogistic", "rlogistics",
- "rlogisticms", "rnbinomial", "rnormal", "rnormalm", "rnormalms",
- "round", "roweqnumb", "rownfreeparms", "rownumb", "rowsof",
- "rpoisson", "rt", "runiform", "runiformab", "runiformint",
- "rweibullab", "rweibullabg", "rweibullphab", "rweibullphabg",
- "s", "scalar", "seconds", "sign", "sin", "sinh",
- "smallestdouble", "soundex", "soundex_nara", "sqrt", "ss",
- "ssC", "strcat", "strdup", "string", "stringns", "stritrim",
- "strlen", "strlower", "strltrim", "strmatch", "strofreal",
- "strofrealns", "strpos", "strproper", "strreverse", "strrpos",
- "strrtrim", "strtoname", "strtrim", "strupper", "subinstr",
- "subinword", "substr", "sum", "sweep", "t", "tan", "tanh",
- "tC", "tc", "td", "tden", "th", "tin", "tm", "tobytes", "tq",
- "trace", "trigamma", "trunc", "ttail", "tukeyprob", "tw",
- "twithin", "uchar", "udstrlen", "udsubstr", "uisdigit",
- "uisletter", "ustrcompare", "ustrfix", "ustrfrom",
- "ustrinvalidcnt", "ustrleft", "ustrlen", "ustrlower",
- "ustrltrim", "ustrnormalize", "ustrpos", "ustrregexm",
- "ustrregexra", "ustrregexrf", "ustrregexs", "ustrreverse",
- "ustrright", "ustrrpos", "ustrrtrim", "ustrsortkey",
- "ustrtitle", "ustrto", "ustrtohex", "ustrtoname",
- "ustrtrim", "ustrunescape", "ustrupper", "ustrword",
- "ustrwordcount", "usubinstr", "usubstr", "vec", "vecdiag",
- "week", "weekly", "weibullabx", "weibullabgx", "weibulldenabx",
- "weibulldenabgx", "weibullphabx", "weibullphabgx",
- "weibullphdenabx", "weibullphdenabgx", "weibullphtailabx",
- "weibullphtailabgx", "weibulltailabx", "weibulltailabgx",
- "wofd", "word", "wordbreaklocale", "wordcount",
- "year", "yearly", "yh", "ym", "yofd", "yq", "yw"
-)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_tsql_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_tsql_builtins.py
deleted file mode 100644
index 96b16bf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_tsql_builtins.py
+++ /dev/null
@@ -1,1003 +0,0 @@
-"""
- pygments.lexers._tsql_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- These are manually translated lists from https://msdn.microsoft.com.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# See https://msdn.microsoft.com/en-us/library/ms174986.aspx.
-OPERATORS = (
- '!<',
- '!=',
- '!>',
- '<',
- '<=',
- '<>',
- '=',
- '>',
- '>=',
- '+',
- '+=',
- '-',
- '-=',
- '*',
- '*=',
- '/',
- '/=',
- '%',
- '%=',
- '&',
- '&=',
- '|',
- '|=',
- '^',
- '^=',
- '~',
- '::',
-)
-
-OPERATOR_WORDS = (
- 'all',
- 'and',
- 'any',
- 'between',
- 'except',
- 'exists',
- 'in',
- 'intersect',
- 'like',
- 'not',
- 'or',
- 'some',
- 'union',
-)
-
-_KEYWORDS_SERVER = (
- 'add',
- 'all',
- 'alter',
- 'and',
- 'any',
- 'as',
- 'asc',
- 'authorization',
- 'backup',
- 'begin',
- 'between',
- 'break',
- 'browse',
- 'bulk',
- 'by',
- 'cascade',
- 'case',
- 'catch',
- 'check',
- 'checkpoint',
- 'close',
- 'clustered',
- 'coalesce',
- 'collate',
- 'column',
- 'commit',
- 'compute',
- 'constraint',
- 'contains',
- 'containstable',
- 'continue',
- 'convert',
- 'create',
- 'cross',
- 'current',
- 'current_date',
- 'current_time',
- 'current_timestamp',
- 'current_user',
- 'cursor',
- 'database',
- 'dbcc',
- 'deallocate',
- 'declare',
- 'default',
- 'delete',
- 'deny',
- 'desc',
- 'disk',
- 'distinct',
- 'distributed',
- 'double',
- 'drop',
- 'dump',
- 'else',
- 'end',
- 'errlvl',
- 'escape',
- 'except',
- 'exec',
- 'execute',
- 'exists',
- 'exit',
- 'external',
- 'fetch',
- 'file',
- 'fillfactor',
- 'for',
- 'foreign',
- 'freetext',
- 'freetexttable',
- 'from',
- 'full',
- 'function',
- 'goto',
- 'grant',
- 'group',
- 'having',
- 'holdlock',
- 'identity',
- 'identity_insert',
- 'identitycol',
- 'if',
- 'in',
- 'index',
- 'inner',
- 'insert',
- 'intersect',
- 'into',
- 'is',
- 'join',
- 'key',
- 'kill',
- 'left',
- 'like',
- 'lineno',
- 'load',
- 'merge',
- 'national',
- 'nocheck',
- 'nonclustered',
- 'not',
- 'null',
- 'nullif',
- 'of',
- 'off',
- 'offsets',
- 'on',
- 'open',
- 'opendatasource',
- 'openquery',
- 'openrowset',
- 'openxml',
- 'option',
- 'or',
- 'order',
- 'outer',
- 'over',
- 'percent',
- 'pivot',
- 'plan',
- 'precision',
- 'primary',
- 'print',
- 'proc',
- 'procedure',
- 'public',
- 'raiserror',
- 'read',
- 'readtext',
- 'reconfigure',
- 'references',
- 'replication',
- 'restore',
- 'restrict',
- 'return',
- 'revert',
- 'revoke',
- 'right',
- 'rollback',
- 'rowcount',
- 'rowguidcol',
- 'rule',
- 'save',
- 'schema',
- 'securityaudit',
- 'select',
- 'semantickeyphrasetable',
- 'semanticsimilaritydetailstable',
- 'semanticsimilaritytable',
- 'session_user',
- 'set',
- 'setuser',
- 'shutdown',
- 'some',
- 'statistics',
- 'system_user',
- 'table',
- 'tablesample',
- 'textsize',
- 'then',
- 'throw',
- 'to',
- 'top',
- 'tran',
- 'transaction',
- 'trigger',
- 'truncate',
- 'try',
- 'try_convert',
- 'tsequal',
- 'union',
- 'unique',
- 'unpivot',
- 'update',
- 'updatetext',
- 'use',
- 'user',
- 'values',
- 'varying',
- 'view',
- 'waitfor',
- 'when',
- 'where',
- 'while',
- 'with',
- 'within',
- 'writetext',
-)
-
-_KEYWORDS_FUTURE = (
- 'absolute',
- 'action',
- 'admin',
- 'after',
- 'aggregate',
- 'alias',
- 'allocate',
- 'are',
- 'array',
- 'asensitive',
- 'assertion',
- 'asymmetric',
- 'at',
- 'atomic',
- 'before',
- 'binary',
- 'bit',
- 'blob',
- 'boolean',
- 'both',
- 'breadth',
- 'call',
- 'called',
- 'cardinality',
- 'cascaded',
- 'cast',
- 'catalog',
- 'char',
- 'character',
- 'class',
- 'clob',
- 'collation',
- 'collect',
- 'completion',
- 'condition',
- 'connect',
- 'connection',
- 'constraints',
- 'constructor',
- 'corr',
- 'corresponding',
- 'covar_pop',
- 'covar_samp',
- 'cube',
- 'cume_dist',
- 'current_catalog',
- 'current_default_transform_group',
- 'current_path',
- 'current_role',
- 'current_schema',
- 'current_transform_group_for_type',
- 'cycle',
- 'data',
- 'date',
- 'day',
- 'dec',
- 'decimal',
- 'deferrable',
- 'deferred',
- 'depth',
- 'deref',
- 'describe',
- 'descriptor',
- 'destroy',
- 'destructor',
- 'deterministic',
- 'diagnostics',
- 'dictionary',
- 'disconnect',
- 'domain',
- 'dynamic',
- 'each',
- 'element',
- 'end-exec',
- 'equals',
- 'every',
- 'exception',
- 'false',
- 'filter',
- 'first',
- 'float',
- 'found',
- 'free',
- 'fulltexttable',
- 'fusion',
- 'general',
- 'get',
- 'global',
- 'go',
- 'grouping',
- 'hold',
- 'host',
- 'hour',
- 'ignore',
- 'immediate',
- 'indicator',
- 'initialize',
- 'initially',
- 'inout',
- 'input',
- 'int',
- 'integer',
- 'intersection',
- 'interval',
- 'isolation',
- 'iterate',
- 'language',
- 'large',
- 'last',
- 'lateral',
- 'leading',
- 'less',
- 'level',
- 'like_regex',
- 'limit',
- 'ln',
- 'local',
- 'localtime',
- 'localtimestamp',
- 'locator',
- 'map',
- 'match',
- 'member',
- 'method',
- 'minute',
- 'mod',
- 'modifies',
- 'modify',
- 'module',
- 'month',
- 'multiset',
- 'names',
- 'natural',
- 'nchar',
- 'nclob',
- 'new',
- 'next',
- 'no',
- 'none',
- 'normalize',
- 'numeric',
- 'object',
- 'occurrences_regex',
- 'old',
- 'only',
- 'operation',
- 'ordinality',
- 'out',
- 'output',
- 'overlay',
- 'pad',
- 'parameter',
- 'parameters',
- 'partial',
- 'partition',
- 'path',
- 'percent_rank',
- 'percentile_cont',
- 'percentile_disc',
- 'position_regex',
- 'postfix',
- 'prefix',
- 'preorder',
- 'prepare',
- 'preserve',
- 'prior',
- 'privileges',
- 'range',
- 'reads',
- 'real',
- 'recursive',
- 'ref',
- 'referencing',
- 'regr_avgx',
- 'regr_avgy',
- 'regr_count',
- 'regr_intercept',
- 'regr_r2',
- 'regr_slope',
- 'regr_sxx',
- 'regr_sxy',
- 'regr_syy',
- 'relative',
- 'release',
- 'result',
- 'returns',
- 'role',
- 'rollup',
- 'routine',
- 'row',
- 'rows',
- 'savepoint',
- 'scope',
- 'scroll',
- 'search',
- 'second',
- 'section',
- 'sensitive',
- 'sequence',
- 'session',
- 'sets',
- 'similar',
- 'size',
- 'smallint',
- 'space',
- 'specific',
- 'specifictype',
- 'sql',
- 'sqlexception',
- 'sqlstate',
- 'sqlwarning',
- 'start',
- 'state',
- 'statement',
- 'static',
- 'stddev_pop',
- 'stddev_samp',
- 'structure',
- 'submultiset',
- 'substring_regex',
- 'symmetric',
- 'system',
- 'temporary',
- 'terminate',
- 'than',
- 'time',
- 'timestamp',
- 'timezone_hour',
- 'timezone_minute',
- 'trailing',
- 'translate_regex',
- 'translation',
- 'treat',
- 'true',
- 'uescape',
- 'under',
- 'unknown',
- 'unnest',
- 'usage',
- 'using',
- 'value',
- 'var_pop',
- 'var_samp',
- 'varchar',
- 'variable',
- 'whenever',
- 'width_bucket',
- 'window',
- 'within',
- 'without',
- 'work',
- 'write',
- 'xmlagg',
- 'xmlattributes',
- 'xmlbinary',
- 'xmlcast',
- 'xmlcomment',
- 'xmlconcat',
- 'xmldocument',
- 'xmlelement',
- 'xmlexists',
- 'xmlforest',
- 'xmliterate',
- 'xmlnamespaces',
- 'xmlparse',
- 'xmlpi',
- 'xmlquery',
- 'xmlserialize',
- 'xmltable',
- 'xmltext',
- 'xmlvalidate',
- 'year',
- 'zone',
-)
-
-_KEYWORDS_ODBC = (
- 'absolute',
- 'action',
- 'ada',
- 'add',
- 'all',
- 'allocate',
- 'alter',
- 'and',
- 'any',
- 'are',
- 'as',
- 'asc',
- 'assertion',
- 'at',
- 'authorization',
- 'avg',
- 'begin',
- 'between',
- 'bit',
- 'bit_length',
- 'both',
- 'by',
- 'cascade',
- 'cascaded',
- 'case',
- 'cast',
- 'catalog',
- 'char',
- 'char_length',
- 'character',
- 'character_length',
- 'check',
- 'close',
- 'coalesce',
- 'collate',
- 'collation',
- 'column',
- 'commit',
- 'connect',
- 'connection',
- 'constraint',
- 'constraints',
- 'continue',
- 'convert',
- 'corresponding',
- 'count',
- 'create',
- 'cross',
- 'current',
- 'current_date',
- 'current_time',
- 'current_timestamp',
- 'current_user',
- 'cursor',
- 'date',
- 'day',
- 'deallocate',
- 'dec',
- 'decimal',
- 'declare',
- 'default',
- 'deferrable',
- 'deferred',
- 'delete',
- 'desc',
- 'describe',
- 'descriptor',
- 'diagnostics',
- 'disconnect',
- 'distinct',
- 'domain',
- 'double',
- 'drop',
- 'else',
- 'end',
- 'end-exec',
- 'escape',
- 'except',
- 'exception',
- 'exec',
- 'execute',
- 'exists',
- 'external',
- 'extract',
- 'false',
- 'fetch',
- 'first',
- 'float',
- 'for',
- 'foreign',
- 'fortran',
- 'found',
- 'from',
- 'full',
- 'get',
- 'global',
- 'go',
- 'goto',
- 'grant',
- 'group',
- 'having',
- 'hour',
- 'identity',
- 'immediate',
- 'in',
- 'include',
- 'index',
- 'indicator',
- 'initially',
- 'inner',
- 'input',
- 'insensitive',
- 'insert',
- 'int',
- 'integer',
- 'intersect',
- 'interval',
- 'into',
- 'is',
- 'isolation',
- 'join',
- 'key',
- 'language',
- 'last',
- 'leading',
- 'left',
- 'level',
- 'like',
- 'local',
- 'lower',
- 'match',
- 'max',
- 'min',
- 'minute',
- 'module',
- 'month',
- 'names',
- 'national',
- 'natural',
- 'nchar',
- 'next',
- 'no',
- 'none',
- 'not',
- 'null',
- 'nullif',
- 'numeric',
- 'octet_length',
- 'of',
- 'on',
- 'only',
- 'open',
- 'option',
- 'or',
- 'order',
- 'outer',
- 'output',
- 'overlaps',
- 'pad',
- 'partial',
- 'pascal',
- 'position',
- 'precision',
- 'prepare',
- 'preserve',
- 'primary',
- 'prior',
- 'privileges',
- 'procedure',
- 'public',
- 'read',
- 'real',
- 'references',
- 'relative',
- 'restrict',
- 'revoke',
- 'right',
- 'rollback',
- 'rows',
- 'schema',
- 'scroll',
- 'second',
- 'section',
- 'select',
- 'session',
- 'session_user',
- 'set',
- 'size',
- 'smallint',
- 'some',
- 'space',
- 'sql',
- 'sqlca',
- 'sqlcode',
- 'sqlerror',
- 'sqlstate',
- 'sqlwarning',
- 'substring',
- 'sum',
- 'system_user',
- 'table',
- 'temporary',
- 'then',
- 'time',
- 'timestamp',
- 'timezone_hour',
- 'timezone_minute',
- 'to',
- 'trailing',
- 'transaction',
- 'translate',
- 'translation',
- 'trim',
- 'true',
- 'union',
- 'unique',
- 'unknown',
- 'update',
- 'upper',
- 'usage',
- 'user',
- 'using',
- 'value',
- 'values',
- 'varchar',
- 'varying',
- 'view',
- 'when',
- 'whenever',
- 'where',
- 'with',
- 'work',
- 'write',
- 'year',
- 'zone',
-)
-
-# See https://msdn.microsoft.com/en-us/library/ms189822.aspx.
-KEYWORDS = sorted(set(_KEYWORDS_FUTURE + _KEYWORDS_ODBC + _KEYWORDS_SERVER))
-
-# See https://msdn.microsoft.com/en-us/library/ms187752.aspx.
-TYPES = (
- 'bigint',
- 'binary',
- 'bit',
- 'char',
- 'cursor',
- 'date',
- 'datetime',
- 'datetime2',
- 'datetimeoffset',
- 'decimal',
- 'float',
- 'hierarchyid',
- 'image',
- 'int',
- 'money',
- 'nchar',
- 'ntext',
- 'numeric',
- 'nvarchar',
- 'real',
- 'smalldatetime',
- 'smallint',
- 'smallmoney',
- 'sql_variant',
- 'table',
- 'text',
- 'time',
- 'timestamp',
- 'tinyint',
- 'uniqueidentifier',
- 'varbinary',
- 'varchar',
- 'xml',
-)
-
-# See https://msdn.microsoft.com/en-us/library/ms174318.aspx.
-FUNCTIONS = (
- '$partition',
- 'abs',
- 'acos',
- 'app_name',
- 'applock_mode',
- 'applock_test',
- 'ascii',
- 'asin',
- 'assemblyproperty',
- 'atan',
- 'atn2',
- 'avg',
- 'binary_checksum',
- 'cast',
- 'ceiling',
- 'certencoded',
- 'certprivatekey',
- 'char',
- 'charindex',
- 'checksum',
- 'checksum_agg',
- 'choose',
- 'col_length',
- 'col_name',
- 'columnproperty',
- 'compress',
- 'concat',
- 'connectionproperty',
- 'context_info',
- 'convert',
- 'cos',
- 'cot',
- 'count',
- 'count_big',
- 'current_request_id',
- 'current_timestamp',
- 'current_transaction_id',
- 'current_user',
- 'cursor_status',
- 'database_principal_id',
- 'databasepropertyex',
- 'dateadd',
- 'datediff',
- 'datediff_big',
- 'datefromparts',
- 'datename',
- 'datepart',
- 'datetime2fromparts',
- 'datetimefromparts',
- 'datetimeoffsetfromparts',
- 'day',
- 'db_id',
- 'db_name',
- 'decompress',
- 'degrees',
- 'dense_rank',
- 'difference',
- 'eomonth',
- 'error_line',
- 'error_message',
- 'error_number',
- 'error_procedure',
- 'error_severity',
- 'error_state',
- 'exp',
- 'file_id',
- 'file_idex',
- 'file_name',
- 'filegroup_id',
- 'filegroup_name',
- 'filegroupproperty',
- 'fileproperty',
- 'floor',
- 'format',
- 'formatmessage',
- 'fulltextcatalogproperty',
- 'fulltextserviceproperty',
- 'get_filestream_transaction_context',
- 'getansinull',
- 'getdate',
- 'getutcdate',
- 'grouping',
- 'grouping_id',
- 'has_perms_by_name',
- 'host_id',
- 'host_name',
- 'iif',
- 'index_col',
- 'indexkey_property',
- 'indexproperty',
- 'is_member',
- 'is_rolemember',
- 'is_srvrolemember',
- 'isdate',
- 'isjson',
- 'isnull',
- 'isnumeric',
- 'json_modify',
- 'json_query',
- 'json_value',
- 'left',
- 'len',
- 'log',
- 'log10',
- 'lower',
- 'ltrim',
- 'max',
- 'min',
- 'min_active_rowversion',
- 'month',
- 'nchar',
- 'newid',
- 'newsequentialid',
- 'ntile',
- 'object_definition',
- 'object_id',
- 'object_name',
- 'object_schema_name',
- 'objectproperty',
- 'objectpropertyex',
- 'opendatasource',
- 'openjson',
- 'openquery',
- 'openrowset',
- 'openxml',
- 'original_db_name',
- 'original_login',
- 'parse',
- 'parsename',
- 'patindex',
- 'permissions',
- 'pi',
- 'power',
- 'pwdcompare',
- 'pwdencrypt',
- 'quotename',
- 'radians',
- 'rand',
- 'rank',
- 'replace',
- 'replicate',
- 'reverse',
- 'right',
- 'round',
- 'row_number',
- 'rowcount_big',
- 'rtrim',
- 'schema_id',
- 'schema_name',
- 'scope_identity',
- 'serverproperty',
- 'session_context',
- 'session_user',
- 'sign',
- 'sin',
- 'smalldatetimefromparts',
- 'soundex',
- 'sp_helplanguage',
- 'space',
- 'sqrt',
- 'square',
- 'stats_date',
- 'stdev',
- 'stdevp',
- 'str',
- 'string_escape',
- 'string_split',
- 'stuff',
- 'substring',
- 'sum',
- 'suser_id',
- 'suser_name',
- 'suser_sid',
- 'suser_sname',
- 'switchoffset',
- 'sysdatetime',
- 'sysdatetimeoffset',
- 'system_user',
- 'sysutcdatetime',
- 'tan',
- 'textptr',
- 'textvalid',
- 'timefromparts',
- 'todatetimeoffset',
- 'try_cast',
- 'try_convert',
- 'try_parse',
- 'type_id',
- 'type_name',
- 'typeproperty',
- 'unicode',
- 'upper',
- 'user_id',
- 'user_name',
- 'var',
- 'varp',
- 'xact_state',
- 'year',
-)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_usd_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_usd_builtins.py
deleted file mode 100644
index d891ad0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_usd_builtins.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""
- pygments.lexers._usd_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- A collection of known USD-related keywords, attributes, and types.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-COMMON_ATTRIBUTES = [
- "extent",
- "xformOpOrder",
-]
-
-KEYWORDS = [
- "class",
- "clips",
- "custom",
- "customData",
- "def",
- "dictionary",
- "inherits",
- "over",
- "payload",
- "references",
- "rel",
- "subLayers",
- "timeSamples",
- "uniform",
- "variantSet",
- "variantSets",
- "variants",
-]
-
-OPERATORS = [
- "add",
- "append",
- "delete",
- "prepend",
- "reorder",
-]
-
-SPECIAL_NAMES = [
- "active",
- "apiSchemas",
- "defaultPrim",
- "elementSize",
- "endTimeCode",
- "hidden",
- "instanceable",
- "interpolation",
- "kind",
- "startTimeCode",
- "upAxis",
-]
-
-TYPES = [
- "asset",
- "bool",
- "color3d",
- "color3f",
- "color3h",
- "color4d",
- "color4f",
- "color4h",
- "double",
- "double2",
- "double3",
- "double4",
- "float",
- "float2",
- "float3",
- "float4",
- "frame4d",
- "half",
- "half2",
- "half3",
- "half4",
- "int",
- "int2",
- "int3",
- "int4",
- "keyword",
- "matrix2d",
- "matrix3d",
- "matrix4d",
- "normal3d",
- "normal3f",
- "normal3h",
- "point3d",
- "point3f",
- "point3h",
- "quatd",
- "quatf",
- "quath",
- "string",
- "syn",
- "token",
- "uchar",
- "uchar2",
- "uchar3",
- "uchar4",
- "uint",
- "uint2",
- "uint3",
- "uint4",
- "usdaType",
- "vector3d",
- "vector3f",
- "vector3h",
-]
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_vbscript_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_vbscript_builtins.py
deleted file mode 100644
index e7e96e6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_vbscript_builtins.py
+++ /dev/null
@@ -1,279 +0,0 @@
-"""
- pygments.lexers._vbscript_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- These are manually translated lists from
- http://www.indusoft.com/pdf/VBScript%20Reference.pdf.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-KEYWORDS = [
- 'ByRef',
- 'ByVal',
- # dim: special rule
- 'call',
- 'case',
- 'class',
- # const: special rule
- 'do',
- 'each',
- 'else',
- 'elseif',
- 'end',
- 'erase',
- 'execute',
- 'function',
- 'exit',
- 'for',
- 'function',
- 'GetRef',
- 'global',
- 'if',
- 'let',
- 'loop',
- 'next',
- 'new',
- # option: special rule
- 'private',
- 'public',
- 'redim',
- 'select',
- 'set',
- 'sub',
- 'then',
- 'wend',
- 'while',
- 'with',
-]
-
-BUILTIN_FUNCTIONS = [
- 'Abs',
- 'Array',
- 'Asc',
- 'Atn',
- 'CBool',
- 'CByte',
- 'CCur',
- 'CDate',
- 'CDbl',
- 'Chr',
- 'CInt',
- 'CLng',
- 'Cos',
- 'CreateObject',
- 'CSng',
- 'CStr',
- 'Date',
- 'DateAdd',
- 'DateDiff',
- 'DatePart',
- 'DateSerial',
- 'DateValue',
- 'Day',
- 'Eval',
- 'Exp',
- 'Filter',
- 'Fix',
- 'FormatCurrency',
- 'FormatDateTime',
- 'FormatNumber',
- 'FormatPercent',
- 'GetObject',
- 'GetLocale',
- 'Hex',
- 'Hour',
- 'InStr',
- 'inStrRev',
- 'Int',
- 'IsArray',
- 'IsDate',
- 'IsEmpty',
- 'IsNull',
- 'IsNumeric',
- 'IsObject',
- 'Join',
- 'LBound',
- 'LCase',
- 'Left',
- 'Len',
- 'LoadPicture',
- 'Log',
- 'LTrim',
- 'Mid',
- 'Minute',
- 'Month',
- 'MonthName',
- 'MsgBox',
- 'Now',
- 'Oct',
- 'Randomize',
- 'RegExp',
- 'Replace',
- 'RGB',
- 'Right',
- 'Rnd',
- 'Round',
- 'RTrim',
- 'ScriptEngine',
- 'ScriptEngineBuildVersion',
- 'ScriptEngineMajorVersion',
- 'ScriptEngineMinorVersion',
- 'Second',
- 'SetLocale',
- 'Sgn',
- 'Space',
- 'Split',
- 'Sqr',
- 'StrComp',
- 'String',
- 'StrReverse',
- 'Tan',
- 'Time',
- 'Timer',
- 'TimeSerial',
- 'TimeValue',
- 'Trim',
- 'TypeName',
- 'UBound',
- 'UCase',
- 'VarType',
- 'Weekday',
- 'WeekdayName',
- 'Year',
-]
-
-BUILTIN_VARIABLES = [
- 'Debug',
- 'Dictionary',
- 'Drive',
- 'Drives',
- 'Err',
- 'File',
- 'Files',
- 'FileSystemObject',
- 'Folder',
- 'Folders',
- 'Match',
- 'Matches',
- 'RegExp',
- 'Submatches',
- 'TextStream',
-]
-
-OPERATORS = [
- '+',
- '-',
- '*',
- '/',
- '\\',
- '^',
- '|',
- '<',
- '<=',
- '>',
- '>=',
- '=',
- '<>',
- '&',
- '$',
-]
-
-OPERATOR_WORDS = [
- 'mod',
- 'and',
- 'or',
- 'xor',
- 'eqv',
- 'imp',
- 'is',
- 'not',
-]
-
-BUILTIN_CONSTANTS = [
- 'False',
- 'True',
- 'vbAbort',
- 'vbAbortRetryIgnore',
- 'vbApplicationModal',
- 'vbArray',
- 'vbBinaryCompare',
- 'vbBlack',
- 'vbBlue',
- 'vbBoole',
- 'vbByte',
- 'vbCancel',
- 'vbCr',
- 'vbCritical',
- 'vbCrLf',
- 'vbCurrency',
- 'vbCyan',
- 'vbDataObject',
- 'vbDate',
- 'vbDefaultButton1',
- 'vbDefaultButton2',
- 'vbDefaultButton3',
- 'vbDefaultButton4',
- 'vbDouble',
- 'vbEmpty',
- 'vbError',
- 'vbExclamation',
- 'vbFalse',
- 'vbFirstFullWeek',
- 'vbFirstJan1',
- 'vbFormFeed',
- 'vbFriday',
- 'vbGeneralDate',
- 'vbGreen',
- 'vbIgnore',
- 'vbInformation',
- 'vbInteger',
- 'vbLf',
- 'vbLong',
- 'vbLongDate',
- 'vbLongTime',
- 'vbMagenta',
- 'vbMonday',
- 'vbMsgBoxHelpButton',
- 'vbMsgBoxRight',
- 'vbMsgBoxRtlReading',
- 'vbMsgBoxSetForeground',
- 'vbNewLine',
- 'vbNo',
- 'vbNull',
- 'vbNullChar',
- 'vbNullString',
- 'vbObject',
- 'vbObjectError',
- 'vbOK',
- 'vbOKCancel',
- 'vbOKOnly',
- 'vbQuestion',
- 'vbRed',
- 'vbRetry',
- 'vbRetryCancel',
- 'vbSaturday',
- 'vbShortDate',
- 'vbShortTime',
- 'vbSingle',
- 'vbString',
- 'vbSunday',
- 'vbSystemModal',
- 'vbTab',
- 'vbTextCompare',
- 'vbThursday',
- 'vbTrue',
- 'vbTuesday',
- 'vbUseDefault',
- 'vbUseSystem',
- 'vbUseSystem',
- 'vbVariant',
- 'vbVerticalTab',
- 'vbWednesday',
- 'vbWhite',
- 'vbYellow',
- 'vbYes',
- 'vbYesNo',
- 'vbYesNoCancel',
-]
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/_vim_builtins.py b/venv/lib/python3.11/site-packages/pygments/lexers/_vim_builtins.py
deleted file mode 100644
index ccc8740..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/_vim_builtins.py
+++ /dev/null
@@ -1,1938 +0,0 @@
-"""
- pygments.lexers._vim_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file is autogenerated by scripts/get_vimkw.py
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Split up in multiple functions so it's importable by jython, which has a
-# per-method size limit.
-
-def _getauto():
- var = (
- ('BufAdd','BufAdd'),
- ('BufCreate','BufCreate'),
- ('BufDelete','BufDelete'),
- ('BufEnter','BufEnter'),
- ('BufFilePost','BufFilePost'),
- ('BufFilePre','BufFilePre'),
- ('BufHidden','BufHidden'),
- ('BufLeave','BufLeave'),
- ('BufNew','BufNew'),
- ('BufNewFile','BufNewFile'),
- ('BufRead','BufRead'),
- ('BufReadCmd','BufReadCmd'),
- ('BufReadPost','BufReadPost'),
- ('BufReadPre','BufReadPre'),
- ('BufUnload','BufUnload'),
- ('BufWinEnter','BufWinEnter'),
- ('BufWinLeave','BufWinLeave'),
- ('BufWipeout','BufWipeout'),
- ('BufWrite','BufWrite'),
- ('BufWriteCmd','BufWriteCmd'),
- ('BufWritePost','BufWritePost'),
- ('BufWritePre','BufWritePre'),
- ('Cmd','Cmd'),
- ('CmdwinEnter','CmdwinEnter'),
- ('CmdwinLeave','CmdwinLeave'),
- ('ColorScheme','ColorScheme'),
- ('CompleteDone','CompleteDone'),
- ('CursorHold','CursorHold'),
- ('CursorHoldI','CursorHoldI'),
- ('CursorMoved','CursorMoved'),
- ('CursorMovedI','CursorMovedI'),
- ('EncodingChanged','EncodingChanged'),
- ('FileAppendCmd','FileAppendCmd'),
- ('FileAppendPost','FileAppendPost'),
- ('FileAppendPre','FileAppendPre'),
- ('FileChangedRO','FileChangedRO'),
- ('FileChangedShell','FileChangedShell'),
- ('FileChangedShellPost','FileChangedShellPost'),
- ('FileEncoding','FileEncoding'),
- ('FileReadCmd','FileReadCmd'),
- ('FileReadPost','FileReadPost'),
- ('FileReadPre','FileReadPre'),
- ('FileType','FileType'),
- ('FileWriteCmd','FileWriteCmd'),
- ('FileWritePost','FileWritePost'),
- ('FileWritePre','FileWritePre'),
- ('FilterReadPost','FilterReadPost'),
- ('FilterReadPre','FilterReadPre'),
- ('FilterWritePost','FilterWritePost'),
- ('FilterWritePre','FilterWritePre'),
- ('FocusGained','FocusGained'),
- ('FocusLost','FocusLost'),
- ('FuncUndefined','FuncUndefined'),
- ('GUIEnter','GUIEnter'),
- ('GUIFailed','GUIFailed'),
- ('InsertChange','InsertChange'),
- ('InsertCharPre','InsertCharPre'),
- ('InsertEnter','InsertEnter'),
- ('InsertLeave','InsertLeave'),
- ('MenuPopup','MenuPopup'),
- ('QuickFixCmdPost','QuickFixCmdPost'),
- ('QuickFixCmdPre','QuickFixCmdPre'),
- ('QuitPre','QuitPre'),
- ('RemoteReply','RemoteReply'),
- ('SessionLoadPost','SessionLoadPost'),
- ('ShellCmdPost','ShellCmdPost'),
- ('ShellFilterPost','ShellFilterPost'),
- ('SourceCmd','SourceCmd'),
- ('SourcePre','SourcePre'),
- ('SpellFileMissing','SpellFileMissing'),
- ('StdinReadPost','StdinReadPost'),
- ('StdinReadPre','StdinReadPre'),
- ('SwapExists','SwapExists'),
- ('Syntax','Syntax'),
- ('TabEnter','TabEnter'),
- ('TabLeave','TabLeave'),
- ('TermChanged','TermChanged'),
- ('TermResponse','TermResponse'),
- ('TextChanged','TextChanged'),
- ('TextChangedI','TextChangedI'),
- ('User','User'),
- ('UserGettingBored','UserGettingBored'),
- ('VimEnter','VimEnter'),
- ('VimLeave','VimLeave'),
- ('VimLeavePre','VimLeavePre'),
- ('VimResized','VimResized'),
- ('WinEnter','WinEnter'),
- ('WinLeave','WinLeave'),
- ('event','event'),
- )
- return var
-auto = _getauto()
-
-def _getcommand():
- var = (
- ('a','a'),
- ('ab','ab'),
- ('abc','abclear'),
- ('abo','aboveleft'),
- ('al','all'),
- ('ar','ar'),
- ('ar','args'),
- ('arga','argadd'),
- ('argd','argdelete'),
- ('argdo','argdo'),
- ('arge','argedit'),
- ('argg','argglobal'),
- ('argl','arglocal'),
- ('argu','argument'),
- ('as','ascii'),
- ('au','au'),
- ('b','buffer'),
- ('bN','bNext'),
- ('ba','ball'),
- ('bad','badd'),
- ('bd','bdelete'),
- ('bel','belowright'),
- ('bf','bfirst'),
- ('bl','blast'),
- ('bm','bmodified'),
- ('bn','bnext'),
- ('bo','botright'),
- ('bp','bprevious'),
- ('br','br'),
- ('br','brewind'),
- ('brea','break'),
- ('breaka','breakadd'),
- ('breakd','breakdel'),
- ('breakl','breaklist'),
- ('bro','browse'),
- ('bu','bu'),
- ('buf','buf'),
- ('bufdo','bufdo'),
- ('buffers','buffers'),
- ('bun','bunload'),
- ('bw','bwipeout'),
- ('c','c'),
- ('c','change'),
- ('cN','cN'),
- ('cN','cNext'),
- ('cNf','cNf'),
- ('cNf','cNfile'),
- ('cabc','cabclear'),
- ('cad','cad'),
- ('cad','caddexpr'),
- ('caddb','caddbuffer'),
- ('caddf','caddfile'),
- ('cal','call'),
- ('cat','catch'),
- ('cb','cbuffer'),
- ('cc','cc'),
- ('ccl','cclose'),
- ('cd','cd'),
- ('ce','center'),
- ('cex','cexpr'),
- ('cf','cfile'),
- ('cfir','cfirst'),
- ('cg','cgetfile'),
- ('cgetb','cgetbuffer'),
- ('cgete','cgetexpr'),
- ('changes','changes'),
- ('chd','chdir'),
- ('che','checkpath'),
- ('checkt','checktime'),
- ('cl','cl'),
- ('cl','clist'),
- ('cla','clast'),
- ('clo','close'),
- ('cmapc','cmapclear'),
- ('cn','cn'),
- ('cn','cnext'),
- ('cnew','cnewer'),
- ('cnf','cnf'),
- ('cnf','cnfile'),
- ('co','copy'),
- ('col','colder'),
- ('colo','colorscheme'),
- ('com','com'),
- ('comc','comclear'),
- ('comp','compiler'),
- ('con','con'),
- ('con','continue'),
- ('conf','confirm'),
- ('cope','copen'),
- ('cp','cprevious'),
- ('cpf','cpfile'),
- ('cq','cquit'),
- ('cr','crewind'),
- ('cs','cs'),
- ('cscope','cscope'),
- ('cstag','cstag'),
- ('cuna','cunabbrev'),
- ('cw','cwindow'),
- ('d','d'),
- ('d','delete'),
- ('de','de'),
- ('debug','debug'),
- ('debugg','debuggreedy'),
- ('del','del'),
- ('delc','delcommand'),
- ('delel','delel'),
- ('delep','delep'),
- ('deletel','deletel'),
- ('deletep','deletep'),
- ('deletl','deletl'),
- ('deletp','deletp'),
- ('delf','delf'),
- ('delf','delfunction'),
- ('dell','dell'),
- ('delm','delmarks'),
- ('delp','delp'),
- ('dep','dep'),
- ('di','di'),
- ('di','display'),
- ('diffg','diffget'),
- ('diffo','diffoff'),
- ('diffp','diffpatch'),
- ('diffpu','diffput'),
- ('diffs','diffsplit'),
- ('difft','diffthis'),
- ('diffu','diffupdate'),
- ('dig','dig'),
- ('dig','digraphs'),
- ('dir','dir'),
- ('dj','djump'),
- ('dl','dl'),
- ('dli','dlist'),
- ('do','do'),
- ('doau','doau'),
- ('dp','dp'),
- ('dr','drop'),
- ('ds','dsearch'),
- ('dsp','dsplit'),
- ('e','e'),
- ('e','edit'),
- ('ea','ea'),
- ('earlier','earlier'),
- ('ec','ec'),
- ('echoe','echoerr'),
- ('echom','echomsg'),
- ('echon','echon'),
- ('el','else'),
- ('elsei','elseif'),
- ('em','emenu'),
- ('en','en'),
- ('en','endif'),
- ('endf','endf'),
- ('endf','endfunction'),
- ('endfo','endfor'),
- ('endfun','endfun'),
- ('endt','endtry'),
- ('endw','endwhile'),
- ('ene','enew'),
- ('ex','ex'),
- ('exi','exit'),
- ('exu','exusage'),
- ('f','f'),
- ('f','file'),
- ('files','files'),
- ('filet','filet'),
- ('filetype','filetype'),
- ('fin','fin'),
- ('fin','find'),
- ('fina','finally'),
- ('fini','finish'),
- ('fir','first'),
- ('fix','fixdel'),
- ('fo','fold'),
- ('foldc','foldclose'),
- ('foldd','folddoopen'),
- ('folddoc','folddoclosed'),
- ('foldo','foldopen'),
- ('for','for'),
- ('fu','fu'),
- ('fu','function'),
- ('fun','fun'),
- ('g','g'),
- ('go','goto'),
- ('gr','grep'),
- ('grepa','grepadd'),
- ('gui','gui'),
- ('gvim','gvim'),
- ('h','h'),
- ('h','help'),
- ('ha','hardcopy'),
- ('helpf','helpfind'),
- ('helpg','helpgrep'),
- ('helpt','helptags'),
- ('hi','hi'),
- ('hid','hide'),
- ('his','history'),
- ('i','i'),
- ('ia','ia'),
- ('iabc','iabclear'),
- ('if','if'),
- ('ij','ijump'),
- ('il','ilist'),
- ('imapc','imapclear'),
- ('in','in'),
- ('intro','intro'),
- ('is','isearch'),
- ('isp','isplit'),
- ('iuna','iunabbrev'),
- ('j','join'),
- ('ju','jumps'),
- ('k','k'),
- ('kee','keepmarks'),
- ('keepa','keepa'),
- ('keepalt','keepalt'),
- ('keepj','keepjumps'),
- ('keepp','keeppatterns'),
- ('l','l'),
- ('l','list'),
- ('lN','lN'),
- ('lN','lNext'),
- ('lNf','lNf'),
- ('lNf','lNfile'),
- ('la','la'),
- ('la','last'),
- ('lad','lad'),
- ('lad','laddexpr'),
- ('laddb','laddbuffer'),
- ('laddf','laddfile'),
- ('lan','lan'),
- ('lan','language'),
- ('lat','lat'),
- ('later','later'),
- ('lb','lbuffer'),
- ('lc','lcd'),
- ('lch','lchdir'),
- ('lcl','lclose'),
- ('lcs','lcs'),
- ('lcscope','lcscope'),
- ('le','left'),
- ('lefta','leftabove'),
- ('lex','lexpr'),
- ('lf','lfile'),
- ('lfir','lfirst'),
- ('lg','lgetfile'),
- ('lgetb','lgetbuffer'),
- ('lgete','lgetexpr'),
- ('lgr','lgrep'),
- ('lgrepa','lgrepadd'),
- ('lh','lhelpgrep'),
- ('ll','ll'),
- ('lla','llast'),
- ('lli','llist'),
- ('lmak','lmake'),
- ('lmapc','lmapclear'),
- ('lne','lne'),
- ('lne','lnext'),
- ('lnew','lnewer'),
- ('lnf','lnf'),
- ('lnf','lnfile'),
- ('lo','lo'),
- ('lo','loadview'),
- ('loadk','loadk'),
- ('loadkeymap','loadkeymap'),
- ('loc','lockmarks'),
- ('lockv','lockvar'),
- ('lol','lolder'),
- ('lop','lopen'),
- ('lp','lprevious'),
- ('lpf','lpfile'),
- ('lr','lrewind'),
- ('ls','ls'),
- ('lt','ltag'),
- ('lua','lua'),
- ('luado','luado'),
- ('luafile','luafile'),
- ('lv','lvimgrep'),
- ('lvimgrepa','lvimgrepadd'),
- ('lw','lwindow'),
- ('m','move'),
- ('ma','ma'),
- ('ma','mark'),
- ('mak','make'),
- ('marks','marks'),
- ('mat','match'),
- ('menut','menut'),
- ('menut','menutranslate'),
- ('mes','mes'),
- ('messages','messages'),
- ('mk','mk'),
- ('mk','mkexrc'),
- ('mks','mksession'),
- ('mksp','mkspell'),
- ('mkv','mkv'),
- ('mkv','mkvimrc'),
- ('mkvie','mkview'),
- ('mo','mo'),
- ('mod','mode'),
- ('mz','mz'),
- ('mz','mzscheme'),
- ('mzf','mzfile'),
- ('n','n'),
- ('n','next'),
- ('nb','nbkey'),
- ('nbc','nbclose'),
- ('nbs','nbstart'),
- ('ne','ne'),
- ('new','new'),
- ('nmapc','nmapclear'),
- ('noa','noa'),
- ('noautocmd','noautocmd'),
- ('noh','nohlsearch'),
- ('nu','number'),
- ('o','o'),
- ('o','open'),
- ('ol','oldfiles'),
- ('omapc','omapclear'),
- ('on','only'),
- ('opt','options'),
- ('ownsyntax','ownsyntax'),
- ('p','p'),
- ('p','print'),
- ('pc','pclose'),
- ('pe','pe'),
- ('pe','perl'),
- ('ped','pedit'),
- ('perld','perldo'),
- ('po','pop'),
- ('popu','popu'),
- ('popu','popup'),
- ('pp','ppop'),
- ('pr','pr'),
- ('pre','preserve'),
- ('prev','previous'),
- ('pro','pro'),
- ('prof','profile'),
- ('profd','profdel'),
- ('promptf','promptfind'),
- ('promptr','promptrepl'),
- ('ps','psearch'),
- ('ptN','ptN'),
- ('ptN','ptNext'),
- ('pta','ptag'),
- ('ptf','ptfirst'),
- ('ptj','ptjump'),
- ('ptl','ptlast'),
- ('ptn','ptn'),
- ('ptn','ptnext'),
- ('ptp','ptprevious'),
- ('ptr','ptrewind'),
- ('pts','ptselect'),
- ('pu','put'),
- ('pw','pwd'),
- ('py','py'),
- ('py','python'),
- ('py3','py3'),
- ('py3','py3'),
- ('py3do','py3do'),
- ('pydo','pydo'),
- ('pyf','pyfile'),
- ('python3','python3'),
- ('q','q'),
- ('q','quit'),
- ('qa','qall'),
- ('quita','quitall'),
- ('r','r'),
- ('r','read'),
- ('re','re'),
- ('rec','recover'),
- ('red','red'),
- ('red','redo'),
- ('redi','redir'),
- ('redr','redraw'),
- ('redraws','redrawstatus'),
- ('reg','registers'),
- ('res','resize'),
- ('ret','retab'),
- ('retu','return'),
- ('rew','rewind'),
- ('ri','right'),
- ('rightb','rightbelow'),
- ('ru','ru'),
- ('ru','runtime'),
- ('rub','ruby'),
- ('rubyd','rubydo'),
- ('rubyf','rubyfile'),
- ('rundo','rundo'),
- ('rv','rviminfo'),
- ('sN','sNext'),
- ('sa','sargument'),
- ('sal','sall'),
- ('san','sandbox'),
- ('sav','saveas'),
- ('sb','sbuffer'),
- ('sbN','sbNext'),
- ('sba','sball'),
- ('sbf','sbfirst'),
- ('sbl','sblast'),
- ('sbm','sbmodified'),
- ('sbn','sbnext'),
- ('sbp','sbprevious'),
- ('sbr','sbrewind'),
- ('scrip','scrip'),
- ('scrip','scriptnames'),
- ('scripte','scriptencoding'),
- ('scs','scs'),
- ('scscope','scscope'),
- ('se','set'),
- ('setf','setfiletype'),
- ('setg','setglobal'),
- ('setl','setlocal'),
- ('sf','sfind'),
- ('sfir','sfirst'),
- ('sh','shell'),
- ('si','si'),
- ('sig','sig'),
- ('sign','sign'),
- ('sil','silent'),
- ('sim','simalt'),
- ('sl','sl'),
- ('sl','sleep'),
- ('sla','slast'),
- ('sm','smagic'),
- ('sm','smap'),
- ('sme','sme'),
- ('smenu','smenu'),
- ('sn','snext'),
- ('sni','sniff'),
- ('sno','snomagic'),
- ('snoreme','snoreme'),
- ('snoremenu','snoremenu'),
- ('so','so'),
- ('so','source'),
- ('sor','sort'),
- ('sp','split'),
- ('spe','spe'),
- ('spe','spellgood'),
- ('spelld','spelldump'),
- ('spelli','spellinfo'),
- ('spellr','spellrepall'),
- ('spellu','spellundo'),
- ('spellw','spellwrong'),
- ('spr','sprevious'),
- ('sre','srewind'),
- ('st','st'),
- ('st','stop'),
- ('sta','stag'),
- ('star','star'),
- ('star','startinsert'),
- ('start','start'),
- ('startg','startgreplace'),
- ('startr','startreplace'),
- ('stj','stjump'),
- ('stopi','stopinsert'),
- ('sts','stselect'),
- ('sun','sunhide'),
- ('sunme','sunme'),
- ('sunmenu','sunmenu'),
- ('sus','suspend'),
- ('sv','sview'),
- ('sw','swapname'),
- ('sy','sy'),
- ('syn','syn'),
- ('sync','sync'),
- ('syncbind','syncbind'),
- ('syntime','syntime'),
- ('t','t'),
- ('tN','tN'),
- ('tN','tNext'),
- ('ta','ta'),
- ('ta','tag'),
- ('tab','tab'),
- ('tabN','tabN'),
- ('tabN','tabNext'),
- ('tabc','tabclose'),
- ('tabd','tabdo'),
- ('tabe','tabedit'),
- ('tabf','tabfind'),
- ('tabfir','tabfirst'),
- ('tabl','tablast'),
- ('tabm','tabmove'),
- ('tabn','tabnext'),
- ('tabnew','tabnew'),
- ('tabo','tabonly'),
- ('tabp','tabprevious'),
- ('tabr','tabrewind'),
- ('tabs','tabs'),
- ('tags','tags'),
- ('tc','tcl'),
- ('tcld','tcldo'),
- ('tclf','tclfile'),
- ('te','tearoff'),
- ('tf','tfirst'),
- ('th','throw'),
- ('tj','tjump'),
- ('tl','tlast'),
- ('tm','tm'),
- ('tm','tmenu'),
- ('tn','tn'),
- ('tn','tnext'),
- ('to','topleft'),
- ('tp','tprevious'),
- ('tr','tr'),
- ('tr','trewind'),
- ('try','try'),
- ('ts','tselect'),
- ('tu','tu'),
- ('tu','tunmenu'),
- ('u','u'),
- ('u','undo'),
- ('un','un'),
- ('una','unabbreviate'),
- ('undoj','undojoin'),
- ('undol','undolist'),
- ('unh','unhide'),
- ('unl','unl'),
- ('unlo','unlockvar'),
- ('uns','unsilent'),
- ('up','update'),
- ('v','v'),
- ('ve','ve'),
- ('ve','version'),
- ('verb','verbose'),
- ('vert','vertical'),
- ('vi','vi'),
- ('vi','visual'),
- ('vie','view'),
- ('vim','vimgrep'),
- ('vimgrepa','vimgrepadd'),
- ('viu','viusage'),
- ('vmapc','vmapclear'),
- ('vne','vnew'),
- ('vs','vsplit'),
- ('w','w'),
- ('w','write'),
- ('wN','wNext'),
- ('wa','wall'),
- ('wh','while'),
- ('win','win'),
- ('win','winsize'),
- ('winc','wincmd'),
- ('windo','windo'),
- ('winp','winpos'),
- ('wn','wnext'),
- ('wp','wprevious'),
- ('wq','wq'),
- ('wqa','wqall'),
- ('ws','wsverb'),
- ('wundo','wundo'),
- ('wv','wviminfo'),
- ('x','x'),
- ('x','xit'),
- ('xa','xall'),
- ('xmapc','xmapclear'),
- ('xme','xme'),
- ('xmenu','xmenu'),
- ('xnoreme','xnoreme'),
- ('xnoremenu','xnoremenu'),
- ('xunme','xunme'),
- ('xunmenu','xunmenu'),
- ('xwininfo','xwininfo'),
- ('y','yank'),
- )
- return var
-command = _getcommand()
-
-def _getoption():
- var = (
- ('acd','acd'),
- ('ai','ai'),
- ('akm','akm'),
- ('al','al'),
- ('aleph','aleph'),
- ('allowrevins','allowrevins'),
- ('altkeymap','altkeymap'),
- ('ambiwidth','ambiwidth'),
- ('ambw','ambw'),
- ('anti','anti'),
- ('antialias','antialias'),
- ('ar','ar'),
- ('arab','arab'),
- ('arabic','arabic'),
- ('arabicshape','arabicshape'),
- ('ari','ari'),
- ('arshape','arshape'),
- ('autochdir','autochdir'),
- ('autoindent','autoindent'),
- ('autoread','autoread'),
- ('autowrite','autowrite'),
- ('autowriteall','autowriteall'),
- ('aw','aw'),
- ('awa','awa'),
- ('background','background'),
- ('backspace','backspace'),
- ('backup','backup'),
- ('backupcopy','backupcopy'),
- ('backupdir','backupdir'),
- ('backupext','backupext'),
- ('backupskip','backupskip'),
- ('balloondelay','balloondelay'),
- ('ballooneval','ballooneval'),
- ('balloonexpr','balloonexpr'),
- ('bdir','bdir'),
- ('bdlay','bdlay'),
- ('beval','beval'),
- ('bex','bex'),
- ('bexpr','bexpr'),
- ('bg','bg'),
- ('bh','bh'),
- ('bin','bin'),
- ('binary','binary'),
- ('biosk','biosk'),
- ('bioskey','bioskey'),
- ('bk','bk'),
- ('bkc','bkc'),
- ('bl','bl'),
- ('bomb','bomb'),
- ('breakat','breakat'),
- ('brk','brk'),
- ('browsedir','browsedir'),
- ('bs','bs'),
- ('bsdir','bsdir'),
- ('bsk','bsk'),
- ('bt','bt'),
- ('bufhidden','bufhidden'),
- ('buflisted','buflisted'),
- ('buftype','buftype'),
- ('casemap','casemap'),
- ('cb','cb'),
- ('cc','cc'),
- ('ccv','ccv'),
- ('cd','cd'),
- ('cdpath','cdpath'),
- ('cedit','cedit'),
- ('cf','cf'),
- ('cfu','cfu'),
- ('ch','ch'),
- ('charconvert','charconvert'),
- ('ci','ci'),
- ('cin','cin'),
- ('cindent','cindent'),
- ('cink','cink'),
- ('cinkeys','cinkeys'),
- ('cino','cino'),
- ('cinoptions','cinoptions'),
- ('cinw','cinw'),
- ('cinwords','cinwords'),
- ('clipboard','clipboard'),
- ('cmdheight','cmdheight'),
- ('cmdwinheight','cmdwinheight'),
- ('cmp','cmp'),
- ('cms','cms'),
- ('co','co'),
- ('cocu','cocu'),
- ('cole','cole'),
- ('colorcolumn','colorcolumn'),
- ('columns','columns'),
- ('com','com'),
- ('comments','comments'),
- ('commentstring','commentstring'),
- ('compatible','compatible'),
- ('complete','complete'),
- ('completefunc','completefunc'),
- ('completeopt','completeopt'),
- ('concealcursor','concealcursor'),
- ('conceallevel','conceallevel'),
- ('confirm','confirm'),
- ('consk','consk'),
- ('conskey','conskey'),
- ('copyindent','copyindent'),
- ('cot','cot'),
- ('cp','cp'),
- ('cpo','cpo'),
- ('cpoptions','cpoptions'),
- ('cpt','cpt'),
- ('crb','crb'),
- ('cryptmethod','cryptmethod'),
- ('cscopepathcomp','cscopepathcomp'),
- ('cscopeprg','cscopeprg'),
- ('cscopequickfix','cscopequickfix'),
- ('cscoperelative','cscoperelative'),
- ('cscopetag','cscopetag'),
- ('cscopetagorder','cscopetagorder'),
- ('cscopeverbose','cscopeverbose'),
- ('cspc','cspc'),
- ('csprg','csprg'),
- ('csqf','csqf'),
- ('csre','csre'),
- ('cst','cst'),
- ('csto','csto'),
- ('csverb','csverb'),
- ('cuc','cuc'),
- ('cul','cul'),
- ('cursorbind','cursorbind'),
- ('cursorcolumn','cursorcolumn'),
- ('cursorline','cursorline'),
- ('cwh','cwh'),
- ('debug','debug'),
- ('deco','deco'),
- ('def','def'),
- ('define','define'),
- ('delcombine','delcombine'),
- ('dex','dex'),
- ('dg','dg'),
- ('dict','dict'),
- ('dictionary','dictionary'),
- ('diff','diff'),
- ('diffexpr','diffexpr'),
- ('diffopt','diffopt'),
- ('digraph','digraph'),
- ('dip','dip'),
- ('dir','dir'),
- ('directory','directory'),
- ('display','display'),
- ('dy','dy'),
- ('ea','ea'),
- ('ead','ead'),
- ('eadirection','eadirection'),
- ('eb','eb'),
- ('ed','ed'),
- ('edcompatible','edcompatible'),
- ('ef','ef'),
- ('efm','efm'),
- ('ei','ei'),
- ('ek','ek'),
- ('enc','enc'),
- ('encoding','encoding'),
- ('endofline','endofline'),
- ('eol','eol'),
- ('ep','ep'),
- ('equalalways','equalalways'),
- ('equalprg','equalprg'),
- ('errorbells','errorbells'),
- ('errorfile','errorfile'),
- ('errorformat','errorformat'),
- ('esckeys','esckeys'),
- ('et','et'),
- ('eventignore','eventignore'),
- ('ex','ex'),
- ('expandtab','expandtab'),
- ('exrc','exrc'),
- ('fcl','fcl'),
- ('fcs','fcs'),
- ('fdc','fdc'),
- ('fde','fde'),
- ('fdi','fdi'),
- ('fdl','fdl'),
- ('fdls','fdls'),
- ('fdm','fdm'),
- ('fdn','fdn'),
- ('fdo','fdo'),
- ('fdt','fdt'),
- ('fen','fen'),
- ('fenc','fenc'),
- ('fencs','fencs'),
- ('fex','fex'),
- ('ff','ff'),
- ('ffs','ffs'),
- ('fic','fic'),
- ('fileencoding','fileencoding'),
- ('fileencodings','fileencodings'),
- ('fileformat','fileformat'),
- ('fileformats','fileformats'),
- ('fileignorecase','fileignorecase'),
- ('filetype','filetype'),
- ('fillchars','fillchars'),
- ('fk','fk'),
- ('fkmap','fkmap'),
- ('flp','flp'),
- ('fml','fml'),
- ('fmr','fmr'),
- ('fo','fo'),
- ('foldclose','foldclose'),
- ('foldcolumn','foldcolumn'),
- ('foldenable','foldenable'),
- ('foldexpr','foldexpr'),
- ('foldignore','foldignore'),
- ('foldlevel','foldlevel'),
- ('foldlevelstart','foldlevelstart'),
- ('foldmarker','foldmarker'),
- ('foldmethod','foldmethod'),
- ('foldminlines','foldminlines'),
- ('foldnestmax','foldnestmax'),
- ('foldopen','foldopen'),
- ('foldtext','foldtext'),
- ('formatexpr','formatexpr'),
- ('formatlistpat','formatlistpat'),
- ('formatoptions','formatoptions'),
- ('formatprg','formatprg'),
- ('fp','fp'),
- ('fs','fs'),
- ('fsync','fsync'),
- ('ft','ft'),
- ('gcr','gcr'),
- ('gd','gd'),
- ('gdefault','gdefault'),
- ('gfm','gfm'),
- ('gfn','gfn'),
- ('gfs','gfs'),
- ('gfw','gfw'),
- ('ghr','ghr'),
- ('go','go'),
- ('gp','gp'),
- ('grepformat','grepformat'),
- ('grepprg','grepprg'),
- ('gtl','gtl'),
- ('gtt','gtt'),
- ('guicursor','guicursor'),
- ('guifont','guifont'),
- ('guifontset','guifontset'),
- ('guifontwide','guifontwide'),
- ('guiheadroom','guiheadroom'),
- ('guioptions','guioptions'),
- ('guipty','guipty'),
- ('guitablabel','guitablabel'),
- ('guitabtooltip','guitabtooltip'),
- ('helpfile','helpfile'),
- ('helpheight','helpheight'),
- ('helplang','helplang'),
- ('hf','hf'),
- ('hh','hh'),
- ('hi','hi'),
- ('hid','hid'),
- ('hidden','hidden'),
- ('highlight','highlight'),
- ('history','history'),
- ('hk','hk'),
- ('hkmap','hkmap'),
- ('hkmapp','hkmapp'),
- ('hkp','hkp'),
- ('hl','hl'),
- ('hlg','hlg'),
- ('hls','hls'),
- ('hlsearch','hlsearch'),
- ('ic','ic'),
- ('icon','icon'),
- ('iconstring','iconstring'),
- ('ignorecase','ignorecase'),
- ('im','im'),
- ('imactivatefunc','imactivatefunc'),
- ('imactivatekey','imactivatekey'),
- ('imaf','imaf'),
- ('imak','imak'),
- ('imc','imc'),
- ('imcmdline','imcmdline'),
- ('imd','imd'),
- ('imdisable','imdisable'),
- ('imi','imi'),
- ('iminsert','iminsert'),
- ('ims','ims'),
- ('imsearch','imsearch'),
- ('imsf','imsf'),
- ('imstatusfunc','imstatusfunc'),
- ('inc','inc'),
- ('include','include'),
- ('includeexpr','includeexpr'),
- ('incsearch','incsearch'),
- ('inde','inde'),
- ('indentexpr','indentexpr'),
- ('indentkeys','indentkeys'),
- ('indk','indk'),
- ('inex','inex'),
- ('inf','inf'),
- ('infercase','infercase'),
- ('inoremap','inoremap'),
- ('insertmode','insertmode'),
- ('invacd','invacd'),
- ('invai','invai'),
- ('invakm','invakm'),
- ('invallowrevins','invallowrevins'),
- ('invaltkeymap','invaltkeymap'),
- ('invanti','invanti'),
- ('invantialias','invantialias'),
- ('invar','invar'),
- ('invarab','invarab'),
- ('invarabic','invarabic'),
- ('invarabicshape','invarabicshape'),
- ('invari','invari'),
- ('invarshape','invarshape'),
- ('invautochdir','invautochdir'),
- ('invautoindent','invautoindent'),
- ('invautoread','invautoread'),
- ('invautowrite','invautowrite'),
- ('invautowriteall','invautowriteall'),
- ('invaw','invaw'),
- ('invawa','invawa'),
- ('invbackup','invbackup'),
- ('invballooneval','invballooneval'),
- ('invbeval','invbeval'),
- ('invbin','invbin'),
- ('invbinary','invbinary'),
- ('invbiosk','invbiosk'),
- ('invbioskey','invbioskey'),
- ('invbk','invbk'),
- ('invbl','invbl'),
- ('invbomb','invbomb'),
- ('invbuflisted','invbuflisted'),
- ('invcf','invcf'),
- ('invci','invci'),
- ('invcin','invcin'),
- ('invcindent','invcindent'),
- ('invcompatible','invcompatible'),
- ('invconfirm','invconfirm'),
- ('invconsk','invconsk'),
- ('invconskey','invconskey'),
- ('invcopyindent','invcopyindent'),
- ('invcp','invcp'),
- ('invcrb','invcrb'),
- ('invcscoperelative','invcscoperelative'),
- ('invcscopetag','invcscopetag'),
- ('invcscopeverbose','invcscopeverbose'),
- ('invcsre','invcsre'),
- ('invcst','invcst'),
- ('invcsverb','invcsverb'),
- ('invcuc','invcuc'),
- ('invcul','invcul'),
- ('invcursorbind','invcursorbind'),
- ('invcursorcolumn','invcursorcolumn'),
- ('invcursorline','invcursorline'),
- ('invdeco','invdeco'),
- ('invdelcombine','invdelcombine'),
- ('invdg','invdg'),
- ('invdiff','invdiff'),
- ('invdigraph','invdigraph'),
- ('invea','invea'),
- ('inveb','inveb'),
- ('inved','inved'),
- ('invedcompatible','invedcompatible'),
- ('invek','invek'),
- ('invendofline','invendofline'),
- ('inveol','inveol'),
- ('invequalalways','invequalalways'),
- ('inverrorbells','inverrorbells'),
- ('invesckeys','invesckeys'),
- ('invet','invet'),
- ('invex','invex'),
- ('invexpandtab','invexpandtab'),
- ('invexrc','invexrc'),
- ('invfen','invfen'),
- ('invfic','invfic'),
- ('invfileignorecase','invfileignorecase'),
- ('invfk','invfk'),
- ('invfkmap','invfkmap'),
- ('invfoldenable','invfoldenable'),
- ('invgd','invgd'),
- ('invgdefault','invgdefault'),
- ('invguipty','invguipty'),
- ('invhid','invhid'),
- ('invhidden','invhidden'),
- ('invhk','invhk'),
- ('invhkmap','invhkmap'),
- ('invhkmapp','invhkmapp'),
- ('invhkp','invhkp'),
- ('invhls','invhls'),
- ('invhlsearch','invhlsearch'),
- ('invic','invic'),
- ('invicon','invicon'),
- ('invignorecase','invignorecase'),
- ('invim','invim'),
- ('invimc','invimc'),
- ('invimcmdline','invimcmdline'),
- ('invimd','invimd'),
- ('invimdisable','invimdisable'),
- ('invincsearch','invincsearch'),
- ('invinf','invinf'),
- ('invinfercase','invinfercase'),
- ('invinsertmode','invinsertmode'),
- ('invis','invis'),
- ('invjoinspaces','invjoinspaces'),
- ('invjs','invjs'),
- ('invlazyredraw','invlazyredraw'),
- ('invlbr','invlbr'),
- ('invlinebreak','invlinebreak'),
- ('invlisp','invlisp'),
- ('invlist','invlist'),
- ('invloadplugins','invloadplugins'),
- ('invlpl','invlpl'),
- ('invlz','invlz'),
- ('invma','invma'),
- ('invmacatsui','invmacatsui'),
- ('invmagic','invmagic'),
- ('invmh','invmh'),
- ('invml','invml'),
- ('invmod','invmod'),
- ('invmodeline','invmodeline'),
- ('invmodifiable','invmodifiable'),
- ('invmodified','invmodified'),
- ('invmore','invmore'),
- ('invmousef','invmousef'),
- ('invmousefocus','invmousefocus'),
- ('invmousehide','invmousehide'),
- ('invnu','invnu'),
- ('invnumber','invnumber'),
- ('invodev','invodev'),
- ('invopendevice','invopendevice'),
- ('invpaste','invpaste'),
- ('invpi','invpi'),
- ('invpreserveindent','invpreserveindent'),
- ('invpreviewwindow','invpreviewwindow'),
- ('invprompt','invprompt'),
- ('invpvw','invpvw'),
- ('invreadonly','invreadonly'),
- ('invrelativenumber','invrelativenumber'),
- ('invremap','invremap'),
- ('invrestorescreen','invrestorescreen'),
- ('invrevins','invrevins'),
- ('invri','invri'),
- ('invrightleft','invrightleft'),
- ('invrl','invrl'),
- ('invrnu','invrnu'),
- ('invro','invro'),
- ('invrs','invrs'),
- ('invru','invru'),
- ('invruler','invruler'),
- ('invsb','invsb'),
- ('invsc','invsc'),
- ('invscb','invscb'),
- ('invscrollbind','invscrollbind'),
- ('invscs','invscs'),
- ('invsecure','invsecure'),
- ('invsft','invsft'),
- ('invshellslash','invshellslash'),
- ('invshelltemp','invshelltemp'),
- ('invshiftround','invshiftround'),
- ('invshortname','invshortname'),
- ('invshowcmd','invshowcmd'),
- ('invshowfulltag','invshowfulltag'),
- ('invshowmatch','invshowmatch'),
- ('invshowmode','invshowmode'),
- ('invsi','invsi'),
- ('invsm','invsm'),
- ('invsmartcase','invsmartcase'),
- ('invsmartindent','invsmartindent'),
- ('invsmarttab','invsmarttab'),
- ('invsmd','invsmd'),
- ('invsn','invsn'),
- ('invsol','invsol'),
- ('invspell','invspell'),
- ('invsplitbelow','invsplitbelow'),
- ('invsplitright','invsplitright'),
- ('invspr','invspr'),
- ('invsr','invsr'),
- ('invssl','invssl'),
- ('invsta','invsta'),
- ('invstartofline','invstartofline'),
- ('invstmp','invstmp'),
- ('invswapfile','invswapfile'),
- ('invswf','invswf'),
- ('invta','invta'),
- ('invtagbsearch','invtagbsearch'),
- ('invtagrelative','invtagrelative'),
- ('invtagstack','invtagstack'),
- ('invtbi','invtbi'),
- ('invtbidi','invtbidi'),
- ('invtbs','invtbs'),
- ('invtermbidi','invtermbidi'),
- ('invterse','invterse'),
- ('invtextauto','invtextauto'),
- ('invtextmode','invtextmode'),
- ('invtf','invtf'),
- ('invtgst','invtgst'),
- ('invtildeop','invtildeop'),
- ('invtimeout','invtimeout'),
- ('invtitle','invtitle'),
- ('invto','invto'),
- ('invtop','invtop'),
- ('invtr','invtr'),
- ('invttimeout','invttimeout'),
- ('invttybuiltin','invttybuiltin'),
- ('invttyfast','invttyfast'),
- ('invtx','invtx'),
- ('invudf','invudf'),
- ('invundofile','invundofile'),
- ('invvb','invvb'),
- ('invvisualbell','invvisualbell'),
- ('invwa','invwa'),
- ('invwarn','invwarn'),
- ('invwb','invwb'),
- ('invweirdinvert','invweirdinvert'),
- ('invwfh','invwfh'),
- ('invwfw','invwfw'),
- ('invwic','invwic'),
- ('invwildignorecase','invwildignorecase'),
- ('invwildmenu','invwildmenu'),
- ('invwinfixheight','invwinfixheight'),
- ('invwinfixwidth','invwinfixwidth'),
- ('invwiv','invwiv'),
- ('invwmnu','invwmnu'),
- ('invwrap','invwrap'),
- ('invwrapscan','invwrapscan'),
- ('invwrite','invwrite'),
- ('invwriteany','invwriteany'),
- ('invwritebackup','invwritebackup'),
- ('invws','invws'),
- ('is','is'),
- ('isf','isf'),
- ('isfname','isfname'),
- ('isi','isi'),
- ('isident','isident'),
- ('isk','isk'),
- ('iskeyword','iskeyword'),
- ('isp','isp'),
- ('isprint','isprint'),
- ('joinspaces','joinspaces'),
- ('js','js'),
- ('key','key'),
- ('keymap','keymap'),
- ('keymodel','keymodel'),
- ('keywordprg','keywordprg'),
- ('km','km'),
- ('kmp','kmp'),
- ('kp','kp'),
- ('langmap','langmap'),
- ('langmenu','langmenu'),
- ('laststatus','laststatus'),
- ('lazyredraw','lazyredraw'),
- ('lbr','lbr'),
- ('lcs','lcs'),
- ('linebreak','linebreak'),
- ('lines','lines'),
- ('linespace','linespace'),
- ('lisp','lisp'),
- ('lispwords','lispwords'),
- ('list','list'),
- ('listchars','listchars'),
- ('lm','lm'),
- ('lmap','lmap'),
- ('loadplugins','loadplugins'),
- ('lpl','lpl'),
- ('ls','ls'),
- ('lsp','lsp'),
- ('lw','lw'),
- ('lz','lz'),
- ('ma','ma'),
- ('macatsui','macatsui'),
- ('magic','magic'),
- ('makeef','makeef'),
- ('makeprg','makeprg'),
- ('mat','mat'),
- ('matchpairs','matchpairs'),
- ('matchtime','matchtime'),
- ('maxcombine','maxcombine'),
- ('maxfuncdepth','maxfuncdepth'),
- ('maxmapdepth','maxmapdepth'),
- ('maxmem','maxmem'),
- ('maxmempattern','maxmempattern'),
- ('maxmemtot','maxmemtot'),
- ('mco','mco'),
- ('mef','mef'),
- ('menuitems','menuitems'),
- ('mfd','mfd'),
- ('mh','mh'),
- ('mis','mis'),
- ('mkspellmem','mkspellmem'),
- ('ml','ml'),
- ('mls','mls'),
- ('mm','mm'),
- ('mmd','mmd'),
- ('mmp','mmp'),
- ('mmt','mmt'),
- ('mod','mod'),
- ('modeline','modeline'),
- ('modelines','modelines'),
- ('modifiable','modifiable'),
- ('modified','modified'),
- ('more','more'),
- ('mouse','mouse'),
- ('mousef','mousef'),
- ('mousefocus','mousefocus'),
- ('mousehide','mousehide'),
- ('mousem','mousem'),
- ('mousemodel','mousemodel'),
- ('mouses','mouses'),
- ('mouseshape','mouseshape'),
- ('mouset','mouset'),
- ('mousetime','mousetime'),
- ('mp','mp'),
- ('mps','mps'),
- ('msm','msm'),
- ('mzq','mzq'),
- ('mzquantum','mzquantum'),
- ('nf','nf'),
- ('nnoremap','nnoremap'),
- ('noacd','noacd'),
- ('noai','noai'),
- ('noakm','noakm'),
- ('noallowrevins','noallowrevins'),
- ('noaltkeymap','noaltkeymap'),
- ('noanti','noanti'),
- ('noantialias','noantialias'),
- ('noar','noar'),
- ('noarab','noarab'),
- ('noarabic','noarabic'),
- ('noarabicshape','noarabicshape'),
- ('noari','noari'),
- ('noarshape','noarshape'),
- ('noautochdir','noautochdir'),
- ('noautoindent','noautoindent'),
- ('noautoread','noautoread'),
- ('noautowrite','noautowrite'),
- ('noautowriteall','noautowriteall'),
- ('noaw','noaw'),
- ('noawa','noawa'),
- ('nobackup','nobackup'),
- ('noballooneval','noballooneval'),
- ('nobeval','nobeval'),
- ('nobin','nobin'),
- ('nobinary','nobinary'),
- ('nobiosk','nobiosk'),
- ('nobioskey','nobioskey'),
- ('nobk','nobk'),
- ('nobl','nobl'),
- ('nobomb','nobomb'),
- ('nobuflisted','nobuflisted'),
- ('nocf','nocf'),
- ('noci','noci'),
- ('nocin','nocin'),
- ('nocindent','nocindent'),
- ('nocompatible','nocompatible'),
- ('noconfirm','noconfirm'),
- ('noconsk','noconsk'),
- ('noconskey','noconskey'),
- ('nocopyindent','nocopyindent'),
- ('nocp','nocp'),
- ('nocrb','nocrb'),
- ('nocscoperelative','nocscoperelative'),
- ('nocscopetag','nocscopetag'),
- ('nocscopeverbose','nocscopeverbose'),
- ('nocsre','nocsre'),
- ('nocst','nocst'),
- ('nocsverb','nocsverb'),
- ('nocuc','nocuc'),
- ('nocul','nocul'),
- ('nocursorbind','nocursorbind'),
- ('nocursorcolumn','nocursorcolumn'),
- ('nocursorline','nocursorline'),
- ('nodeco','nodeco'),
- ('nodelcombine','nodelcombine'),
- ('nodg','nodg'),
- ('nodiff','nodiff'),
- ('nodigraph','nodigraph'),
- ('noea','noea'),
- ('noeb','noeb'),
- ('noed','noed'),
- ('noedcompatible','noedcompatible'),
- ('noek','noek'),
- ('noendofline','noendofline'),
- ('noeol','noeol'),
- ('noequalalways','noequalalways'),
- ('noerrorbells','noerrorbells'),
- ('noesckeys','noesckeys'),
- ('noet','noet'),
- ('noex','noex'),
- ('noexpandtab','noexpandtab'),
- ('noexrc','noexrc'),
- ('nofen','nofen'),
- ('nofic','nofic'),
- ('nofileignorecase','nofileignorecase'),
- ('nofk','nofk'),
- ('nofkmap','nofkmap'),
- ('nofoldenable','nofoldenable'),
- ('nogd','nogd'),
- ('nogdefault','nogdefault'),
- ('noguipty','noguipty'),
- ('nohid','nohid'),
- ('nohidden','nohidden'),
- ('nohk','nohk'),
- ('nohkmap','nohkmap'),
- ('nohkmapp','nohkmapp'),
- ('nohkp','nohkp'),
- ('nohls','nohls'),
- ('nohlsearch','nohlsearch'),
- ('noic','noic'),
- ('noicon','noicon'),
- ('noignorecase','noignorecase'),
- ('noim','noim'),
- ('noimc','noimc'),
- ('noimcmdline','noimcmdline'),
- ('noimd','noimd'),
- ('noimdisable','noimdisable'),
- ('noincsearch','noincsearch'),
- ('noinf','noinf'),
- ('noinfercase','noinfercase'),
- ('noinsertmode','noinsertmode'),
- ('nois','nois'),
- ('nojoinspaces','nojoinspaces'),
- ('nojs','nojs'),
- ('nolazyredraw','nolazyredraw'),
- ('nolbr','nolbr'),
- ('nolinebreak','nolinebreak'),
- ('nolisp','nolisp'),
- ('nolist','nolist'),
- ('noloadplugins','noloadplugins'),
- ('nolpl','nolpl'),
- ('nolz','nolz'),
- ('noma','noma'),
- ('nomacatsui','nomacatsui'),
- ('nomagic','nomagic'),
- ('nomh','nomh'),
- ('noml','noml'),
- ('nomod','nomod'),
- ('nomodeline','nomodeline'),
- ('nomodifiable','nomodifiable'),
- ('nomodified','nomodified'),
- ('nomore','nomore'),
- ('nomousef','nomousef'),
- ('nomousefocus','nomousefocus'),
- ('nomousehide','nomousehide'),
- ('nonu','nonu'),
- ('nonumber','nonumber'),
- ('noodev','noodev'),
- ('noopendevice','noopendevice'),
- ('nopaste','nopaste'),
- ('nopi','nopi'),
- ('nopreserveindent','nopreserveindent'),
- ('nopreviewwindow','nopreviewwindow'),
- ('noprompt','noprompt'),
- ('nopvw','nopvw'),
- ('noreadonly','noreadonly'),
- ('norelativenumber','norelativenumber'),
- ('noremap','noremap'),
- ('norestorescreen','norestorescreen'),
- ('norevins','norevins'),
- ('nori','nori'),
- ('norightleft','norightleft'),
- ('norl','norl'),
- ('nornu','nornu'),
- ('noro','noro'),
- ('nors','nors'),
- ('noru','noru'),
- ('noruler','noruler'),
- ('nosb','nosb'),
- ('nosc','nosc'),
- ('noscb','noscb'),
- ('noscrollbind','noscrollbind'),
- ('noscs','noscs'),
- ('nosecure','nosecure'),
- ('nosft','nosft'),
- ('noshellslash','noshellslash'),
- ('noshelltemp','noshelltemp'),
- ('noshiftround','noshiftround'),
- ('noshortname','noshortname'),
- ('noshowcmd','noshowcmd'),
- ('noshowfulltag','noshowfulltag'),
- ('noshowmatch','noshowmatch'),
- ('noshowmode','noshowmode'),
- ('nosi','nosi'),
- ('nosm','nosm'),
- ('nosmartcase','nosmartcase'),
- ('nosmartindent','nosmartindent'),
- ('nosmarttab','nosmarttab'),
- ('nosmd','nosmd'),
- ('nosn','nosn'),
- ('nosol','nosol'),
- ('nospell','nospell'),
- ('nosplitbelow','nosplitbelow'),
- ('nosplitright','nosplitright'),
- ('nospr','nospr'),
- ('nosr','nosr'),
- ('nossl','nossl'),
- ('nosta','nosta'),
- ('nostartofline','nostartofline'),
- ('nostmp','nostmp'),
- ('noswapfile','noswapfile'),
- ('noswf','noswf'),
- ('nota','nota'),
- ('notagbsearch','notagbsearch'),
- ('notagrelative','notagrelative'),
- ('notagstack','notagstack'),
- ('notbi','notbi'),
- ('notbidi','notbidi'),
- ('notbs','notbs'),
- ('notermbidi','notermbidi'),
- ('noterse','noterse'),
- ('notextauto','notextauto'),
- ('notextmode','notextmode'),
- ('notf','notf'),
- ('notgst','notgst'),
- ('notildeop','notildeop'),
- ('notimeout','notimeout'),
- ('notitle','notitle'),
- ('noto','noto'),
- ('notop','notop'),
- ('notr','notr'),
- ('nottimeout','nottimeout'),
- ('nottybuiltin','nottybuiltin'),
- ('nottyfast','nottyfast'),
- ('notx','notx'),
- ('noudf','noudf'),
- ('noundofile','noundofile'),
- ('novb','novb'),
- ('novisualbell','novisualbell'),
- ('nowa','nowa'),
- ('nowarn','nowarn'),
- ('nowb','nowb'),
- ('noweirdinvert','noweirdinvert'),
- ('nowfh','nowfh'),
- ('nowfw','nowfw'),
- ('nowic','nowic'),
- ('nowildignorecase','nowildignorecase'),
- ('nowildmenu','nowildmenu'),
- ('nowinfixheight','nowinfixheight'),
- ('nowinfixwidth','nowinfixwidth'),
- ('nowiv','nowiv'),
- ('nowmnu','nowmnu'),
- ('nowrap','nowrap'),
- ('nowrapscan','nowrapscan'),
- ('nowrite','nowrite'),
- ('nowriteany','nowriteany'),
- ('nowritebackup','nowritebackup'),
- ('nows','nows'),
- ('nrformats','nrformats'),
- ('nu','nu'),
- ('number','number'),
- ('numberwidth','numberwidth'),
- ('nuw','nuw'),
- ('odev','odev'),
- ('oft','oft'),
- ('ofu','ofu'),
- ('omnifunc','omnifunc'),
- ('opendevice','opendevice'),
- ('operatorfunc','operatorfunc'),
- ('opfunc','opfunc'),
- ('osfiletype','osfiletype'),
- ('pa','pa'),
- ('para','para'),
- ('paragraphs','paragraphs'),
- ('paste','paste'),
- ('pastetoggle','pastetoggle'),
- ('patchexpr','patchexpr'),
- ('patchmode','patchmode'),
- ('path','path'),
- ('pdev','pdev'),
- ('penc','penc'),
- ('pex','pex'),
- ('pexpr','pexpr'),
- ('pfn','pfn'),
- ('ph','ph'),
- ('pheader','pheader'),
- ('pi','pi'),
- ('pm','pm'),
- ('pmbcs','pmbcs'),
- ('pmbfn','pmbfn'),
- ('popt','popt'),
- ('preserveindent','preserveindent'),
- ('previewheight','previewheight'),
- ('previewwindow','previewwindow'),
- ('printdevice','printdevice'),
- ('printencoding','printencoding'),
- ('printexpr','printexpr'),
- ('printfont','printfont'),
- ('printheader','printheader'),
- ('printmbcharset','printmbcharset'),
- ('printmbfont','printmbfont'),
- ('printoptions','printoptions'),
- ('prompt','prompt'),
- ('pt','pt'),
- ('pumheight','pumheight'),
- ('pvh','pvh'),
- ('pvw','pvw'),
- ('qe','qe'),
- ('quoteescape','quoteescape'),
- ('rdt','rdt'),
- ('re','re'),
- ('readonly','readonly'),
- ('redrawtime','redrawtime'),
- ('regexpengine','regexpengine'),
- ('relativenumber','relativenumber'),
- ('remap','remap'),
- ('report','report'),
- ('restorescreen','restorescreen'),
- ('revins','revins'),
- ('ri','ri'),
- ('rightleft','rightleft'),
- ('rightleftcmd','rightleftcmd'),
- ('rl','rl'),
- ('rlc','rlc'),
- ('rnu','rnu'),
- ('ro','ro'),
- ('rs','rs'),
- ('rtp','rtp'),
- ('ru','ru'),
- ('ruf','ruf'),
- ('ruler','ruler'),
- ('rulerformat','rulerformat'),
- ('runtimepath','runtimepath'),
- ('sb','sb'),
- ('sbo','sbo'),
- ('sbr','sbr'),
- ('sc','sc'),
- ('scb','scb'),
- ('scr','scr'),
- ('scroll','scroll'),
- ('scrollbind','scrollbind'),
- ('scrolljump','scrolljump'),
- ('scrolloff','scrolloff'),
- ('scrollopt','scrollopt'),
- ('scs','scs'),
- ('sect','sect'),
- ('sections','sections'),
- ('secure','secure'),
- ('sel','sel'),
- ('selection','selection'),
- ('selectmode','selectmode'),
- ('sessionoptions','sessionoptions'),
- ('sft','sft'),
- ('sh','sh'),
- ('shcf','shcf'),
- ('shell','shell'),
- ('shellcmdflag','shellcmdflag'),
- ('shellpipe','shellpipe'),
- ('shellquote','shellquote'),
- ('shellredir','shellredir'),
- ('shellslash','shellslash'),
- ('shelltemp','shelltemp'),
- ('shelltype','shelltype'),
- ('shellxescape','shellxescape'),
- ('shellxquote','shellxquote'),
- ('shiftround','shiftround'),
- ('shiftwidth','shiftwidth'),
- ('shm','shm'),
- ('shortmess','shortmess'),
- ('shortname','shortname'),
- ('showbreak','showbreak'),
- ('showcmd','showcmd'),
- ('showfulltag','showfulltag'),
- ('showmatch','showmatch'),
- ('showmode','showmode'),
- ('showtabline','showtabline'),
- ('shq','shq'),
- ('si','si'),
- ('sidescroll','sidescroll'),
- ('sidescrolloff','sidescrolloff'),
- ('siso','siso'),
- ('sj','sj'),
- ('slm','slm'),
- ('sm','sm'),
- ('smartcase','smartcase'),
- ('smartindent','smartindent'),
- ('smarttab','smarttab'),
- ('smc','smc'),
- ('smd','smd'),
- ('sn','sn'),
- ('so','so'),
- ('softtabstop','softtabstop'),
- ('sol','sol'),
- ('sp','sp'),
- ('spc','spc'),
- ('spell','spell'),
- ('spellcapcheck','spellcapcheck'),
- ('spellfile','spellfile'),
- ('spelllang','spelllang'),
- ('spellsuggest','spellsuggest'),
- ('spf','spf'),
- ('spl','spl'),
- ('splitbelow','splitbelow'),
- ('splitright','splitright'),
- ('spr','spr'),
- ('sps','sps'),
- ('sr','sr'),
- ('srr','srr'),
- ('ss','ss'),
- ('ssl','ssl'),
- ('ssop','ssop'),
- ('st','st'),
- ('sta','sta'),
- ('stal','stal'),
- ('startofline','startofline'),
- ('statusline','statusline'),
- ('stl','stl'),
- ('stmp','stmp'),
- ('sts','sts'),
- ('su','su'),
- ('sua','sua'),
- ('suffixes','suffixes'),
- ('suffixesadd','suffixesadd'),
- ('sw','sw'),
- ('swapfile','swapfile'),
- ('swapsync','swapsync'),
- ('swb','swb'),
- ('swf','swf'),
- ('switchbuf','switchbuf'),
- ('sws','sws'),
- ('sxe','sxe'),
- ('sxq','sxq'),
- ('syn','syn'),
- ('synmaxcol','synmaxcol'),
- ('syntax','syntax'),
- ('t_AB','t_AB'),
- ('t_AF','t_AF'),
- ('t_AL','t_AL'),
- ('t_CS','t_CS'),
- ('t_CV','t_CV'),
- ('t_Ce','t_Ce'),
- ('t_Co','t_Co'),
- ('t_Cs','t_Cs'),
- ('t_DL','t_DL'),
- ('t_EI','t_EI'),
- ('t_F1','t_F1'),
- ('t_F2','t_F2'),
- ('t_F3','t_F3'),
- ('t_F4','t_F4'),
- ('t_F5','t_F5'),
- ('t_F6','t_F6'),
- ('t_F7','t_F7'),
- ('t_F8','t_F8'),
- ('t_F9','t_F9'),
- ('t_IE','t_IE'),
- ('t_IS','t_IS'),
- ('t_K1','t_K1'),
- ('t_K3','t_K3'),
- ('t_K4','t_K4'),
- ('t_K5','t_K5'),
- ('t_K6','t_K6'),
- ('t_K7','t_K7'),
- ('t_K8','t_K8'),
- ('t_K9','t_K9'),
- ('t_KA','t_KA'),
- ('t_KB','t_KB'),
- ('t_KC','t_KC'),
- ('t_KD','t_KD'),
- ('t_KE','t_KE'),
- ('t_KF','t_KF'),
- ('t_KG','t_KG'),
- ('t_KH','t_KH'),
- ('t_KI','t_KI'),
- ('t_KJ','t_KJ'),
- ('t_KK','t_KK'),
- ('t_KL','t_KL'),
- ('t_RI','t_RI'),
- ('t_RV','t_RV'),
- ('t_SI','t_SI'),
- ('t_Sb','t_Sb'),
- ('t_Sf','t_Sf'),
- ('t_WP','t_WP'),
- ('t_WS','t_WS'),
- ('t_ZH','t_ZH'),
- ('t_ZR','t_ZR'),
- ('t_al','t_al'),
- ('t_bc','t_bc'),
- ('t_cd','t_cd'),
- ('t_ce','t_ce'),
- ('t_cl','t_cl'),
- ('t_cm','t_cm'),
- ('t_cs','t_cs'),
- ('t_da','t_da'),
- ('t_db','t_db'),
- ('t_dl','t_dl'),
- ('t_fs','t_fs'),
- ('t_k1','t_k1'),
- ('t_k2','t_k2'),
- ('t_k3','t_k3'),
- ('t_k4','t_k4'),
- ('t_k5','t_k5'),
- ('t_k6','t_k6'),
- ('t_k7','t_k7'),
- ('t_k8','t_k8'),
- ('t_k9','t_k9'),
- ('t_kB','t_kB'),
- ('t_kD','t_kD'),
- ('t_kI','t_kI'),
- ('t_kN','t_kN'),
- ('t_kP','t_kP'),
- ('t_kb','t_kb'),
- ('t_kd','t_kd'),
- ('t_ke','t_ke'),
- ('t_kh','t_kh'),
- ('t_kl','t_kl'),
- ('t_kr','t_kr'),
- ('t_ks','t_ks'),
- ('t_ku','t_ku'),
- ('t_le','t_le'),
- ('t_mb','t_mb'),
- ('t_md','t_md'),
- ('t_me','t_me'),
- ('t_mr','t_mr'),
- ('t_ms','t_ms'),
- ('t_nd','t_nd'),
- ('t_op','t_op'),
- ('t_se','t_se'),
- ('t_so','t_so'),
- ('t_sr','t_sr'),
- ('t_te','t_te'),
- ('t_ti','t_ti'),
- ('t_ts','t_ts'),
- ('t_u7','t_u7'),
- ('t_ue','t_ue'),
- ('t_us','t_us'),
- ('t_ut','t_ut'),
- ('t_vb','t_vb'),
- ('t_ve','t_ve'),
- ('t_vi','t_vi'),
- ('t_vs','t_vs'),
- ('t_xs','t_xs'),
- ('ta','ta'),
- ('tabline','tabline'),
- ('tabpagemax','tabpagemax'),
- ('tabstop','tabstop'),
- ('tag','tag'),
- ('tagbsearch','tagbsearch'),
- ('taglength','taglength'),
- ('tagrelative','tagrelative'),
- ('tags','tags'),
- ('tagstack','tagstack'),
- ('tal','tal'),
- ('tb','tb'),
- ('tbi','tbi'),
- ('tbidi','tbidi'),
- ('tbis','tbis'),
- ('tbs','tbs'),
- ('tenc','tenc'),
- ('term','term'),
- ('termbidi','termbidi'),
- ('termencoding','termencoding'),
- ('terse','terse'),
- ('textauto','textauto'),
- ('textmode','textmode'),
- ('textwidth','textwidth'),
- ('tf','tf'),
- ('tgst','tgst'),
- ('thesaurus','thesaurus'),
- ('tildeop','tildeop'),
- ('timeout','timeout'),
- ('timeoutlen','timeoutlen'),
- ('title','title'),
- ('titlelen','titlelen'),
- ('titleold','titleold'),
- ('titlestring','titlestring'),
- ('tl','tl'),
- ('tm','tm'),
- ('to','to'),
- ('toolbar','toolbar'),
- ('toolbariconsize','toolbariconsize'),
- ('top','top'),
- ('tpm','tpm'),
- ('tr','tr'),
- ('ts','ts'),
- ('tsl','tsl'),
- ('tsr','tsr'),
- ('ttimeout','ttimeout'),
- ('ttimeoutlen','ttimeoutlen'),
- ('ttm','ttm'),
- ('tty','tty'),
- ('ttybuiltin','ttybuiltin'),
- ('ttyfast','ttyfast'),
- ('ttym','ttym'),
- ('ttymouse','ttymouse'),
- ('ttyscroll','ttyscroll'),
- ('ttytype','ttytype'),
- ('tw','tw'),
- ('tx','tx'),
- ('uc','uc'),
- ('udf','udf'),
- ('udir','udir'),
- ('ul','ul'),
- ('undodir','undodir'),
- ('undofile','undofile'),
- ('undolevels','undolevels'),
- ('undoreload','undoreload'),
- ('updatecount','updatecount'),
- ('updatetime','updatetime'),
- ('ur','ur'),
- ('ut','ut'),
- ('vb','vb'),
- ('vbs','vbs'),
- ('vdir','vdir'),
- ('ve','ve'),
- ('verbose','verbose'),
- ('verbosefile','verbosefile'),
- ('vfile','vfile'),
- ('vi','vi'),
- ('viewdir','viewdir'),
- ('viewoptions','viewoptions'),
- ('viminfo','viminfo'),
- ('virtualedit','virtualedit'),
- ('visualbell','visualbell'),
- ('vnoremap','vnoremap'),
- ('vop','vop'),
- ('wa','wa'),
- ('wak','wak'),
- ('warn','warn'),
- ('wb','wb'),
- ('wc','wc'),
- ('wcm','wcm'),
- ('wd','wd'),
- ('weirdinvert','weirdinvert'),
- ('wfh','wfh'),
- ('wfw','wfw'),
- ('wh','wh'),
- ('whichwrap','whichwrap'),
- ('wi','wi'),
- ('wic','wic'),
- ('wig','wig'),
- ('wildchar','wildchar'),
- ('wildcharm','wildcharm'),
- ('wildignore','wildignore'),
- ('wildignorecase','wildignorecase'),
- ('wildmenu','wildmenu'),
- ('wildmode','wildmode'),
- ('wildoptions','wildoptions'),
- ('wim','wim'),
- ('winaltkeys','winaltkeys'),
- ('window','window'),
- ('winfixheight','winfixheight'),
- ('winfixwidth','winfixwidth'),
- ('winheight','winheight'),
- ('winminheight','winminheight'),
- ('winminwidth','winminwidth'),
- ('winwidth','winwidth'),
- ('wiv','wiv'),
- ('wiw','wiw'),
- ('wm','wm'),
- ('wmh','wmh'),
- ('wmnu','wmnu'),
- ('wmw','wmw'),
- ('wop','wop'),
- ('wrap','wrap'),
- ('wrapmargin','wrapmargin'),
- ('wrapscan','wrapscan'),
- ('write','write'),
- ('writeany','writeany'),
- ('writebackup','writebackup'),
- ('writedelay','writedelay'),
- ('ws','ws'),
- ('ww','ww'),
- )
- return var
-option = _getoption()
-
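Note on the tables deleted above: _getauto, _getcommand and _getoption each return pairs of a Vim abbreviation and its full name. A minimal sketch of how such pairs could be expanded into a prefix lookup follows; build_lookup and command_pairs are hypothetical illustrations, not part of the deleted module or of the Vim lexer's actual mechanism.

# Hypothetical helper: expand ('bn', 'bnext')-style pairs into every
# prefix Vim accepts, each mapped to the canonical full name.
def build_lookup(pairs):
    lookup = {}
    for short, full in pairs:
        for end in range(len(short), len(full) + 1):
            lookup.setdefault(full[:end], full)
    return lookup

command_pairs = (('bn', 'bnext'), ('bp', 'bprevious'), ('w', 'write'))
commands = build_lookup(command_pairs)
print(commands['bn'], commands['bprev'])   # bnext bprevious
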
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/actionscript.py b/venv/lib/python3.11/site-packages/pygments/lexers/actionscript.py
deleted file mode 100644
index e0e94a5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/actionscript.py
+++ /dev/null
@@ -1,245 +0,0 @@
-"""
- pygments.lexers.actionscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for ActionScript and MXML.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, using, this, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
-
-
-class ActionScriptLexer(RegexLexer):
- """
- For ActionScript source code.
-
- .. versionadded:: 0.9
- """
-
- name = 'ActionScript'
- aliases = ['actionscript', 'as']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript', 'text/x-actionscript',
- 'text/actionscript']
-
- flags = re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex),
- (r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
- (r'[{}\[\]();.]+', Punctuation),
- (words((
- 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
- 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
- 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
- 'switch'), suffix=r'\b'),
- Keyword),
- (words((
- 'class', 'public', 'final', 'internal', 'native', 'override', 'private',
- 'protected', 'static', 'import', 'extends', 'implements', 'interface',
- 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
- 'namespace', 'package', 'set'), suffix=r'\b'),
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
- Keyword.Constant),
- (words((
- 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
- 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
- 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
- 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
- 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
- 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
- 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
- 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
- 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject',
- 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
- 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
- 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
- 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
- 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
- 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
-            'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput',
- 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
- 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
- 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
- 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
- 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
- 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
- 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
- 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
- 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
- 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
- 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
- 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
- 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
- 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
- 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
- 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
- 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
- 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
- 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
- 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
- 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError',
- 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
- 'XMLUI'), suffix=r'\b'),
- Name.Builtin),
- (words((
- 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
- 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
- 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
- 'unescape'), suffix=r'\b'),
- Name.Function),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
- def analyse_text(text):
- """This is only used to disambiguate between ActionScript and
- ActionScript3. We return 0 here; the ActionScript3 lexer will match
- AS3 variable definitions and that will hopefully suffice."""
- return 0
-
-class ActionScript3Lexer(RegexLexer):
- """
- For ActionScript 3 source code.
-
- .. versionadded:: 0.11
- """
-
- name = 'ActionScript 3'
- url = 'https://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/index.html'
- aliases = ['actionscript3', 'as3']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
- 'text/actionscript3']
-
- identifier = r'[$a-zA-Z_]\w*'
- typeidentifier = identifier + r'(?:\.<\w+>)?'
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(function\s+)(' + identifier + r')(\s*)(\()',
- bygroups(Keyword.Declaration, Name.Function, Text, Operator),
- 'funcparams'),
- (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r')',
- bygroups(Keyword.Declaration, Whitespace, Name, Whitespace, Punctuation, Whitespace,
- Keyword.Type)),
- (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
- bygroups(Keyword, Whitespace, Name.Namespace, Whitespace)),
- (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
- bygroups(Keyword, Whitespace, Keyword.Type, Whitespace, Operator)),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\[^\\]|[^\\\n])*/[gisx]*', String.Regex),
- (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
- (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
- r'switch|import|include|as|is)\b',
- Keyword),
- (r'(class|public|final|internal|native|override|private|protected|'
- r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b',
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
- r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
- r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
- r'unescape)\b', Name.Function),
- (identifier, Name),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
- ],
- 'funcparams': [
- (r'\s+', Whitespace),
- (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r'|\*)(\s*)',
- bygroups(Whitespace, Punctuation, Name, Whitespace, Operator, Whitespace,
- Keyword.Type, Whitespace), 'defval'),
- (r'\)', Operator, 'type')
- ],
- 'type': [
- (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
- bygroups(Whitespace, Operator, Whitespace, Keyword.Type), '#pop:2'),
- (r'\s+', Text, '#pop:2'),
- default('#pop:2')
- ],
- 'defval': [
- (r'(=)(\s*)([^(),]+)(\s*)(,?)',
- bygroups(Operator, Whitespace, using(this), Whitespace, Operator), '#pop'),
- (r',', Operator, '#pop'),
- default('#pop')
- ]
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
- return 0
-
-
-class MxmlLexer(RegexLexer):
- """
- For MXML markup.
- Nested AS3 in <script> tags is highlighted by the appropriate lexer.
-
- .. versionadded:: 1.1
- """
- flags = re.MULTILINE | re.DOTALL
- name = 'MXML'
- aliases = ['mxml']
- filenames = ['*.mxml']
-    mimetypes = ['text/xml', 'application/xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
- bygroups(String, using(ActionScript3Lexer), String)),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Whitespace),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- (r'\s+', Whitespace),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
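The ActionScript lexers removed above are normally reached through Pygments' public API rather than imported directly; a minimal usage sketch, assuming a Pygments installation that still ships them:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

# 'as3' is one of the aliases declared by ActionScript3Lexer above.
as3_source = 'var greeting:String = "Hello";'
print(highlight(as3_source, get_lexer_by_name('as3'), TerminalFormatter()))
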
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ada.py b/venv/lib/python3.11/site-packages/pygments/lexers/ada.py
deleted file mode 100644
index 6a5e644..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ada.py
+++ /dev/null
@@ -1,144 +0,0 @@
-"""
- pygments.lexers.ada
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Ada family languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words, using, this, \
- default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.lexers._ada_builtins import KEYWORD_LIST, BUILTIN_LIST
-
-__all__ = ['AdaLexer']
-
-
-class AdaLexer(RegexLexer):
- """
- For Ada source code.
-
- .. versionadded:: 1.3
- """
-
- name = 'Ada'
- aliases = ['ada', 'ada95', 'ada2005']
- filenames = ['*.adb', '*.ads', '*.ada']
- mimetypes = ['text/x-ada']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'--.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
- (r'(subtype|type)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
- (r'task|protected', Keyword.Declaration),
- (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
- (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
- (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
- Comment.Preproc)),
- (r'(true|false|null)\b', Keyword.Constant),
- # builtin types
- (words(BUILTIN_LIST, suffix=r'\b'), Keyword.Type),
-            (r'(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b', Operator.Word),
- (r'generic|private', Keyword.Declaration),
- (r'package', Keyword.Declaration, 'package'),
- (r'array\b', Keyword.Reserved, 'array_def'),
- (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'(\w+)(\s*)(:)(\s*)(constant)',
- bygroups(Name.Constant, Text, Punctuation, Text,
- Keyword.Reserved)),
- (r'<<\w+>>', Name.Label),
- (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
- bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
- # keywords
- (words(KEYWORD_LIST, prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- (r'"[^"]*"', String),
- include('attribute'),
- include('numbers'),
- (r"'[^']'", String.Character),
- (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"(<>|=>|:=|@|[\[\]]|[()|:;,.'])", Punctuation),
- (r'[*<>+=/&-]', Operator),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'[0-9_]+#[0-9a-f_\.]+#', Number.Hex),
- (r'[0-9_]+\.[0-9_]*', Number.Float),
- (r'[0-9_]+', Number.Integer),
- ],
- 'attribute': [
- (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
- ],
- 'subprogram': [
- (r'\(', Punctuation, ('#pop', 'formal_part')),
- (r';', Punctuation, '#pop'),
- (r'is\b', Keyword.Reserved, '#pop'),
- (r'"[^"]+"|\w+', Name.Function),
- include('root'),
- ],
- 'end': [
- ('(if|case|record|loop|select)', Keyword.Reserved),
- (r'"[^"]+"|[\w.]+', Name.Function),
- (r'\s+', Text),
- (';', Punctuation, '#pop'),
- ],
- 'type_def': [
- (r';', Punctuation, '#pop'),
- (r'\(', Punctuation, 'formal_part'),
- (r'\[', Punctuation, 'formal_part'),
- (r'with|and|use', Keyword.Reserved),
- (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
- (r'record\b', Keyword.Reserved, ('record_def')),
- (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
- include('root'),
- ],
- 'array_def': [
- (r';', Punctuation, '#pop'),
- (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)),
- include('root'),
- ],
- 'record_def': [
- (r'end record', Keyword.Reserved, '#pop'),
- include('root'),
- ],
- 'import': [
- # TODO: use Name.Namespace if appropriate. This needs
-            # work to distinguish imports from aspects.
- (r'[\w.]+', Name, '#pop'),
- default('#pop'),
- ],
- 'formal_part': [
- (r'\)', Punctuation, '#pop'),
- (r'\]', Punctuation, '#pop'),
- (r'\w+', Name.Variable),
- (r',|:[^=]', Punctuation),
- (r'(in|not|null|out|access)\b', Keyword.Reserved),
- include('root'),
- ],
- 'package': [
- ('body', Keyword.Declaration),
- (r'is\s+new|renames', Keyword.Reserved),
- ('is', Keyword.Reserved, '#pop'),
- (';', Punctuation, '#pop'),
- (r'\(', Punctuation, 'package_instantiation'),
- (r'([\w.]+)', Name.Class),
- include('root'),
- ],
- 'package_instantiation': [
- (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)),
- (r'[\w.\'"]', Text),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- }
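The AdaLexer rule tables above drive an ordinary Pygments token stream that can be inspected directly; a short sketch, again assuming Pygments is installed:

from pygments.lexers import AdaLexer

ada_source = 'procedure Hello is\nbegin\n   null;\nend Hello;\n'

# get_tokens() yields (token_type, value) pairs produced by the
# rule tables shown in the deleted file above.
for token_type, value in AdaLexer().get_tokens(ada_source):
    print(token_type, repr(value))
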
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/agile.py b/venv/lib/python3.11/site-packages/pygments/lexers/agile.py
deleted file mode 100644
index c0c1a45..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/agile.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""
- pygments.lexers.agile
- ~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.lisp import SchemeLexer
-from pygments.lexers.jvm import IokeLexer, ClojureLexer
-from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
- PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
-from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
-from pygments.lexers.perl import PerlLexer, Perl6Lexer
-from pygments.lexers.d import CrocLexer, MiniDLexer
-from pygments.lexers.iolang import IoLexer
-from pygments.lexers.tcl import TclLexer
-from pygments.lexers.factor import FactorLexer
-from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
-
-__all__ = []
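The agile module deleted above is only a backwards-compatibility shim: it re-exports lexers that now live in dedicated modules, so old import paths keep resolving to the same classes. A quick check, assuming Pygments is installed:

from pygments.lexers.agile import PythonLexer as CompatPythonLexer
from pygments.lexers.python import PythonLexer

# The shim re-exports the class object itself, so both paths are identical.
assert CompatPythonLexer is PythonLexer
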
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/algebra.py b/venv/lib/python3.11/site-packages/pygments/lexers/algebra.py
deleted file mode 100644
index 95f1754..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/algebra.py
+++ /dev/null
@@ -1,302 +0,0 @@
-"""
- pygments.lexers.algebra
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for computer algebra systems.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-__all__ = ['GAPLexer', 'GAPConsoleLexer', 'MathematicaLexer', 'MuPADLexer',
- 'BCLexer']
-
-
-class GAPLexer(RegexLexer):
- """
- For GAP source code.
-
- .. versionadded:: 2.0
- """
- name = 'GAP'
- url = 'https://www.gap-system.org'
- aliases = ['gap']
- filenames = ['*.g', '*.gd', '*.gi', '*.gap']
-
- tokens = {
- 'root': [
- (r'#.*$', Comment.Single),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- if|then|elif|else|fi|
- for|while|do|od|
- repeat|until|
- break|continue|
- function|local|return|end|
- rec|
- quit|QUIT|
- IsBound|Unbind|
- TryNextMethod|
- Info|Assert
- )\b''', Keyword),
- (r'''(?x)\b(?:
- true|false|fail|infinity
- )\b''',
- Name.Constant),
- (r'''(?x)\b(?:
- (Declare|Install)([A-Z][A-Za-z]+)|
- BindGlobal|BIND_GLOBAL
- )\b''',
- Name.Builtin),
- (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
- (r'''(?x)\b(?:
- and|or|not|mod|in
- )\b''',
- Operator.Word),
- (r'''(?x)
- (?:\w+|`[^`]*`)
- (?:::\w+|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'.', Text)
- ],
- }
-
- def analyse_text(text):
- score = 0.0
-
- # Declaration part
- if re.search(
- r"(InstallTrueMethod|Declare(Attribute|Category|Filter|Operation" +
- r"|GlobalFunction|Synonym|SynonymAttr|Property))", text
- ):
- score += 0.7
-
- # Implementation part
- if re.search(
- r"(DeclareRepresentation|Install(GlobalFunction|Method|" +
- r"ImmediateMethod|OtherMethod)|New(Family|Type)|Objectify)", text
- ):
- score += 0.7
-
- return min(score, 1.0)
-
-
-class GAPConsoleLexer(Lexer):
- """
- For GAP console sessions. Modeled after JuliaConsoleLexer.
-
- .. versionadded:: 2.14
- """
- name = 'GAP session'
- aliases = ['gap-console', 'gap-repl']
- filenames = ['*.tst']
-
- def get_tokens_unprocessed(self, text):
- gaplexer = GAPLexer(**self.options)
- start = 0
- curcode = ''
- insertions = []
- output = False
- error = False
-
- for line in text.splitlines(keepends=True):
- if line.startswith('gap> ') or line.startswith('brk> '):
- insertions.append((len(curcode), [(0, Generic.Prompt, line[:5])]))
- curcode += line[5:]
- output = False
- error = False
- elif not output and line.startswith('> '):
- insertions.append((len(curcode), [(0, Generic.Prompt, line[:2])]))
- curcode += line[2:]
- else:
- if curcode:
- yield from do_insertions(
- insertions, gaplexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- if line.startswith('Error, ') or error:
- yield start, Generic.Error, line
- error = True
- else:
- yield start, Generic.Output, line
- output = True
- start += len(line)
-
- if curcode:
- yield from do_insertions(
- insertions, gaplexer.get_tokens_unprocessed(curcode))
-
- # the following is needed to distinguish Scilab and GAP .tst files
- def analyse_text(text):
-        # GAP prompts are a dead giveaway, although hypothetically a
- # file in another language could be trying to compare a variable
- # "gap" as in "gap> 0.1". But that this should happen at the
- # start of a line seems unlikely...
- if re.search(r"^gap> ", text):
- return 0.9
- else:
- return 0.0
-
-
-class MathematicaLexer(RegexLexer):
- """
- Lexer for Mathematica source code.
-
- .. versionadded:: 2.0
- """
- name = 'Mathematica'
- url = 'http://www.wolfram.com/mathematica/'
- aliases = ['mathematica', 'mma', 'nb']
- filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
- mimetypes = ['application/mathematica',
- 'application/vnd.wolfram.mathematica',
- 'application/vnd.wolfram.mathematica.package',
- 'application/vnd.wolfram.cdf']
-
- # http://reference.wolfram.com/mathematica/guide/Syntax.html
- operators = (
- ";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
- "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
- "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
- )
-
- punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
-
- def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
-
- tokens = {
- 'root': [
- (r'(?s)\(\*.*?\*\)', Comment),
-
- (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
- (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
- (r'#\d*', Name.Variable),
- (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
-
- (r'-?\d+\.\d*', Number.Float),
- (r'-?\d*\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
-
- (words(operators), Operator),
- (words(punctuation), Punctuation),
- (r'".*?"', String),
- (r'\s+', Text.Whitespace),
- ],
- }
-
-
-class MuPADLexer(RegexLexer):
- """
- A MuPAD lexer.
- Contributed by Christopher Creutzig <christopher@creutzig.de>.
-
- .. versionadded:: 0.8
- """
- name = 'MuPAD'
- url = 'http://www.mupad.com'
- aliases = ['mupad']
- filenames = ['*.mu']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- next|break|end|
- axiom|end_axiom|category|end_category|domain|end_domain|inherits|
- if|%if|then|elif|else|end_if|
- case|of|do|otherwise|end_case|
- while|end_while|
- repeat|until|end_repeat|
- for|from|to|downto|step|end_for|
- proc|local|option|save|begin|end_proc|
- delete|frame
- )\b''', Keyword),
- (r'''(?x)\b(?:
- DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
- DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
- DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
- DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
- )\b''', Name.Class),
- (r'''(?x)\b(?:
- PI|EULER|E|CATALAN|
- NIL|FAIL|undefined|infinity|
- TRUE|FALSE|UNKNOWN
- )\b''',
- Name.Constant),
- (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
- (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
- (r'''(?x)\b(?:
- and|or|not|xor|
- assuming|
- div|mod|
- union|minus|intersect|in|subset
- )\b''',
- Operator.Word),
- (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
- # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
- (r'''(?x)
- ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
- (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
- bygroups(Name.Function, Text, Punctuation)),
- (r'''(?x)
- (?:[a-zA-Z_#][\w#]*|`[^`]*`)
- (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'\s+', Whitespace),
- (r'.', Text)
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
-
-
-class BCLexer(RegexLexer):
- """
- A BC lexer.
-
- .. versionadded:: 2.1
- """
- name = 'BC'
- url = 'https://www.gnu.org/software/bc/'
- aliases = ['bc']
- filenames = ['*.bc']
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'[{}();,]', Punctuation),
- (words(('if', 'else', 'while', 'for', 'break', 'continue',
- 'halt', 'return', 'define', 'auto', 'print', 'read',
- 'length', 'scale', 'sqrt', 'limits', 'quit',
- 'warranty'), suffix=r'\b'), Keyword),
- (r'\+\+|--|\|\||&&|'
- r'([-<>+*%\^/!=])=?', Operator),
- # bc doesn't support exponential
- (r'[0-9]+(\.[0-9]*)?', Number),
- (r'\.[0-9]+', Number),
- (r'.', Text)
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ambient.py b/venv/lib/python3.11/site-packages/pygments/lexers/ambient.py
deleted file mode 100644
index deba0f3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ambient.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""
- pygments.lexers.ambient
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for AmbientTalk language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['AmbientTalkLexer']
-
-
-class AmbientTalkLexer(RegexLexer):
- """
- Lexer for AmbientTalk source code.
-
- .. versionadded:: 2.0
- """
- name = 'AmbientTalk'
- url = 'https://code.google.com/p/ambienttalk'
- filenames = ['*.at']
- aliases = ['ambienttalk', 'ambienttalk/2', 'at']
- mimetypes = ['text/x-ambienttalk']
-
- flags = re.MULTILINE | re.DOTALL
-
- builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
- 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
- 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
- 'mirroredBy:', 'is:'))
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(def|deftype|import|alias|exclude)\b', Keyword),
- (builtin, Name.Builtin),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'\|', Punctuation, 'arglist'),
- (r'<:|[*^!%&<>+=,./?-]|:=', Operator),
- (r"`[a-zA-Z_]\w*", String.Symbol),
- (r"[a-zA-Z_]\w*:", Name.Function),
- (r"[{}()\[\];`]", Punctuation),
- (r'(self|super)\b', Name.Variable.Instance),
- (r"[a-zA-Z_]\w*", Name.Variable),
- (r"@[a-zA-Z_]\w*", Name.Class),
- (r"@\[", Name.Class, 'annotations'),
- include('numbers'),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'namespace': [
- (r'[a-zA-Z_]\w*\.', Name.Namespace),
- (r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
- (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
- ],
- 'annotations': [
- (r"(.*?)\]", Name.Class, '#pop')
- ],
- 'arglist': [
- (r'\|', Punctuation, '#pop'),
- (r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
- (r'[a-zA-Z_]\w*', Name.Variable),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/amdgpu.py b/venv/lib/python3.11/site-packages/pygments/lexers/amdgpu.py
deleted file mode 100644
index 860dfd4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/amdgpu.py
+++ /dev/null
@@ -1,54 +0,0 @@
-"""
- pygments.lexers.amdgpu
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the AMDGPU ISA assembly.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Name, Text, Keyword, Whitespace, Number, Comment
-
-import re
-
-__all__ = ['AMDGPULexer']
-
-
-class AMDGPULexer(RegexLexer):
- """
- For AMD GPU assembly.
-
- .. versionadded:: 2.8
- """
- name = 'AMDGPU'
- aliases = ['amdgpu']
- filenames = ['*.isa']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'[\r\n]+', Text),
- (r'(([a-z_0-9])*:([a-z_0-9])*)', Name.Attribute),
- (r'(\[|\]|\(|\)|,|\:|\&)', Text),
- (r'([;#]|//).*?\n', Comment.Single),
- (r'((s_)?(scratch|ds|buffer|flat|image)_[a-z0-9_]+)', Keyword.Reserved),
- (r'(_lo|_hi)', Name.Variable),
- (r'(vmcnt|lgkmcnt|expcnt)', Name.Attribute),
- (r'(attr[0-9].[a-z])', Name.Attribute),
- (words((
- 'op', 'vaddr', 'vdata', 'off', 'soffset', 'srsrc', 'format',
- 'offset', 'offen', 'idxen', 'glc', 'dlc', 'slc', 'tfe', 'lds',
- 'lit', 'unorm'), suffix=r'\b'), Name.Attribute),
- (r'(label_[a-z0-9]+)', Keyword),
- (r'(_L[0-9]*)', Name.Variable),
- (r'(s|v)_[a-z0-9_]+', Keyword),
- (r'(v[0-9.]+|vcc|exec|v)', Name.Variable),
- (r's[0-9.]+|s', Name.Variable),
- (r'[0-9]+\.[^0-9]+', Number.Float),
- (r'(0[xX][a-z0-9]+)|([0-9]+)', Number.Integer)
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ampl.py b/venv/lib/python3.11/site-packages/pygments/lexers/ampl.py
deleted file mode 100644
index b5abcac..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ampl.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""
- pygments.lexers.ampl
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the AMPL language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, using, this, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['AmplLexer']
-
-
-class AmplLexer(RegexLexer):
- """
- For AMPL source code.
-
- .. versionadded:: 2.2
- """
- name = 'Ampl'
- url = 'http://ampl.com/'
- aliases = ['ampl']
- filenames = ['*.run']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Whitespace),
- (r'#.*?\n', Comment.Single),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (words((
- 'call', 'cd', 'close', 'commands', 'data', 'delete', 'display',
- 'drop', 'end', 'environ', 'exit', 'expand', 'include', 'load',
- 'model', 'objective', 'option', 'problem', 'purge', 'quit',
- 'redeclare', 'reload', 'remove', 'reset', 'restore', 'shell',
- 'show', 'solexpand', 'solution', 'solve', 'update', 'unload',
- 'xref', 'coeff', 'coef', 'cover', 'obj', 'interval', 'default',
- 'from', 'to', 'to_come', 'net_in', 'net_out', 'dimen',
- 'dimension', 'check', 'complements', 'write', 'function',
- 'pipe', 'format', 'if', 'then', 'else', 'in', 'while', 'repeat',
- 'for'), suffix=r'\b'), Keyword.Reserved),
- (r'(integer|binary|symbolic|ordered|circular|reversed|INOUT|IN|OUT|LOCAL)',
- Keyword.Type),
- (r'\".*?\"', String.Double),
- (r'\'.*?\'', String.Single),
- (r'[()\[\]{},;:]+', Punctuation),
- (r'\b(\w+)(\.)(astatus|init0|init|lb0|lb1|lb2|lb|lrc|'
- r'lslack|rc|relax|slack|sstatus|status|ub0|ub1|ub2|'
- r'ub|urc|uslack|val)',
- bygroups(Name.Variable, Punctuation, Keyword.Reserved)),
- (r'(set|param|var|arc|minimize|maximize|subject to|s\.t\.|subj to|'
- r'node|table|suffix|read table|write table)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Whitespace, Name.Variable)),
- (r'(param)(\s*)(:)(\s*)(\w+)(\s*)(:)(\s*)((\w|\s)+)',
- bygroups(Keyword.Declaration, Whitespace, Punctuation, Whitespace,
- Name.Variable, Whitespace, Punctuation, Whitespace, Name.Variable)),
- (r'(let|fix|unfix)(\s*)((?:\{.*\})?)(\s*)(\w+)',
- bygroups(Keyword.Declaration, Whitespace, using(this), Whitespace,
- Name.Variable)),
- (words((
- 'abs', 'acos', 'acosh', 'alias', 'asin', 'asinh', 'atan', 'atan2',
- 'atanh', 'ceil', 'ctime', 'cos', 'exp', 'floor', 'log', 'log10',
- 'max', 'min', 'precision', 'round', 'sin', 'sinh', 'sqrt', 'tan',
- 'tanh', 'time', 'trunc', 'Beta', 'Cauchy', 'Exponential', 'Gamma',
- 'Irand224', 'Normal', 'Normal01', 'Poisson', 'Uniform', 'Uniform01',
- 'num', 'num0', 'ichar', 'char', 'length', 'substr', 'sprintf',
- 'match', 'sub', 'gsub', 'print', 'printf', 'next', 'nextw', 'prev',
- 'prevw', 'first', 'last', 'ord', 'ord0', 'card', 'arity',
- 'indexarity'), prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (r'(\+|\-|\*|/|\*\*|=|<=|>=|==|\||\^|<|>|\!|\.\.|:=|\&|\!=|<<|>>)',
- Operator),
- (words((
- 'or', 'exists', 'forall', 'and', 'in', 'not', 'within', 'union',
- 'diff', 'difference', 'symdiff', 'inter', 'intersect',
- 'intersection', 'cross', 'setof', 'by', 'less', 'sum', 'prod',
- 'product', 'div', 'mod'), suffix=r'\b'),
- Keyword.Reserved), # Operator.Name but not enough emphasized with that
- (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
- (r'\d+([eE][+-]?\d+)?', Number.Integer),
- (r'[+-]?Infinity', Number.Integer),
- (r'(\w+|(\.(?!\.)))', Text)
- ]
-
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/apdlexer.py b/venv/lib/python3.11/site-packages/pygments/lexers/apdlexer.py
deleted file mode 100644
index a50219c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/apdlexer.py
+++ /dev/null
@@ -1,592 +0,0 @@
-"""
- pygments.lexers.apdlexer
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for ANSYS Parametric Design Language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words, default
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- String, Generic, Punctuation, Whitespace, Escape
-
-__all__ = ['apdlexer']
-
-
-class apdlexer(RegexLexer):
- """
- For APDL source code.
-
- .. versionadded:: 2.9
- """
- name = 'ANSYS parametric design language'
- aliases = ['ansys', 'apdl']
- filenames = ['*.ans']
- flags = re.IGNORECASE
-
- # list of elements
- elafunb = ("SURF152", "SURF153", "SURF154", "SURF156", "SHELL157",
- "SURF159", "LINK160", "BEAM161", "PLANE162",
- "SHELL163", "SOLID164", "COMBI165", "MASS166",
- "LINK167", "SOLID168", "TARGE169", "TARGE170",
- "CONTA171", "CONTA172", "CONTA173", "CONTA174",
- "CONTA175", "CONTA176", "CONTA177", "CONTA178",
- "PRETS179", "LINK180", "SHELL181", "PLANE182",
- "PLANE183", "MPC184", "SOLID185", "SOLID186",
- "SOLID187", "BEAM188", "BEAM189", "SOLSH190",
- "INTER192", "INTER193", "INTER194", "INTER195",
- "MESH200", "FOLLW201", "INTER202", "INTER203",
- "INTER204", "INTER205", "SHELL208", "SHELL209",
- "CPT212", "CPT213", "COMBI214", "CPT215", "CPT216",
- "CPT217", "FLUID220", "FLUID221", "PLANE223",
- "SOLID226", "SOLID227", "PLANE230", "SOLID231",
- "SOLID232", "PLANE233", "SOLID236", "SOLID237",
- "PLANE238", "SOLID239", "SOLID240", "HSFLD241",
- "HSFLD242", "SURF251", "SURF252", "REINF263",
- "REINF264", "REINF265", "SOLID272", "SOLID273",
- "SOLID278", "SOLID279", "SHELL281", "SOLID285",
- "PIPE288", "PIPE289", "ELBOW290", "USER300", "BEAM3",
- "BEAM4", "BEAM23", "BEAM24", "BEAM44", "BEAM54",
- "COMBIN7", "FLUID79", "FLUID80", "FLUID81", "FLUID141",
- "FLUID142", "INFIN9", "INFIN47", "PLANE13", "PLANE25",
- "PLANE42", "PLANE53", "PLANE67", "PLANE82", "PLANE83",
- "PLANE145", "PLANE146", "CONTAC12", "CONTAC52",
- "LINK1", "LINK8", "LINK10", "LINK32", "PIPE16",
- "PIPE17", "PIPE18", "PIPE20", "PIPE59", "PIPE60",
- "SHELL41", "SHELL43", "SHELL57", "SHELL63", "SHELL91",
- "SHELL93", "SHELL99", "SHELL150", "SOLID5", "SOLID45",
- "SOLID46", "SOLID65", "SOLID69", "SOLID92", "SOLID95",
- "SOLID117", "SOLID127", "SOLID128", "SOLID147",
- "SOLID148", "SOLID191", "VISCO88", "VISCO89",
- "VISCO106", "VISCO107", "VISCO108", "TRANS109")
-
- elafunc = ("PGRAPH", "/VT", "VTIN", "VTRFIL", "VTTEMP", "PGRSET",
- "VTCLR", "VTMETH", "VTRSLT", "VTVMOD", "PGSELE",
- "VTDISC", "VTMP", "VTSEC", "PGWRITE", "VTEVAL", "VTOP",
- "VTSFE", "POUTRES", "VTFREQ", "VTPOST", "VTSL",
- "FLDATA1-40", "HFPCSWP", "MSDATA", "MSVARY", "QFACT",
- "FLOCHECK", "HFPOWER", "MSMASS", "PERI", "SPADP",
- "FLREAD", "HFPORT", "MSMETH", "PLFSS", "SPARM",
- "FLOTRAN", "HFSCAT", "MSMIR", "PLSCH", "SPFSS",
- "HFADP", "ICE", "MSNOMF", "PLSYZ", "SPICE", "HFARRAY",
- "ICEDELE", "MSPROP", "PLTD", "SPSCAN", "HFDEEM",
- "ICELIST", "MSQUAD", "PLTLINE", "SPSWP", "HFEIGOPT",
- "ICVFRC", "MSRELAX", "PLVFRC", "HFEREFINE", "LPRT",
- "MSSOLU", "/PICE", "HFMODPRT", "MSADV", "MSSPEC",
- "PLWAVE", "HFPA", "MSCAP", "MSTERM", "PRSYZ")
-
- elafund = ("*VOPER", "VOVLAP", "*VPLOT", "VPLOT", "VPTN", "*VPUT",
- "VPUT", "*VREAD", "VROTAT", "VSBA", "VSBV", "VSBW",
- "/VSCALE", "*VSCFUN", "VSEL", "VSLA", "*VSTAT", "VSUM",
- "VSWEEP", "VSYMM", "VTRAN", "VTYPE", "/VUP", "*VWRITE",
- "/WAIT", "WAVES", "WERASE", "WFRONT", "/WINDOW",
- "WMID", "WMORE", "WPAVE", "WPCSYS", "WPLANE", "WPOFFS",
- "WPROTA", "WPSTYL", "WRFULL", "WRITE", "WRITEMAP",
- "*WRK", "WSORT", "WSPRINGS", "WSTART", "WTBCREATE",
- "XFDATA", "XFENRICH", "XFLIST", "/XFRM", "/XRANGE",
- "XVAR", "/YRANGE", "/ZOOM", "/WB", "XMLO", "/XML",
- "CNTR", "EBLOCK", "CMBLOCK", "NBLOCK", "/TRACK",
- "CWZPLOT", "~EUI", "NELE", "EALL", "NALL", "FLITEM",
- "LSLN", "PSOLVE", "ASLN", "/VERIFY", "/SSS", "~CFIN",
- "*EVAL", "*MOONEY", "/RUNSTAT", "ALPFILL",
- "ARCOLLAPSE", "ARDETACH", "ARFILL", "ARMERGE",
- "ARSPLIT", "FIPLOT", "GAPFINISH", "GAPLIST",
- "GAPMERGE", "GAPOPT", "GAPPLOT", "LNCOLLAPSE",
- "LNDETACH", "LNFILL", "LNMERGE", "LNSPLIT", "PCONV",
- "PLCONV", "PEMOPTS", "PEXCLUDE", "PINCLUDE", "PMETH",
- "/PMETH", "PMOPTS", "PPLOT", "PPRANGE", "PRCONV",
- "PRECISION", "RALL", "RFILSZ", "RITER", "RMEMRY",
- "RSPEED", "RSTAT", "RTIMST", "/RUNST", "RWFRNT",
- "SARPLOT", "SHSD", "SLPPLOT", "SLSPLOT", "VCVFILL",
- "/OPT", "OPEQN", "OPFACT", "OPFRST", "OPGRAD",
- "OPKEEP", "OPLOOP", "OPPRNT", "OPRAND", "OPSUBP",
- "OPSWEEP", "OPTYPE", "OPUSER", "OPVAR", "OPADD",
- "OPCLR", "OPDEL", "OPMAKE", "OPSEL", "OPANL", "OPDATA",
- "OPRESU", "OPSAVE", "OPEXE", "OPLFA", "OPLGR",
- "OPLIST", "OPLSW", "OPRFA", "OPRGR", "OPRSW",
- "PILECALC", "PILEDISPSET", "PILEGEN", "PILELOAD",
- "PILEMASS", "PILERUN", "PILESEL", "PILESTIF",
- "PLVAROPT", "PRVAROPT", "TOCOMP", "TODEF", "TOFREQ",
- "TOTYPE", "TOVAR", "TOEXE", "TOLOOP", "TOGRAPH",
- "TOLIST", "TOPLOT", "TOPRINT", "TOSTAT", "TZAMESH",
- "TZDELE", "TZEGEN", "XVAROPT", "PGSAVE", "SOLCONTROL",
- "TOTAL", "VTGEOM", "VTREAL", "VTSTAT")
-
- elafune = ("/ANUM", "AOFFST", "AOVLAP", "APLOT", "APPEND", "APTN",
- "ARCLEN", "ARCTRM", "AREAS", "AREFINE", "AREMESH",
- "AREVERSE", "AROTAT", "ARSCALE", "ARSYM", "ASBA",
- "ASBL", "ASBV", "ASBW", "ASCRES", "ASEL", "ASIFILE",
- "*ASK", "ASKIN", "ASLL", "ASLV", "ASOL", "/ASSIGN",
- "ASUB", "ASUM", "ATAN", "ATRAN", "ATYPE", "/AUTO",
- "AUTOTS", "/AUX2", "/AUX3", "/AUX12", "/AUX15",
- "AVPRIN", "AVRES", "AWAVE", "/AXLAB", "*AXPY",
- "/BATCH", "BCSOPTION", "BETAD", "BF", "BFA", "BFADELE",
- "BFALIST", "BFCUM", "BFDELE", "BFE", "BFECUM",
- "BFEDELE", "BFELIST", "BFESCAL", "BFINT", "BFK",
- "BFKDELE", "BFKLIST", "BFL", "BFLDELE", "BFLIST",
- "BFLLIST", "BFSCALE", "BFTRAN", "BFUNIF", "BFV",
- "BFVDELE", "BFVLIST", "BIOOPT", "BIOT", "BLC4", "BLC5",
- "BLOCK", "BOOL", "BOPTN", "BSAX", "BSMD", "BSM1",
- "BSM2", "BSPLIN", "BSS1", "BSS2", "BSTE", "BSTQ",
- "BTOL", "BUCOPT", "C", "CALC", "CAMPBELL", "CBDOF",
- "CBMD", "CBMX", "CBTE", "CBTMP", "CDOPT", "CDREAD",
- "CDWRITE", "CE", "CECHECK", "CECMOD", "CECYC",
- "CEDELE", "CEINTF", "CELIST", "CENTER", "CEQN",
- "CERIG", "CESGEN", "CFACT", "*CFCLOS", "*CFOPEN",
- "*CFWRITE", "/CFORMAT", "CGLOC", "CGOMGA", "CGROW",
- "CHECK", "CHKMSH", "CINT", "CIRCLE", "CISOL",
- "/CLABEL", "/CLEAR", "CLOCAL", "CLOG", "/CLOG",
- "CLRMSHLN", "CM", "CMACEL", "/CMAP", "CMATRIX",
- "CMDELE", "CMDOMEGA", "CMEDIT", "CMGRP", "CMLIST",
- "CMMOD", "CMOMEGA", "CMPLOT", "CMROTATE", "CMSEL",
- "CMSFILE", "CMSOPT", "CMWRITE", "CNCHECK", "CNKMOD",
- "CNTR", "CNVTOL", "/COLOR", "*COMP", "COMBINE",
- "COMPRESS", "CON4", "CONE", "/CONFIG", "CONJUG",
- "/CONTOUR", "/COPY", "CORIOLIS", "COUPLE", "COVAL",
- "CP", "CPCYC", "CPDELE", "CPINTF", "/CPLANE", "CPLGEN",
- "CPLIST", "CPMERGE", "CPNGEN", "CPSGEN", "CQC",
- "*CREATE", "CRPLIM", "CS", "CSCIR", "CSDELE", "CSKP",
- "CSLIST", "CSWPLA", "CSYS", "/CTYPE", "CURR2D",
- "CUTCONTROL", "/CVAL", "CVAR", "/CWD", "CYCCALC",
- "/CYCEXPAND", "CYCFILES", "CYCFREQ", "*CYCLE",
- "CYCLIC", "CYCOPT", "CYCPHASE", "CYCSPEC", "CYL4",
- "CYL5", "CYLIND", "CZDEL", "CZMESH", "D", "DA",
- "DADELE", "DALIST", "DAMORPH", "DATA", "DATADEF",
- "DCGOMG", "DCUM", "DCVSWP", "DDASPEC", "DDELE",
- "DDOPTION", "DEACT", "DEFINE", "*DEL", "DELETE",
- "/DELETE", "DELTIM", "DELTIME", "DEMORPH", "DERIV", "DESIZE",
- "DESOL", "DETAB", "/DEVDISP", "/DEVICE", "/DFLAB",
- "DFLX", "DFSWAVE", "DIG", "DIGIT", "*DIM",
- "/DIRECTORY", "DISPLAY", "/DIST", "DJ", "DJDELE",
- "DJLIST", "DK", "DKDELE", "DKLIST", "DL", "DLDELE",
- "DLIST", "DLLIST", "*DMAT", "DMOVE", "DMPEXT",
- "DMPOPTION", "DMPRAT", "DMPSTR", "DNSOL", "*DO", "DOF",
- "DOFSEL", "DOMEGA", "*DOT", "*DOWHILE", "DSCALE",
- "/DSCALE", "DSET", "DSPOPTION", "DSUM", "DSURF",
- "DSYM", "DSYS", "DTRAN", "DUMP", "/DV3D", "DVAL",
- "DVMORPH", "DYNOPT", "E", "EALIVE", "EDADAPT", "EDALE",
- "EDASMP", "EDBOUND", "EDBX", "EDBVIS", "EDCADAPT",
- "EDCGEN", "EDCLIST", "EDCMORE", "EDCNSTR", "EDCONTACT",
- "EDCPU", "EDCRB", "EDCSC", "EDCTS", "EDCURVE",
- "EDDAMP", "EDDBL", "EDDC", "EDDRELAX", "EDDUMP",
- "EDELE", "EDENERGY", "EDFPLOT", "EDGCALE", "/EDGE",
- "EDHGLS", "EDHIST", "EDHTIME", "EDINT", "EDIPART",
- "EDIS", "EDLCS", "EDLOAD", "EDMP", "EDNB", "EDNDTSD",
- "EDNROT", "EDOPT", "EDOUT", "EDPART", "EDPC", "EDPL",
- "EDPVEL", "EDRC", "EDRD", "EDREAD", "EDRI", "EDRST",
- "EDRUN", "EDSHELL", "EDSOLV", "EDSP", "EDSTART",
- "EDTERM", "EDTP", "EDVEL", "EDWELD", "EDWRITE",
- "EEXTRUDE", "/EFACET", "EGEN", "*EIGEN", "EINFIN",
- "EINTF", "EKILL", "ELBOW", "ELEM", "ELIST", "*ELSE",
- "*ELSEIF", "EMAGERR", "EMATWRITE", "EMF", "EMFT",
- "EMID", "EMIS", "EMODIF", "EMORE", "EMSYM", "EMTGEN",
- "EMUNIT", "EN", "*END", "*ENDDO", "*ENDIF",
- "ENDRELEASE", "ENERSOL", "ENGEN", "ENORM", "ENSYM",
- "EORIENT", "EPLOT", "EQSLV", "ERASE", "/ERASE",
- "EREAD", "EREFINE", "EREINF", "ERESX", "ERNORM",
- "ERRANG", "ESCHECK", "ESEL", "/ESHAPE", "ESIZE",
- "ESLA", "ESLL", "ESLN", "ESLV", "ESOL", "ESORT",
- "ESSOLV", "ESTIF", "ESURF", "ESYM", "ESYS", "ET",
- "ETABLE", "ETCHG", "ETCONTROL", "ETDELE", "ETLIST",
- "ETYPE", "EUSORT", "EWRITE", "*EXIT", "/EXIT", "EXP",
- "EXPAND", "/EXPAND", "EXPASS", "*EXPORT", "EXPROFILE",
- "EXPSOL", "EXTOPT", "EXTREM", "EXUNIT", "F", "/FACET",
- "FATIGUE", "FC", "FCCHECK", "FCDELE", "FCLIST", "FCUM",
- "FCTYP", "FDELE", "/FDELE", "FE", "FEBODY", "FECONS",
- "FEFOR", "FELIST", "FESURF", "*FFT", "FILE",
- "FILEAUX2", "FILEAUX3", "FILEDISP", "FILL", "FILLDATA",
- "/FILNAME", "FINISH", "FITEM", "FJ", "FJDELE",
- "FJLIST", "FK", "FKDELE", "FKLIST", "FL", "FLIST",
- "FLLIST", "FLST", "FLUXV", "FLUREAD", "FMAGBC",
- "FMAGSUM", "/FOCUS", "FOR2D", "FORCE", "FORM",
- "/FORMAT", "FP", "FPLIST", "*FREE", "FREQ", "FRQSCL",
- "FS", "FSCALE", "FSDELE", "FSLIST", "FSNODE", "FSPLOT",
- "FSSECT", "FSSPARM", "FSUM", "FTCALC", "FTRAN",
- "FTSIZE", "FTWRITE", "FTYPE", "FVMESH", "GAP", "GAPF",
- "GAUGE", "GCDEF", "GCGEN", "/GCMD", "/GCOLUMN",
- "GENOPT", "GEOM", "GEOMETRY", "*GET", "/GFILE",
- "/GFORMAT", "/GLINE", "/GMARKER", "GMATRIX", "GMFACE",
- "*GO", "/GO", "/GOLIST", "/GOPR", "GP", "GPDELE",
- "GPLIST", "GPLOT", "/GRAPHICS", "/GRESUME", "/GRID",
- "/GROPT", "GRP", "/GRTYP", "/GSAVE", "GSBDATA",
- "GSGDATA", "GSLIST", "GSSOL", "/GST", "GSUM", "/GTHK",
- "/GTYPE", "HARFRQ", "/HBC", "HBMAT", "/HEADER", "HELP",
- "HELPDISP", "HEMIOPT", "HFANG", "HFSYM", "HMAGSOLV",
- "HPGL", "HPTCREATE", "HPTDELETE", "HRCPLX", "HREXP",
- "HROPT", "HROCEAN", "HROUT", "IC", "ICDELE", "ICLIST",
- "/ICLWID", "/ICSCALE", "*IF", "IGESIN", "IGESOUT",
- "/IMAGE", "IMAGIN", "IMESH", "IMMED", "IMPD",
- "INISTATE", "*INIT", "/INPUT", "/INQUIRE", "INRES",
- "INRTIA", "INT1", "INTSRF", "IOPTN", "IRLF", "IRLIST",
- "*ITENGINE", "JPEG", "JSOL", "K", "KATT", "KBC",
- "KBETW", "KCALC", "KCENTER", "KCLEAR", "KDELE",
- "KDIST", "KEEP", "KESIZE", "KEYOPT", "KEYPTS", "KEYW",
- "KFILL", "KGEN", "KL", "KLIST", "KMESH", "KMODIF",
- "KMOVE", "KNODE", "KPLOT", "KPSCALE", "KREFINE",
- "KSCALE", "KSCON", "KSEL", "KSLL", "KSLN", "KSUM",
- "KSYMM", "KTRAN", "KUSE", "KWPAVE", "KWPLAN", "L",
- "L2ANG", "L2TAN", "LANG", "LARC", "/LARC", "LAREA",
- "LARGE", "LATT", "LAYER", "LAYERP26", "LAYLIST",
- "LAYPLOT", "LCABS", "LCASE", "LCCALC", "LCCAT",
- "LCDEF", "LCFACT", "LCFILE", "LCLEAR", "LCOMB",
- "LCOPER", "LCSEL", "LCSL", "LCSUM", "LCWRITE",
- "LCZERO", "LDELE", "LDIV", "LDRAG", "LDREAD", "LESIZE",
- "LEXTND", "LFILLT", "LFSURF", "LGEN", "LGLUE",
- "LGWRITE", "/LIGHT", "LINA", "LINE", "/LINE", "LINES",
- "LINL", "LINP", "LINV", "LIST", "*LIST", "LLIST",
- "LMATRIX", "LMESH", "LNSRCH", "LOCAL", "LOVLAP",
- "LPLOT", "LPTN", "LREFINE", "LREVERSE", "LROTAT",
- "LSBA", "*LSBAC", "LSBL", "LSBV", "LSBW", "LSCLEAR",
- "LSDELE", "*LSDUMP", "LSEL", "*LSENGINE", "*LSFACTOR",
- "LSLA", "LSLK", "LSOPER", "/LSPEC", "LSREAD",
- "*LSRESTORE", "LSSCALE", "LSSOLVE", "LSTR", "LSUM",
- "LSWRITE", "/LSYMBOL", "LSYMM", "LTAN", "LTRAN",
- "LUMPM", "LVSCALE", "LWPLAN", "M", "MADAPT", "MAGOPT",
- "MAGSOLV", "/MAIL", "MAP", "/MAP", "MAP2DTO3D",
- "MAPSOLVE", "MAPVAR", "MASTER", "MAT", "MATER",
- "MCHECK", "MDAMP", "MDELE", "MDPLOT", "MEMM", "/MENU",
- "MESHING", "MFANALYSIS", "MFBUCKET", "MFCALC", "MFCI",
- "MFCLEAR", "MFCMMAND", "MFCONV", "MFDTIME", "MFELEM",
- "MFEM", "MFEXTER", "MFFNAME", "MFFR", "MFIMPORT",
- "MFINTER", "MFITER", "MFLCOMM", "MFLIST", "MFMAP",
- "MFORDER", "MFOUTPUT", "*MFOURI", "MFPSIMUL", "MFRC",
- "MFRELAX", "MFRSTART", "MFSORDER", "MFSURFACE",
- "MFTIME", "MFTOL", "*MFUN", "MFVOLUME", "MFWRITE",
- "MGEN", "MIDTOL", "/MKDIR", "MLIST", "MMASS", "MMF",
- "MODCONT", "MODE", "MODIFY", "MODMSH", "MODSELOPTION",
- "MODOPT", "MONITOR", "*MOPER", "MOPT", "MORPH", "MOVE",
- "MP", "MPAMOD", "MPCHG", "MPCOPY", "MPDATA", "MPDELE",
- "MPDRES", "/MPLIB", "MPLIST", "MPPLOT", "MPREAD",
- "MPRINT", "MPTEMP", "MPTGEN", "MPTRES", "MPWRITE",
- "/MREP", "MSAVE", "*MSG", "MSHAPE", "MSHCOPY",
- "MSHKEY", "MSHMID", "MSHPATTERN", "MSOLVE", "/MSTART",
- "MSTOLE", "*MULT", "*MWRITE", "MXPAND", "N", "NANG",
- "NAXIS", "NCNV", "NDELE", "NDIST", "NDSURF", "NEQIT",
- "/NERR", "NFORCE", "NGEN", "NKPT", "NLADAPTIVE",
- "NLDIAG", "NLDPOST", "NLGEOM", "NLHIST", "NLIST",
- "NLMESH", "NLOG", "NLOPT", "NMODIF", "NOCOLOR",
- "NODES", "/NOERASE", "/NOLIST", "NOOFFSET", "NOORDER",
- "/NOPR", "NORA", "NORL", "/NORMAL", "NPLOT", "NPRINT",
- "NREAD", "NREFINE", "NRLSUM", "*NRM", "NROPT",
- "NROTAT", "NRRANG", "NSCALE", "NSEL", "NSLA", "NSLE",
- "NSLK", "NSLL", "NSLV", "NSMOOTH", "NSOL", "NSORT",
- "NSTORE", "NSUBST", "NSVR", "NSYM", "/NUMBER",
- "NUMCMP", "NUMEXP", "NUMMRG", "NUMOFF", "NUMSTR",
- "NUMVAR", "NUSORT", "NWPAVE", "NWPLAN", "NWRITE",
- "OCDATA", "OCDELETE", "OCLIST", "OCREAD", "OCTABLE",
- "OCTYPE", "OCZONE", "OMEGA", "OPERATE", "OPNCONTROL",
- "OUTAERO", "OUTOPT", "OUTPR", "/OUTPUT", "OUTRES",
- "OVCHECK", "PADELE", "/PAGE", "PAGET", "PAPUT",
- "PARESU", "PARTSEL", "PARRES", "PARSAV", "PASAVE",
- "PATH", "PAUSE", "/PBC", "/PBF", "PCALC", "PCGOPT",
- "PCIRC", "/PCIRCLE", "/PCOPY", "PCROSS", "PDANL",
- "PDCDF", "PDCFLD", "PDCLR", "PDCMAT", "PDCORR",
- "PDDMCS", "PDDOEL", "PDEF", "PDEXE", "PDHIST",
- "PDINQR", "PDLHS", "PDMETH", "PDOT", "PDPINV",
- "PDPLOT", "PDPROB", "PDRESU", "PDROPT", "/PDS",
- "PDSAVE", "PDSCAT", "PDSENS", "PDSHIS", "PDUSER",
- "PDVAR", "PDWRITE", "PERBC2D", "PERTURB", "PFACT",
- "PHYSICS", "PIVCHECK", "PLCAMP", "PLCFREQ", "PLCHIST",
- "PLCINT", "PLCPLX", "PLCRACK", "PLDISP", "PLESOL",
- "PLETAB", "PLFAR", "PLF2D", "PLGEOM", "PLLS", "PLMAP",
- "PLMC", "PLNEAR", "PLNSOL", "/PLOPTS", "PLORB", "PLOT",
- "PLOTTING", "PLPAGM", "PLPATH", "PLSECT", "PLST",
- "PLTIME", "PLTRAC", "PLVAR", "PLVECT", "PLZZ",
- "/PMACRO", "PMAP", "PMGTRAN", "PMLOPT", "PMLSIZE",
- "/PMORE", "PNGR", "/PNUM", "POINT", "POLY", "/POLYGON",
- "/POST1", "/POST26", "POWERH", "PPATH", "PRANGE",
- "PRAS", "PRCAMP", "PRCINT", "PRCPLX", "PRED",
- "PRENERGY", "/PREP7", "PRERR", "PRESOL", "PRETAB",
- "PRFAR", "PRI2", "PRIM", "PRINT", "*PRINT", "PRISM",
- "PRITER", "PRJSOL", "PRNEAR", "PRNLD", "PRNSOL",
- "PROD", "PRORB", "PRPATH", "PRRFOR", "PRRSOL",
- "PRSCONTROL", "PRSECT", "PRTIME", "PRVAR", "PRVECT",
- "PSCONTROL", "PSCR", "PSDCOM", "PSDFRQ", "PSDGRAPH",
- "PSDRES", "PSDSPL", "PSDUNIT", "PSDVAL", "PSDWAV",
- "/PSEARCH", "PSEL", "/PSF", "PSMAT", "PSMESH",
- "/PSPEC", "/PSTATUS", "PSTRES", "/PSYMB", "PTR",
- "PTXY", "PVECT", "/PWEDGE", "QDVAL", "QRDOPT", "QSOPT",
- "QUAD", "/QUIT", "QUOT", "R", "RACE", "RADOPT",
- "RAPPND", "RATE", "/RATIO", "RBE3", "RCON", "RCYC",
- "RDEC", "RDELE", "READ", "REAL", "REALVAR", "RECTNG",
- "REMESH", "/RENAME", "REORDER", "*REPEAT", "/REPLOT",
- "RESCOMBINE", "RESCONTROL", "RESET", "/RESET", "RESP",
- "RESUME", "RESVEC", "RESWRITE", "*RETURN", "REXPORT",
- "REZONE", "RFORCE", "/RGB", "RIGID", "RIGRESP",
- "RIMPORT", "RLIST", "RMALIST", "RMANL", "RMASTER",
- "RMCAP", "RMCLIST", "/RMDIR", "RMFLVEC", "RMLVSCALE",
- "RMMLIST", "RMMRANGE", "RMMSELECT", "RMNDISP",
- "RMNEVEC", "RMODIF", "RMORE", "RMPORDER", "RMRESUME",
- "RMRGENERATE", "RMROPTIONS", "RMRPLOT", "RMRSTATUS",
- "RMSAVE", "RMSMPLE", "RMUSE", "RMXPORT", "ROCK",
- "ROSE", "RPOLY", "RPR4", "RPRISM", "RPSD", "RSFIT",
- "RSOPT", "RSPLIT", "RSPLOT", "RSPRNT", "RSSIMS",
- "RSTMAC", "RSTOFF", "RSURF", "RSYMM", "RSYS", "RTHICK",
- "SABS", "SADD", "SALLOW", "SAVE", "SBCLIST", "SBCTRAN",
- "SDELETE", "SE", "SECCONTROL", "SECDATA",
- "SECFUNCTION", "SECJOINT", "/SECLIB", "SECLOCK",
- "SECMODIF", "SECNUM", "SECOFFSET", "SECPLOT",
- "SECREAD", "SECSTOP", "SECTYPE", "SECWRITE", "SED",
- "SEDLIST", "SEEXP", "/SEG", "SEGEN", "SELIST", "SELM",
- "SELTOL", "SENERGY", "SEOPT", "SESYMM", "*SET", "SET",
- "SETFGAP", "SETRAN", "SEXP", "SF", "SFA", "SFACT",
- "SFADELE", "SFALIST", "SFBEAM", "SFCALC", "SFCUM",
- "SFDELE", "SFE", "SFEDELE", "SFELIST", "SFFUN",
- "SFGRAD", "SFL", "SFLDELE", "SFLEX", "SFLIST",
- "SFLLIST", "SFSCALE", "SFTRAN", "/SHADE", "SHELL",
- "/SHOW", "/SHOWDISP", "SHPP", "/SHRINK", "SLIST",
- "SLOAD", "SMALL", "*SMAT", "SMAX", "/SMBC", "SMBODY",
- "SMCONS", "SMFOR", "SMIN", "SMOOTH", "SMRTSIZE",
- "SMSURF", "SMULT", "SNOPTION", "SOLU", "/SOLU",
- "SOLUOPT", "SOLVE", "SORT", "SOURCE", "SPACE",
- "SPCNOD", "SPCTEMP", "SPDAMP", "SPEC", "SPFREQ",
- "SPGRAPH", "SPH4", "SPH5", "SPHERE", "SPLINE", "SPLOT",
- "SPMWRITE", "SPOINT", "SPOPT", "SPREAD", "SPTOPT",
- "SPOWER", "SPUNIT", "SPVAL", "SQRT", "*SREAD", "SRSS",
- "SSBT", "/SSCALE", "SSLN", "SSMT", "SSPA", "SSPB",
- "SSPD", "SSPE", "SSPM", "SSUM", "SSTATE", "STABILIZE",
- "STAOPT", "STAT", "*STATUS", "/STATUS", "STEF",
- "STORE", "SUBOPT", "SUBSET", "SUCALC",
- "SUCR", "SUDEL", "SUEVAL", "SUGET", "SUMAP", "SUMTYPE",
- "SUPL", "SUPR", "SURESU", "SUSAVE", "SUSEL", "SUVECT",
- "SV", "SVPLOT", "SVTYP", "SWADD", "SWDEL", "SWGEN",
- "SWLIST", "SYNCHRO", "/SYP", "/SYS", "TALLOW",
- "TARGET", "*TAXIS", "TB", "TBCOPY", "TBDATA", "TBDELE",
- "TBEO", "TBIN", "TBFIELD", "TBFT", "TBLE", "TBLIST",
- "TBMODIF", "TBPLOT", "TBPT", "TBTEMP", "TCHG", "/TEE",
- "TERM", "THEXPAND", "THOPT", "TIFF", "TIME",
- "TIMERANGE", "TIMINT", "TIMP", "TINTP",
- "/TLABEL", "TOFFST", "*TOPER", "TORQ2D", "TORQC2D",
- "TORQSUM", "TORUS", "TRANS", "TRANSFER", "*TREAD",
- "TREF", "/TRIAD", "/TRLCY", "TRNOPT", "TRPDEL",
- "TRPLIS", "TRPOIN", "TRTIME", "TSHAP", "/TSPEC",
- "TSRES", "TUNIF", "TVAR", "/TXTRE", "/TYPE", "TYPE",
- "/UCMD", "/UDOC", "/UI", "UIMP", "/UIS", "*ULIB", "/UPF",
- "UNDELETE", "UNDO", "/UNITS", "UNPAUSE", "UPCOORD",
- "UPGEOM", "*USE", "/USER", "USRCAL", "USRDOF",
- "USRELEM", "V", "V2DOPT", "VA", "*VABS", "VADD",
- "VARDEL", "VARNAM", "VATT", "VCLEAR", "*VCOL",
- "/VCONE", "VCROSS", "*VCUM", "VDDAM", "VDELE", "VDGL",
- "VDOT", "VDRAG", "*VEC", "*VEDIT", "VEORIENT", "VEXT",
- "*VFACT", "*VFILL", "VFOPT", "VFQUERY", "VFSM",
- "*VFUN", "VGEN", "*VGET", "VGET", "VGLUE", "/VIEW",
- "VIMP", "VINP", "VINV", "*VITRP", "*VLEN", "VLIST",
- "VLSCALE", "*VMASK", "VMESH", "VOFFST", "VOLUMES")
-
- # list of in-built () functions
- elafunf = ("NX()", "NY()", "NZ()", "KX()", "KY()", "KZ()", "LX()",
- "LY()", "LZ()", "LSX()", "LSY()", "LSZ()", "NODE()",
- "KP()", "DISTND()", "DISTKP()", "DISTEN()", "ANGLEN()",
- "ANGLEK()", "NNEAR()", "KNEAR()", "ENEARN()",
- "AREAND()", "AREAKP()", "ARNODE()", "NORMNX()",
- "NORMNY()", "NORMNZ()", "NORMKX()", "NORMKY()",
- "NORMKZ()", "ENEXTN()", "NELEM()", "NODEDOF()",
- "ELADJ()", "NDFACE()", "NMFACE()", "ARFACE()", "UX()",
- "UY()", "UZ()", "ROTX()", "ROTY()", "ROTZ()", "TEMP()",
- "PRES()", "VX()", "VY()", "VZ()", "ENKE()", "ENDS()",
- "VOLT()", "MAG()", "AX()", "AY()", "AZ()",
- "VIRTINQR()", "KWGET()", "VALCHR()", "VALHEX()",
- "CHRHEX()", "STRFILL()", "STRCOMP()", "STRPOS()",
- "STRLENG()", "UPCASE()", "LWCASE()", "JOIN()",
- "SPLIT()", "ABS()", "SIGN()", "CXABS()", "EXP()",
- "LOG()", "LOG10()", "SQRT()", "NINT()", "MOD()",
- "RAND()", "GDIS()", "SIN()", "COS()", "TAN()",
- "SINH()", "COSH()", "TANH()", "ASIN()", "ACOS()",
- "ATAN()", "ATAN2()")
-
- elafung = ("NSEL()", "ESEL()", "KSEL()", "LSEL()", "ASEL()",
- "VSEL()", "NDNEXT()", "ELNEXT()", "KPNEXT()",
- "LSNEXT()", "ARNEXT()", "VLNEXT()", "CENTRX()",
- "CENTRY()", "CENTRZ()")
-
- elafunh = ("~CAT5IN", "~CATIAIN", "~PARAIN", "~PROEIN", "~SATIN",
- "~UGIN", "A", "AADD", "AATT", "ABEXTRACT", "*ABBR",
- "ABBRES", "ABBSAV", "ABS", "ACCAT", "ACCOPTION",
- "ACEL", "ACLEAR", "ADAMS", "ADAPT", "ADD", "ADDAM",
- "ADELE", "ADGL", "ADRAG", "AESIZE", "AFILLT", "AFLIST",
- "AFSURF", "*AFUN", "AGEN", "AGLUE", "AINA", "AINP",
- "AINV", "AL", "ALIST", "ALLSEL", "ALPHAD", "AMAP",
- "AMESH", "/AN3D", "ANCNTR", "ANCUT", "ANCYC", "ANDATA",
- "ANDSCL", "ANDYNA", "/ANFILE", "ANFLOW", "/ANGLE",
- "ANHARM", "ANIM", "ANISOS", "ANMODE", "ANMRES",
- "/ANNOT", "ANORM", "ANPRES", "ANSOL", "ANSTOAQWA",
- "ANSTOASAS", "ANTIME", "ANTYPE")
-
- special = ("/COM", "/TITLE", "STITLE")
-
- elements = ("SOLID5",
- "LINK11",
- "PLANE13",
- "COMBIN14",
- "MASS2",
- "PLANE25",
- "MATRIX27",
- "FLUID29",
- "FLUID30",
- "LINK31",
- "LINK33",
- "LINK34",
- "PLANE35",
- "SOURC36",
- "COMBIN37",
- "FLUID38",
- "COMBIN39",
- "COMBIN40",
- "INFIN47",
- "MATRIX50",
- "PLANE55",
- "SHELL61",
- "LINK68",
- "SOLID70",
- "MASS71",
- "PLANE75",
- "PLANE77",
- "PLANE78",
- "PLANE83",
- "SOLID87",
- "SOLID90",
- "CIRCU94",
- "SOLID96",
- "SOLID98",
- "INFIN110",
- "INFIN111",
- "FLUID116",
- "PLANE121",
- "SOLID122",
- "SOLID123",
- "CIRCU124",
- "CIRCU125",
- "TRANS126",
- "FLUID129",
- "FLUID130",
- "SHELL131",
- "SHELL132",
- "FLUID136",
- "FLUID138",
- "FLUID139",
- "SURF151",
- "SURF152",
- "SURF153",
- "SURF154",
- "SURF155",
- "SURF156",
- "SHELL157",
- "SURF159",
- "TARGE169",
- "TARGE170",
- "CONTA172",
- "CONTA174",
- "CONTA175",
- "CONTA177",
- "CONTA178",
- "PRETS179",
- "LINK180",
- "SHELL181",
- "PLANE182",
- "PLANE183",
- "MPC184",
- "SOLID185",
- "SOLID186",
- "SOLID187",
- "BEAM188",
- "BEAM189",
- "SOLSH190",
- "INTER192",
- "INTER193",
- "INTER194",
- "INTER195",
- "MESH200",
- "FOLLW201",
- "INTER202",
- "INTER203",
- "INTER204",
- "INTER205",
- "SHELL208",
- "SHELL209",
- "CPT212",
- "CPT213",
- "COMBI214",
- "CPT215",
- "CPT216",
- "CPT217",
- "FLUID218",
- "FLUID220",
- "FLUID221",
- "PLANE222",
- "PLANE223",
- "SOLID225",
- "SOLID226",
- "SOLID227",
- "PLANE230",
- "SOLID231",
- "SOLID232",
- "PLANE233",
- "SOLID236",
- "SOLID237",
- "PLANE238",
- "SOLID239",
- "SOLID240",
- "HSFLD241",
- "HSFLD242",
- "COMBI250",
- "SURF251",
- "SURF252",
- "INFIN257",
- "REINF263",
- "REINF264",
- "REINF265",
- "SOLID272",
- "SOLID273",
- "SOLID278",
- "SOLID279",
- "CABLE280",
- "SHELL281",
- "SOLID285",
- "PIPE288",
- "PIPE289",
- "ELBOW290",
- "SOLID291",
- "PLANE292",
- "PLANE293",
- "USER300")
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Whitespace),
- (words((elafunb+elafunc+elafund+elafune+elafunh+special), suffix=r'\b'), Keyword, 'non-keyword'),
- default('non-keyword'),
- ],
- 'non-keyword': [
- (r'!.*\n', Comment, '#pop'),
- (r'%.*?%', Escape),
- include('strings'),
- include('nums'),
- (words((elafunf+elafung), suffix=r'\b'), Name.Builtin),
- (words((elements), suffix=r'\b'), Name.Property),
- include('core'),
- (r'AR[0-9]+', Name.Variable.Instance),
- (r'[a-z_][a-z0-9_]*', Name.Variable),
- (r'\n+', Whitespace, '#pop'),
- (r'[^\S\n]+', Whitespace),
- ],
- 'core': [
- # Operators
- (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=|\(|\))', Operator),
- (r'/EOF', Generic.Emph),
- (r'[\.(),:&;]', Punctuation),
- ],
- 'strings': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'[$%]', String.Symbol),
- ],
- 'nums': [
- (r'[+-]?\d*\.\d+([efEF][-+]?\d+)?', Number.Float), # with dot
- (r'([+-]?\d+([efEF][-+]?\d+))', Number.Float), # With scientific notation
- (r'\b\d+(?![.ef])', Number.Integer), # integer simple
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/apl.py b/venv/lib/python3.11/site-packages/pygments/lexers/apl.py
deleted file mode 100644
index 815184d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/apl.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
- pygments.lexers.apl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for APL.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['APLLexer']
-
-
-class APLLexer(RegexLexer):
- """
- A simple APL lexer.
-
- .. versionadded:: 2.0
- """
- name = 'APL'
- url = 'https://en.m.wikipedia.org/wiki/APL_(programming_language)'
- aliases = ['apl']
- filenames = [
- '*.apl', '*.aplf', '*.aplo', '*.apln',
- '*.aplc', '*.apli', '*.dyalog',
- ]
-
- tokens = {
- 'root': [
- # Whitespace
- # ==========
- (r'\s+', Whitespace),
- #
- # Comment
- # =======
- # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
- (r'[⍝#].*$', Comment.Single),
- #
- # Strings
- # =======
- (r'\'((\'\')|[^\'])*\'', String.Single),
- (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
- #
- # Punctuation
- # ===========
- # This token type is used for diamond and parenthesis
- # but not for bracket and ; (see below)
- (r'[⋄◇()]', Punctuation),
- #
- # Array indexing
- # ==============
- # Since this token type is very important in APL, it is not included in
- # the punctuation token type but rather in the following one
- (r'[\[\];]', String.Regex),
- #
- # Distinguished names
- # ===================
- # following IBM APL2 standard
- (r'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
- #
- # Labels
- # ======
- # following IBM APL2 standard
- # (r'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
- #
- # Variables
- # =========
- # following IBM APL2 standard (with a leading _ ok for GNU APL and Dyalog)
- (r'[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
- #
- # Numbers
- # =======
- (r'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
- r'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
- Number),
- #
- # Operators
- # ==========
- (r'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘⌸&⌶@⌺⍥⍛⍢]', Name.Attribute), # closest token type
- (r'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⊇⍸√⌾…⍮]',
- Operator),
- #
- # Constant
- # ========
- (r'⍬', Name.Constant),
- #
- # Quad symbol
- # ===========
- (r'[⎕⍞]', Name.Variable.Global),
- #
- # Arrows left/right
- # =================
- (r'[←→]', Keyword.Declaration),
- #
- # D-Fn
- # ====
- (r'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
- (r'[{}]', Keyword.Type),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/archetype.py b/venv/lib/python3.11/site-packages/pygments/lexers/archetype.py
deleted file mode 100644
index e8312d7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/archetype.py
+++ /dev/null
@@ -1,319 +0,0 @@
-"""
- pygments.lexers.archetype
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Archetype-related syntaxes, including:
-
- - ODIN syntax <https://github.com/openEHR/odin>
- - ADL syntax <http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf>
- - cADL sub-syntax of ADL
-
- For uses of this syntax, see the openEHR archetypes <http://www.openEHR.org/ckm>
-
- Contributed by Thomas Beale <https://github.com/wolandscat>,
- <https://bitbucket.org/thomas_beale>.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, using, default
-from pygments.token import Text, Comment, Name, Literal, Number, String, \
- Punctuation, Keyword, Operator, Generic, Whitespace
-
-__all__ = ['OdinLexer', 'CadlLexer', 'AdlLexer']
-
-
-class AtomsLexer(RegexLexer):
- """
- Lexer for Values used in ADL and ODIN.
-
- .. versionadded:: 2.1
- """
-
- tokens = {
- # ----- pseudo-states for inclusion -----
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'([ \t]*)(--.*)$', bygroups(Whitespace, Comment)),
- ],
- 'archetype_id': [
- (r'([ \t]*)(([a-zA-Z]\w+(\.[a-zA-Z]\w+)*::)?[a-zA-Z]\w+(-[a-zA-Z]\w+){2}'
- r'\.\w+[\w-]*\.v\d+(\.\d+){,2}((-[a-z]+)(\.\d+)?)?)',
- bygroups(Whitespace, Name.Decorator)),
- ],
- 'date_constraints': [
- # ISO 8601-based date/time constraints
- (r'[Xx?YyMmDdHhSs\d]{2,4}([:-][Xx?YyMmDdHhSs\d]{2}){2}', Literal.Date),
- # ISO 8601-based duration constraints + optional trailing slash
- (r'(P[YyMmWwDd]+(T[HhMmSs]+)?|PT[HhMmSs]+)/?', Literal.Date),
- ],
- 'ordered_values': [
- # ISO 8601 date with optional 'T' ligature
- (r'\d{4}-\d{2}-\d{2}T?', Literal.Date),
- # ISO 8601 time
- (r'\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{4}|Z)?', Literal.Date),
- # ISO 8601 duration
- (r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
- r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
- (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'[+-]?\d*\.\d+%?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[+-]?\d+%?', Number.Integer),
- ],
- 'values': [
- include('ordered_values'),
- (r'([Tt]rue|[Ff]alse)', Literal),
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'[a-z][a-z0-9+.-]*:', Literal, 'uri'),
- # term code
- (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator,
- Punctuation)),
- (r'\|', Punctuation, 'interval'),
- # list continuation
- (r'\.\.\.', Punctuation),
- ],
- 'constraint_values': [
- (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)',
- bygroups(Punctuation, Name.Decorator, Punctuation), 'adl14_code_constraint'),
- # ADL 1.4 ordinal constraint
- (r'(\d*)(\|)(\[\w[\w-]*::\w[\w-]*\])((?:[,;])?)',
- bygroups(Number, Punctuation, Name.Decorator, Punctuation)),
- include('date_constraints'),
- include('values'),
- ],
-
- # ----- real states -----
- 'string': [
- ('"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- # all other characters
- (r'[^\\"]+', String),
- # stray backslash
- (r'\\', String),
- ],
- 'uri': [
- # effective URI terminators
- (r'[,>\s]', Punctuation, '#pop'),
- (r'[^>\s,]+', Literal),
- ],
- 'interval': [
- (r'\|', Punctuation, '#pop'),
- include('ordered_values'),
- (r'\.\.', Punctuation),
- (r'[<>=] *', Punctuation),
- # handle +/-
- (r'\+/-', Punctuation),
- (r'\s+', Whitespace),
- ],
- 'any_code': [
- include('archetype_id'),
- # if it is a code
- (r'[a-z_]\w*[0-9.]+(@[^\]]+)?', Name.Decorator),
- # if it is tuple with attribute names
- (r'[a-z_]\w*', Name.Class),
- # if it is an integer, i.e. Xpath child index
- (r'[0-9]+', Text),
- (r'\|', Punctuation, 'code_rubric'),
- (r'\]', Punctuation, '#pop'),
- # handle use_archetype statement
- (r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
- ],
- 'code_rubric': [
- (r'\|', Punctuation, '#pop'),
- (r'[^|]+', String),
- ],
- 'adl14_code_constraint': [
- (r'\]', Punctuation, '#pop'),
- (r'\|', Punctuation, 'code_rubric'),
- (r'(\w[\w-]*)([;,]?)', bygroups(Name.Decorator, Punctuation)),
- include('whitespace'),
- ],
- }
-
-
-class OdinLexer(AtomsLexer):
- """
- Lexer for ODIN syntax.
-
- .. versionadded:: 2.1
- """
- name = 'ODIN'
- aliases = ['odin']
- filenames = ['*.odin']
- mimetypes = ['text/odin']
-
- tokens = {
- 'path': [
- (r'>', Punctuation, '#pop'),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'/', Punctuation),
- (r'\[', Punctuation, 'key'),
- (r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace), '#pop'),
- (r'\s+', Whitespace, '#pop'),
- ],
- 'key': [
- include('values'),
- (r'\]', Punctuation, '#pop'),
- ],
- 'type_cast': [
- (r'\)', Punctuation, '#pop'),
- (r'[^)]+', Name.Class),
- ],
- 'root': [
- include('whitespace'),
- (r'([Tt]rue|[Ff]alse)', Literal),
- include('values'),
- # x-ref path
- (r'/', Punctuation, 'path'),
- # x-ref path starting with key
- (r'\[', Punctuation, 'key'),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'=', Operator),
- (r'\(', Punctuation, 'type_cast'),
- (r',', Punctuation),
- (r'<', Punctuation),
- (r'>', Punctuation),
- (r';', Punctuation),
- ],
- }
-
-
-class CadlLexer(AtomsLexer):
- """
- Lexer for cADL syntax.
-
- .. versionadded:: 2.1
- """
- name = 'cADL'
- aliases = ['cadl']
- filenames = ['*.cadl']
-
- tokens = {
- 'path': [
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'/', Punctuation),
- (r'\[', Punctuation, 'any_code'),
- (r'\s+', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- (r'(cardinality|existence|occurrences|group|include|exclude|'
- r'allow_archetype|use_archetype|use_node)\W', Keyword.Type),
- (r'(and|or|not|there_exists|xor|implies|for_all)\W', Keyword.Type),
- (r'(after|before|closed)\W', Keyword.Type),
- (r'(not)\W', Operator),
- (r'(matches|is_in)\W', Operator),
- # is_in / not is_in char
- ('(\u2208|\u2209)', Operator),
- # there_exists / not there_exists / for_all / and / or
-            ('(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\u223C)',
- Operator),
- # regex in slot or as string constraint
- (r'(\{)(\s*)(/[^}]+/)(\s*)(\})',
- bygroups(Punctuation, Whitespace, String.Regex, Whitespace, Punctuation)),
- # regex in slot or as string constraint
- (r'(\{)(\s*)(\^[^}]+\^)(\s*)(\})',
- bygroups(Punctuation, Whitespace, String.Regex, Whitespace, Punctuation)),
- (r'/', Punctuation, 'path'),
- # for cardinality etc
- (r'(\{)((?:\d+\.\.)?(?:\d+|\*))'
- r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})',
- bygroups(Punctuation, Number, Number, Punctuation)),
- # [{ is start of a tuple value
- (r'\[\{', Punctuation),
- (r'\}\]', Punctuation),
- (r'\{', Punctuation),
- (r'\}', Punctuation),
- include('constraint_values'),
- # type name
- (r'[A-Z]\w+(<[A-Z]\w+([A-Za-z_<>]*)>)?', Name.Class),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'\[', Punctuation, 'any_code'),
- (r'(~|//|\\\\|\+|-|/|\*|\^|!=|=|<=|>=|<|>]?)', Operator),
- (r'\(', Punctuation),
- (r'\)', Punctuation),
- # for lists of values
- (r',', Punctuation),
- (r'"', String, 'string'),
- # for assumed value
- (r';', Punctuation),
- ],
- }
-
-
-class AdlLexer(AtomsLexer):
- """
- Lexer for ADL syntax.
-
- .. versionadded:: 2.1
- """
-
- name = 'ADL'
- aliases = ['adl']
- filenames = ['*.adl', '*.adls', '*.adlf', '*.adlx']
-
- tokens = {
- 'whitespace': [
- # blank line ends
- (r'\s*\n', Whitespace),
- # comment-only line
- (r'^([ \t]*)(--.*)$', bygroups(Whitespace, Comment)),
- ],
- 'odin_section': [
-            # repeating the following two rules from the root state enables multi-line
- # strings that start in the first column to be dealt with
- (r'^(language|description|ontology|terminology|annotations|'
- r'component_terminologies|revision_history)([ \t]*\n)',
- bygroups(Generic.Heading, Whitespace)),
- (r'^(definition)([ \t]*\n)', bygroups(Generic.Heading, Whitespace), 'cadl_section'),
- (r'^([ \t]*|[ \t]+.*)\n', using(OdinLexer)),
- (r'^([^"]*")(>[ \t]*\n)', bygroups(String, Punctuation)),
- # template overlay delimiter
- (r'^----------*\n', Text, '#pop'),
- (r'^.*\n', String),
- default('#pop'),
- ],
- 'cadl_section': [
- (r'^([ \t]*|[ \t]+.*)\n', using(CadlLexer)),
- default('#pop'),
- ],
- 'rules_section': [
- (r'^[ \t]+.*\n', using(CadlLexer)),
- default('#pop'),
- ],
- 'metadata': [
- (r'\)', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'([Tt]rue|[Ff]alse)', Literal),
- # numbers and version ids
- (r'\d+(\.\d+)*', Literal),
- # Guids
- (r'(\d|[a-fA-F])+(-(\d|[a-fA-F])+){3,}', Literal),
- (r'\w+', Name.Class),
- (r'"', String, 'string'),
- (r'=', Operator),
- (r'[ \t]+', Whitespace),
- default('#pop'),
- ],
- 'root': [
- (r'^(archetype|template_overlay|operational_template|template|'
- r'speciali[sz]e)', Generic.Heading),
- (r'^(language|description|ontology|terminology|annotations|'
- r'component_terminologies|revision_history)[ \t]*\n',
- Generic.Heading, 'odin_section'),
- (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
- (r'^(rules)[ \t]*\n', Generic.Heading, 'rules_section'),
- include('archetype_id'),
- (r'([ \t]*)(\()', bygroups(Whitespace, Punctuation), 'metadata'),
- include('whitespace'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/arrow.py b/venv/lib/python3.11/site-packages/pygments/lexers/arrow.py
deleted file mode 100644
index 894b64d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/arrow.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""
- pygments.lexers.arrow
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Arrow.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, default, include
-from pygments.token import Text, Operator, Keyword, Punctuation, Name, \
- String, Number, Whitespace
-
-__all__ = ['ArrowLexer']
-
-TYPES = r'\b(int|bool|char)((?:\[\])*)(?=\s+)'
-IDENT = r'([a-zA-Z_][a-zA-Z0-9_]*)'
-DECL = TYPES + r'(\s+)' + IDENT
-
-
-class ArrowLexer(RegexLexer):
- """
- Lexer for Arrow
-
- .. versionadded:: 2.7
- """
-
- name = 'Arrow'
- url = 'https://pypi.org/project/py-arrow-lang/'
- aliases = ['arrow']
- filenames = ['*.arw']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'^[|\s]+', Punctuation),
- include('blocks'),
- include('statements'),
- include('expressions'),
- ],
- 'blocks': [
- (r'(function)(\n+)(/-->)(\s*)' +
- DECL + # 4 groups
- r'(\()', bygroups(
- Keyword.Reserved, Whitespace, Punctuation,
- Whitespace, Keyword.Type, Punctuation, Whitespace,
- Name.Function, Punctuation
- ), 'fparams'),
- (r'/-->$|\\-->$|/--<|\\--<|\^', Punctuation),
- ],
- 'statements': [
- (DECL, bygroups(Keyword.Type, Punctuation, Text, Name.Variable)),
- (r'\[', Punctuation, 'index'),
- (r'=', Operator),
- (r'require|main', Keyword.Reserved),
- (r'print', Keyword.Reserved, 'print'),
- ],
- 'expressions': [
- (r'\s+', Whitespace),
- (r'[0-9]+', Number.Integer),
- (r'true|false', Keyword.Constant),
- (r"'", String.Char, 'char'),
- (r'"', String.Double, 'string'),
- (r'\{', Punctuation, 'array'),
- (r'==|!=|<|>|\+|-|\*|/|%', Operator),
- (r'and|or|not|length', Operator.Word),
- (r'(input)(\s+)(int|char\[\])', bygroups(
- Keyword.Reserved, Whitespace, Keyword.Type
- )),
- (IDENT + r'(\()', bygroups(
- Name.Function, Punctuation
- ), 'fargs'),
- (IDENT, Name.Variable),
- (r'\[', Punctuation, 'index'),
- (r'\(', Punctuation, 'expressions'),
- (r'\)', Punctuation, '#pop'),
- ],
- 'print': [
- include('expressions'),
- (r',', Punctuation),
- default('#pop'),
- ],
- 'fparams': [
- (DECL, bygroups(Keyword.Type, Punctuation, Whitespace, Name.Variable)),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- ],
- 'escape': [
- (r'\\(["\\/abfnrtv]|[0-9]{1,3}|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4})',
- String.Escape),
- ],
- 'char': [
- (r"'", String.Char, '#pop'),
- include('escape'),
- (r"[^'\\]", String.Char),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- include('escape'),
- (r'[^"\\]+', String.Double),
- ],
- 'array': [
- include('expressions'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation),
- ],
- 'fargs': [
- include('expressions'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- ],
- 'index': [
- include('expressions'),
- (r'\]', Punctuation, '#pop'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/arturo.py b/venv/lib/python3.11/site-packages/pygments/lexers/arturo.py
deleted file mode 100644
index 7225824..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/arturo.py
+++ /dev/null
@@ -1,250 +0,0 @@
-"""
- pygments.lexers.arturo
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Arturo language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, do_insertions, include, \
- this, using, words
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
- Punctuation, String, Text
-
-from pygments.util import ClassNotFound, get_bool_opt
-
-__all__ = ['ArturoLexer']
-
-
-class ArturoLexer(RegexLexer):
- """
- For Arturo source code.
-
- See `Arturo's Github <https://github.com/arturo-lang/arturo>`_
- and `Arturo's Website <https://arturo-lang.io/>`_.
-
- .. versionadded:: 2.14.0
- """
-
- name = 'Arturo'
- aliases = ['arturo', 'art']
- filenames = ['*.art']
- url = 'https://arturo-lang.io/'
-
- def __init__(self, **options):
- self.handle_annotateds = get_bool_opt(options, 'handle_annotateds',
- True)
- RegexLexer.__init__(self, **options)
-
- def handle_annotated_strings(self, match):
- """Adds syntax from another languages inside annotated strings
-
- match args:
- 1:open_string,
- 2:exclamation_mark,
- 3:lang_name,
- 4:space_or_newline,
- 5:code,
- 6:close_string
- """
- from pygments.lexers import get_lexer_by_name
-
- # Header's section
- yield match.start(1), String.Double, match.group(1)
- yield match.start(2), String.Interpol, match.group(2)
- yield match.start(3), String.Interpol, match.group(3)
- yield match.start(4), Text.Whitespace, match.group(4)
-
- lexer = None
- if self.handle_annotateds:
- try:
- lexer = get_lexer_by_name(match.group(3).strip())
- except ClassNotFound:
- pass
- code = match.group(5)
-
- if lexer is None:
- yield match.start(5), String, code
- else:
- yield from do_insertions([], lexer.get_tokens_unprocessed(code))
-
- yield match.start(6), String.Double, match.group(6)
-
- tokens = {
- 'root': [
- (r';.*?$', Comment.Single),
- (r'^((\s#!)|(#!)).*?$', Comment.Hashbang),
-
- # Constants
- (words(('false', 'true', 'maybe'), # boolean
- suffix=r'\b'), Name.Constant),
- (words(('this', 'init'), # class related keywords
- prefix=r'\b', suffix=r'\b\??:?'), Name.Builtin.Pseudo),
- (r'`.`', String.Char), # character
- (r'\\\w+\b\??:?', Name.Property), # array index
- (r'#\w+', Name.Constant), # color
- (r'\b[0-9]+\.[0-9]+', Number.Float), # float
- (r'\b[0-9]+', Number.Integer), # integer
- (r'\w+\b\??:', Name.Label), # label
- # Note: Literals can be labeled too
- (r'\'(?:\w+\b\??:?)', Keyword.Declaration), # literal
- (r'\:\w+', Keyword.Type), # type
- # Note: Attributes can be labeled too
- (r'\.\w+\??:?', Name.Attribute), # attributes
-
- # Switch structure
- (r'(\()(.*?)(\)\?)',
- bygroups(Punctuation, using(this), Punctuation)),
-
- # Single Line Strings
- (r'"', String.Double, 'inside-simple-string'),
- (r'»', String.Single, 'inside-smart-string'),
- (r'«««', String.Double, 'inside-safe-string'),
- (r'\{\/', String.Single, 'inside-regex-string'),
-
- # Multi Line Strings
- (r'\{\:', String.Double, 'inside-curly-verb-string'),
- (r'(\{)(\!)(\w+)(\s|\n)([\w\W]*?)(^\})', handle_annotated_strings),
- (r'\{', String.Single, 'inside-curly-string'),
- (r'\-{3,}', String.Single, 'inside-eof-string'),
-
- include('builtin-functions'),
-
- # Operators
- (r'[()[\],]', Punctuation),
- (words(('->', '==>', '|', '::', '@', '#', # sugar syntax
- '$', '&', '!', '!!', './')), Name.Decorator),
- (words(('<:', ':>', ':<', '>:', '<\\', '<>', '<', '>',
- 'ø', '∞',
- '+', '-', '*', '~', '=', '^', '%', '/', '//',
- '==>', '<=>', '<==>',
- '=>>', '<<=>>', '<<==>>',
- '-->', '<->', '<-->',
- '=|', '|=', '-:', ':-',
- '_', '.', '..', '\\')), Operator),
-
- (r'\b\w+', Name),
- (r'\s+', Text.Whitespace),
- (r'.+$', Error),
- ],
-
- 'inside-interpol': [
- (r'\|', String.Interpol, '#pop'),
- (r'[^|]+', using(this)),
- ],
- 'inside-template': [
- (r'\|\|\>', String.Interpol, '#pop'),
- (r'[^|]+', using(this)),
- ],
- 'string-escape': [
- (words(('\\\\', '\\n', '\\t', '\\"')), String.Escape),
- ],
-
- 'inside-simple-string': [
- include('string-escape'),
- (r'\|', String.Interpol, 'inside-interpol'), # Interpolation
- (r'\<\|\|', String.Interpol, 'inside-template'), # Templates
- (r'"', String.Double, '#pop'), # Closing Quote
- (r'[^|"]+', String) # String Content
- ],
- 'inside-smart-string': [
- include('string-escape'),
- (r'\|', String.Interpol, 'inside-interpol'), # Interpolation
- (r'\<\|\|', String.Interpol, 'inside-template'), # Templates
- (r'\n', String.Single, '#pop'), # Closing Quote
- (r'[^|\n]+', String) # String Content
- ],
- 'inside-safe-string': [
- include('string-escape'),
- (r'\|', String.Interpol, 'inside-interpol'), # Interpolation
- (r'\<\|\|', String.Interpol, 'inside-template'), # Templates
- (r'»»»', String.Double, '#pop'), # Closing Quote
- (r'[^|»]+', String) # String Content
- ],
- 'inside-regex-string': [
- (r'\\[sSwWdDbBZApPxucItnvfr0]+', String.Escape),
- (r'\|', String.Interpol, 'inside-interpol'), # Interpolation
- (r'\<\|\|', String.Interpol, 'inside-template'), # Templates
- (r'\/\}', String.Single, '#pop'), # Closing Quote
- (r'[^|\/]+', String.Regex), # String Content
- ],
- 'inside-curly-verb-string': [
- include('string-escape'),
- (r'\|', String.Interpol, 'inside-interpol'), # Interpolation
- (r'\<\|\|', String.Interpol, 'inside-template'), # Templates
- (r'\:\}', String.Double, '#pop'), # Closing Quote
- (r'[^|<:]+', String), # String Content
- ],
- 'inside-curly-string': [
- include('string-escape'),
- (r'\|', String.Interpol, 'inside-interpol'), # Interpolation
- (r'\<\|\|', String.Interpol, 'inside-template'), # Templates
- (r'\}', String.Single, '#pop'), # Closing Quote
- (r'[^|<}]+', String), # String Content
- ],
- 'inside-eof-string': [
- include('string-escape'),
- (r'\|', String.Interpol, 'inside-interpol'), # Interpolation
- (r'\<\|\|', String.Interpol, 'inside-template'), # Templates
- (r'\Z', String.Single, '#pop'), # Closing Quote
- (r'[^|<]+', String), # String Content
- ],
-
- 'builtin-functions': [
- (words((
- 'all', 'and', 'any', 'ascii', 'attr', 'attribute',
- 'attributeLabel', 'binary', 'block', 'char', 'contains',
- 'database', 'date', 'dictionary', 'empty', 'equal', 'even',
- 'every', 'exists', 'false', 'floating', 'function', 'greater',
- 'greaterOrEqual', 'if', 'in', 'inline', 'integer', 'is',
- 'key', 'label', 'leap', 'less', 'lessOrEqual', 'literal',
- 'logical', 'lower', 'nand', 'negative', 'nor', 'not',
- 'notEqual', 'null', 'numeric', 'odd', 'or', 'path',
- 'pathLabel', 'positive', 'prefix', 'prime', 'set', 'some',
- 'sorted', 'standalone', 'string', 'subset', 'suffix',
- 'superset', 'symbol', 'true', 'try', 'type', 'unless', 'upper',
- 'when', 'whitespace', 'word', 'xnor', 'xor', 'zero',
- ), prefix=r'\b', suffix=r'\b\?'), Name.Builtin),
- (words((
- 'abs', 'acos', 'acosh', 'acsec', 'acsech', 'actan', 'actanh',
- 'add', 'after', 'alphabet', 'and', 'angle', 'append', 'arg',
- 'args', 'arity', 'array', 'as', 'asec', 'asech', 'asin',
- 'asinh', 'atan', 'atan2', 'atanh', 'attr', 'attrs', 'average',
- 'before', 'benchmark', 'blend', 'break', 'builtins1',
- 'builtins2', 'call', 'capitalize', 'case', 'ceil', 'chop',
- 'chunk', 'clear', 'close', 'cluster', 'color', 'combine',
- 'conj', 'continue', 'copy', 'cos', 'cosh', 'couple', 'csec',
- 'csech', 'ctan', 'ctanh', 'cursor', 'darken', 'dec', 'decode',
- 'decouple', 'define', 'delete', 'desaturate', 'deviation',
- 'dictionary', 'difference', 'digest', 'digits', 'div', 'do',
- 'download', 'drop', 'dup', 'e', 'else', 'empty', 'encode',
- 'ensure', 'env', 'epsilon', 'escape', 'execute', 'exit', 'exp',
- 'extend', 'extract', 'factors', 'false', 'fdiv', 'filter',
- 'first', 'flatten', 'floor', 'fold', 'from', 'function',
- 'gamma', 'gcd', 'get', 'goto', 'hash', 'help', 'hypot', 'if',
- 'in', 'inc', 'indent', 'index', 'infinity', 'info', 'input',
- 'insert', 'inspect', 'intersection', 'invert', 'join', 'keys',
- 'kurtosis', 'last', 'let', 'levenshtein', 'lighten', 'list',
- 'ln', 'log', 'loop', 'lower', 'mail', 'map', 'match', 'max',
- 'maybe', 'median', 'min', 'mod', 'module', 'mul', 'nand',
- 'neg', 'new', 'nor', 'normalize', 'not', 'now', 'null', 'open',
- 'or', 'outdent', 'pad', 'panic', 'path', 'pause',
- 'permissions', 'permutate', 'pi', 'pop', 'pow', 'powerset',
- 'powmod', 'prefix', 'print', 'prints', 'process', 'product',
- 'query', 'random', 'range', 'read', 'relative', 'remove',
- 'rename', 'render', 'repeat', 'replace', 'request', 'return',
- 'reverse', 'round', 'sample', 'saturate', 'script', 'sec',
- 'sech', 'select', 'serve', 'set', 'shl', 'shr', 'shuffle',
- 'sin', 'sinh', 'size', 'skewness', 'slice', 'sort', 'split',
- 'sqrt', 'squeeze', 'stack', 'strip', 'sub', 'suffix', 'sum',
- 'switch', 'symbols', 'symlink', 'sys', 'take', 'tan', 'tanh',
- 'terminal', 'to', 'true', 'truncate', 'try', 'type', 'union',
- 'unique', 'unless', 'until', 'unzip', 'upper', 'values', 'var',
- 'variance', 'volume', 'webview', 'while', 'with', 'wordwrap',
- 'write', 'xnor', 'xor', 'zip'
- ), prefix=r'\b', suffix=r'\b'), Name.Builtin)
- ],
-
- }
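
For context on the ArturoLexer being removed above: its handle_annotated_strings callback hands the body of a {!lang ...} block to whatever lexer get_lexer_by_name resolves, and falls back to plain String tokens when the language is unknown or the handle_annotateds option is off. A minimal usage sketch, assuming a regular Pygments install (2.14 or later) where the lexer is still importable; the Arturo snippet itself is made up for illustration:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import ArturoLexer

    # Made-up Arturo source containing an annotated string that embeds Python.
    code = 'print {!python\nprint(1 + 1)\n}'

    # Default: the embedded block is re-lexed by the Python lexer.
    print(highlight(code, ArturoLexer(), TerminalFormatter()))

    # With handle_annotateds=False the block stays a plain string token.
    print(highlight(code, ArturoLexer(handle_annotateds=False), TerminalFormatter()))
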
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/asc.py b/venv/lib/python3.11/site-packages/pygments/lexers/asc.py
deleted file mode 100644
index e261f41..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/asc.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
- pygments.lexers.asc
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for various ASCII armored files.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Generic, Name, Operator, String, Whitespace
-
-__all__ = ['AscLexer']
-
-
-class AscLexer(RegexLexer):
- """
- Lexer for ASCII armored files, containing `-----BEGIN/END ...-----` wrapped
- base64 data.
-
- .. versionadded:: 2.10
- """
- name = 'ASCII armored'
- aliases = ['asc', 'pem']
- filenames = [
- '*.asc', # PGP; *.gpg, *.pgp, and *.sig too, but those can be binary
- '*.pem', # X.509; *.cer, *.crt, *.csr, *.key, etc. too, but those can be binary
- 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk',
- 'id_rsa', # SSH private keys
- ]
- mimetypes = ['application/pgp-keys', 'application/pgp-encrypted',
- 'application/pgp-signature', 'application/pem-certificate-chain']
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'^-----BEGIN [^\n]+-----$', Generic.Heading, 'data'),
- (r'\S+', Comment),
- ],
- 'data': [
- (r'\s+', Whitespace),
- (r'^([^:]+)(:)([ \t]+)(.*)',
- bygroups(Name.Attribute, Operator, Whitespace, String)),
- (r'^-----END [^\n]+-----$', Generic.Heading, 'root'),
- (r'\S+', String),
- ],
- }
-
- def analyse_text(text):
- if re.search(r'^-----BEGIN [^\n]+-----\r?\n', text):
- return True
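
The AscLexer deleted above is driven by two states: root switches into data on a -----BEGIN ...----- heading, data switches back on the matching -----END ...----- line, and analyse_text lets guess_lexer recognise armored input. A small sketch, assuming a normal Pygments install; the PEM body is dummy base64, not a real certificate:

    from pygments.lexers import AscLexer, guess_lexer

    pem = (
        "-----BEGIN CERTIFICATE-----\n"
        "Comment: dummy armor header\n"
        "TUlJQ2R6Q0NBZUNnQXdJQkFnSUJBREFOQmdrcWhraUc5dzBCQVFzRkFEQVMK\n"
        "-----END CERTIFICATE-----\n"
    )

    # BEGIN/END lines come out as Generic.Heading, the "Comment:" line as
    # Name.Attribute / Operator / String, and the base64 payload as String.
    for token, value in AscLexer().get_tokens(pem):
        print(token, repr(value))

    # analyse_text() returns True for armored input, so guess_lexer() should
    # normally resolve to this lexer.
    print(guess_lexer(pem).name)
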
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/asm.py b/venv/lib/python3.11/site-packages/pygments/lexers/asm.py
deleted file mode 100644
index 0035c72..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/asm.py
+++ /dev/null
@@ -1,1037 +0,0 @@
-"""
- pygments.lexers.asm
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for assembly languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, words, \
- DelegatingLexer, default
-from pygments.lexers.c_cpp import CppLexer, CLexer
-from pygments.lexers.d import DLexer
-from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
- Other, Keyword, Operator, Whitespace
-
-__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
- 'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'LlvmMirBodyLexer',
- 'LlvmMirLexer', 'NasmLexer', 'NasmObjdumpLexer', 'TasmLexer',
- 'Ca65Lexer', 'Dasm16Lexer']
-
-
-class GasLexer(RegexLexer):
- """
- For Gas (AT&T) assembly code.
- """
- name = 'GAS'
- aliases = ['gas', 'asm']
- filenames = ['*.s', '*.S']
- mimetypes = ['text/x-gas']
-
- #: double-quoted string literal, allowing escaped quotes
- string = r'"(\\"|[^"])*"'
- char = r'[\w$.@-]'
- identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
- number = r'(?:0[xX][a-fA-F0-9]+|#?-?\d+)'
- register = '%' + identifier + r'\b'
-
- tokens = {
- 'root': [
- include('whitespace'),
- (identifier + ':', Name.Label),
- (r'\.' + identifier, Name.Attribute, 'directive-args'),
- (r'lock|rep(n?z)?|data\d+', Name.Attribute),
- (identifier, Name.Function, 'instruction-args'),
- (r'[\r\n]+', Text)
- ],
- 'directive-args': [
- (identifier, Name.Constant),
- (string, String),
- ('@' + identifier, Name.Attribute),
- (number, Number.Integer),
- (register, Name.Variable),
- (r'[\r\n]+', Whitespace, '#pop'),
- (r'([;#]|//).*?\n', Comment.Single, '#pop'),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
-
- include('punctuation'),
- include('whitespace')
- ],
- 'instruction-args': [
- # For objdump-disassembled code, shouldn't occur in
- # actual assembler input
- ('([a-z0-9]+)( )(<)('+identifier+')(>)',
- bygroups(Number.Hex, Text, Punctuation, Name.Constant,
- Punctuation)),
- ('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
- bygroups(Number.Hex, Text, Punctuation, Name.Constant,
- Punctuation, Number.Integer, Punctuation)),
-
- # Address constants
- (identifier, Name.Constant),
- (number, Number.Integer),
- # Registers
- (register, Name.Variable),
- # Numeric constants
- ('$'+number, Number.Integer),
- (r"$'(.|\\')'", String.Char),
- (r'[\r\n]+', Whitespace, '#pop'),
- (r'([;#]|//).*?\n', Comment.Single, '#pop'),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
-
- include('punctuation'),
- include('whitespace')
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'([;#]|//).*?\n', Comment.Single),
- (r'/[*][\w\W]*?[*]/', Comment.Multiline)
- ],
- 'punctuation': [
- (r'[-*,.()\[\]!:{}]+', Punctuation)
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\.(text|data|section)', text, re.M):
- return True
- elif re.search(r'^\.\w+', text, re.M):
- return 0.1
-
-
-def _objdump_lexer_tokens(asm_lexer):
- """
- Common objdump lexer tokens to wrap an ASM lexer.
- """
- hex_re = r'[0-9A-Za-z]'
- return {
- 'root': [
- # File name & format:
- ('(.*?)(:)( +file format )(.*?)$',
- bygroups(Name.Label, Punctuation, Text, String)),
- # Section header
- ('(Disassembly of section )(.*?)(:)$',
- bygroups(Text, Name.Label, Punctuation)),
- # Function labels
- # (With offset)
- ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
- bygroups(Number.Hex, Whitespace, Punctuation, Name.Function,
- Punctuation, Number.Hex, Punctuation)),
- # (Without offset)
- ('('+hex_re+'+)( )(<)(.*?)(>:)$',
- bygroups(Number.Hex, Whitespace, Punctuation, Name.Function,
- Punctuation)),
- # Code line with disassembled instructions
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
- bygroups(Whitespace, Name.Label, Whitespace, Number.Hex, Whitespace,
- using(asm_lexer))),
- # Code line without raw instructions (objdump --no-show-raw-insn)
- ('( *)('+hex_re+r'+:)( *\t)([a-zA-Z].*?)$',
- bygroups(Whitespace, Name.Label, Whitespace,
- using(asm_lexer))),
- # Code line with ascii
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
- bygroups(Whitespace, Name.Label, Whitespace, Number.Hex, Whitespace, String)),
- # Continued code line, only raw opcodes without disassembled
- # instruction
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
- bygroups(Whitespace, Name.Label, Whitespace, Number.Hex)),
- # Skipped a few bytes
- (r'\t\.\.\.$', Text),
- # Relocation line
- # (With offset)
- (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x'+hex_re+'+)$',
- bygroups(Whitespace, Name.Label, Whitespace, Name.Property, Whitespace,
- Name.Constant, Punctuation, Number.Hex)),
- # (Without offset)
- (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)$',
- bygroups(Whitespace, Name.Label, Whitespace, Name.Property, Whitespace,
- Name.Constant)),
- (r'[^\n]+\n', Other)
- ]
- }
-
-
-class ObjdumpLexer(RegexLexer):
- """
- For the output of ``objdump -dr``.
- """
- name = 'objdump'
- aliases = ['objdump']
- filenames = ['*.objdump']
- mimetypes = ['text/x-objdump']
-
- tokens = _objdump_lexer_tokens(GasLexer)
-
-
-class DObjdumpLexer(DelegatingLexer):
- """
- For the output of ``objdump -Sr`` on compiled D files.
- """
- name = 'd-objdump'
- aliases = ['d-objdump']
- filenames = ['*.d-objdump']
- mimetypes = ['text/x-d-objdump']
-
- def __init__(self, **options):
- super().__init__(DLexer, ObjdumpLexer, **options)
-
-
-class CppObjdumpLexer(DelegatingLexer):
- """
- For the output of ``objdump -Sr`` on compiled C++ files.
- """
- name = 'cpp-objdump'
- aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
- filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
- mimetypes = ['text/x-cpp-objdump']
-
- def __init__(self, **options):
- super().__init__(CppLexer, ObjdumpLexer, **options)
-
-
-class CObjdumpLexer(DelegatingLexer):
- """
- For the output of ``objdump -Sr`` on compiled C files.
- """
- name = 'c-objdump'
- aliases = ['c-objdump']
- filenames = ['*.c-objdump']
- mimetypes = ['text/x-c-objdump']
-
- def __init__(self, **options):
- super().__init__(CLexer, ObjdumpLexer, **options)
-
-
-class HsailLexer(RegexLexer):
- """
- For HSAIL assembly code.
-
- .. versionadded:: 2.2
- """
- name = 'HSAIL'
- aliases = ['hsail', 'hsa']
- filenames = ['*.hsail']
- mimetypes = ['text/x-hsail']
-
- string = r'"[^"]*?"'
- identifier = r'[a-zA-Z_][\w.]*'
- # Registers
- register_number = r'[0-9]+'
- register = r'(\$(c|s|d|q)' + register_number + r')\b'
- # Qualifiers
- alignQual = r'(align\(\d+\))'
- widthQual = r'(width\((\d+|all)\))'
- allocQual = r'(alloc\(agent\))'
- # Instruction Modifiers
- roundingMod = (r'((_ftz)?(_up|_down|_zero|_near))')
- datatypeMod = (r'_('
- # packedTypes
- r'u8x4|s8x4|u16x2|s16x2|u8x8|s8x8|u16x4|s16x4|u32x2|s32x2|'
- r'u8x16|s8x16|u16x8|s16x8|u32x4|s32x4|u64x2|s64x2|'
- r'f16x2|f16x4|f16x8|f32x2|f32x4|f64x2|'
- # baseTypes
- r'u8|s8|u16|s16|u32|s32|u64|s64|'
- r'b128|b8|b16|b32|b64|b1|'
- r'f16|f32|f64|'
- # opaqueType
- r'roimg|woimg|rwimg|samp|sig32|sig64)')
-
- # Numeric Constant
- float = r'((\d+\.)|(\d*\.\d+))[eE][+-]?\d+'
- hexfloat = r'0[xX](([0-9a-fA-F]+\.[0-9a-fA-F]*)|([0-9a-fA-F]*\.[0-9a-fA-F]+))[pP][+-]?\d+'
- ieeefloat = r'0((h|H)[0-9a-fA-F]{4}|(f|F)[0-9a-fA-F]{8}|(d|D)[0-9a-fA-F]{16})'
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
-
- (string, String),
-
- (r'@' + identifier + ':?', Name.Label),
-
- (register, Name.Variable.Anonymous),
-
- include('keyword'),
-
- (r'&' + identifier, Name.Variable.Global),
- (r'%' + identifier, Name.Variable),
-
- (hexfloat, Number.Hex),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (ieeefloat, Number.Float),
- (float, Number.Float),
- (r'\d+', Number.Integer),
-
- (r'[=<>{}\[\]()*.,:;!]|x\b', Punctuation)
- ],
- 'whitespace': [
- (r'(\n|\s)+', Whitespace),
- ],
- 'comments': [
- (r'/\*.*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single),
- ],
- 'keyword': [
- # Types
- (r'kernarg' + datatypeMod, Keyword.Type),
-
- # Regular keywords
- (r'\$(full|base|small|large|default|zero|near)', Keyword),
- (words((
- 'module', 'extension', 'pragma', 'prog', 'indirect', 'signature',
- 'decl', 'kernel', 'function', 'enablebreakexceptions',
- 'enabledetectexceptions', 'maxdynamicgroupsize', 'maxflatgridsize',
- 'maxflatworkgroupsize', 'requireddim', 'requiredgridsize',
- 'requiredworkgroupsize', 'requirenopartialworkgroups'),
- suffix=r'\b'), Keyword),
-
- # instructions
- (roundingMod, Keyword),
- (datatypeMod, Keyword),
- (r'_(' + alignQual + '|' + widthQual + ')', Keyword),
- (r'_kernarg', Keyword),
- (r'(nop|imagefence)\b', Keyword),
- (words((
- 'cleardetectexcept', 'clock', 'cuid', 'debugtrap', 'dim',
- 'getdetectexcept', 'groupbaseptr', 'kernargbaseptr', 'laneid',
- 'maxcuid', 'maxwaveid', 'packetid', 'setdetectexcept', 'waveid',
- 'workitemflatabsid', 'workitemflatid', 'nullptr', 'abs', 'bitrev',
- 'currentworkgroupsize', 'currentworkitemflatid', 'fract', 'ncos',
- 'neg', 'nexp2', 'nlog2', 'nrcp', 'nrsqrt', 'nsin', 'nsqrt',
- 'gridgroups', 'gridsize', 'not', 'sqrt', 'workgroupid',
- 'workgroupsize', 'workitemabsid', 'workitemid', 'ceil', 'floor',
- 'rint', 'trunc', 'add', 'bitmask', 'borrow', 'carry', 'copysign',
- 'div', 'rem', 'sub', 'shl', 'shr', 'and', 'or', 'xor', 'unpackhi',
- 'unpacklo', 'max', 'min', 'fma', 'mad', 'bitextract', 'bitselect',
- 'shuffle', 'cmov', 'bitalign', 'bytealign', 'lerp', 'nfma', 'mul',
- 'mulhi', 'mul24hi', 'mul24', 'mad24', 'mad24hi', 'bitinsert',
- 'combine', 'expand', 'lda', 'mov', 'pack', 'unpack', 'packcvt',
- 'unpackcvt', 'sad', 'sementp', 'ftos', 'stof', 'cmp', 'ld', 'st',
- '_eq', '_ne', '_lt', '_le', '_gt', '_ge', '_equ', '_neu', '_ltu',
- '_leu', '_gtu', '_geu', '_num', '_nan', '_seq', '_sne', '_slt',
- '_sle', '_sgt', '_sge', '_snum', '_snan', '_sequ', '_sneu', '_sltu',
- '_sleu', '_sgtu', '_sgeu', 'atomic', '_ld', '_st', '_cas', '_add',
- '_and', '_exch', '_max', '_min', '_or', '_sub', '_wrapdec',
- '_wrapinc', '_xor', 'ret', 'cvt', '_readonly', '_kernarg', '_global',
- 'br', 'cbr', 'sbr', '_scacq', '_screl', '_scar', '_rlx', '_wave',
- '_wg', '_agent', '_system', 'ldimage', 'stimage', '_v2', '_v3', '_v4',
- '_1d', '_2d', '_3d', '_1da', '_2da', '_1db', '_2ddepth', '_2dadepth',
- '_width', '_height', '_depth', '_array', '_channelorder',
- '_channeltype', 'querysampler', '_coord', '_filter', '_addressing',
- 'barrier', 'wavebarrier', 'initfbar', 'joinfbar', 'waitfbar',
- 'arrivefbar', 'leavefbar', 'releasefbar', 'ldf', 'activelaneid',
- 'activelanecount', 'activelanemask', 'activelanepermute', 'call',
- 'scall', 'icall', 'alloca', 'packetcompletionsig',
- 'addqueuewriteindex', 'casqueuewriteindex', 'ldqueuereadindex',
- 'stqueuereadindex', 'readonly', 'global', 'private', 'group',
- 'spill', 'arg', '_upi', '_downi', '_zeroi', '_neari', '_upi_sat',
- '_downi_sat', '_zeroi_sat', '_neari_sat', '_supi', '_sdowni',
- '_szeroi', '_sneari', '_supi_sat', '_sdowni_sat', '_szeroi_sat',
- '_sneari_sat', '_pp', '_ps', '_sp', '_ss', '_s', '_p', '_pp_sat',
- '_ps_sat', '_sp_sat', '_ss_sat', '_s_sat', '_p_sat')), Keyword),
-
- # Integer types
- (r'i[1-9]\d*', Keyword)
- ]
- }
-
-
-class LlvmLexer(RegexLexer):
- """
- For LLVM assembly code.
- """
- name = 'LLVM'
- url = 'https://llvm.org/docs/LangRef.html'
- aliases = ['llvm']
- filenames = ['*.ll']
- mimetypes = ['text/x-llvm']
-
- #: double-quoted string literal (no escape handling)
- string = r'"[^"]*?"'
- identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
- block_label = r'(' + identifier + r'|(\d+))'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Before keywords, because keywords are valid label names :(...
- (block_label + r'\s*:', Name.Label),
-
- include('keyword'),
-
- (r'%' + identifier, Name.Variable),
- (r'@' + identifier, Name.Variable.Global),
- (r'%\d+', Name.Variable.Anonymous),
- (r'@\d+', Name.Variable.Global),
- (r'#\d+', Name.Variable.Global),
- (r'!' + identifier, Name.Variable),
- (r'!\d+', Name.Variable.Anonymous),
- (r'c?' + string, String),
-
- (r'0[xX][a-fA-F0-9]+', Number),
- (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
-
- (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
- ],
- 'whitespace': [
- (r'(\n|\s+)+', Whitespace),
- (r';.*?\n', Comment)
- ],
- 'keyword': [
- # Regular keywords
- (words((
- 'aarch64_sve_vector_pcs', 'aarch64_vector_pcs', 'acq_rel',
- 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
- 'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca',
- 'allocsize', 'allOnes', 'alwaysinline', 'alwaysInline',
- 'amdgpu_cs', 'amdgpu_es', 'amdgpu_gfx', 'amdgpu_gs',
- 'amdgpu_hs', 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps',
- 'amdgpu_vs', 'and', 'any', 'anyregcc', 'appending', 'arcp',
- 'argmemonly', 'args', 'arm_aapcs_vfpcc', 'arm_aapcscc',
- 'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw',
- 'attributes', 'available_externally', 'avr_intrcc',
- 'avr_signalcc', 'bit', 'bitcast', 'bitMask', 'blockaddress',
- 'blockcount', 'br', 'branchFunnel', 'builtin', 'byArg',
- 'byref', 'byte', 'byteArray', 'byval', 'c', 'call', 'callbr',
- 'callee', 'caller', 'calls', 'canAutoHide', 'catch',
- 'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc',
- 'cfguard_checkcc', 'cleanup', 'cleanuppad', 'cleanupret',
- 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant',
- 'contract', 'convergent', 'critical', 'cxx_fast_tlscc',
- 'datalayout', 'declare', 'default', 'define', 'deplibs',
- 'dereferenceable', 'dereferenceable_or_null', 'distinct',
- 'dllexport', 'dllimport', 'dso_local', 'dso_local_equivalent',
- 'dso_preemptable', 'dsoLocal', 'eq', 'exact', 'exactmatch',
- 'extern_weak', 'external', 'externally_initialized',
- 'extractelement', 'extractvalue', 'fadd', 'false', 'fast',
- 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'fmul',
- 'fneg', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'freeze',
- 'frem', 'from', 'fsub', 'funcFlags', 'function', 'gc',
- 'getelementptr', 'ghccc', 'global', 'guid', 'gv', 'hash',
- 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp',
- 'ifunc', 'inaccessiblemem_or_argmemonly',
- 'inaccessiblememonly', 'inalloca', 'inbounds', 'indir',
- 'indirectbr', 'info', 'initialexec', 'inline', 'inlineBits',
- 'inlinehint', 'inrange', 'inreg', 'insertelement',
- 'insertvalue', 'insts', 'intel_ocl_bicc', 'inteldialect',
- 'internal', 'inttoptr', 'invoke', 'jumptable', 'kind',
- 'landingpad', 'largest', 'linkage', 'linkonce', 'linkonce_odr',
- 'live', 'load', 'local_unnamed_addr', 'localdynamic',
- 'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize',
- 'module', 'monotonic', 'msp430_intrcc', 'mul', 'mustprogress',
- 'musttail', 'naked', 'name', 'nand', 'ne', 'nest', 'ninf',
- 'nnan', 'noalias', 'nobuiltin', 'nocallback', 'nocapture',
- 'nocf_check', 'noduplicate', 'noduplicates', 'nofree',
- 'noimplicitfloat', 'noinline', 'noInline', 'nomerge', 'none',
- 'nonlazybind', 'nonnull', 'noprofile', 'norecurse',
- 'noRecurse', 'noredzone', 'noreturn', 'nosync', 'notail',
- 'notEligibleToImport', 'noundef', 'nounwind', 'nsw',
- 'nsz', 'null', 'null_pointer_is_valid', 'nuw', 'oeq', 'offset',
- 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing',
- 'optnone', 'optsize', 'or', 'ord', 'param', 'params',
- 'partition', 'path', 'personality', 'phi', 'poison',
- 'preallocated', 'prefix', 'preserve_allcc', 'preserve_mostcc',
- 'private', 'prologue', 'protected', 'ptrtoint', 'ptx_device',
- 'ptx_kernel', 'readnone', 'readNone', 'readonly', 'readOnly',
- 'reassoc', 'refs', 'relbf', 'release', 'resByArg', 'resume',
- 'ret', 'returnDoesNotAlias', 'returned', 'returns_twice',
- 'safestack', 'samesize', 'sanitize_address',
- 'sanitize_hwaddress', 'sanitize_memory', 'sanitize_memtag',
- 'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst',
- 'sext', 'sge', 'sgt', 'shadowcallstack', 'shl',
- 'shufflevector', 'sideeffect', 'signext', 'single',
- 'singleImpl', 'singleImplName', 'sitofp', 'sizeM1',
- 'sizeM1BitWidth', 'sle', 'slt', 'source_filename',
- 'speculatable', 'speculative_load_hardening', 'spir_func',
- 'spir_kernel', 'srem', 'sret', 'ssp', 'sspreq', 'sspstrong',
- 'store', 'strictfp', 'sub', 'summaries', 'summary', 'swiftcc',
- 'swifterror', 'swiftself', 'switch', 'syncscope', 'tail',
- 'tailcc', 'target', 'thread_local', 'to', 'token', 'triple',
- 'true', 'trunc', 'type', 'typeCheckedLoadConstVCalls',
- 'typeCheckedLoadVCalls', 'typeid', 'typeidCompatibleVTable',
- 'typeIdInfo', 'typeTestAssumeConstVCalls',
- 'typeTestAssumeVCalls', 'typeTestRes', 'typeTests', 'udiv',
- 'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin',
- 'undef', 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown',
- 'unnamed_addr', 'uno', 'unordered', 'unreachable', 'unsat',
- 'unwind', 'urem', 'uselistorder', 'uselistorder_bb', 'uwtable',
- 'va_arg', 'varFlags', 'variable', 'vcall_visibility',
- 'vFuncId', 'virtFunc', 'virtualConstProp', 'void', 'volatile',
- 'vscale', 'vTableFuncs', 'weak', 'weak_odr', 'webkit_jscc',
- 'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly',
- 'x', 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_intrcc',
- 'x86_mmx', 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc',
- 'x86_vectorcallcc', 'xchg', 'xor', 'zeroext',
- 'zeroinitializer', 'zext', 'immarg', 'willreturn'),
- suffix=r'\b'), Keyword),
-
- # Types
- (words(('void', 'half', 'bfloat', 'float', 'double', 'fp128',
- 'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'x86_mmx',
- 'x86_amx', 'token', 'ptr')),
- Keyword.Type),
-
- # Integer types
- (r'i[1-9]\d*', Keyword.Type)
- ]
- }
-
-
-class LlvmMirBodyLexer(RegexLexer):
- """
- For LLVM MIR examples without the YAML wrapper.
-
- .. versionadded:: 2.6
- """
- name = 'LLVM-MIR Body'
- url = 'https://llvm.org/docs/MIRLangRef.html'
- aliases = ['llvm-mir-body']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- # Attributes on basic blocks
- (words(('liveins', 'successors'), suffix=':'), Keyword),
- # Basic Block Labels
- (r'bb\.[0-9]+(\.[a-zA-Z0-9_.-]+)?( \(address-taken\))?:', Name.Label),
- (r'bb\.[0-9]+ \(%[a-zA-Z0-9_.-]+\)( \(address-taken\))?:', Name.Label),
- (r'%bb\.[0-9]+(\.\w+)?', Name.Label),
- # Stack references
- (r'%stack\.[0-9]+(\.\w+\.addr)?', Name),
- # Subreg indices
- (r'%subreg\.\w+', Name),
- # Virtual registers
- (r'%[a-zA-Z0-9_]+ *', Name.Variable, 'vreg'),
- # Reference to LLVM-IR global
- include('global'),
- # Reference to Intrinsic
- (r'intrinsic\(\@[a-zA-Z0-9_.]+\)', Name.Variable.Global),
- # Comparison predicates
- (words(('eq', 'ne', 'sgt', 'sge', 'slt', 'sle', 'ugt', 'uge', 'ult',
- 'ule'), prefix=r'intpred\(', suffix=r'\)'), Name.Builtin),
- (words(('oeq', 'one', 'ogt', 'oge', 'olt', 'ole', 'ugt', 'uge',
- 'ult', 'ule'), prefix=r'floatpred\(', suffix=r'\)'),
- Name.Builtin),
- # Physical registers
- (r'\$\w+', String.Single),
- # Assignment operator
- (r'=', Operator),
- # gMIR Opcodes
- (r'(G_ANYEXT|G_[SZ]EXT|G_SEXT_INREG|G_TRUNC|G_IMPLICIT_DEF|G_PHI|'
- r'G_FRAME_INDEX|G_GLOBAL_VALUE|G_INTTOPTR|G_PTRTOINT|G_BITCAST|'
- r'G_CONSTANT|G_FCONSTANT|G_VASTART|G_VAARG|G_CTLZ|G_CTLZ_ZERO_UNDEF|'
- r'G_CTTZ|G_CTTZ_ZERO_UNDEF|G_CTPOP|G_BSWAP|G_BITREVERSE|'
- r'G_ADDRSPACE_CAST|G_BLOCK_ADDR|G_JUMP_TABLE|G_DYN_STACKALLOC|'
- r'G_ADD|G_SUB|G_MUL|G_[SU]DIV|G_[SU]REM|G_AND|G_OR|G_XOR|G_SHL|'
- r'G_[LA]SHR|G_[IF]CMP|G_SELECT|G_GEP|G_PTR_MASK|G_SMIN|G_SMAX|'
- r'G_UMIN|G_UMAX|G_[US]ADDO|G_[US]ADDE|G_[US]SUBO|G_[US]SUBE|'
- r'G_[US]MULO|G_[US]MULH|G_FNEG|G_FPEXT|G_FPTRUNC|G_FPTO[US]I|'
- r'G_[US]ITOFP|G_FABS|G_FCOPYSIGN|G_FCANONICALIZE|G_FMINNUM|'
- r'G_FMAXNUM|G_FMINNUM_IEEE|G_FMAXNUM_IEEE|G_FMINIMUM|G_FMAXIMUM|'
- r'G_FADD|G_FSUB|G_FMUL|G_FMA|G_FMAD|G_FDIV|G_FREM|G_FPOW|G_FEXP|'
- r'G_FEXP2|G_FLOG|G_FLOG2|G_FLOG10|G_FCEIL|G_FCOS|G_FSIN|G_FSQRT|'
- r'G_FFLOOR|G_FRINT|G_FNEARBYINT|G_INTRINSIC_TRUNC|'
- r'G_INTRINSIC_ROUND|G_LOAD|G_[ZS]EXTLOAD|G_INDEXED_LOAD|'
- r'G_INDEXED_[ZS]EXTLOAD|G_STORE|G_INDEXED_STORE|'
- r'G_ATOMIC_CMPXCHG_WITH_SUCCESS|G_ATOMIC_CMPXCHG|'
- r'G_ATOMICRMW_(XCHG|ADD|SUB|AND|NAND|OR|XOR|MAX|MIN|UMAX|UMIN|FADD|'
- r'FSUB)'
- r'|G_FENCE|G_EXTRACT|G_UNMERGE_VALUES|G_INSERT|G_MERGE_VALUES|'
- r'G_BUILD_VECTOR|G_BUILD_VECTOR_TRUNC|G_CONCAT_VECTORS|'
- r'G_INTRINSIC|G_INTRINSIC_W_SIDE_EFFECTS|G_BR|G_BRCOND|'
- r'G_BRINDIRECT|G_BRJT|G_INSERT_VECTOR_ELT|G_EXTRACT_VECTOR_ELT|'
- r'G_SHUFFLE_VECTOR)\b',
- Name.Builtin),
- # Target independent opcodes
- (r'(COPY|PHI|INSERT_SUBREG|EXTRACT_SUBREG|REG_SEQUENCE)\b',
- Name.Builtin),
- # Flags
- (words(('killed', 'implicit')), Keyword),
- # ConstantInt values
- (r'(i[0-9]+)( +)', bygroups(Keyword.Type, Whitespace), 'constantint'),
- # ConstantFloat values
- (r'(half|float|double) +', Keyword.Type, 'constantfloat'),
- # Bare immediates
- include('integer'),
- # MMO's
- (r'(::)( *)', bygroups(Operator, Whitespace), 'mmo'),
- # MIR Comments
- (r';.*', Comment),
- # If we get here, assume it's a target instruction
- (r'[a-zA-Z0-9_]+', Name),
- # Everything else that isn't highlighted
- (r'[(), \n]+', Text),
- ],
- # The integer constant from a ConstantInt value
- 'constantint': [
- include('integer'),
- (r'(?=.)', Text, '#pop'),
- ],
- # The floating point constant from a ConstantFloat value
- 'constantfloat': [
- include('float'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'vreg': [
- # The bank or class if there is one
- (r'( *)(:(?!:))', bygroups(Whitespace, Keyword), ('#pop', 'vreg_bank_or_class')),
- # The LLT if there is one
- (r'( *)(\()', bygroups(Whitespace, Text), 'vreg_type'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'vreg_bank_or_class': [
- # The unassigned bank/class
- (r'( *)(_)', bygroups(Whitespace, Name.Variable.Magic)),
- (r'( *)([a-zA-Z0-9_]+)', bygroups(Whitespace, Name.Variable)),
- # The LLT if there is one
- (r'( *)(\()', bygroups(Whitespace, Text), 'vreg_type'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'vreg_type': [
- # Scalar and pointer types
- (r'( *)([sp][0-9]+)', bygroups(Whitespace, Keyword.Type)),
- (r'( *)(<[0-9]+ *x *[sp][0-9]+>)', bygroups(Whitespace, Keyword.Type)),
- (r'\)', Text, '#pop'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'mmo': [
- (r'\(', Text),
- (r' +', Whitespace),
- (words(('load', 'store', 'on', 'into', 'from', 'align', 'monotonic',
- 'acquire', 'release', 'acq_rel', 'seq_cst')),
- Keyword),
- # IR references
- (r'%ir\.[a-zA-Z0-9_.-]+', Name),
- (r'%ir-block\.[a-zA-Z0-9_.-]+', Name),
- (r'[-+]', Operator),
- include('integer'),
- include('global'),
- (r',', Punctuation),
- (r'\), \(', Text),
- (r'\)', Text, '#pop'),
- ],
- 'integer': [(r'-?[0-9]+', Number.Integer),],
- 'float': [(r'-?[0-9]+\.[0-9]+(e[+-][0-9]+)?', Number.Float)],
- 'global': [(r'\@[a-zA-Z0-9_.]+', Name.Variable.Global)],
- }
-
-
-class LlvmMirLexer(RegexLexer):
- """
- Lexer for the overall LLVM MIR document format.
-
- MIR is a human-readable serialization format that is used to represent LLVM's
- machine-specific intermediate representation. It allows LLVM's developers to
- see the state of the compilation process at various points, as well as to
- test individual pieces of the compiler.
-
- .. versionadded:: 2.6
- """
- name = 'LLVM-MIR'
- url = 'https://llvm.org/docs/MIRLangRef.html'
- aliases = ['llvm-mir']
- filenames = ['*.mir']
-
- tokens = {
- 'root': [
- # Comments are hashes at the YAML level
- (r'#.*', Comment),
- # Documents starting with | are LLVM-IR
- (r'--- \|$', Keyword, 'llvm_ir'),
- # Other documents are MIR
- (r'---', Keyword, 'llvm_mir'),
- # Consume everything else in one token for efficiency
- (r'[^-#]+|.', Text),
- ],
- 'llvm_ir': [
- # Documents end with '...' or '---'
- (r'(\.\.\.|(?=---))', Keyword, '#pop'),
- # Delegate to the LlvmLexer
- (r'((?:.|\n)+?)(?=(\.\.\.|---))', bygroups(using(LlvmLexer))),
- ],
- 'llvm_mir': [
- # Comments are hashes at the YAML level
- (r'#.*', Comment),
- # Documents end with '...' or '---'
- (r'(\.\.\.|(?=---))', Keyword, '#pop'),
- # Handle the simple attributes
- (r'name:', Keyword, 'name'),
- (words(('alignment', ),
- suffix=':'), Keyword, 'number'),
- (words(('legalized', 'regBankSelected', 'tracksRegLiveness',
- 'selected', 'exposesReturnsTwice'),
- suffix=':'), Keyword, 'boolean'),
- # Handle the attributes don't highlight inside
- (words(('registers', 'stack', 'fixedStack', 'liveins', 'frameInfo',
- 'machineFunctionInfo'),
- suffix=':'), Keyword),
- # Delegate the body block to the LlvmMirBodyLexer
- (r'body: *\|', Keyword, 'llvm_mir_body'),
- # Consume everything else
- (r'.+', Text),
- (r'\n', Whitespace),
- ],
- 'name': [
- (r'[^\n]+', Name),
- default('#pop'),
- ],
- 'boolean': [
- (r' *(true|false)', Name.Builtin),
- default('#pop'),
- ],
- 'number': [
- (r' *[0-9]+', Number),
- default('#pop'),
- ],
- 'llvm_mir_body': [
- # Documents end with '...' or '---'.
- # We have to pop llvm_mir_body and llvm_mir
- (r'(\.\.\.|(?=---))', Keyword, '#pop:2'),
- # Delegate the body block to the LlvmMirBodyLexer
- (r'((?:.|\n)+?)(?=\.\.\.|---)', bygroups(using(LlvmMirBodyLexer))),
- # The '...' is optional. If we didn't already find it then it isn't
- # there. There might be a '---' instead though.
- (r'(?!\.\.\.|---)((?:.|\n)+)', bygroups(using(LlvmMirBodyLexer))),
- ],
- }
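
As the docstring above says, LlvmMirLexer only handles the YAML-level framing itself: documents opened with '--- |' are delegated to LlvmLexer, plain '---' documents enter the llvm_mir state, and the 'body: |' block is re-lexed by LlvmMirBodyLexer via using(). A quick way to see that split, assuming a normal Pygments install; the MIR fragment below is hand-written for illustration, not real LLVM output:

    from pygments.lexers import LlvmMirLexer

    mir = (
        "---\n"
        "name: foo\n"
        "tracksRegLiveness: true\n"
        "body: |\n"
        "  bb.0:\n"
        "    %0:gpr32 = COPY $w0\n"
        "    RET_ReallyLR implicit $w0\n"
        "...\n"
    )

    # Header keys (name:, tracksRegLiveness:) are lexed by the llvm_mir state;
    # everything under "body: |" is handed to LlvmMirBodyLexer.
    for token, value in LlvmMirLexer().get_tokens(mir):
        print(token, repr(value))
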
-
-
-class NasmLexer(RegexLexer):
- """
- For Nasm (Intel) assembly code.
- """
- name = 'NASM'
- aliases = ['nasm']
- filenames = ['*.asm', '*.ASM', '*.nasm']
- mimetypes = ['text/x-nasm']
-
- # TASM uses the same file endings, but it is less common than NASM, so
- # we give NASM a higher priority by default
- priority = 1.0
-
- identifier = r'[a-z$._?][\w$.?#@~]*'
- hexn = r'(?:0x[0-9a-f]+|\$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
- octn = r'[0-7]+q'
- binn = r'[01]+b'
- decn = r'[0-9]+'
- floatn = decn + r'\.e?' + decn
- string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
- declkw = r'(?:res|d)[bwdqt]|times'
- register = (r'(r[0-9][0-5]?[bwd]?|'
- r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
- r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]|k[0-7]|'
- r'[xyz]mm(?:[12][0-9]?|3[01]?|[04-9]))\b')
- wordop = r'seg|wrt|strict|rel|abs'
- type = r'byte|[dq]?word'
- # Directives must be followed by whitespace, otherwise CPU will match
- # cpuid for instance.
- directives = (r'(?:BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
- r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
- r'EXPORT|LIBRARY|MODULE)(?=\s)')
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
- (r'^\s*%', Comment.Preproc, 'preproc'),
- include('whitespace'),
- (identifier + ':', Name.Label),
- (r'(%s)(\s+)(equ)' % identifier,
- bygroups(Name.Constant, Whitespace, Keyword.Declaration),
- 'instruction-args'),
- (directives, Keyword, 'instruction-args'),
- (declkw, Keyword.Declaration, 'instruction-args'),
- (identifier, Name.Function, 'instruction-args'),
- (r'[\r\n]+', Whitespace)
- ],
- 'instruction-args': [
- (string, String),
- (hexn, Number.Hex),
- (octn, Number.Oct),
- (binn, Number.Bin),
- (floatn, Number.Float),
- (decn, Number.Integer),
- include('punctuation'),
- (register, Name.Builtin),
- (identifier, Name.Variable),
- (r'[\r\n]+', Whitespace, '#pop'),
- include('whitespace')
- ],
- 'preproc': [
- (r'[^;\n]+', Comment.Preproc),
- (r';.*?\n', Comment.Single, '#pop'),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'[ \t]+', Whitespace),
- (r';.*', Comment.Single),
- (r'#.*', Comment.Single)
- ],
- 'punctuation': [
- (r'[,{}():\[\]]+', Punctuation),
- (r'[&|^<>+*/%~-]+', Operator),
- (r'[$]+', Keyword.Constant),
- (wordop, Operator.Word),
- (type, Keyword.Type)
- ],
- }
-
- def analyse_text(text):
- # Probably TASM
- if re.match(r'PROC', text, re.IGNORECASE):
- return False
-
-
-class NasmObjdumpLexer(ObjdumpLexer):
- """
- For the output of ``objdump -d -M intel``.
-
- .. versionadded:: 2.0
- """
- name = 'objdump-nasm'
- aliases = ['objdump-nasm']
- filenames = ['*.objdump-intel']
- mimetypes = ['text/x-nasm-objdump']
-
- tokens = _objdump_lexer_tokens(NasmLexer)
-
-
-class TasmLexer(RegexLexer):
- """
- For Tasm (Turbo Assembler) assembly code.
- """
- name = 'TASM'
- aliases = ['tasm']
- filenames = ['*.asm', '*.ASM', '*.tasm']
- mimetypes = ['text/x-tasm']
-
- identifier = r'[@a-z$._?][\w$.?#@~]*'
- hexn = r'(?:0x[0-9a-f]+|\$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
- octn = r'[0-7]+q'
- binn = r'[01]+b'
- decn = r'[0-9]+'
- floatn = decn + r'\.e?' + decn
- string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
- declkw = r'(?:res|d)[bwdqt]|times'
- register = (r'(r[0-9][0-5]?[bwd]|'
- r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
- r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7])\b')
- wordop = r'seg|wrt|strict'
- type = r'byte|[dq]?word'
- directives = (r'BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
- r'ORG|ALIGN|STRUC|ENDSTRUC|ENDS|COMMON|CPU|GROUP|UPPERCASE|INCLUDE|'
- r'EXPORT|LIBRARY|MODULE|PROC|ENDP|USES|ARG|DATASEG|UDATASEG|END|IDEAL|'
- r'P386|MODEL|ASSUME|CODESEG|SIZE')
- # T[A-Z][a-z] is more of a convention. Lexer should filter out STRUC definitions
- # and then 'add' them to datatype somehow.
- datatype = (r'db|dd|dw|T[A-Z][a-z]+')
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
- (r'^\s*%', Comment.Preproc, 'preproc'),
- include('whitespace'),
- (identifier + ':', Name.Label),
- (directives, Keyword, 'instruction-args'),
- (r'(%s)(\s+)(%s)' % (identifier, datatype),
- bygroups(Name.Constant, Whitespace, Keyword.Declaration),
- 'instruction-args'),
- (declkw, Keyword.Declaration, 'instruction-args'),
- (identifier, Name.Function, 'instruction-args'),
- (r'[\r\n]+', Whitespace)
- ],
- 'instruction-args': [
- (string, String),
- (hexn, Number.Hex),
- (octn, Number.Oct),
- (binn, Number.Bin),
- (floatn, Number.Float),
- (decn, Number.Integer),
- include('punctuation'),
- (register, Name.Builtin),
- (identifier, Name.Variable),
- # Do not match newline when it's preceded by a backslash
- (r'(\\)(\s*)(;.*)([\r\n])',
- bygroups(Text, Whitespace, Comment.Single, Whitespace)),
- (r'[\r\n]+', Whitespace, '#pop'),
- include('whitespace')
- ],
- 'preproc': [
- (r'[^;\n]+', Comment.Preproc),
- (r';.*?\n', Comment.Single, '#pop'),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'whitespace': [
- (r'[\n\r]', Whitespace),
- (r'(\\)([\n\r])', bygroups(Text, Whitespace)),
- (r'[ \t]+', Whitespace),
- (r';.*', Comment.Single)
- ],
- 'punctuation': [
- (r'[,():\[\]]+', Punctuation),
- (r'[&|^<>+*=/%~-]+', Operator),
- (r'[$]+', Keyword.Constant),
- (wordop, Operator.Word),
- (type, Keyword.Type)
- ],
- }
-
- def analyse_text(text):
- # See above
- if re.match(r'PROC', text, re.I):
- return True
-
-
-class Ca65Lexer(RegexLexer):
- """
- For ca65 assembler sources.
-
- .. versionadded:: 1.6
- """
- name = 'ca65 assembler'
- aliases = ['ca65']
- filenames = ['*.s']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r';.*', Comment.Single),
- (r'\s+', Whitespace),
- (r'[a-z_.@$][\w.@$]*:', Name.Label),
- (r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
- r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
- r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
- r'|bit)\b', Keyword),
- (r'\.\w+', Keyword.Pseudo),
- (r'[-+~*/^&|!<>=]', Operator),
- (r'"[^"\n]*.', String),
- (r"'[^'\n]*.", String.Char),
- (r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
- (r'\d+', Number.Integer),
- (r'%[01]+', Number.Bin),
- (r'[#,.:()=\[\]]', Punctuation),
- (r'[a-z_.@$][\w.@$]*', Name),
- ]
- }
-
- def analyse_text(text):
- # comments in GAS start with "#"; a leading ";" comment suggests ca65 instead
- if re.search(r'^\s*;', text, re.MULTILINE):
- return 0.9
-
-
-class Dasm16Lexer(RegexLexer):
- """
- For DCPU-16 Assembly.
-
- .. versionadded:: 2.4
- """
- name = 'DASM16'
- url = 'http://0x10c.com/doc/dcpu-16.txt'
- aliases = ['dasm16']
- filenames = ['*.dasm16', '*.dasm']
- mimetypes = ['text/x-dasm16']
-
- INSTRUCTIONS = [
- 'SET',
- 'ADD', 'SUB',
- 'MUL', 'MLI',
- 'DIV', 'DVI',
- 'MOD', 'MDI',
- 'AND', 'BOR', 'XOR',
- 'SHR', 'ASR', 'SHL',
- 'IFB', 'IFC', 'IFE', 'IFN', 'IFG', 'IFA', 'IFL', 'IFU',
- 'ADX', 'SBX',
- 'STI', 'STD',
- 'JSR',
- 'INT', 'IAG', 'IAS', 'RFI', 'IAQ', 'HWN', 'HWQ', 'HWI',
- ]
-
- REGISTERS = [
- 'A', 'B', 'C',
- 'X', 'Y', 'Z',
- 'I', 'J',
- 'SP', 'PC', 'EX',
- 'POP', 'PEEK', 'PUSH'
- ]
-
- # Regexes yo
- char = r'[a-zA-Z0-9_$@.]'
- identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
- number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'
- binary_number = r'0b[01_]+'
- instruction = r'(?i)(' + '|'.join(INSTRUCTIONS) + ')'
- single_char = r"'\\?" + char + "'"
- string = r'"(\\"|[^"])*"'
-
- def guess_identifier(lexer, match):
- ident = match.group(0)
- klass = Name.Variable if ident.upper() in lexer.REGISTERS else Name.Label
- yield match.start(), klass, ident
-
- tokens = {
- 'root': [
- include('whitespace'),
- (':' + identifier, Name.Label),
- (identifier + ':', Name.Label),
- (instruction, Name.Function, 'instruction-args'),
- (r'\.' + identifier, Name.Function, 'data-args'),
- (r'[\r\n]+', Whitespace)
- ],
-
- 'numeric' : [
- (binary_number, Number.Integer),
- (number, Number.Integer),
- (single_char, String),
- ],
-
- 'arg' : [
- (identifier, guess_identifier),
- include('numeric')
- ],
-
- 'deref' : [
- (r'\+', Punctuation),
- (r'\]', Punctuation, '#pop'),
- include('arg'),
- include('whitespace')
- ],
-
- 'instruction-line' : [
- (r'[\r\n]+', Whitespace, '#pop'),
- (r';.*?$', Comment, '#pop'),
- include('whitespace')
- ],
-
- 'instruction-args': [
- (r',', Punctuation),
- (r'\[', Punctuation, 'deref'),
- include('arg'),
- include('instruction-line')
- ],
-
- 'data-args' : [
- (r',', Punctuation),
- include('numeric'),
- (string, String),
- include('instruction-line')
- ],
-
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r';.*?\n', Comment)
- ],
- }
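
Two reuse patterns run through the asm.py module removed above: _objdump_lexer_tokens() wraps any inner assembly lexer in the common objdump line format (ObjdumpLexer, NasmObjdumpLexer), and DelegatingLexer layers a source-language lexer on top for objdump -Sr listings (the C, C++ and D variants). A rough sketch of the second pattern for another language; RustObjdumpLexer is hypothetical and not part of Pygments:

    from pygments.lexer import DelegatingLexer
    from pygments.lexers.asm import ObjdumpLexer
    from pygments.lexers.rust import RustLexer


    class RustObjdumpLexer(DelegatingLexer):
        """Hypothetical lexer for objdump -Sr output of compiled Rust code."""
        name = 'rust-objdump'
        aliases = ['rust-objdump']
        filenames = ['*.rust-objdump']

        def __init__(self, **options):
            # ObjdumpLexer lexes the listing and emits the interleaved source
            # lines as Other tokens; DelegatingLexer re-lexes those chunks with
            # RustLexer, mirroring CppObjdumpLexer above.
            super().__init__(RustLexer, ObjdumpLexer, **options)
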
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/asn1.py b/venv/lib/python3.11/site-packages/pygments/lexers/asn1.py
deleted file mode 100644
index 30632cb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/asn1.py
+++ /dev/null
@@ -1,179 +0,0 @@
-"""
- pygments.lexers.asn1
- ~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for ASN.1.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-from pygments.lexer import RegexLexer, words, bygroups
-
-__all__ = ['Asn1Lexer']
-
-SINGLE_WORD_KEYWORDS = [
- "ENCODED",
- "ABSTRACT-SYNTAX",
- "END",
- "APPLICATION",
- "EXPLICIT",
- "IMPLICIT",
- "AUTOMATIC",
- "TAGS",
- "BEGIN",
- "EXTENSIBILITY",
- "BY",
- "FROM",
- "COMPONENT",
- "UNIVERSAL",
- "COMPONENTS",
- "CONSTRAINED",
- "IMPLIED",
- "DEFINITIONS",
- "INCLUDES",
- "PRIVATE",
- "WITH",
- "OF",
-]
-
-OPERATOR_WORDS = [
- "EXCEPT",
- "UNION",
- "INTERSECTION",
-]
-
-SINGLE_WORD_NAMESPACE_KEYWORDS = [
- "EXPORTS",
- "IMPORTS",
-]
-
-MULTI_WORDS_DECLARATIONS = [
- "SEQUENCE OF",
- "SET OF",
- "INSTANCE OF",
- "WITH SYNTAX",
-]
-
-SINGLE_WORDS_DECLARATIONS = [
- "SIZE",
- "SEQUENCE",
- "SET",
- "CLASS",
- "UNIQUE",
- "DEFAULT",
- "CHOICE",
- "PATTERN",
- "OPTIONAL",
- "PRESENT",
- "ABSENT",
- "CONTAINING",
- "ENUMERATED",
- "ALL",
-]
-
-TWO_WORDS_TYPES = [
- "OBJECT IDENTIFIER",
- "BIT STRING",
- "OCTET STRING",
- "CHARACTER STRING",
- "EMBEDDED PDV",
-]
-
-SINGLE_WORD_TYPES = [
- "RELATIVE-OID",
- "TYPE-IDENTIFIER",
- "ObjectDescriptor",
- "IA5String",
- "INTEGER",
- "ISO646String",
- "T61String",
- "BMPString",
- "NumericString",
- "TeletexString",
- "GeneralizedTime",
- "REAL",
- "BOOLEAN",
- "GeneralString",
- "GraphicString",
- "UniversalString",
- "UTCTime",
- "VisibleString",
- "UTF8String",
- "PrintableString",
- "VideotexString",
- "EXTERNAL",
-]
-
-
-def word_sequences(tokens):
- return "(" + '|'.join(token.replace(' ', r'\s+') for token in tokens) + r')\b'
-
-
-class Asn1Lexer(RegexLexer):
-
- """
- Lexer for ASN.1 module definition
-
- .. versionadded:: 2.16
- """
-
- flags = re.MULTILINE
-
- name = 'ASN.1'
- aliases = ['asn1']
- filenames = ["*.asn1"]
- url = "https://www.itu.int/ITU-T/studygroups/com17/languages/X.680-0207.pdf"
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Whitespace),
- # Comments:
- (r'--.*$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- # Numbers:
- (r'\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- (r'\d+', Number.Integer),
- # Identifier:
- (r"&?[a-z][-a-zA-Z0-9]*[a-zA-Z0-9]\b", Name.Variable),
- # Constants:
- (words(("TRUE", "FALSE", "NULL", "MINUS-INFINITY", "PLUS-INFINITY", "MIN", "MAX"), suffix=r'\b'), Keyword.Constant),
- # Builtin types:
- (word_sequences(TWO_WORDS_TYPES), Keyword.Type),
- (words(SINGLE_WORD_TYPES, suffix=r'\b'), Keyword.Type),
- # Other keywords:
- (r"EXPORTS\s+ALL\b", Keyword.Namespace),
- (words(SINGLE_WORD_NAMESPACE_KEYWORDS, suffix=r'\b'), Operator.Namespace),
- (word_sequences(MULTI_WORDS_DECLARATIONS), Keyword.Declaration),
- (words(SINGLE_WORDS_DECLARATIONS, suffix=r'\b'), Keyword.Declaration),
- (words(OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
- (words(SINGLE_WORD_KEYWORDS), Keyword),
- # Type identifier:
- (r"&?[A-Z][-a-zA-Z0-9]*[a-zA-Z0-9]\b", Name.Type),
- # Operators:
- (r"(::=|\.\.\.|\.\.|\[\[|\]\]|\||\^)", Operator),
- # Punctuation:
- (r"(\.|,|\{|\}|\(|\)|\[|\])", Punctuation),
- # String:
- (r'"', String, 'string'),
- # Binary string:
- (r"('[01 ]*')(B)\b", bygroups(String, String.Affix)),
- (r"('[0-9A-F ]*')(H)\b",bygroups(String, String.Affix)),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'string': [
- (r'""', String),
- (r'"', String, "#pop"),
- (r'[^"]', String),
- ]
- }
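
The word_sequences() helper above is the one non-obvious piece of the deleted ASN.1 lexer: it joins multi-word keywords into a single alternation and widens the inner spaces to \s+ so the words still match when separated by line breaks. A small self-contained check of that behaviour; the helper is copied verbatim from the module above:

    import re

    def word_sequences(tokens):
        # Copied from the deleted pygments/lexers/asn1.py above.
        return "(" + '|'.join(token.replace(' ', r'\s+') for token in tokens) + r')\b'

    pattern = re.compile(word_sequences(["SEQUENCE OF", "SET OF", "INSTANCE OF"]))

    print(bool(pattern.match("SEQUENCE   OF")))  # True: any run of whitespace
    print(bool(pattern.match("SEQUENCE\nOF")))   # True: including a line break
    print(bool(pattern.match("SEQUENCEOF")))     # False: words must be separated
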
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/automation.py b/venv/lib/python3.11/site-packages/pygments/lexers/automation.py
deleted file mode 100644
index f0f7c5b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/automation.py
+++ /dev/null
@@ -1,381 +0,0 @@
-"""
- pygments.lexers.automation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for automation scripting languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, combined
-from pygments.token import Text, Comment, Operator, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['AutohotkeyLexer', 'AutoItLexer']
-
-
-class AutohotkeyLexer(RegexLexer):
- """
- For autohotkey source code.
-
- .. versionadded:: 1.4
- """
- name = 'autohotkey'
- url = 'http://www.autohotkey.com/'
- aliases = ['autohotkey', 'ahk']
- filenames = ['*.ahk', '*.ahkl']
- mimetypes = ['text/x-autohotkey']
-
- tokens = {
- 'root': [
- (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
- (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
- (r'\s+;.*?$', Comment.Single),
- (r'^;.*?$', Comment.Single),
- (r'[]{}(),;[]', Punctuation),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInVariables'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([,%`abfnrtv\-+;])', String.Escape),
- include('garbage'),
- ],
- 'incomment': [
- (r'^\s*\*/', Comment.Multiline, '#pop'),
- (r'[^*]+', Comment.Multiline),
- (r'\*', Comment.Multiline)
- ],
- 'incontinuation': [
- (r'^\s*\)', Generic, '#pop'),
- (r'[^)]', Generic),
- (r'[)]', Generic),
- ],
- 'commands': [
- (r'(?i)^(\s*)(global|local|static|'
- r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
- r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
- r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
- r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
- r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
- r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
- r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
- r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
- r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
- r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
- r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
- r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
- r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
- r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
- r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
- r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
- r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
- r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
- r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
- r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
- r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
- r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
- r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
- r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
- r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
- r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
- r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
- r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
- r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
- r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
- r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
- r'SetBatchLines|SetCapslockState|SetControlDelay|'
- r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
- r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
- r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
- r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
- r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
- r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
- r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
- r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
- r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
- r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
- r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
- r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
- r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
- r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
- r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
- r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
- r'WinWait)\b', bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
- r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
- r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
- r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
- r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
- r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
- r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
- r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
- r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
- r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
- r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
- r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
- r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
- r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
- r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
- r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
- Name.Function),
- ],
- 'builtInVariables': [
- (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
- r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
- r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
- r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
- r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
- r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
- r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
- r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
- r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
- r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
- r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
- r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
- r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
- r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
- r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
- r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
- r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
- r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
- r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
- r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
- r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
- r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
- r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
- r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
- r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
- r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
- r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
- r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
- r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
- r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
- Name.Variable),
- ],
- 'labels': [
- # hotkeys and labels
- # technically, hotkey names are limited to named keys and buttons
- (r'(^\s*)([^:\s("]+?:{1,2})', bygroups(Text, Name.Label)),
- (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([,%`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- # (r'.', Text), # no cheating
- ],
- }
-
-
-class AutoItLexer(RegexLexer):
- """
- For AutoIt files.
-
- AutoIt is a freeware BASIC-like scripting language
- designed for automating the Windows GUI and general scripting.
-
- .. versionadded:: 1.6
- """
- name = 'AutoIt'
- url = 'http://www.autoitscript.com/site/autoit/'
- aliases = ['autoit']
- filenames = ['*.au3']
- mimetypes = ['text/x-autoit']
-
- # Keywords, functions, macros from au3.keywords.properties
- # which can be found in AutoIt installed directory, e.g.
- # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
-
- keywords = """\
- #include-once #include #endregion #forcedef #forceref #region
- and byref case continueloop dim do else elseif endfunc endif
- endselect exit exitloop for func global
- if local next not or return select step
- then to until wend while exit""".split()
-
- functions = """\
- abs acos adlibregister adlibunregister asc ascw asin assign atan
- autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
- binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
- blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
- consolewrite consolewriteerror controlclick controlcommand controldisable
- controlenable controlfocus controlgetfocus controlgethandle controlgetpos
- controlgettext controlhide controllistview controlmove controlsend
- controlsettext controlshow controltreeview cos dec dircopy dircreate
- dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
- dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
- dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
- drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
- drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
- drivespacetotal drivestatus envget envset envupdate eval execute exp
- filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
- filedelete fileexists filefindfirstfile filefindnextfile fileflush
- filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
- filegetshortname filegetsize filegettime filegetversion fileinstall filemove
- fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
- filesavedialog fileselectfolder filesetattrib filesetpos filesettime
- filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
- guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
- guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
- guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
- guictrlcreateinput guictrlcreatelabel guictrlcreatelist
- guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
- guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
- guictrlcreatepic guictrlcreateprogress guictrlcreateradio
- guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
- guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
- guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
- guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
- guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
- guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
- guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
- guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
- guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
- guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
- guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
- httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
- inetread inidelete iniread inireadsection inireadsectionnames
- inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
- isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
- isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
- mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
- number objcreate objcreateinterface objevent objget objname
- onautoitexitregister onautoitexitunregister opt ping pixelchecksum
- pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
- processgetstats processlist processsetpriority processwait processwaitclose
- progressoff progresson progressset ptr random regdelete regenumkey
- regenumval regread regwrite round run runas runaswait runwait send
- sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
- sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
- sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
- string stringaddcr stringcompare stringformat stringfromasciiarray
- stringinstr stringisalnum stringisalpha stringisascii stringisdigit
- stringisfloat stringisint stringislower stringisspace stringisupper
- stringisxdigit stringleft stringlen stringlower stringmid stringregexp
- stringregexpreplace stringreplace stringright stringsplit stringstripcr
- stringstripws stringtoasciiarray stringtobinary stringtrimleft
- stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
- tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
- timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
- trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
- trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
- traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
- udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
- winactivate winactive winclose winexists winflash wingetcaretpos
- wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
- wingetstate wingettext wingettitle winkill winlist winmenuselectitem
- winminimizeall winminimizeallundo winmove winsetontop winsetstate
- winsettitle winsettrans winwait winwaitactive winwaitclose
- winwaitnotactive""".split()
-
- macros = """\
- @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
- @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
- @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
- @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
- @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
- @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
- @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
- @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
- @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
- @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
- @osversion @programfilesdir @programscommondir @programsdir @scriptdir
- @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
- @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
- @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
- @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
- @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
- @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
- @windowsdir @workingdir @yday @year""".split()
-
- tokens = {
- 'root': [
- (r';.*\n', Comment.Single),
- (r'(#comments-start|#cs)(.|\n)*?(#comments-end|#ce)',
- Comment.Multiline),
- (r'[\[\]{}(),;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
- (r'[$|@][a-zA-Z_]\w*', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInMacros'),
- (r'"', String, combined('stringescape', 'dqs')),
- (r"'", String, 'sqs'),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([,%`abfnrtv\-+;])', String.Escape),
- (r'_\n', Text), # Line continuation
- include('garbage'),
- ],
- 'commands': [
- (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
- bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(%s)\b' % '|'.join(functions),
- Name.Function),
- ],
- 'builtInMacros': [
- (r'(?i)(%s)\b' % '|'.join(macros),
- Name.Variable.Global),
- ],
- 'labels': [
- # sendkeys
- (r'(^\s*)(\{\S+?\})', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([,%`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'sqs': [
- (r'\'\'|\`([,%`abfnrtv])', String.Escape),
- (r"'", String, '#pop'),
- (r"[^'\n]+", String)
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- ],
- }
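
The AutoIt lexer above registers the 'autoit' alias and the '*.au3' filename pattern, so the standard Pygments entry points can resolve it without importing its module directly. A minimal usage sketch, assuming an installed Pygments; the one-line AutoIt sample is invented for illustration:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    # 'autoit' is the alias declared by AutoItLexer; the sample line is made up.
    sample = 'MsgBox(0, "Demo", "Hello")'
    print(highlight(sample, get_lexer_by_name('autoit'), TerminalFormatter()))
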
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/bare.py b/venv/lib/python3.11/site-packages/pygments/lexers/bare.py
deleted file mode 100644
index fce5ae0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/bare.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""
- pygments.lexers.bare
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the BARE schema.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups
-from pygments.token import Text, Comment, Keyword, Name, Literal, Whitespace
-
-__all__ = ['BareLexer']
-
-
-class BareLexer(RegexLexer):
- """
- For BARE schema source.
-
- .. versionadded:: 2.7
- """
- name = 'BARE'
- url = 'https://baremessages.org'
- filenames = ['*.bare']
- aliases = ['bare']
-
- keywords = [
- 'type',
- 'enum',
- 'u8',
- 'u16',
- 'u32',
- 'u64',
- 'uint',
- 'i8',
- 'i16',
- 'i32',
- 'i64',
- 'int',
- 'f32',
- 'f64',
- 'bool',
- 'void',
- 'data',
- 'string',
- 'optional',
- 'map',
- ]
-
- tokens = {
- 'root': [
- (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)(\{)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace, Text), 'struct'),
- (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)(\()',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace, Text), 'union'),
- (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)',
- bygroups(Keyword, Whitespace, Name, Whitespace), 'typedef'),
- (r'(enum)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\{)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace), 'enum'),
- (r'#.*?$', Comment),
- (r'\s+', Whitespace),
- ],
- 'struct': [
- (r'\{', Text, '#push'),
- (r'\}', Text, '#pop'),
- (r'([a-zA-Z0-9]+)(:)(\s*)',
- bygroups(Name.Attribute, Text, Whitespace), 'typedef'),
- (r'\s+', Whitespace),
- ],
- 'union': [
- (r'\)', Text, '#pop'),
- (r'(\s*)(\|)(\s*)', bygroups(Whitespace, Text, Whitespace)),
- (r'[A-Z][a-zA-Z0-9]+', Name.Class),
- (words(keywords), Keyword),
- (r'\s+', Whitespace),
- ],
- 'typedef': [
- (r'\[\]', Text),
- (r'#.*?$', Comment, '#pop'),
- (r'(\[)(\d+)(\])', bygroups(Text, Literal, Text)),
- (r'<|>', Text),
- (r'\(', Text, 'union'),
- (r'(\[)([a-z][a-z-A-Z0-9]+)(\])', bygroups(Text, Keyword, Text)),
- (r'(\[)([A-Z][a-z-A-Z0-9]+)(\])', bygroups(Text, Name.Class, Text)),
- (r'([A-Z][a-z-A-Z0-9]+)', Name.Class),
- (words(keywords), Keyword),
- (r'\n', Text, '#pop'),
- (r'\{', Text, 'struct'),
- (r'\s+', Whitespace),
- (r'\d+', Literal),
- ],
- 'enum': [
- (r'\{', Text, '#push'),
- (r'\}', Text, '#pop'),
- (r'([A-Z][A-Z0-9_]*)(\s*=\s*)(\d+)',
- bygroups(Name.Attribute, Text, Literal)),
- (r'([A-Z][A-Z0-9_]*)', bygroups(Name.Attribute)),
- (r'#.*?$', Comment),
- (r'\s+', Whitespace),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/basic.py b/venv/lib/python3.11/site-packages/pygments/lexers/basic.py
deleted file mode 100644
index 8837dac..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/basic.py
+++ /dev/null
@@ -1,665 +0,0 @@
-"""
- pygments.lexers.basic
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for BASIC-like languages (other than VB.net).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default, words, include
-from pygments.token import Comment, Error, Keyword, Name, Number, \
- Punctuation, Operator, String, Text, Whitespace
-from pygments.lexers import _vbscript_builtins
-
-
-__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
- 'QBasicLexer', 'VBScriptLexer', 'BBCBasicLexer']
-
-
-class BlitzMaxLexer(RegexLexer):
- """
- For BlitzMax source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'BlitzMax'
- url = 'http://blitzbasic.com'
- aliases = ['blitzmax', 'bmax']
- filenames = ['*.bmx']
- mimetypes = ['text/x-bmx']
-
- bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
- bmax_sktypes = r'@{1,2}|[!#$%]'
- bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
- bmax_name = r'[a-z_]\w*'
- bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
- r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
- (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
- bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'\s+', Whitespace),
- (r'(\.\.)(\n)', bygroups(Text, Whitespace)), # Line continuation
- # Comments
- (r"'.*?\n", Comment.Single),
- (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]*(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
- (bmax_vopwords), Operator),
- (r'[(),.:\[\]]', Punctuation),
- (r'(?:#[\w \t]*)', Name.Label),
- (r'(?:\?[\w \t]*)', Comment.Preproc),
- # Identifiers
- (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Whitespace, Punctuation, Name.Class)),
- (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
- (bmax_name, bmax_name),
- bygroups(Keyword.Reserved, Whitespace, Keyword.Namespace)),
- (bmax_func, bygroups(Name.Function, Whitespace, Keyword.Type,
- Operator, Whitespace, Punctuation, Whitespace,
- Keyword.Type, Name.Class, Whitespace,
- Keyword.Type, Whitespace, Punctuation)),
- (bmax_var, bygroups(Name.Variable, Whitespace, Keyword.Type, Operator,
- Whitespace, Punctuation, Whitespace, Keyword.Type,
- Name.Class, Whitespace, Keyword.Type)),
- (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Whitespace, Name.Class)),
- # Keywords
- (r'\b(Ptr)\b', Keyword.Type),
- (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
- (words((
- 'TNullMethodException', 'TNullFunctionException',
- 'TNullObjectException', 'TArrayBoundsException',
- 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception),
- (words((
- 'Strict', 'SuperStrict', 'Module', 'ModuleInfo',
- 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private',
- 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max',
- 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen',
- 'Framework', 'Include', 'Import', 'Extern', 'EndExtern',
- 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod',
- 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
- 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile',
- 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect',
- 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData',
- 'RestoreData'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- # Final resolve (for variable names and such)
- (r'(%s)' % (bmax_name), Name.Variable),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class BlitzBasicLexer(RegexLexer):
- """
- For BlitzBasic source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'BlitzBasic'
- url = 'http://blitzbasic.com'
- aliases = ['blitzbasic', 'b3d', 'bplus']
- filenames = ['*.bb', '*.decls']
- mimetypes = ['text/x-bb']
-
- bb_sktypes = r'@{1,2}|[#$%]'
- bb_name = r'[a-z]\w*'
- bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
- (bb_name, bb_sktypes, bb_name)
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'\s+', Whitespace),
- # Comments
- (r";.*?\n", Comment.Single),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not',
- 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str',
- 'First', 'Last', 'Before', 'After'),
- prefix=r'\b', suffix=r'\b'),
- Operator),
- (r'([+\-*/~=<>^])', Operator),
- (r'[(),:\[\]\\]', Punctuation),
- (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
- # Identifiers
- (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Whitespace, Name.Class)),
- (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Whitespace, Name.Label)),
- (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
- bygroups(Operator, Whitespace, Punctuation, Whitespace, Name.Class)),
- (r'\b%s\b([ \t]*)(\()' % bb_var,
- bygroups(Name.Function, Whitespace, Keyword.Type, Whitespace, Punctuation,
- Whitespace, Name.Class, Whitespace, Punctuation)),
- (r'\b(Function)\b([ \t]+)%s' % bb_var,
- bygroups(Keyword.Reserved, Whitespace, Name.Function, Whitespace, Keyword.Type,
- Whitespace, Punctuation, Whitespace, Name.Class)),
- (r'\b(Type)([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Whitespace, Name.Class)),
- # Keywords
- (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
- (words((
- 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert',
- 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
- 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend',
- 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default',
- 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- # Final resolve (for variable names and such)
- # (r'(%s)' % (bb_name), Name.Variable),
- (bb_var, bygroups(Name.Variable, Whitespace, Keyword.Type,
- Whitespace, Punctuation, Whitespace, Name.Class)),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"\n]+', String.Double),
- ],
- }
-
-
-class MonkeyLexer(RegexLexer):
- """
- For
- `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
- source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Monkey'
- aliases = ['monkey']
- filenames = ['*.monkey']
- mimetypes = ['text/x-monkey']
-
- name_variable = r'[a-z_]\w*'
- name_function = r'[A-Z]\w*'
- name_constant = r'[A-Z_][A-Z0-9_]*'
- name_class = r'[A-Z]\w*'
- name_module = r'[a-z0-9_]*'
-
- keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
- # ? == Bool // % == Int // # == Float // $ == String
- keyword_type_special = r'[?%#$]'
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- # Text
- (r'\s+', Whitespace),
- # Comments
- (r"'.*", Comment),
- (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
- # preprocessor directives
- (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
- # preprocessor variable (any line starting with '#' that is not a directive)
- (r'^#', Comment.Preproc, 'variables'),
- # String
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-fA-Z]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Native data types
- (r'\b%s\b' % keyword_type, Keyword.Type),
- # Exception handling
- (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
- (r'Throwable', Name.Exception),
- # Builtins
- (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
- (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
- (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
- # Keywords
- (r'(?i)^(Import)(\s+)(.*)(\n)',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace, Whitespace)),
- (r'(?i)^Strict\b.*\n', Keyword.Reserved),
- (r'(?i)(Const|Local|Global|Field)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'variables'),
- (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
- bygroups(Keyword.Reserved, Whitespace), 'classname'),
- (r'(?i)(Function|Method)(\s+)',
- bygroups(Keyword.Reserved, Whitespace), 'funcname'),
- (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
- r'Final|Abstract)\b', Keyword.Reserved),
- # Flow Control stuff
- (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
- r'Select|Case|Default|'
- r'While|Wend|'
- r'Repeat|Until|Forever|'
- r'For|To|Until|Step|EachIn|Next|'
- r'Exit|Continue)(?=\s)', Keyword.Reserved),
- # not used yet
- (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
- # Array
- (r'[\[\]]', Punctuation),
- # Other
- (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
- (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
- (r'[(){}!#,.:]', Punctuation),
- # catch the rest
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_function, Name.Function),
- (r'%s\b' % name_variable, Name.Variable),
- ],
- 'funcname': [
- (r'(?i)%s\b' % name_function, Name.Function),
- (r':', Punctuation, 'classname'),
- (r'\s+', Whitespace),
- (r'\(', Punctuation, 'variables'),
- (r'\)', Punctuation, '#pop')
- ],
- 'classname': [
- (r'%s\.' % name_module, Name.Namespace),
- (r'%s\b' % keyword_type, Keyword.Type),
- (r'%s\b' % name_class, Name.Class),
- # array (of given size)
- (r'(\[)(\s*)(\d*)(\s*)(\])',
- bygroups(Punctuation, Whitespace, Number.Integer, Whitespace, Punctuation)),
- # generics
- (r'\s+(?!<)', Whitespace, '#pop'),
- (r'<', Punctuation, '#push'),
- (r'>', Punctuation, '#pop'),
- (r'\n', Whitespace, '#pop'),
- default('#pop')
- ],
- 'variables': [
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_variable, Name.Variable),
- (r'%s' % keyword_type_special, Keyword.Type),
- (r'\s+', Whitespace),
- (r':', Punctuation, 'classname'),
- (r',', Punctuation, '#push'),
- default('#pop')
- ],
- 'string': [
- (r'[^"~]+', String.Double),
- (r'~q|~n|~r|~t|~z|~~', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'comment': [
- (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
- (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
- (r'\n', Comment.Multiline),
- (r'.+', Comment.Multiline),
- ],
- }
-
-
-class CbmBasicV2Lexer(RegexLexer):
- """
- For CBM BASIC V2 sources.
-
- .. versionadded:: 1.6
- """
- name = 'CBM BASIC V2'
- aliases = ['cbmbas']
- filenames = ['*.bas']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'rem.*\n', Comment.Single),
- (r'\s+', Whitespace),
- (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
- r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
- r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
- (r'data|restore|dim|let|def|fn', Keyword.Declaration),
- (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
- r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
- (r'[-+*/^<>=]', Operator),
- (r'not|and|or', Operator.Word),
- (r'"[^"\n]*.', String),
- (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
- (r'[(),:;]', Punctuation),
- (r'\w+[$%]?', Name),
- ]
- }
-
- def analyse_text(text):
- # if it starts with a line number, it shouldn't be a "modern" Basic
- # like VB.net
- if re.match(r'^\d+', text):
- return 0.2
-
-
-class QBasicLexer(RegexLexer):
- """
- For
- `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
- source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'QBasic'
- aliases = ['qbasic', 'basic']
- filenames = ['*.BAS', '*.bas']
- mimetypes = ['text/basic']
-
- declarations = ('DATA', 'LET')
-
- functions = (
- 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
- 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
- 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
- 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
- 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
- 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
- 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
- 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
- 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
- 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
- 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
- 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
- 'VARPTR$', 'VARSEG'
- )
-
- metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC')
-
- operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR')
-
- statements = (
- 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
- 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
- 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
- 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
- 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
- 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
- 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
- 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
- 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
- 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
- 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
- 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
- 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
- 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
- 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
- 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
- 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
- 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
- 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
- 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
- )
-
- keywords = (
- 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
- 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
- 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
- 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
- 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
- )
-
- tokens = {
- 'root': [
- (r'\n+', Text),
- (r'\s+', Text.Whitespace),
- (r'^(\s*)(\d*)(\s*)(REM .*)$',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
- Comment.Single)),
- (r'^(\s*)(\d+)(\s*)',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
- (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
- (r'(?=[^"]*)\'.*$', Comment.Single),
- (r'"[^\n"]*"', String.Double),
- (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
- bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
- (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name)),
- (r'(DIM)(\s+)(SHARED)(\s+)([^\s(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name.Variable.Global)),
- (r'(DIM)(\s+)([^\s(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
- (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
- bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
- Operator)),
- (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- (r'(SUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- include('declarations'),
- include('functions'),
- include('metacommands'),
- include('operators'),
- include('statements'),
- include('keywords'),
- (r'[a-zA-Z_]\w*[$@#&!]', Name.Variable.Global),
- (r'[a-zA-Z_]\w*\:', Name.Label),
- (r'\-?\d*\.\d+[@|#]?', Number.Float),
- (r'\-?\d+[@|#]', Number.Float),
- (r'\-?\d+#?', Number.Integer.Long),
- (r'\-?\d+#?', Number.Integer),
- (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
- (r'[\[\]{}(),;]', Punctuation),
- (r'[\w]+', Name.Variable.Global),
- ],
- # can't use regular \b because of X$()
- # XXX: use words() here
- 'declarations': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
- Keyword.Declaration),
- ],
- 'functions': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
- Keyword.Reserved),
- ],
- 'metacommands': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
- Keyword.Constant),
- ],
- 'operators': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
- ],
- 'statements': [
- (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
- Keyword.Reserved),
- ],
- 'keywords': [
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- ],
- }
-
- def analyse_text(text):
- if '$DYNAMIC' in text or '$STATIC' in text:
- return 0.9
-
-
-class VBScriptLexer(RegexLexer):
- """
- VBScript is a scripting language modeled on Visual Basic.
-
- .. versionadded:: 2.4
- """
- name = 'VBScript'
- aliases = ['vbscript']
- filenames = ['*.vbs', '*.VBS']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r"'[^\n]*", Comment.Single),
- (r'\s+', Whitespace),
- ('"', String.Double, 'string'),
- ('&h[0-9a-f]+', Number.Hex),
- # Float variant 1, for example: 1., 1.e2, 1.2e3
- (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),
- (r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Float variant 2, for example: .1, .1e2
- (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Float variant 3, for example: 123e45
- (r'[0-9]+', Number.Integer),
- ('#.+#', String), # date or time value
- (r'(dim)(\s+)([a-z_][a-z0-9_]*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Variable), 'dim_more'),
- (r'(function|sub)(\s+)([a-z_][a-z0-9_]*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Function)),
- (r'(class)(\s+)([a-z_][a-z0-9_]*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(const)(\s+)([a-z_][a-z0-9_]*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Constant)),
- (r'(end)(\s+)(class|function|if|property|sub|with)',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(on)(\s+)(error)(\s+)(goto)(\s+)(0)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Number.Integer)),
- (r'(on)(\s+)(error)(\s+)(resume)(\s+)(next)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(option)(\s+)(explicit)', bygroups(Keyword, Whitespace, Keyword)),
- (r'(property)(\s+)(get|let|set)(\s+)([a-z_][a-z0-9_]*)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration, Whitespace, Name.Property)),
- (r'rem\s.*[^\n]*', Comment.Single),
- (words(_vbscript_builtins.KEYWORDS, suffix=r'\b'), Keyword),
- (words(_vbscript_builtins.OPERATORS), Operator),
- (words(_vbscript_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
- (words(_vbscript_builtins.BUILTIN_CONSTANTS, suffix=r'\b'), Name.Constant),
- (words(_vbscript_builtins.BUILTIN_FUNCTIONS, suffix=r'\b'), Name.Builtin),
- (words(_vbscript_builtins.BUILTIN_VARIABLES, suffix=r'\b'), Name.Builtin),
- (r'[a-z_][a-z0-9_]*', Name),
- (r'\b_\n', Operator),
- (words(r'(),.:'), Punctuation),
- (r'.+(\n)?', Error)
- ],
- 'dim_more': [
- (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)',
- bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)),
- default('#pop'),
- ],
- 'string': [
- (r'[^"\n]+', String.Double),
- (r'\"\"', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\n', Error, '#pop'), # Unterminated string
- ],
- }
-
-
-class BBCBasicLexer(RegexLexer):
- """
- BBC Basic was supplied on the BBC Micro, and later Acorn RISC OS.
- It is also used by BBC Basic For Windows.
-
- .. versionadded:: 2.4
- """
- base_keywords = ['OTHERWISE', 'AND', 'DIV', 'EOR', 'MOD', 'OR', 'ERROR',
- 'LINE', 'OFF', 'STEP', 'SPC', 'TAB', 'ELSE', 'THEN',
- 'OPENIN', 'PTR', 'PAGE', 'TIME', 'LOMEM', 'HIMEM', 'ABS',
- 'ACS', 'ADVAL', 'ASC', 'ASN', 'ATN', 'BGET', 'COS', 'COUNT',
- 'DEG', 'ERL', 'ERR', 'EVAL', 'EXP', 'EXT', 'FALSE', 'FN',
- 'GET', 'INKEY', 'INSTR', 'INT', 'LEN', 'LN', 'LOG', 'NOT',
- 'OPENUP', 'OPENOUT', 'PI', 'POINT', 'POS', 'RAD', 'RND',
- 'SGN', 'SIN', 'SQR', 'TAN', 'TO', 'TRUE', 'USR', 'VAL',
- 'VPOS', 'CHR$', 'GET$', 'INKEY$', 'LEFT$', 'MID$',
- 'RIGHT$', 'STR$', 'STRING$', 'EOF', 'PTR', 'PAGE', 'TIME',
- 'LOMEM', 'HIMEM', 'SOUND', 'BPUT', 'CALL', 'CHAIN', 'CLEAR',
- 'CLOSE', 'CLG', 'CLS', 'DATA', 'DEF', 'DIM', 'DRAW', 'END',
- 'ENDPROC', 'ENVELOPE', 'FOR', 'GOSUB', 'GOTO', 'GCOL', 'IF',
- 'INPUT', 'LET', 'LOCAL', 'MODE', 'MOVE', 'NEXT', 'ON',
- 'VDU', 'PLOT', 'PRINT', 'PROC', 'READ', 'REM', 'REPEAT',
- 'REPORT', 'RESTORE', 'RETURN', 'RUN', 'STOP', 'COLOUR',
- 'TRACE', 'UNTIL', 'WIDTH', 'OSCLI']
-
- basic5_keywords = ['WHEN', 'OF', 'ENDCASE', 'ENDIF', 'ENDWHILE', 'CASE',
- 'CIRCLE', 'FILL', 'ORIGIN', 'POINT', 'RECTANGLE', 'SWAP',
- 'WHILE', 'WAIT', 'MOUSE', 'QUIT', 'SYS', 'INSTALL',
- 'LIBRARY', 'TINT', 'ELLIPSE', 'BEATS', 'TEMPO', 'VOICES',
- 'VOICE', 'STEREO', 'OVERLAY', 'APPEND', 'AUTO', 'CRUNCH',
- 'DELETE', 'EDIT', 'HELP', 'LIST', 'LOAD', 'LVAR', 'NEW',
- 'OLD', 'RENUMBER', 'SAVE', 'TEXTLOAD', 'TEXTSAVE',
- 'TWIN', 'TWINO', 'INSTALL', 'SUM', 'BEAT']
-
-
- name = 'BBC Basic'
- aliases = ['bbcbasic']
- filenames = ['*.bbc']
-
- tokens = {
- 'root': [
- (r"[0-9]+", Name.Label),
- (r"(\*)([^\n]*)",
- bygroups(Keyword.Pseudo, Comment.Special)),
- default('code'),
- ],
-
- 'code': [
- (r"(REM)([^\n]*)",
- bygroups(Keyword.Declaration, Comment.Single)),
- (r'\n', Whitespace, 'root'),
- (r'\s+', Whitespace),
- (r':', Comment.Preproc),
-
- # Some special cases to make functions come out nicer
- (r'(DEF)(\s*)(FN|PROC)([A-Za-z_@][\w@]*)',
- bygroups(Keyword.Declaration, Whitespace,
- Keyword.Declaration, Name.Function)),
- (r'(FN|PROC)([A-Za-z_@][\w@]*)',
- bygroups(Keyword, Name.Function)),
-
- (r'(GOTO|GOSUB|THEN|RESTORE)(\s*)(\d+)',
- bygroups(Keyword, Whitespace, Name.Label)),
-
- (r'(TRUE|FALSE)', Keyword.Constant),
- (r'(PAGE|LOMEM|HIMEM|TIME|WIDTH|ERL|ERR|REPORT\$|POS|VPOS|VOICES)',
- Keyword.Pseudo),
-
- (words(base_keywords), Keyword),
- (words(basic5_keywords), Keyword),
-
- ('"', String.Double, 'string'),
-
- ('%[01]{1,32}', Number.Bin),
- ('&[0-9a-f]{1,8}', Number.Hex),
-
- (r'[+-]?[0-9]+\.[0-9]*(E[+-]?[0-9]+)?', Number.Float),
- (r'[+-]?\.[0-9]+(E[+-]?[0-9]+)?', Number.Float),
- (r'[+-]?[0-9]+E[+-]?[0-9]+', Number.Float),
- (r'[+-]?\d+', Number.Integer),
-
- (r'([A-Za-z_@][\w@]*[%$]?)', Name.Variable),
- (r'([+\-]=|[$!|?+\-*/%^=><();]|>=|<=|<>|<<|>>|>>>|,)', Operator),
- ],
- 'string': [
- (r'[^"\n]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\n', Error, 'root'), # Unterminated string
- ],
- }
-
- def analyse_text(text):
- if text.startswith('10REM >') or text.startswith('REM >'):
- return 0.9
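
CbmBasicV2Lexer, QBasicLexer and BBCBasicLexer each implement analyse_text, the hook that pygments.lexers.guess_lexer uses to score candidate lexers when no filename is available: a leading line number scores 0.2 for CBM BASIC, a $DYNAMIC or $STATIC metacommand scores 0.9 for QBasic, and a source starting with 'REM >' scores 0.9 for BBC Basic. A minimal sketch of that content-based detection, assuming an installed Pygments; the QBasic fragment is invented:

    from pygments.lexers import guess_lexer

    # The $DYNAMIC metacommand should push QBasicLexer's analyse_text score to 0.9.
    sample = "' $DYNAMIC\nDIM grid(10)\nPRINT \"hello\"\n"
    print(guess_lexer(sample).__class__.__name__)
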
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/bdd.py b/venv/lib/python3.11/site-packages/pygments/lexers/bdd.py
deleted file mode 100644
index 8cf47f8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/bdd.py
+++ /dev/null
@@ -1,58 +0,0 @@
-"""
- pygments.lexers.bdd
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for BDD (Behavior-driven development).
- More information: https://en.wikipedia.org/wiki/Behavior-driven_development
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Comment, Keyword, Name, String, Number, Text, \
- Punctuation, Whitespace
-
-__all__ = ['BddLexer']
-
-class BddLexer(RegexLexer):
- """
- Lexer for BDD (Behavior-driven development), which highlights not only
- keywords but also comments, punctuation, strings, numbers, and variables.
-
- .. versionadded:: 2.11
- """
-
- name = 'Bdd'
- aliases = ['bdd']
- filenames = ['*.feature']
- mimetypes = ['text/x-bdd']
-
- step_keywords = (r'Given|When|Then|Add|And|Feature|Scenario Outline|'
- r'Scenario|Background|Examples|But')
-
- tokens = {
- 'comments': [
- (r'^\s*#.*$', Comment),
- ],
- 'miscellaneous': [
- (r'(<|>|\[|\]|=|\||:|\(|\)|\{|\}|,|\.|;|-|_|\$)', Punctuation),
- (r'((?<=\<)[^\\>]+(?=\>))', Name.Variable),
- (r'"([^\"]*)"', String),
- (r'^@\S+', Name.Label),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number),
- ],
- 'root': [
- (r'\n|\s+', Whitespace),
- (step_keywords, Keyword),
- include('comments'),
- include('miscellaneous'),
- include('numbers'),
- (r'\S+', Text),
- ]
- }
-
- def analyse_text(self, text):
- return
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/berry.py b/venv/lib/python3.11/site-packages/pygments/lexers/berry.py
deleted file mode 100644
index e078fa1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/berry.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""
- pygments.lexers.berry
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Berry.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups
-from pygments.token import Comment, Whitespace, Operator, Keyword, Name, \
- String, Number, Punctuation
-
-__all__ = ['BerryLexer']
-
-
-class BerryLexer(RegexLexer):
- """
- For `berry <http://github.com/berry-lang/berry>`_ source code.
-
- .. versionadded:: 2.12.0
- """
- name = 'Berry'
- aliases = ['berry', 'be']
- filenames = ['*.be']
- mimetypes = ['text/x-berry', 'application/x-berry']
-
- _name = r'\b[^\W\d]\w*'
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('numbers'),
- include('keywords'),
- (rf'(def)(\s+)({_name})',
- bygroups(Keyword.Declaration, Whitespace, Name.Function)),
- (rf'\b(class)(\s+)({_name})',
- bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (rf'\b(import)(\s+)({_name})',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- include('expr')
- ],
- 'expr': [
- (r'[^\S\n]+', Whitespace),
- (r'\.\.|[~!%^&*+=|?:<>/-]', Operator),
- (r'[(){}\[\],.;]', Punctuation),
- include('controls'),
- include('builtins'),
- include('funccall'),
- include('member'),
- include('name'),
- include('strings')
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- (r'#-(.|\n)*?-#', Comment.Multiline),
- (r'#.*?$', Comment.Single)
- ],
- 'keywords': [
- (words((
- 'as', 'break', 'continue', 'import', 'static', 'self', 'super'),
- suffix=r'\b'), Keyword.Reserved),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(var|def)\b', Keyword.Declaration)
- ],
- 'controls': [
- (words((
- 'if', 'elif', 'else', 'for', 'while', 'do', 'end', 'break',
- 'continue', 'return', 'try', 'except', 'raise'),
- suffix=r'\b'), Keyword)
- ],
- 'builtins': [
- (words((
- 'assert', 'bool', 'input', 'classname', 'classof', 'number', 'real',
- 'bytes', 'compile', 'map', 'list', 'int', 'isinstance', 'print',
- 'range', 'str', 'super', 'module', 'size', 'issubclass', 'open',
- 'file', 'type', 'call'),
- suffix=r'\b'), Name.Builtin)
- ],
- 'numbers': [
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'-?\d+', Number.Integer),
- (r'(-?\d+\.?|\.\d)\d*([eE][+-]?\d+)?', Number.Float)
- ],
- 'name': [
- (_name, Name)
- ],
- 'funccall': [
- (rf'{_name}(?=\s*\()', Name.Function, '#pop')
- ],
- 'member': [
- (rf'(?<=\.){_name}\b(?!\()', Name.Attribute, '#pop')
- ],
- 'strings': [
- (r'"([^\\]|\\.)*?"', String.Double, '#pop'),
- (r'\'([^\\]|\\.)*?\'', String.Single, '#pop')
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/bibtex.py b/venv/lib/python3.11/site-packages/pygments/lexers/bibtex.py
deleted file mode 100644
index 34883cd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/bibtex.py
+++ /dev/null
@@ -1,159 +0,0 @@
-"""
- pygments.lexers.bibtex
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for BibTeX bibliography data and styles.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, default, \
- words
-from pygments.token import Name, Comment, String, Error, Number, Keyword, \
- Punctuation, Whitespace
-
-__all__ = ['BibTeXLexer', 'BSTLexer']
-
-
-class BibTeXLexer(ExtendedRegexLexer):
- """
- A lexer for BibTeX bibliography data format.
-
- .. versionadded:: 2.2
- """
-
- name = 'BibTeX'
- aliases = ['bibtex', 'bib']
- filenames = ['*.bib']
- mimetypes = ["text/x-bibtex"]
- flags = re.IGNORECASE
-
- ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
- IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
-
- def open_brace_callback(self, match, ctx):
- opening_brace = match.group()
- ctx.opening_brace = opening_brace
- yield match.start(), Punctuation, opening_brace
- ctx.pos = match.end()
-
- def close_brace_callback(self, match, ctx):
- closing_brace = match.group()
- if (
- ctx.opening_brace == '{' and closing_brace != '}' or
- ctx.opening_brace == '(' and closing_brace != ')'
- ):
- yield match.start(), Error, closing_brace
- else:
- yield match.start(), Punctuation, closing_brace
- del ctx.opening_brace
- ctx.pos = match.end()
-
- tokens = {
- 'root': [
- include('whitespace'),
- (r'@comment(?!ary)', Comment),
- ('@preamble', Name.Class, ('closing-brace', 'value', 'opening-brace')),
- ('@string', Name.Class, ('closing-brace', 'field', 'opening-brace')),
- ('@' + IDENTIFIER, Name.Class,
- ('closing-brace', 'command-body', 'opening-brace')),
- ('.+', Comment),
- ],
- 'opening-brace': [
- include('whitespace'),
- (r'[{(]', open_brace_callback, '#pop'),
- ],
- 'closing-brace': [
- include('whitespace'),
- (r'[})]', close_brace_callback, '#pop'),
- ],
- 'command-body': [
- include('whitespace'),
- (r'[^\s\,\}]+', Name.Label, ('#pop', 'fields')),
- ],
- 'fields': [
- include('whitespace'),
- (',', Punctuation, 'field'),
- default('#pop'),
- ],
- 'field': [
- include('whitespace'),
- (IDENTIFIER, Name.Attribute, ('value', '=')),
- default('#pop'),
- ],
- '=': [
- include('whitespace'),
- ('=', Punctuation, '#pop'),
- ],
- 'value': [
- include('whitespace'),
- (IDENTIFIER, Name.Variable),
- ('"', String, 'quoted-string'),
- (r'\{', String, 'braced-string'),
- (r'[\d]+', Number),
- ('#', Punctuation),
- default('#pop'),
- ],
- 'quoted-string': [
- (r'\{', String, 'braced-string'),
- ('"', String, '#pop'),
- (r'[^\{\"]+', String),
- ],
- 'braced-string': [
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- (r'[^\{\}]+', String),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- }
-
-
-class BSTLexer(RegexLexer):
- """
- A lexer for BibTeX bibliography styles.
-
- .. versionadded:: 2.2
- """
-
- name = 'BST'
- aliases = ['bst', 'bst-pybtex']
- filenames = ['*.bst']
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
- (words(['read', 'sort']), Keyword),
- (words(['execute', 'integers', 'iterate', 'reverse', 'strings']),
- Keyword, ('group')),
- (words(['function', 'macro']), Keyword, ('group', 'group')),
- (words(['entry']), Keyword, ('group', 'group', 'group')),
- ],
- 'group': [
- include('whitespace'),
- (r'\{', Punctuation, ('#pop', 'group-end', 'body')),
- ],
- 'group-end': [
- include('whitespace'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'body': [
- include('whitespace'),
- (r"\'[^#\"\{\}\s]+", Name.Function),
- (r'[^#\"\{\}\s]+\$', Name.Builtin),
- (r'[^#\"\{\}\s]+', Name.Variable),
- (r'"[^\"]*"', String),
- (r'#-?\d+', Number),
- (r'\{', Punctuation, ('group-end', 'body')),
- default('#pop'),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ('%.*?$', Comment.Single),
- ],
- }
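
BibTeXLexer subclasses ExtendedRegexLexer so that open_brace_callback can record on the lexing context whether an entry was opened with '{' or '(', letting close_brace_callback flag a mismatched closer as an Error token. A minimal sketch, assuming an installed Pygments; the entry below is invented, with a deliberately wrong closing delimiter:

    from pygments.lexers import BibTeXLexer

    # The entry opens with '{', so the final ')' should come back as an Error token.
    entry = '@string{acm = "Association for Computing Machinery")'
    for token_type, value in BibTeXLexer().get_tokens(entry):
        print(token_type, repr(value))
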
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/blueprint.py b/venv/lib/python3.11/site-packages/pygments/lexers/blueprint.py
deleted file mode 100644
index ec5c618..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/blueprint.py
+++ /dev/null
@@ -1,174 +0,0 @@
-"""
- pygments.lexers.blueprint
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Blueprint UI markup language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words
-from pygments.token import (
- Comment,
- Operator,
- Keyword,
- Name,
- String,
- Number,
- Punctuation,
- Whitespace,
-)
-
-__all__ = ["BlueprintLexer"]
-
-
-class BlueprintLexer(RegexLexer):
- """
- For Blueprint UI markup.
-
- .. versionadded:: 2.16
- """
-
- name = "Blueprint"
- aliases = ["blueprint"]
- filenames = ["*.blp"]
- mimetypes = ["text/x-blueprint"]
- url = "https://gitlab.gnome.org/jwestman/blueprint-compiler"
-
- flags = re.IGNORECASE
- tokens = {
- "root": [
- include("block-content"),
- ],
- "type": [
- (r"\$\s*[a-z_][a-z0-9_\-]*", Name.Class),
- (r"(?:([a-z_][a-z0-9_\-]*)(\s*)(\.)(\s*))?([a-z_][a-z0-9_\-]*)",
- bygroups(Name.Namespace, Whitespace, Punctuation, Whitespace, Name.Class)),
- ],
- "whitespace": [
- (r"\s+", Whitespace),
- (r"//.*?\n", Comment.Single),
- (r"/\*", Comment.Multiline, "comment-multiline"),
- ],
- "comment-multiline": [
- (r"\*/", Comment.Multiline, "#pop"),
- (r"[^*]+", Comment.Multiline),
- (r"\*", Comment.Multiline),
- ],
- "value": [
- (r"(typeof)(\s*)(<)", bygroups(Keyword, Whitespace, Punctuation), "typeof"),
- (words(("true", "false", "null")), Keyword.Constant),
- (r"[a-z_][a-z0-9_\-]*", Name.Variable),
- (r"\|", Operator),
- (r'".*?"', String.Double),
- (r"\'.*?\'", String.Single),
- (r"0x[\d_]*", Number.Hex),
- (r"[0-9_]+", Number.Integer),
- (r"\d[\d\.a-z_]*", Number),
- ],
- "typeof": [
- include("whitespace"),
- include("type"),
- (r">", Punctuation, "#pop"),
- ],
- "content": [
- include("whitespace"),
- # Keywords
- (words(("after", "bidirectional", "bind-property", "bind", "default",
- "destructive", "disabled", "inverted", "no-sync-create",
- "suggested", "swapped", "sync-create", "template")),
- Keyword),
- # Translated strings
- (r"(C?_)(\s*)(\()",
- bygroups(Name.Function.Builtin, Whitespace, Punctuation),
- "paren-content"),
- # Cast expressions
- (r"(as)(\s*)(<)", bygroups(Keyword, Whitespace, Punctuation), "typeof"),
- # Closures
- (r"(\$?[a-z_][a-z0-9_\-]*)(\s*)(\()",
- bygroups(Name.Function, Whitespace, Punctuation),
- "paren-content"),
- # Objects
- (r"(?:(\$\s*[a-z_][a-z0-9_\-]+)|(?:([a-z_][a-z0-9_\-]*)(\s*)(\.)(\s*))?([a-z_][a-z0-9_\-]*))(?:(\s+)([a-z_][a-z0-9_\-]*))?(\s*)(\{)",
- bygroups(Name.Class, Name.Namespace, Whitespace, Punctuation, Whitespace,
- Name.Class, Whitespace, Name.Variable, Whitespace, Punctuation),
- "brace-block"),
- # Misc
- include("value"),
- (r",|\.", Punctuation),
- ],
- "block-content": [
- # Import statements
- (r"(using)(\s+)([a-z_][a-z0-9_\-]*)(\s+)(\d[\d\.]*)(;)",
- bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
- Name.Namespace, Punctuation)),
- # Menus
- (r"(menu|section|submenu)(?:(\s+)([a-z_][a-z0-9_\-]*))?(\s*)(\{)",
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace, Punctuation),
- "brace-block"),
- (r"(item)(\s*)(\{)",
- bygroups(Keyword, Whitespace, Punctuation),
- "brace-block"),
- (r"(item)(\s*)(\()",
- bygroups(Keyword, Whitespace, Punctuation),
- "paren-block"),
- # Templates
- (r"template", Keyword.Declaration, "template"),
- # Nested blocks. When extensions are added, this is where they go.
- (r"(responses|items|mime-types|patterns|suffixes|marks|widgets|strings|styles)(\s*)(\[)",
- bygroups(Keyword, Whitespace, Punctuation),
- "bracket-block"),
- (r"(accessibility|setters|layout|item)(\s*)(\{)",
- bygroups(Keyword, Whitespace, Punctuation),
- "brace-block"),
- (r"(condition|mark|item)(\s*)(\()",
- bygroups(Keyword, Whitespace, Punctuation),
- "paren-content"),
- (r"\[", Punctuation, "child-type"),
- # Properties and signals
- (r"([a-z_][a-z0-9_\-]*(?:::[a-z0-9_]+)?)(\s*)(:|=>)",
- bygroups(Name.Property, Whitespace, Punctuation),
- "statement"),
- include("content"),
- ],
- "paren-block": [
- include("block-content"),
- (r"\)", Punctuation, "#pop"),
- ],
- "paren-content": [
- include("content"),
- (r"\)", Punctuation, "#pop"),
- ],
- "bracket-block": [
- include("block-content"),
- (r"\]", Punctuation, "#pop"),
- ],
- "brace-block": [
- include("block-content"),
- (r"\}", Punctuation, "#pop"),
- ],
- "statement": [
- include("content"),
- (r";", Punctuation, "#pop"),
- ],
- "child-type": [
- include("whitespace"),
- (r"(action)(\s+)(response)(\s*)(=)(\s*)",
- bygroups(Keyword, Whitespace, Name.Attribute, Whitespace,
- Punctuation, Whitespace)),
- (words(("default", "internal-child", "response")), Keyword),
- (r"[a-z_][a-z0-9_\-]*", Name.Decorator),
- include("value"),
- (r"=", Punctuation),
- (r"\]", Punctuation, "#pop"),
- ],
- "template": [
- include("whitespace"),
- include("type"),
- (r":", Punctuation),
- (r"\{", Punctuation, ("#pop", "brace-block")),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/boa.py b/venv/lib/python3.11/site-packages/pygments/lexers/boa.py
deleted file mode 100644
index f7baf3c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/boa.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
- pygments.lexers.boa
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Boa language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import String, Comment, Keyword, Name, Number, Operator, \
- Punctuation, Whitespace
-
-__all__ = ['BoaLexer']
-
-
-class BoaLexer(RegexLexer):
- """
- Lexer for the `Boa <http://boa.cs.iastate.edu/docs/>`_ language.
-
- .. versionadded:: 2.4
- """
- name = 'Boa'
- aliases = ['boa']
- filenames = ['*.boa']
-
- reserved = words(
- ('input', 'output', 'of', 'weight', 'before', 'after', 'stop',
- 'ifall', 'foreach', 'exists', 'function', 'break', 'switch', 'case',
- 'visitor', 'default', 'return', 'visit', 'while', 'if', 'else'),
- suffix=r'\b', prefix=r'\b')
- keywords = words(
- ('bottom', 'collection', 'maximum', 'mean', 'minimum', 'set', 'sum',
- 'top', 'string', 'int', 'bool', 'float', 'time', 'false', 'true',
- 'array', 'map', 'stack', 'enum', 'type'), suffix=r'\b', prefix=r'\b')
- classes = words(
- ('Project', 'ForgeKind', 'CodeRepository', 'Revision', 'RepositoryKind',
- 'ChangedFile', 'FileKind', 'ASTRoot', 'Namespace', 'Declaration', 'Type',
- 'Method', 'Variable', 'Statement', 'Expression', 'Modifier',
- 'StatementKind', 'ExpressionKind', 'ModifierKind', 'Visibility',
- 'TypeKind', 'Person', 'ChangeKind'),
- suffix=r'\b', prefix=r'\b')
- operators = ('->', ':=', ':', '=', '<<', '!', '++', '||',
- '&&', '+', '-', '*', ">", "<")
- string_sep = ('`', '\"')
- built_in_functions = words(
- (
- # Array functions
- 'new', 'sort',
- # Date & Time functions
- 'yearof', 'dayofyear', 'hourof', 'minuteof', 'secondof', 'now',
- 'addday', 'addmonth', 'addweek', 'addyear', 'dayofmonth', 'dayofweek',
- 'dayofyear', 'formattime', 'trunctoday', 'trunctohour', 'trunctominute',
- 'trunctomonth', 'trunctosecond', 'trunctoyear',
- # Map functions
- 'clear', 'haskey', 'keys', 'lookup', 'remove', 'values',
- # Math functions
- 'abs', 'acos', 'acosh', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
- 'ceil', 'cos', 'cosh', 'exp', 'floor', 'highbit', 'isfinite', 'isinf',
- 'isnan', 'isnormal', 'log', 'log10', 'max', 'min', 'nrand', 'pow',
- 'rand', 'round', 'sin', 'sinh', 'sqrt', 'tan', 'tanh', 'trunc',
- # Other functions
- 'def', 'hash', 'len',
- # Set functions
- 'add', 'contains', 'remove',
- # String functions
- 'format', 'lowercase', 'match', 'matchposns', 'matchstrs', 'regex',
- 'split', 'splitall', 'splitn', 'strfind', 'strreplace', 'strrfind',
- 'substring', 'trim', 'uppercase',
- # Type Conversion functions
- 'bool', 'float', 'int', 'string', 'time',
- # Domain-Specific functions
- 'getast', 'getsnapshot', 'hasfiletype', 'isfixingrevision', 'iskind',
- 'isliteral',
- ),
- prefix=r'\b',
- suffix=r'\(')
-
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (reserved, Keyword.Reserved),
- (built_in_functions, Name.Function),
- (keywords, Keyword.Type),
- (classes, Name.Classes),
- (words(operators), Operator),
- (r'[][(),;{}\\.]', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"`(\\\\|\\[^\\]|[^`\\])*`", String.Backtick),
- (words(string_sep), String.Delimiter),
- (r'[a-zA-Z_]+', Name.Variable),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Whitespace), # Whitespace
- ]
- }
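
BoaLexer builds most of its rules with the words() helper, which several other lexers in this diff also use: it wraps a tuple of literal words and is only expanded into a single optimized alternation regex (with the given prefix and suffix) when the token definitions are processed. A minimal sketch of what it produces, assuming an installed Pygments; the word list is invented:

    from pygments.lexer import words

    # .get() forces the deferred construction and returns the combined regex source.
    pattern = words(('input', 'output', 'of'), prefix=r'\b', suffix=r'\b')
    print(pattern.get())
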
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/bqn.py b/venv/lib/python3.11/site-packages/pygments/lexers/bqn.py
deleted file mode 100644
index af84b4d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/bqn.py
+++ /dev/null
@@ -1,110 +0,0 @@
-"""
- pygments.lexers.bqn
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for BQN.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['BQNLexer']
-
-
-class BQNLexer(RegexLexer):
- """
- A simple BQN lexer.
-
- .. versionadded:: 2.16
- """
- name = 'BQN'
- url = 'https://mlochbaum.github.io/BQN/index.html'
- aliases = ['bqn']
- filenames = ['*.bqn']
- mimetypes = []
-
- tokens = {
- 'root': [
- # Whitespace
- # ==========
- (r'\s+', Whitespace),
- #
- # Comment
- # =======
- # '#' is a comment that continues to the end of the line
- (r'#.*$', Comment.Single),
- #
- # Strings
- # =======
- (r'\'((\'\')|[^\'])*\'', String.Single),
- (r'"(("")|[^"])*"', String.Double),
- #
- # Null Character
- # ==============
- # Literal representation of the null character
- (r'@', String.Symbol),
- #
- # Punctuation
- # ===========
- # This token type is used for diamond, commas
- # and array and list brackets and strand syntax
- (r'[\.⋄,\[\]⟨⟩‿]', Punctuation),
- #
- # Expression Grouping
- # ===================
- # Since this token type is important in BQN, it is not included in
- # the punctuation token type but rather in the following one
- (r'[\(\)]', String.Regex),
- #
- # Numbers
- # =======
- # Includes the numeric literals and the Nothing character
- (r'¯?([0-9]+\.?[0-9]+|[0-9]+)([Ee][¯]?[0-9]+)?|¯|∞|π|·', Number),
- #
- # Variables
- # =========
- (r'\b[a-z]\w*\b', Name.Variable),
- #
- # 1-Modifiers
- # ===========
- (r'[˙˜˘¨⌜⁼´˝`𝕣]', Name.Attribute),
- (r'\b_[a-zA-Z0-9]+\b', Name.Attribute),
- #
- # 2-Modifiers
- # ===========
- (r'[∘○⊸⟜⌾⊘◶⎉⚇⍟⎊]', Name.Property),
- (r'\b_[a-zA-Z0-9]+_\b', Name.Property),
- #
- # Functions
- # =========
- # The monadic or dyadic function primitives and function
- # operands and arguments, along with function self-reference
- (r'[+\-×÷\*√⌊⌈∧∨¬|≤<>≥=≠≡≢⊣⊢⥊∾≍⋈↑↓↕«»⌽⍉/⍋⍒⊏⊑⊐⊒∊⍷⊔!𝕎𝕏𝔽𝔾𝕊]',
- Operator),
- (r'[A-Z]\w*|•\w+\b', Operator),
- #
- # Constant
- # ========
- (r'˙', Name.Constant),
- #
- # Define/Export/Change
- # ====================
- (r'[←↩⇐]', Keyword.Declaration),
- #
- # Blocks
- # ======
- (r'[{}]', Keyword.Type),
- #
- # Extra characters
- # ================
- (r'[;:?𝕨𝕩𝕗𝕘𝕤]', Name.Entity),
- #
-
- ],
- }
-
-
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/business.py b/venv/lib/python3.11/site-packages/pygments/lexers/business.py
deleted file mode 100644
index d2f2dd3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/business.py
+++ /dev/null
@@ -1,626 +0,0 @@
-"""
- pygments.lexers.business
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for "business-oriented" languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Whitespace
-
-from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
-
-__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
- 'GoodDataCLLexer', 'MaqlLexer']
-
-
-class CobolLexer(RegexLexer):
- """
- Lexer for OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOL'
- aliases = ['cobol']
- filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
- mimetypes = ['text/x-cobol']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: by PICTURE and USAGE
- # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
- # Logical (?): NOT, AND, OR
-
- # Reserved words:
- # http://opencobol.add1tocobol.com/#reserved-words
- # Intrinsics:
- # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
-
- tokens = {
- 'root': [
- include('comment'),
- include('strings'),
- include('core'),
- include('nums'),
- (r'[a-z0-9]([\w\-]*[a-z0-9]+)?', Name.Variable),
- # (r'[\s]+', Text),
- (r'[ \t]+', Whitespace),
- ],
- 'comment': [
- (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
- ],
- 'core': [
- # Figurative constants
- (r'(^|(?<=[^\w\-]))(ALL\s+)?'
- r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
- r'\s*($|(?=[^\w\-]))',
- Name.Constant),
-
- # Reserved words STATEMENTS and other bolds
- (words((
- 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
- 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
- 'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
- 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
- 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
- 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
- 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
- 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
- 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
- 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
- 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
- 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
- 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
- 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
- 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
- 'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^\w\-]))',
- suffix=r'\s*($|(?=[^\w\-]))'),
- Keyword.Reserved),
-
- # Reserved words
- (words((
- 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
- 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
- 'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
- 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
- 'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC',
- 'AUTOTERMINATE', 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
- 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
- 'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL',
- 'COLLATING', 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE',
- 'COMMIT', 'COMMON', 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
- 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
- 'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE',
- 'DEBUGGING', 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
- 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
- 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
- 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
- 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
- 'EXCLUSIVE', 'EXTEND', 'EXTERNAL', 'FILE-ID', 'FILLER', 'FINAL',
- 'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
- 'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL',
- 'FUNCTION', 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
- 'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
- 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
- 'INITIAL', 'INITIALIZED', 'INPUT', 'INTO', 'INTRINSIC', 'INVALID',
- 'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
- 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
- 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
- 'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE', 'MULTIPLE',
- 'NATIONAL', 'NATIONAL-EDITED', 'NATIVE', 'NEGATIVE', 'NEXT', 'NO',
- 'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC', 'NUMERIC-EDITED',
- 'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
- 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
- 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
- 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
- 'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
- 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
- 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
- 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
- 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
- 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
- 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
- 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
- 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
- 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
- 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
- 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUBKEY', 'SUM',
- 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
- 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
- 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
- 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
- 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
- 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
- prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
- Keyword.Pseudo),
-
- # inactive reserved words
- (words((
- 'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE',
- 'B-AND', 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER',
- 'CF', 'CH', 'CHAIN', 'CLASS-ID', 'CLASSIFICATION', 'COMMUNICATION',
- 'CONDITION', 'DATA-POINTER', 'DESTINATION', 'DISABLE', 'EC', 'EGI',
- 'EMI', 'ENABLE', 'END-RECEIVE', 'ENTRY-CONVENTION', 'EO', 'ESI',
- 'EXCEPTION-OBJECT', 'EXPANDS', 'FACTORY', 'FLOAT-BINARY-16',
- 'FLOAT-BINARY-34', 'FLOAT-BINARY-7', 'FLOAT-DECIMAL-16',
- 'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT', 'FUNCTION-POINTER',
- 'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY', 'INHERITS',
- 'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
- 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
- 'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE',
- 'NORMAL', 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE',
- 'PAGE-COUNTER', 'PF', 'PH', 'PROPERTY', 'PROTOTYPE', 'PURGE',
- 'QUEUE', 'RAISE', 'RAISING', 'RECEIVE', 'RELATION', 'REPLACE',
- 'REPRESENTS-NOT-A-NUMBER', 'RESET', 'RESUME', 'RETRY', 'RF', 'RH',
- 'SECONDS', 'SEGMENT', 'SELF', 'SEND', 'SOURCES', 'STATEMENT',
- 'STEP', 'STRONG', 'SUB-QUEUE-1', 'SUB-QUEUE-2', 'SUB-QUEUE-3',
- 'SUPER', 'SYMBOL', 'SYSTEM-DEFAULT', 'TABLE', 'TERMINAL', 'TEXT',
- 'TYPEDEF', 'UCS-4', 'UNIVERSAL', 'USER-DEFAULT', 'UTF-16', 'UTF-8',
- 'VAL-STATUS', 'VALID', 'VALIDATE', 'VALIDATE-STATUS'),
- prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
- Error),
-
- # Data Types
- (r'(^|(?<=[^\w\-]))'
- r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
- r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
- r'BINARY-C-LONG|'
- r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
- r'BINARY)\s*($|(?=[^\w\-]))', Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
-
- # (r'(::)', Keyword.Declaration),
-
- (r'([(),;:&%.])', Punctuation),
-
- # Intrinsics
- (r'(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
- r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
- r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
- r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
- r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
- r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
- r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
- r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
- r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
- r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
- r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
- r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
- r'($|(?=[^\w\-]))', Name.Function),
-
- # Booleans
- (r'(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))', Name.Builtin),
- # Comparing Operators
- (r'(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|'
- r'greater|less|than|not|and|or)\s*($|(?=[^\w\-]))', Operator.Word),
- ],
-
- # \"[^\"\n]*\"|\'[^\'\n]*\'
- 'strings': [
- # apparently strings can be delimited by EOL if they are continued
- # in the next line
- (r'"[^"\n]*("|\n)', String.Double),
- (r"'[^'\n]*('|\n)", String.Single),
- ],
-
- 'nums': [
- (r'\d+(\s*|\.$|$)', Number.Integer),
- (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
- ],
- }
-
-
-class CobolFreeformatLexer(CobolLexer):
- """
- Lexer for Free format OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOLFree'
- aliases = ['cobolfree']
- filenames = ['*.cbl', '*.CBL']
- mimetypes = []
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'comment': [
- (r'(\*>.*\n|^\w*\*.*$)', Comment),
- ],
- }
-
-
-class ABAPLexer(RegexLexer):
- """
- Lexer for ABAP, SAP's integrated language.
-
- .. versionadded:: 1.1
- """
- name = 'ABAP'
- aliases = ['abap']
- filenames = ['*.abap', '*.ABAP']
- mimetypes = ['text/x-abap']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'common': [
- (r'\s+', Whitespace),
- (r'^\*.*$', Comment.Single),
- (r'\".*?\n', Comment.Single),
- (r'##\w+', Comment.Special),
- ],
- 'variable-names': [
- (r'<\S+>', Name.Variable),
- (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
- ],
- 'root': [
- include('common'),
- # function calls
- (r'CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)',
- Keyword),
- (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
- r'TRANSACTION|TRANSFORMATION))\b',
- Keyword),
- (r'(FORM|PERFORM)(\s+)(\w+)',
- bygroups(Keyword, Whitespace, Name.Function)),
- (r'(PERFORM)(\s+)(\()(\w+)(\))',
- bygroups(Keyword, Whitespace, Punctuation, Name.Variable, Punctuation)),
- (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
- bygroups(Keyword, Whitespace, Name.Function, Whitespace, Keyword)),
-
- # method implementation
- (r'(METHOD)(\s+)([\w~]+)',
- bygroups(Keyword, Whitespace, Name.Function)),
- # method calls
- (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
- bygroups(Whitespace, Name.Variable, Operator, Name.Function)),
- # call methodnames returning style
- (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
-
- # text elements
- (r'(TEXT)(-)(\d{3})',
- bygroups(Keyword, Punctuation, Number.Integer)),
- (r'(TEXT)(-)(\w{3})',
- bygroups(Keyword, Punctuation, Name.Variable)),
-
- # keywords with dashes in them.
- # these need to be first, because for instance the -ID part
- # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
- # first in the list of keywords.
- (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
- r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
- r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
- r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
- r'FIELD-GROUPS|FIELD-SYMBOLS|FIELD-SYMBOL|FUNCTION-POOL|'
- r'INTERFACE-POOL|INVERTED-DATE|'
- r'LOAD-OF-PROGRAM|LOG-POINT|'
- r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
- r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
- r'OUTPUT-LENGTH|PRINT-CONTROL|'
- r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
- r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
- r'TYPE-POOL|TYPE-POOLS|NO-DISPLAY'
- r')\b', Keyword),
-
- # keyword combinations
- (r'(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
- r'(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
- r'(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|'
- r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
- r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
- r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
- r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
- r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
- r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
- r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
- r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
- r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
- r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
- r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
- r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
- r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
- r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
- r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
- r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
- r'FREE\s(MEMORY|OBJECT)?|'
- r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
- r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
- r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
- r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
- r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
- r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
- r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
- r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
- r'SKIP|ULINE)|'
- r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
- r'TO LIST-PROCESSING|TO TRANSACTION)'
- r'(ENDING|STARTING)\s+AT|'
- r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
- r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
- r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
- r'(BEGIN|END)\s+OF|'
- r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
- r'COMPARING(\s+ALL\s+FIELDS)?|'
- r'(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|'
- r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
- r'END-OF-(DEFINITION|PAGE|SELECTION)|'
- r'WITH\s+FRAME(\s+TITLE)|'
- r'(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|'
- r'MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|'
- r'(RESPECTING|IGNORING)\s+CASE|'
- r'IN\s+UPDATE\s+TASK|'
- r'(SOURCE|RESULT)\s+(XML)?|'
- r'REFERENCE\s+INTO|'
-
- # simple combinations
- r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
- r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
- r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
- r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
- r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
- r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
- r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b', Keyword),
-
- # single word keywords.
- (r'(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|'
- r'ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|'
- r'BACK|BLOCK|BREAK-POINT|'
- r'CASE|CAST|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|COND|CONV|'
- r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
- r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|'
- r'DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
- r'DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|'
- r'ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
- r'ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|'
- r'ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|'
- r'FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|'
- r'HIDE|'
- r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
- r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
- r'LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|'
- r'JOIN|'
- r'KEY|'
- r'NEW|NEXT|'
- r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|'
- r'NODES|NUMBER|'
- r'OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|'
- r'PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|'
- r'RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|'
- r'REF|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|'
- r'SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|'
- r'STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|SWITCH|'
- r'TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|'
- r'TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
- r'ULINE|UNDER|UNPACK|UPDATE|USING|'
- r'VALUE|VALUES|VIA|VARYING|VARY|'
- r'WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b', Keyword),
-
- # builtins
- (r'(abs|acos|asin|atan|'
- r'boolc|boolx|bit_set|'
- r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
- r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
- r'count|count_any_of|count_any_not_of|'
- r'dbmaxlen|distance|'
- r'escape|exp|'
- r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
- r'insert|'
- r'lines|log|log10|'
- r'match|matches|'
- r'nmax|nmin|numofchar|'
- r'repeat|replace|rescale|reverse|round|'
- r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
- r'substring|substring_after|substring_from|substring_before|substring_to|'
- r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
- r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
-
- (r'&[0-9]', Name),
- (r'[0-9]+', Number.Integer),
-
- # operators which look like variable names, matched before
- # variable names are parsed.
- (r'(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
- r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
- r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator.Word),
-
- include('variable-names'),
-
- # standard operators after variable names,
- # because < and > are part of field symbols.
- (r'[?*<>=\-+&]', Operator),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Single),
- (r"([|}])([^{}|]*?)([|{])",
- bygroups(Punctuation, String.Single, Punctuation)),
- (r'[/;:()\[\],.]', Punctuation),
- (r'(!)(\w+)', bygroups(Operator, Name)),
- ],
- }
-
-
-class OpenEdgeLexer(RegexLexer):
- """
- Lexer for `OpenEdge ABL (formerly Progress)
- <http://web.progress.com/en/openedge/abl.html>`_ source code.
-
- .. versionadded:: 1.5
- """
- name = 'OpenEdge ABL'
- aliases = ['openedge', 'abl', 'progress']
- filenames = ['*.p', '*.cls']
- mimetypes = ['text/x-openedge', 'application/x-openedge']
-
- types = (r'(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
- r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
- r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
- r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
- r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))')
-
- keywords = words(OPENEDGEKEYWORDS,
- prefix=r'(?i)(^|(?<=[^\w\-]))',
- suffix=r'\s*($|(?=[^\w\-]))')
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'\{', Comment.Preproc, 'preprocessor'),
- (r'\s*&.*', Comment.Preproc),
- (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
- (types, Keyword.Type),
- (keywords, Name.Builtin),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Whitespace),
- (r'[+*/=-]', Operator),
- (r'[.:()]', Punctuation),
- (r'.', Name.Variable), # Lazy catch-all
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'preprocessor': [
- (r'[^{}]', Comment.Preproc),
- (r'\{', Comment.Preproc, '#push'),
- (r'\}', Comment.Preproc, '#pop'),
- ],
- }
-
- def analyse_text(text):
- """Try to identify OpenEdge ABL based on a few common constructs."""
- result = 0
-
- if 'END.' in text:
- result += 0.05
-
- if 'END PROCEDURE.' in text:
- result += 0.05
-
- if 'ELSE DO:' in text:
- result += 0.05
-
- return result
-
-
-class GoodDataCLLexer(RegexLexer):
- """
- Lexer for `GoodData-CL
- <https://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
-com/gooddata/processor/COMMANDS.txt>`_
- script files.
-
- .. versionadded:: 1.4
- """
-
- name = 'GoodData-CL'
- aliases = ['gooddata-cl']
- filenames = ['*.gdc']
- mimetypes = ['text/x-gooddata-cl']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # Comments
- (r'#.*', Comment.Single),
- # Function call
- (r'[a-z]\w*', Name.Function),
- # Argument list
- (r'\(', Punctuation, 'args-list'),
- # Punctuation
- (r';', Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'args-list': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'[a-z]\w*', Name.Variable),
- (r'=', Operator),
- (r'"', String, 'string-literal'),
- (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
- # Space is not significant
- (r'\s', Whitespace)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', String, '#pop'),
- (r'[^\\"]+', String)
- ]
- }
-
-
-class MaqlLexer(RegexLexer):
- """
- Lexer for `GoodData MAQL
- <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
- scripts.
-
- .. versionadded:: 1.4
- """
-
- name = 'MAQL'
- aliases = ['maql']
- filenames = ['*.maql']
- mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # IDENTITY
- (r'IDENTIFIER\b', Name.Builtin),
- # IDENTIFIER
- (r'\{[^}]+\}', Name.Variable),
- # NUMBER
- (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
- # STRING
- (r'"', String, 'string-literal'),
- # RELATION
- (r'\<\>|\!\=', Operator),
- (r'\=|\>\=|\>|\<\=|\<', Operator),
- # :=
- (r'\:\=', Operator),
- # OBJECT
- (r'\[[^]]+\]', Name.Variable.Class),
- # keywords
- (words((
- 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
- 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
- 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
- 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
- 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
- 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'WHEN', 'NOT', 'ON',
- 'KEYS', 'KEY', 'FULLSET', 'PRIMARY', 'LABELS', 'LABEL',
- 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', 'ALTER', 'DROP',
- 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', 'DOUBLE', 'DATE',
- 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', 'DEFAULT', 'ORDER',
- 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', 'TEMPLATE', 'MODIFY'),
- suffix=r'\b'),
- Keyword),
- # FUNCNAME
- (r'[a-z]\w*\b', Name.Function),
- # Comments
- (r'#.*', Comment.Single),
- # Punctuation
- (r'[,;()]', Punctuation),
- # Space is not significant
- (r'\s+', Whitespace)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', String, '#pop'),
- (r'[^\\"]+', String)
- ],
- }
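For reference, a minimal usage sketch of the CobolLexer removed above, driven through the public Pygments API; the fixed-format COBOL snippet and the choice of TerminalFormatter are illustrative only:

from pygments import highlight
from pygments.lexers.business import CobolLexer
from pygments.formatters import TerminalFormatter

# Tiny fixed-format sample: code starts past column 7, as the
# CobolLexer's comment rule expects.
code = '       IDENTIFICATION DIVISION.\n       PROGRAM-ID. HELLO.\n'

# highlight() runs the lexer over the text and renders the resulting
# token stream with the given formatter (here: ANSI terminal colours).
print(highlight(code, CobolLexer(), TerminalFormatter()))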
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/c_cpp.py b/venv/lib/python3.11/site-packages/pygments/lexers/c_cpp.py
deleted file mode 100644
index ba6702f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/c_cpp.py
+++ /dev/null
@@ -1,411 +0,0 @@
-"""
- pygments.lexers.c_cpp
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for C/C++ languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, inherit, default, words
-from pygments.util import get_bool_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['CLexer', 'CppLexer']
-
-
-class CFamilyLexer(RegexLexer):
- """
- For C family source code. This is used as a base class to avoid repetitious
- definitions.
- """
-
- # The trailing ?, rather than *, avoids a geometric performance drop here.
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
-
- # Hexadecimal part in a hexadecimal integer/floating-point literal.
- # This includes matching of digit separators.
- _hexpart = r'[0-9a-fA-F](\'?[0-9a-fA-F])*'
- # Decimal part in a decimal integer/floating-point literal.
- # This includes matching of digit separators.
- _decpart = r'\d(\'?\d)*'
- # Integer literal suffix (e.g. 'ull' or 'll').
- _intsuffix = r'(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?'
-
- # Identifier regex with C and C++ Universal Character Name (UCN) support.
- _ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+'
- _namespaced_ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+'
-
- # Single and multiline comment regexes
- # Beware not to use *? for the inner content! When these regexes
- # are embedded in larger regexes, the lazy quantifier can match
- # more than it would have if the regex had been used
- # standalone ...
- _comment_single = r'//(?:.|(?<=\\)\n)*\n'
- _comment_multiline = r'/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/'
-
- # Regex to match optional comments
- _possible_comments = rf'\s*(?:(?:(?:{_comment_single})|(?:{_comment_multiline}))\s*)*'
-
- tokens = {
- 'whitespace': [
- # preprocessor directives: without whitespace
- (r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^(' + _ws1 + r')(#if\s+0)',
- bygroups(using(this), Comment.Preproc), 'if0'),
- ('^(' + _ws1 + ')(#)',
- bygroups(using(this), Comment.Preproc), 'macro'),
- # Labels:
- # Line start and possible indentation.
- (r'(^[ \t]*)'
- # Not followed by keywords which can be mistaken as labels.
- r'(?!(?:public|private|protected|default)\b)'
- # Actual label, followed by a single colon.
- r'(' + _ident + r')(\s*)(:)(?!:)',
- bygroups(Whitespace, Name.Label, Whitespace, Punctuation)),
- (r'\n', Whitespace),
- (r'[^\S\n]+', Whitespace),
- (r'\\\n', Text), # line continuation
- (_comment_single, Comment.Single),
- (_comment_multiline, Comment.Multiline),
- # Open until EOF, so no ending delimiter
- (r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
- ],
- 'statements': [
- include('keywords'),
- include('types'),
- (r'([LuU]|u8)?(")', bygroups(String.Affix, String), 'string'),
- (r"([LuU]|u8)?(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
- bygroups(String.Affix, String.Char, String.Char, String.Char)),
-
- # Hexadecimal floating-point literals (C11, C++17)
- (r'0[xX](' + _hexpart + r'\.' + _hexpart + r'|\.' + _hexpart +
- r'|' + _hexpart + r')[pP][+-]?' + _hexpart + r'[lL]?', Number.Float),
-
- (r'(-)?(' + _decpart + r'\.' + _decpart + r'|\.' + _decpart + r'|' +
- _decpart + r')[eE][+-]?' + _decpart + r'[fFlL]?', Number.Float),
- (r'(-)?((' + _decpart + r'\.(' + _decpart + r')?|\.' +
- _decpart + r')[fFlL]?)|(' + _decpart + r'[fFlL])', Number.Float),
- (r'(-)?0[xX]' + _hexpart + _intsuffix, Number.Hex),
- (r'(-)?0[bB][01](\'?[01])*' + _intsuffix, Number.Bin),
- (r'(-)?0(\'?[0-7])+' + _intsuffix, Number.Oct),
- (r'(-)?' + _decpart + _intsuffix, Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'(true|false|NULL)\b', Name.Builtin),
- (_ident, Name)
- ],
- 'types': [
- (words(('int8', 'int16', 'int32', 'int64', 'wchar_t'), prefix=r'__',
- suffix=r'\b'), Keyword.Reserved),
- (words(('bool', 'int', 'long', 'float', 'short', 'double', 'char',
- 'unsigned', 'signed', 'void', '_BitInt',
- '__int128'), suffix=r'\b'), Keyword.Type)
- ],
- 'keywords': [
- (r'(struct|union)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (r'case\b', Keyword, 'case-value'),
- (words(('asm', 'auto', 'break', 'const', 'continue', 'default',
- 'do', 'else', 'enum', 'extern', 'for', 'goto', 'if',
- 'register', 'restricted', 'return', 'sizeof', 'struct',
- 'static', 'switch', 'typedef', 'volatile', 'while', 'union',
- 'thread_local', 'alignas', 'alignof', 'static_assert', '_Pragma'),
- suffix=r'\b'), Keyword),
- (words(('inline', '_inline', '__inline', 'naked', 'restrict',
- 'thread'), suffix=r'\b'), Keyword.Reserved),
- # Vector intrinsics
- (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
- # Microsoft-isms
- (words((
- 'asm', 'based', 'except', 'stdcall', 'cdecl',
- 'fastcall', 'declspec', 'finally', 'try',
- 'leave', 'w64', 'unaligned', 'raise', 'noop',
- 'identifier', 'forceinline', 'assume'),
- prefix=r'__', suffix=r'\b'), Keyword.Reserved)
- ],
- 'root': [
- include('whitespace'),
- include('keywords'),
- # functions
- (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
- r'(' + _possible_comments + r')'
- r'(' + _namespaced_ident + r')' # method name
- r'(' + _possible_comments + r')'
- r'(\([^;"\')]*?\))' # signature
- r'(' + _possible_comments + r')'
- r'([^;{/"\']*)(\{)',
- bygroups(using(this), using(this, state='whitespace'),
- Name.Function, using(this, state='whitespace'),
- using(this), using(this, state='whitespace'),
- using(this), Punctuation),
- 'function'),
- # function declarations
- (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
- r'(' + _possible_comments + r')'
- r'(' + _namespaced_ident + r')' # method name
- r'(' + _possible_comments + r')'
- r'(\([^;"\')]*?\))' # signature
- r'(' + _possible_comments + r')'
- r'([^;/"\']*)(;)',
- bygroups(using(this), using(this, state='whitespace'),
- Name.Function, using(this, state='whitespace'),
- using(this), using(this, state='whitespace'),
- using(this), Punctuation)),
- include('types'),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- (r'\}', Punctuation),
- (r'[{;]', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'('+_ws1+r')(include)('+_ws1+r')("[^"]+")([^\n]*)',
- bygroups(using(this), Comment.Preproc, using(this),
- Comment.PreprocFile, Comment.Single)),
- (r'('+_ws1+r')(include)('+_ws1+r')(<[^>]+>)([^\n]*)',
- bygroups(using(this), Comment.Preproc, using(this),
- Comment.PreprocFile, Comment.Single)),
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'classname': [
- (_ident, Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- default('#pop')
- ],
- # Mark identifiers preceded by `case` keyword as constants.
- 'case-value': [
- (r'(?<!:)(:)(?!:)', Punctuation, '#pop'),
- (_ident, Name.Constant),
- include('whitespace'),
- include('statements'),
- ]
- }
-
- stdlib_types = {
- 'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
- 'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
- 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
- c99_types = {
- 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
- 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
- 'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
- 'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
- 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
- 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
- linux_types = {
- 'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
- 'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
- 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
- c11_atomic_types = {
- 'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
- 'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
- 'atomic_llong', 'atomic_ullong', 'atomic_char16_t', 'atomic_char32_t', 'atomic_wchar_t',
- 'atomic_int_least8_t', 'atomic_uint_least8_t', 'atomic_int_least16_t',
- 'atomic_uint_least16_t', 'atomic_int_least32_t', 'atomic_uint_least32_t',
- 'atomic_int_least64_t', 'atomic_uint_least64_t', 'atomic_int_fast8_t',
- 'atomic_uint_fast8_t', 'atomic_int_fast16_t', 'atomic_uint_fast16_t',
- 'atomic_int_fast32_t', 'atomic_uint_fast32_t', 'atomic_int_fast64_t',
- 'atomic_uint_fast64_t', 'atomic_intptr_t', 'atomic_uintptr_t', 'atomic_size_t',
- 'atomic_ptrdiff_t', 'atomic_intmax_t', 'atomic_uintmax_t'}
-
- def __init__(self, **options):
- self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
- self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
- self.c11highlighting = get_bool_opt(options, 'c11highlighting', True)
- self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name:
- if self.stdlibhighlighting and value in self.stdlib_types:
- token = Keyword.Type
- elif self.c99highlighting and value in self.c99_types:
- token = Keyword.Type
- elif self.c11highlighting and value in self.c11_atomic_types:
- token = Keyword.Type
- elif self.platformhighlighting and value in self.linux_types:
- token = Keyword.Type
- yield index, token, value
-
-
-class CLexer(CFamilyLexer):
- """
- For C source code with preprocessor directives.
-
- Additional options accepted:
-
- `stdlibhighlighting`
- Highlight common types found in the C/C++ standard library (e.g. `size_t`).
- (default: ``True``).
-
- `c99highlighting`
- Highlight common types found in the C99 standard library (e.g. `int8_t`).
- Actually, this includes all fixed-width integer types.
- (default: ``True``).
-
- `c11highlighting`
- Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
- (default: ``True``).
-
- `platformhighlighting`
- Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
- (default: ``True``).
- """
- name = 'C'
- aliases = ['c']
- filenames = ['*.c', '*.h', '*.idc', '*.x[bp]m']
- mimetypes = ['text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap']
- priority = 0.1
-
- tokens = {
- 'keywords': [
- (words((
- '_Alignas', '_Alignof', '_Noreturn', '_Generic', '_Thread_local',
- '_Static_assert', '_Imaginary', 'noreturn', 'imaginary', 'complex'),
- suffix=r'\b'), Keyword),
- inherit
- ],
- 'types': [
- (words(('_Bool', '_Complex', '_Atomic'), suffix=r'\b'), Keyword.Type),
- inherit
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*#include [<"]', text, re.MULTILINE):
- return 0.1
- if re.search(r'^\s*#ifn?def ', text, re.MULTILINE):
- return 0.1
-
-
-class CppLexer(CFamilyLexer):
- """
- For C++ source code with preprocessor directives.
-
- Additional options accepted:
-
- `stdlibhighlighting`
- Highlight common types found in the C/C++ standard library (e.g. `size_t`).
- (default: ``True``).
-
- `c99highlighting`
- Highlight common types found in the C99 standard library (e.g. `int8_t`).
- Actually, this includes all fixed-width integer types.
- (default: ``True``).
-
- `c11highlighting`
- Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
- (default: ``True``).
-
- `platformhighlighting`
- Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
- (default: ``True``).
- """
- name = 'C++'
- url = 'https://isocpp.org/'
- aliases = ['cpp', 'c++']
- filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
- '*.cc', '*.hh', '*.cxx', '*.hxx',
- '*.C', '*.H', '*.cp', '*.CPP', '*.tpp']
- mimetypes = ['text/x-c++hdr', 'text/x-c++src']
- priority = 0.1
-
- tokens = {
- 'statements': [
- # C++11 raw strings
- (r'((?:[LuU]|u8)?R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
- bygroups(String.Affix, String, String.Delimiter, String.Delimiter,
- String, String.Delimiter, String)),
- inherit,
- ],
- 'root': [
- inherit,
- # C++ Microsoft-isms
- (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
- 'multiple_inheritance', 'interface', 'event'),
- prefix=r'__', suffix=r'\b'), Keyword.Reserved),
- # Offload C++ extensions, http://offload.codeplay.com/
- (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
- ],
- 'enumname': [
- include('whitespace'),
- # 'enum class' and 'enum struct' C++11 support
- (words(('class', 'struct'), suffix=r'\b'), Keyword),
- (CFamilyLexer._ident, Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- default('#pop')
- ],
- 'keywords': [
- (r'(class|concept|typename)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (words((
- 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
- 'export', 'friend', 'mutable', 'new', 'operator',
- 'private', 'protected', 'public', 'reinterpret_cast', 'class',
- '__restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
- 'try', 'typeid', 'using', 'virtual', 'constexpr', 'nullptr', 'concept',
- 'decltype', 'noexcept', 'override', 'final', 'constinit', 'consteval',
- 'co_await', 'co_return', 'co_yield', 'requires', 'import', 'module',
- 'typename', 'and', 'and_eq', 'bitand', 'bitor', 'compl', 'not',
- 'not_eq', 'or', 'or_eq', 'xor', 'xor_eq'),
- suffix=r'\b'), Keyword),
- (r'namespace\b', Keyword, 'namespace'),
- (r'(enum)(\s+)', bygroups(Keyword, Whitespace), 'enumname'),
- inherit
- ],
- 'types': [
- (r'char(16_t|32_t|8_t)\b', Keyword.Type),
- inherit
- ],
- 'namespace': [
- (r'[;{]', Punctuation, ('#pop', 'root')),
- (r'inline\b', Keyword.Reserved),
- (CFamilyLexer._ident, Name.Namespace),
- include('statement')
- ]
- }
-
- def analyse_text(text):
- if re.search('#include <[a-z_]+>', text):
- return 0.2
- if re.search('using namespace ', text):
- return 0.4
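As the CLexer/CppLexer docstrings above note, the stdlib/c99/c11/platform highlighting switches are ordinary lexer options read in CFamilyLexer.__init__; a small sketch of toggling one and inspecting the resulting token types (the one-line C snippet is illustrative):

from pygments.lexers.c_cpp import CLexer
from pygments.token import Keyword, Name

code = 'size_t n = 0;\n'

# With stdlibhighlighting enabled (the default), size_t is promoted
# to Keyword.Type by CFamilyLexer.get_tokens_unprocessed().
default_tokens = list(CLexer().get_tokens(code))

# With the option disabled, size_t stays a plain Name token.
plain_tokens = list(CLexer(stdlibhighlighting=False).get_tokens(code))

assert (Keyword.Type, 'size_t') in default_tokens
assert (Name, 'size_t') in plain_tokens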
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/c_like.py b/venv/lib/python3.11/site-packages/pygments/lexers/c_like.py
deleted file mode 100644
index a7379c9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/c_like.py
+++ /dev/null
@@ -1,666 +0,0 @@
-"""
- pygments.lexers.c_like
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for other C-like languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
- default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers import _mql_builtins
-
-__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
- 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer',
- 'OmgIdlLexer']
-
-
-class PikeLexer(CppLexer):
- """
- For `Pike <http://pike.lysator.liu.se/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Pike'
- aliases = ['pike']
- filenames = ['*.pike', '*.pmod']
- mimetypes = ['text/x-pike']
-
- tokens = {
- 'statements': [
- (words((
- 'catch', 'new', 'private', 'protected', 'public', 'gauge',
- 'throw', 'throws', 'class', 'interface', 'implement', 'abstract',
- 'extends', 'from', 'this', 'super', 'constant', 'final', 'static',
- 'import', 'use', 'extern', 'inline', 'proto', 'break', 'continue',
- 'if', 'else', 'for', 'while', 'do', 'switch', 'case', 'as', 'in',
- 'version', 'return', 'true', 'false', 'null',
- '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__',
- '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__',
- '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__',
- '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'),
- Keyword),
- (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
- r'array|multiset|program|function|lambda|mixed|'
- r'[a-z_][a-z0-9_]*_t)\b',
- Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (r'[~!%^&*+=|?:<>/@-]', Operator),
- inherit,
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Whitespace, '#pop'),
- ],
- }
-
-
-class NesCLexer(CLexer):
- """
- For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
- directives.
-
- .. versionadded:: 2.0
- """
- name = 'nesC'
- aliases = ['nesc']
- filenames = ['*.nc']
- mimetypes = ['text/x-nescsrc']
-
- tokens = {
- 'statements': [
- (words((
- 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component',
- 'components', 'configuration', 'event', 'extends', 'generic',
- 'implementation', 'includes', 'interface', 'module', 'new', 'norace',
- 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'),
- Keyword),
- (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t',
- 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t',
- 'nx_uint64_t'), suffix=r'\b'),
- Keyword.Type),
- inherit,
- ],
- }
-
-
-class ClayLexer(RegexLexer):
- """
- For `Clay <http://claylabs.com/clay/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Clay'
- filenames = ['*.clay']
- aliases = ['clay']
- mimetypes = ['text/x-clay']
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'//.*?$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\b(public|private|import|as|record|variant|instance'
- r'|define|overload|default|external|alias'
- r'|rvalue|ref|forward|inline|noinline|forceinline'
- r'|enum|var|and|or|not|if|else|goto|return|while'
- r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
- r'|finally|onerror|staticassert|eval|when|newtype'
- r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
- r')\b', Keyword),
- (r'[~!%^&*+=|:<>/-]', Operator),
- (r'[#(){}\[\],;.]', Punctuation),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\b(true|false)\b', Name.Builtin),
- (r'(?i)[a-z_?][\w?]*', Name),
- (r'"""', String, 'tdqs'),
- (r'"', String, 'dqs'),
- ],
- 'strings': [
- (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
- (r'[^\\"]+', String),
- ],
- 'nl': [
- (r'\n', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings'),
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl'),
- ],
- }
-
-
-class ECLexer(CLexer):
- """
- For eC source code with preprocessor directives.
-
- .. versionadded:: 1.5
- """
- name = 'eC'
- aliases = ['ec']
- filenames = ['*.ec', '*.eh']
- mimetypes = ['text/x-echdr', 'text/x-ecsrc']
-
- tokens = {
- 'statements': [
- (words((
- 'virtual', 'class', 'private', 'public', 'property', 'import',
- 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get',
- 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass',
- '__on_register_module', 'namespace', 'using', 'typed_object',
- 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers',
- 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset',
- 'class_default_property', 'property_category', 'class_data',
- 'class_property', 'thisclass', 'dbtable', 'dbindex',
- 'database_open', 'dbfield'), suffix=r'\b'), Keyword),
- (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte',
- 'unichar', 'int64'), suffix=r'\b'),
- Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (r'(null|value|this)\b', Name.Builtin),
- inherit,
- ]
- }
-
-
-class ValaLexer(RegexLexer):
- """
- For Vala source code with preprocessor directives.
-
- .. versionadded:: 1.1
- """
- name = 'Vala'
- aliases = ['vala', 'vapi']
- filenames = ['*.vala', '*.vapi']
- mimetypes = ['text/x-vala']
-
- tokens = {
- 'whitespace': [
- (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- (r'[L@]?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'(?s)""".*?"""', String), # verbatim strings
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation)),
- # TODO: "correctly" parse complex code attributes
- (r'(\[)(CCode|(?:Integer|Floating)Type)',
- bygroups(Punctuation, Name.Decorator)),
- (r'[()\[\],.]', Punctuation),
- (words((
- 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue',
- 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for',
- 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params',
- 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try',
- 'typeof', 'while', 'yield'), suffix=r'\b'),
- Keyword),
- (words((
- 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern',
- 'inline', 'internal', 'override', 'owned', 'private', 'protected',
- 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned',
- 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'),
- Keyword.Declaration),
- (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Whitespace),
- 'namespace'),
- (r'(class|errordomain|interface|struct)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'class'),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- # void is an actual keyword, others are in glib-2.0.vapi
- (words((
- 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16',
- 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string',
- 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'ulong', 'unichar', 'ushort'), suffix=r'\b'),
- Keyword.Type),
- (r'(true|false|null)\b', Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'namespace': [
- (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
- ],
- }
-
-
-class CudaLexer(CLexer):
- """
- For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
- source.
-
- .. versionadded:: 1.6
- """
- name = 'CUDA'
- filenames = ['*.cu', '*.cuh']
- aliases = ['cuda', 'cu']
- mimetypes = ['text/x-cuda']
-
- function_qualifiers = {'__device__', '__global__', '__host__',
- '__noinline__', '__forceinline__'}
- variable_qualifiers = {'__device__', '__constant__', '__shared__',
- '__restrict__'}
- vector_types = {'char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
- 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
- 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
- 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
- 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
- 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
- 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
- 'double1', 'double2', 'dim3'}
- variables = {'gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'}
- functions = {'__threadfence_block', '__threadfence', '__threadfence_system',
- '__syncthreads', '__syncthreads_count', '__syncthreads_and',
- '__syncthreads_or'}
- execution_confs = {'<<<', '>>>'}
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- for index, token, value in CLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name:
- if value in self.variable_qualifiers:
- token = Keyword.Type
- elif value in self.vector_types:
- token = Keyword.Type
- elif value in self.variables:
- token = Name.Builtin
- elif value in self.execution_confs:
- token = Keyword.Pseudo
- elif value in self.function_qualifiers:
- token = Keyword.Reserved
- elif value in self.functions:
- token = Name.Function
- yield index, token, value
-
-
-class SwigLexer(CppLexer):
- """
- For `SWIG <http://www.swig.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'SWIG'
- aliases = ['swig']
- filenames = ['*.swg', '*.i']
- mimetypes = ['text/swig']
- priority = 0.04 # Lower than C/C++ and Objective C/C++
-
- tokens = {
- 'root': [
- # Match it here so it won't be matched as a function in the rest of root
- (r'\$\**\&?\w+', Name),
- inherit
- ],
- 'statements': [
- # SWIG directives
- (r'(%[a-z_][a-z0-9_]*)', Name.Function),
- # Special variables
- (r'\$\**\&?\w+', Name),
- # Stringification / additional preprocessor directives
- (r'##*[a-zA-Z_]\w*', Comment.Preproc),
- inherit,
- ],
- }
-
- # This is a far from complete set of SWIG directives
- swig_directives = {
- # Most common directives
- '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
- '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
- '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
- '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
- # Less common directives
- '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
- '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
- '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
- '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
- '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
- '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
- '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
- '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
- '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
- '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
- '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
- '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
- '%warnfilter'}
-
- def analyse_text(text):
- rv = 0
- # Search for SWIG directives, which are conventionally at the beginning of
- # a line. The probability of them being within a line is low, so let another
- # lexer win in this case.
- matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
- for m in matches:
- if m in SwigLexer.swig_directives:
- rv = 0.98
- break
- else:
- rv = 0.91 # Fraction higher than MatlabLexer
- return rv
-
-
-class MqlLexer(CppLexer):
- """
- For `MQL4 <http://docs.mql4.com/>`_ and
- `MQL5 <http://www.mql5.com/en/docs>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'MQL'
- aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
- filenames = ['*.mq4', '*.mq5', '*.mqh']
- mimetypes = ['text/x-mql']
-
- tokens = {
- 'statements': [
- (words(_mql_builtins.keywords, suffix=r'\b'), Keyword),
- (words(_mql_builtins.c_types, suffix=r'\b'), Keyword.Type),
- (words(_mql_builtins.types, suffix=r'\b'), Name.Function),
- (words(_mql_builtins.constants, suffix=r'\b'), Name.Constant),
- (words(_mql_builtins.colors, prefix='(clr)?', suffix=r'\b'),
- Name.Constant),
- inherit,
- ],
- }
-
-
-class ArduinoLexer(CppLexer):
- """
- For `Arduino(tm) <https://arduino.cc/>`_ source.
-
- This is an extension of the CppLexer, as the Arduino® language is a superset
- of C++.
-
- .. versionadded:: 2.1
- """
-
- name = 'Arduino'
- aliases = ['arduino']
- filenames = ['*.ino']
- mimetypes = ['text/x-arduino']
-
- # Main structure functions of an Arduino sketch
- structure = {'setup', 'loop'}
-
- # Language operators
- operators = {'not', 'or', 'and', 'xor'}
-
- # Language 'variables'
- variables = {
- 'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL',
- 'REPORT_ANALOG', 'INPUT_PULLUP', 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET',
- 'LED_BUILTIN', 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL', 'HIGH',
- 'LOW', 'INPUT', 'OUTPUT', 'INPUT_PULLUP', 'LED_BUILTIN', 'true', 'false',
- 'void', 'boolean', 'char', 'unsigned char', 'byte', 'int', 'unsigned int',
- 'word', 'long', 'unsigned long', 'short', 'float', 'double', 'string', 'String',
- 'array', 'static', 'volatile', 'const', 'boolean', 'byte', 'word', 'string',
- 'String', 'array', 'int', 'float', 'private', 'char', 'virtual', 'operator',
- 'sizeof', 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int8_t', 'int16_t',
- 'int32_t', 'int64_t', 'dynamic_cast', 'typedef', 'const_cast', 'const',
- 'struct', 'static_cast', 'union', 'unsigned', 'long', 'volatile', 'static',
- 'protected', 'bool', 'public', 'friend', 'auto', 'void', 'enum', 'extern',
- 'class', 'short', 'reinterpret_cast', 'double', 'register', 'explicit',
- 'signed', 'inline', 'delete', '_Bool', 'complex', '_Complex', '_Imaginary',
- 'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
- 'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
- 'atomic_llong', 'atomic_ullong', 'PROGMEM'}
-
- # Functions and classes shipped with the language
- functions = {
- 'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer',
- 'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall',
- 'EthernetUDP', 'EsploraTFT', 'HttpClient', 'RobotMotor', 'WiFiClient',
- 'GSMScanner', 'FileSystem', 'Scheduler', 'GSMServer', 'YunClient', 'YunServer',
- 'IPAddress', 'GSMClient', 'GSMModem', 'Keyboard', 'Ethernet', 'Console',
- 'GSMBand', 'Esplora', 'Stepper', 'Process', 'WiFiUDP', 'GSM_SMS', 'Mailbox',
- 'USBHost', 'Firmata', 'PImage', 'Client', 'Server', 'GSMPIN', 'FileIO',
- 'Bridge', 'Serial', 'EEPROM', 'Stream', 'Mouse', 'Audio', 'Servo', 'File',
- 'Task', 'GPRS', 'WiFi', 'Wire', 'TFT', 'GSM', 'SPI', 'SD',
- 'runShellCommandAsynchronously', 'analogWriteResolution',
- 'retrieveCallingNumber', 'printFirmwareVersion', 'analogReadResolution',
- 'sendDigitalPortPair', 'noListenOnLocalhost', 'readJoystickButton',
- 'setFirmwareVersion', 'readJoystickSwitch', 'scrollDisplayRight',
- 'getVoiceCallStatus', 'scrollDisplayLeft', 'writeMicroseconds',
- 'delayMicroseconds', 'beginTransmission', 'getSignalStrength',
- 'runAsynchronously', 'getAsynchronously', 'listenOnLocalhost',
- 'getCurrentCarrier', 'readAccelerometer', 'messageAvailable',
- 'sendDigitalPorts', 'lineFollowConfig', 'countryNameWrite', 'runShellCommand',
- 'readStringUntil', 'rewindDirectory', 'readTemperature', 'setClockDivider',
- 'readLightSensor', 'endTransmission', 'analogReference', 'detachInterrupt',
- 'countryNameRead', 'attachInterrupt', 'encryptionType', 'readBytesUntil',
- 'robotNameWrite', 'readMicrophone', 'robotNameRead', 'cityNameWrite',
- 'userNameWrite', 'readJoystickY', 'readJoystickX', 'mouseReleased',
- 'openNextFile', 'scanNetworks', 'noInterrupts', 'digitalWrite', 'beginSpeaker',
- 'mousePressed', 'isActionDone', 'mouseDragged', 'displayLogos', 'noAutoscroll',
- 'addParameter', 'remoteNumber', 'getModifiers', 'keyboardRead', 'userNameRead',
- 'waitContinue', 'processInput', 'parseCommand', 'printVersion', 'readNetworks',
- 'writeMessage', 'blinkVersion', 'cityNameRead', 'readMessage', 'setDataMode',
- 'parsePacket', 'isListening', 'setBitOrder', 'beginPacket', 'isDirectory',
- 'motorsWrite', 'drawCompass', 'digitalRead', 'clearScreen', 'serialEvent',
- 'rightToLeft', 'setTextSize', 'leftToRight', 'requestFrom', 'keyReleased',
- 'compassRead', 'analogWrite', 'interrupts', 'WiFiServer', 'disconnect',
- 'playMelody', 'parseFloat', 'autoscroll', 'getPINUsed', 'setPINUsed',
- 'setTimeout', 'sendAnalog', 'readSlider', 'analogRead', 'beginWrite',
- 'createChar', 'motorsStop', 'keyPressed', 'tempoWrite', 'readButton',
- 'subnetMask', 'debugPrint', 'macAddress', 'writeGreen', 'randomSeed',
- 'attachGPRS', 'readString', 'sendString', 'remotePort', 'releaseAll',
- 'mouseMoved', 'background', 'getXChange', 'getYChange', 'answerCall',
- 'getResult', 'voiceCall', 'endPacket', 'constrain', 'getSocket', 'writeJSON',
- 'getButton', 'available', 'connected', 'findUntil', 'readBytes', 'exitValue',
- 'readGreen', 'writeBlue', 'startLoop', 'IPAddress', 'isPressed', 'sendSysex',
- 'pauseMode', 'gatewayIP', 'setCursor', 'getOemKey', 'tuneWrite', 'noDisplay',
- 'loadImage', 'switchPIN', 'onRequest', 'onReceive', 'changePIN', 'playFile',
- 'noBuffer', 'parseInt', 'overflow', 'checkPIN', 'knobRead', 'beginTFT',
- 'bitClear', 'updateIR', 'bitWrite', 'position', 'writeRGB', 'highByte',
- 'writeRed', 'setSpeed', 'readBlue', 'noStroke', 'remoteIP', 'transfer',
- 'shutdown', 'hangCall', 'beginSMS', 'endWrite', 'attached', 'maintain',
- 'noCursor', 'checkReg', 'checkPUK', 'shiftOut', 'isValid', 'shiftIn', 'pulseIn',
- 'connect', 'println', 'localIP', 'pinMode', 'getIMEI', 'display', 'noBlink',
- 'process', 'getBand', 'running', 'beginSD', 'drawBMP', 'lowByte', 'setBand',
- 'release', 'bitRead', 'prepare', 'pointTo', 'readRed', 'setMode', 'noFill',
- 'remove', 'listen', 'stroke', 'detach', 'attach', 'noTone', 'exists', 'buffer',
- 'height', 'bitSet', 'circle', 'config', 'cursor', 'random', 'IRread', 'setDNS',
- 'endSMS', 'getKey', 'micros', 'millis', 'begin', 'print', 'write', 'ready',
- 'flush', 'width', 'isPIN', 'blink', 'clear', 'press', 'mkdir', 'rmdir', 'close',
- 'point', 'yield', 'image', 'BSSID', 'click', 'delay', 'read', 'text', 'move',
- 'peek', 'beep', 'rect', 'line', 'open', 'seek', 'fill', 'size', 'turn', 'stop',
- 'home', 'find', 'step', 'tone', 'sqrt', 'RSSI', 'SSID', 'end', 'bit', 'tan',
- 'cos', 'sin', 'pow', 'map', 'abs', 'max', 'min', 'get', 'run', 'put',
- 'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit',
- 'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase',
- 'isHexadecimalDigit'}
-
- # do not highlight
- suppress_highlight = {
- 'namespace', 'template', 'mutable', 'using', 'asm', 'typeid',
- 'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept',
- 'static_assert', 'thread_local', 'restrict'}
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- for index, token, value in CppLexer.get_tokens_unprocessed(self, text, stack):
- if value in self.structure:
- yield index, Name.Builtin, value
- elif value in self.operators:
- yield index, Operator, value
- elif value in self.variables:
- yield index, Keyword.Reserved, value
- elif value in self.suppress_highlight:
- yield index, Name, value
- elif value in self.functions:
- yield index, Name.Function, value
- else:
- yield index, token, value
-
-
-class CharmciLexer(CppLexer):
- """
- For `Charm++ <https://charm.cs.illinois.edu>`_ interface files (.ci).
-
- .. versionadded:: 2.4
- """
-
- name = 'Charmci'
- aliases = ['charmci']
- filenames = ['*.ci']
-
- mimetypes = []
-
- tokens = {
- 'keywords': [
- (r'(module)(\s+)', bygroups(Keyword, Text), 'classname'),
- (words(('mainmodule', 'mainchare', 'chare', 'array', 'group',
- 'nodegroup', 'message', 'conditional')), Keyword),
- (words(('entry', 'aggregate', 'threaded', 'sync', 'exclusive',
- 'nokeep', 'notrace', 'immediate', 'expedited', 'inline',
- 'local', 'python', 'accel', 'readwrite', 'writeonly',
- 'accelblock', 'memcritical', 'packed', 'varsize',
- 'initproc', 'initnode', 'initcall', 'stacksize',
- 'createhere', 'createhome', 'reductiontarget', 'iget',
- 'nocopy', 'mutable', 'migratable', 'readonly')), Keyword),
- inherit,
- ],
- }
-
-
-class OmgIdlLexer(CLexer):
- """
- Lexer for Object Management Group Interface Definition Language.
-
- .. versionadded:: 2.9
- """
-
- name = 'OMG Interface Definition Language'
- url = 'https://www.omg.org/spec/IDL/About-IDL/'
- aliases = ['omg-idl']
- filenames = ['*.idl', '*.pidl']
- mimetypes = []
-
- scoped_name = r'((::)?\w+)+'
-
- tokens = {
- 'values': [
- (words(('true', 'false'), prefix=r'(?i)', suffix=r'\b'), Number),
- (r'([Ll]?)(")', bygroups(String.Affix, String.Double), 'string'),
- (r'([Ll]?)(\')(\\[^\']+)(\')',
- bygroups(String.Affix, String.Char, String.Escape, String.Char)),
- (r'([Ll]?)(\')(\\\')(\')',
- bygroups(String.Affix, String.Char, String.Escape, String.Char)),
- (r'([Ll]?)(\'.\')', bygroups(String.Affix, String.Char)),
- (r'[+-]?\d+(\.\d*)?[Ee][+-]?\d+', Number.Float),
- (r'[+-]?(\d+\.\d*)|(\d*\.\d+)([Ee][+-]?\d+)?', Number.Float),
- (r'(?i)[+-]?0x[0-9a-f]+', Number.Hex),
- (r'[+-]?[1-9]\d*', Number.Integer),
- (r'[+-]?0[0-7]*', Number.Oct),
- (r'[\+\-\*\/%^&\|~]', Operator),
- (words(('<<', '>>')), Operator),
- (scoped_name, Name),
- (r'[{};:,<>\[\]]', Punctuation),
- ],
- 'annotation_params': [
- include('whitespace'),
- (r'\(', Punctuation, '#push'),
- include('values'),
- (r'=', Punctuation),
- (r'\)', Punctuation, '#pop'),
- ],
- 'annotation_params_maybe': [
- (r'\(', Punctuation, 'annotation_params'),
- include('whitespace'),
- default('#pop'),
- ],
- 'annotation_appl': [
- (r'@' + scoped_name, Name.Decorator, 'annotation_params_maybe'),
- ],
- 'enum': [
- include('whitespace'),
- (r'[{,]', Punctuation),
- (r'\w+', Name.Constant),
- include('annotation_appl'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- (words((
- 'typedef', 'const',
- 'in', 'out', 'inout', 'local',
- ), prefix=r'(?i)', suffix=r'\b'), Keyword.Declaration),
- (words((
- 'void', 'any', 'native', 'bitfield',
- 'unsigned', 'boolean', 'char', 'wchar', 'octet', 'short', 'long',
- 'int8', 'uint8', 'int16', 'int32', 'int64', 'uint16', 'uint32', 'uint64',
- 'float', 'double', 'fixed',
- 'sequence', 'string', 'wstring', 'map',
- ), prefix=r'(?i)', suffix=r'\b'), Keyword.Type),
- (words((
- '@annotation', 'struct', 'union', 'bitset', 'interface',
- 'exception', 'valuetype', 'eventtype', 'component',
- ), prefix=r'(?i)', suffix=r'(\s+)(\w+)'), bygroups(Keyword, Whitespace, Name.Class)),
- (words((
- 'abstract', 'alias', 'attribute', 'case', 'connector',
- 'consumes', 'context', 'custom', 'default', 'emits', 'factory',
- 'finder', 'getraises', 'home', 'import', 'manages', 'mirrorport',
- 'multiple', 'Object', 'oneway', 'primarykey', 'private', 'port',
- 'porttype', 'provides', 'public', 'publishes', 'raises',
- 'readonly', 'setraises', 'supports', 'switch', 'truncatable',
- 'typeid', 'typename', 'typeprefix', 'uses', 'ValueBase',
- ), prefix=r'(?i)', suffix=r'\b'), Keyword),
- (r'(?i)(enum|bitmask)(\s+)(\w+)',
- bygroups(Keyword, Whitespace, Name.Class), 'enum'),
- (r'(?i)(module)(\s+)(\w+)',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- (r'(\w+)(\s*)(=)', bygroups(Name.Constant, Whitespace, Operator)),
- (r'[\(\)]', Punctuation),
- include('values'),
- include('annotation_appl'),
- ],
- }
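Note (not part of the diff): the Arduino-style and Charmci lexers removed above share one technique, subclassing an existing lexer and re-tagging its token stream in get_tokens_unprocessed. A minimal sketch of the same pattern using the public pygments API; the class name and word set below are hypothetical, not taken from the removed code:

from pygments.lexers.c_cpp import CppLexer
from pygments.token import Name

class MyBoardLexer(CppLexer):
    """Hypothetical C++ dialect lexer: promote a few known identifiers to builtins."""
    name = 'MyBoard'
    aliases = ['myboard']
    EXTRA_BUILTINS = {'digitalWrite', 'analogRead'}

    def get_tokens_unprocessed(self, text, stack=('root',)):
        # Delegate the real work to CppLexer, then re-tag selected identifiers.
        for index, token, value in CppLexer.get_tokens_unprocessed(self, text, stack):
            if value in self.EXTRA_BUILTINS:
                yield index, Name.Builtin, value
            else:
                yield index, token, value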
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/capnproto.py b/venv/lib/python3.11/site-packages/pygments/lexers/capnproto.py
deleted file mode 100644
index 04237f7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/capnproto.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
- pygments.lexers.capnproto
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Cap'n Proto schema language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, default
-from pygments.token import Text, Comment, Keyword, Name, Literal, Whitespace
-
-__all__ = ['CapnProtoLexer']
-
-
-class CapnProtoLexer(RegexLexer):
- """
- For Cap'n Proto source.
-
- .. versionadded:: 2.2
- """
- name = 'Cap\'n Proto'
- url = 'https://capnproto.org'
- filenames = ['*.capnp']
- aliases = ['capnp']
-
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- (r'@[0-9a-zA-Z]*', Name.Decorator),
- (r'=', Literal, 'expression'),
- (r':', Name.Class, 'type'),
- (r'\$', Name.Attribute, 'annotation'),
- (r'(struct|enum|interface|union|import|using|const|annotation|'
- r'extends|in|of|on|as|with|from|fixed)\b',
- Keyword),
- (r'[\w.]+', Name),
- (r'[^#@=:$\w\s]+', Text),
- (r'\s+', Whitespace),
- ],
- 'type': [
- (r'[^][=;,(){}$]+', Name.Class),
- (r'[\[(]', Name.Class, 'parentype'),
- default('#pop'),
- ],
- 'parentype': [
- (r'[^][;()]+', Name.Class),
- (r'[\[(]', Name.Class, '#push'),
- (r'[])]', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'expression': [
- (r'[^][;,(){}$]+', Literal),
- (r'[\[(]', Literal, 'parenexp'),
- default('#pop'),
- ],
- 'parenexp': [
- (r'[^][;()]+', Literal),
- (r'[\[(]', Literal, '#push'),
- (r'[])]', Literal, '#pop'),
- default('#pop'),
- ],
- 'annotation': [
- (r'[^][;,(){}=:]+', Name.Attribute),
- (r'[\[(]', Name.Attribute, 'annexp'),
- default('#pop'),
- ],
- 'annexp': [
- (r'[^][;()]+', Name.Attribute),
- (r'[\[(]', Name.Attribute, '#push'),
- (r'[])]', Name.Attribute, '#pop'),
- default('#pop'),
- ],
- }
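For reference, the removed CapnProtoLexer is reachable through the normal pygments entry points; a minimal usage sketch, assuming a pygments install is importable (the schema snippet is only an example):

from pygments import highlight
from pygments.lexers import CapnProtoLexer   # alias: 'capnp'
from pygments.formatters import TerminalFormatter

schema = 'struct Person {\n  name @0 :Text;\n}\n'
print(highlight(schema, CapnProtoLexer(), TerminalFormatter()))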
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/carbon.py b/venv/lib/python3.11/site-packages/pygments/lexers/carbon.py
deleted file mode 100644
index 758e8af..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/carbon.py
+++ /dev/null
@@ -1,96 +0,0 @@
-"""
- pygments.lexers.carbon
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Carbon programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['CarbonLexer']
-
-
-class CarbonLexer(RegexLexer):
- """
- For Carbon source.
-
- .. versionadded:: 2.15
- """
- name = 'Carbon'
- url = 'https://github.com/carbon-language/carbon-lang'
- filenames = ['*.carbon']
- aliases = ['carbon']
- mimetypes = ['text/x-carbon']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'\\\n', Text),
- # comments
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*].*?[*](\\\n)?/', Comment.Multiline),
- # Declaration
- (r'(package|import|api|namespace|library)\b', Keyword.Namespace),
- (r'(abstract|alias|fn|class|interface|let|var|virtual|external|'
- r'base|addr|extends|choice|constraint|impl)\b', Keyword.Declaration),
- # Keywords
- (words(('as', 'or', 'not', 'and', 'break', 'continue', 'case',
- 'default', 'if', 'else', 'destructor', 'for', 'forall',
- 'while', 'where', 'then', 'in', 'is', 'return', 'returned',
- 'friend', 'partial', 'private', 'protected', 'observe', 'Self',
- 'override', 'final', 'match', 'type', 'like'), suffix=r'\b'), Keyword),
- (r'(self)\b', Keyword.Pseudo),
- (r'(true|false)\b', Keyword.Constant),
- (r'(auto|bool|string|i8|i16|i32|i64|u8|u16|u32|u64|'
- r'f8|f16|f32|f64)\b', Keyword.Type),
- # numeric literals
- (r'[0-9]*[.][0-9]+', Number.Double),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- # string literal
- (r'"(\\.|[^"\\])*"', String),
- # char literal
- (r'\'(\\.|[^\'\\])\'', String.Char),
- # tokens
- (r'<<=|>>=|<<|>>|<=|>=|\+=|-=|\*=|/=|\%=|\|=|&=|\^=|&&|\|\||&|\||'
- r'\+\+|--|\%|\^|\~|==|!=|::|[.]{3}|->|=>|[+\-*/&]', Operator),
- (r'[|<>=!()\[\]{}.,;:\?]', Punctuation),
- # identifiers
- (r'[^\W\d]\w*', Name.Other),
- ]
- }
-
- def analyse_text(text):
- result = 0
- if 'forall' in text:
- result += 0.1
- if 'type' in text:
- result += 0.1
- if 'Self' in text:
- result += 0.1
- if 'observe' in text:
- result += 0.1
- if 'package' in text:
- result += 0.1
- if 'library' in text:
- result += 0.1
- if 'choice' in text:
- result += 0.1
- if 'addr' in text:
- result += 0.1
- if 'constraint' in text:
- result += 0.1
- if 'impl' in text:
- result += 0.1
- return result
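The analyse_text hook above returns an additive score that guess_lexer compares across all registered lexers. A quick way to see the score for a sample input (the snippet is illustrative):

from pygments.lexers.carbon import CarbonLexer

sample = 'package Widgets api;\nchoice Shape { Circle, Square }\n'
# 'package' and 'choice' each add 0.1, so the heuristic should return 0.2 here;
# guess_lexer() weighs this score against every other lexer's result.
print(CarbonLexer.analyse_text(sample))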
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/cddl.py b/venv/lib/python3.11/site-packages/pygments/lexers/cddl.py
deleted file mode 100644
index bd7f54a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/cddl.py
+++ /dev/null
@@ -1,173 +0,0 @@
-"""
- pygments.lexers.cddl
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Concise data definition language (CDDL), a notational
- convention to express CBOR and JSON data structures.
-
- More information:
- https://datatracker.ietf.org/doc/rfc8610/
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
- Punctuation, String, Whitespace
-
-__all__ = ['CddlLexer']
-
-
-class CddlLexer(RegexLexer):
- """
- Lexer for CDDL definitions.
-
- .. versionadded:: 2.8
- """
- name = "CDDL"
- url = 'https://datatracker.ietf.org/doc/rfc8610/'
- aliases = ["cddl"]
- filenames = ["*.cddl"]
- mimetypes = ["text/x-cddl"]
-
- _prelude_types = [
- "any",
- "b64legacy",
- "b64url",
- "bigfloat",
- "bigint",
- "bignint",
- "biguint",
- "bool",
- "bstr",
- "bytes",
- "cbor-any",
- "decfrac",
- "eb16",
- "eb64legacy",
- "eb64url",
- "encoded-cbor",
- "false",
- "float",
- "float16",
- "float16-32",
- "float32",
- "float32-64",
- "float64",
- "int",
- "integer",
- "mime-message",
- "nil",
- "nint",
- "null",
- "number",
- "regexp",
- "tdate",
- "text",
- "time",
- "true",
- "tstr",
- "uint",
- "undefined",
- "unsigned",
- "uri",
- ]
-
- _controls = [
- ".and",
- ".bits",
- ".cbor",
- ".cborseq",
- ".default",
- ".eq",
- ".ge",
- ".gt",
- ".le",
- ".lt",
- ".ne",
- ".regexp",
- ".size",
- ".within",
- ]
-
- _re_id = (
- r"[$@A-Z_a-z]"
- r"(?:[\-\.]+(?=[$@0-9A-Z_a-z])|[$@0-9A-Z_a-z])*"
-
- )
-
- # While the spec reads more like "an int must not start with 0" we use a
- # lookahead here that says "after a 0 there must be no digit". This makes the
- # '0' the invalid character in '01', which looks nicer when highlighted.
- _re_uint = r"(?:0b[01]+|0x[0-9a-fA-F]+|[1-9]\d*|0(?!\d))"
- _re_int = r"-?" + _re_uint
-
- tokens = {
- "commentsandwhitespace": [(r"\s+", Whitespace), (r";.+$", Comment.Single)],
- "root": [
- include("commentsandwhitespace"),
- # tag types
- (r"#(\d\.{uint})?".format(uint=_re_uint), Keyword.Type), # type or any
- # occurrence
- (
- r"({uint})?(\*)({uint})?".format(uint=_re_uint),
- bygroups(Number, Operator, Number),
- ),
- (r"\?|\+", Operator), # occurrence
- (r"\^", Operator), # cuts
- (r"(\.\.\.|\.\.)", Operator), # rangeop
- (words(_controls, suffix=r"\b"), Operator.Word), # ctlops
- # into choice op
- (r"&(?=\s*({groupname}|\())".format(groupname=_re_id), Operator),
- (r"~(?=\s*{})".format(_re_id), Operator), # unwrap op
-            (r"//|/(?!/)", Operator),  # double and single slash

- (r"=>|/==|/=|=", Operator),
- (r"[\[\]{}\(\),<>:]", Punctuation),
- # Bytestrings
- (r"(b64)(')", bygroups(String.Affix, String.Single), "bstrb64url"),
- (r"(h)(')", bygroups(String.Affix, String.Single), "bstrh"),
- (r"'", String.Single, "bstr"),
- # Barewords as member keys (must be matched before values, types, typenames,
- # groupnames).
- # Token type is String as barewords are always interpreted as such.
- (r"({bareword})(\s*)(:)".format(bareword=_re_id),
- bygroups(String, Whitespace, Punctuation)),
- # predefined types
- (words(_prelude_types, prefix=r"(?![\-_$@])\b", suffix=r"\b(?![\-_$@])"),
- Name.Builtin),
- # user-defined groupnames, typenames
- (_re_id, Name.Class),
- # values
- (r"0b[01]+", Number.Bin),
- (r"0o[0-7]+", Number.Oct),
- (r"0x[0-9a-fA-F]+(\.[0-9a-fA-F]+)?p[+-]?\d+", Number.Hex), # hexfloat
- (r"0x[0-9a-fA-F]+", Number.Hex), # hex
- # Float
- (r"{int}(?=(\.\d|e[+-]?\d))(?:\.\d+)?(?:e[+-]?\d+)?".format(int=_re_int),
- Number.Float),
- # Int
- (_re_int, Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- ],
- "bstrb64url": [
- (r"'", String.Single, "#pop"),
- include("commentsandwhitespace"),
- (r"\\.", String.Escape),
- (r"[0-9a-zA-Z\-_=]+", String.Single),
- (r".", Error),
- # (r";.+$", Token.Other),
- ],
- "bstrh": [
- (r"'", String.Single, "#pop"),
- include("commentsandwhitespace"),
- (r"\\.", String.Escape),
- (r"[0-9a-fA-F]+", String.Single),
- (r".", Error),
- ],
- "bstr": [
- (r"'", String.Single, "#pop"),
- (r"\\.", String.Escape),
- (r"[^'\\]+", String.Single),
- ],
- }
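The lookahead trick described next to _re_uint above (a '0' may not be followed by another digit) is easy to check in isolation; a small sketch using the same pattern:

import re

_re_uint = r"(?:0b[01]+|0x[0-9a-fA-F]+|[1-9]\d*|0(?!\d))"
# In '01' the leading '0' is rejected by the lookahead, so only the '1' matches,
# which is exactly what makes the '0' show up as the invalid character.
print(re.findall(_re_uint, "10 0 01"))   # ['10', '0', '1']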
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/chapel.py b/venv/lib/python3.11/site-packages/pygments/lexers/chapel.py
deleted file mode 100644
index 9af5f0e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/chapel.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""
- pygments.lexers.chapel
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Chapel language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['ChapelLexer']
-
-
-class ChapelLexer(RegexLexer):
- """
- For Chapel source.
-
- .. versionadded:: 2.0
- """
- name = 'Chapel'
- url = 'https://chapel-lang.org/'
- filenames = ['*.chpl']
- aliases = ['chapel', 'chpl']
- # mimetypes = ['text/x-chapel']
-
- known_types = ('bool', 'bytes', 'complex', 'imag', 'int', 'locale',
- 'nothing', 'opaque', 'range', 'real', 'string', 'uint',
- 'void')
-
- type_modifiers_par = ('atomic', 'single', 'sync')
- type_modifiers_mem = ('borrowed', 'owned', 'shared', 'unmanaged')
- type_modifiers = (*type_modifiers_par, *type_modifiers_mem)
-
- declarations = ('config', 'const', 'in', 'inout', 'out', 'param', 'ref',
- 'type', 'var')
-
- constants = ('false', 'nil', 'none', 'true')
-
- other_keywords = ('align', 'as',
- 'begin', 'break', 'by',
- 'catch', 'cobegin', 'coforall', 'continue',
- 'defer', 'delete', 'dmapped', 'do', 'domain',
- 'else', 'enum', 'except', 'export', 'extern',
- 'for', 'forall', 'foreach', 'forwarding',
- 'if', 'implements', 'import', 'index', 'init', 'inline',
- 'label', 'lambda', 'let', 'lifetime', 'local',
- 'new', 'noinit',
- 'on', 'only', 'otherwise', 'override',
- 'pragma', 'primitive', 'private', 'prototype', 'public',
- 'reduce', 'require', 'return',
- 'scan', 'select', 'serial', 'sparse', 'subdomain',
- 'then', 'this', 'throw', 'throws', 'try',
- 'use',
- 'when', 'where', 'while', 'with',
- 'yield',
- 'zip')
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'\\\n', Text),
-
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-
- (words(declarations, suffix=r'\b'), Keyword.Declaration),
- (words(constants, suffix=r'\b'), Keyword.Constant),
- (words(known_types, suffix=r'\b'), Keyword.Type),
- (words((*type_modifiers, *other_keywords), suffix=r'\b'), Keyword),
-
- (r'@', Keyword, 'attributename'),
- (r'(iter)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
- (r'(proc)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
- (r'(operator)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
- (r'(class|interface|module|record|union)(\s+)', bygroups(Keyword, Whitespace),
- 'classname'),
-
- # imaginary integers
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
-
- # reals cannot end with a period due to lexical ambiguity with
- # .. operator. See reference for rationale.
- (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
-
- # integer literals
- # -- binary
- (r'0[bB][01]+', Number.Bin),
- # -- hex
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- octal
- (r'0[oO][0-7]+', Number.Oct),
- # -- decimal
- (r'[0-9]+', Number.Integer),
-
- # strings
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'(\\\\|\\'|[^'])*'", String),
-
- # tokens
- (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
- r'<=>|<~>|\.\.|by|#|\.\.\.|'
- r'&&|\|\||!|&|\||\^|~|<<|>>|'
- r'==|!=|<=|>=|<|>|'
- r'[+\-*/%]|\*\*)', Operator),
- (r'[:;,.?()\[\]{}]', Punctuation),
-
- # identifiers
- (r'[a-zA-Z_][\w$]*', Name.Other),
- ],
- 'classname': [
- (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
- ],
- 'procname': [
- (r'([a-zA-Z_][.\w$]*|' # regular function name, including secondary
- r'\~[a-zA-Z_][.\w$]*|' # support for legacy destructors
- r'[+*/!~%<>=&^|\-:]{1,2})', # operators
- Name.Function, '#pop'),
-
- # allow `proc (atomic T).foo`
- (r'\(', Punctuation, "receivertype"),
- (r'\)+\.', Punctuation),
- ],
- 'receivertype': [
- (words(type_modifiers, suffix=r'\b'), Keyword),
- (words(known_types, suffix=r'\b'), Keyword.Type),
- (r'[^()]*', Name.Other, '#pop'),
- ],
- 'attributename': [
- (r'[a-zA-Z_][.\w$]*', Name.Decorator, '#pop'),
- ],
- }
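The 'procname' state above is what turns the identifier after proc, iter or operator into a Name.Function token; a quick check with the public token API (the Chapel sample is illustrative):

from pygments.lexers import ChapelLexer
from pygments.token import Name

code = 'proc fib(n: int): int {\n  return n;\n}\n'
print([v for t, v in ChapelLexer().get_tokens(code) if t is Name.Function])
# expected: ['fib']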
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/clean.py b/venv/lib/python3.11/site-packages/pygments/lexers/clean.py
deleted file mode 100644
index 73d93e0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/clean.py
+++ /dev/null
@@ -1,179 +0,0 @@
-"""
- pygments.lexers.clean
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Clean language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import ExtendedRegexLexer, words, default, include, bygroups
-from pygments.token import Comment, Error, Keyword, Literal, Name, Number, \
- Operator, Punctuation, String, Whitespace
-
-__all__ = ['CleanLexer']
-
-
-class CleanLexer(ExtendedRegexLexer):
- """
- Lexer for the general purpose, state-of-the-art, pure and lazy functional
- programming language Clean.
-
-    .. versionadded:: 2.2
- """
- name = 'Clean'
- url = 'http://clean.cs.ru.nl/Clean'
- aliases = ['clean']
- filenames = ['*.icl', '*.dcl']
-
- keywords = (
- 'case', 'ccall', 'class', 'code', 'code inline', 'derive', 'export',
- 'foreign', 'generic', 'if', 'in', 'infix', 'infixl', 'infixr',
- 'instance', 'let', 'of', 'otherwise', 'special', 'stdcall', 'where',
- 'with')
-
- modulewords = ('implementation', 'definition', 'system')
-
- lowerId = r'[a-z`][\w`]*'
- upperId = r'[A-Z`][\w`]*'
- funnyId = r'[~@#$%\^?!+\-*<>\\/|&=:]+'
- scoreUpperId = r'_' + upperId
- scoreLowerId = r'_' + lowerId
- moduleId = r'[a-zA-Z_][a-zA-Z0-9_.`]+'
- classId = '|'.join([lowerId, upperId, funnyId])
-
- tokens = {
- 'root': [
- include('comments'),
- include('keywords'),
- include('module'),
- include('import'),
- include('whitespace'),
- include('literals'),
- include('operators'),
- include('delimiters'),
- include('names'),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'//.*\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comments.in'),
- (r'/\*\*', Comment.Special, 'comments.in'),
- ],
- 'comments.in': [
- (r'\*\/', Comment.Multiline, '#pop'),
- (r'/\*', Comment.Multiline, '#push'),
- (r'[^*/]+', Comment.Multiline),
- (r'\*(?!/)', Comment.Multiline),
- (r'/', Comment.Multiline),
- ],
- 'keywords': [
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- ],
- 'module': [
- (words(modulewords, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (r'\bmodule\b', Keyword.Namespace, 'module.name'),
- ],
- 'module.name': [
- include('whitespace'),
- (moduleId, Name.Class, '#pop'),
- ],
- 'import': [
- (r'\b(import)\b(\s*)', bygroups(Keyword, Whitespace), 'import.module'),
- (r'\b(from)\b(\s*)\b(' + moduleId + r')\b(\s*)\b(import)\b',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace, Keyword),
- 'import.what'),
- ],
- 'import.module': [
- (r'\b(qualified)\b(\s*)', bygroups(Keyword, Whitespace)),
- (r'(\s*)\b(as)\b', bygroups(Whitespace, Keyword), ('#pop', 'import.module.as')),
- (moduleId, Name.Class),
- (r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
- (r'\s+', Whitespace),
- default('#pop'),
- ],
- 'import.module.as': [
- include('whitespace'),
- (lowerId, Name.Class, '#pop'),
- (upperId, Name.Class, '#pop'),
- ],
- 'import.what': [
- (r'\b(class)\b(\s+)(' + classId + r')',
- bygroups(Keyword, Whitespace, Name.Class), 'import.what.class'),
- (r'\b(instance)(\s+)(' + classId + r')(\s+)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace), 'import.what.instance'),
- (r'(::)(\s*)\b(' + upperId + r')\b',
- bygroups(Punctuation, Whitespace, Name.Class), 'import.what.type'),
- (r'\b(generic)\b(\s+)\b(' + lowerId + '|' + upperId + r')\b',
- bygroups(Keyword, Whitespace, Name)),
- include('names'),
- (r'(,)(\s+)', bygroups(Punctuation, Whitespace)),
- (r'$', Whitespace, '#pop'),
- include('whitespace'),
- ],
- 'import.what.class': [
- (r',', Punctuation, '#pop'),
- (r'\(', Punctuation, 'import.what.class.members'),
- (r'$', Whitespace, '#pop:2'),
- include('whitespace'),
- ],
- 'import.what.class.members': [
- (r',', Punctuation),
- (r'\.\.', Punctuation),
- (r'\)', Punctuation, '#pop'),
- include('names'),
- ],
- 'import.what.instance': [
- (r'[,)]', Punctuation, '#pop'),
- (r'\(', Punctuation, 'import.what.instance'),
- (r'$', Whitespace, '#pop:2'),
- include('whitespace'),
- include('names'),
- ],
- 'import.what.type': [
- (r',', Punctuation, '#pop'),
- (r'[({]', Punctuation, 'import.what.type.consesandfields'),
- (r'$', Whitespace, '#pop:2'),
- include('whitespace'),
- ],
- 'import.what.type.consesandfields': [
- (r',', Punctuation),
- (r'\.\.', Punctuation),
- (r'[)}]', Punctuation, '#pop'),
- include('names'),
- ],
- 'literals': [
- (r'\'([^\'\\]|\\(x[\da-fA-F]+|\d+|.))\'', Literal.Char),
- (r'[+~-]?0[0-7]+\b', Number.Oct),
- (r'[+~-]?\d+\.\d+(E[+-]?\d+)?', Number.Float),
- (r'[+~-]?\d+\b', Number.Integer),
- (r'[+~-]?0x[\da-fA-F]+\b', Number.Hex),
- (r'True|False', Literal),
- (r'"', String.Double, 'literals.stringd'),
- ],
- 'literals.stringd': [
- (r'[^\\"\n]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Double),
- (r'[$\n]', Error, '#pop'),
- ],
- 'operators': [
- (r'[-~@#$%\^?!+*<>\\/|&=:.]+', Operator),
- (r'\b_+\b', Operator),
- ],
- 'delimiters': [
- (r'[,;(){}\[\]]', Punctuation),
- (r'(\')([\w`.]+)(\')',
- bygroups(Punctuation, Name.Class, Punctuation)),
- ],
- 'names': [
- (lowerId, Name),
- (scoreLowerId, Name),
- (funnyId, Name.Function),
- (upperId, Name.Class),
- (scoreUpperId, Name.Class),
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/comal.py b/venv/lib/python3.11/site-packages/pygments/lexers/comal.py
deleted file mode 100644
index 6665955..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/comal.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""
- pygments.lexers.comal
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for COMAL-80.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Whitespace, Operator, Keyword, String, \
- Number, Name, Punctuation
-
-__all__ = ["Comal80Lexer"]
-
-
-class Comal80Lexer(RegexLexer):
- """
- For COMAL-80 source code.
- """
-
- name = 'COMAL-80'
- url = 'https://en.wikipedia.org/wiki/COMAL'
- aliases = ['comal', 'comal80']
- filenames = ['*.cml', '*.comal']
- flags = re.IGNORECASE
- #
- # COMAL allows for some strange characters in names which we list here so
- # keywords and word operators will not be recognized at the start of an
- # identifier.
- #
- _suffix = r"\b(?!['\[\]←£\\])"
- _identifier = r"[a-z]['\[\]←£\\\w]*"
-
- tokens = {
- 'root': [
- (r'//.*\n', Comment.Single),
- (r'\s+', Whitespace),
- (r':[=+-]|\<\>|[-+*/^↑<>=]', Operator),
- (r'(and +then|or +else)' + _suffix, Operator.Word),
- (words([
- 'and', 'bitand', 'bitor', 'bitxor', 'div', 'in', 'mod', 'not',
- 'or'], suffix=_suffix,), Operator.Word),
- (words([
- 'append', 'at', 'case', 'chain', 'close', 'copy', 'create', 'cursor',
- 'data', 'delete', 'dir', 'do', 'elif', 'else', 'end', 'endcase', 'endif',
- 'endfor', 'endloop', 'endtrap', 'endwhile', 'exec', 'exit', 'file',
- 'for', 'goto', 'handler', 'if', 'input', 'let', 'loop', 'mount', 'null',
- 'of', 'open', 'otherwise', 'output', 'page', 'pass', 'poke', 'print',
- 'random', 'read', 'repeat', 'report', 'return', 'rename', 'restore',
- 'select', 'step', 'stop', 'sys', 'then', 'to', 'trap', 'unit', 'unit$',
- 'until', 'using', 'when', 'while', 'write', 'zone'], suffix=_suffix),
- Keyword.Reserved),
- (words([
- 'closed', 'dim', 'endfunc', 'endproc', 'external', 'func', 'import',
- 'proc', 'ref', 'use'], suffix=_suffix), Keyword.Declaration),
- (words([
- 'abs', 'atn', 'chr$', 'cos', 'eod', 'eof', 'err', 'errfile', 'errtext',
- 'esc', 'exp', 'int', 'key$', 'len', 'log', 'ord', 'peek', 'randomize',
- 'rnd', 'sgn', 'sin', 'spc$', 'sqr', 'status$', 'str$', 'tab', 'tan',
- 'time', 'val'], suffix=_suffix), Name.Builtin),
- (words(['false', 'pi', 'true'], suffix=_suffix), Keyword.Constant),
- (r'"', String, 'string'),
- (_identifier + r":(?=[ \n/])", Name.Label),
- (_identifier + r"[$#]?", Name),
- (r'%[01]+', Number.Bin),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\d*\.\d*(e[-+]?\d+)?', Number.Float),
- (r'\d+', Number.Integer),
- (r'[(),:;]', Punctuation),
- ],
- 'string': [
- (r'[^"]+', String),
- (r'"[0-9]*"', String.Escape),
- (r'"', String, '#pop'),
- ],
- }
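Since Comal80Lexer sets flags = re.IGNORECASE, keywords are recognised in either case; a small illustration via get_tokens (the COMAL snippet is made up):

from pygments.lexers import Comal80Lexer
from pygments.token import Keyword

src = 'PRINT "HELLO"\nprint "hello"\n'
print([v for t, v in Comal80Lexer().get_tokens(src) if t in Keyword])
# both spellings should come back as Keyword.Reserved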
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/compiled.py b/venv/lib/python3.11/site-packages/pygments/lexers/compiled.py
deleted file mode 100644
index fe27425..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/compiled.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""
- pygments.lexers.compiled
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.jvm import JavaLexer, ScalaLexer
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers.d import DLexer
-from pygments.lexers.objective import ObjectiveCLexer, \
- ObjectiveCppLexer, LogosLexer
-from pygments.lexers.go import GoLexer
-from pygments.lexers.rust import RustLexer
-from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
-from pygments.lexers.pascal import DelphiLexer, PortugolLexer, Modula2Lexer
-from pygments.lexers.ada import AdaLexer
-from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
-from pygments.lexers.fortran import FortranLexer
-from pygments.lexers.prolog import PrologLexer
-from pygments.lexers.python import CythonLexer
-from pygments.lexers.graphics import GLShaderLexer
-from pygments.lexers.ml import OcamlLexer
-from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
-from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
-from pygments.lexers.ooc import OocLexer
-from pygments.lexers.felix import FelixLexer
-from pygments.lexers.nimrod import NimrodLexer
-from pygments.lexers.crystal import CrystalLexer
-
-__all__ = []
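pygments.lexers.compiled only re-exports classes that moved to other modules, so old import paths keep resolving to the same objects; for example, assuming a pygments install is importable:

from pygments.lexers.compiled import CLexer as CompatCLexer
from pygments.lexers.c_cpp import CLexer

print(CompatCLexer is CLexer)   # True: same class, just a compatibility alias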
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/configs.py b/venv/lib/python3.11/site-packages/pygments/lexers/configs.py
deleted file mode 100644
index 6c5e542..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/configs.py
+++ /dev/null
@@ -1,1435 +0,0 @@
-"""
- pygments.lexers.configs
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for configuration file formats.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, default, words, \
- bygroups, include, using, line_re
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace, Literal, Error, Generic
-from pygments.lexers.shell import BashLexer
-from pygments.lexers.data import JsonLexer
-
-__all__ = ['IniLexer', 'SystemdLexer', 'DesktopLexer', 'RegeditLexer', 'PropertiesLexer',
- 'KconfigLexer', 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
- 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
- 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
- 'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer',
- 'NestedTextLexer', 'SingularityLexer', 'UnixConfigLexer']
-
-
-class IniLexer(RegexLexer):
- """
- Lexer for configuration files in INI style.
- """
-
- name = 'INI'
- aliases = ['ini', 'cfg', 'dosini']
- filenames = [
- '*.ini', '*.cfg', '*.inf', '.editorconfig',
- ]
- mimetypes = ['text/x-ini', 'text/inf']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'[;#].*', Comment.Single),
- (r'(\[.*?\])([ \t]*)$', bygroups(Keyword, Whitespace)),
-            (r'(.*?)([ \t]*)([=:])([ \t]*)([^;#\n]*)(\\)(\s+)',
- bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String,
- Text, Whitespace),
- "value"),
-            (r'(.*?)([ \t]*)([=:])([ \t]*)([^ ;#\n]*(?: +[^ ;#\n]+)*)',
- bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
- # standalone option, supported by some INI parsers
- (r'(.+?)$', Name.Attribute),
- ],
- 'value': [ # line continuation
- (r'\s+', Whitespace),
- (r'(\s*)(.*)(\\)([ \t]*)',
- bygroups(Whitespace, String, Text, Whitespace)),
- (r'.*$', String, "#pop"),
- ],
- }
-
- def analyse_text(text):
- npos = text.find('\n')
- if npos < 3:
- return False
- if text[0] == '[' and text[npos-1] == ']':
- return 0.8
- return False
-
-
-class DesktopLexer(RegexLexer):
- """
- Lexer for .desktop files.
-
- .. versionadded:: 2.16
- """
-
- name = 'Desktop file'
- url = "https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html"
- aliases = ['desktop']
- filenames = ['*.desktop']
-
- tokens = {
- 'root': [
- (r'^[ \t]*\n', Whitespace),
- (r'^(#.*)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'(\[[^\]\n]+\])(\n)', bygroups(Keyword, Whitespace)),
- (r'([-A-Za-z0-9]+)(\[[^\] \t=]+\])?([ \t]*)(=)([ \t]*)([^\n]*)([ \t\n]*\n)',
- bygroups(Name.Attribute, Name.Namespace, Whitespace, Operator, Whitespace, String, Whitespace)),
- ],
- }
-
- def analyse_text(text):
- if text.startswith("[Desktop Entry]"):
- return 1.0
- if re.search(r"^\[Desktop Entry\][ \t]*$", text[:500], re.MULTILINE) is not None:
- return 0.9
- return 0.0
-
-
-class SystemdLexer(RegexLexer):
- """
- Lexer for systemd unit files.
-
- .. versionadded:: 2.16
- """
-
- name = 'Systemd'
- url = "https://www.freedesktop.org/software/systemd/man/systemd.syntax.html"
- aliases = ['systemd']
- filenames = [
- '*.service', '*.socket', '*.device', '*.mount', '*.automount',
- '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope',
- ]
-
- tokens = {
- 'root': [
- (r'^[ \t]*\n', Whitespace),
- (r'^([;#].*)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'(\[[^\]\n]+\])(\n)', bygroups(Keyword, Whitespace)),
- (r'([^=]+)([ \t]*)(=)([ \t]*)([^\n]*)(\\)(\n)',
- bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String,
- Text, Whitespace),
- "value"),
- (r'([^=]+)([ \t]*)(=)([ \t]*)([^\n]*)(\n)',
- bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String, Whitespace)),
- ],
- 'value': [
- # line continuation
- (r'^([;#].*)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'([ \t]*)([^\n]*)(\\)(\n)',
- bygroups(Whitespace, String, Text, Whitespace)),
- (r'([ \t]*)([^\n]*)(\n)',
- bygroups(Whitespace, String, Whitespace), "#pop"),
- ],
- }
-
- def analyse_text(text):
- if text.startswith("[Unit]"):
- return 1.0
- if re.search(r"^\[Unit\][ \t]*$", text[:500], re.MULTILINE) is not None:
- return 0.9
- return 0.0
-
-
-class RegeditLexer(RegexLexer):
- """
- Lexer for Windows Registry files produced by regedit.
-
- .. versionadded:: 1.6
- """
-
- name = 'reg'
- url = 'http://en.wikipedia.org/wiki/Windows_Registry#.REG_files'
- aliases = ['registry']
- filenames = ['*.reg']
- mimetypes = ['text/x-windows-registry']
-
- tokens = {
- 'root': [
- (r'Windows Registry Editor.*', Text),
- (r'\s+', Whitespace),
- (r'[;#].*', Comment.Single),
- (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
- bygroups(Keyword, Operator, Name.Builtin, Keyword)),
- # String keys, which obey somewhat normal escaping
- (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Whitespace, Operator, Whitespace),
- 'value'),
- # Bare keys (includes @)
- (r'(.*?)([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Whitespace, Operator, Whitespace),
- 'value'),
- ],
- 'value': [
- (r'-', Operator, '#pop'), # delete value
- (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
- bygroups(Name.Variable, Punctuation, Number), '#pop'),
- # As far as I know, .reg files do not support line continuation.
- (r'.+', String, '#pop'),
- default('#pop'),
- ]
- }
-
- def analyse_text(text):
- return text.startswith('Windows Registry Editor')
-
-
-class PropertiesLexer(RegexLexer):
- """
- Lexer for configuration files in Java's properties format.
-
- Note: trailing whitespace counts as part of the value as per spec
-
- .. versionadded:: 1.4
- """
-
- name = 'Properties'
- aliases = ['properties', 'jproperties']
- filenames = ['*.properties']
- mimetypes = ['text/x-java-properties']
-
- tokens = {
- 'root': [
- # comments
- (r'[!#].*|/{2}.*', Comment.Single),
- # ending a comment or whitespace-only line
- (r'\n', Whitespace),
- # eat whitespace at the beginning of a line
- (r'^[^\S\n]+', Whitespace),
- # start lexing a key
- default('key'),
- ],
- 'key': [
- # non-escaped key characters
- (r'[^\\:=\s]+', Name.Attribute),
- # escapes
- include('escapes'),
- # separator is the first non-escaped whitespace or colon or '=' on the line;
- # if it's whitespace, = and : are gobbled after it
- (r'([^\S\n]*)([:=])([^\S\n]*)',
- bygroups(Whitespace, Operator, Whitespace),
- ('#pop', 'value')),
- (r'[^\S\n]+', Whitespace, ('#pop', 'value')),
- # maybe we got no value after all
- (r'\n', Whitespace, '#pop'),
- ],
- 'value': [
- # non-escaped value characters
- (r'[^\\\n]+', String),
- # escapes
- include('escapes'),
- # end the value on an unescaped newline
- (r'\n', Whitespace, '#pop'),
- ],
- 'escapes': [
- # line continuations; these gobble whitespace at the beginning of the next line
- (r'(\\\n)([^\S\n]*)', bygroups(String.Escape, Whitespace)),
- # other escapes
- (r'\\(.|\n)', String.Escape),
- ],
- }
-
-
-def _rx_indent(level):
- # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
- # Edit this if you are in an environment where KconfigLexer gets expanded
- # input (tabs expanded to spaces) and the expansion tab width is != 8,
- # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
- # Value range here is 2 <= {tab_width} <= 8.
- tab_width = 8
- # Regex matching a given indentation {level}, assuming that indentation is
- # a multiple of {tab_width}. In other cases there might be problems.
- if tab_width == 2:
- space_repeat = '+'
- else:
- space_repeat = '{1,%d}' % (tab_width - 1)
- if level == 1:
- level_repeat = ''
- else:
- level_repeat = '{%s}' % level
- return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
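To make the comment above concrete, this is what _rx_indent builds for the default tab_width of 8 (importing the private helper purely for illustration, assuming a pygments install is importable):

from pygments.lexers.configs import _rx_indent

print(_rx_indent(1))   # (?:\t| {1,7}\t| {8}).*\n
print(_rx_indent(2))   # (?:\t| {1,7}\t| {8}){2}.*\n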
-
-
-class KconfigLexer(RegexLexer):
- """
- For Linux-style Kconfig files.
-
- .. versionadded:: 1.6
- """
-
- name = 'Kconfig'
- aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
- # Adjust this if new kconfig file names appear in your environment
- filenames = ['Kconfig*', '*Config.in*', 'external.in*',
- 'standard-modules.in']
- mimetypes = ['text/x-kconfig']
- # No re.MULTILINE, indentation-aware help text needs line-by-line handling
- flags = 0
-
- def call_indent(level):
- # If indentation >= {level} is detected, enter state 'indent{level}'
- return (_rx_indent(level), String.Doc, 'indent%s' % level)
-
- def do_indent(level):
- # Print paragraphs of indentation level >= {level} as String.Doc,
- # ignoring blank lines. Then return to 'root' state.
- return [
- (_rx_indent(level), String.Doc),
- (r'\s*\n', Text),
- default('#pop:2')
- ]
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#.*?\n', Comment.Single),
- (words((
- 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
- 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
- 'source', 'prompt', 'select', 'depends on', 'default',
- 'range', 'option'), suffix=r'\b'),
- Keyword),
- (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
- (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
- Name.Builtin),
- (r'[!=&|]', Operator),
- (r'[()]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r'\S+', Text),
- ],
- # Help text is indented, multi-line and ends when a lower indentation
- # level is detected.
- 'help': [
- # Skip blank lines after help token, if any
- (r'\s*\n', Text),
- # Determine the first help line's indentation level heuristically(!).
- # Attention: this is not perfect, but works for 99% of "normal"
- # indentation schemes up to a max. indentation level of 7.
- call_indent(7),
- call_indent(6),
- call_indent(5),
- call_indent(4),
- call_indent(3),
- call_indent(2),
- call_indent(1),
- default('#pop'), # for incomplete help sections without text
- ],
- # Handle text for indentation levels 7 to 1
- 'indent7': do_indent(7),
- 'indent6': do_indent(6),
- 'indent5': do_indent(5),
- 'indent4': do_indent(4),
- 'indent3': do_indent(3),
- 'indent2': do_indent(2),
- 'indent1': do_indent(1),
- }
-
-
-class Cfengine3Lexer(RegexLexer):
- """
- Lexer for CFEngine3 policy files.
-
- .. versionadded:: 1.5
- """
-
- name = 'CFEngine3'
- url = 'http://cfengine.org'
- aliases = ['cfengine3', 'cf3']
- filenames = ['*.cf']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'#.*?\n', Comment),
- (r'(body)(\s+)(\S+)(\s+)(control)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function, Punctuation),
- 'arglist'),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
- (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
- bygroups(Punctuation, Name.Variable, Punctuation,
- Whitespace, Keyword.Type, Whitespace, Operator, Whitespace)),
- (r'(\S+)(\s*)(=>)(\s*)',
- bygroups(Keyword.Reserved, Whitespace, Operator, Text)),
- (r'"', String, 'string'),
- (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
- (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
- (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
- (r'@[{(][^)}]+[})]', Name.Variable),
- (r'[(){},;]', Punctuation),
- (r'=>', Operator),
- (r'->', Operator),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\w+', Name.Function),
- (r'\s+', Whitespace),
- ],
- 'string': [
- (r'\$[{(]', String.Interpol, 'interpol'),
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'\n', String),
- (r'.', String),
- ],
- 'interpol': [
- (r'\$[{(]', String.Interpol, '#push'),
- (r'[})]', String.Interpol, '#pop'),
- (r'[^${()}]+', String.Interpol),
- ],
- 'arglist': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'\w+', Name.Variable),
- (r'\s+', Whitespace),
- ],
- }
-
-
-class ApacheConfLexer(RegexLexer):
- """
- Lexer for configuration files following the Apache config file
- format.
-
- .. versionadded:: 0.6
- """
-
- name = 'ApacheConf'
- aliases = ['apacheconf', 'aconf', 'apache']
- filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
- mimetypes = ['text/x-apacheconf']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#(.*\\\n)+.*$|(#.*?)$', Comment),
- (r'(<[^\s>/][^\s>]*)(?:(\s+)(.*))?(>)',
- bygroups(Name.Tag, Whitespace, String, Name.Tag)),
- (r'(</[^\s>]+)(>)',
- bygroups(Name.Tag, Name.Tag)),
- (r'[a-z]\w*', Name.Builtin, 'value'),
- (r'\.+', Text),
- ],
- 'value': [
- (r'\\\n', Text),
- (r'\n+', Whitespace, '#pop'),
- (r'\\', Text),
- (r'[^\S\n]+', Whitespace),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'\d+', Number),
- (r'/([*a-z0-9][*\w./-]+)', String.Other),
- (r'(on|off|none|any|all|double|email|dns|min|minimal|'
- r'os|productonly|full|emerg|alert|crit|error|warn|'
- r'notice|info|debug|registry|script|inetd|standalone|'
- r'user|group)\b', Keyword),
- (r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double),
- (r'[^\s"\\]+', Text)
- ],
- }
-
-
-class SquidConfLexer(RegexLexer):
- """
- Lexer for squid configuration files.
-
- .. versionadded:: 0.9
- """
-
- name = 'SquidConf'
- url = 'http://www.squid-cache.org/'
- aliases = ['squidconf', 'squid.conf', 'squid']
- filenames = ['squid.conf']
- mimetypes = ['text/x-squidconf']
- flags = re.IGNORECASE
-
- keywords = (
- "access_log", "acl", "always_direct", "announce_host",
- "announce_period", "announce_port", "announce_to", "anonymize_headers",
- "append_domain", "as_whois_server", "auth_param_basic",
- "authenticate_children", "authenticate_program", "authenticate_ttl",
- "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
- "cache_dir", "cache_dns_program", "cache_effective_group",
- "cache_effective_user", "cache_host", "cache_host_acl",
- "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
- "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
- "cache_peer_access", "cache_replacement_policy", "cache_stoplist",
- "cache_stoplist_pattern", "cache_store_log", "cache_swap",
- "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
- "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
- "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
- "delay_initial_bucket_level", "delay_parameters", "delay_pools",
- "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
- "dns_testnames", "emulate_httpd_log", "err_html_text",
- "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
- "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
- "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
- "header_replace", "hierarchy_stoplist", "high_response_time_warning",
- "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
- "http_anonymizer", "httpd_accel", "httpd_accel_host",
- "httpd_accel_port", "httpd_accel_uses_host_header",
- "httpd_accel_with_proxy", "http_port", "http_reply_access",
- "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
- "ident_lookup", "ident_lookup_access", "ident_timeout",
- "incoming_http_average", "incoming_icp_average", "inside_firewall",
- "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
- "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
- "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
- "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
- "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
- "memory_pools_limit", "memory_replacement_policy", "mime_table",
- "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
- "minimum_object_size", "minimum_retry_timeout", "miss_access",
- "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
- "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
- "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
- "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
- "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
- "quick_abort", "quick_abort_max", "quick_abort_min",
- "quick_abort_pct", "range_offset_limit", "read_timeout",
- "redirect_children", "redirect_program",
- "redirect_rewrites_host_header", "reference_age",
- "refresh_pattern", "reload_into_ims", "request_body_max_size",
- "request_size", "request_timeout", "shutdown_lifetime",
- "single_parent_bypass", "siteselect_timeout", "snmp_access",
- "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
- "store_avg_object_size", "store_objects_per_bucket",
- "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
- "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
- "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
- "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
- "unlinkd_program", "uri_whitespace", "useragent_log",
- "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
- )
-
- opts = (
- "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
- "multicast-responder", "on", "off", "all", "deny", "allow", "via",
- "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
- "credentialsttl", "none", "disable", "offline_toggle", "diskd",
- )
-
- actions = (
- "shutdown", "info", "parameter", "server_list", "client_list",
- r'squid.conf',
- )
-
- actions_stats = (
- "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
- "redirector", "io", "reply_headers", "filedescriptors", "netdb",
- )
-
- actions_log = ("status", "enable", "disable", "clear")
-
- acls = (
- "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
- "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
- "dst", "time", "dstdomain", "ident", "snmp_community",
- )
-
- ipv4_group = r'(\d+|0x[0-9a-f]+)'
- ipv4 = rf'({ipv4_group}(\.{ipv4_group}){{3}})'
- ipv6_group = r'([0-9a-f]{0,4})'
- ipv6 = rf'({ipv6_group}(:{ipv6_group}){{1,7}})'
- bare_ip = rf'({ipv4}|{ipv6})'
-    # XXX: /integer is a subnet mask, but what is /IP ?
- # There is no test where it is used.
- ip = rf'{bare_ip}(/({bare_ip}|\d+))?'
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#', Comment, 'comment'),
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
- # Actions
- (words(actions, prefix=r'\b', suffix=r'\b'), String),
- (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
- (words(actions_log, prefix=r'log/', suffix=r'='), String),
- (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
- (ip, Number.Float),
- (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
- (r'\S+', Text),
- ],
- 'comment': [
- (r'\s*TAG:.*', String.Escape, '#pop'),
- (r'.+', Comment, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class NginxConfLexer(RegexLexer):
- """
- Lexer for Nginx configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Nginx configuration file'
- url = 'http://nginx.net/'
- aliases = ['nginx']
- filenames = ['nginx.conf']
- mimetypes = ['text/x-nginx-conf']
-
- tokens = {
- 'root': [
- (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Whitespace, Name)),
- (r'[^\s;#]+', Keyword, 'stmt'),
- include('base'),
- ],
- 'block': [
- (r'\}', Punctuation, '#pop:2'),
- (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
- include('base'),
- ],
- 'stmt': [
- (r'\{', Punctuation, 'block'),
- (r';', Punctuation, '#pop'),
- include('base'),
- ],
- 'base': [
- (r'#.*\n', Comment.Single),
- (r'on|off', Name.Constant),
- (r'\$[^\s;#()]+', Name.Variable),
- (r'([a-z0-9.-]+)(:)([0-9]+)',
- bygroups(Name, Punctuation, Number.Integer)),
- (r'[a-z-]+/[a-z-+]+', String), # mimetype
- # (r'[a-zA-Z._-]+', Keyword),
- (r'[0-9]+[km]?\b', Number.Integer),
- (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Whitespace, String.Regex)),
- (r'[:=~]', Punctuation),
- (r'[^\s;#{}$]+', String), # catch all
- (r'/[^\s;#]*', Name), # pathname
- (r'\s+', Whitespace),
- (r'[$;]', Text), # leftover characters
- ],
- }
-
-
-class LighttpdConfLexer(RegexLexer):
- """
- Lexer for Lighttpd configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Lighttpd configuration file'
- url = 'http://lighttpd.net/'
- aliases = ['lighttpd', 'lighty']
- filenames = ['lighttpd.conf']
- mimetypes = ['text/x-lighttpd-conf']
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment.Single),
- (r'/\S*', Name), # pathname
- (r'[a-zA-Z._-]+', Keyword),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'[0-9]+', Number),
- (r'=>|=~|\+=|==|=|\+', Operator),
- (r'\$[A-Z]+', Name.Builtin),
- (r'[(){}\[\],]', Punctuation),
- (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
- (r'\s+', Whitespace),
- ],
-
- }
-
-
-class DockerLexer(RegexLexer):
- """
- Lexer for Docker configuration files.
-
- .. versionadded:: 2.0
- """
- name = 'Docker'
- url = 'http://docker.io'
- aliases = ['docker', 'dockerfile']
- filenames = ['Dockerfile', '*.docker']
- mimetypes = ['text/x-dockerfile-config']
-
- _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
- _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
- _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'#.*', Comment),
- (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
- bygroups(Keyword, Whitespace, String, Whitespace, Keyword, Whitespace, String)),
- (r'(ONBUILD)(\s+)(%s)' % (_lb,), bygroups(Keyword, Whitespace, using(BashLexer))),
- (r'(HEALTHCHECK)(\s+)((%s--\w+=\w+%s)*)' % (_lb, _lb),
- bygroups(Keyword, Whitespace, using(BashLexer))),
- (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(\s+)(%s)(\[.*?\])' % (_lb,),
- bygroups(Keyword, Whitespace, using(BashLexer), using(JsonLexer))),
- (r'(LABEL|ENV|ARG)(\s+)((%s\w+=\w+%s)*)' % (_lb, _lb),
- bygroups(Keyword, Whitespace, using(BashLexer))),
- (r'(%s|VOLUME)\b(\s+)(.*)' % (_keywords), bygroups(Keyword, Whitespace, String)),
- (r'(%s)(\s+)' % (_bash_keywords,), bygroups(Keyword, Whitespace)),
- (r'(.*\\\n)*.+', using(BashLexer)),
- ]
- }
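The DockerLexer only claims the instruction words itself and hands the rest of each line to BashLexer (or JsonLexer for the exec-form arrays); a rough check, with an illustrative Dockerfile snippet:

from pygments.lexers import DockerLexer
from pygments.token import Keyword

df = 'FROM python:3.11 AS base\nRUN echo "hi"\nCMD ["python", "-V"]\n'
print([v for t, v in DockerLexer().get_tokens(df) if t in Keyword])
# expected: ['FROM', 'AS', 'RUN', 'CMD']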
-
-
-class TerraformLexer(ExtendedRegexLexer):
- """
-    Lexer for Terraform ``.tf`` files.
-
- .. versionadded:: 2.1
- """
-
- name = 'Terraform'
- url = 'https://www.terraform.io/'
- aliases = ['terraform', 'tf', 'hcl']
- filenames = ['*.tf', '*.hcl']
- mimetypes = ['application/x-tf', 'application/x-terraform']
-
- classes = ('backend', 'data', 'module', 'output', 'provider',
- 'provisioner', 'resource', 'variable')
- classes_re = "({})".format(('|').join(classes))
-
- types = ('string', 'number', 'bool', 'list', 'tuple', 'map', 'set', 'object', 'null')
-
- numeric_functions = ('abs', 'ceil', 'floor', 'log', 'max',
-                         'min', 'parseint', 'pow', 'signum')
-
- string_functions = ('chomp', 'format', 'formatlist', 'indent',
- 'join', 'lower', 'regex', 'regexall', 'replace',
- 'split', 'strrev', 'substr', 'title', 'trim',
- 'trimprefix', 'trimsuffix', 'trimspace', 'upper'
- )
-
- collection_functions = ('alltrue', 'anytrue', 'chunklist', 'coalesce',
- 'coalescelist', 'compact', 'concat', 'contains',
- 'distinct', 'element', 'flatten', 'index', 'keys',
- 'length', 'list', 'lookup', 'map', 'matchkeys',
- 'merge', 'range', 'reverse', 'setintersection',
- 'setproduct', 'setsubtract', 'setunion', 'slice',
- 'sort', 'sum', 'transpose', 'values', 'zipmap'
- )
-
- encoding_functions = ('base64decode', 'base64encode', 'base64gzip',
- 'csvdecode', 'jsondecode', 'jsonencode', 'textdecodebase64',
- 'textencodebase64', 'urlencode', 'yamldecode', 'yamlencode')
-
- filesystem_functions = ('abspath', 'dirname', 'pathexpand', 'basename',
- 'file', 'fileexists', 'fileset', 'filebase64', 'templatefile')
-
- date_time_functions = ('formatdate', 'timeadd', 'timestamp')
-
- hash_crypto_functions = ('base64sha256', 'base64sha512', 'bcrypt', 'filebase64sha256',
- 'filebase64sha512', 'filemd5', 'filesha1', 'filesha256', 'filesha512',
- 'md5', 'rsadecrypt', 'sha1', 'sha256', 'sha512', 'uuid', 'uuidv5')
-
- ip_network_functions = ('cidrhost', 'cidrnetmask', 'cidrsubnet', 'cidrsubnets')
-
- type_conversion_functions = ('can', 'defaults', 'tobool', 'tolist', 'tomap',
- 'tonumber', 'toset', 'tostring', 'try')
-
- builtins = numeric_functions + string_functions + collection_functions + encoding_functions +\
- filesystem_functions + date_time_functions + hash_crypto_functions + ip_network_functions +\
- type_conversion_functions
- builtins_re = "({})".format(('|').join(builtins))
-
- def heredoc_callback(self, match, ctx):
- # Parse a terraform heredoc
- # match: 1 = <<[-]?, 2 = name 3 = rest of line
-
- start = match.start(1)
- yield start, Operator, match.group(1) # <<[-]?
- yield match.start(2), String.Delimiter, match.group(2) # heredoc name
-
- ctx.pos = match.start(3)
- ctx.end = match.end(3)
- yield ctx.pos, String.Heredoc, match.group(3)
- ctx.pos = match.end()
-
- hdname = match.group(2)
- tolerant = True # leading whitespace is always accepted
-
- lines = []
-
- for match in line_re.finditer(ctx.text, ctx.pos):
- if tolerant:
- check = match.group().strip()
- else:
- check = match.group().rstrip()
- if check == hdname:
- for amatch in lines:
- yield amatch.start(), String.Heredoc, amatch.group()
- yield match.start(), String.Delimiter, match.group()
- ctx.pos = match.end()
- break
- else:
- lines.append(match)
- else:
- # end of heredoc not found -- error!
- for amatch in lines:
- yield amatch.start(), Error, amatch.group()
- ctx.end = len(ctx.text)
-
- tokens = {
- 'root': [
- include('basic'),
- include('whitespace'),
-
- # Strings
- (r'(".*")', bygroups(String.Double)),
-
- # Constants
- (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Name.Constant),
-
- # Types
- (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- include('identifier'),
- include('punctuation'),
- (r'[0-9]+', Number),
- ],
- 'basic': [
- (r'\s*/\*', Comment.Multiline, 'comment'),
- (r'\s*(#|//).*\n', Comment.Single),
- include('whitespace'),
-
- # e.g. terraform {
- # e.g. egress {
- (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=?)(\s*)(\{)',
- bygroups(Whitespace, Name.Builtin, Whitespace, Operator, Whitespace, Punctuation)),
-
- # Assignment with attributes, e.g. something = ...
- (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=)(\s*)',
- bygroups(Whitespace, Name.Attribute, Whitespace, Operator, Whitespace)),
-
- # Assignment with environment variables and similar, e.g. "something" = ...
- # or key value assignment, e.g. "SlotName" : ...
- (r'(\s*)("\S+")(\s*)([=:])(\s*)',
- bygroups(Whitespace, Literal.String.Double, Whitespace, Operator, Whitespace)),
-
- # Functions, e.g. jsonencode(element("value"))
- (builtins_re + r'(\()', bygroups(Name.Function, Punctuation)),
-
- # List of attributes, e.g. ignore_changes = [last_modified, filename]
- (r'(\[)([a-z_,\s]+)(\])', bygroups(Punctuation, Name.Builtin, Punctuation)),
-
- # e.g. resource "aws_security_group" "allow_tls" {
- # e.g. backend "consul" {
- (classes_re + r'(\s+)("[0-9a-zA-Z-_]+")?(\s*)("[0-9a-zA-Z-_]+")(\s+)(\{)',
- bygroups(Keyword.Reserved, Whitespace, Name.Class, Whitespace, Name.Variable, Whitespace, Punctuation)),
-
- # here-doc style delimited strings
- (r'(<<-?)\s*([a-zA-Z_]\w*)(.*?\n)', heredoc_callback),
- ],
- 'identifier': [
- (r'\b(var\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
- (r'\b([0-9a-zA-Z-_\[\]]+\.[0-9a-zA-Z-_\.\[\]]+)\b',
- bygroups(Name.Variable)),
- ],
- 'punctuation': [
- (r'[\[\]()\{\},.?:!=]', Punctuation),
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'(\\)(\n)', bygroups(Text, Whitespace)),
- ],
- }
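heredoc_callback above hands the body lines back as String.Heredoc and the delimiters as String.Delimiter; a quick way to watch that happen (the HCL snippet is illustrative):

from pygments.lexers import TerraformLexer
from pygments.token import String

hcl = 'user_data = <<EOF\nhello\nEOF\n'
toks = TerraformLexer().get_tokens(hcl)
print([(str(t), v) for t, v in toks if t in (String.Heredoc, String.Delimiter)])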
-
-
-class TermcapLexer(RegexLexer):
- """
- Lexer for termcap database source.
-
- This is very simple and minimal.
-
- .. versionadded:: 2.1
- """
- name = 'Termcap'
- aliases = ['termcap']
- filenames = ['termcap', 'termcap.src']
- mimetypes = []
-
- # NOTE:
- # * multiline with trailing backslash
- # * separator is ':'
- # * to embed colon as data, we must use \072
-    # * space after separator is not allowed (maybe)
- tokens = {
- 'root': [
- (r'^#.*', Comment),
- (r'^[^\s#:|]+', Name.Tag, 'names'),
- (r'\s+', Whitespace),
- ],
- 'names': [
- (r'\n', Whitespace, '#pop'),
- (r':', Punctuation, 'defs'),
- (r'\|', Punctuation),
- (r'[^:|]+', Name.Attribute),
- ],
- 'defs': [
- (r'(\\)(\n[ \t]*)', bygroups(Text, Whitespace)),
- (r'\n[ \t]*', Whitespace, '#pop:2'),
- (r'(#)([0-9]+)', bygroups(Operator, Number)),
- (r'=', Operator, 'data'),
- (r':', Punctuation),
- (r'[^\s:=#]+', Name.Class),
- ],
- 'data': [
- (r'\\072', Literal),
- (r':', Punctuation, '#pop'),
- (r'[^:\\]+', Literal), # for performance
- (r'.', Literal),
- ],
- }
-
-
-class TerminfoLexer(RegexLexer):
- """
- Lexer for terminfo database source.
-
- This is very simple and minimal.
-
- .. versionadded:: 2.1
- """
- name = 'Terminfo'
- aliases = ['terminfo']
- filenames = ['terminfo', 'terminfo.src']
- mimetypes = []
-
- # NOTE:
- # * multiline with leading whitespace
- # * separator is ','
- # * to embed comma as data, we can use \,
- # * space after separator is allowed
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
- (r'^[^\s#,|]+', Name.Tag, 'names'),
- (r'\s+', Whitespace),
- ],
- 'names': [
- (r'\n', Whitespace, '#pop'),
- (r'(,)([ \t]*)', bygroups(Punctuation, Whitespace), 'defs'),
- (r'\|', Punctuation),
- (r'[^,|]+', Name.Attribute),
- ],
- 'defs': [
- (r'\n[ \t]+', Whitespace),
- (r'\n', Whitespace, '#pop:2'),
- (r'(#)([0-9]+)', bygroups(Operator, Number)),
- (r'=', Operator, 'data'),
- (r'(,)([ \t]*)', bygroups(Punctuation, Whitespace)),
- (r'[^\s,=#]+', Name.Class),
- ],
- 'data': [
- (r'\\[,\\]', Literal),
- (r'(,)([ \t]*)', bygroups(Punctuation, Whitespace), '#pop'),
- (r'[^\\,]+', Literal), # for performance
- (r'.', Literal),
- ],
- }
-
-
-class PkgConfigLexer(RegexLexer):
- """
- Lexer for pkg-config
- (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
-
- .. versionadded:: 2.1
- """
-
- name = 'PkgConfig'
- url = 'http://www.freedesktop.org/wiki/Software/pkg-config/'
- aliases = ['pkgconfig']
- filenames = ['*.pc']
- mimetypes = []
-
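- # A minimal illustrative .pc file exercising the rules below (the package
- # name and paths are made up):
- #
- #   prefix=/usr
- #   libdir=${prefix}/lib
- #
- #   Name: example
- #   Description: An example library
- #   Version: 1.0
- #   Libs: -L${libdir} -lexample
- #   Cflags: -I${prefix}/include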
- tokens = {
- 'root': [
- (r'#.*$', Comment.Single),
-
- # variable definitions
- (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),
-
- # keyword lines
- (r'^([\w.]+)(:)',
- bygroups(Name.Tag, Punctuation), 'spvalue'),
-
- # variable references
- include('interp'),
-
- # fallback
- (r'\s+', Whitespace),
- (r'[^${}#=:\n.]+', Text),
- (r'.', Text),
- ],
- 'interp': [
- # you can escape literal "$" as "$$"
- (r'\$\$', Text),
-
- # variable references
- (r'\$\{', String.Interpol, 'curly'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r'\w+', Name.Attribute),
- ],
- 'spvalue': [
- include('interp'),
-
- (r'#.*$', Comment.Single, '#pop'),
- (r'\n', Whitespace, '#pop'),
-
- # fallback
- (r'\s+', Whitespace),
- (r'[^${}#\n\s]+', Text),
- (r'.', Text),
- ],
- }
-
-
-class PacmanConfLexer(RegexLexer):
- """
- Lexer for pacman.conf.
-
- Actually, IniLexer works almost fine for this format,
- but it yields error tokens, because pacman.conf has
- a form without assignment, like:
-
- UseSyslog
- Color
- TotalDownload
- CheckSpace
- VerbosePkgLists
-
- These are flags that are simply switched on.
-
- .. versionadded:: 2.1
- """
-
- name = 'PacmanConf'
- url = 'https://www.archlinux.org/pacman/pacman.conf.5.html'
- aliases = ['pacmanconf']
- filenames = ['pacman.conf']
- mimetypes = []
-
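- # A pacman.conf fragment combining the three forms handled below
- # (section header, assignment, and bare flag); values are illustrative:
- #
- #   [options]
- #   HoldPkg = pacman glibc
- #   Color
- #   CheckSpace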
- tokens = {
- 'root': [
- # comment
- (r'#.*$', Comment.Single),
-
- # section header
- (r'^(\s*)(\[.*?\])(\s*)$', bygroups(Whitespace, Keyword, Whitespace)),
-
- # variable definitions
- # (Leading space is allowed...)
- (r'(\w+)(\s*)(=)',
- bygroups(Name.Attribute, Whitespace, Operator)),
-
- # flags to on
- (r'^(\s*)(\w+)(\s*)$',
- bygroups(Whitespace, Name.Attribute, Whitespace)),
-
- # built-in special values
- (words((
- '$repo', # repository
- '$arch', # architecture
- '%o', # outfile
- '%u', # url
- ), suffix=r'\b'),
- Name.Variable),
-
- # fallback
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- }
-
-
-class AugeasLexer(RegexLexer):
- """
- Lexer for Augeas.
-
- .. versionadded:: 2.4
- """
- name = 'Augeas'
- url = 'http://augeas.net'
- aliases = ['augeas']
- filenames = ['*.aug']
-
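- # A tiny illustrative Augeas lens fragment of the kind this lexer is
- # aimed at (the module, variable names and regexps are made up):
- #
- #   module Example =
- #     let sep   = del /[ \t]*=[ \t]*/ "="
- #     let entry = [ key /[a-z]+/ . sep . store /[^\n]*/ ]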
- tokens = {
- 'root': [
- (r'(module)(\s*)([^\s=]+)', bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- (r'(let)(\s*)([^\s=]+)', bygroups(Keyword.Declaration, Whitespace, Name.Variable)),
- (r'(del|store|value|counter|seq|key|label|autoload|incl|excl|transform|test|get|put)(\s+)', bygroups(Name.Builtin, Whitespace)),
- (r'(\()([^:]+)(\:)(unit|string|regexp|lens|tree|filter)(\))', bygroups(Punctuation, Name.Variable, Punctuation, Keyword.Type, Punctuation)),
- (r'\(\*', Comment.Multiline, 'comment'),
- (r'[*+\-.;=?|]', Operator),
- (r'[()\[\]{}]', Operator),
- (r'"', String.Double, 'string'),
- (r'\/', String.Regex, 'regex'),
- (r'([A-Z]\w*)(\.)(\w+)', bygroups(Name.Namespace, Punctuation, Name.Variable)),
- (r'.', Name.Variable),
- (r'\s+', Whitespace),
- ],
- 'string': [
- (r'\\.', String.Escape),
- (r'[^"]', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'regex': [
- (r'\\.', String.Escape),
- (r'[^/]', String.Regex),
- (r'\/', String.Regex, '#pop'),
- ],
- 'comment': [
- (r'[^*)]', Comment.Multiline),
- (r'\(\*', Comment.Multiline, '#push'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[)*]', Comment.Multiline)
- ],
- }
-
-
-class TOMLLexer(RegexLexer):
- """
- Lexer for TOML, a simple language for config files.
-
- .. versionadded:: 2.4
- """
-
- name = 'TOML'
- aliases = ['toml']
- filenames = ['*.toml', 'Pipfile', 'poetry.lock']
- mimetypes = ['application/toml']
- url = 'https://toml.io'
-
- # Based on the TOML spec: https://toml.io/en/v1.0.0
-
- # The following is adapted from CPython's tomllib:
- _time = r"\d\d:\d\d:\d\d(\.\d+)?"
- _datetime = rf"""(?x)
- \d\d\d\d-\d\d-\d\d # date, e.g., 1988-10-27
- (
- [Tt ] {_time} # optional time
- (
- [Zz]|[+-]\d\d:\d\d # optional time offset
- )?
- )?
- """
-
- tokens = {
- 'root': [
- # Note that we make an effort to distinguish the moments
- # at which we're parsing a key from those at which we're
- # parsing a value. In the TOML code
- #
- # 1234 = 1234
- #
- # the first "1234" should be Name, the second Integer.
-
- # Whitespace
- (r'\s+', Whitespace),
-
- # Comment
- (r'#.*', Comment.Single),
-
- # Assignment keys
- include('key'),
-
- # After "=", find a value
- (r'(=)(\s*)', bygroups(Operator, Whitespace), 'value'),
-
- # Table header
- (r'\[\[?', Keyword, 'table-key'),
- ],
- 'key': [
- # Start of bare key (only ASCII is allowed here).
- (r'[A-Za-z0-9_-]+', Name),
- # Quoted key
- (r'"', String.Double, 'basic-string'),
- (r"'", String.Single, 'literal-string'),
- # Dots act as separators in keys
- (r'\.', Punctuation),
- ],
- 'table-key': [
- # This is like 'key', but highlights the name components
- # and separating dots as Keyword because it looks better
- # when the whole table header is Keyword. We do highlight
- # strings as strings though.
- # Start of bare key (only ASCII is allowed here).
- (r'[A-Za-z0-9_-]+', Keyword),
- (r'"', String.Double, 'basic-string'),
- (r"'", String.Single, 'literal-string'),
- (r'\.', Keyword),
- (r'\]\]?', Keyword, '#pop'),
-
- # Inline whitespace allowed
- (r'[ \t]+', Whitespace),
- ],
- 'value': [
- # Datetime, baretime
- (_datetime, Literal.Date, '#pop'),
- (_time, Literal.Date, '#pop'),
-
- # Recognize as float if there is a fractional part
- # and/or an exponent.
- (r'[+-]?\d[0-9_]*[eE][+-]?\d[0-9_]*', Number.Float, '#pop'),
- (r'[+-]?\d[0-9_]*\.\d[0-9_]*([eE][+-]?\d[0-9_]*)?',
- Number.Float, '#pop'),
-
- # Infinities and NaN
- (r'[+-]?(inf|nan)', Number.Float, '#pop'),
-
- # Integers
- (r'-?0b[01_]+', Number.Bin, '#pop'),
- (r'-?0o[0-7_]+', Number.Oct, '#pop'),
- (r'-?0x[0-9a-fA-F_]+', Number.Hex, '#pop'),
- (r'[+-]?[0-9_]+', Number.Integer, '#pop'),
-
- # Strings
- (r'"""', String.Double, ('#pop', 'multiline-basic-string')),
- (r'"', String.Double, ('#pop', 'basic-string')),
- (r"'''", String.Single, ('#pop', 'multiline-literal-string')),
- (r"'", String.Single, ('#pop', 'literal-string')),
-
- # Booleans
- (r'true|false', Keyword.Constant, '#pop'),
-
- # Start of array
- (r'\[', Punctuation, ('#pop', 'array')),
-
- # Start of inline table
- (r'\{', Punctuation, ('#pop', 'inline-table')),
- ],
- 'array': [
- # Whitespace, including newlines, is ignored inside arrays,
- # and comments are allowed.
- (r'\s+', Whitespace),
- (r'#.*', Comment.Single),
-
- # Delimiters
- (r',', Punctuation),
-
- # End of array
- (r'\]', Punctuation, '#pop'),
-
- # Parse a value and come back
- default('value'),
- ],
- 'inline-table': [
- # Note that unlike inline arrays, inline tables do not
- # allow newlines or comments.
- (r'[ \t]+', Whitespace),
-
- # Keys
- include('key'),
-
- # Values
- (r'(=)(\s*)', bygroups(Punctuation, Whitespace), 'value'),
-
- # Delimiters
- (r',', Punctuation),
-
- # End of inline table
- (r'\}', Punctuation, '#pop'),
- ],
- 'basic-string': [
- (r'"', String.Double, '#pop'),
- include('escapes'),
- (r'[^"\\]+', String.Double),
- ],
- 'literal-string': [
- (r".*?'", String.Single, '#pop'),
- ],
- 'multiline-basic-string': [
- (r'"""', String.Double, '#pop'),
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)),
- include('escapes'),
- (r'[^"\\]+', String.Double),
- (r'"', String.Double),
- ],
- 'multiline-literal-string': [
- (r"'''", String.Single, '#pop'),
- (r"[^']+", String.Single),
- (r"'", String.Single),
- ],
- 'escapes': [
- (r'\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}', String.Escape),
- (r'\\.', String.Escape),
- ],
- }
-
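- # Usage sketch (not part of the original module): any lexer defined in this
- # file can be driven through the public Pygments API, e.g. for the TOML
- # lexer above:
- #
- #     from pygments import highlight
- #     from pygments.formatters import TerminalFormatter
- #
- #     sample = 'title = "example"\n[owner]\nname = "Tom"\n'
- #     print(highlight(sample, TOMLLexer(), TerminalFormatter()))
-
-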
-class NestedTextLexer(RegexLexer):
- """
- Lexer for *NestedText*, a human-friendly data format.
-
- .. versionadded:: 2.9
-
- .. versionchanged:: 2.16
- Added support for *NestedText* v3.0.
- """
-
- name = 'NestedText'
- url = 'https://nestedtext.org'
- aliases = ['nestedtext', 'nt']
- filenames = ['*.nt']
-
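- # A small illustrative NestedText document of the kind matched below
- # (the names and values are made up):
- #
- #   # contact entry
- #   name: Katheryn McDaniel
- #   phone:
- #       cell: 1-210-555-5297
- #   addresses:
- #       - 126 Avenue C
- #       - Burlington, VT 05401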
- tokens = {
- 'root': [
- # Comment: # ...
- (r'^([ ]*)(#.*)$', bygroups(Whitespace, Comment)),
-
- # Inline dictionary: {...}
- (r'^([ ]*)(\{)', bygroups(Whitespace, Punctuation), 'inline_dict'),
-
- # Inline list: [...]
- (r'^([ ]*)(\[)', bygroups(Whitespace, Punctuation), 'inline_list'),
-
- # empty multiline string item: >
- (r'^([ ]*)(>)$', bygroups(Whitespace, Punctuation)),
-
- # multiline string item: > ...
- (r'^([ ]*)(>)( )(.*?)([ \t]*)$', bygroups(Whitespace, Punctuation, Whitespace, Text, Whitespace)),
-
- # empty list item: -
- (r'^([ ]*)(-)$', bygroups(Whitespace, Punctuation)),
-
- # list item: - ...
- (r'^([ ]*)(-)( )(.*?)([ \t]*)$', bygroups(Whitespace, Punctuation, Whitespace, Text, Whitespace)),
-
- # empty multiline key item: :
- (r'^([ ]*)(:)$', bygroups(Whitespace, Punctuation)),
-
- # multiline key item: : ...
- (r'^([ ]*)(:)( )([^\n]*?)([ \t]*)$', bygroups(Whitespace, Punctuation, Whitespace, Name.Tag, Whitespace)),
-
- # empty dict key item: ...:
- (r'^([ ]*)([^\{\[\s].*?)(:)$', bygroups(Whitespace, Name.Tag, Punctuation)),
-
- # dict key item: ...: ...
- (r'^([ ]*)([^\{\[\s].*?)(:)( )(.*?)([ \t]*)$', bygroups(Whitespace, Name.Tag, Punctuation, Whitespace, Text, Whitespace)),
- ],
- 'inline_list': [
- include('whitespace'),
- (r'[^\{\}\[\],\s]+', Text),
- include('inline_value'),
- (r',', Punctuation),
- (r'\]', Punctuation, '#pop'),
- (r'\n', Error, '#pop'),
- ],
- 'inline_dict': [
- include('whitespace'),
- (r'[^\{\}\[\],:\s]+', Name.Tag),
- (r':', Punctuation, 'inline_dict_value'),
- (r'\}', Punctuation, '#pop'),
- (r'\n', Error, '#pop'),
- ],
- 'inline_dict_value': [
- include('whitespace'),
- (r'[^\{\}\[\],:\s]+', Text),
- include('inline_value'),
- (r',', Punctuation, '#pop'),
- (r'\}', Punctuation, '#pop:2'),
- ],
- 'inline_value': [
- include('whitespace'),
- (r'\{', Punctuation, 'inline_dict'),
- (r'\[', Punctuation, 'inline_list'),
- ],
- 'whitespace': [
- (r'[ \t]+', Whitespace),
- ],
- }
-
-
-class SingularityLexer(RegexLexer):
- """
- Lexer for Singularity definition files.
-
- .. versionadded:: 2.6
- """
-
- name = 'Singularity'
- url = 'https://www.sylabs.io/guides/3.0/user-guide/definition_files.html'
- aliases = ['singularity']
- filenames = ['*.def', 'Singularity']
- flags = re.IGNORECASE | re.MULTILINE | re.DOTALL
-
- _headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
- _section = r'^(%(?:pre|post|setup|environment|help|labels|test|runscript|files|startscript))(\s*)'
- _appsect = r'^(%app(?:install|help|run|labels|env|test|files))(\s*)'
-
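- # A minimal illustrative definition file (the bootstrap agent, image and
- # commands are arbitrary examples):
- #
- #   Bootstrap: docker
- #   From: alpine:latest
- #
- #   %post
- #       apk add --no-cache python3
- #
- #   %runscript
- #       exec python3 "$@"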
- tokens = {
- 'root': [
- (_section, bygroups(Generic.Heading, Whitespace), 'script'),
- (_appsect, bygroups(Generic.Heading, Whitespace), 'script'),
- (_headers, bygroups(Whitespace, Keyword, Text)),
- (r'\s*#.*?\n', Comment),
- (r'\b(([0-9]+\.?[0-9]*)|(\.[0-9]+))\b', Number),
- (r'[ \t]+', Whitespace),
- (r'(?!^\s*%).', Text),
- ],
- 'script': [
- (r'(.+?(?=^\s*%))|(.*)', using(BashLexer), '#pop'),
- ],
- }
-
- def analyse_text(text):
- """This is a quite simple script file, but there are a few keywords
- which seem unique to this language."""
- result = 0
- if re.search(r'\b(?:osversion|includecmd|mirrorurl)\b', text, re.IGNORECASE):
- result += 0.5
-
- if re.search(SingularityLexer._section[1:], text):
- result += 0.49
-
- return result
-
-
-class UnixConfigLexer(RegexLexer):
- """
- Lexer for Unix/Linux config files using colon-separated values, e.g.
-
- * ``/etc/group``
- * ``/etc/passwd``
- * ``/etc/shadow``
-
- .. versionadded:: 2.12
- """
-
- name = 'Unix/Linux config files'
- aliases = ['unixconfig', 'linuxconfig']
- filenames = []
-
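- # e.g. a typical /etc/passwd line of the shape handled below:
- #
- #   root:x:0:0:root:/root:/bin/bash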
- tokens = {
- 'root': [
- (r'^#.*', Comment),
- (r'\n', Whitespace),
- (r':', Punctuation),
- (r'[0-9]+', Number),
- (r'((?!\n)[a-zA-Z0-9\_\-\s\(\),]){2,}', Text),
- (r'[^:\n]+', String),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/console.py b/venv/lib/python3.11/site-packages/pygments/lexers/console.py
deleted file mode 100644
index ac498d5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/console.py
+++ /dev/null
@@ -1,114 +0,0 @@
-"""
- pygments.lexers.console
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for misc console output.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Generic, Comment, String, Text, Keyword, Name, \
- Punctuation, Number, Whitespace
-
-__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer']
-
-
-class VCTreeStatusLexer(RegexLexer):
- """
- For colorizing output of version control status commands, like "hg
- status" or "svn status".
-
- .. versionadded:: 2.0
- """
- name = 'VCTreeStatus'
- aliases = ['vctreestatus']
- filenames = []
- mimetypes = []
-
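- # Typical "svn status" output lines of the kind matched below (the paths
- # are made up):
- #
- #   M       src/main.c
- #   A  +    docs/README
- #   ?       build/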
- tokens = {
- 'root': [
- (r'^A \+ C\s+', Generic.Error),
- (r'^A\s+\+?\s+', String),
- (r'^M\s+', Generic.Inserted),
- (r'^C\s+', Generic.Error),
- (r'^D\s+', Generic.Deleted),
- (r'^[?!]\s+', Comment.Preproc),
- (r' >\s+.*\n', Comment.Preproc),
- (r'\S+', Text),
- (r'\s+', Whitespace),
- ]
- }
-
-
-class PyPyLogLexer(RegexLexer):
- """
- Lexer for PyPy log files.
-
- .. versionadded:: 1.5
- """
- name = "PyPy Log"
- aliases = ["pypylog", "pypy"]
- filenames = ["*.pypylog"]
- mimetypes = ['application/x-pypylog']
-
- tokens = {
- "root": [
- (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"),
- (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"),
- include("extra-stuff"),
- ],
- "jit-log": [
- (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
- (r"^\+\d+: ", Comment),
- (r"--end of the loop--", Comment),
- (r"[ifp]\d+", Name),
- (r"ptr\d+", Name),
- (r"(\()(\w+(?:\.\w+)?)(\))",
- bygroups(Punctuation, Name.Builtin, Punctuation)),
- (r"[\[\]=,()]", Punctuation),
- (r"(\d+\.\d+|inf|-inf)", Number.Float),
- (r"-?\d+", Number.Integer),
- (r"'.*'", String),
- (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
- (r"<.*?>+", Name.Builtin),
- (r"(label|debug_merge_point|jump|finish)", Name.Class),
- (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
- r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
- r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
- r"int_is_true|"
- r"uint_floordiv|uint_ge|uint_lt|"
- r"float_add|float_sub|float_mul|float_truediv|float_neg|"
- r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
- r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
- r"cast_int_to_float|cast_float_to_int|"
- r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
- r"virtual_ref|mark_opaque_ptr|"
- r"call_may_force|call_assembler|call_loopinvariant|"
- r"call_release_gil|call_pure|call|"
- r"new_with_vtable|new_array|newstr|newunicode|new|"
- r"arraylen_gc|"
- r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
- r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
- r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
- r"getfield_raw|setfield_gc|setfield_raw|"
- r"strgetitem|strsetitem|strlen|copystrcontent|"
- r"unicodegetitem|unicodesetitem|unicodelen|"
- r"guard_true|guard_false|guard_value|guard_isnull|"
- r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
- r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
- Name.Builtin),
- include("extra-stuff"),
- ],
- "jit-backend-counts": [
- (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
- (r":", Punctuation),
- (r"\d+", Number),
- include("extra-stuff"),
- ],
- "extra-stuff": [
- (r"\s+", Whitespace),
- (r"#.*?$", Comment),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/cplint.py b/venv/lib/python3.11/site-packages/pygments/lexers/cplint.py
deleted file mode 100644
index 8a48c01..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/cplint.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
- pygments.lexers.cplint
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the cplint language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import bygroups, inherit, words
-from pygments.lexers import PrologLexer
-from pygments.token import Operator, Keyword, Name, String, Punctuation
-
-__all__ = ['CplintLexer']
-
-
-class CplintLexer(PrologLexer):
- """
- Lexer for cplint files, including CP-logic, Logic Programs with Annotated
- Disjunctions, Distributional Clauses syntax, ProbLog, DTProbLog.
-
- .. versionadded:: 2.12
- """
- name = 'cplint'
- url = 'https://cplint.eu'
- aliases = ['cplint']
- filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl', '*.P', '*.lpad', '*.cpl']
- mimetypes = ['text/x-cplint']
-
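- # An illustrative LPAD clause with an annotated disjunction, the kind of
- # probabilistic syntax this lexer adds on top of PrologLexer:
- #
- #   heads(Coin):0.5 ; tails(Coin):0.5 :- toss(Coin).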
- tokens = {
- 'root': [
- (r'map_query', Keyword),
- (words(('gaussian', 'uniform_dens', 'dirichlet', 'gamma', 'beta',
- 'poisson', 'binomial', 'geometric', 'exponential', 'pascal',
- 'multinomial', 'user', 'val', 'uniform', 'discrete',
- 'finite')), Name.Builtin),
- # annotations of atoms
- (r'([a-z]+)(:)', bygroups(String.Atom, Punctuation)),
- (r':(-|=)|::?|~=?|=>', Operator),
- (r'\?', Name.Builtin),
- inherit,
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/crystal.py b/venv/lib/python3.11/site-packages/pygments/lexers/crystal.py
deleted file mode 100644
index e4df8b1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/crystal.py
+++ /dev/null
@@ -1,365 +0,0 @@
-"""
- pygments.lexers.crystal
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Crystal.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, include, bygroups, default, \
- words, line_re
-from pygments.token import Comment, Operator, Keyword, Name, String, Number, \
- Punctuation, Error, Whitespace
-
-__all__ = ['CrystalLexer']
-
-
-CRYSTAL_OPERATORS = [
- '!=', '!~', '!', '%', '&&', '&', '**', '*', '+', '-', '/', '<=>', '<<', '<=', '<',
- '===', '==', '=~', '=', '>=', '>>', '>', '[]=', '[]?', '[]', '^', '||', '|', '~'
-]
-
-
-class CrystalLexer(ExtendedRegexLexer):
- """
- For Crystal source code.
-
- .. versionadded:: 2.2
- """
-
- name = 'Crystal'
- url = 'https://crystal-lang.org'
- aliases = ['cr', 'crystal']
- filenames = ['*.cr']
- mimetypes = ['text/x-crystal']
-
- flags = re.DOTALL | re.MULTILINE
-
- def heredoc_callback(self, match, ctx):
- # okay, this is the hardest part of parsing Crystal...
- # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
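- # e.g. for input like
- #
- #   text = <<-TEXT
- #     hello
- #     TEXT
- #
- # the "<<-TEXT" opener is tokenized immediately, while the body lines are
- # only emitted later, when the outermost heredoc scans ahead for "TEXT".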
-
- start = match.start(1)
- yield start, Operator, match.group(1) # <<-?
- yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
- yield match.start(3), String.Delimiter, match.group(3) # heredoc name
- yield match.start(4), String.Heredoc, match.group(4) # quote again
-
- heredocstack = ctx.__dict__.setdefault('heredocstack', [])
- outermost = not bool(heredocstack)
- heredocstack.append((match.group(1) == '<<-', match.group(3)))
-
- ctx.pos = match.start(5)
- ctx.end = match.end(5)
- # this may find other heredocs, so limit the recursion depth
- if len(heredocstack) < 100:
- yield from self.get_tokens_unprocessed(context=ctx)
- else:
- yield ctx.pos, String.Heredoc, match.group(5)
- ctx.pos = match.end()
-
- if outermost:
- # this is the outer heredoc again, now we can process them all
- for tolerant, hdname in heredocstack:
- lines = []
- for match in line_re.finditer(ctx.text, ctx.pos):
- if tolerant:
- check = match.group().strip()
- else:
- check = match.group().rstrip()
- if check == hdname:
- for amatch in lines:
- yield amatch.start(), String.Heredoc, amatch.group()
- yield match.start(), String.Delimiter, match.group()
- ctx.pos = match.end()
- break
- else:
- lines.append(match)
- else:
- # end of heredoc not found -- error!
- for amatch in lines:
- yield amatch.start(), Error, amatch.group()
- ctx.end = len(ctx.text)
- del heredocstack[:]
-
- def gen_crystalstrings_rules():
- states = {}
- states['strings'] = [
- (r'\:\w+[!?]?', String.Symbol),
- (words(CRYSTAL_OPERATORS, prefix=r'\:'), String.Symbol),
- (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
- # This allows arbitrary text after '\ for simplicity
- (r"'(\\\\|\\'|[^']|\\[^'\\]+)'", String.Char),
- (r':"', String.Symbol, 'simple-sym'),
- # Crystal doesn't have "symbol:"s but this simplifies function args
- (r'([a-zA-Z_]\w*)(:)(?!:)', bygroups(String.Symbol, Punctuation)),
- (r'"', String.Double, 'simple-string'),
- (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
- ]
-
- # double-quoted string and symbol
- for name, ttype, end in ('string', String.Double, '"'), \
- ('sym', String.Symbol, '"'), \
- ('backtick', String.Backtick, '`'):
- states['simple-'+name] = [
- include('string-escaped' if name == 'sym' else 'string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
- (r'[\\#]', ttype),
- (end, ttype, '#pop'),
- ]
-
- # https://crystal-lang.org/docs/syntax_and_semantics/literals/string.html#percent-string-literals
- for lbrace, rbrace, bracecc, name in \
- ('\\{', '\\}', '{}', 'cb'), \
- ('\\[', '\\]', '\\[\\]', 'sb'), \
- ('\\(', '\\)', '()', 'pa'), \
- ('<', '>', '<>', 'ab'), \
- ('\\|', '\\|', '\\|', 'pi'):
- states[name+'-intp-string'] = [
- (r'\\' + lbrace, String.Other),
- ] + (lbrace != rbrace) * [
- (lbrace, String.Other, '#push'),
- ] + [
- (rbrace, String.Other, '#pop'),
- include('string-intp-escaped'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- states['strings'].append((r'%Q?' + lbrace, String.Other,
- name+'-intp-string'))
- states[name+'-string'] = [
- (r'\\[\\' + bracecc + ']', String.Other),
- ] + (lbrace != rbrace) * [
- (lbrace, String.Other, '#push'),
- ] + [
- (rbrace, String.Other, '#pop'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- # https://crystal-lang.org/docs/syntax_and_semantics/literals/array.html#percent-array-literals
- states['strings'].append((r'%[qwi]' + lbrace, String.Other,
- name+'-string'))
- states[name+'-regex'] = [
- (r'\\[\\' + bracecc + ']', String.Regex),
- ] + (lbrace != rbrace) * [
- (lbrace, String.Regex, '#push'),
- ] + [
- (rbrace + '[imsx]*', String.Regex, '#pop'),
- include('string-intp'),
- (r'[\\#' + bracecc + ']', String.Regex),
- (r'[^\\#' + bracecc + ']+', String.Regex),
- ]
- states['strings'].append((r'%r' + lbrace, String.Regex,
- name+'-regex'))
-
- return states
-
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- # keywords
- (words('''
- abstract asm begin break case do else elsif end ensure extend if in
- include next of private protected require rescue return select self super
- then unless until when while with yield
- '''.split(), suffix=r'\b'), Keyword),
- (words('''
- previous_def forall out uninitialized __DIR__ __FILE__ __LINE__
- __END_LINE__
- '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
- # https://crystal-lang.org/docs/syntax_and_semantics/is_a.html
- (r'\.(is_a\?|nil\?|responds_to\?|as\?|as\b)', Keyword.Pseudo),
- (words(['true', 'false', 'nil'], suffix=r'\b'), Keyword.Constant),
- # start of function, class and module names
- (r'(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
- bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)',
- bygroups(Keyword, Whitespace, Name.Namespace), 'funcname'),
- (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
- (r'(annotation|class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)',
- bygroups(Keyword, Whitespace, Name.Namespace), 'classname'),
- # https://crystal-lang.org/api/toplevel.html
- (words('''
- instance_sizeof offsetof pointerof sizeof typeof
- '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
- # macros
- (r'(?<!\.)(debugger\b|p!|pp!|record\b|spawn\b)', Name.Builtin.Pseudo),
- # builtins
- (words('''
- abort at_exit caller exit gets loop main p pp print printf puts
- raise rand read_line sleep spawn sprintf system
- '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Name.Builtin),
- # https://crystal-lang.org/api/Object.html#macro-summary
- (r'(?<!\.)(((class_)?((getter|property)\b[!?]?|setter\b))|'
- r'(def_(clone|equals|equals_and_hash|hash)|delegate|forward_missing_to)\b)',
- Name.Builtin.Pseudo),
- # normal heredocs
- (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
- heredoc_callback),
- # empty string heredocs
- (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
- (r'__END__', Comment.Preproc, 'end-part'),
- # multiline regex (after keywords or assignments)
- (r'(?:^|(?<=[=<>~!:])|'
- r'(?<=(?:\s|;)when\s)|'
- r'(?<=(?:\s|;)or\s)|'
- r'(?<=(?:\s|;)and\s)|'
- r'(?<=\.index\s)|'
- r'(?<=\.scan\s)|'
- r'(?<=\.sub\s)|'
- r'(?<=\.sub!\s)|'
- r'(?<=\.gsub\s)|'
- r'(?<=\.gsub!\s)|'
- r'(?<=\.match\s)|'
- r'(?<=(?:\s|;)if\s)|'
- r'(?<=(?:\s|;)elsif\s)|'
- r'(?<=^when\s)|'
- r'(?<=^index\s)|'
- r'(?<=^scan\s)|'
- r'(?<=^sub\s)|'
- r'(?<=^gsub\s)|'
- r'(?<=^sub!\s)|'
- r'(?<=^gsub!\s)|'
- r'(?<=^match\s)|'
- r'(?<=^if\s)|'
- r'(?<=^elsif\s)'
- r')(\s*)(/)', bygroups(Whitespace, String.Regex), 'multiline-regex'),
- # multiline regex (in method calls or subscripts)
- (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
- # multiline regex (this time the funny no whitespace rule)
- (r'(\s+)(/)(?![\s=])', bygroups(Whitespace, String.Regex),
- 'multiline-regex'),
- # lex numbers and ignore any regular expression that follows,
- # because a slash in that position is in fact a division
- # operator.
- # Since Pygments 0.7 we also eat a "?" operator after numbers
- # so that it is not mistaken for the char operator; chars are not
- # allowed there, which keeps the ternary operator usable.
- # stupid example:
- # x>=0?n[x]:""
- (r'(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
- bygroups(Number.Oct, Whitespace, Operator)),
- (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
- bygroups(Number.Hex, Whitespace, Operator)),
- (r'(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
- bygroups(Number.Bin, Whitespace, Operator)),
- # 3 separate expressions for floats because any of the 3 optional
- # parts makes it a float
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?'
- r'(?:_?f[0-9]+)?)(\s*)([/?])?',
- bygroups(Number.Float, Whitespace, Operator)),
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)'
- r'(?:_?f[0-9]+)?)(\s*)([/?])?',
- bygroups(Number.Float, Whitespace, Operator)),
- (r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?'
- r'(?:_?f[0-9]+))(\s*)([/?])?',
- bygroups(Number.Float, Whitespace, Operator)),
- (r'(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
- bygroups(Number.Integer, Whitespace, Operator)),
- # Names
- (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
- (r'\$\w+', Name.Variable.Global),
- (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
- (r'\$-[0adFiIlpvw]', Name.Variable.Global),
- (r'::', Operator),
- include('strings'),
- # https://crystal-lang.org/reference/syntax_and_semantics/literals/char.html
- (r'\?(\\[MC]-)*' # modifiers
- r'(\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})|\S)'
- r'(?!\w)',
- String.Char),
- (r'[A-Z][A-Z_]+\b(?!::|\.)', Name.Constant),
- # macro expansion
- (r'\{%', String.Interpol, 'in-macro-control'),
- (r'\{\{', String.Interpol, 'in-macro-expr'),
- # annotations
- (r'(@\[)(\s*)([A-Z]\w*(::[A-Z]\w*)*)',
- bygroups(Operator, Whitespace, Name.Decorator), 'in-annot'),
- # this is needed because Crystal attributes can look
- # like keywords (class) or like this: ` ?!?
- (words(CRYSTAL_OPERATORS, prefix=r'(\.|::)'),
- bygroups(Operator, Name.Operator)),
- (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
- bygroups(Operator, Name)),
- # Names can end with [!?] unless it's "!="
- (r'[a-zA-Z_]\w*(?:[!?](?!=))?', Name),
- (r'(\[|\]\??|\*\*|<=>?|>=|<<?|>>?|=~|===|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&!^|~]=?', Operator),
- (r'[(){};,/?:\\]', Punctuation),
- (r'\s+', Whitespace)
- ],
- 'funcname': [
- (r'(?:([a-zA-Z_]\w*)(\.))?'
- r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
- r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
- bygroups(Name.Class, Operator, Name.Function), '#pop'),
- default('#pop')
- ],
- 'classname': [
- (r'[A-Z_]\w*', Name.Class),
- (r'(\()(\s*)([A-Z_]\w*)(\s*)(\))',
- bygroups(Punctuation, Whitespace, Name.Class, Whitespace, Punctuation)),
- default('#pop')
- ],
- 'in-intp': [
- (r'\{', String.Interpol, '#push'),
- (r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- 'string-intp': [
- (r'#\{', String.Interpol, 'in-intp'),
- ],
- 'string-escaped': [
- # https://crystal-lang.org/reference/syntax_and_semantics/literals/string.html
- (r'\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})',
- String.Escape)
- ],
- 'string-intp-escaped': [
- include('string-intp'),
- include('string-escaped'),
- ],
- 'interpolated-regex': [
- include('string-intp'),
- (r'[\\#]', String.Regex),
- (r'[^\\#]+', String.Regex),
- ],
- 'interpolated-string': [
- include('string-intp'),
- (r'[\\#]', String.Other),
- (r'[^\\#]+', String.Other),
- ],
- 'multiline-regex': [
- include('string-intp'),
- (r'\\\\', String.Regex),
- (r'\\/', String.Regex),
- (r'[\\#]', String.Regex),
- (r'[^\\/#]+', String.Regex),
- (r'/[imsx]*', String.Regex, '#pop'),
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ],
- 'in-macro-control': [
- (r'\{%', String.Interpol, '#push'),
- (r'%\}', String.Interpol, '#pop'),
- (r'(for|verbatim)\b', Keyword),
- include('root'),
- ],
- 'in-macro-expr': [
- (r'\{\{', String.Interpol, '#push'),
- (r'\}\}', String.Interpol, '#pop'),
- include('root'),
- ],
- 'in-annot': [
- (r'\[', Operator, '#push'),
- (r'\]', Operator, '#pop'),
- include('root'),
- ],
- }
- tokens.update(gen_crystalstrings_rules())
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/csound.py b/venv/lib/python3.11/site-packages/pygments/lexers/csound.py
deleted file mode 100644
index 64f03cf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/csound.py
+++ /dev/null
@@ -1,468 +0,0 @@
-"""
- pygments.lexers.csound
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Csound languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default, include, using, words
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \
- String, Text, Whitespace
-from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES, REMOVED_OPCODES
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.scripting import LuaLexer
-
-__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer', 'CsoundDocumentLexer']
-
-newline = (r'((?:(?:;|//).*)*)(\n)', bygroups(Comment.Single, Text))
-
-
-class CsoundLexer(RegexLexer):
- url = 'https://csound.com/'
-
- tokens = {
- 'whitespace': [
- (r'[ \t]+', Whitespace),
- (r'/[*](?:.|\n)*?[*]/', Comment.Multiline),
- (r'(?:;|//).*$', Comment.Single),
- (r'(\\)(\n)', bygroups(Text, Whitespace))
- ],
-
- 'preprocessor directives': [
- (r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc),
- (r'#includestr', Comment.Preproc, 'includestr directive'),
- (r'#include', Comment.Preproc, 'include directive'),
- (r'#[ \t]*define', Comment.Preproc, 'define directive'),
- (r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive')
- ],
-
- 'include directive': [
- include('whitespace'),
- (r'([^ \t]).*?\1', String, '#pop')
- ],
- 'includestr directive': [
- include('whitespace'),
- (r'"', String, ('#pop', 'quoted string'))
- ],
-
- 'define directive': [
- (r'\n', Whitespace),
- include('whitespace'),
- (r'([A-Z_a-z]\w*)(\()', bygroups(Comment.Preproc, Punctuation),
- ('#pop', 'macro parameter name list')),
- (r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'before macro body'))
- ],
- 'macro parameter name list': [
- include('whitespace'),
- (r'[A-Z_a-z]\w*', Comment.Preproc),
- (r"['#]", Punctuation),
- (r'\)', Punctuation, ('#pop', 'before macro body'))
- ],
- 'before macro body': [
- (r'\n', Whitespace),
- include('whitespace'),
- (r'#', Punctuation, ('#pop', 'macro body'))
- ],
- 'macro body': [
- (r'(?:\\(?!#)|[^#\\]|\n)+', Comment.Preproc),
- (r'\\#', Comment.Preproc),
- (r'(?<!\\)#', Punctuation, '#pop')
- ],
-
- 'macro directive': [
- include('whitespace'),
- (r'[A-Z_a-z]\w*', Comment.Preproc, '#pop')
- ],
-
- 'macro uses': [
- (r'(\$[A-Z_a-z]\w*\.?)(\()', bygroups(Comment.Preproc, Punctuation),
- 'macro parameter value list'),
- (r'\$[A-Z_a-z]\w*(?:\.|\b)', Comment.Preproc)
- ],
- 'macro parameter value list': [
- (r'(?:[^\'#"{()]|\{(?!\{))+', Comment.Preproc),
- (r"['#]", Punctuation),
- (r'"', String, 'macro parameter value quoted string'),
- (r'\{\{', String, 'macro parameter value braced string'),
- (r'\(', Comment.Preproc, 'macro parameter value parenthetical'),
- (r'\)', Punctuation, '#pop')
- ],
- 'macro parameter value quoted string': [
- (r"\\[#'()]", Comment.Preproc),
- (r"[#'()]", Error),
- include('quoted string')
- ],
- 'macro parameter value braced string': [
- (r"\\[#'()]", Comment.Preproc),
- (r"[#'()]", Error),
- include('braced string')
- ],
- 'macro parameter value parenthetical': [
- (r'(?:[^\\()]|\\\))+', Comment.Preproc),
- (r'\(', Comment.Preproc, '#push'),
- (r'\)', Comment.Preproc, '#pop')
- ],
-
- 'whitespace and macro uses': [
- include('whitespace'),
- include('macro uses')
- ],
-
- 'numbers': [
- (r'\d+[Ee][+-]?\d+|(\d+\.\d*|\d*\.\d+)([Ee][+-]?\d+)?', Number.Float),
- (r'(0[Xx])([0-9A-Fa-f]+)', bygroups(Keyword.Type, Number.Hex)),
- (r'\d+', Number.Integer)
- ],
-
- 'quoted string': [
- (r'"', String, '#pop'),
- (r'[^"$]+', String),
- include('macro uses'),
- (r'[$]', String)
- ],
-
- 'braced string': [
- # Do nothing. This must be defined in subclasses.
- ]
- }
-
-
-class CsoundScoreLexer(CsoundLexer):
- """
- For `Csound <https://csound.com>`_ scores.
-
- .. versionadded:: 2.1
- """
-
- name = 'Csound Score'
- aliases = ['csound-score', 'csound-sco']
- filenames = ['*.sco']
-
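- # A small illustrative score (the instrument number and p-fields are
- # arbitrary):
- #
- #   f 1 0 16384 10 1   ; sine table
- #   i 1 0 2 0.5 440    ; play instr 1 at time 0 for 2 seconds
- #   e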
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- include('whitespace and macro uses'),
- include('preprocessor directives'),
-
- (r'[aBbCdefiqstvxy]', Keyword),
- # There is also a w statement that is generated internally and should not be
- # used; see https://github.com/csound/csound/issues/750.
-
- (r'z', Keyword.Constant),
- # z is a constant equal to 800,000,000,000. 800 billion seconds is about
- # 25,367.8 years. See also
- # https://csound.com/docs/manual/ScoreTop.html and
- # https://github.com/csound/csound/search?q=stof+path%3AEngine+filename%3Asread.c.
-
- (r'([nNpP][pP])(\d+)', bygroups(Keyword, Number.Integer)),
-
- (r'[mn]', Keyword, 'mark statement'),
-
- include('numbers'),
- (r'[!+\-*/^%&|<>#~.]', Operator),
- (r'[()\[\]]', Punctuation),
- (r'"', String, 'quoted string'),
- (r'\{', Comment.Preproc, 'loop after left brace'),
- ],
-
- 'mark statement': [
- include('whitespace and macro uses'),
- (r'[A-Z_a-z]\w*', Name.Label),
- (r'\n', Whitespace, '#pop')
- ],
-
- 'loop after left brace': [
- include('whitespace and macro uses'),
- (r'\d+', Number.Integer, ('#pop', 'loop after repeat count')),
- ],
- 'loop after repeat count': [
- include('whitespace and macro uses'),
- (r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'loop'))
- ],
- 'loop': [
- (r'\}', Comment.Preproc, '#pop'),
- include('root')
- ],
-
- # Braced strings are not allowed in Csound scores, but this is needed because the
- # superclass includes it.
- 'braced string': [
- (r'\}\}', String, '#pop'),
- (r'[^}]|\}(?!\})', String)
- ]
- }
-
-
-class CsoundOrchestraLexer(CsoundLexer):
- """
- For `Csound <https://csound.com>`_ orchestras.
-
- .. versionadded:: 2.1
- """
-
- name = 'Csound Orchestra'
- aliases = ['csound', 'csound-orc']
- filenames = ['*.orc', '*.udo']
-
- user_defined_opcodes = set()
-
- def opcode_name_callback(lexer, match):
- opcode = match.group(0)
- lexer.user_defined_opcodes.add(opcode)
- yield match.start(), Name.Function, opcode
-
- def name_callback(lexer, match):
- type_annotation_token = Keyword.Type
-
- name = match.group(1)
- if name in OPCODES or name in DEPRECATED_OPCODES or name in REMOVED_OPCODES:
- yield match.start(), Name.Builtin, name
- elif name in lexer.user_defined_opcodes:
- yield match.start(), Name.Function, name
- else:
- type_annotation_token = Name
- name_match = re.search(r'^(g?[afikSw])(\w+)', name)
- if name_match:
- yield name_match.start(1), Keyword.Type, name_match.group(1)
- yield name_match.start(2), Name, name_match.group(2)
- else:
- yield match.start(), Name, name
-
- if match.group(2):
- yield match.start(2), Punctuation, match.group(2)
- yield match.start(3), type_annotation_token, match.group(3)
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
-
- (r'^([ \t]*)(\w+)(:)([ \t]+|$)', bygroups(Whitespace, Name.Label, Punctuation, Whitespace)),
-
- include('whitespace and macro uses'),
- include('preprocessor directives'),
-
- (r'\binstr\b', Keyword.Declaration, 'instrument numbers and identifiers'),
- (r'\bopcode\b', Keyword.Declaration, 'after opcode keyword'),
- (r'\b(?:end(?:in|op))\b', Keyword.Declaration),
-
- include('partial statements')
- ],
-
- 'partial statements': [
- (r'\b(?:0dbfs|A4|k(?:r|smps)|nchnls(?:_i)?|sr)\b', Name.Variable.Global),
-
- include('numbers'),
-
- (r'\+=|-=|\*=|/=|<<|>>|<=|>=|==|!=|&&|\|\||[~¬]|[=!+\-*/^%&|<>#?:]', Operator),
- (r'[(),\[\]]', Punctuation),
-
- (r'"', String, 'quoted string'),
- (r'\{\{', String, 'braced string'),
-
- (words((
- 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen',
- 'od', 'then', 'until', 'while',
- ), prefix=r'\b', suffix=r'\b'), Keyword),
- (words(('return', 'rireturn'), prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
-
- (r'\b[ik]?goto\b', Keyword, 'goto label'),
- (r'\b(r(?:einit|igoto)|tigoto)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
- 'goto label'),
- (r'\b(c(?:g|in?|k|nk?)goto)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
- ('goto label', 'goto argument')),
- (r'\b(timout)(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
- ('goto label', 'goto argument', 'goto argument')),
- (r'\b(loop_[gl][et])(\(|\b)', bygroups(Keyword.Pseudo, Punctuation),
- ('goto label', 'goto argument', 'goto argument', 'goto argument')),
-
- (r'\bprintk?s\b', Name.Builtin, 'prints opcode'),
- (r'\b(?:readscore|scoreline(?:_i)?)\b', Name.Builtin, 'Csound score opcode'),
- (r'\bpyl?run[it]?\b', Name.Builtin, 'Python opcode'),
- (r'\blua_(?:exec|opdef)\b', Name.Builtin, 'Lua opcode'),
- (r'\bp\d+\b', Name.Variable.Instance),
- (r'\b([A-Z_a-z]\w*)(?:(:)([A-Za-z]))?\b', name_callback)
- ],
-
- 'instrument numbers and identifiers': [
- include('whitespace and macro uses'),
- (r'\d+|[A-Z_a-z]\w*', Name.Function),
- (r'[+,]', Punctuation),
- (r'\n', Whitespace, '#pop')
- ],
-
- 'after opcode keyword': [
- include('whitespace and macro uses'),
- (r'[A-Z_a-z]\w*', opcode_name_callback, ('#pop', 'opcode type signatures')),
- (r'\n', Whitespace, '#pop')
- ],
- 'opcode type signatures': [
- include('whitespace and macro uses'),
-
- # https://github.com/csound/csound/search?q=XIDENT+path%3AEngine+filename%3Acsound_orc.lex
- (r'0|[afijkKoOpPStV\[\]]+', Keyword.Type),
-
- (r',', Punctuation),
- (r'\n', Whitespace, '#pop')
- ],
-
- 'quoted string': [
- (r'"', String, '#pop'),
- (r'[^\\"$%)]+', String),
- include('macro uses'),
- include('escape sequences'),
- include('format specifiers'),
- (r'[\\$%)]', String)
- ],
- 'braced string': [
- (r'\}\}', String, '#pop'),
- (r'(?:[^\\%)}]|\}(?!\}))+', String),
- include('escape sequences'),
- include('format specifiers'),
- (r'[\\%)]', String)
- ],
- 'escape sequences': [
- # https://github.com/csound/csound/search?q=unquote_string+path%3AEngine+filename%3Acsound_orc_compile.c
- (r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape)
- ],
- # Format specifiers are highlighted in all strings, even though only
- # fprintks https://csound.com/docs/manual/fprintks.html
- # fprints https://csound.com/docs/manual/fprints.html
- # printf/printf_i https://csound.com/docs/manual/printf.html
- # printks https://csound.com/docs/manual/printks.html
- # prints https://csound.com/docs/manual/prints.html
- # sprintf https://csound.com/docs/manual/sprintf.html
- # sprintfk https://csound.com/docs/manual/sprintfk.html
- # work with strings that contain format specifiers. In addition, these opcodes’
- # handling of format specifiers is inconsistent:
- # - fprintks and fprints accept %a and %A specifiers, and accept %s specifiers
- # starting in Csound 6.15.0.
- # - printks and prints accept %a and %A specifiers, but don’t accept %s
- # specifiers.
- # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A specifiers,
- # but accept %s specifiers.
- # See https://github.com/csound/csound/issues/747 for more information.
- 'format specifiers': [
- (r'%[#0\- +]*\d*(?:\.\d+)?[AE-GXac-giosux]', String.Interpol),
- (r'%%', String.Escape)
- ],
-
- 'goto argument': [
- include('whitespace and macro uses'),
- (r',', Punctuation, '#pop'),
- include('partial statements')
- ],
- 'goto label': [
- include('whitespace and macro uses'),
- (r'\w+', Name.Label, '#pop'),
- default('#pop')
- ],
-
- 'prints opcode': [
- include('whitespace and macro uses'),
- (r'"', String, 'prints quoted string'),
- default('#pop')
- ],
- 'prints quoted string': [
- (r'\\\\[aAbBnNrRtT]', String.Escape),
- (r'%[!nNrRtT]|[~^]{1,2}', String.Escape),
- include('quoted string')
- ],
-
- 'Csound score opcode': [
- include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
- (r'\{\{', String, 'Csound score'),
- (r'\n', Whitespace, '#pop')
- ],
- 'Csound score': [
- (r'\}\}', String, '#pop'),
- (r'([^}]+)|\}(?!\})', using(CsoundScoreLexer))
- ],
-
- 'Python opcode': [
- include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
- (r'\{\{', String, 'Python'),
- (r'\n', Whitespace, '#pop')
- ],
- 'Python': [
- (r'\}\}', String, '#pop'),
- (r'([^}]+)|\}(?!\})', using(PythonLexer))
- ],
-
- 'Lua opcode': [
- include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
- (r'\{\{', String, 'Lua'),
- (r'\n', Whitespace, '#pop')
- ],
- 'Lua': [
- (r'\}\}', String, '#pop'),
- (r'([^}]+)|\}(?!\})', using(LuaLexer))
- ]
- }
-
-
-class CsoundDocumentLexer(RegexLexer):
- """
- For `Csound <https://csound.com>`_ documents.
-
- .. versionadded:: 2.1
- """
-
- name = 'Csound Document'
- aliases = ['csound-document', 'csound-csd']
- filenames = ['*.csd']
-
- # These tokens are based on those in XmlLexer in pygments/lexers/html.py. Making
- # CsoundDocumentLexer a subclass of XmlLexer rather than RegexLexer may seem like a
- # better idea, since Csound Document files look like XML files. However, Csound
- # Documents can contain Csound comments (preceded by //, for example) before and
- # after the root element, unescaped bitwise AND & and less than < operators, etc. In
- # other words, while Csound Document files look like XML files, they may not actually
- # be XML files.
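- #
- # For orientation, the overall shape of a document dispatched to the
- # sublexers above (the one-oscillator instrument is illustrative):
- #
- #   <CsoundSynthesizer>
- #   <CsInstruments>
- #   instr 1
- #     a1 oscili 0dbfs/4, 440
- #     out a1
- #   endin
- #   </CsInstruments>
- #   <CsScore>
- #   i 1 0 2
- #   </CsScore>
- #   </CsoundSynthesizer>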
- tokens = {
- 'root': [
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'(?:;|//).*$', Comment.Single),
- (r'[^/;<]+|/(?!/)', Text),
-
- (r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')),
- (r'<\s*CsScore', Name.Tag, ('score', 'tag')),
- (r'<\s*[Hh][Tt][Mm][Ll]', Name.Tag, ('HTML', 'tag')),
-
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag)
- ],
-
- 'orchestra': [
- (r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer))
- ],
- 'score': [
- (r'<\s*/\s*CsScore\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer))
- ],
- 'HTML': [
- (r'<\s*/\s*[Hh][Tt][Mm][Ll]\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*[Hh][Tt][Mm][Ll]\s*>)', using(HtmlLexer))
- ],
-
- 'tag': [
- (r'\s+', Whitespace),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop')
- ],
- 'attr': [
- (r'\s+', Whitespace),
- (r'".*?"', String, '#pop'),
- (r"'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop')
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/css.py b/venv/lib/python3.11/site-packages/pygments/lexers/css.py
deleted file mode 100644
index d8a961f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/css.py
+++ /dev/null
@@ -1,602 +0,0 @@
-"""
- pygments.lexers.css
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for CSS and related stylesheet formats.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import copy
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
- default, words, inherit
-from pygments.token import Comment, Operator, Keyword, Name, String, Number, \
- Punctuation, Whitespace
-from pygments.lexers._css_builtins import _css_properties
-
-__all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer']
-
-
-# List of vendor prefixes obtained from:
-# https://www.w3.org/TR/CSS21/syndata.html#vendor-keyword-history
-_vendor_prefixes = (
- '-ms-', 'mso-', '-moz-', '-o-', '-xv-', '-atsc-', '-wap-', '-khtml-',
- '-webkit-', 'prince-', '-ah-', '-hp-', '-ro-', '-rim-', '-tc-',
-)
-
-# List of extended color keywords obtained from:
-# https://drafts.csswg.org/css-color/#named-colors
-_color_keywords = (
- 'aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige',
- 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown',
- 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral',
- 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan',
- 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki',
- 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred',
- 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray',
- 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue',
- 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite',
- 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod',
- 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred',
- 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen',
- 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan',
- 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey',
- 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue',
- 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow',
- 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine',
- 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen',
- 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise',
- 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin',
- 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange',
- 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise',
- 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum',
- 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue',
- 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna',
- 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow',
- 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise',
- 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen',
-) + ('transparent',)
-
-# List of keyword values obtained from:
-# http://cssvalues.com/
-_keyword_values = (
- 'absolute', 'alias', 'all', 'all-petite-caps', 'all-scroll',
- 'all-small-caps', 'allow-end', 'alpha', 'alternate', 'alternate-reverse',
- 'always', 'armenian', 'auto', 'avoid', 'avoid-column', 'avoid-page',
- 'backwards', 'balance', 'baseline', 'below', 'blink', 'block', 'bold',
- 'bolder', 'border-box', 'both', 'bottom', 'box-decoration', 'break-word',
- 'capitalize', 'cell', 'center', 'circle', 'clip', 'clone', 'close-quote',
- 'col-resize', 'collapse', 'color', 'color-burn', 'color-dodge', 'column',
- 'column-reverse', 'compact', 'condensed', 'contain', 'container',
- 'content-box', 'context-menu', 'copy', 'cover', 'crisp-edges', 'crosshair',
- 'currentColor', 'cursive', 'darken', 'dashed', 'decimal',
- 'decimal-leading-zero', 'default', 'descendants', 'difference', 'digits',
- 'disc', 'distribute', 'dot', 'dotted', 'double', 'double-circle', 'e-resize',
- 'each-line', 'ease', 'ease-in', 'ease-in-out', 'ease-out', 'edges',
- 'ellipsis', 'end', 'ew-resize', 'exclusion', 'expanded', 'extra-condensed',
- 'extra-expanded', 'fantasy', 'fill', 'fill-box', 'filled', 'first', 'fixed',
- 'flat', 'flex', 'flex-end', 'flex-start', 'flip', 'force-end', 'forwards',
- 'from-image', 'full-width', 'geometricPrecision', 'georgian', 'groove',
- 'hanging', 'hard-light', 'help', 'hidden', 'hide', 'horizontal', 'hue',
- 'icon', 'infinite', 'inherit', 'initial', 'ink', 'inline', 'inline-block',
- 'inline-flex', 'inline-table', 'inset', 'inside', 'inter-word', 'invert',
- 'isolate', 'italic', 'justify', 'large', 'larger', 'last', 'left',
- 'lighten', 'lighter', 'line-through', 'linear', 'list-item', 'local',
- 'loose', 'lower-alpha', 'lower-greek', 'lower-latin', 'lower-roman',
- 'lowercase', 'ltr', 'luminance', 'luminosity', 'mandatory', 'manipulation',
- 'manual', 'margin-box', 'match-parent', 'medium', 'mixed', 'monospace',
- 'move', 'multiply', 'n-resize', 'ne-resize', 'nesw-resize',
- 'no-close-quote', 'no-drop', 'no-open-quote', 'no-repeat', 'none', 'normal',
- 'not-allowed', 'nowrap', 'ns-resize', 'nw-resize', 'nwse-resize', 'objects',
- 'oblique', 'off', 'on', 'open', 'open-quote', 'optimizeLegibility',
- 'optimizeSpeed', 'outset', 'outside', 'over', 'overlay', 'overline',
- 'padding-box', 'page', 'pan-down', 'pan-left', 'pan-right', 'pan-up',
- 'pan-x', 'pan-y', 'paused', 'petite-caps', 'pixelated', 'pointer',
- 'preserve-3d', 'progress', 'proximity', 'relative', 'repeat',
- 'repeat no-repeat', 'repeat-x', 'repeat-y', 'reverse', 'ridge', 'right',
- 'round', 'row', 'row-resize', 'row-reverse', 'rtl', 'ruby', 'ruby-base',
- 'ruby-base-container', 'ruby-text', 'ruby-text-container', 'run-in',
- 'running', 's-resize', 'sans-serif', 'saturation', 'scale-down', 'screen',
- 'scroll', 'se-resize', 'semi-condensed', 'semi-expanded', 'separate',
- 'serif', 'sesame', 'show', 'sideways', 'sideways-left', 'sideways-right',
- 'slice', 'small', 'small-caps', 'smaller', 'smooth', 'snap', 'soft-light',
- 'solid', 'space', 'space-around', 'space-between', 'spaces', 'square',
- 'start', 'static', 'step-end', 'step-start', 'sticky', 'stretch', 'strict',
- 'stroke-box', 'style', 'sw-resize', 'table', 'table-caption', 'table-cell',
- 'table-column', 'table-column-group', 'table-footer-group',
- 'table-header-group', 'table-row', 'table-row-group', 'text', 'thick',
- 'thin', 'titling-caps', 'to', 'top', 'triangle', 'ultra-condensed',
- 'ultra-expanded', 'under', 'underline', 'unicase', 'unset', 'upper-alpha',
- 'upper-latin', 'upper-roman', 'uppercase', 'upright', 'use-glyph-orientation',
- 'vertical', 'vertical-text', 'view-box', 'visible', 'w-resize', 'wait',
- 'wavy', 'weight', 'weight style', 'wrap', 'wrap-reverse', 'x-large',
- 'x-small', 'xx-large', 'xx-small', 'zoom-in', 'zoom-out',
-)
-
-# List of other keyword values from other sources:
-_other_keyword_values = (
- 'above', 'aural', 'behind', 'bidi-override', 'center-left', 'center-right',
- 'cjk-ideographic', 'continuous', 'crop', 'cross', 'embed', 'far-left',
- 'far-right', 'fast', 'faster', 'hebrew', 'high', 'higher', 'hiragana',
- 'hiragana-iroha', 'katakana', 'katakana-iroha', 'landscape', 'left-side',
- 'leftwards', 'level', 'loud', 'low', 'lower', 'message-box', 'middle',
- 'mix', 'narrower', 'once', 'portrait', 'right-side', 'rightwards', 'silent',
- 'slow', 'slower', 'small-caption', 'soft', 'spell-out', 'status-bar',
- 'super', 'text-bottom', 'text-top', 'wider', 'x-fast', 'x-high', 'x-loud',
- 'x-low', 'x-soft', 'yes', 'pre', 'pre-wrap', 'pre-line',
-)
-
-# List of functional notation and function keyword values:
-_functional_notation_keyword_values = (
- 'attr', 'blackness', 'blend', 'blenda', 'blur', 'brightness', 'calc',
- 'circle', 'color-mod', 'contrast', 'counter', 'cubic-bezier', 'device-cmyk',
- 'drop-shadow', 'ellipse', 'gray', 'grayscale', 'hsl', 'hsla', 'hue',
- 'hue-rotate', 'hwb', 'image', 'inset', 'invert', 'lightness',
- 'linear-gradient', 'matrix', 'matrix3d', 'opacity', 'perspective',
- 'polygon', 'radial-gradient', 'rect', 'repeating-linear-gradient',
- 'repeating-radial-gradient', 'rgb', 'rgba', 'rotate', 'rotate3d', 'rotateX',
- 'rotateY', 'rotateZ', 'saturate', 'saturation', 'scale', 'scale3d',
- 'scaleX', 'scaleY', 'scaleZ', 'sepia', 'shade', 'skewX', 'skewY', 'steps',
- 'tint', 'toggle', 'translate', 'translate3d', 'translateX', 'translateY',
- 'translateZ', 'whiteness',
-)
-# Note! Handle url(...) separately.
-
-# List of units obtained from:
-# https://www.w3.org/TR/css3-values/
-_angle_units = (
- 'deg', 'grad', 'rad', 'turn',
-)
-_frequency_units = (
- 'Hz', 'kHz',
-)
-_length_units = (
- 'em', 'ex', 'ch', 'rem',
- 'vh', 'vw', 'vmin', 'vmax',
- 'px', 'mm', 'cm', 'in', 'pt', 'pc', 'q',
-)
-_resolution_units = (
- 'dpi', 'dpcm', 'dppx',
-)
-_time_units = (
- 's', 'ms',
-)
-_all_units = _angle_units + _frequency_units + _length_units + \
- _resolution_units + _time_units
-
-
-class CssLexer(RegexLexer):
- """
- For CSS (Cascading Style Sheets).
- """
-
- name = 'CSS'
- url = 'https://www.w3.org/TR/CSS/#css'
- aliases = ['css']
- filenames = ['*.css']
- mimetypes = ['text/css']
-
- tokens = {
- 'root': [
- include('basics'),
- ],
- 'basics': [
- (r'\s+', Whitespace),
- (r'/\*(?:.|\n)*?\*/', Comment),
- (r'\{', Punctuation, 'content'),
- (r'(\:{1,2})([\w-]+)', bygroups(Punctuation, Name.Decorator)),
- (r'(\.)([\w-]+)', bygroups(Punctuation, Name.Class)),
- (r'(\#)([\w-]+)', bygroups(Punctuation, Name.Namespace)),
- (r'(@)([\w-]+)', bygroups(Punctuation, Keyword), 'atrule'),
- (r'[\w-]+', Name.Tag),
- (r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ],
- 'atrule': [
- (r'\{', Punctuation, 'atcontent'),
- (r';', Punctuation, '#pop'),
- include('basics'),
- ],
- 'atcontent': [
- include('basics'),
- (r'\}', Punctuation, '#pop:2'),
- ],
- 'content': [
- (r'\s+', Whitespace),
- (r'\}', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'^@.*?$', Comment.Preproc),
-
- (words(_vendor_prefixes,), Keyword.Pseudo),
- (r'('+r'|'.join(_css_properties)+r')(\s*)(\:)',
- bygroups(Keyword, Whitespace, Punctuation), 'value-start'),
- (r'([-]+[a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name.Variable, Whitespace, Punctuation),
- 'value-start'),
- (r'([a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name, Whitespace, Punctuation),
- 'value-start'),
-
- (r'/\*(?:.|\n)*?\*/', Comment),
- ],
- 'value-start': [
- (r'\s+', Whitespace),
- (words(_vendor_prefixes,), Name.Builtin.Pseudo),
- include('urls'),
- (r'('+r'|'.join(_functional_notation_keyword_values)+r')(\()',
- bygroups(Name.Builtin, Punctuation), 'function-start'),
- (r'([a-zA-Z_][\w-]+)(\()',
- bygroups(Name.Function, Punctuation), 'function-start'),
- (words(_keyword_values, suffix=r'\b'), Keyword.Constant),
- (words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
- (words(_color_keywords, suffix=r'\b'), Keyword.Constant),
- # for transition-property etc.
- (words(_css_properties, suffix=r'\b'), Keyword),
- (r'\!important', Comment.Preproc),
- (r'/\*(?:.|\n)*?\*/', Comment),
-
- include('numeric-values'),
-
- (r'[~^*!%&<>|+=@:./?-]+', Operator),
- (r'[\[\](),]+', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_][\w-]*', Name),
- (r';', Punctuation, '#pop'),
- (r'\}', Punctuation, '#pop:2'),
- ],
- 'function-start': [
- (r'\s+', Whitespace),
- (r'[-]+([A-Za-z][\w+]*[-]*)+', Name.Variable),
- include('urls'),
- (words(_vendor_prefixes,), Keyword.Pseudo),
- (words(_keyword_values, suffix=r'\b'), Keyword.Constant),
- (words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
- (words(_color_keywords, suffix=r'\b'), Keyword.Constant),
-
- # function-start may be entered recursively
- (r'(' + r'|'.join(_functional_notation_keyword_values) + r')(\()',
- bygroups(Name.Builtin, Punctuation), 'function-start'),
- (r'([a-zA-Z_][\w-]+)(\()',
- bygroups(Name.Function, Punctuation), 'function-start'),
-
- (r'/\*(?:.|\n)*?\*/', Comment),
- include('numeric-values'),
- (r'[*+/-]', Operator),
- (r',', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_-]\w*', Name),
- (r'\)', Punctuation, '#pop'),
- ],
- 'urls': [
- (r'(url)(\()(".*?")(\))', bygroups(Name.Builtin, Punctuation,
- String.Double, Punctuation)),
- (r"(url)(\()('.*?')(\))", bygroups(Name.Builtin, Punctuation,
- String.Single, Punctuation)),
- (r'(url)(\()(.*?)(\))', bygroups(Name.Builtin, Punctuation,
- String.Other, Punctuation)),
- ],
- 'numeric-values': [
- (r'\#[a-zA-Z0-9]{1,6}', Number.Hex),
- (r'[+\-]?[0-9]*[.][0-9]+', Number.Float, 'numeric-end'),
- (r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'),
- ],
- 'numeric-end': [
- (words(_all_units, suffix=r'\b'), Keyword.Type),
- (r'%', Keyword.Type),
- default('#pop'),
- ],
- }
-
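For orientation, a minimal sketch of driving a lexer such as the CssLexer above through the public Pygments API; the snippet and its sample input are illustrative only and assume a standard Pygments installation, they are not part of the deleted file.

from pygments import highlight
from pygments.lexers import CssLexer
from pygments.formatters import HtmlFormatter

css = "a:hover { color: #ff0000; /* red */ }"
# highlight() runs the state machine above ('basics' -> 'content' -> 'value-start')
# and hands the resulting token stream to the chosen formatter.
print(highlight(css, CssLexer(), HtmlFormatter()))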
-
-common_sass_tokens = {
- 'value': [
- (r'[ \t]+', Whitespace),
- (r'[!$][\w-]+', Name.Variable),
- (r'url\(', String.Other, 'string-url'),
- (r'[a-z_-][\w-]*(?=\()', Name.Function),
- (words(_css_properties + (
- 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
- 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
- 'capitalize', 'center-left', 'center-right', 'center', 'circle',
- 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
- 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
- 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
- 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
- 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
- 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
- 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
- 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
- 'left-side', 'leftwards', 'level', 'lighter', 'line-through', 'list-item',
- 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
- 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
- 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
- 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
- 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
- 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
- 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
- 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
- 'slow', 'slower', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
- 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
- 'table-caption', 'table-cell', 'table-column', 'table-column-group',
- 'table-footer-group', 'table-header-group', 'table-row',
- 'table-row-group', 'text', 'text-bottom', 'text-top', 'thick', 'thin',
- 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
- 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
- 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
- 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
- Name.Constant),
- (words(_color_keywords, suffix=r'\b'), Name.Entity),
- (words((
- 'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
- 'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
- Name.Builtin),
- (r'\!(important|default)', Name.Exception),
- (r'(true|false)', Name.Pseudo),
- (r'(and|or|not)', Operator.Word),
- (r'/\*', Comment.Multiline, 'inline-comment'),
- (r'//[^\n]*', Comment.Single),
- (r'\#[a-z0-9]{1,6}', Number.Hex),
- (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
- (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[~^*!&%<>|+=@:,./?-]+', Operator),
- (r'[\[\]()]+', Punctuation),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- (r'[a-z_-][\w-]*', Name),
- ],
-
- 'interpolation': [
- (r'\}', String.Interpol, '#pop'),
- include('value'),
- ],
-
- 'selector': [
- (r'[ \t]+', Whitespace),
- (r'\:', Name.Decorator, 'pseudo-class'),
- (r'\.', Name.Class, 'class'),
- (r'\#', Name.Namespace, 'id'),
- (r'[\w-]+', Name.Tag),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'&', Keyword),
- (r'[~^*!&\[\]()<>|+=@:;,./?-]', Operator),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- ],
-
- 'string-double': [
- (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'"', String.Double, '#pop'),
- ],
-
- 'string-single': [
- (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Single),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"'", String.Single, '#pop'),
- ],
-
- 'string-url': [
- (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'\)', String.Other, '#pop'),
- ],
-
- 'pseudo-class': [
- (r'[\w-]+', Name.Decorator),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'class': [
- (r'[\w-]+', Name.Class),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'id': [
- (r'[\w-]+', Name.Namespace),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'for': [
- (r'(from|to|through)', Operator.Word),
- include('value'),
- ],
-}
-
-
-def _indentation(lexer, match, ctx):
- indentation = match.group(0)
- yield match.start(), Whitespace, indentation
- ctx.last_indentation = indentation
- ctx.pos = match.end()
-
- if hasattr(ctx, 'block_state') and ctx.block_state and \
- indentation.startswith(ctx.block_indentation) and \
- indentation != ctx.block_indentation:
- ctx.stack.append(ctx.block_state)
- else:
- ctx.block_state = None
- ctx.block_indentation = None
- ctx.stack.append('content')
-
-
-def _starts_block(token, state):
- def callback(lexer, match, ctx):
- yield match.start(), token, match.group(0)
-
- if hasattr(ctx, 'last_indentation'):
- ctx.block_indentation = ctx.last_indentation
- else:
- ctx.block_indentation = ''
-
- ctx.block_state = state
- ctx.pos = match.end()
-
- return callback
-
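The two helpers above are ExtendedRegexLexer callbacks: a rule may name a callable instead of a token type, and that callable receives (lexer, match, ctx), yields (index, token, text) tuples, and advances ctx.pos itself. A stripped-down sketch of that contract, using a hypothetical demo lexer rather than anything from this file:

from pygments.lexer import ExtendedRegexLexer
from pygments.token import Name, Whitespace

def _leading_ws(lexer, match, ctx):
    # Like _indentation() above: emit the matched text and move ctx.pos forward;
    # a real callback would also inspect or mutate ctx.stack here.
    yield match.start(), Whitespace, match.group(0)
    ctx.pos = match.end()

class IndentEchoLexer(ExtendedRegexLexer):  # hypothetical, for illustration only
    name = 'IndentEcho'
    tokens = {
        'root': [
            (r'[ \t]+', _leading_ws),
            (r'\w+', Name),
            (r'\n', Whitespace),
        ],
    }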
-
-class SassLexer(ExtendedRegexLexer):
- """
- For Sass stylesheets.
-
- .. versionadded:: 1.3
- """
-
- name = 'Sass'
- url = 'https://sass-lang.com/'
- aliases = ['sass']
- filenames = ['*.sass']
- mimetypes = ['text/x-sass']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Whitespace),
- (r'[ \t]*', _indentation),
- ],
-
- 'content': [
- (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
- 'root'),
- (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
- 'root'),
- (r'@import', Keyword, 'import'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( )([\w-]+)', bygroups(Keyword, Whitespace, Name.Function), 'value'),
- (r'(@include)( )([\w-]+)', bygroups(Keyword, Whitespace, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'=[\w-]+', Name.Function, 'value'),
- (r'\+[\w-]+', Name.Decorator, 'value'),
- (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
- bygroups(Name.Variable, Operator), 'value'),
- (r':', Name.Attribute, 'old-style-attr'),
- (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
- default('selector'),
- ],
-
- 'single-comment': [
- (r'.+', Comment.Single),
- (r'\n', Whitespace, 'root'),
- ],
-
- 'multi-comment': [
- (r'.+', Comment.Multiline),
- (r'\n', Whitespace, 'root'),
- ],
-
- 'import': [
- (r'[ \t]+', Whitespace),
- (r'\S+', String),
- (r'\n', Whitespace, 'root'),
- ],
-
- 'old-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'([ \t]*)(=)', bygroups(Whitespace, Operator), 'value'),
- default('value'),
- ],
-
- 'new-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'([ \t]*)([=:])', bygroups(Whitespace, Operator), 'value'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in common_sass_tokens.items():
- tokens[group] = copy.copy(common)
- tokens['value'].append((r'\n', Whitespace, 'root'))
- tokens['selector'].append((r'\n', Whitespace, 'root'))
-
-
-class ScssLexer(RegexLexer):
- """
- For SCSS stylesheets.
- """
-
- name = 'SCSS'
- url = 'https://sass-lang.com/'
- aliases = ['scss']
- filenames = ['*.scss']
- mimetypes = ['text/x-scss']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@import', Keyword, 'value'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'(@media)(\s+)', bygroups(Keyword, Whitespace), 'value'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
- # TODO: broken, and prone to infinite loops.
- # (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
- # (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
- default('selector'),
- ],
-
- 'attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[ \t]*:', Operator, 'value'),
- default('#pop'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in common_sass_tokens.items():
- tokens[group] = copy.copy(common)
- tokens['value'].extend([(r'\n', Whitespace), (r'[;{}]', Punctuation, '#pop')])
- tokens['selector'].extend([(r'\n', Whitespace), (r'[;{}]', Punctuation, '#pop')])
-
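Both lexers above copy common_sass_tokens and then append their own terminators (a newline for the indentation-based Sass syntax, ';'/'{'/'}' for braced SCSS). A small sketch, assuming a standard Pygments install, to compare the resulting token streams:

from pygments.lexers import SassLexer, ScssLexer

sass_src = ".box\n  color: red\n"
scss_src = ".box { color: red; }\n"

# Same shared states; only the appended line/block terminators differ.
for lexer, src in ((SassLexer(), sass_src), (ScssLexer(), scss_src)):
    print([(str(tok), val) for tok, val in lexer.get_tokens(src)])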
-
-class LessCssLexer(CssLexer):
- """
- For LESS stylesheets.
-
- .. versionadded:: 2.1
- """
-
- name = 'LessCss'
- url = 'http://lesscss.org/'
- aliases = ['less']
- filenames = ['*.less']
- mimetypes = ['text/x-less-css']
-
- tokens = {
- 'root': [
- (r'@\w+', Name.Variable),
- inherit,
- ],
- 'content': [
- (r'\{', Punctuation, '#push'),
- (r'//.*\n', Comment.Single),
- inherit,
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/d.py b/venv/lib/python3.11/site-packages/pygments/lexers/d.py
deleted file mode 100644
index db9020d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/d.py
+++ /dev/null
@@ -1,258 +0,0 @@
-"""
- pygments.lexers.d
- ~~~~~~~~~~~~~~~~~
-
- Lexers for D languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Comment, Keyword, Name, String, Number, \
- Punctuation, Whitespace
-
-__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
-
-
-class DLexer(RegexLexer):
- """
- For D source.
-
- .. versionadded:: 1.2
- """
- name = 'D'
- url = 'https://dlang.org/'
- filenames = ['*.d', '*.di']
- aliases = ['d']
- mimetypes = ['text/x-dsrc']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- # (r'\\\n', Text), # line continuations
- # Comments
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nested_comment'),
- # Keywords
- (words((
- 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body',
- 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue',
- 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else',
- 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse',
- 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import',
- 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin',
- 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
- 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
- 'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
- 'template', 'this', 'throw', 'try', 'typeid', 'typeof',
- 'union', 'unittest', 'version', 'volatile', 'while', 'with',
- '__gshared', '__traits', '__vector', '__parameters'),
- suffix=r'\b'),
- Keyword),
- (words((
- # Removed in 2.072
- 'typedef', ),
- suffix=r'\b'),
- Keyword.Removed),
- (words((
- 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
- 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
- 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
- 'ushort', 'void', 'wchar'), suffix=r'\b'),
- Keyword.Type),
- (r'(false|true|null)\b', Keyword.Constant),
- (words((
- '__FILE__', '__FILE_FULL_PATH__', '__MODULE__', '__LINE__', '__FUNCTION__',
- '__PRETTY_FUNCTION__', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__',
- '__VENDOR__', '__VERSION__'), suffix=r'\b'),
- Keyword.Pseudo),
- (r'macro\b', Keyword.Reserved),
- (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
- # FloatLiteral
- # -- HexFloat
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+', Number.Bin),
- # -- Octal
- (r'0[0-7_]+', Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
- String.Char),
- # StringLiteral
- # -- WysiwygString
- (r'r"[^"]*"[cwd]?', String),
- # -- AlternateWysiwygString
- (r'`[^`]*`[cwd]?', String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"[cwd]?', String),
- # -- EscapeSequence
- (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
- r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
- String),
- # -- HexString
- (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
- # -- DelimitedString
- (r'q"\[', String, 'delimited_bracket'),
- (r'q"\(', String, 'delimited_parenthesis'),
- (r'q"<', String, 'delimited_angle'),
- (r'q"\{', String, 'delimited_curly'),
- (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
- (r'q"(.).*?\1"', String),
- # -- TokenString
- (r'q\{', String, 'token_string'),
- # Attributes
- (r'@([a-zA-Z_]\w*)?', Name.Decorator),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
- r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
- r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- # Line
- (r'(#line)(\s)(.*)(\n)', bygroups(Comment.Special, Whitespace,
- Comment.Special, Whitespace)),
- ],
- 'nested_comment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ],
- 'token_string': [
- (r'\{', Punctuation, 'token_string_nest'),
- (r'\}', String, '#pop'),
- include('root'),
- ],
- 'token_string_nest': [
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'delimited_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, 'delimited_inside_bracket'),
- (r'\]"', String, '#pop'),
- ],
- 'delimited_inside_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, '#push'),
- (r'\]', String, '#pop'),
- ],
- 'delimited_parenthesis': [
- (r'[^()]+', String),
- (r'\(', String, 'delimited_inside_parenthesis'),
- (r'\)"', String, '#pop'),
- ],
- 'delimited_inside_parenthesis': [
- (r'[^()]+', String),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
- 'delimited_angle': [
- (r'[^<>]+', String),
- (r'<', String, 'delimited_inside_angle'),
- (r'>"', String, '#pop'),
- ],
- 'delimited_inside_angle': [
- (r'[^<>]+', String),
- (r'<', String, '#push'),
- (r'>', String, '#pop'),
- ],
- 'delimited_curly': [
- (r'[^{}]+', String),
- (r'\{', String, 'delimited_inside_curly'),
- (r'\}"', String, '#pop'),
- ],
- 'delimited_inside_curly': [
- (r'[^{}]+', String),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- }
-
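The delimited_* states above keep D's q"[...]", q"(...)", q"<...>" and q"{...}" literals, nested delimiters included, inside String. A quick check, assuming a standard Pygments install:

from pygments.lexers import DLexer
from pygments.token import String

code = 'auto s = q"[outer [nested] text]";'
# The whole delimited literal, nested brackets and all, comes back as String tokens.
print([val for tok, val in DLexer().get_tokens(code) if tok in String])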
-
-class CrocLexer(RegexLexer):
- """
- For Croc source.
- """
- name = 'Croc'
- url = 'http://jfbillingsley.com/croc'
- filenames = ['*.croc']
- aliases = ['croc']
- mimetypes = ['text/x-crocsrc']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- # Comments
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*', Comment.Multiline, 'nestedcomment'),
- # Keywords
- (words((
- 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue',
- 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function',
- 'global', 'namespace', 'if', 'import', 'in', 'is', 'local',
- 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw',
- 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'),
- Keyword),
- (r'(false|true|null)\b', Keyword.Constant),
- # FloatLiteral
- (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
- Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[bB][01][01_]*', Number.Bin),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
- # -- Decimal
- (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
- String.Char),
- # StringLiteral
- # -- WysiwygString
- (r'@"(""|[^"])*"', String),
- (r'@`(``|[^`])*`', String),
- (r"@'(''|[^'])*'", String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
- r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
- r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'nestedcomment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
-
-class MiniDLexer(CrocLexer):
- """
- For MiniD source. MiniD is now known as Croc.
- """
- name = 'MiniD'
- filenames = [] # don't lex .md as MiniD, reserve for Markdown
- aliases = ['minid']
- mimetypes = ['text/x-minidsrc']
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/dalvik.py b/venv/lib/python3.11/site-packages/pygments/lexers/dalvik.py
deleted file mode 100644
index eb97bd5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/dalvik.py
+++ /dev/null
@@ -1,127 +0,0 @@
-"""
- pygments.lexers.dalvik
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for Dalvik VM-related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Keyword, Text, Comment, Name, String, Number, \
- Punctuation, Whitespace
-
-__all__ = ['SmaliLexer']
-
-
-class SmaliLexer(RegexLexer):
- """
- For Smali (Android/Dalvik) assembly code.
-
- .. versionadded:: 1.6
- """
- name = 'Smali'
- url = 'http://code.google.com/p/smali/'
- aliases = ['smali']
- filenames = ['*.smali']
- mimetypes = ['text/smali']
-
- tokens = {
- 'root': [
- include('comment'),
- include('label'),
- include('field'),
- include('method'),
- include('class'),
- include('directive'),
- include('access-modifier'),
- include('instruction'),
- include('literal'),
- include('punctuation'),
- include('type'),
- include('whitespace')
- ],
- 'directive': [
- (r'^([ \t]*)(\.(?:class|super|implements|field|subannotation|annotation|'
- r'enum|method|registers|locals|array-data|packed-switch|'
- r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
- r'epilogue|source))', bygroups(Whitespace, Keyword)),
- (r'^([ \t]*)(\.end)( )(field|subannotation|annotation|method|array-data|'
- 'packed-switch|sparse-switch|parameter|local)',
- bygroups(Whitespace, Keyword, Whitespace, Keyword)),
- (r'^([ \t]*)(\.restart)( )(local)',
- bygroups(Whitespace, Keyword, Whitespace, Keyword)),
- ],
- 'access-modifier': [
- (r'(public|private|protected|static|final|synchronized|bridge|'
- r'varargs|native|abstract|strictfp|synthetic|constructor|'
- r'declared-synchronized|interface|enum|annotation|volatile|'
- r'transient)', Keyword),
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- ],
- 'instruction': [
- (r'\b[vp]\d+\b', Name.Builtin), # registers
- (r'(\b[a-z][A-Za-z0-9/-]+)(\s+)', bygroups(Text, Whitespace)), # instructions
- ],
- 'literal': [
- (r'".*"', String),
- (r'0x[0-9A-Fa-f]+t?', Number.Hex),
- (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+L?', Number.Integer),
- ],
- 'field': [
- (r'(\$?\b)([\w$]*)(:)',
- bygroups(Punctuation, Name.Variable, Punctuation)),
- ],
- 'method': [
- (r'<(?:cl)?init>', Name.Function), # constructor
- (r'(\$?\b)([\w$]*)(\()',
- bygroups(Punctuation, Name.Function, Punctuation)),
- ],
- 'label': [
- (r':\w+', Name.Label),
- ],
- 'class': [
- # class names in the form Lcom/namespace/ClassName;
- # I only want to color the ClassName part, so the namespace part is
- # treated as 'Text'
- (r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
- bygroups(Keyword.Type, Text, Name.Class, Text)),
- ],
- 'punctuation': [
- (r'->', Punctuation),
- (r'[{},():=.-]', Punctuation),
- ],
- 'type': [
- (r'[ZBSCIJFDV\[]+', Keyword.Type),
- ],
- 'comment': [
- (r'#.*?\n', Comment),
- ],
- }
-
- def analyse_text(text):
- score = 0
- if re.search(r'^\s*\.class\s', text, re.MULTILINE):
- score += 0.5
- if re.search(r'\b((check-cast|instance-of|throw-verification-error'
- r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
- r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
- r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
- score += 0.3
- if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
- r'\b(array-data|class-change-error|declared-synchronized|'
- r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
- r'illegal-class-access|illegal-field-access|'
- r'illegal-method-access|instantiation-error|no-error|'
- r'no-such-class|no-such-field|no-such-method|'
- r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
- score += 0.6
- return score
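analyse_text is the hook that pygments.lexers.guess_lexer() consults when no filename is available; the returned score (clamped to 0..1) competes against every other lexer. A sketch of calling it directly, with a made-up Smali snippet:

from pygments.lexers.dalvik import SmaliLexer

sample = """\
.class public Lcom/example/Foo;
.super Ljava/lang/Object;
.method public constructor <init>()V
    invoke-direct {p0}, Ljava/lang/Object;-><init>()V
    return-void
.end method
"""
# The '.class' header and the invoke-/return opcodes trigger the first two checks.
print(SmaliLexer.analyse_text(sample))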
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/data.py b/venv/lib/python3.11/site-packages/pygments/lexers/data.py
deleted file mode 100644
index afb5f7e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/data.py
+++ /dev/null
@@ -1,767 +0,0 @@
-"""
- pygments.lexers.data
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for data file formats.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import Lexer, ExtendedRegexLexer, LexerContext, \
- include, bygroups
-from pygments.token import Comment, Error, Keyword, Literal, Name, Number, \
- Punctuation, String, Whitespace
-
-__all__ = ['YamlLexer', 'JsonLexer', 'JsonBareObjectLexer', 'JsonLdLexer']
-
-
-class YamlLexerContext(LexerContext):
- """Indentation context for the YAML lexer."""
-
- def __init__(self, *args, **kwds):
- super().__init__(*args, **kwds)
- self.indent_stack = []
- self.indent = -1
- self.next_indent = 0
- self.block_scalar_indent = None
-
-
-class YamlLexer(ExtendedRegexLexer):
- """
- Lexer for YAML, a human-friendly data serialization
- language.
-
- .. versionadded:: 0.11
- """
-
- name = 'YAML'
- url = 'http://yaml.org/'
- aliases = ['yaml']
- filenames = ['*.yaml', '*.yml']
- mimetypes = ['text/x-yaml']
-
- def something(token_class):
- """Do not produce empty tokens."""
- def callback(lexer, match, context):
- text = match.group()
- if not text:
- return
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def reset_indent(token_class):
- """Reset the indentation levels."""
- def callback(lexer, match, context):
- text = match.group()
- context.indent_stack = []
- context.indent = -1
- context.next_indent = 0
- context.block_scalar_indent = None
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def save_indent(token_class, start=False):
- """Save a possible indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- extra = ''
- if start:
- context.next_indent = len(text)
- if context.next_indent < context.indent:
- while context.next_indent < context.indent:
- context.indent = context.indent_stack.pop()
- if context.next_indent > context.indent:
- extra = text[context.indent:]
- text = text[:context.indent]
- else:
- context.next_indent += len(text)
- if text:
- yield match.start(), token_class, text
- if extra:
- yield match.start()+len(text), token_class.Error, extra
- context.pos = match.end()
- return callback
-
- def set_indent(token_class, implicit=False):
- """Set the previously saved indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- if context.indent < context.next_indent:
- context.indent_stack.append(context.indent)
- context.indent = context.next_indent
- if not implicit:
- context.next_indent += len(text)
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def set_block_scalar_indent(token_class):
- """Set an explicit indentation level for a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- context.block_scalar_indent = None
- if not text:
- return
- increment = match.group(1)
- if increment:
- current_indent = max(context.indent, 0)
- increment = int(increment)
- context.block_scalar_indent = current_indent + increment
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_empty_line(indent_token_class, content_token_class):
- """Process an empty line in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if (context.block_scalar_indent is None or
- len(text) <= context.block_scalar_indent):
- if text:
- yield match.start(), indent_token_class, text
- else:
- indentation = text[:context.block_scalar_indent]
- content = text[context.block_scalar_indent:]
- yield match.start(), indent_token_class, indentation
- yield (match.start()+context.block_scalar_indent,
- content_token_class, content)
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_indent(token_class):
- """Process indentation spaces in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if context.block_scalar_indent is None:
- if len(text) <= max(context.indent, 0):
- context.stack.pop()
- context.stack.pop()
- return
- context.block_scalar_indent = len(text)
- else:
- if len(text) < context.block_scalar_indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_plain_scalar_indent(token_class):
- """Process indentation spaces in a plain scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if len(text) <= context.indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- tokens = {
- # the root rules
- 'root': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Whitespace),
- # line breaks
- (r'\n+', Whitespace),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # the '%YAML' directive
- (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
- # the %TAG directive
- (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
- # document start and document end indicators
- (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
- 'block-line'),
- # indentation spaces
- (r'[ ]*(?!\s|$)', save_indent(Whitespace, start=True),
- ('block-line', 'indentation')),
- ],
-
- # trailing whitespaces after directives or a block scalar indicator
- 'ignored-line': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Whitespace),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # line break
- (r'\n', Whitespace, '#pop:2'),
- ],
-
- # the %YAML directive
- 'yaml-directive': [
- # the version number
- (r'([ ]+)([0-9]+\.[0-9]+)',
- bygroups(Whitespace, Number), 'ignored-line'),
- ],
-
- # the %TAG directive
- 'tag-directive': [
- # a tag handle and the corresponding prefix
- (r'([ ]+)(!|![\w-]*!)'
- r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
- bygroups(Whitespace, Keyword.Type, Whitespace, Keyword.Type),
- 'ignored-line'),
- ],
-
- # block scalar indicators and indentation spaces
- 'indentation': [
- # trailing whitespaces are ignored
- (r'[ ]*$', something(Whitespace), '#pop:2'),
- # whitespaces preceding block collection indicators
- (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Whitespace)),
- # block collection indicators
- (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
- # the beginning a block line
- (r'[ ]*', save_indent(Whitespace), '#pop'),
- ],
-
- # an indented line in the block context
- 'block-line': [
- # the line end
- (r'[ ]*(?=#|$)', something(Whitespace), '#pop'),
- # whitespaces separating tokens
- (r'[ ]+', Whitespace),
- # key with colon
- (r'''([^#,?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
- bygroups(Name.Tag, set_indent(Punctuation, implicit=True))),
- # tags, anchors and aliases,
- include('descriptors'),
- # block collections and scalars
- include('block-nodes'),
- # flow collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
- something(Name.Variable),
- 'plain-scalar-in-block-context'),
- ],
-
- # tags, anchors, aliases
- 'descriptors': [
- # a full-form tag
- (r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
- # a tag in the form '!', '!suffix' or '!handle!suffix'
- (r'!(?:[\w-]+!)?'
- r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]*', Keyword.Type),
- # an anchor
- (r'&[\w-]+', Name.Label),
- # an alias
- (r'\*[\w-]+', Name.Variable),
- ],
-
- # block collections and scalars
- 'block-nodes': [
- # implicit key
- (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
- # literal and folded scalars
- (r'[|>]', Punctuation.Indicator,
- ('block-scalar-content', 'block-scalar-header')),
- ],
-
- # flow collections and quoted scalars
- 'flow-nodes': [
- # a flow sequence
- (r'\[', Punctuation.Indicator, 'flow-sequence'),
- # a flow mapping
- (r'\{', Punctuation.Indicator, 'flow-mapping'),
- # a single-quoted scalar
- (r'\'', String, 'single-quoted-scalar'),
- # a double-quoted scalar
- (r'\"', String, 'double-quoted-scalar'),
- ],
-
- # the content of a flow collection
- 'flow-collection': [
- # whitespaces
- (r'[ ]+', Whitespace),
- # line breaks
- (r'\n+', Whitespace),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # simple indicators
- (r'[?:,]', Punctuation.Indicator),
- # tags, anchors and aliases
- include('descriptors'),
- # nested collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
- something(Name.Variable),
- 'plain-scalar-in-flow-context'),
- ],
-
- # a flow sequence indicated by '[' and ']'
- 'flow-sequence': [
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\]', Punctuation.Indicator, '#pop'),
- ],
-
- # a flow mapping indicated by '{' and '}'
- 'flow-mapping': [
- # key with colon
- (r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
- bygroups(Name.Tag, Punctuation)),
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\}', Punctuation.Indicator, '#pop'),
- ],
-
- # block scalar lines
- 'block-scalar-content': [
- # line break
- (r'\n', Whitespace),
- # empty line
- (r'^[ ]+$',
- parse_block_scalar_empty_line(Whitespace, Name.Constant)),
- # indentation spaces (we may leave the state here)
- (r'^[ ]*', parse_block_scalar_indent(Whitespace)),
- # line content
- (r'[\S\t ]+', Name.Constant),
- ],
-
- # the content of a literal or folded scalar
- 'block-scalar-header': [
- # indentation indicator followed by chomping flag
- (r'([1-9])?[+-]?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- # chomping flag followed by indentation indicator
- (r'[+-]?([1-9])?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- ],
-
- # ignored and regular whitespaces in quoted scalars
- 'quoted-scalar-whitespaces': [
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Whitespace),
- (r'[ ]+$', Whitespace),
- # line breaks are ignored
- (r'\n+', Whitespace),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- ],
-
- # single-quoted scalars
- 'single-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of the quote character
- (r'\'\'', String.Escape),
- # regular non-whitespace characters
- (r'[^\s\']+', String),
- # the closing quote
- (r'\'', String, '#pop'),
- ],
-
- # double-quoted scalars
- 'double-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of special characters
- (r'\\[0abt\tn\nvfre "\\N_LP]', String),
- # escape codes
- (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
- String.Escape),
- # regular non-whitespace characters
- (r'[^\s"\\]+', String),
- # the closing quote
- (r'"', String, '#pop'),
- ],
-
- # the beginning of a new line while scanning a plain scalar
- 'plain-scalar-in-block-context-new-line': [
- # empty lines
- (r'^[ ]+$', Whitespace),
- # line breaks
- (r'\n+', Whitespace),
- # document start and document end indicators
- (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
- # indentation spaces (we may leave the block line state here)
- (r'^[ ]*', parse_plain_scalar_indent(Whitespace), '#pop'),
- ],
-
- # a plain scalar in the block context
- 'plain-scalar-in-block-context': [
- # the scalar ends with the ':' indicator
- (r'[ ]*(?=:[ ]|:$)', something(Whitespace), '#pop'),
- # the scalar ends with whitespaces followed by a comment
- (r'[ ]+(?=#)', Whitespace, '#pop'),
- # trailing whitespaces are ignored
- (r'[ ]+$', Whitespace),
- # line breaks are ignored
- (r'\n+', Whitespace, 'plain-scalar-in-block-context-new-line'),
- # other whitespaces are a part of the value
- (r'[ ]+', Literal.Scalar.Plain),
- # regular non-whitespace characters
- (r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
- ],
-
- # a plain scalar is the flow context
- 'plain-scalar-in-flow-context': [
- # the scalar ends with an indicator character
- (r'[ ]*(?=[,:?\[\]{}])', something(Whitespace), '#pop'),
- # the scalar ends with a comment
- (r'[ ]+(?=#)', Whitespace, '#pop'),
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Whitespace),
- (r'[ ]+$', Whitespace),
- # line breaks are ignored
- (r'\n+', Whitespace),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- # regular non-whitespace characters
- (r'[^\s,:?\[\]{}]+', Name.Variable),
- ],
-
- }
-
- def get_tokens_unprocessed(self, text=None, context=None):
- if context is None:
- context = YamlLexerContext(text, 0)
- return super().get_tokens_unprocessed(text, context)
-
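The override above only wires the indentation-tracking YamlLexerContext into the normal ExtendedRegexLexer machinery, so callers use the ordinary token API. A minimal sketch, assuming a standard Pygments install:

from pygments.lexers import YamlLexer

doc = "server:\n  host: localhost\n  ports:\n    - 8080\n"
# Keys come out as Name.Tag, '-'/':' indicators as Punctuation.Indicator,
# and plain scalars as Literal.Scalar.Plain (block) or Name.Variable (flow).
for tok, val in YamlLexer().get_tokens(doc):
    print(tok, repr(val))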
-
-class JsonLexer(Lexer):
- """
- For JSON data structures.
-
- Javascript-style comments are supported (like ``/* */`` and ``//``),
- though comments are not part of the JSON specification.
- This allows users to highlight JSON as it is used in the wild.
-
- No validation is performed on the input JSON document.
-
- .. versionadded:: 1.5
- """
-
- name = 'JSON'
- url = 'https://www.json.org'
- aliases = ['json', 'json-object']
- filenames = ['*.json', '*.jsonl', '*.ndjson', 'Pipfile.lock']
- mimetypes = ['application/json', 'application/json-object', 'application/x-ndjson', 'application/jsonl', 'application/json-seq']
-
- # No validation of integers, floats, or constants is done.
- # As long as the characters are members of the following
- # sets, the token will be considered valid. For example,
- #
- # "--1--" is parsed as an integer
- # "1...eee" is parsed as a float
- # "trustful" is parsed as a constant
- #
- integers = set('-0123456789')
- floats = set('.eE+')
- constants = set('truefalsenull') # true|false|null
- hexadecimals = set('0123456789abcdefABCDEF')
- punctuations = set('{}[],')
- whitespaces = {'\u0020', '\u000a', '\u000d', '\u0009'}
-
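As the comment above says, the scanner is deliberately non-validating: membership in these character sets alone decides the token type. A quick demonstration, assuming a standard Pygments install:

from pygments.lexers import JsonLexer

# Not valid JSON, but '--1--' still scans as an integer and 'trustful' as a constant.
for _, tok, val in JsonLexer().get_tokens_unprocessed('[--1--, trustful]'):
    print(tok, repr(val))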
- def get_tokens_unprocessed(self, text):
- """Parse JSON data."""
-
- in_string = False
- in_escape = False
- in_unicode_escape = 0
- in_whitespace = False
- in_constant = False
- in_number = False
- in_float = False
- in_punctuation = False
- in_comment_single = False
- in_comment_multiline = False
- expecting_second_comment_opener = False # // or /*
- expecting_second_comment_closer = False # */
-
- start = 0
-
- # The queue is used to store data that may need to be tokenized
- # differently based on what follows. In particular, JSON object
- # keys are tokenized differently than string values, but cannot
- # be distinguished until punctuation is encountered outside the
- # string.
- #
- # A ":" character after the string indicates that the string is
- # an object key; any other character indicates the string is a
- # regular string value.
- #
- # The queue holds tuples that contain the following data:
- #
- # (start_index, token_type, text)
- #
- # By default the token type of text in double quotes is
- # String.Double. The token type will be replaced if a colon
- # is encountered after the string closes.
- #
- queue = []
-
- for stop, character in enumerate(text):
- if in_string:
- if in_unicode_escape:
- if character in self.hexadecimals:
- in_unicode_escape -= 1
- if not in_unicode_escape:
- in_escape = False
- else:
- in_unicode_escape = 0
- in_escape = False
-
- elif in_escape:
- if character == 'u':
- in_unicode_escape = 4
- else:
- in_escape = False
-
- elif character == '\\':
- in_escape = True
-
- elif character == '"':
- queue.append((start, String.Double, text[start:stop + 1]))
- in_string = False
- in_escape = False
- in_unicode_escape = 0
-
- continue
-
- elif in_whitespace:
- if character in self.whitespaces:
- continue
-
- if queue:
- queue.append((start, Whitespace, text[start:stop]))
- else:
- yield start, Whitespace, text[start:stop]
- in_whitespace = False
- # Fall through so the new character can be evaluated.
-
- elif in_constant:
- if character in self.constants:
- continue
-
- yield start, Keyword.Constant, text[start:stop]
- in_constant = False
- # Fall through so the new character can be evaluated.
-
- elif in_number:
- if character in self.integers:
- continue
- elif character in self.floats:
- in_float = True
- continue
-
- if in_float:
- yield start, Number.Float, text[start:stop]
- else:
- yield start, Number.Integer, text[start:stop]
- in_number = False
- in_float = False
- # Fall through so the new character can be evaluated.
-
- elif in_punctuation:
- if character in self.punctuations:
- continue
-
- yield start, Punctuation, text[start:stop]
- in_punctuation = False
- # Fall through so the new character can be evaluated.
-
- elif in_comment_single:
- if character != '\n':
- continue
-
- if queue:
- queue.append((start, Comment.Single, text[start:stop]))
- else:
- yield start, Comment.Single, text[start:stop]
-
- in_comment_single = False
- # Fall through so the new character can be evaluated.
-
- elif in_comment_multiline:
- if character == '*':
- expecting_second_comment_closer = True
- elif expecting_second_comment_closer:
- expecting_second_comment_closer = False
- if character == '/':
- if queue:
- queue.append((start, Comment.Multiline, text[start:stop + 1]))
- else:
- yield start, Comment.Multiline, text[start:stop + 1]
-
- in_comment_multiline = False
-
- continue
-
- elif expecting_second_comment_opener:
- expecting_second_comment_opener = False
- if character == '/':
- in_comment_single = True
- continue
- elif character == '*':
- in_comment_multiline = True
- continue
-
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- yield start, Error, text[start:stop]
- # Fall through so the new character can be evaluated.
-
- start = stop
-
- if character == '"':
- in_string = True
-
- elif character in self.whitespaces:
- in_whitespace = True
-
- elif character in {'f', 'n', 't'}: # The first letters of true|false|null
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- in_constant = True
-
- elif character in self.integers:
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- in_number = True
-
- elif character == ':':
- # Yield from the queue. Replace string token types.
- for _start, _token, _text in queue:
- # There can be only three types of tokens before a ':':
- # Whitespace, Comment, or a quoted string.
- #
- # If it's a quoted string we emit Name.Tag.
- # Otherwise, we yield the original token.
- #
- # In all other cases this would be invalid JSON,
- # but this is not a validating JSON lexer, so it's OK.
- if _token is String.Double:
- yield _start, Name.Tag, _text
- else:
- yield _start, _token, _text
- queue.clear()
-
- in_punctuation = True
-
- elif character in self.punctuations:
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- in_punctuation = True
-
- elif character == '/':
- # This is the beginning of a comment.
- expecting_second_comment_opener = True
-
- else:
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- yield start, Error, character
-
- # Yield any remaining text.
- yield from queue
- if in_string:
- yield start, Error, text[start:]
- elif in_float:
- yield start, Number.Float, text[start:]
- elif in_number:
- yield start, Number.Integer, text[start:]
- elif in_constant:
- yield start, Keyword.Constant, text[start:]
- elif in_whitespace:
- yield start, Whitespace, text[start:]
- elif in_punctuation:
- yield start, Punctuation, text[start:]
- elif in_comment_single:
- yield start, Comment.Single, text[start:]
- elif in_comment_multiline:
- yield start, Error, text[start:]
- elif expecting_second_comment_opener:
- yield start, Error, text[start:]
-
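The queue described earlier is what lets the lexer decide retroactively whether a double-quoted string was an object key: when a ':' follows, the queued String.Double token is re-emitted as Name.Tag. A short sketch of that behaviour:

from pygments.lexers import JsonLexer
from pygments.token import Name, String

for _, tok, val in JsonLexer().get_tokens_unprocessed('{"name": "value"}'):
    if tok in (Name.Tag, String.Double):
        print(tok, val)
# '"name"' is promoted to Name.Tag once the ':' is seen;
# '"value"' stays String.Double.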
-
-class JsonBareObjectLexer(JsonLexer):
- """
- For JSON data structures (with missing object curly braces).
-
- .. versionadded:: 2.2
-
- .. deprecated:: 2.8.0
-
- Behaves the same as `JsonLexer` now.
- """
-
- name = 'JSONBareObject'
- aliases = []
- filenames = []
- mimetypes = []
-
-
-class JsonLdLexer(JsonLexer):
- """
- For JSON-LD linked data.
-
- .. versionadded:: 2.0
- """
-
- name = 'JSON-LD'
- url = 'https://json-ld.org/'
- aliases = ['jsonld', 'json-ld']
- filenames = ['*.jsonld']
- mimetypes = ['application/ld+json']
-
- json_ld_keywords = {
- '"@%s"' % keyword
- for keyword in (
- 'base',
- 'container',
- 'context',
- 'direction',
- 'graph',
- 'id',
- 'import',
- 'included',
- 'index',
- 'json',
- 'language',
- 'list',
- 'nest',
- 'none',
- 'prefix',
- 'propagate',
- 'protected',
- 'reverse',
- 'set',
- 'type',
- 'value',
- 'version',
- 'vocab',
- )
- }
-
- def get_tokens_unprocessed(self, text):
- for start, token, value in super().get_tokens_unprocessed(text):
- if token is Name.Tag and value in self.json_ld_keywords:
- yield start, Name.Decorator, value
- else:
- yield start, token, value
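The subclass only post-processes the parent's token stream, promoting quoted JSON-LD keywords from Name.Tag to Name.Decorator. A brief sketch, assuming a standard Pygments install:

from pygments.lexers import JsonLdLexer
from pygments.token import Name

doc = '{"@context": "https://schema.org/", "name": "Ada"}'
# '"@context"' is a JSON-LD keyword -> Name.Decorator; '"name"' remains Name.Tag.
for _, tok, val in JsonLdLexer().get_tokens_unprocessed(doc):
    if tok in (Name.Decorator, Name.Tag):
        print(tok, val)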
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/dax.py b/venv/lib/python3.11/site-packages/pygments/lexers/dax.py
deleted file mode 100644
index 39618c3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/dax.py
+++ /dev/null
@@ -1,136 +0,0 @@
-"""
- pygments.lexers.dax
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for DAX (Data Analysis Expressions).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Punctuation, Whitespace,\
- Name, Operator, String, Number, Text
-
-__all__ = ['DaxLexer']
-
-
-class DaxLexer(RegexLexer):
- """
- Lexer for Power BI DAX
- Referenced from: https://github.com/sql-bi/SyntaxHighlighterBrushDax
-
- .. versionadded:: 2.15
- """
- name = 'Dax'
- aliases = ['dax']
- filenames = ['*.dax']
- url = 'https://learn.microsoft.com/en-us/dax/dax-function-reference'
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r"--.*\n?", Comment.Single), # Comment: Double dash comment
- (r"//.*\n?", Comment.Single), # Comment: Double backslash comment
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (words(('abs', 'accrint', 'accrintm', 'acos', 'acosh', 'acot', 'acoth',
- 'addcolumns', 'addmissingitems', 'all', 'allcrossfiltered',
- 'allexcept', 'allnoblankrow', 'allselected', 'amordegrc', 'amorlinc',
- 'and','approximatedistinctcount', 'asin', 'asinh', 'atan', 'atanh',
- 'average', 'averagea', 'averagex', 'beta.dist', 'beta.inv',
- 'bitand', 'bitlshift', 'bitor', 'bitrshift', 'bitxor', 'blank',
- 'calculate', 'calculatetable', 'calendar', 'calendarauto', 'ceiling',
- 'chisq.dist', 'chisq.dist.rt', 'chisq.inv', 'chisq.inv.rt',
- 'closingbalancemonth', 'closingbalancequarter', 'closingbalanceyear',
- 'coalesce', 'columnstatistics', 'combin', 'combina', 'combinevalues',
- 'concatenate', 'concatenatex', 'confidence.norm', 'confidence.t',
- 'contains', 'containsrow', 'containsstring', 'containsstringexact',
- 'convert', 'cos', 'cosh', 'cot', 'coth', 'count', 'counta', 'countax',
- 'countblank', 'countrows', 'countx', 'coupdaybs', 'coupdays',
- 'coupdaysnc', 'coupncd', 'coupnum', 'couppcd', 'crossfilter',
- 'crossjoin', 'cumipmt', 'cumprinc', 'currency', 'currentgroup',
- 'customdata', 'datatable', 'date', 'dateadd', 'datediff',
- 'datesbetween', 'datesinperiod', 'datesmtd', 'datesqtd',
- 'datesytd', 'datevalue', 'day', 'db', 'ddb', 'degrees', 'detailrows',
- 'disc', 'distinct', 'distinctcount', 'distinctcountnoblank',
- 'divide', 'dollarde', 'dollarfr', 'duration', 'earlier', 'earliest',
- 'edate', 'effect', 'endofmonth', 'endofquarter', 'endofyear',
- 'eomonth', 'error', 'evaluateandlog', 'even', 'exact', 'except',
- 'exp', 'expon.dist', 'fact', 'false', 'filter', 'filters', 'find',
- 'firstdate', 'firstnonblank', 'firstnonblankvalue', 'fixed', 'floor',
- 'format', 'fv', 'gcd', 'generate', 'generateall', 'generateseries',
- 'geomean', 'geomeanx', 'groupby', 'hash', 'hasonefilter',
- 'hasonevalue', 'hour', 'if', 'if.eager', 'iferror', 'ignore', 'index',
- 'int', 'intersect', 'intrate', 'ipmt', 'isafter', 'isblank',
- 'iscrossfiltered', 'isempty', 'iserror', 'iseven', 'isfiltered',
- 'isinscope', 'islogical', 'isnontext', 'isnumber', 'iso.ceiling',
- 'isodd', 'isonorafter', 'ispmt', 'isselectedmeasure', 'issubtotal',
- 'istext', 'keepfilters', 'keywordmatch', 'lastdate', 'lastnonblank',
- 'lastnonblankvalue', 'lcm', 'left', 'len', 'linest', 'linestx', 'ln',
- 'log', 'log10', 'lookupvalue', 'lower', 'max', 'maxa', 'maxx',
- 'mduration', 'median', 'medianx', 'mid', 'min', 'mina', 'minute',
- 'minx', 'mod', 'month', 'mround', 'nameof', 'naturalinnerjoin',
- 'naturalleftouterjoin', 'networkdays', 'nextday', 'nextmonth',
- 'nextquarter', 'nextyear', 'nominal', 'nonvisual', 'norm.dist',
- 'norm.inv', 'norm.s.dist', 'norm.s.inv', 'not', 'now', 'nper', 'odd',
- 'oddfprice', 'oddfyield', 'oddlprice', 'oddlyield', 'offset',
- 'openingbalancemonth', 'openingbalancequarter', 'openingbalanceyear',
- 'or', 'orderby', 'parallelperiod', 'partitionby', 'path',
- 'pathcontains', 'pathitem', 'pathitemreverse', 'pathlength',
- 'pduration', 'percentile.exc', 'percentile.inc', 'percentilex.exc',
- 'percentilex.inc', 'permut', 'pi', 'pmt', 'poisson.dist', 'power',
- 'ppmt', 'previousday', 'previousmonth', 'previousquarter',
- 'previousyear', 'price', 'pricedisc', 'pricemat', 'product',
- 'productx', 'pv', 'quarter', 'quotient', 'radians', 'rand',
- 'randbetween', 'rank.eq', 'rankx', 'rate', 'received', 'related',
- 'relatedtable', 'removefilters', 'replace', 'rept', 'right',
- 'rollup', 'rollupaddissubtotal', 'rollupgroup', 'rollupissubtotal',
- 'round', 'rounddown', 'roundup', 'row', 'rri', 'sameperiodlastyear',
- 'sample', 'sampleaxiswithlocalminmax', 'search', 'second',
- 'selectcolumns', 'selectedmeasure', 'selectedmeasureformatstring',
- 'selectedmeasurename', 'selectedvalue', 'sign', 'sin', 'sinh', 'sln',
- 'sqrt', 'sqrtpi', 'startofmonth', 'startofquarter', 'startofyear',
- 'stdev.p', 'stdev.s', 'stdevx.p', 'stdevx.s', 'substitute',
- 'substitutewithindex', 'sum', 'summarize', 'summarizecolumns', 'sumx',
- 'switch', 'syd', 't.dist', 't.dist.2t', 't.dist.rt', 't.inv',
- 't.inv.2t', 'tan', 'tanh', 'tbilleq', 'tbillprice', 'tbillyield',
- 'time', 'timevalue', 'tocsv', 'today', 'tojson', 'topn',
- 'topnperlevel', 'topnskip', 'totalmtd', 'totalqtd', 'totalytd',
- 'treatas', 'trim', 'true', 'trunc', 'unichar', 'unicode', 'union',
- 'upper', 'userculture', 'userelationship', 'username', 'userobjectid',
- 'userprincipalname', 'utcnow', 'utctoday', 'value', 'values', 'var.p',
- 'var.s', 'varx.p', 'varx.s', 'vdb', 'weekday', 'weeknum', 'window',
- 'xirr', 'xnpv', 'year', 'yearfrac', 'yield', 'yielddisc', 'yieldmat'),
- prefix=r'(?i)', suffix=r'\b'), Name.Function), #Functions
-
- (words(('at','asc','boolean','both','by','create','currency',
- 'datetime','day','define','desc','double',
- 'evaluate','false','integer','measure',
- 'month','none','order','return','single','start','string',
- 'table','true','var','year'),
- prefix=r'(?i)', suffix=r'\b'), Name.Builtin), # Keyword
-
- (r':=|[-+*\/=^]', Operator),
- (r'\b(IN|NOT)\b', Operator.Word),
- (r'"', String, 'string'), #StringLiteral
- (r"'(?:[^']|'')*'(?!')(?:\[[ \w]+\])?|\w+\[[ \w]+\]",
- Name.Attribute), # Column reference
- (r"\[[ \w]+\]", Name.Attribute), #Measure reference
- (r'(?<!\w)(\d+\.?\d*|\.\d+\b)', Number),# Number
- (r'[\[\](){}`,.]', Punctuation), #Parenthesis
- (r'.*\n', Text),
-
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ],
- 'string': [
- (r'""', String.Escape),
- (r'"', String, '#pop'),
- (r'[^"]+', String),
- ]
- }
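Most of the grammar above is a case-insensitive word list; a small sketch, assuming a standard Pygments install, showing functions, keywords, column references and strings landing in distinct token types:

from pygments.lexers import DaxLexer

dax = 'EVALUATE SUMMARIZE(Sales[Region], "Total", SUM(Sales[Amount]))'
# EVALUATE -> Name.Builtin, SUMMARIZE/SUM -> Name.Function,
# Sales[Region] / Sales[Amount] -> Name.Attribute, "Total" -> String.
for tok, val in DaxLexer().get_tokens(dax):
    print(tok, repr(val))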
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/devicetree.py b/venv/lib/python3.11/site-packages/pygments/lexers/devicetree.py
deleted file mode 100644
index 9221464..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/devicetree.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""
- pygments.lexers.devicetree
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Devicetree language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, default, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
-
-__all__ = ['DevicetreeLexer']
-
-
-class DevicetreeLexer(RegexLexer):
- """
- Lexer for Devicetree files.
-
- .. versionadded:: 2.7
- """
-
- name = 'Devicetree'
- url = 'https://www.devicetree.org/'
- aliases = ['devicetree', 'dts']
- filenames = ['*.dts', '*.dtsi']
- mimetypes = ['text/x-c']
-
- #: optional Whitespace or /*...*/ style comment
- _ws = r'\s*(?:/[*][^*/]*?[*]/\s*)*'
-
- tokens = {
- 'macro': [
- # Include preprocessor directives (C style):
- (r'(#include)(' + _ws + r')([^\n]+)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
- # Define preprocessor directives (C style):
- (r'(#define)(' + _ws + r')([^\n]+)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc)),
- # devicetree style with file:
- (r'(/[^*/{]+/)(' + _ws + r')("[^\n{]+")',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
- # devicetree style with property:
- (r'(/[^*/{]+/)(' + _ws + r')([^\n;{]*)([;]?)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc, Punctuation)),
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'\\\n', Text), # line continuation
- (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
- # Open until EOF, so no ending delimiter
- (r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
- ],
- 'statements': [
- (r'(L?)(")', bygroups(String.Affix, String), 'string'),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'([^\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation), '#pop'),
- (words(('compatible', 'model', 'phandle', 'status', '#address-cells',
- '#size-cells', 'reg', 'virtual-reg', 'ranges', 'dma-ranges',
- 'device_type', 'name'), suffix=r'\b'), Keyword.Reserved),
- (r'([~!%^&*+=|?:<>/#-])', Operator),
- (r'[()\[\]{},.]', Punctuation),
- (r'[a-zA-Z_][\w-]*(?=(?:\s*,\s*[a-zA-Z_][\w-]*|(?:' + _ws + r'))*\s*[=;])',
- Name),
- (r'[a-zA-Z_]\w*', Name.Attribute),
- ],
- 'root': [
- include('whitespace'),
- include('macro'),
-
- # Nodes
- (r'([^/*@\s&]+|/)(@?)((?:0x)?[0-9a-fA-F,]*)(' + _ws + r')(\{)',
- bygroups(Name.Function, Operator, Number.Integer,
- Comment.Multiline, Punctuation), 'node'),
-
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation, '#pop'),
- ],
- 'node': [
- include('whitespace'),
- include('macro'),
-
- (r'([^/*@\s&]+|/)(@?)((?:0x)?[0-9a-fA-F,]*)(' + _ws + r')(\{)',
- bygroups(Name.Function, Operator, Number.Integer,
- Comment.Multiline, Punctuation), '#push'),
-
- include('statements'),
-
- (r'\};', Punctuation, '#pop'),
- (';', Punctuation),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- }
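The _ws fragment above is spliced into several rules so that /*...*/ comments may sit between a node name and its opening brace. A minimal usage sketch, assuming a standard Pygments install:

from pygments.lexers import DevicetreeLexer

dts = '/dts-v1/;\n/ {\n    compatible = "acme,board";\n    cpu@0 { reg = <0>; };\n};\n'
for tok, val in DevicetreeLexer().get_tokens(dts):
    print(tok, repr(val))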
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/diff.py b/venv/lib/python3.11/site-packages/pygments/lexers/diff.py
deleted file mode 100644
index 0ab85bf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/diff.py
+++ /dev/null
@@ -1,168 +0,0 @@
-"""
- pygments.lexers.diff
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for diff/patch formats.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
- Literal, Whitespace
-
-__all__ = ['DiffLexer', 'DarcsPatchLexer', 'WDiffLexer']
-
-
-class DiffLexer(RegexLexer):
- """
- Lexer for unified or context-style diffs or patches.
- """
-
- name = 'Diff'
- aliases = ['diff', 'udiff']
- filenames = ['*.diff', '*.patch']
- mimetypes = ['text/x-diff', 'text/x-patch']
-
- tokens = {
- 'root': [
- (r'( )(.*)(\n)', bygroups(Whitespace, Text, Whitespace)),
- (r'(!.*|---)(\n)', bygroups(Generic.Strong, Whitespace)),
- (r'((?:< |-).*)(\n)', bygroups(Generic.Deleted, Whitespace)),
- (r'((?:> |\+).*)(\n)', bygroups(Generic.Inserted, Whitespace)),
- (
- r'(@.*|\d(?:,\d+)?(?:a|c|d)\d+(?:,\d+)?)(\n)',
- bygroups(Generic.Subheading, Whitespace),
- ),
- (r'((?:[Ii]ndex|diff).*)(\n)', bygroups(Generic.Heading, Whitespace)),
- (r'(=.*)(\n)', bygroups(Generic.Heading, Whitespace)),
- (r'(.*)(\n)', bygroups(Text, Whitespace)),
- ]
- }
-
- def analyse_text(text):
- if text[:7] == 'Index: ':
- return True
- if text[:5] == 'diff ':
- return True
- if text[:4] == '--- ':
- return 0.9
-
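Like SmaliLexer earlier in this diff, DiffLexer advertises itself through analyse_text, here with simple prefix checks. A sketch of both the direct call and lexer guessing, assuming a standard Pygments install:

from pygments.lexers import guess_lexer
from pygments.lexers.diff import DiffLexer

patch = "--- a/old.txt\n+++ b/new.txt\n@@ -1 +1 @@\n-old\n+new\n"
print(DiffLexer.analyse_text(patch))      # 0.9, from the '--- ' prefix check
print(type(guess_lexer(patch)).__name__)  # usually 'DiffLexer' for input like this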
-
-class DarcsPatchLexer(RegexLexer):
- """
- DarcsPatchLexer is a lexer for the various versions of the darcs patch
- format. Examples of this format are derived by commands such as
- ``darcs annotate --patch`` and ``darcs send``.
-
- .. versionadded:: 0.10
- """
-
- name = 'Darcs Patch'
- aliases = ['dpatch']
- filenames = ['*.dpatch', '*.darcspatch']
-
- DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
- 'replace')
-
- tokens = {
- 'root': [
- (r'<', Operator),
- (r'>', Operator),
- (r'\{', Operator),
- (r'\}', Operator),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
- bygroups(Operator, Keyword, Name, Whitespace, Name, Operator,
- Literal.Date, Whitespace, Operator)),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
- bygroups(Operator, Keyword, Name, Whitespace, Name, Operator,
- Literal.Date, Whitespace), 'comment'),
- (r'New patches:', Generic.Heading),
- (r'Context:', Generic.Heading),
- (r'Patch bundle hash:', Generic.Heading),
- (r'(\s*)(%s)(.*)(\n)' % '|'.join(DPATCH_KEYWORDS),
- bygroups(Whitespace, Keyword, Text, Whitespace)),
- (r'\+', Generic.Inserted, "insert"),
- (r'-', Generic.Deleted, "delete"),
- (r'(.*)(\n)', bygroups(Text, Whitespace)),
- ],
- 'comment': [
- (r'[^\]].*\n', Comment),
- (r'\]', Operator, "#pop"),
- ],
- 'specialText': [ # darcs add [_CODE_] special operators for clarity
- (r'\n', Whitespace, "#pop"), # line-based
- (r'\[_[^_]*_]', Operator),
- ],
- 'insert': [
- include('specialText'),
- (r'\[', Generic.Inserted),
- (r'[^\n\[]+', Generic.Inserted),
- ],
- 'delete': [
- include('specialText'),
- (r'\[', Generic.Deleted),
- (r'[^\n\[]+', Generic.Deleted),
- ],
- }
-
-
-class WDiffLexer(RegexLexer):
- """
- A wdiff lexer.
-
- Note that:
-
- * It only works with normal output (without options like ``-l``).
- * If the target files contain "[-", "-]", "{+", or "+}",
-      especially if they are unbalanced, the lexer will get confused.
-
- .. versionadded:: 2.2
- """
-
- name = 'WDiff'
- url = 'https://www.gnu.org/software/wdiff/'
- aliases = ['wdiff']
- filenames = ['*.wdiff']
- mimetypes = []
-
- flags = re.MULTILINE | re.DOTALL
-
-    # We can only assume that a "[-" appearing after a "[-" and before the
-    # matching "-]" is `nested`, as happens when wdiff output is run through
-    # wdiff again. There is no way to tell whether such markers come from
-    # wdiff output or from the original text.
-
- ins_op = r"\{\+"
- ins_cl = r"\+\}"
- del_op = r"\[\-"
- del_cl = r"\-\]"
- normal = r'[^{}[\]+-]+' # for performance
- tokens = {
- 'root': [
- (ins_op, Generic.Inserted, 'inserted'),
- (del_op, Generic.Deleted, 'deleted'),
- (normal, Text),
- (r'.', Text),
- ],
- 'inserted': [
- (ins_op, Generic.Inserted, '#push'),
- (del_op, Generic.Inserted, '#push'),
- (del_cl, Generic.Inserted, '#pop'),
-
- (ins_cl, Generic.Inserted, '#pop'),
- (normal, Generic.Inserted),
- (r'.', Generic.Inserted),
- ],
- 'deleted': [
- (del_op, Generic.Deleted, '#push'),
- (ins_op, Generic.Deleted, '#push'),
- (ins_cl, Generic.Deleted, '#pop'),
-
- (del_cl, Generic.Deleted, '#pop'),
- (normal, Generic.Deleted),
- (r'.', Generic.Deleted),
- ],
- }
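
The DiffLexer and WDiffLexer removed above are normally driven through the
public Pygments API rather than imported for their own sake. A minimal sketch
of highlighting a unified diff (assuming Pygments is installed; the patch text
is an arbitrary sample):

    from pygments import highlight
    from pygments.lexers.diff import DiffLexer
    from pygments.formatters import TerminalFormatter

    # A tiny unified-diff sample; any patch text works here.
    patch = (
        "--- a/hello.txt\n"
        "+++ b/hello.txt\n"
        "@@ -1 +1 @@\n"
        "-hello world\n"
        "+hello, world\n"
    )

    # highlight() runs the lexer over the text and renders the token stream
    # with the chosen formatter, here as ANSI-colored terminal output.
    print(highlight(patch, DiffLexer(), TerminalFormatter()))
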
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/dns.py b/venv/lib/python3.11/site-packages/pygments/lexers/dns.py
deleted file mode 100644
index 18cab31..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/dns.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""
- pygments.lexers.dns
- ~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for DNS
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace, Literal
-from pygments.lexer import RegexLexer, bygroups, include
-
-__all__ = ['DnsZoneLexer']
-
-
-CLASSES = [
- "IN",
- "CS",
- "CH",
- "HS",
-]
-
-CLASSES_RE = "(" + "|".join(CLASSES) + ')'
-
-
-class DnsZoneLexer(RegexLexer):
-
- """
-    Lexer for DNS zone files.
-
- .. versionadded:: 2.16
- """
-
- flags = re.MULTILINE
-
- name = 'Zone'
- aliases = ['zone']
- filenames = [ "*.zone" ]
- url = "https://datatracker.ietf.org/doc/html/rfc1035"
- mimetypes = ['text/dns']
-
- tokens = {
- 'root': [
- # Empty/comment line:
- (r'([ \t]*)(;.*)(\n)', bygroups(Whitespace, Comment.Single, Whitespace)),
- # Special directives:
- (r'^\$ORIGIN\b', Keyword, 'values'),
- (r'^\$TTL\b', Keyword, 'values'),
- (r'^\$INCLUDE\b', Comment.Preproc, 'include'),
- # TODO, $GENERATE https://bind9.readthedocs.io/en/v9.18.14/chapter3.html#soa-rr
- (r'^\$[A-Z]+\b', Keyword, 'values'),
- # Records:
- # <domain-name> [<TTL>] [<class>] <type> <RDATA> [<comment>]
- (r'^(@)([ \t]+)(?:([0-9]+[smhdw]?)([ \t]+))?(?:' + CLASSES_RE + "([ \t]+))?([A-Z]+)([ \t]+)",
- bygroups(Operator, Whitespace, Number.Integer, Whitespace, Name.Class, Whitespace, Keyword.Type, Whitespace),
- "values"),
- (r'^([^ \t\n]*)([ \t]+)(?:([0-9]+[smhdw]?)([ \t]+))?(?:' + CLASSES_RE + "([ \t]+))?([A-Z]+)([ \t]+)",
- bygroups(Name, Whitespace, Number.Integer, Whitespace, Name.Class, Whitespace, Keyword.Type, Whitespace),
- "values"),
- # <domain-name> [<class>] [<TTL>] <type> <RDATA> [<comment>]
-            (r'^(@)([ \t]+)(?:' + CLASSES_RE + "([ \t]+))?(?:([0-9]+[smhdw]?)([ \t]+))?([A-Z]+)([ \t]+)",
-             bygroups(Operator, Whitespace, Name.Class, Whitespace, Number.Integer, Whitespace, Keyword.Type, Whitespace),
-             "values"),
-            (r'^([^ \t\n]*)([ \t]+)(?:' + CLASSES_RE + "([ \t]+))?(?:([0-9]+[smhdw]?)([ \t]+))?([A-Z]+)([ \t]+)",
-             bygroups(Name, Whitespace, Name.Class, Whitespace, Number.Integer, Whitespace, Keyword.Type, Whitespace),
- "values"),
- ],
- # Parsing values:
- 'values': [
- (r'\n', Whitespace, "#pop"),
- (r'\(', Punctuation, 'nested'),
- include('simple-values'),
- ],
- # Parsing nested values (...):
- 'nested': [
- (r'\)', Punctuation, "#pop"),
- include('simple-values'),
- ],
- # Parsing values:
- 'simple-values': [
- (r'(;.*)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'[ \t]+', Whitespace),
- (r"@\b", Operator),
- ('"', String, 'string'),
- (r'[0-9]+[smhdw]?$', Number.Integer),
- (r'([0-9]+[smhdw]?)([ \t]+)', bygroups(Number.Integer, Whitespace)),
- (r'\S+', Literal),
- ],
- 'include': [
- (r'([ \t]+)([^ \t\n]+)([ \t]+)([-\._a-zA-Z]+)([ \t]+)(;.*)?$',
- bygroups(Whitespace, Comment.PreprocFile, Whitespace, Name, Whitespace, Comment.Single), '#pop'),
- (r'([ \t]+)([^ \t\n]+)([ \t\n]+)$', bygroups(Whitespace, Comment.PreprocFile, Whitespace), '#pop'),
- ],
- "string": [
- (r'\\"', String),
- (r'"', String, "#pop"),
- (r'[^"]+', String),
- ]
- }
-
- def analyse_text(text):
- return text.startswith("$ORIGIN")
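
For the zone-file rules above, the resulting token stream can be inspected
directly; this is a sketch that assumes a Pygments release new enough to ship
DnsZoneLexer (2.16 or later), and the record is made-up sample data:

    from pygments.lexers.dns import DnsZoneLexer

    zone = (
        "$ORIGIN example.com.\n"
        "$TTL 3600\n"
        "www 300 IN A 192.0.2.10  ; sample address record\n"
    )

    # get_tokens() yields (token_type, value) pairs produced by the rules
    # above; printing them shows how each field of a record is classified.
    for token_type, value in DnsZoneLexer().get_tokens(zone):
        print(token_type, repr(value))
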
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/dotnet.py b/venv/lib/python3.11/site-packages/pygments/lexers/dotnet.py
deleted file mode 100644
index 5c488dd..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/dotnet.py
+++ /dev/null
@@ -1,841 +0,0 @@
-"""
- pygments.lexers.dotnet
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for .net languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
- using, this, default, words
-from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
- Name, String, Number, Literal, Other, Whitespace
-from pygments.util import get_choice_opt
-from pygments import unistring as uni
-
-from pygments.lexers.html import XmlLexer
-
-__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
- 'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer', 'XppLexer']
-
-
-class CSharpLexer(RegexLexer):
- """
- For C# source code.
-
- Additional options accepted:
-
- `unicodelevel`
- Determines which Unicode characters this lexer allows for identifiers.
- The possible values are:
-
- * ``none`` -- only the ASCII letters and numbers are allowed. This
- is the fastest selection.
- * ``basic`` -- all Unicode characters from the specification except
- category ``Lo`` are allowed.
- * ``full`` -- all Unicode characters as specified in the C# specs
- are allowed. Note that this means a considerable slowdown since the
- ``Lo`` category has more than 40,000 characters in it!
-
- The default value is ``basic``.
-
- .. versionadded:: 0.8
- """
-
- name = 'C#'
- url = 'https://docs.microsoft.com/en-us/dotnet/csharp/'
- aliases = ['csharp', 'c#', 'cs']
- filenames = ['*.cs']
- mimetypes = ['text/x-csharp'] # inferred
-
- flags = re.MULTILINE | re.DOTALL
-
- # for the range of allowed unicode characters in identifiers, see
- # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
- 'none': r'@?[_a-zA-Z]\w*',
- 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'),
- 'full': ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])' +
- '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- }
-
- tokens = {}
- token_variants = True
-
- for levelname, cs_ident in levels.items():
- tokens[levelname] = {
- 'root': [
- # method names
- (r'^([ \t]*)((?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
- bygroups(Whitespace, using(this), Name.Function, Whitespace,
- Punctuation)),
- (r'^(\s*)(\[.*?\])', bygroups(Whitespace, Name.Attribute)),
- (r'[^\S\n]+', Whitespace),
- (r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'\n', Whitespace),
- (words((
- '>>>=', '>>=', '<<=', '<=', '>=', '+=', '-=', '*=', '/=',
- '%=', '&=', '|=', '^=', '??=', '=>', '??', '?.', '!=', '==',
- '&&', '||', '>>>', '>>', '<<', '++', '--', '+', '-', '*',
- '/', '%', '&', '|', '^', '<', '>', '?', '!', '~', '=',
- )), Operator),
- (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
- (r'[()\[\];:,.]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
- (r'\$?"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?"
- r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r'(#)([ \t]*)(if|endif|else|elif|define|undef|'
- r'line|error|warning|region|endregion|pragma)\b(.*?)(\n)',
- bygroups(Comment.Preproc, Whitespace, Comment.Preproc,
- Comment.Preproc, Whitespace)),
- (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Whitespace,
- Keyword)),
- (r'(abstract|as|async|await|base|break|by|case|catch|'
- r'checked|const|continue|default|delegate|'
- r'do|else|enum|event|explicit|extern|false|finally|'
- r'fixed|for|foreach|goto|if|implicit|in|interface|'
- r'internal|is|let|lock|new|null|on|operator|'
- r'out|override|params|private|protected|public|readonly|'
- r'ref|return|sealed|sizeof|stackalloc|static|'
- r'switch|this|throw|true|try|typeof|'
- r'unchecked|unsafe|virtual|void|while|'
- r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
- r'descending|from|group|into|orderby|select|thenby|where|'
- r'join|equals)\b', Keyword),
- (r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
- r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
- (r'(class|struct)(\s+)', bygroups(Keyword, Whitespace), 'class'),
- (r'(namespace|using)(\s+)', bygroups(Keyword, Whitespace), 'namespace'),
- (cs_ident, Name),
- ],
- 'class': [
- (cs_ident, Name.Class, '#pop'),
- default('#pop'),
- ],
- 'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop'),
- ]
- }
-
- def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', list(self.tokens), 'basic')
- if level not in self._all_tokens:
- # compile the regexes now
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
-
- RegexLexer.__init__(self, **options)
-
-
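
The ``unicodelevel`` option documented in the CSharpLexer docstring is given at
construction time and selects one of the pre-built token tables. A small
sketch (the C# snippet is illustrative only):

    from pygments import highlight
    from pygments.lexers.dotnet import CSharpLexer
    from pygments.formatters import HtmlFormatter

    code = 'class Greeter { string Hello(string name) => $"Hi {name}"; }'

    # 'none' restricts identifiers to ASCII and is the fastest choice;
    # 'basic' (the default) and 'full' allow progressively more Unicode.
    lexer = CSharpLexer(unicodelevel='none')
    print(highlight(code, lexer, HtmlFormatter()))
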
-class NemerleLexer(RegexLexer):
- """
- For Nemerle source code.
-
- Additional options accepted:
-
- `unicodelevel`
- Determines which Unicode characters this lexer allows for identifiers.
- The possible values are:
-
- * ``none`` -- only the ASCII letters and numbers are allowed. This
- is the fastest selection.
- * ``basic`` -- all Unicode characters from the specification except
- category ``Lo`` are allowed.
- * ``full`` -- all Unicode characters as specified in the C# specs
- are allowed. Note that this means a considerable slowdown since the
- ``Lo`` category has more than 40,000 characters in it!
-
- The default value is ``basic``.
-
- .. versionadded:: 1.5
- """
-
- name = 'Nemerle'
- url = 'http://nemerle.org'
- aliases = ['nemerle']
- filenames = ['*.n']
- mimetypes = ['text/x-nemerle'] # inferred
-
- flags = re.MULTILINE | re.DOTALL
-
- # for the range of allowed unicode characters in identifiers, see
- # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
- 'none': r'@?[_a-zA-Z]\w*',
- 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'),
- 'full': ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])' +
- '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- }
-
- tokens = {}
- token_variants = True
-
- for levelname, cs_ident in levels.items():
- tokens[levelname] = {
- 'root': [
- # method names
- (r'^([ \t]*)((?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
- bygroups(Whitespace, using(this), Name.Function, Whitespace, \
- Punctuation)),
- (r'^(\s*)(\[.*?\])', bygroups(Whitespace, Name.Attribute)),
- (r'[^\S\n]+', Whitespace),
- (r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'\n', Whitespace),
- (r'(\$)(\s*)(")', bygroups(String, Whitespace, String),
- 'splice-string'),
- (r'(\$)(\s*)(<#)', bygroups(String, Whitespace, String),
- 'splice-string2'),
- (r'<#', String, 'recursive-string'),
-
- (r'(<\[)(\s*)(' + cs_ident + ':)?',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'\]\>', Keyword),
-
- # quasiquotation only
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
-
- (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
- (r'(#)([ \t]*)(if|endif|else|elif|define|undef|'
- r'line|error|warning|region|endregion|pragma)\b',
- bygroups(Comment.Preproc, Whitespace, Comment.Preproc), 'preproc'),
- (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Whitespace, Keyword)),
- (r'(abstract|and|as|base|catch|def|delegate|'
- r'enum|event|extern|false|finally|'
- r'fun|implements|interface|internal|'
- r'is|macro|match|matches|module|mutable|new|'
- r'null|out|override|params|partial|private|'
- r'protected|public|ref|sealed|static|'
- r'syntax|this|throw|true|try|type|typeof|'
- r'virtual|volatile|when|where|with|'
- r'assert|assert2|async|break|checked|continue|do|else|'
- r'ensures|for|foreach|if|late|lock|new|nolate|'
- r'otherwise|regexp|repeat|requires|return|surroundwith|'
- r'unchecked|unless|using|while|yield)\b', Keyword),
- (r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
- r'short|string|uint|ulong|ushort|void|array|list)\b\??',
- Keyword.Type),
- (r'(:>?)(\s*)(' + cs_ident + r'\??)',
- bygroups(Punctuation, Whitespace, Keyword.Type)),
- (r'(class|struct|variant|module)(\s+)',
- bygroups(Keyword, Whitespace), 'class'),
- (r'(namespace|using)(\s+)', bygroups(Keyword, Whitespace),
- 'namespace'),
- (cs_ident, Name),
- ],
- 'class': [
- (cs_ident, Name.Class, '#pop')
- ],
- 'preproc': [
- (r'\w+', Comment.Preproc),
- (r'[ \t]+', Whitespace),
- (r'\n', Whitespace, '#pop')
- ],
- 'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
- ],
- 'splice-string': [
- (r'[^"$]', String),
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
- (r'\\"', String),
- (r'"', String, '#pop')
- ],
- 'splice-string2': [
- (r'[^#<>$]', String),
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
- (r'<#', String, '#push'),
- (r'#>', String, '#pop')
- ],
- 'recursive-string': [
- (r'[^#<>]', String),
- (r'<#', String, '#push'),
- (r'#>', String, '#pop')
- ],
- 'splice-string-content': [
- (r'if|match', Keyword),
- (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
- (cs_ident, Name),
- (r'\d+', Number),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop')
- ]
- }
-
- def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', list(self.tokens),
- 'basic')
- if level not in self._all_tokens:
- # compile the regexes now
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
-
- RegexLexer.__init__(self, **options)
-
- def analyse_text(text):
- """Nemerle is quite similar to Python, but @if is relatively uncommon
- elsewhere."""
- result = 0
-
- if '@if' in text:
- result += 0.1
-
- return result
-
-
-class BooLexer(RegexLexer):
- """
- For Boo source code.
- """
-
- name = 'Boo'
- url = 'https://github.com/boo-lang/boo'
- aliases = ['boo']
- filenames = ['*.boo']
- mimetypes = ['text/x-boo']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(#|//).*$', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment'),
- (r'[]{}:(),.;[]', Punctuation),
- (r'(\\)(\n)', bygroups(Text, Whitespace)),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'/(\\\\|\\[^\\]|[^/\\\s])/', String.Regex),
- (r'@/(\\\\|\\[^\\]|[^/\\])*/', String.Regex),
- (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
- (r'(as|abstract|callable|constructor|destructor|do|import|'
- r'enum|event|final|get|interface|internal|of|override|'
- r'partial|private|protected|public|return|set|static|'
- r'struct|transient|virtual|yield|super|and|break|cast|'
- r'continue|elif|else|ensure|except|for|given|goto|if|in|'
- r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
- r'while|from|as)\b', Keyword),
- (r'def(?=\s+\(.*?\))', Keyword),
- (r'(def)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
- (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (r'(namespace)(\s+)', bygroups(Keyword, Whitespace), 'namespace'),
- (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
- r'assert|checked|enumerate|filter|getter|len|lock|map|'
- r'matrix|max|min|normalArrayIndexing|print|property|range|'
- r'rawArrayIndexing|required|typeof|unchecked|using|'
- r'yieldAll|zip)\b', Name.Builtin),
- (r'"""(\\\\|\\"|.*?)"""', String.Double),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*', Name),
- (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
- (r'[0-9][0-9.]*(ms?|d|h|s)', Number),
- (r'0\d+', Number.Oct),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer),
- ],
- 'comment': [
- ('/[*]', Comment.Multiline, '#push'),
- ('[*]/', Comment.Multiline, '#pop'),
- ('[^/*]', Comment.Multiline),
- ('[*/]', Comment.Multiline)
- ],
- 'funcname': [
- (r'[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'namespace': [
- (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
- ]
- }
-
-
-class VbNetLexer(RegexLexer):
- """
- For Visual Basic.NET source code.
- Also LibreOffice Basic, OpenOffice Basic, and StarOffice Basic.
- """
-
- name = 'VB.net'
- url = 'https://docs.microsoft.com/en-us/dotnet/visual-basic/'
- aliases = ['vb.net', 'vbnet', 'lobas', 'oobas', 'sobas']
- filenames = ['*.vb', '*.bas']
- mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
-
- uni_name = '[_' + uni.combine('Ll', 'Lt', 'Lm', 'Nl') + ']' + \
- '[' + uni.combine('Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- (r'^\s*<.*?>', Name.Attribute),
- (r'\s+', Whitespace),
- (r'\n', Whitespace),
- (r'(rem\b.*?)(\n)', bygroups(Comment, Whitespace)),
- (r"('.*?)(\n)", bygroups(Comment, Whitespace)),
- (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|'
- r'#ExternalSource.*?\n|#End\s+ExternalSource|'
- r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
- Comment.Preproc),
- (r'[(){}!#,.:]', Punctuation),
- (r'(Option)(\s+)(Strict|Explicit|Compare)(\s+)'
- r'(On|Off|Binary|Text)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration,
- Whitespace, Keyword.Declaration)),
- (words((
- 'AddHandler', 'Alias', 'ByRef', 'ByVal', 'Call', 'Case',
- 'Catch', 'CBool', 'CByte', 'CChar', 'CDate', 'CDec', 'CDbl',
- 'CInt', 'CLng', 'CObj', 'Continue', 'CSByte', 'CShort', 'CSng',
- 'CStr', 'CType', 'CUInt', 'CULng', 'CUShort', 'Declare',
- 'Default', 'Delegate', 'DirectCast', 'Do', 'Each', 'Else',
- 'ElseIf', 'EndIf', 'Erase', 'Error', 'Event', 'Exit', 'False',
- 'Finally', 'For', 'Friend', 'Get', 'Global', 'GoSub', 'GoTo',
- 'Handles', 'If', 'Implements', 'Inherits', 'Interface', 'Let',
- 'Lib', 'Loop', 'Me', 'MustInherit', 'MustOverride', 'MyBase',
- 'MyClass', 'Narrowing', 'New', 'Next', 'Not', 'Nothing',
- 'NotInheritable', 'NotOverridable', 'Of', 'On', 'Operator',
- 'Option', 'Optional', 'Overloads', 'Overridable', 'Overrides',
- 'ParamArray', 'Partial', 'Private', 'Protected', 'Public',
- 'RaiseEvent', 'ReadOnly', 'ReDim', 'RemoveHandler', 'Resume',
- 'Return', 'Select', 'Set', 'Shadows', 'Shared', 'Single',
- 'Static', 'Step', 'Stop', 'SyncLock', 'Then', 'Throw', 'To',
- 'True', 'Try', 'TryCast', 'Wend', 'Using', 'When', 'While',
- 'Widening', 'With', 'WithEvents', 'WriteOnly'),
- prefix=r'(?<!\.)', suffix=r'\b'), Keyword),
- (r'(?<!\.)End\b', Keyword, 'end'),
- (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
- (r'(?<!\.)(Function|Sub|Property)(\s+)',
- bygroups(Keyword, Whitespace), 'funcname'),
- (r'(?<!\.)(Class|Structure|Enum)(\s+)',
- bygroups(Keyword, Whitespace), 'classname'),
- (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
- bygroups(Keyword, Whitespace), 'namespace'),
- (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
- r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
- r'UShort)\b', Keyword.Type),
- (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
- r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
- (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
- r'<=|>=|<>|[-&*/\\^+=<>\[\]]',
- Operator),
- ('"', String, 'string'),
- (r'(_)(\n)', bygroups(Text, Whitespace)), # Line continuation (must be before Name)
- (uni_name + '[%&@!#$]?', Name),
- ('#.*?#', Literal.Date),
- (r'(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?', Number.Float),
- (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
- ],
- 'string': [
- (r'""', String),
- (r'"C?', String, '#pop'),
- (r'[^"]+', String),
- ],
- 'dim': [
- (uni_name, Name.Variable, '#pop'),
- default('#pop'), # any other syntax
- ],
- 'funcname': [
- (uni_name, Name.Function, '#pop'),
- ],
- 'classname': [
- (uni_name, Name.Class, '#pop'),
- ],
- 'namespace': [
- (uni_name, Name.Namespace),
- (r'\.', Name.Namespace),
- default('#pop'),
- ],
- 'end': [
- (r'\s+', Whitespace),
- (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
- Keyword, '#pop'),
- default('#pop'),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*(#If|Module|Namespace)', text, re.MULTILINE):
- return 0.5
-
-
-class GenericAspxLexer(RegexLexer):
- """
- Lexer for ASP.NET pages.
- """
-
- name = 'aspx-gen'
- filenames = []
- mimetypes = []
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
- Other,
- using(XmlLexer))),
- (r'(.+?)(?=<)', using(XmlLexer)),
- (r'.+', using(XmlLexer)),
- ],
- }
-
-
-# TODO support multiple languages within the same source file
-class CSharpAspxLexer(DelegatingLexer):
- """
- Lexer for highlighting C# within ASP.NET pages.
- """
-
- name = 'aspx-cs'
- aliases = ['aspx-cs']
- filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
- mimetypes = []
-
- def __init__(self, **options):
- super().__init__(CSharpLexer, GenericAspxLexer, **options)
-
- def analyse_text(text):
- if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
- return 0.2
- elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
- return 0.15
-
-
-class VbNetAspxLexer(DelegatingLexer):
- """
- Lexer for highlighting Visual Basic.net within ASP.NET pages.
- """
-
- name = 'aspx-vb'
- aliases = ['aspx-vb']
- filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
- mimetypes = []
-
- def __init__(self, **options):
- super().__init__(VbNetLexer, GenericAspxLexer, **options)
-
- def analyse_text(text):
- if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
- return 0.2
- elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
- return 0.15
-
-
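
The two ASPX lexers follow the DelegatingLexer pattern: GenericAspxLexer lexes
the page markup and emits embedded code as Other tokens, which the delegating
lexer then re-lexes with CSharpLexer or VbNetLexer. A usage sketch (the .aspx
fragment is made up for illustration):

    from pygments.lexers.dotnet import CSharpAspxLexer

    page = (
        '<%@ Page Language="C#" %>\n'
        '<% var now = System.DateTime.Now; %>\n'
        '<p>Hello</p>\n'
    )

    # Markup is tokenized via XmlLexer, while the <% ... %> blocks are
    # re-lexed as C# by the delegated CSharpLexer.
    for token_type, value in CSharpAspxLexer().get_tokens(page):
        print(token_type, repr(value))
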
-# Very close to functional.OcamlLexer
-class FSharpLexer(RegexLexer):
- """
- For the F# language (version 3.0).
-
- .. versionadded:: 1.5
- """
-
- name = 'F#'
- url = 'https://fsharp.org/'
- aliases = ['fsharp', 'f#']
- filenames = ['*.fs', '*.fsi', '*.fsx']
- mimetypes = ['text/x-fsharp']
-
- keywords = [
- 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
- 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
- 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
- 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
- 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
- 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
- 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
- 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
- 'while', 'with', 'yield!', 'yield',
- ]
- # Reserved words; cannot hurt to color them as keywords too.
- keywords += [
- 'atomic', 'break', 'checked', 'component', 'const', 'constraint',
- 'constructor', 'continue', 'eager', 'event', 'external', 'fixed',
- 'functor', 'include', 'method', 'mixin', 'object', 'parallel',
- 'process', 'protected', 'pure', 'sealed', 'tailcall', 'trait',
- 'virtual', 'volatile',
- ]
- keyopts = [
- '!=', '#', '&&', '&', r'\(', r'\)', r'\*', r'\+', ',', r'-\.',
- '->', '-', r'\.\.', r'\.', '::', ':=', ':>', ':', ';;', ';', '<-',
- r'<\]', '<', r'>\]', '>', r'\?\?', r'\?', r'\[<', r'\[\|', r'\[', r'\]',
- '_', '`', r'\{', r'\|\]', r'\|', r'\}', '~', '<@@', '<@', '=', '@>', '@@>',
- ]
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ['and', 'or', 'not']
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = [
- 'sbyte', 'byte', 'char', 'nativeint', 'unativeint', 'float32', 'single',
- 'float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32',
- 'uint32', 'int64', 'uint64', 'decimal', 'unit', 'bool', 'string',
- 'list', 'exn', 'obj', 'enum',
- ]
-
- # See http://msdn.microsoft.com/en-us/library/dd233181.aspx and/or
- # http://fsharp.org/about/files/spec.pdf for reference. Good luck.
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbrafv]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\U[0-9a-fA-F]{8}', String.Escape),
- ],
- 'root': [
- (r'\s+', Whitespace),
- (r'\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b(?<!\.)([A-Z][\w\']*)(?=\s*\.)',
- Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name),
- (r'(///.*?)(\n)', bygroups(String.Doc, Whitespace)),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'\(\*(?!\))', Comment, 'comment'),
-
- (r'@"', String, 'lstring'),
- (r'"""', String, 'tqs'),
- (r'"', String, 'string'),
-
- (r'\b(open|module)(\s+)([\w.]+)',
- bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'\b(let!?)(\s+)(\w+)',
- bygroups(Keyword, Whitespace, Name.Variable)),
- (r'\b(type)(\s+)(\w+)',
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(member|override)(\s+)(\w+)(\.)(\w+)',
- bygroups(Keyword, Whitespace, Name, Punctuation, Name.Function)),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'``([^`\n\r\t]|`[^`\n\r\t])+``', Name),
- (r'(%s)' % '|'.join(keyopts), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
- (r'(#)([ \t]*)(if|endif|else|line|nowarn|light|\d+)\b(.*?)(\n)',
- bygroups(Comment.Preproc, Whitespace, Comment.Preproc,
- Comment.Preproc, Whitespace)),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
- (r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
- (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Bin),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
- Number.Float),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'@?"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'dotted': [
- (r'\s+', Whitespace),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- # e.g. dictionary index access
- default('#pop'),
- ],
- 'comment': [
- (r'[^(*)@"]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- # comments cannot be closed within strings in comments
- (r'@"', String, 'lstring'),
- (r'"""', String, 'tqs'),
- (r'"', String, 'string'),
- (r'[(*)@]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String),
- include('escape-sequence'),
- (r'\\\n', String),
- (r'\n', String), # newlines are allowed in any string
- (r'"B?', String, '#pop'),
- ],
- 'lstring': [
- (r'[^"]+', String),
- (r'\n', String),
- (r'""', String),
- (r'"B?', String, '#pop'),
- ],
- 'tqs': [
- (r'[^"]+', String),
- (r'\n', String),
- (r'"""B?', String, '#pop'),
- (r'"', String),
- ],
- }
-
- def analyse_text(text):
- """F# doesn't have that many unique features -- |> and <| are weak
- indicators."""
- result = 0
- if '|>' in text:
- result += 0.05
- if '<|' in text:
- result += 0.05
-
- return result
-
-
-class XppLexer(RegexLexer):
-
- """
-    For X++ source code. This is loosely based on the CSharpLexer.
-
- .. versionadded:: 2.15
- """
-
- name = 'X++'
- url = 'https://learn.microsoft.com/en-us/dynamics365/fin-ops-core/dev-itpro/dev-ref/xpp-language-reference'
- aliases = ['xpp', 'x++']
- filenames = ['*.xpp']
-
- flags = re.MULTILINE
-
- XPP_CHARS = ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])' +
- '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*');
- # Temporary, see
- # https://github.com/thatch/regexlint/pull/49
- XPP_CHARS = XPP_CHARS.replace('\x00', '\x01')
-
- OPERATORS = (
- '<=', '>=', '+=', '-=', '*=', '/=', '!=', '==',
- '&&', '||', '>>', '<<', '++', '--', '+', '-', '*',
- '/', '%', '&', '|', '^', '<', '>', '?', '!', '~', '=',
- )
- KEYWORDS = ('abstract','anytype','as','async','asc','at','avg','break','breakpoint','by','byref','case','catch',
- 'changecompany','client','container','continue','count','crosscompany','default','delegate',
- 'delete_from','desc','display','div','do','edit','else','element','eventhandler','exists','false','final',
- 'firstfast','firstonly','firstonly10','firstonly100','firstonly1000','flush','for','forceliterals',
- 'forcenestedloop','forceplaceholders','forceselectorder','forupdate','from','group','if','insert_recordset',
- 'interface','is','join','like','maxof','minof','mod','new','next','nofetch','notexists','null','optimisticlock','order',
- 'outer','pause','pessimisticlock','print','private','protected','public','repeatableread','retry','return',
- 'reverse','select','server','setting','static','sum','super','switch','tablelock','this','throw','true','try','ttsabort','ttsbegin',
- 'ttscommit','update_recordset','validtimestate','void','where','while','window')
- RUNTIME_FUNCTIONS = ('_duration','abs','acos','any2Date','any2Enum','any2Guid','any2Int','any2Int64','any2Real','any2Str','anytodate',
- 'anytoenum','anytoguid','anytoint','anytoint64','anytoreal','anytostr','asin','atan','beep','cTerm','char2Num','classIdGet',
- 'corrFlagGet','corrFlagSet','cos','cosh','curExt','curUserId','date2Num','date2Str','datetime2Str','dayName','dayOfMth',
- 'dayOfWk','dayOfYr','ddb','decRound','dg','dimOf','endMth','enum2str','exp','exp10','fV','fieldId2Name','fieldId2PName',
- 'fieldName2Id','frac','funcName','getCurrentPartition','getCurrentPartitionRecId','getPrefix','guid2Str','idg','indexId2Name',
- 'indexName2Id','int2Str','int642Str','intvMax','intvName','intvNo','intvNorm','log10','logN','match','max','min','mkDate','mthName',
- 'mthOfYr','newGuid','nextMth','nextQtr','nextYr','num2Char','num2Date','num2Str','pmt','power','prevMth','prevQtr','prevYr',
- 'prmIsDefault','pt','pv','rate','refPrintAll','round','runAs','sessionId','setPrefix','sin','sinh','sleep','sln','str2Date',
- 'str2Datetime','str2Enum','str2Guid','str2Int','str2Int64','str2Num','str2Time','strAlpha','strCmp','strColSeq','strDel',
- 'strFind','strFmt','strIns','strKeep','strLTrim','strLen','strLine','strLwr','strNFind','strPoke','strPrompt','strRTrim',
- 'strRem','strRep','strScan','strUpr','subStr','syd','systemDateGet','systemDateSet','tableId2Name',
- 'tableId2PName','tableName2Id','tan','tanh','term','time2Str','timeNow','today','trunc','typeOf','uint2Str','wkOfYr','year')
- COMPILE_FUNCTIONS = ('attributeStr','classNum','classStr','configurationKeyNum','configurationKeyStr','dataEntityDataSourceStr','delegateStr',
- 'dimensionHierarchyLevelStr','dimensionHierarchyStr','dimensionReferenceStr','dutyStr','enumCnt','enumLiteralStr','enumNum','enumStr',
- 'extendedTypeNum','extendedTypeStr','fieldNum','fieldPName','fieldStr','formControlStr','formDataFieldStr','formDataSourceStr',
- 'formMethodStr','formStr','identifierStr','indexNum','indexStr','licenseCodeNum','licenseCodeStr','literalStr','maxDate','maxInt',
- 'measureStr','measurementStr','menuItemActionStr','menuItemDisplayStr','menuItemOutputStr','menuStr','methodStr','minInt','privilegeStr',
- 'queryDatasourceStr','queryMethodStr','queryStr','reportStr','resourceStr','roleStr','ssrsReportStr','staticDelegateStr','staticMethodStr',
- 'tableCollectionStr','tableFieldGroupStr','tableMethodStr','tableNum','tablePName','tableStaticMethodStr','tableStr','tileStr','varStr',
- 'webActionItemStr','webDisplayContentItemStr','webFormStr','webMenuStr','webOutputContentItemStr','webReportStr','webSiteTempStr',
- 'webStaticFileStr','webUrlItemStr','webWebPartStr','webletItemStr','webpageDefStr','websiteDefStr','workflowApprovalStr',
- 'workflowCategoryStr','workflowTaskStr','workflowTypeStr')
-
- tokens = {
- 'root': [
- # method names
- (r'(\s*)\b(else|if)\b([^\n])', bygroups(Whitespace, Keyword, using(this))), # ensure that if is not treated like a function
- (r'^([ \t]*)((?:' + XPP_CHARS + r'(?:\[\])?\s+)+?)' # return type
- r'(' + XPP_CHARS + ')' # method name
- r'(\s*)(\()', # signature start
- bygroups(Whitespace, using(this), Name.Function, Whitespace,
- Punctuation)),
- (r'^(\s*)(\[)([^\n]*?)(\])', bygroups(Whitespace, Name.Attribute, Name.Variable.Class, Name.Attribute)),
- (r'[^\S\n]+', Whitespace),
- (r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation
- (r'//[^\n]*?\n', Comment.Single),
- (r'/[*][^\n]*?[*]/', Comment.Multiline),
- (r'\n', Whitespace),
- (words(OPERATORS), Operator),
- (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
- (r'[()\[\];:,.#@]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
- (r'\$?"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?"
- r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
- (words(KEYWORDS, suffix=r'\b'), Keyword),
- (r'(boolean|int|int64|str|real|guid|date)\b\??', Keyword.Type),
- (r'(class|struct|extends|implements)(\s+)', bygroups(Keyword, Whitespace), 'class'),
- (r'('+XPP_CHARS+')(::)', bygroups(Name.Variable.Class, Punctuation)),
- (r'(\s*)(\w+)(\s+\w+(,|=)?[^\n]*;)', bygroups(Whitespace, Name.Variable.Class, using(this))), # declaration
- # x++ specific function to get field should highlight the classname
- (r'(fieldNum\()('+XPP_CHARS+r')(\s*,\s*)('+XPP_CHARS+r')(\s*\))',
- bygroups(using(this), Name.Variable.Class, using(this), Name.Property, using(this))),
- # x++ specific function to get table should highlight the classname
- (r'(tableNum\()('+XPP_CHARS+r')(\s*\))',
- bygroups(using(this), Name.Variable.Class, using(this))),
- (words(RUNTIME_FUNCTIONS, suffix=r'(?=\()'), Name.Function.Magic),
- (words(COMPILE_FUNCTIONS, suffix=r'(?=\()'), Name.Function.Magic),
- (XPP_CHARS, Name),
- ],
- 'class': [
- (XPP_CHARS, Name.Class, '#pop'),
- default('#pop'),
- ],
- 'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + XPP_CHARS + r'|\.)+', Name.Namespace, '#pop'),
- ]
- }
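
Several lexers in this module implement ``analyse_text()``, which returns a
score between 0 and 1 that ``guess_lexer()`` uses to pick a lexer for
unidentified input; known languages are better looked up by alias. A sketch
(the sample strings are arbitrary, guessing is best-effort, and the 'xpp'
alias assumes a Pygments release that includes the X++ lexer, 2.15 or later):

    from pygments.lexers import guess_lexer, get_lexer_by_name

    # guess_lexer() asks every registered lexer for an analyse_text() score
    # and returns an instance of the highest-scoring one.
    sample = "diff --git a/x b/x\n--- a/x\n+++ b/x\n"
    print(guess_lexer(sample).name)

    # When the language is already known, resolve it by a declared alias.
    print(get_lexer_by_name('xpp').name)   # X++
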
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/dsls.py b/venv/lib/python3.11/site-packages/pygments/lexers/dsls.py
deleted file mode 100644
index 37a5ff6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/dsls.py
+++ /dev/null
@@ -1,982 +0,0 @@
-"""
- pygments.lexers.dsls
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various domain-specific languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
- include, default, this, using, combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
- 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
- 'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
-
-
-class ProtoBufLexer(RegexLexer):
- """
- Lexer for Protocol Buffer definition files.
-
- .. versionadded:: 1.4
- """
-
- name = 'Protocol Buffer'
- url = 'https://developers.google.com/protocol-buffers/'
- aliases = ['protobuf', 'proto']
- filenames = ['*.proto']
-
- tokens = {
- 'root': [
- (r'[ \t]+', Whitespace),
- (r'[,;{}\[\]()<>]', Punctuation),
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (words((
- 'import', 'option', 'optional', 'required', 'repeated',
- 'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
- 'max', 'rpc', 'returns', 'oneof', 'syntax'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
- 'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
- 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
- Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'package'),
- (r'(message|extend)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'message'),
- (r'(enum|group|service)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'type'),
- (r'\".*?\"', String),
- (r'\'.*?\'', String),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'(\-?(inf|nan))\b', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[+-=]', Operator),
- (r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
- bygroups(Name.Attribute, Whitespace, Operator)),
- (r'[a-zA-Z_][\w.]*', Name),
- ],
- 'package': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'message': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'type': [
- (r'[a-zA-Z_]\w*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class ThriftLexer(RegexLexer):
- """
- For Thrift interface definitions.
-
- .. versionadded:: 2.1
- """
- name = 'Thrift'
- url = 'https://thrift.apache.org/'
- aliases = ['thrift']
- filenames = ['*.thrift']
- mimetypes = ['application/x-thrift']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- (r'"', String.Double, combined('stringescape', 'dqs')),
- (r'\'', String.Single, combined('stringescape', 'sqs')),
- (r'(namespace)(\s+)',
- bygroups(Keyword.Namespace, Whitespace), 'namespace'),
- (r'(enum|union|struct|service|exception)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'class'),
- (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
- r'((?:[^\W\d]|\$)[\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Operator)),
- include('keywords'),
- include('numbers'),
- (r'[&=]', Operator),
- (r'[:;,{}()<>\[\]]', Punctuation),
- (r'[a-zA-Z_](\.\w|\w)*', Name),
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'#.*$', Comment),
- (r'//.*?\n', Comment),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- ],
- 'stringescape': [
- (r'\\([\\nrt"\'])', String.Escape),
- ],
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'[^\\"\n]+', String.Double),
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r'[^\\\'\n]+', String.Single),
- ],
- 'namespace': [
- (r'[a-z*](\.\w|\w)*', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'keywords': [
- (r'(async|oneway|extends|throws|required|optional)\b', Keyword),
- (r'(true|false)\b', Keyword.Constant),
- (r'(const|typedef)\b', Keyword.Declaration),
- (words((
- 'cpp_namespace', 'cpp_include', 'cpp_type', 'java_package',
- 'cocoa_prefix', 'csharp_namespace', 'delphi_namespace',
- 'php_namespace', 'py_module', 'perl_package',
- 'ruby_namespace', 'smalltalk_category', 'smalltalk_prefix',
- 'xsd_all', 'xsd_optional', 'xsd_nillable', 'xsd_namespace',
- 'xsd_attrs', 'include'), suffix=r'\b'),
- Keyword.Namespace),
- (words((
- 'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
- 'string', 'binary', 'map', 'list', 'set', 'slist',
- 'senum'), suffix=r'\b'),
- Keyword.Type),
- (words((
- 'BEGIN', 'END', '__CLASS__', '__DIR__', '__FILE__',
- '__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__',
- 'abstract', 'alias', 'and', 'args', 'as', 'assert', 'begin',
- 'break', 'case', 'catch', 'class', 'clone', 'continue',
- 'declare', 'def', 'default', 'del', 'delete', 'do', 'dynamic',
- 'elif', 'else', 'elseif', 'elsif', 'end', 'enddeclare',
- 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile',
- 'ensure', 'except', 'exec', 'finally', 'float', 'for',
- 'foreach', 'function', 'global', 'goto', 'if', 'implements',
- 'import', 'in', 'inline', 'instanceof', 'interface', 'is',
- 'lambda', 'module', 'native', 'new', 'next', 'nil', 'not',
- 'or', 'pass', 'public', 'print', 'private', 'protected',
- 'raise', 'redo', 'rescue', 'retry', 'register', 'return',
- 'self', 'sizeof', 'static', 'super', 'switch', 'synchronized',
- 'then', 'this', 'throw', 'transient', 'try', 'undef',
- 'unless', 'unsigned', 'until', 'use', 'var', 'virtual',
- 'volatile', 'when', 'while', 'with', 'xor', 'yield'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- ],
- 'numbers': [
- (r'[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)', Number.Float),
- (r'[+-]?0x[0-9A-Fa-f]+', Number.Hex),
- (r'[+-]?[0-9]+', Number.Integer),
- ],
- }
-
-
-class ZeekLexer(RegexLexer):
- """
- For Zeek scripts.
-
- .. versionadded:: 2.5
- """
- name = 'Zeek'
- url = 'https://www.zeek.org/'
- aliases = ['zeek', 'bro']
- filenames = ['*.zeek', '*.bro']
-
- _hex = r'[0-9a-fA-F]'
- _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
- _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- include('directives'),
- include('attributes'),
- include('types'),
- include('keywords'),
- include('literals'),
- include('operators'),
- include('punctuation'),
- (r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()',
- Name.Function),
- include('identifiers'),
- ],
-
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'(\\)(\n)', bygroups(Text, Whitespace)),
- ],
-
- 'comments': [
- (r'#.*$', Comment),
- ],
-
- 'directives': [
- (r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc),
- (r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc),
- (r'(@prefixes)(\s*)((\+?=).*)$', bygroups(Comment.Preproc,
- Whitespace, Comment.Preproc)),
- ],
-
- 'attributes': [
- (words(('redef', 'priority', 'log', 'optional', 'default', 'add_func',
- 'delete_func', 'expire_func', 'read_expire', 'write_expire',
- 'create_expire', 'synchronized', 'persistent', 'rotate_interval',
- 'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler',
- 'type_column', 'deprecated'),
- prefix=r'&', suffix=r'\b'),
- Keyword.Pseudo),
- ],
-
- 'types': [
- (words(('any',
- 'enum', 'record', 'set', 'table', 'vector',
- 'function', 'hook', 'event',
- 'addr', 'bool', 'count', 'double', 'file', 'int', 'interval',
- 'pattern', 'port', 'string', 'subnet', 'time'),
- suffix=r'\b'),
- Keyword.Type),
-
- (r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
- bygroups(Keyword.Type, Whitespace, Operator.Word, Whitespace, Keyword.Type)),
-
- (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace, Operator, Whitespace, Keyword.Type)),
-
- (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)',
- bygroups(Keyword, Whitespace, Name, Whitespace, Operator)),
-
- (r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
- bygroups(Keyword, Whitespace, Keyword.Type, Whitespace, Name.Class)),
- ],
-
- 'keywords': [
- (words(('redef', 'export', 'if', 'else', 'for', 'while',
- 'return', 'break', 'next', 'continue', 'fallthrough',
- 'switch', 'default', 'case',
- 'add', 'delete',
- 'when', 'timeout', 'schedule'),
- suffix=r'\b'),
- Keyword),
- (r'(print)\b', Keyword),
- (r'(global|local|const|option)\b', Keyword.Declaration),
- (r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- ],
-
- 'literals': [
- (r'"', String, 'string'),
-
- # Not the greatest match for patterns, but generally helps
- # disambiguate between start of a pattern and just a division
- # operator.
- (r'/(?=.*/)', String.Regex, 'regex'),
-
- (r'(T|F)\b', Keyword.Constant),
-
- # Port
- (r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
-
- # IPv4 Address
- (r'(\d{1,3}.){3}(\d{1,3})\b', Number),
-
- # IPv6 Address
- (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}.){3}(\d{1,3}))?\]', Number),
-
- # Numeric
- (r'0[xX]' + _hex + r'+\b', Number.Hex),
- (_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float),
- (_float + r'\b', Number.Float),
- (r'(\d+)\b', Number.Integer),
-
- # Hostnames
- (_h + r'(\.' + _h + r')+', String),
- ],
-
- 'operators': [
- (r'[!%*/+<=>~|&^-]', Operator),
- (r'([-+=&|]{2}|[+=!><-]=)', Operator),
- (r'(in|as|is|of)\b', Operator.Word),
- (r'\??\$', Operator),
- ],
-
- 'punctuation': [
- (r'[{}()\[\],;.]', Punctuation),
- # The "ternary if", which uses '?' and ':', could instead be
- # treated as an Operator, but colons are more frequently used to
- # separate field/identifier names from their types, so the (often)
- # less-prominent Punctuation is used even with '?' for consistency.
- (r'[?:]', Punctuation),
- ],
-
- 'identifiers': [
- (r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
- (r'[a-zA-Z_]\w*', Name)
- ],
-
- 'string': [
- (r'\\.', String.Escape),
- (r'%-?[0-9]*(\.[0-9]+)?[DTd-gsx]', String.Escape),
- (r'"', String, '#pop'),
- (r'.', String),
- ],
-
- 'regex': [
- (r'\\.', String.Escape),
- (r'/', String.Regex, '#pop'),
- (r'.', String.Regex),
- ],
- }
-
-
-BroLexer = ZeekLexer
-
-
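
The ``BroLexer = ZeekLexer`` assignment keeps the pre-rename class name
importable; alias-based lookup works independently of it because ``'bro'`` is
still listed in ZeekLexer.aliases. A short sketch:

    from pygments.lexers import get_lexer_by_name
    from pygments.lexers.dsls import BroLexer, ZeekLexer

    # BroLexer is simply another name bound to the ZeekLexer class.
    assert BroLexer is ZeekLexer

    # The registry resolves the legacy alias to the same lexer.
    print(get_lexer_by_name('bro').name)   # Zeek
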
-class PuppetLexer(RegexLexer):
- """
- For Puppet configuration DSL.
-
- .. versionadded:: 1.6
- """
- name = 'Puppet'
- url = 'https://puppet.com/'
- aliases = ['puppet']
- filenames = ['*.pp']
-
- tokens = {
- 'root': [
- include('comments'),
- include('keywords'),
- include('names'),
- include('numbers'),
- include('operators'),
- include('strings'),
-
- (r'[]{}:(),;[]', Punctuation),
- (r'\s+', Whitespace),
- ],
-
- 'comments': [
- (r'(\s*)(#.*)$', bygroups(Whitespace, Comment)),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
-
- 'operators': [
- (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
- (r'(in|and|or|not)\b', Operator.Word),
- ],
-
- 'names': [
- (r'[a-zA-Z_]\w*', Name.Attribute),
- (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
- String, Punctuation)),
- (r'\$\S+', Name.Variable),
- ],
-
- 'numbers': [
- # Copypasta from the Python lexer
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
-
- 'keywords': [
-            # 'group' and 'require' are left out since they are often
-            # used as attributes.
- (words((
- 'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
- 'check', 'class', 'computer', 'configured', 'contained',
- 'create_resources', 'crit', 'cron', 'debug', 'default',
- 'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
- 'err', 'exec', 'extlookup', 'fail', 'false', 'file',
- 'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
- 'include', 'info', 'inherits', 'inline_template', 'installed',
- 'interface', 'k5login', 'latest', 'link', 'loglevel',
- 'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
- 'mount', 'mounted', 'nagios_command', 'nagios_contact',
- 'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
- 'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
- 'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
- 'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
- 'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
- 'realize', 'regsubst', 'resources', 'role', 'router', 'running',
- 'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
- 'service', 'sha1', 'shellquote', 'split', 'sprintf',
- 'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
- 'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
- 'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
- 'zpool'), prefix='(?i)', suffix=r'\b'),
- Keyword),
- ],
-
- 'strings': [
- (r'"([^"])*"', String),
- (r"'(\\'|[^'])*'", String),
- ],
-
- }
-
-
-class RslLexer(RegexLexer):
- """
-    RSL is the formal specification language used in the RAISE
-    (Rigorous Approach to Industrial Software Engineering) method.
-
- .. versionadded:: 2.0
- """
- name = 'RSL'
- url = 'http://en.wikipedia.org/wiki/RAISE'
- aliases = ['rsl']
- filenames = ['*.rsl']
- mimetypes = ['text/rsl']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (words((
- 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
- 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
- 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
- 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
- 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
- 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
- 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
- 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
- 'type', 'union', 'until', 'use', 'value', 'variable', 'while',
- 'with', 'write', '~isin', '-inflist', '-infset', '-list',
- '-set'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'(variable|value)\b', Keyword.Declaration),
- (r'--.*?\n', Comment),
- (r'<:.*?:>', Comment),
- (r'\{!.*?!\}', Comment),
- (r'/\*.*?\*/', Comment),
- (r'^([ \t]*)([\w]+)([ \t]*)(:[^:])', bygroups(Whitespace,
- Name.Function, Whitespace, Name.Function)),
- (r'(^[ \t]*)([\w]+)([ \t]*)(\([\w\s,]*\))([ \t]*)(is|as)',
- bygroups(Whitespace, Name.Function, Whitespace, Text,
- Whitespace, Keyword)),
- (r'\b[A-Z]\w*\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'".*"', String),
- (r'\'.\'', String.Char),
- (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
- r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
- Operator),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- }
-
- def analyse_text(text):
- """
-        Check for the most common text at the beginning of an RSL file.
- """
- if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
- return 1.0
-
-
-class MscgenLexer(RegexLexer):
- """
- For Mscgen files.
-
- .. versionadded:: 1.6
- """
- name = 'Mscgen'
- url = 'http://www.mcternan.me.uk/mscgen/'
- aliases = ['mscgen', 'msc']
- filenames = ['*.msc']
-
- _var = r'(\w+|"(?:\\"|[^"])*")'
-
- tokens = {
- 'root': [
- (r'msc\b', Keyword.Type),
- # Options
- (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
- r'|arcgradient|ARCGRADIENT)\b', Name.Property),
- # Operators
- (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
- (r'(\.|-|\|){3}', Keyword),
- (r'(?:-|=|\.|:){2}'
- r'|<<=>>|<->|<=>|<<>>|<:>'
- r'|->|=>>|>>|=>|:>|-x|-X'
- r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
- # Names
- (r'\*', Name.Builtin),
- (_var, Name.Variable),
- # Other
- (r'\[', Punctuation, 'attrs'),
- (r'\{|\}|,|;', Punctuation),
- include('comments')
- ],
- 'attrs': [
- (r'\]', Punctuation, '#pop'),
- (_var + r'(\s*)(=)(\s*)' + _var,
- bygroups(Name.Attribute, Whitespace, Operator, Whitespace,
- String)),
- (r',', Punctuation),
- include('comments')
- ],
- 'comments': [
- (r'(?://|#).*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
- (r'[ \t\r\n]+', Whitespace)
- ]
- }
-
-
-class VGLLexer(RegexLexer):
- """
- For SampleManager VGL source code.
-
- .. versionadded:: 1.6
- """
- name = 'VGL'
- url = 'http://www.thermoscientific.com/samplemanager'
- aliases = ['vgl']
- filenames = ['*.rpf']
-
- flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\{[^}]*\}', Comment.Multiline),
- (r'declare', Keyword.Constant),
- (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
- r'|create|on|line|with|global|routine|value|endroutine|constant'
- r'|global|set|join|library|compile_option|file|exists|create|copy'
- r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
- Keyword),
- (r'(true|false|null|empty|error|locked)', Keyword.Constant),
- (r'[~^*#!%&\[\]()<>|+=:;,./?-]', Operator),
- (r'"[^"]*"', String),
- (r'(\.)([a-z_$][\w$]*)', bygroups(Operator, Name.Attribute)),
- (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
- (r'[a-z_$][\w$]*', Name),
- (r'[\r\n]+', Whitespace),
- (r'\s+', Whitespace)
- ]
- }
-
-
-class AlloyLexer(RegexLexer):
- """
- For Alloy source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Alloy'
- url = 'http://alloy.mit.edu'
- aliases = ['alloy']
- filenames = ['*.als']
- mimetypes = ['text/x-alloy']
-
- flags = re.MULTILINE | re.DOTALL
-
- iden_rex = r'[a-zA-Z_][\w]*"*'
- string_rex = r'"\b(\\\\|\\[^\\]|[^"\\])*"'
- text_tuple = (r'[^\S\n]+', Whitespace)
-
- tokens = {
- 'sig': [
- (r'(extends)\b', Keyword, '#pop'),
- (iden_rex, Name),
- text_tuple,
- (r',', Punctuation),
- (r'\{', Operator, '#pop'),
- ],
- 'module': [
- text_tuple,
- (iden_rex, Name, '#pop'),
- ],
- 'fun': [
- text_tuple,
- (r'\{', Operator, '#pop'),
- (iden_rex, Name, '#pop'),
- ],
- 'fact': [
- include('fun'),
- (string_rex, String, '#pop'),
- ],
- 'root': [
- (r'--.*?$', Comment.Single),
- (r'//.*?$', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- text_tuple,
- (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Whitespace),
- 'module'),
- (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Whitespace), 'sig'),
- (r'(iden|univ|none)\b', Keyword.Constant),
- (r'(int|Int)\b', Keyword.Type),
- (r'(var|this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
- (r'(all|some|no|sum|disj|when|else)\b', Keyword),
- (r'(run|check|for|but|exactly|expect|as|steps)\b', Keyword),
- (r'(always|after|eventually|until|release)\b', Keyword), # future time operators
- (r'(historically|before|once|since|triggered)\b', Keyword), # past time operators
- (r'(and|or|implies|iff|in)\b', Operator.Word),
- (r'(fun|pred|assert)(\s+)', bygroups(Keyword, Whitespace), 'fun'),
- (r'(fact)(\s+)', bygroups(Keyword, Whitespace), 'fact'),
- (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.\.|\.|->', Operator),
- (r'[-+/*%=<>&!^|~{}\[\]().\';]', Operator),
- (iden_rex, Name),
- (r'[:,]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (string_rex, String),
- (r'\n', Whitespace),
- ]
- }
-
-
-class PanLexer(RegexLexer):
- """
- Lexer for pan source files.
-
- Based on tcsh lexer.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pan'
- url = 'https://github.com/quattor/pan/'
- aliases = ['pan']
- filenames = ['*.pan']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\(', Keyword, 'paren'),
- (r'\{', Keyword, 'curly'),
- include('data'),
- ],
- 'basic': [
- (words((
- 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
- 'prefix', 'unique', 'object', 'foreach', 'include', 'template',
- 'function', 'variable', 'structure', 'extensible', 'declaration'),
- prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'file_contents', 'format', 'index', 'length', 'match', 'matches',
- 'replace', 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase',
- 'debug', 'error', 'traceback', 'deprecated', 'base64_decode',
- 'base64_encode', 'digest', 'escape', 'unescape', 'append', 'create',
- 'first', 'nlist', 'key', 'list', 'merge', 'next', 'prepend', 'is_boolean',
- 'is_defined', 'is_double', 'is_list', 'is_long', 'is_nlist', 'is_null',
- 'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
- 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
- 'path_exists', 'if_exists', 'return', 'value'),
- prefix=r'\b', suffix=r'\b'),
- Name.Builtin),
- (r'#.*', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r';', Punctuation),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'\s+', Whitespace),
- (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
- (r'\d+(?= |\Z)', Number),
- ],
- 'curly': [
- (r'\}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- }
-
-
-class CrmshLexer(RegexLexer):
- """
- Lexer for crmsh configuration files for Pacemaker clusters.
-
- .. versionadded:: 2.1
- """
- name = 'Crmsh'
- url = 'http://crmsh.github.io/'
- aliases = ['crmsh', 'pcmk']
- filenames = ['*.crmsh', '*.pcmk']
- mimetypes = []
-
- elem = words((
- 'node', 'primitive', 'group', 'clone', 'ms', 'location',
- 'colocation', 'order', 'fencing_topology', 'rsc_ticket',
- 'rsc_template', 'property', 'rsc_defaults',
- 'op_defaults', 'acl_target', 'acl_group', 'user', 'role',
- 'tag'), suffix=r'(?![\w#$-])')
- sub = words((
- 'params', 'meta', 'operations', 'op', 'rule',
- 'attributes', 'utilization'), suffix=r'(?![\w#$-])')
- acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])')
- bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])')
- un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])')
- date_exp = words(('in_range', 'date', 'spec', 'in'), suffix=r'(?![\w#$-])')
- acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)')
- bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)')
- val_qual = (r'(?:string|version|number)')
- rsc_role_action = (r'(?:Master|Started|Slave|Stopped|'
- r'start|promote|demote|stop)')
-
- tokens = {
- 'root': [
- (r'^(#.*)(\n)?', bygroups(Comment, Whitespace)),
- # attr=value (nvpair)
- (r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)',
- bygroups(Name.Attribute, Punctuation, String)),
- # need this construct, otherwise numeric node ids
- # are matched as scores
- # elem id:
- (r'(node)(\s+)([\w#$-]+)(:)',
- bygroups(Keyword, Whitespace, Name, Punctuation)),
- # scores
- (r'([+-]?([0-9]+|inf)):', Number),
- # keywords (elements and other)
- (elem, Keyword),
- (sub, Keyword),
- (acl, Keyword),
- # binary operators
- (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word),
- # other operators
- (bin_rel, Operator.Word),
- (un_ops, Operator.Word),
- (date_exp, Operator.Word),
- # builtin attributes (e.g. #uname)
- (r'#[a-z]+(?![\w#$-])', Name.Builtin),
- # acl_mod:blah
- (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod,
- bygroups(Keyword, Punctuation, Name)),
- # rsc_id[:(role|action)]
- # NB: this matches all other identifiers
- (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
- bygroups(Name, Punctuation, Operator.Word)),
- # punctuation
- (r'(\\(?=\n)|[\[\](){}/:@])', Punctuation),
- (r'\s+|\n', Whitespace),
- ],
- }
-
-
-class FlatlineLexer(RegexLexer):
- """
- Lexer for Flatline expressions.
-
- .. versionadded:: 2.2
- """
- name = 'Flatline'
- url = 'https://github.com/bigmlcom/flatline'
- aliases = ['flatline']
- filenames = []
- mimetypes = ['text/x-flatline']
-
- special_forms = ('let',)
-
- builtins = (
- "!=", "*", "+", "-", "<", "<=", "=", ">", ">=", "abs", "acos", "all",
- "all-but", "all-with-defaults", "all-with-numeric-default", "and",
- "asin", "atan", "avg", "avg-window", "bin-center", "bin-count", "call",
- "category-count", "ceil", "cond", "cond-window", "cons", "cos", "cosh",
- "count", "diff-window", "div", "ensure-value", "ensure-weighted-value",
- "epoch", "epoch-day", "epoch-fields", "epoch-hour", "epoch-millisecond",
- "epoch-minute", "epoch-month", "epoch-second", "epoch-weekday",
- "epoch-year", "exp", "f", "field", "field-prop", "fields", "filter",
- "first", "floor", "head", "if", "in", "integer", "language", "length",
- "levenshtein", "linear-regression", "list", "ln", "log", "log10", "map",
- "matches", "matches?", "max", "maximum", "md5", "mean", "median", "min",
- "minimum", "missing", "missing-count", "missing?", "missing_count",
- "mod", "mode", "normalize", "not", "nth", "occurrences", "or",
- "percentile", "percentile-label", "population", "population-fraction",
- "pow", "preferred", "preferred?", "quantile-label", "rand", "rand-int",
- "random-value", "re-quote", "real", "replace", "replace-first", "rest",
- "round", "row-number", "segment-label", "sha1", "sha256", "sin", "sinh",
- "sqrt", "square", "standard-deviation", "standard_deviation", "str",
- "subs", "sum", "sum-squares", "sum-window", "sum_squares", "summary",
- "summary-no", "summary-str", "tail", "tan", "tanh", "to-degrees",
- "to-radians", "variance", "vectorize", "weighted-random-value", "window",
- "winnow", "within-percentiles?", "z-score",
- )
-
- valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
-
- tokens = {
- 'root': [
- # whitespaces - usually not relevant
- (r'[,]+', Text),
- (r'\s+', Whitespace),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0x-?[a-f\d]+', Number.Hex),
-
- # strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"\\(.|[a-z]+)", String.Char),
-
- # expression template placeholder
- (r'_', String.Symbol),
-
- # highlight the special forms
- (words(special_forms, suffix=' '), Keyword),
-
- # highlight the builtins
- (words(builtins, suffix=' '), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # parentheses
- (r'(\(|\))', Punctuation),
- ],
- }
-
-
-class SnowballLexer(ExtendedRegexLexer):
- """
- Lexer for Snowball source code.
-
- .. versionadded:: 2.2
- """
-
- name = 'Snowball'
- url = 'https://snowballstem.org/'
- aliases = ['snowball']
- filenames = ['*.sbl']
-
- _ws = r'\n\r\t '
-
- def __init__(self, **options):
- self._reset_stringescapes()
- ExtendedRegexLexer.__init__(self, **options)
-
- def _reset_stringescapes(self):
- self._start = "'"
- self._end = "'"
-
- def _string(do_string_first):
- def callback(lexer, match, ctx):
- s = match.start()
- text = match.group()
- string = re.compile(r'([^%s]*)(.)' % re.escape(lexer._start)).match
- escape = re.compile(r'([^%s]*)(.)' % re.escape(lexer._end)).match
- pos = 0
- do_string = do_string_first
- while pos < len(text):
- if do_string:
- match = string(text, pos)
- yield s + match.start(1), String.Single, match.group(1)
- if match.group(2) == "'":
- yield s + match.start(2), String.Single, match.group(2)
- ctx.stack.pop()
- break
- yield s + match.start(2), String.Escape, match.group(2)
- pos = match.end()
- match = escape(text, pos)
- yield s + match.start(), String.Escape, match.group()
- if match.group(2) != lexer._end:
- ctx.stack[-1] = 'escape'
- break
- pos = match.end()
- do_string = True
- ctx.pos = s + match.end()
- return callback
-
- def _stringescapes(lexer, match, ctx):
- lexer._start = match.group(3)
- lexer._end = match.group(5)
- return bygroups(Keyword.Reserved, Whitespace, String.Escape, Whitespace,
- String.Escape)(lexer, match, ctx)
-
- tokens = {
- 'root': [
- (r'len\b', Name.Builtin),
- (r'lenof\b', Operator.Word),
- include('root1'),
- ],
- 'root1': [
- (r'[%s]+' % _ws, Whitespace),
- (r'\d+', Number.Integer),
- (r"'", String.Single, 'string'),
- (r'[()]', Punctuation),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'//.*', Comment.Single),
- (r'[!*+\-/<=>]=|[-=]>|<[+-]|[$*+\-/<=>?\[\]]', Operator),
- (words(('as', 'get', 'hex', 'among', 'define', 'decimal',
- 'backwardmode'), suffix=r'\b'),
- Keyword.Reserved),
- (words(('strings', 'booleans', 'integers', 'routines', 'externals',
- 'groupings'), suffix=r'\b'),
- Keyword.Reserved, 'declaration'),
- (words(('do', 'or', 'and', 'for', 'hop', 'non', 'not', 'set', 'try',
- 'fail', 'goto', 'loop', 'next', 'test', 'true',
- 'false', 'unset', 'atmark', 'attach', 'delete', 'gopast',
- 'insert', 'repeat', 'sizeof', 'tomark', 'atleast',
- 'atlimit', 'reverse', 'setmark', 'tolimit', 'setlimit',
- 'backwards', 'substring'), suffix=r'\b'),
- Operator.Word),
- (words(('size', 'limit', 'cursor', 'maxint', 'minint'),
- suffix=r'\b'),
- Name.Builtin),
- (r'(stringdef\b)([%s]*)([^%s]+)' % (_ws, _ws),
- bygroups(Keyword.Reserved, Whitespace, String.Escape)),
- (r'(stringescapes\b)([%s]*)(.)([%s]*)(.)' % (_ws, _ws),
- _stringescapes),
- (r'[A-Za-z]\w*', Name),
- ],
- 'declaration': [
- (r'\)', Punctuation, '#pop'),
- (words(('len', 'lenof'), suffix=r'\b'), Name,
- ('root1', 'declaration')),
- include('root1'),
- ],
- 'string': [
- (r"[^']*'", _string(True)),
- ],
- 'escape': [
- (r"[^']*'", _string(False)),
- ],
- }
-
- def get_tokens_unprocessed(self, text=None, context=None):
- self._reset_stringescapes()
- return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context)
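The lexers deleted in this hunk (Alloy, Pan, Crmsh, Flatline, Snowball) are normally reached through the public Pygments API rather than instantiated directly. As a minimal sketch of that usage path, assuming a Pygments installation is still available elsewhere, the AlloyLexer defined above can be resolved by its 'alloy' alias and driven through pygments.highlight:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    source = 'sig Node { edges: set Node }'   # illustrative Alloy snippet, not taken from the diff
    lexer = get_lexer_by_name('alloy')        # resolves to AlloyLexer via the aliases listed above
    print(highlight(source, lexer, TerminalFormatter()))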
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/dylan.py b/venv/lib/python3.11/site-packages/pygments/lexers/dylan.py
deleted file mode 100644
index f5aa73a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/dylan.py
+++ /dev/null
@@ -1,281 +0,0 @@
-"""
- pygments.lexers.dylan
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Dylan language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
- default, line_re
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Literal, Whitespace
-
-__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
-
-
-class DylanLexer(RegexLexer):
- """
- For the Dylan language.
-
- .. versionadded:: 0.7
- """
-
- name = 'Dylan'
- url = 'http://www.opendylan.org/'
- aliases = ['dylan']
- filenames = ['*.dylan', '*.dyl', '*.intr']
- mimetypes = ['text/x-dylan']
-
- flags = re.IGNORECASE
-
- builtins = {
- 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
- 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
- 'each-subclass', 'exception', 'exclude', 'function', 'generic',
- 'handler', 'inherited', 'inline', 'inline-only', 'instance',
- 'interface', 'import', 'keyword', 'library', 'macro', 'method',
- 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
- 'singleton', 'slot', 'thread', 'variable', 'virtual'}
-
- keywords = {
- 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
- 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
- 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
- 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
- 'while'}
-
- operators = {
- '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
- '>', '>=', '&', '|'}
-
- functions = {
- 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
- 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
- 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
- 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
- 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
- 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
- 'condition-format-arguments', 'condition-format-string', 'conjoin',
- 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
- 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
- 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
- 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
- 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
- 'function-arguments', 'function-return-values',
- 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
- 'generic-function-methods', 'head', 'head-setter', 'identity',
- 'initialize', 'instance?', 'integral?', 'intersection',
- 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
- 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
- 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
- 'min', 'modulo', 'negative', 'negative?', 'next-method',
- 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
- 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
- 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
- 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
- 'remove-method', 'replace-elements!', 'replace-subsequence!',
- 'restart-query', 'return-allowed?', 'return-description',
- 'return-query', 'reverse', 'reverse!', 'round', 'round/',
- 'row-major-index', 'second', 'second-setter', 'shallow-copy',
- 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
- 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
- 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
- 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
- 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
- 'vector', 'zero?'}
-
- valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- lowercase_value = value.lower()
- if lowercase_value in self.builtins:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.keywords:
- yield index, Keyword, value
- continue
- if lowercase_value in self.functions:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.operators:
- yield index, Operator, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Whitespace),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Whitespace, String)),
-
- default('code') # no header match, switch to code
- ],
- 'code': [
- # Whitespace
- (r'\s+', Whitespace),
-
- # single line comment
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
-
- # multi-line comment
- (r'/\*', Comment.Multiline, 'comment'),
-
- # strings and characters
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
-
- # binary integer
- (r'#b[01]+', Number.Bin),
-
- # octal integer
- (r'#o[0-7]+', Number.Oct),
-
- # floating point
- (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
-
- # decimal integer
- (r'[-+]?\d+', Number.Integer),
-
- # hex integer
- (r'#x[0-9a-f]+', Number.Hex),
-
- # Macro parameters
- (r'(\?' + valid_name + ')(:)'
- r'(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'\?' + valid_name, Name.Tag),
-
- # Punctuation
- (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
-
- # Most operators are picked up as names and then re-flagged.
- # This one isn't valid in a name though, so we pick it up now.
- (r':=', Operator),
-
- # Pick up #t / #f before we match other stuff with #.
- (r'#[tf]', Literal),
-
- # #"foo" style keywords
- (r'#"', String.Symbol, 'keyword'),
-
- # #rest, #key, #all-keys, etc.
- (r'#[a-z0-9-]+', Keyword),
-
- # required-init-keyword: style keywords.
- (valid_name + ':', Keyword),
-
- # class names
- ('<' + valid_name + '>', Name.Class),
-
- # define variable forms.
- (r'\*' + valid_name + r'\*', Name.Variable.Global),
-
- # define constant forms.
- (r'\$' + valid_name, Name.Constant),
-
- # everything else. We re-flag some of these in the method above.
- (valid_name, Name),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'keyword': [
- (r'"', String.Symbol, '#pop'),
- (r'[^\\"]+', String.Symbol), # all other characters
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ]
- }
-
-
-class DylanLidLexer(RegexLexer):
- """
- For Dylan LID (Library Interchange Definition) files.
-
- .. versionadded:: 1.6
- """
-
- name = 'DylanLID'
- aliases = ['dylan-lid', 'lid']
- filenames = ['*.lid', '*.hdp']
- mimetypes = ['text/x-dylan-lid']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Whitespace),
-
- # single line comment
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
-
- # lid header
- (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Whitespace, String)),
- ]
- }
-
-
-class DylanConsoleLexer(Lexer):
- """
- For Dylan interactive console output.
-
- This is based on a copy of the RubyConsoleLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Dylan session'
- aliases = ['dylan-console', 'dylan-repl']
- filenames = ['*.dylan-console']
- mimetypes = ['text/x-dylan-console']
- _example = 'dylan-console/console'
-
- _prompt_re = re.compile(r'\?| ')
-
- def get_tokens_unprocessed(self, text):
- dylexer = DylanLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- yield from do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- yield from do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode))
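DylanConsoleLexer is one instance of a pattern that recurs in the deleted files below (ErlangShellLexer and ElixirConsoleLexer use the same scheme): lines matching a prompt are buffered as code, the buffered code is handed to the real language lexer, and do_insertions splices the prompt tokens back in at the recorded offsets. A minimal sketch of that scheme, using PythonLexer and a hypothetical '>>> ' prompt purely for illustration:

    import re

    from pygments.lexer import Lexer, do_insertions, line_re
    from pygments.lexers import PythonLexer
    from pygments.token import Generic

    class ToyConsoleLexer(Lexer):
        """Hypothetical console lexer following the DylanConsoleLexer pattern."""
        name = 'Toy console'
        aliases = ['toy-console']

        _prompt_re = re.compile(r'>>> ')

        def get_tokens_unprocessed(self, text):
            pylexer = PythonLexer(**self.options)
            curcode, insertions = '', []
            for match in line_re.finditer(text):
                line = match.group()
                m = self._prompt_re.match(line)
                if m is not None:
                    # record where the prompt goes, buffer the code that follows it
                    insertions.append((len(curcode),
                                       [(0, Generic.Prompt, line[:m.end()])]))
                    curcode += line[m.end():]
                else:
                    if curcode:
                        yield from do_insertions(
                            insertions, pylexer.get_tokens_unprocessed(curcode))
                        curcode, insertions = '', []
                    yield match.start(), Generic.Output, line
            if curcode:
                yield from do_insertions(
                    insertions, pylexer.get_tokens_unprocessed(curcode))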
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ecl.py b/venv/lib/python3.11/site-packages/pygments/lexers/ecl.py
deleted file mode 100644
index 7e93e0c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ecl.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
- pygments.lexers.ecl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the ECL language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['ECLLexer']
-
-
-class ECLLexer(RegexLexer):
- """
- Lexer for the declarative big-data ECL language.
-
- .. versionadded:: 1.5
- """
-
- name = 'ECL'
- url = 'https://hpccsystems.com/training/documentation/ecl-language-reference/html'
- aliases = ['ecl']
- filenames = ['*.ecl']
- mimetypes = ['application/x-ecl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('statements'),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- (r'\/\/.*', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- include('types'),
- include('keywords'),
- include('functions'),
- include('hash'),
- (r'"', String, 'string'),
- (r'\'', String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float),
- (r'0x[0-9a-f]+[lu]*', Number.Hex),
- (r'0[0-7]+[lu]*', Number.Oct),
- (r'\d+[lu]*', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]+', Operator),
- (r'[{}()\[\],.;]', Punctuation),
- (r'[a-z_]\w*', Name),
- ],
- 'hash': [
- (r'^#.*$', Comment.Preproc),
- ],
- 'types': [
- (r'(RECORD|END)\D', Keyword.Declaration),
- (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
- r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
- r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
- bygroups(Keyword.Type, Whitespace)),
- ],
- 'keywords': [
- (words((
- 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
- 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
- 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
- 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
- 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
- 'WAIT', 'WHEN'), suffix=r'\b'),
- Keyword.Reserved),
- # These are classed differently, check later
- (words((
- 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST',
- 'BETWEEN', 'CASE', 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT',
- 'ENDC++', 'ENDMACRO', 'EXCEPT', 'EXCLUSIVE', 'EXPIRE', 'EXPORT',
- 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL', 'FUNCTION',
- 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN',
- 'JOINED', 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL',
- 'LOCALE', 'LOOKUP', 'MACRO', 'MANY', 'MAXCOUNT', 'MAXLENGTH',
- 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE', 'NOROOT',
- 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER',
- 'OVERWRITE', 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH',
- 'PIPE', 'QUOTE', 'RELATIONSHIP', 'REPEAT', 'RETURN', 'RIGHT',
- 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW', 'SKIP',
- 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN',
- 'TRANSFORM', 'TRIM', 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED',
- 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD', 'WITHIN', 'XML', 'XPATH',
- '__COMPRESSED__'), suffix=r'\b'),
- Keyword.Reserved),
- ],
- 'functions': [
- (words((
- 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN',
- 'ATAN2', 'AVE', 'CASE', 'CHOOSE', 'CHOOSEN', 'CHOOSESETS',
- 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS', 'COSH', 'COUNT',
- 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
- 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH',
- 'ERROR', 'EVALUATE', 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS',
- 'EXP', 'FAILCODE', 'FAILMESSAGE', 'FETCH', 'FROMUNICODE',
- 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
- 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX',
- 'INTFORMAT', 'ISVALID', 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH',
- 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP', 'MAP', 'MATCHED',
- 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE', 'MAX',
- 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE',
- 'PARSE', 'PIPE', 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL',
- 'RANDOM', 'RANGE', 'RANK', 'RANKED', 'REALFORMAT', 'RECORDOF',
- 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED', 'ROLLUP',
- 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN',
- 'SINH', 'SIZEOF', 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED',
- 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH', 'THISNODE', 'TOPN',
- 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
- 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE',
- 'XMLENCODE', 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
- Name.Function),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\'', String, '#pop'),
- (r'[^"\']+', String),
- ],
- }
-
- def analyse_text(text):
- """This is very difficult to guess relative to other business languages.
- '->' in conjunction with BEGIN/END seems relatively rare though."""
- result = 0
-
- if '->' in text:
- result += 0.01
- if 'BEGIN' in text:
- result += 0.01
- if 'END' in text:
- result += 0.01
-
- return result
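The analyse_text hook at the end of ECLLexer is what pygments.lexers.guess_lexer consults when no filename or alias is available: every registered lexer reports a score between 0.0 and 1.0 and the highest score wins, which is why the ECL heuristic deliberately returns such small increments. A minimal sketch of the call site, with an illustrative snippet standing in for real input:

    from pygments.lexers import guess_lexer

    snippet = 'r := RECORD\n  STRING name;\nEND;\n'   # illustrative text, not from the diff
    lexer = guess_lexer(snippet)                       # aggregates analyse_text() scores
    print(lexer.name)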
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/eiffel.py b/venv/lib/python3.11/site-packages/pygments/lexers/eiffel.py
deleted file mode 100644
index 8a5a559..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/eiffel.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""
- pygments.lexers.eiffel
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Eiffel language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Comment, Operator, Keyword, Name, String, Number, \
- Punctuation, Whitespace
-
-__all__ = ['EiffelLexer']
-
-
-class EiffelLexer(RegexLexer):
- """
- For Eiffel source code.
-
- .. versionadded:: 2.0
- """
- name = 'Eiffel'
- url = 'https://www.eiffel.com'
- aliases = ['eiffel']
- filenames = ['*.e']
- mimetypes = ['text/x-eiffel']
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Whitespace),
- (r'--.*?$', Comment.Single),
- (r'[^\S\n]+', Whitespace),
- # Please note that keywords and operators are case insensitive.
- (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
- (r'(?i)(not|xor|implies|or)\b', Operator.Word),
- (r'(?i)(and)(?:(\s+)(then))?\b',
- bygroups(Operator.Word, Whitespace, Operator.Word)),
- (r'(?i)(or)(?:(\s+)(else))?\b',
- bygroups(Operator.Word, Whitespace, Operator.Word)),
- (words((
- 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached',
- 'attribute', 'check', 'class', 'convert', 'create', 'debug',
- 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure',
- 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if',
- 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none',
- 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename',
- 'require', 'rescue', 'retry', 'select', 'separate', 'then',
- 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
- Keyword.Reserved),
- (r'"\[([^\]%]|%(.|\n)|\][^"])*?\]"', String),
- (r'"([^"%\n]|%.)*?"', String),
- include('numbers'),
- (r"'([^'%]|%'|%%)'", String.Char),
- (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\?!#%&@|+/\-=>*$<^\[\]])", Operator),
- (r"([{}():;,.])", Punctuation),
- (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
- (r'([A-Z][A-Z0-9_]*)', Name.Class),
- (r'\n+', Whitespace),
- ],
- 'numbers': [
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'0[bB][01]+', Number.Bin),
- (r'0[cC][0-7]+', Number.Oct),
- (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
- (r'[0-9]+', Number.Integer),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/elm.py b/venv/lib/python3.11/site-packages/pygments/lexers/elm.py
deleted file mode 100644
index 0e7ac3f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/elm.py
+++ /dev/null
@@ -1,124 +0,0 @@
-"""
- pygments.lexers.elm
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Elm programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups
-from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
- String, Whitespace
-
-__all__ = ['ElmLexer']
-
-
-class ElmLexer(RegexLexer):
- """
- For Elm source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'Elm'
- url = 'https://elm-lang.org/'
- aliases = ['elm']
- filenames = ['*.elm']
- mimetypes = ['text/x-elm']
-
- validName = r'[a-z_][a-zA-Z0-9_\']*'
-
- specialName = r'^main '
-
- builtinOps = (
- '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
- '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
- '..', '.', '->', '-', '++', '+', '*', '&&', '%',
- )
-
- reservedWords = words((
- 'alias', 'as', 'case', 'else', 'if', 'import', 'in',
- 'let', 'module', 'of', 'port', 'then', 'type', 'where',
- ), suffix=r'\b')
-
- tokens = {
- 'root': [
-
- # Comments
- (r'\{-', Comment.Multiline, 'comment'),
- (r'--.*', Comment.Single),
-
- # Whitespace
- (r'\s+', Whitespace),
-
- # Strings
- (r'"', String, 'doublequote'),
-
- # Modules
- (r'^(\s*)(module)(\s*)', bygroups(Whitespace, Keyword.Namespace,
- Whitespace), 'imports'),
-
- # Imports
- (r'^(\s*)(import)(\s*)', bygroups(Whitespace, Keyword.Namespace,
- Whitespace), 'imports'),
-
- # Shaders
- (r'\[glsl\|.*', Name.Entity, 'shader'),
-
- # Keywords
- (reservedWords, Keyword.Reserved),
-
- # Types
- (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
-
- # Main
- (specialName, Keyword.Reserved),
-
- # Prefix Operators
- (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
-
- # Infix Operators
- (words(builtinOps), Name.Function),
-
- # Numbers
- include('numbers'),
-
- # Variable Names
- (validName, Name.Variable),
-
- # Parens
- (r'[,()\[\]{}]', Punctuation),
-
- ],
-
- 'comment': [
- (r'-(?!\})', Comment.Multiline),
- (r'\{-', Comment.Multiline, 'comment'),
- (r'[^-}]', Comment.Multiline),
- (r'-\}', Comment.Multiline, '#pop'),
- ],
-
- 'doublequote': [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\[nrfvb\\"]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- 'imports': [
- (r'\w+(\.\w+)*', Name.Class, '#pop'),
- ],
-
- 'numbers': [
- (r'_?\d+\.(?=\d+)', Number.Float),
- (r'_?\d+', Number.Integer),
- ],
-
- 'shader': [
- (r'\|(?!\])', Name.Entity),
- (r'\|\]', Name.Entity, '#pop'),
- (r'(.*)(\n)', bygroups(Name.Entity, Whitespace)),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/elpi.py b/venv/lib/python3.11/site-packages/pygments/lexers/elpi.py
deleted file mode 100644
index 6fc8b50..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/elpi.py
+++ /dev/null
@@ -1,173 +0,0 @@
-"""
- pygments.lexers.elpi
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the `Elpi <http://github.com/LPCIC/elpi>`_ programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['ElpiLexer']
-
-
-class ElpiLexer(RegexLexer):
- """
- Lexer for the Elpi programming language.
-
- .. versionadded:: 2.11
- """
-
- name = 'Elpi'
- url = 'http://github.com/LPCIC/elpi'
- aliases = ['elpi']
- filenames = ['*.elpi']
- mimetypes = ['text/x-elpi']
-
- lcase_re = r"[a-z]"
- ucase_re = r"[A-Z]"
- digit_re = r"[0-9]"
- schar2_re = r"([+*^?/<>`'@#~=&!])"
- schar_re = r"({}|-|\$|_)".format(schar2_re)
- idchar_re = r"({}|{}|{}|{})".format(lcase_re,ucase_re,digit_re,schar_re)
- idcharstarns_re = r"({}*(\.({}|{}){}*)*)".format(idchar_re, lcase_re, ucase_re, idchar_re)
- symbchar_re = r"({}|{}|{}|{}|:)".format(lcase_re, ucase_re, digit_re, schar_re)
- constant_re = r"({}{}*|{}{}|{}{}*|_{}+)".format(ucase_re, idchar_re, lcase_re, idcharstarns_re, schar2_re, symbchar_re, idchar_re)
- symbol_re = r"(,|<=>|->|:-|;|\?-|->|&|=>|\bas\b|\buvar\b|<|=<|=|==|>=|>|\bi<|\bi=<|\bi>=|\bi>|\bis\b|\br<|\br=<|\br>=|\br>|\bs<|\bs=<|\bs>=|\bs>|@|::|\[\]|`->|`:|`:=|\^|-|\+|\bi-|\bi\+|r-|r\+|/|\*|\bdiv\b|\bi\*|\bmod\b|\br\*|~|\bi~|\br~)"
- escape_re = r"\(({}|{})\)".format(constant_re,symbol_re)
- const_sym_re = r"({}|{}|{})".format(constant_re,symbol_re,escape_re)
-
- tokens = {
- 'root': [
- include('elpi')
- ],
-
- 'elpi': [
- include('_elpi-comment'),
-
- (r"(:before|:after|:if|:name)(\s*)(\")",
- bygroups(Keyword.Mode, Text.Whitespace, String.Double),
- 'elpi-string'),
- (r"(:index)(\s*\()", bygroups(Keyword.Mode, Text.Whitespace),
- 'elpi-indexing-expr'),
- (r"\b(external pred|pred)(\s+)({})".format(const_sym_re),
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
- 'elpi-pred-item'),
- (r"\b(external type|type)(\s+)(({}(,\s*)?)+)".format(const_sym_re),
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
- 'elpi-type'),
- (r"\b(kind)(\s+)(({}|,)+)".format(const_sym_re),
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
- 'elpi-type'),
- (r"\b(typeabbrev)(\s+)({})".format(const_sym_re),
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
- 'elpi-type'),
- (r"\b(accumulate)(\s+)(\")",
- bygroups(Keyword.Declaration, Text.Whitespace, String.Double),
- 'elpi-string'),
- (r"\b(accumulate|namespace|local)(\s+)({})".format(constant_re),
- bygroups(Keyword.Declaration, Text.Whitespace, Text)),
- (r"\b(shorten)(\s+)({}\.)".format(constant_re),
- bygroups(Keyword.Declaration, Text.Whitespace, Text)),
- (r"\b(pi|sigma)(\s+)([a-zA-Z][A-Za-z0-9_ ]*)(\\)",
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable, Text)),
- (r"\b(constraint)(\s+)(({}(\s+)?)+)".format(const_sym_re),
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
- 'elpi-chr-rule-start'),
-
- (r"(?=[A-Z_]){}".format(constant_re), Name.Variable),
- (r"(?=[a-z_]){}\\".format(constant_re), Name.Variable),
- (r"_", Name.Variable),
- (r"({}|!|=>|;)".format(symbol_re), Keyword.Declaration),
- (constant_re, Text),
- (r"\[|\]|\||=>", Keyword.Declaration),
- (r'"', String.Double, 'elpi-string'),
- (r'`', String.Double, 'elpi-btick'),
- (r'\'', String.Double, 'elpi-tick'),
- (r'\{\{', Punctuation, 'elpi-quote'),
- (r'\{[^\{]', Text, 'elpi-spill'),
- (r"\(", Text, 'elpi-in-parens'),
- (r'\d[\d_]*', Number.Integer),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r"[\+\*\-/\^\.]", Operator),
- ],
- '_elpi-comment': [
- (r'%[^\n]*\n', Comment),
- (r'/\*', Comment, 'elpi-multiline-comment'),
- (r"\s+", Text.Whitespace),
- ],
- 'elpi-multiline-comment': [
- (r'\*/', Comment, '#pop'),
- (r'.', Comment)
- ],
- 'elpi-indexing-expr':[
- (r'[0-9 _]+', Number.Integer),
- (r'\)', Text, '#pop'),
- ],
- 'elpi-type': [
- (r"(ctype\s+)(\")", bygroups(Keyword.Type, String.Double), 'elpi-string'),
- (r'->', Keyword.Type),
- (constant_re, Keyword.Type),
- (r"\(|\)", Keyword.Type),
- (r"\.", Text, '#pop'),
- include('_elpi-comment'),
- ],
- 'elpi-chr-rule-start': [
- (r"\{", Text, 'elpi-chr-rule'),
- include('_elpi-comment'),
- ],
- 'elpi-chr-rule': [
- (r"\brule\b", Keyword.Declaration),
- (r"\\", Keyword.Declaration),
- (r"\}", Text, '#pop:2'),
- include('elpi'),
- ],
- 'elpi-pred-item': [
- (r"[io]:", Keyword.Mode, 'elpi-ctype'),
- (r"\.", Text, '#pop'),
- include('_elpi-comment'),
- ],
- 'elpi-ctype': [
- (r"(ctype\s+)(\")", bygroups(Keyword.Type, String.Double), 'elpi-string'),
- (r'->', Keyword.Type),
- (constant_re, Keyword.Type),
- (r"\(|\)", Keyword.Type),
- (r",", Text, '#pop'),
- (r"\.", Text, '#pop:2'),
- include('_elpi-comment'),
- ],
- 'elpi-btick': [
- (r'[^` ]+', String.Double),
- (r'`', String.Double, '#pop'),
- ],
- 'elpi-tick': [
- (r'[^\' ]+', String.Double),
- (r'\'', String.Double, '#pop'),
- ],
- 'elpi-string': [
- (r'[^\"]+', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'elpi-quote': [
- (r'\{\{', Punctuation, '#push'),
- (r'\}\}', Punctuation, '#pop'),
- (r"(lp:)((?=[A-Z_]){})".format(constant_re), bygroups(Keyword, Name.Variable)),
- (r"[^l\}]+", Text),
- (r"l|\}", Text),
- ],
- 'elpi-spill': [
- (r'\{[^\{]', Text, '#push'),
- (r'\}[^\}]', Text, '#pop'),
- include('elpi'),
- ],
- 'elpi-in-parens': [
- (r"\(", Operator, '#push'),
- (r"\)", Operator, '#pop'),
- include('elpi'),
- ],
-
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/email.py b/venv/lib/python3.11/site-packages/pygments/lexers/email.py
deleted file mode 100644
index 36a8a95..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/email.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
- pygments.lexers.email
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for raw E-mail.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
-from pygments.lexers.mime import MIMELexer
-from pygments.token import Text, Keyword, Name, String, Number, Comment
-from pygments.util import get_bool_opt
-
-__all__ = ["EmailLexer"]
-
-
-class EmailHeaderLexer(RegexLexer):
- """
- Sub-lexer for raw E-mail. This lexer only processes the header part of an e-mail.
-
- .. versionadded:: 2.5
- """
-
- def __init__(self, **options):
- super().__init__(**options)
- self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
-
- def get_x_header_tokens(self, match):
- if self.highlight_x:
- # field
- yield match.start(1), Name.Tag, match.group(1)
-
- # content
- default_actions = self.get_tokens_unprocessed(
- match.group(2), stack=("root", "header"))
- yield from default_actions
- else:
- # lowlight
- yield match.start(1), Comment.Special, match.group(1)
- yield match.start(2), Comment.Multiline, match.group(2)
-
- tokens = {
- "root": [
- (r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
- (r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
- ],
- "header": [
- # folding
- (r"\n[ \t]", Text.Whitespace),
- (r"\n(?![ \t])", Text.Whitespace, "#pop"),
-
- # keywords
- (r"\bE?SMTPS?\b", Keyword),
- (r"\b(?:HE|EH)LO\b", Keyword),
-
- # mailbox
- (r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
- (r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
-
- # domain
- (r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
-
- # IPv4
- (r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
- r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
- Number.Integer),
-
- # IPv6
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
- (r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
- (r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
- (r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
- Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
- Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
- Number.Hex),
- (r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
- r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
- r"[0-9])(?=\b)",
- Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9])"
- r"{0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])(?=\b)",
- Number.Hex),
-
- # Date time
- (r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
- r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
- r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
- r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
- r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
- Name.Decorator),
-
- # RFC-2047 encoded string
- (r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
- r"\]^_`{|}~]+)(\?=)",
- bygroups(String.Affix, Name.Constant, String.Affix, Keyword.Constant,
- String.Affix, Number.Hex, String.Affix)),
-
- # others
- (r'[\s]+', Text.Whitespace),
- (r'[\S]', Text),
- ],
- }
-
-
-class EmailLexer(DelegatingLexer):
- """
- Lexer for raw E-mail.
-
- Additional options accepted:
-
- `highlight-X-header`
- Highlight the fields of ``X-`` user-defined email header. (default:
- ``False``).
-
- .. versionadded:: 2.5
- """
-
- name = "E-mail"
- aliases = ["email", "eml"]
- filenames = ["*.eml"]
- mimetypes = ["message/rfc822"]
-
- def __init__(self, **options):
- super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options)
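The highlight-X-header option documented above is read in EmailHeaderLexer.__init__ via get_bool_opt, and lexer options travel as keyword arguments all the way from the lookup helpers, so the hyphenated name has to be supplied through dict unpacking. A minimal sketch, with a made-up two-header message used purely for illustration:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    raw = 'X-Mailer: Example 1.0\nSubject: hello\n\nbody text\n'   # illustrative message
    lexer = get_lexer_by_name('email', **{'highlight-X-header': True})
    print(highlight(raw, lexer, TerminalFormatter()))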
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/erlang.py b/venv/lib/python3.11/site-packages/pygments/lexers/erlang.py
deleted file mode 100644
index e484664..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/erlang.py
+++ /dev/null
@@ -1,528 +0,0 @@
-"""
- pygments.lexers.erlang
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Erlang.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
- include, default, line_re
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
- 'ElixirLexer']
-
-
-class ErlangLexer(RegexLexer):
- """
- For the Erlang functional programming language.
-
- .. versionadded:: 0.9
- """
-
- name = 'Erlang'
- url = 'https://www.erlang.org/'
- aliases = ['erlang']
- filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
- mimetypes = ['text/x-erlang']
-
- keywords = (
- 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
- 'let', 'of', 'query', 'receive', 'try', 'when',
- )
-
- builtins = ( # See erlang(3) man page
- 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
- 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
- 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
- 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
- 'float', 'float_to_list', 'fun_info', 'fun_to_list',
- 'function_exported', 'garbage_collect', 'get', 'get_keys',
- 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
- 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
- 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
- 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
- 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
- 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
- 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
- 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
- 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
- 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
- 'pid_to_list', 'port_close', 'port_command', 'port_connect',
- 'port_control', 'port_call', 'port_info', 'port_to_list',
- 'process_display', 'process_flag', 'process_info', 'purge_module',
- 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
- 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
- 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
- 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
- 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
- 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
- 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
- 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
- )
-
- operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
- word_operators = (
- 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
- 'div', 'not', 'or', 'orelse', 'rem', 'xor'
- )
-
- atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
-
- variable_re = r'(?:[A-Z_]\w*)'
-
- esc_char_re = r'[bdefnrstv\'"\\]'
- esc_octal_re = r'[0-7][0-7]?[0-7]?'
- esc_hex_re = r'(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})'
- esc_ctrl_re = r'\^[a-zA-Z]'
- escape_re = r'(?:\\(?:'+esc_char_re+r'|'+esc_octal_re+r'|'+esc_hex_re+r'|'+esc_ctrl_re+r'))'
-
- macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
-
- base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(%.*)(\n)', bygroups(Comment, Whitespace)),
- (words(keywords, suffix=r'\b'), Keyword),
- (words(builtins, suffix=r'\b'), Name.Builtin),
- (words(word_operators, suffix=r'\b'), Operator.Word),
- (r'^-', Punctuation, 'directive'),
- (operators, Operator),
- (r'"', String, 'string'),
- (r'<<', Name.Label),
- (r'>>', Name.Label),
- ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
- ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
- bygroups(Name.Function, Whitespace, Punctuation)),
- (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
- (r'[+-]?\d+', Number.Integer),
- (r'[+-]?\d+.\d+', Number.Float),
- (r'[]\[:_@\".{}()|;,]', Punctuation),
- (variable_re, Name.Variable),
- (atom_re, Name),
- (r'\?'+macro_re, Name.Constant),
- (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
- (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
-
- # Erlang script shebang
- (r'\A#!.+\n', Comment.Hashbang),
-
- # EEP 43: Maps
- # http://www.erlang.org/eeps/eep-0043.html
- (r'#\{', Punctuation, 'map_key'),
- ],
- 'string': [
- (escape_re, String.Escape),
- (r'"', String, '#pop'),
- (r'~[0-9.*]*[~#+BPWXb-ginpswx]', String.Interpol),
- (r'[^"\\~]+', String),
- (r'~', String),
- ],
- 'directive': [
- (r'(define)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Whitespace, Punctuation, Name.Constant), '#pop'),
- (r'(record)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Whitespace, Punctuation, Name.Label), '#pop'),
- (atom_re, Name.Entity, '#pop'),
- ],
- 'map_key': [
- include('root'),
- (r'=>', Punctuation, 'map_val'),
- (r':=', Punctuation, 'map_val'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'map_val': [
- include('root'),
- (r',', Punctuation, '#pop'),
- (r'(?=\})', Punctuation, '#pop'),
- ],
- }
-
-
-class ErlangShellLexer(Lexer):
- """
- Shell sessions in erl (for Erlang code).
-
- .. versionadded:: 1.1
- """
- name = 'Erlang erl session'
- aliases = ['erl']
- filenames = ['*.erl-sh']
- mimetypes = ['text/x-erl-shellsession']
-
- _prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
-
- def get_tokens_unprocessed(self, text):
- erlexer = ErlangLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- yield from do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- if line.startswith('*'):
- yield match.start(), Generic.Traceback, line
- else:
- yield match.start(), Generic.Output, line
- if curcode:
- yield from do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode))
-
-
-def gen_elixir_string_rules(name, symbol, token):
- states = {}
- states['string_' + name] = [
- (r'[^#%s\\]+' % (symbol,), token),
- include('escapes'),
- (r'\\.', token),
- (r'(%s)' % (symbol,), bygroups(token), "#pop"),
- include('interpol')
- ]
- return states
-
-
-def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
- if interpol:
- return [
- (r'[^#%s\\]+' % (term_class,), token),
- include('escapes'),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- include('interpol')
- ]
- else:
- return [
- (r'[^%s\\]+' % (term_class,), token),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- ]
-
-
-class ElixirLexer(RegexLexer):
- """
- For the Elixir language.
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir'
- url = 'http://elixir-lang.org'
- aliases = ['elixir', 'ex', 'exs']
- filenames = ['*.ex', '*.eex', '*.exs', '*.leex']
- mimetypes = ['text/x-elixir']
-
- KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
- KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
- BUILTIN = (
- 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
- 'quote', 'unquote', 'unquote_splicing', 'throw', 'super',
- )
- BUILTIN_DECLARATION = (
- 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
- 'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback',
- )
-
- BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
- CONSTANT = ('nil', 'true', 'false')
-
- PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
-
- OPERATORS3 = (
- '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
- '~>>', '<~>', '|~>', '<|>',
- )
- OPERATORS2 = (
- '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
- '->', '<-', '|', '.', '=', '~>', '<~',
- )
- OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
-
- PUNCTUATION = (
- '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']',
- )
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.KEYWORD:
- yield index, Keyword, value
- elif value in self.KEYWORD_OPERATOR:
- yield index, Operator.Word, value
- elif value in self.BUILTIN:
- yield index, Keyword, value
- elif value in self.BUILTIN_DECLARATION:
- yield index, Keyword.Declaration, value
- elif value in self.BUILTIN_NAMESPACE:
- yield index, Keyword.Namespace, value
- elif value in self.CONSTANT:
- yield index, Name.Constant, value
- elif value in self.PSEUDO_VAR:
- yield index, Name.Builtin.Pseudo, value
- else:
- yield index, token, value
- else:
- yield index, token, value
-
- def gen_elixir_sigil_rules():
- # all valid sigil terminators (excluding heredocs)
- terminators = [
- (r'\{', r'\}', '}', 'cb'),
- (r'\[', r'\]', r'\]', 'sb'),
- (r'\(', r'\)', ')', 'pa'),
- ('<', '>', '>', 'ab'),
- ('/', '/', '/', 'slas'),
- (r'\|', r'\|', '|', 'pipe'),
- ('"', '"', '"', 'quot'),
- ("'", "'", "'", 'apos'),
- ]
-
- # heredocs have slightly different rules
- triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
-
- token = String.Other
- states = {'sigils': []}
-
- for term, name in triquotes:
- states['sigils'] += [
- (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-intp')),
- (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-no-intp')),
- ]
-
- states[name + '-end'] = [
- (r'[a-zA-Z]+', token, '#pop'),
- default('#pop'),
- ]
- states[name + '-intp'] = [
- (r'^(\s*)(' + term + ')', bygroups(Whitespace, String.Heredoc), '#pop'),
- include('heredoc_interpol'),
- ]
- states[name + '-no-intp'] = [
- (r'^(\s*)(' + term +')', bygroups(Whitespace, String.Heredoc), '#pop'),
- include('heredoc_no_interpol'),
- ]
-
- for lterm, rterm, rterm_class, name in terminators:
- states['sigils'] += [
- (r'~[a-z]' + lterm, token, name + '-intp'),
- (r'~[A-Z]' + lterm, token, name + '-no-intp'),
- ]
- states[name + '-intp'] = \
- gen_elixir_sigstr_rules(rterm, rterm_class, token)
- states[name + '-no-intp'] = \
- gen_elixir_sigstr_rules(rterm, rterm_class, token, interpol=False)
-
- return states
-
- op3_re = "|".join(re.escape(s) for s in OPERATORS3)
- op2_re = "|".join(re.escape(s) for s in OPERATORS2)
- op1_re = "|".join(re.escape(s) for s in OPERATORS1)
- ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
- punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
- alnum = r'\w'
- name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
- modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
- complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
- special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
-
- long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
- hex_char_re = r'(\\x[\da-fA-F]{1,2})'
- escape_char_re = r'(\\[abdefnrstv])'
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#.*$', Comment.Single),
-
- # Various kinds of characters
- (r'(\?)' + long_hex_char_re,
- bygroups(String.Char,
- String.Escape, Number.Hex, String.Escape)),
- (r'(\?)' + hex_char_re,
- bygroups(String.Char, String.Escape)),
- (r'(\?)' + escape_char_re,
- bygroups(String.Char, String.Escape)),
- (r'\?\\?.', String.Char),
-
- # '::' has to go before atoms
- (r':::', String.Symbol),
- (r'::', Operator),
-
- # atoms
- (r':' + special_atom_re, String.Symbol),
- (r':' + complex_name_re, String.Symbol),
- (r':"', String.Symbol, 'string_double_atom'),
- (r":'", String.Symbol, 'string_single_atom'),
-
- # [keywords: ...]
- (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
- bygroups(String.Symbol, Punctuation)),
-
- # @attributes
- (r'@' + name_re, Name.Attribute),
-
- # identifiers
- (name_re, Name),
- (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
-
- # operators and punctuation
- (op3_re, Operator),
- (op2_re, Operator),
- (punctuation_re, Punctuation),
- (r'&\d', Name.Entity), # anon func arguments
- (op1_re, Operator),
-
- # numbers
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[\da-fA-F]+', Number.Hex),
- (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
- (r'\d(_?\d)*', Number.Integer),
-
- # strings and heredocs
- (r'(""")(\s*)', bygroups(String.Heredoc, Whitespace),
- 'heredoc_double'),
- (r"(''')(\s*)$", bygroups(String.Heredoc, Whitespace),
- 'heredoc_single'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single'),
-
- include('sigils'),
-
- (r'%\{', Punctuation, 'map_key'),
- (r'\{', Punctuation, 'tuple'),
- ],
- 'heredoc_double': [
- (r'^(\s*)(""")', bygroups(Whitespace, String.Heredoc), '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_single': [
- (r"^\s*'''", String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_interpol': [
- (r'[^#\\\n]+', String.Heredoc),
- include('escapes'),
- (r'\\.', String.Heredoc),
- (r'\n+', String.Heredoc),
- include('interpol'),
- ],
- 'heredoc_no_interpol': [
- (r'[^\\\n]+', String.Heredoc),
- (r'\\.', String.Heredoc),
- (r'\n+', Whitespace),
- ],
- 'escapes': [
- (long_hex_char_re,
- bygroups(String.Escape, Number.Hex, String.Escape)),
- (hex_char_re, String.Escape),
- (escape_char_re, String.Escape),
- ],
- 'interpol': [
- (r'#\{', String.Interpol, 'interpol_string'),
- ],
- 'interpol_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'map_key': [
- include('root'),
- (r':', Punctuation, 'map_val'),
- (r'=>', Punctuation, 'map_val'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'map_val': [
- include('root'),
- (r',', Punctuation, '#pop'),
- (r'(?=\})', Punctuation, '#pop'),
- ],
- 'tuple': [
- include('root'),
- (r'\}', Punctuation, '#pop'),
- ],
- }
- tokens.update(gen_elixir_string_rules('double', '"', String.Double))
- tokens.update(gen_elixir_string_rules('single', "'", String.Single))
- tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
- tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
- tokens.update(gen_elixir_sigil_rules())
-
-
-class ElixirConsoleLexer(Lexer):
- """
- For Elixir interactive console (iex) output like:
-
- .. sourcecode:: iex
-
- iex> [head | tail] = [1,2,3]
- [1,2,3]
- iex> head
- 1
- iex> tail
- [2,3]
- iex> [head | tail]
- [1,2,3]
- iex> length [head | tail]
- 3
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir iex session'
- aliases = ['iex']
- mimetypes = ['text/x-elixir-shellsession']
-
- _prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
-
- def get_tokens_unprocessed(self, text):
- exlexer = ElixirLexer(**self.options)
-
- curcode = ''
- in_error = False
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('** '):
- in_error = True
- insertions.append((len(curcode),
- [(0, Generic.Error, line[:-1])]))
- curcode += line[-1:]
- else:
- m = self._prompt_re.match(line)
- if m is not None:
- in_error = False
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- yield from do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- token = Generic.Error if in_error else Generic.Output
- yield match.start(), token, line
- if curcode:
- yield from do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode))
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/esoteric.py b/venv/lib/python3.11/site-packages/pygments/lexers/esoteric.py
deleted file mode 100644
index ccc2805..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/esoteric.py
+++ /dev/null
@@ -1,301 +0,0 @@
-"""
- pygments.lexers.esoteric
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for esoteric languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Comment, Operator, Keyword, Name, String, Number, \
- Punctuation, Error, Whitespace
-
-__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
- 'CapDLLexer', 'AheuiLexer']
-
-
-class BrainfuckLexer(RegexLexer):
- """
- Lexer for the esoteric BrainFuck language.
- """
-
- name = 'Brainfuck'
- url = 'http://www.muppetlabs.com/~breadbox/bf/'
- aliases = ['brainfuck', 'bf']
- filenames = ['*.bf', '*.b']
- mimetypes = ['application/x-brainfuck']
-
- tokens = {
- 'common': [
- # use different colors for different instruction types
- (r'[.,]+', Name.Tag),
- (r'[+-]+', Name.Builtin),
- (r'[<>]+', Name.Variable),
- (r'[^.,+\-<>\[\]]+', Comment),
- ],
- 'root': [
- (r'\[', Keyword, 'loop'),
- (r'\]', Error),
- include('common'),
- ],
- 'loop': [
- (r'\[', Keyword, '#push'),
- (r'\]', Keyword, '#pop'),
- include('common'),
- ]
- }
-
- def analyse_text(text):
- """It's safe to assume that a program which mostly consists of + -
- and < > is brainfuck."""
- plus_minus_count = 0
- greater_less_count = 0
-
- range_to_check = max(256, len(text))
-
- for c in text[:range_to_check]:
- if c == '+' or c == '-':
- plus_minus_count += 1
- if c == '<' or c == '>':
- greater_less_count += 1
-
- if plus_minus_count > (0.25 * range_to_check):
- return 1.0
- if greater_less_count > (0.25 * range_to_check):
- return 1.0
-
- result = 0
- if '[-]' in text:
- result += 0.5
-
- return result
-
-
-class BefungeLexer(RegexLexer):
- """
- Lexer for the esoteric Befunge language.
-
- .. versionadded:: 0.7
- """
- name = 'Befunge'
- url = 'http://en.wikipedia.org/wiki/Befunge'
- aliases = ['befunge']
- filenames = ['*.befunge']
- mimetypes = ['application/x-befunge']
-
- tokens = {
- 'root': [
- (r'[0-9a-f]', Number),
- (r'[+*/%!`-]', Operator), # Traditional math
- (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
- (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
- (r'[|_mw]', Keyword),
- (r'[{}]', Name.Tag), # Befunge-98 stack ops
- (r'".*?"', String.Double), # Strings don't appear to allow escapes
- (r'\'.', String.Single), # Single character
- (r'[#;]', Comment), # Trampoline... depends on direction hit
- (r'[pg&~=@iotsy]', Keyword), # Misc
- (r'[()A-Z]', Comment), # Fingerprints
- (r'\s+', Whitespace), # Whitespace doesn't matter
- ],
- }
-
-
-class CAmkESLexer(RegexLexer):
- """
- Basic lexer for the input language of the CAmkES component platform.
-
- .. versionadded:: 2.1
- """
- name = 'CAmkES'
- url = 'https://sel4.systems/CAmkES/'
- aliases = ['camkes', 'idl4']
- filenames = ['*.camkes', '*.idl4']
-
- tokens = {
- 'root': [
- # C pre-processor directive
- (r'^(\s*)(#.*)(\n)', bygroups(Whitespace, Comment.Preproc,
- Whitespace)),
-
- # Whitespace, comments
- (r'\s+', Whitespace),
- (r'/\*(.|\n)*?\*/', Comment),
- (r'//.*$', Comment),
-
- (r'[\[(){},.;\]]', Punctuation),
- (r'[~!%^&*+=|?:<>/-]', Operator),
-
- (words(('assembly', 'attribute', 'component', 'composition',
- 'configuration', 'connection', 'connector', 'consumes',
- 'control', 'dataport', 'Dataport', 'Dataports', 'emits',
- 'event', 'Event', 'Events', 'export', 'from', 'group',
- 'hardware', 'has', 'interface', 'Interface', 'maybe',
- 'procedure', 'Procedure', 'Procedures', 'provides',
- 'template', 'thread', 'threads', 'to', 'uses', 'with'),
- suffix=r'\b'), Keyword),
-
- (words(('bool', 'boolean', 'Buf', 'char', 'character', 'double',
- 'float', 'in', 'inout', 'int', 'int16_t', 'int32_t',
- 'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real',
- 'refin', 'semaphore', 'signed', 'string', 'struct',
- 'uint16_t', 'uint32_t', 'uint64_t', 'uint8_t', 'uintptr_t',
- 'unsigned', 'void'),
- suffix=r'\b'), Keyword.Type),
-
- # Recognised attributes
- (r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved),
- (words(('dma_pool', 'from_access', 'to_access'), suffix=r'\b'),
- Keyword.Reserved),
-
- # CAmkES-level include
- (r'(import)(\s+)((?:<[^>]*>|"[^"]*");)',
- bygroups(Comment.Preproc, Whitespace, Comment.Preproc)),
-
- # C-level include
- (r'(include)(\s+)((?:<[^>]*>|"[^"]*");)',
- bygroups(Comment.Preproc, Whitespace, Comment.Preproc)),
-
- # Literals
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'-?[\d]+\.[\d]+', Number.Float),
- (r'-?[\d]+', Number),
- (r'"[^"]*"', String),
- (r'[Tt]rue|[Ff]alse', Name.Builtin),
-
- # Identifiers
- (r'[a-zA-Z_]\w*', Name),
- ],
- }
-
-
-class CapDLLexer(RegexLexer):
- """
- Basic lexer for CapDL.
-
- The source of the primary tool that reads such specifications is available
- at https://github.com/seL4/capdl/tree/master/capDL-tool. Note that this
- lexer only supports a subset of the grammar. For example, identifiers can
- shadow type names, but these instances are currently incorrectly
- highlighted as types. Supporting this would need a stateful lexer that is
- considered unnecessarily complex for now.
-
- .. versionadded:: 2.2
- """
- name = 'CapDL'
- url = 'https://ssrg.nicta.com.au/publications/nictaabstracts/Kuz_KLW_10.abstract.pml'
- aliases = ['capdl']
- filenames = ['*.cdl']
-
- tokens = {
- 'root': [
- # C pre-processor directive
- (r'^(\s*)(#.*)(\n)',
- bygroups(Whitespace, Comment.Preproc, Whitespace)),
-
- # Whitespace, comments
- (r'\s+', Whitespace),
- (r'/\*(.|\n)*?\*/', Comment),
- (r'(//|--).*$', Comment),
-
- (r'[<>\[(){},:;=\]]', Punctuation),
- (r'\.\.', Punctuation),
-
- (words(('arch', 'arm11', 'caps', 'child_of', 'ia32', 'irq', 'maps',
- 'objects'), suffix=r'\b'), Keyword),
-
- (words(('aep', 'asid_pool', 'cnode', 'ep', 'frame', 'io_device',
- 'io_ports', 'io_pt', 'notification', 'pd', 'pt', 'tcb',
- 'ut', 'vcpu'), suffix=r'\b'), Keyword.Type),
-
- # Properties
- (words(('asid', 'addr', 'badge', 'cached', 'dom', 'domainID', 'elf',
- 'fault_ep', 'G', 'guard', 'guard_size', 'init', 'ip',
- 'prio', 'sp', 'R', 'RG', 'RX', 'RW', 'RWG', 'RWX', 'W',
- 'WG', 'WX', 'level', 'masked', 'master_reply', 'paddr',
- 'ports', 'reply', 'uncached'), suffix=r'\b'),
- Keyword.Reserved),
-
- # Literals
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+(\.\d+)?(k|M)?', Number),
- (words(('bits',), suffix=r'\b'), Number),
- (words(('cspace', 'vspace', 'reply_slot', 'caller_slot',
- 'ipc_buffer_slot'), suffix=r'\b'), Number),
-
- # Identifiers
- (r'[a-zA-Z_][-@\.\w]*', Name),
- ],
- }
-
-
-class RedcodeLexer(RegexLexer):
- """
- A simple Redcode lexer based on ICWS'94.
- Contributed by Adam Blinkinsop <blinks@acm.org>.
-
- .. versionadded:: 0.8
- """
- name = 'Redcode'
- aliases = ['redcode']
- filenames = ['*.cw']
-
- opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
- 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
- 'ORG', 'EQU', 'END')
- modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Whitespace),
- (r';.*$', Comment.Single),
- # Lexemes:
- # Identifiers
- (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
- (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
- (r'[A-Za-z_]\w+', Name),
- # Operators
- (r'[-+*/%]', Operator),
- (r'[#$@<>]', Operator), # mode
- (r'[.,]', Punctuation), # mode
- # Numbers
- (r'[-+]?\d+', Number.Integer),
- ],
- }
-
-
-class AheuiLexer(RegexLexer):
- """
- Aheui is an esoteric language based on the Korean alphabet (Hangul).
- """
-
- name = 'Aheui'
- url = 'http://aheui.github.io/'
- aliases = ['aheui']
- filenames = ['*.aheui']
-
- tokens = {
- 'root': [
- ('['
- '나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
- '다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
- '따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟'
- '라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫'
- '마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷'
- '바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃'
- '빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏'
- '사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛'
- '싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧'
- '자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿'
- '차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗'
- '카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣'
- '타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯'
- '파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻'
- '하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
- ']', Operator),
- ('.', Comment),
- ],
- }
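
Several lexers in this file lean on the '#push'/'#pop' state directives (Brainfuck's 'loop' state above is the simplest case); a small sketch of the resulting behaviour on an illustrative input, assuming the lexer is importable:

# Illustrative only: a stray ']' with no matching '[' is caught by the
# root-state rule and emitted as an Error token; the balanced pair stays Keyword.
from pygments.lexers import BrainfuckLexer

for tok_type, value in BrainfuckLexer().get_tokens('[+-]]'):
    print(tok_type, repr(value))
# the final ']' -> Token.Error, the matched pair -> Token.Keyword
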
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ezhil.py b/venv/lib/python3.11/site-packages/pygments/lexers/ezhil.py
deleted file mode 100644
index 49478ea..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ezhil.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""
- pygments.lexers.ezhil
- ~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for the Ezhil language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Keyword, Comment, Name, String, Number, \
- Punctuation, Operator, Whitespace
-
-__all__ = ['EzhilLexer']
-
-
-class EzhilLexer(RegexLexer):
- """
- Lexer for Ezhil, a Tamil script-based programming language.
-
- .. versionadded:: 2.1
- """
- name = 'Ezhil'
- url = 'http://ezhillang.org'
- aliases = ['ezhil']
- filenames = ['*.n']
- mimetypes = ['text/x-ezhil']
- # Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
- # This much simpler version is close enough, and includes combining marks.
- _TALETTERS = '[a-zA-Z_]|[\u0b80-\u0bff]'
- tokens = {
- 'root': [
- include('keywords'),
- (r'#.*$', Comment.Single),
- (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
- ('இல்', Operator.Word),
- (words(('assert', 'max', 'min',
- 'நீளம்', 'சரம்_இடமாற்று', 'சரம்_கண்டுபிடி',
- 'பட்டியல்', 'பின்இணை', 'வரிசைப்படுத்து',
- 'எடு', 'தலைகீழ்', 'நீட்டிக்க', 'நுழைக்க', 'வை',
- 'கோப்பை_திற', 'கோப்பை_எழுது', 'கோப்பை_மூடு',
- 'pi', 'sin', 'cos', 'tan', 'sqrt', 'hypot', 'pow',
- 'exp', 'log', 'log10', 'exit',
- ), suffix=r'\b'), Name.Builtin),
- (r'(True|False)\b', Keyword.Constant),
- (r'[^\S\n]+', Whitespace),
- include('identifier'),
- include('literal'),
- (r'[(){}\[\]:;.]', Punctuation),
- ],
- 'keywords': [
- ('பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword),
- ],
- 'identifier': [
- ('(?:'+_TALETTERS+')(?:[0-9]|'+_TALETTERS+')*', Name),
- ],
- 'literal': [
- (r'".*?"', String),
- (r'\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float),
- (r'\d+', Number.Integer),
- ]
- }
-
- def analyse_text(text):
- """This language uses Tamil-script. We'll assume that if there's a
- decent amount of Tamil-characters, it's this language. This assumption
- is obviously horribly off if someone uses string literals in tamil
- in another language."""
- if len(re.findall(r'[\u0b80-\u0bff]', text)) > 10:
- return 0.25
-
- def __init__(self, **options):
- super().__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
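
A short sketch of the detection heuristic and the encoding default above; the Tamil sample uses the பதிப்பி (print) keyword from the keyword list and is purely illustrative:

# Illustrative only: more than ten Tamil code points push analyse_text to 0.25,
# and the lexer defaults to UTF-8 when no encoding option is passed.
from pygments.lexers import EzhilLexer

code = 'பதிப்பி "வணக்கம்"\n'
print(EzhilLexer.analyse_text(code))   # 0.25 once enough Tamil characters appear
lexer = EzhilLexer()
print(lexer.encoding)                  # 'utf-8'
for tok_type, value in lexer.get_tokens(code):
    print(tok_type, repr(value))
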
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/factor.py b/venv/lib/python3.11/site-packages/pygments/lexers/factor.py
deleted file mode 100644
index aa0cd2d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/factor.py
+++ /dev/null
@@ -1,364 +0,0 @@
-"""
- pygments.lexers.factor
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Factor language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, default, words
-from pygments.token import Text, Comment, Keyword, Name, String, Number, \
- Whitespace, Punctuation
-
-__all__ = ['FactorLexer']
-
-
-class FactorLexer(RegexLexer):
- """
- Lexer for the Factor language.
-
- .. versionadded:: 1.4
- """
- name = 'Factor'
- url = 'http://factorcode.org'
- aliases = ['factor']
- filenames = ['*.factor']
- mimetypes = ['text/x-factor']
-
- builtin_kernel = words((
- '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
- '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
- '3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
- '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
- 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
- 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
- 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
- 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
- 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
- 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
- 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
- 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
- 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
- 'wrapper', 'wrapper?', 'xor'), suffix=r'(\s+)')
-
- builtin_assocs = words((
- '2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
- 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
- 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
- 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
- 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
- 'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
- 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
- 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
- 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
- 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
- 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
- 'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'(\s+)')
-
- builtin_combinators = words((
- '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
- '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
- 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
- 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
- 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
- 'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'(\s+)')
-
- builtin_math = words((
- '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
- '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
- '(each-integer)', '(find-integer)', '*', '+', '?1+',
- 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
- 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
- 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
- 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
- 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
- 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
- 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
- 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
- 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
- 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
- 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
- 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
- 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
- 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
- 'zero?'), suffix=r'(\s+)')
-
- builtin_sequences = words((
- '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
- '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
- '3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
- '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
- 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
- 'assert-sequence', 'assert-sequence=', 'assert-sequence?',
- 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
- 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
- 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
- 'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
- 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
- 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
- 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
- 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
- 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
- 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
- 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
- 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
- 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
- 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
- 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
- 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
- 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
- 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
- 'non-negative-integer-expected', 'non-negative-integer-expected?',
- 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
- 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
- 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
- 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
- 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
- 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
- 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
- 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
- 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
- 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
- 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
- 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
- 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
- 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
- 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
- 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
- 'when-empty'), suffix=r'(\s+)')
-
- builtin_namespaces = words((
- '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
- 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
- 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
- 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
- suffix=r'(\s+)')
-
- builtin_arrays = words((
- '1array', '2array', '3array', '4array', '<array>', '>array', 'array',
- 'array?', 'pair', 'pair?', 'resize-array'), suffix=r'(\s+)')
-
- builtin_io = words((
- '(each-stream-block-slice)', '(each-stream-block)',
- '(stream-contents-by-block)', '(stream-contents-by-element)',
- '(stream-contents-by-length-or-block)',
- '(stream-contents-by-length)', '+byte+', '+character+',
- 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
- 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
- 'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
- 'error-stream', 'flush', 'input-stream', 'input-stream?',
- 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
- 'output-stream', 'output-stream?', 'print', 'read', 'read-into',
- 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
- 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
- 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
- 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
- 'stream-copy*', 'stream-element-type', 'stream-flush',
- 'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
- 'stream-read', 'stream-read-into', 'stream-read-partial',
- 'stream-read-partial-into', 'stream-read-partial-unsafe',
- 'stream-read-unsafe', 'stream-read-until', 'stream-read1',
- 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
- 'stream-write', 'stream-write1', 'tell-input', 'tell-output',
- 'with-error-stream', 'with-error-stream*', 'with-error>output',
- 'with-input-output+error-streams',
- 'with-input-output+error-streams*', 'with-input-stream',
- 'with-input-stream*', 'with-output-stream', 'with-output-stream*',
- 'with-output>error', 'with-output+error-stream',
- 'with-output+error-stream*', 'with-streams', 'with-streams*',
- 'write', 'write1'), suffix=r'(\s+)')
-
- builtin_strings = words((
- '1string', '<string>', '>string', 'resize-string', 'string',
- 'string?'), suffix=r'(\s+)')
-
- builtin_vectors = words((
- '1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
- suffix=r'(\s+)')
-
- builtin_continuations = words((
- '<condition>', '<continuation>', '<restart>', 'attempt-all',
- 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
- 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
- 'condition?', 'continuation', 'continuation?', 'continue',
- 'continue-restart', 'continue-with', 'current-continuation',
- 'error', 'error-continuation', 'error-in-thread', 'error-thread',
- 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
- 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
- 'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
- 'throw-restarts', 'with-datastack', 'with-return'), suffix=r'(\s+)')
-
- tokens = {
- 'root': [
- # factor allows a file to start with a shebang
- (r'#!.*$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- (r'\s+', Whitespace),
-
- # defining words
- (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Function)),
- (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace,
- Name.Function)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Function, Whitespace,
- Name.Class)),
- (r'(GENERIC:)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Function)),
- (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Function, Whitespace,
- Name.Function)),
- (r'(\()(\s)', bygroups(Name.Function, Whitespace), 'stackeffect'),
- (r'(;)(\s)', bygroups(Keyword, Whitespace)),
-
- # imports and namespaces
- (r'(USING:)(\s+)',
- bygroups(Keyword.Namespace, Whitespace), 'vocabs'),
- (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace,
- Whitespace, Name.Namespace)),
- (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace,
- Whitespace), 'words'),
- (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+)(=>)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Whitespace, Name.Function, Whitespace,
- Name.Namespace, Whitespace, Punctuation, Whitespace,
- Name.Function)),
- (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Whitespace, Name.Function, Whitespace,
- Name.Function)),
- (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Whitespace, Name.Function)),
-
- # tuples and classes
- (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+)(<)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace, Punctuation,
- Whitespace, Name.Class), 'slots'),
- (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Class), 'slots'),
- (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'(PREDICATE:)(\s+)(\S+)(\s+)(<)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace,
- Punctuation, Whitespace, Name.Class)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Function, Whitespace, Name.Class)),
- (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace, Name.Class)),
- (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Function)),
- (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Class)),
- (r'SINGLETONS:', Keyword, 'classes'),
-
- # other syntax
- (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
- bygroups(Keyword, Whitespace, Name.Function)),
- (r'(SYMBOLS:)(\s+)', bygroups(Keyword, Whitespace), 'words'),
- (r'(SYNTAX:)(\s+)', bygroups(Keyword, Whitespace)),
- (r'(ALIEN:)(\s+)', bygroups(Keyword, Whitespace)),
- (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Class)),
- (r'(FUNCTION:)(\s+)'
- r'(\S+)(\s+)(\S+)(\s+)'
- r'(\()(\s+)([^)]+)(\))(\s)',
- bygroups(Keyword.Namespace, Whitespace,
- Text, Whitespace, Name.Function, Whitespace,
- Punctuation, Whitespace, Text, Punctuation, Whitespace)),
- (r'(FUNCTION-ALIAS:)(\s+)'
- r'(\S+)(\s+)(\S+)(\s+)'
- r'(\S+)(\s+)'
- r'(\()(\s+)([^)]+)(\))(\s)',
- bygroups(Keyword.Namespace, Whitespace,
- Text, Whitespace, Name.Function, Whitespace,
- Name.Function, Whitespace,
- Punctuation, Whitespace, Text, Punctuation, Whitespace)),
-
- # vocab.private
- (r'(<PRIVATE|PRIVATE>)(\s)', bygroups(Keyword.Namespace, Whitespace)),
-
- # strings
- (r'"""\s(?:.|\n)*?\s"""', String),
- (r'"(?:\\\\|\\"|[^"])*"', String),
- (r'(\S+")(\s+)((?:\\\\|\\"|[^"])*")',
- bygroups(String, Whitespace, String)),
- (r'(CHAR:)(\s+)(\\[\\abfnrstv]|[^\\]\S*)(\s)',
- bygroups(String.Char, Whitespace, String.Char, Whitespace)),
-
- # comments
- (r'!\s+.*$', Comment),
- (r'#!\s+.*$', Comment),
- (r'/\*\s+(?:.|\n)*?\s\*/', Comment),
-
- # boolean constants
- (r'[tf]\b', Name.Constant),
-
- # symbols and literals
- (r'[\\$]\s+\S+', Name.Constant),
- (r'M\\\s+\S+\s+\S+', Name.Constant),
-
- # numbers
- (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
- (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
- (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'0b[01]+\s', Number.Bin),
- (r'0o[0-7]+\s', Number.Oct),
- (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
- (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
-
- # keywords
- (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
- Keyword),
-
- # builtins
- (builtin_kernel, bygroups(Name.Builtin, Whitespace)),
- (builtin_assocs, bygroups(Name.Builtin, Whitespace)),
- (builtin_combinators, bygroups(Name.Builtin, Whitespace)),
- (builtin_math, bygroups(Name.Builtin, Whitespace)),
- (builtin_sequences, bygroups(Name.Builtin, Whitespace)),
- (builtin_namespaces, bygroups(Name.Builtin, Whitespace)),
- (builtin_arrays, bygroups(Name.Builtin, Whitespace)),
- (builtin_io, bygroups(Name.Builtin, Whitespace)),
- (builtin_strings, bygroups(Name.Builtin, Whitespace)),
- (builtin_vectors, bygroups(Name.Builtin, Whitespace)),
- (builtin_continuations, bygroups(Name.Builtin, Whitespace)),
-
- # everything else is text
- (r'\S+', Text),
- ],
- 'stackeffect': [
- (r'\s+', Whitespace),
- (r'(\()(\s+)', bygroups(Name.Function, Whitespace), 'stackeffect'),
- (r'(\))(\s+)', bygroups(Name.Function, Whitespace), '#pop'),
- (r'(--)(\s+)', bygroups(Name.Function, Whitespace)),
- (r'\S+', Name.Variable),
- ],
- 'slots': [
- (r'\s+', Whitespace),
- (r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
- (r'(\{)(\s+)(\S+)(\s+)([^}]+)(\s+)(\})(\s+)',
- bygroups(Text, Whitespace, Name.Variable, Whitespace,
- Text, Whitespace, Text, Whitespace)),
- (r'\S+', Name.Variable),
- ],
- 'vocabs': [
- (r'\s+', Whitespace),
- (r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
- (r'\S+', Name.Namespace),
- ],
- 'classes': [
- (r'\s+', Whitespace),
- (r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
- (r'\S+', Name.Class),
- ],
- 'words': [
- (r'\s+', Whitespace),
- (r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
- (r'\S+', Name.Function),
- ],
- }
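
The builtin word lists above are compiled with words(..., suffix=r'(\s+)') and split back into a name plus its trailing whitespace by bygroups; a rough sketch on a made-up Factor definition, assuming Pygments is importable:

# Illustrative only: ':' starts a definition (Keyword + Name.Function), the
# stack-effect comment is handled by the 'stackeffect' state, and builtins
# such as '*' come out as Name.Builtin followed by a separate Whitespace token.
from pygments.lexers import FactorLexer

for tok_type, value in FactorLexer().get_tokens(': double ( x -- y ) 2 * ;\n'):
    print(tok_type, repr(value))
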
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/fantom.py b/venv/lib/python3.11/site-packages/pygments/lexers/fantom.py
deleted file mode 100644
index 7182d81..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/fantom.py
+++ /dev/null
@@ -1,251 +0,0 @@
-"""
- pygments.lexers.fantom
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Fantom language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from string import Template
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Whitespace
-
-__all__ = ['FantomLexer']
-
-
-class FantomLexer(RegexLexer):
- """
- For Fantom source code.
-
- .. versionadded:: 1.5
- """
- name = 'Fantom'
- aliases = ['fan']
- filenames = ['*.fan']
- mimetypes = ['application/x-fantom']
-
- # often used regexes
- def s(str):
- return Template(str).substitute(
- dict(
- pod=r'[\"\w\.]+',
- eos=r'\n|;',
- id=r'[a-zA-Z_]\w*',
- # all chars which can be part of a type definition. Starts with
- # either a letter, [ (maps), or | (funcs)
- type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?',
- )
- )
-
- tokens = {
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline
- (r'//.*?$', Comment.Single), # Single line
- # TODO: highlight references in fandocs
- (r'\*\*.*?$', Comment.Special), # Fandoc
- (r'#.*$', Comment.Single) # Shell-style
- ],
- 'literals': [
- (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration
- (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot
- (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal
- (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex
- (r'\b-?[\d_]+', Number.Integer), # Int
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char
- (r'"', Punctuation, 'insideStr'), # Opening quote
- (r'`', Punctuation, 'insideUri'), # Opening accent
- (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null
- (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, String, Punctuation)),
- (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, Name.Function)),
- (r'\[,\]', Literal), # Empty list
- (s(r'($type)(\[,\])'), # Typed empty list
- bygroups(using(this, state='inType'), Literal)),
- (r'\[:\]', Literal), # Empty Map
- (s(r'($type)(\[:\])'),
- bygroups(using(this, state='inType'), Literal)),
- ],
- 'insideStr': [
- (r'\\\\', String.Escape), # Escaped backslash
- (r'\\"', String.Escape), # Escaped "
- (r'\\`', String.Escape), # Escaped `
- (r'\$\w+', String.Interpol), # Subst var
- (r'\$\{.*?\}', String.Interpol), # Subst expr
- (r'"', Punctuation, '#pop'), # Closing quot
- (r'.', String) # String content
- ],
- 'insideUri': [ # TODO: remove copy/paste str/uri
- (r'\\\\', String.Escape), # Escaped backslash
- (r'\\"', String.Escape), # Escaped "
- (r'\\`', String.Escape), # Escaped `
- (r'\$\w+', String.Interpol), # Subst var
- (r'\$\{.*?\}', String.Interpol), # Subst expr
- (r'`', Punctuation, '#pop'), # Closing tick
- (r'.', String.Backtick) # URI content
- ],
- 'protectionKeywords': [
- (r'\b(public|protected|private|internal)\b', Keyword),
- ],
- 'typeKeywords': [
- (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
- ],
- 'methodKeywords': [
- (r'\b(abstract|native|once|override|static|virtual|final)\b',
- Keyword),
- ],
- 'fieldKeywords': [
- (r'\b(abstract|const|final|native|override|static|virtual|'
- r'readonly)\b', Keyword)
- ],
- 'otherKeywords': [
- (words((
- 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while',
- 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue',
- 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
- ],
- 'operators': [
- (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
- ],
- 'inType': [
- (r'[\[\]|\->:?]', Punctuation),
- (s(r'$id'), Name.Class),
- default('#pop'),
-
- ],
- 'root': [
- include('comments'),
- include('protectionKeywords'),
- include('typeKeywords'),
- include('methodKeywords'),
- include('fieldKeywords'),
- include('literals'),
- include('otherKeywords'),
- include('operators'),
- (r'using\b', Keyword.Namespace, 'using'), # Using stmt
- (r'@\w+', Name.Decorator, 'facet'), # Symbol
- (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Whitespace, Name.Class),
- 'inheritance'), # Inheritance list
-
- # Type var := val
- (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
- bygroups(using(this, state='inType'), Whitespace,
- Name.Variable, Whitespace, Operator)),
-
- # var := val
- (s(r'($id)(\s*)(:=)'),
- bygroups(Name.Variable, Whitespace, Operator)),
-
- # .someId( or ->someId( ###
- (s(r'(\.|(?:\->))($id)(\s*)(\()'),
- bygroups(Operator, Name.Function, Whitespace, Punctuation),
- 'insideParen'),
-
- # .someId or ->someId
- (s(r'(\.|(?:\->))($id)'),
- bygroups(Operator, Name.Function)),
-
- # new makeXXX (
- (r'(new)(\s+)(make\w*)(\s*)(\()',
- bygroups(Keyword, Whitespace, Name.Function, Whitespace, Punctuation),
- 'insideMethodDeclArgs'),
-
- # Type name (
- (s(r'($type)([ \t]+)' # Return type and whitespace
- r'($id)(\s*)(\()'), # method name + open brace
- bygroups(using(this, state='inType'), Whitespace,
- Name.Function, Whitespace, Punctuation),
- 'insideMethodDeclArgs'),
-
- # ArgType argName,
- (s(r'($type)(\s+)($id)(\s*)(,)'),
- bygroups(using(this, state='inType'), Whitespace, Name.Variable,
- Whitespace, Punctuation)),
-
- # ArgType argName)
- # Covered in 'insideParen' state
-
- # ArgType argName -> ArgType|
- (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
- bygroups(using(this, state='inType'), Whitespace, Name.Variable,
- Whitespace, Punctuation, Whitespace, using(this, state='inType'),
- Punctuation)),
-
- # ArgType argName|
- (s(r'($type)(\s+)($id)(\s*)(\|)'),
- bygroups(using(this, state='inType'), Whitespace, Name.Variable,
- Whitespace, Punctuation)),
-
- # Type var
- (s(r'($type)([ \t]+)($id)'),
- bygroups(using(this, state='inType'), Whitespace,
- Name.Variable)),
-
- (r'\(', Punctuation, 'insideParen'),
- (r'\{', Punctuation, 'insideBrace'),
- (r'\s+', Whitespace),
- (r'.', Text)
- ],
- 'insideParen': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'insideMethodDeclArgs': [
- (r'\)', Punctuation, '#pop'),
- (s(r'($type)(\s+)($id)(\s*)(\))'),
- bygroups(using(this, state='inType'), Whitespace, Name.Variable,
- Whitespace, Punctuation), '#pop'),
- include('root'),
- ],
- 'insideBrace': [
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'inheritance': [
- (r'\s+', Whitespace), # Whitespace
- (r':|,', Punctuation),
- (r'(?:(\w+)(::))?(\w+)',
- bygroups(Name.Namespace, Punctuation, Name.Class)),
- (r'\{', Punctuation, '#pop')
- ],
- 'using': [
- (r'[ \t]+', Whitespace), # consume whitespaces
- (r'(\[)(\w+)(\])',
- bygroups(Punctuation, Comment.Special, Punctuation)), # ffi
- (r'(\")?([\w.]+)(\")?',
- bygroups(Punctuation, Name.Namespace, Punctuation)), # podname
- (r'::', Punctuation, 'usingClass'),
- default('#pop')
- ],
- 'usingClass': [
- (r'[ \t]+', Whitespace), # consume whitespaces
- (r'(as)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Whitespace, Name.Class), '#pop:2'),
- (r'[\w$]+', Name.Class),
- default('#pop:2') # jump out to root state
- ],
- 'facet': [
- (r'\s+', Whitespace),
- (r'\{', Punctuation, 'facetFields'),
- default('#pop')
- ],
- 'facetFields': [
- include('comments'),
- include('literals'),
- include('operators'),
- (r'\s+', Whitespace),
- (r'(\s*)(\w+)(\s*)(=)', bygroups(Whitespace, Name, Whitespace, Operator)),
- (r'\}', Punctuation, '#pop'),
- (r'\s+', Whitespace),
- (r'.', Text)
- ],
- }
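
The s() helper above is plain string.Template substitution; a standalone sketch of the expansion it performs for the "Type var := val" rule, reusing the same sub-patterns (variable names here are hypothetical):

# Illustrative only: $type and $id placeholders are expanded before the
# token table is compiled; this prints the regex that rule actually uses.
from string import Template

id_re   = r'[a-zA-Z_]\w*'
type_re = r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?'
print(Template(r'($type)([ \t]+)($id)(\s*)(:=)').substitute(type=type_re, id=id_re))
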
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/felix.py b/venv/lib/python3.11/site-packages/pygments/lexers/felix.py
deleted file mode 100644
index 5e34f05..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/felix.py
+++ /dev/null
@@ -1,276 +0,0 @@
-"""
- pygments.lexers.felix
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Felix language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words, \
- combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['FelixLexer']
-
-
-class FelixLexer(RegexLexer):
- """
- For Felix source code.
-
- .. versionadded:: 1.2
- """
-
- name = 'Felix'
- url = 'http://www.felix-lang.org'
- aliases = ['felix', 'flx']
- filenames = ['*.flx', '*.flxh']
- mimetypes = ['text/x-felix']
-
- preproc = (
- 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
- )
-
- keywords = (
- '_', '_deref', 'all', 'as',
- 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
- 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
- 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
- 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
- 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
- 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
- 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
- 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
- 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
- 'when', 'whilst', 'with', 'yield',
- )
-
- keyword_directives = (
- '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
- 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
- 'package', 'private', 'pod', 'property', 'public', 'publish',
- 'requires', 'todo', 'virtual', 'use',
- )
-
- keyword_declarations = (
- 'def', 'let', 'ref', 'val', 'var',
- )
-
- keyword_types = (
- 'unit', 'void', 'any', 'bool',
- 'byte', 'offset',
- 'address', 'caddress', 'cvaddress', 'vaddress',
- 'tiny', 'short', 'int', 'long', 'vlong',
- 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
- 'int8', 'int16', 'int32', 'int64',
- 'uint8', 'uint16', 'uint32', 'uint64',
- 'float', 'double', 'ldouble',
- 'complex', 'dcomplex', 'lcomplex',
- 'imaginary', 'dimaginary', 'limaginary',
- 'char', 'wchar', 'uchar',
- 'charp', 'charcp', 'ucharp', 'ucharcp',
- 'string', 'wstring', 'ustring',
- 'cont',
- 'array', 'varray', 'list',
- 'lvalue', 'opt', 'slice',
- )
-
- keyword_constants = (
- 'false', 'true',
- )
-
- operator_words = (
- 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
- )
-
- name_builtins = (
- '_svc', 'while',
- )
-
- name_pseudo = (
- 'root', 'self', 'this',
- )
-
- decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Keywords
- (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
- 'union'), suffix=r'\b'),
- Keyword, 'funcname'),
- (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
- Keyword, 'classname'),
- (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
-
- (words(keywords, suffix=r'\b'), Keyword),
- (words(keyword_directives, suffix=r'\b'), Name.Decorator),
- (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
- (words(keyword_types, suffix=r'\b'), Keyword.Type),
- (words(keyword_constants, suffix=r'\b'), Keyword.Constant),
-
- # Operators
- include('operators'),
-
- # Float Literal
- # -- Hex Float
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
- Number.Float),
-
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
- # -- Octal
- (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
-
- # Strings
- ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
- ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
- ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
- ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
- ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
- ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
- ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
- ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
-
- # Punctuation
- (r'[\[\]{}:(),;?]', Punctuation),
-
- # Labels
- (r'[a-zA-Z_]\w*:>', Name.Label),
-
- # Identifiers
- (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
- (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
-
- include('comment'),
-
- # Preprocessor
- (r'(#)(\s*)(if)(\s+)(0)',
- bygroups(Comment.Preproc, Whitespace, Comment.Preproc,
- Whitespace, Comment.Preproc), 'if0'),
- (r'#', Comment.Preproc, 'macro'),
- ],
- 'operators': [
- (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
- (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
- ],
- 'comment': [
- (r'//(.*?)$', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment2'),
- ],
- 'comment2': [
- (r'[^/*]', Comment.Multiline),
- (r'/[*]', Comment.Multiline, '#push'),
- (r'[*]/', Comment.Multiline, '#pop'),
- (r'[/*]', Comment.Multiline),
- ],
- 'if0': [
- (r'^(\s*)(#if.*?(?<!\\))(\n)',
- bygroups(Whitespace, Comment, Whitespace), '#push'),
- (r'^(\s*)(#endif.*?(?<!\\))(\n)',
- bygroups(Whitespace, Comment, Whitespace), '#pop'),
- (r'(.*?)(\n)', bygroups(Comment, Whitespace)),
- ],
- 'macro': [
- include('comment'),
- (r'(import|include)(\s+)(<[^>]*?>)',
- bygroups(Comment.Preproc, Whitespace, String), '#pop'),
- (r'(import|include)(\s+)("[^"]*?")',
- bygroups(Comment.Preproc, Whitespace, String), '#pop'),
- (r"(import|include)(\s+)('[^']*?')",
- bygroups(Comment.Preproc, Whitespace, String), '#pop'),
- (r'[^/\n]+', Comment.Preproc),
- # (r'/[*](.|\n)*?[*]/', Comment),
- # (r'//.*?\n', Comment, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Whitespace, '#pop'),
- ],
- 'funcname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
- # anonymous functions
- (r'(?=\()', Text, '#pop'),
- ],
- 'classname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # anonymous classes
- (r'(?=\{)', Text, '#pop'),
- ],
- 'modulename': [
- include('whitespace'),
- (r'\[', Punctuation, ('modulename2', 'tvarlist')),
- default('modulename2'),
- ],
- 'modulename2': [
- include('whitespace'),
- (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
- ],
- 'tvarlist': [
- include('whitespace'),
- include('operators'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(with|where)\b', Keyword),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- # included here again for raw strings
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- # included here again for raw strings
- (r"\\\\|\\'|\\\n", String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
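
A quick sketch of the string handling above: an ordinary double-quoted string enters the combined('stringescape', 'dqs') state, so escapes are tokenized separately (illustrative input, assuming Pygments is importable):

# Illustrative only: the backslash escape inside the string is emitted as
# String.Escape, the surrounding characters as String.
from pygments.lexers import FelixLexer

for tok_type, value in FelixLexer().get_tokens(r'"a\nb"'):
    print(tok_type, repr(value))
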
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/fift.py b/venv/lib/python3.11/site-packages/pygments/lexers/fift.py
deleted file mode 100644
index 027175b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/fift.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
- pygments.lexers.fift
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Fift.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Literal, Comment, Name, String, Number, Whitespace
-
-__all__ = ['FiftLexer']
-
-
-class FiftLexer(RegexLexer):
- """
- For Fift source code.
- """
-
- name = 'Fift'
- aliases = ['fift', 'fif']
- filenames = ['*.fif']
- url = 'https://ton-blockchain.github.io/docs/fiftbase.pdf'
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
-
- include('comments'),
-
- (r'[\.+]?\"', String, 'string'),
-
- # numbers
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'0b[01]+', Number.Bin),
- (r'-?[0-9]+("/"-?[0-9]+)?', Number.Decimal),
-
- # slices
- (r'b\{[01]+\}', Literal),
- (r'x\{[0-9a-fA-F_]+\}', Literal),
-
- # byte literal
- (r'B\{[0-9a-fA-F_]+\}', Literal),
-
- # treat anything as word
- (r'\S+', Name)
- ],
-
- 'string': [
- (r'\\.', String.Escape),
- (r'\"', String, '#pop'),
- (r'[^\"\r\n\\]+', String)
- ],
-
- 'comments': [
- (r'//.*', Comment.Singleline),
- (r'/\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
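
A small sketch of the rules above on a made-up Fift fragment, assuming this Pygments version still registers the Fift lexer:

# Illustrative only: x{...} slices become Literal tokens, ."..." opens the
# string state, numbers hit the decimal rule, and unknown words fall back to Name.
from pygments.lexers import FiftLexer

for tok_type, value in FiftLexer().get_tokens('x{DEADBEEF} 17 dup ."ok"'):
    print(tok_type, repr(value))
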
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/floscript.py b/venv/lib/python3.11/site-packages/pygments/lexers/floscript.py
deleted file mode 100644
index 6cc2971..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/floscript.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""
- pygments.lexers.floscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for FloScript.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['FloScriptLexer']
-
-
-class FloScriptLexer(RegexLexer):
- """
- For FloScript configuration language source code.
-
- .. versionadded:: 2.4
- """
-
- name = 'FloScript'
- url = 'https://github.com/ioflo/ioflo'
- aliases = ['floscript', 'flo']
- filenames = ['*.flo']
-
- def innerstring_rules(ttype):
- return [
- # the old style '%s' % (...) string formatting
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsux%]', String.Interpol),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r'%', ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
-
- (r'[]{}:(),;[]', Punctuation),
- (r'(\\)(\n)', bygroups(Text, Whitespace)),
- (r'\\', Text),
- (r'(to|by|with|from|per|for|cum|qua|via|as|at|in|of|on|re|is|if|be|into|'
- r'and|not)\b', Operator.Word),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
- (r'(load|init|server|logger|log|loggee|first|over|under|next|done|timeout|'
- r'repeat|native|benter|enter|recur|exit|precur|renter|rexit|print|put|inc|'
- r'copy|set|aux|rear|raze|go|let|do|bid|ready|start|stop|run|abort|use|flo|'
- r'give|take)\b', Name.Builtin),
- (r'(frame|framer|house)\b', Keyword),
- ('"', String, 'string'),
-
- include('name'),
- include('numbers'),
- (r'#.+$', Comment.Single),
- ],
- 'string': [
- ('[^"]+', String),
- ('"', String, '#pop'),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
-
- 'name': [
- (r'@[\w.]+', Name.Decorator),
- (r'[a-zA-Z_]\w*', Name),
- ],
- }
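
A brief sketch of the token classes above on a made-up FloScript-like snippet (illustrative only; not meant as valid ioflo input):

# Illustrative only: 'house' and 'frame' are Keyword, builtins such as 'do',
# 'go', 'start' and 'next' come out as Name.Builtin, the rest as plain Name.
from pygments.lexers import FloScriptLexer

sample = 'house demo\nframe start\n  do jump\n  go next\n'
for tok_type, value in FloScriptLexer().get_tokens(sample):
    print(tok_type, repr(value))
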
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/forth.py b/venv/lib/python3.11/site-packages/pygments/lexers/forth.py
deleted file mode 100644
index 7feeaef..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/forth.py
+++ /dev/null
@@ -1,179 +0,0 @@
-"""
- pygments.lexers.forth
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Forth language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Keyword, Name, String, Number, \
- Whitespace
-
-
-__all__ = ['ForthLexer']
-
-
-class ForthLexer(RegexLexer):
- """
- Lexer for Forth files.
-
- .. versionadded:: 2.2
- """
- name = 'Forth'
- url = 'https://www.forth.com/forth/'
- aliases = ['forth']
- filenames = ['*.frt', '*.fs']
- mimetypes = ['application/x-forth']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- # All comment types
- (r'\\.*?$', Comment.Single),
- (r'\([\s].*?\)', Comment.Single),
- # defining words. The next word is a new command name
- (r'(:|variable|constant|value|buffer:)(\s+)',
- bygroups(Keyword.Namespace, Whitespace), 'worddef'),
- # strings are rather simple
- (r'([.sc]")(\s+?)', bygroups(String, Whitespace), 'stringdef'),
- # keywords from the various wordsets
- # *** Wordset BLOCK
- (r'(blk|block|buffer|evaluate|flush|load|save-buffers|update|'
- # *** Wordset BLOCK-EXT
- r'empty-buffers|list|refill|scr|thru|'
- # *** Wordset CORE
- r'\#s|\*\/mod|\+loop|\/mod|0<|0=|1\+|1-|2!|'
- r'2\*|2\/|2@|2drop|2dup|2over|2swap|>body|'
- r'>in|>number|>r|\?dup|abort|abort\"|abs|'
- r'accept|align|aligned|allot|and|base|begin|'
- r'bl|c!|c,|c@|cell\+|cells|char|char\+|'
- r'chars|constant|count|cr|create|decimal|'
- r'depth|do|does>|drop|dup|else|emit|environment\?|'
- r'evaluate|execute|exit|fill|find|fm\/mod|'
- r'here|hold|i|if|immediate|invert|j|key|'
- r'leave|literal|loop|lshift|m\*|max|min|'
- r'mod|move|negate|or|over|postpone|quit|'
- r'r>|r@|recurse|repeat|rot|rshift|s\"|s>d|'
- r'sign|sm\/rem|source|space|spaces|state|swap|'
- r'then|type|u\.|u\<|um\*|um\/mod|unloop|until|'
- r'variable|while|word|xor|\[char\]|\[\'\]|'
- r'@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|'
- # *** Wordset CORE-EXT
- r'\.r|0<>|'
- r'0>|2>r|2r>|2r@|:noname|\?do|again|c\"|'
- r'case|compile,|endcase|endof|erase|false|'
- r'hex|marker|nip|of|pad|parse|pick|refill|'
- r'restore-input|roll|save-input|source-id|to|'
- r'true|tuck|u\.r|u>|unused|value|within|'
- r'\[compile\]|'
- # *** Wordset CORE-EXT-obsolescent
- r'\#tib|convert|expect|query|span|'
- r'tib|'
- # *** Wordset DOUBLE
- r'2constant|2literal|2variable|d\+|d-|'
- r'd\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|'
- r'dabs|dmax|dmin|dnegate|m\*\/|m\+|'
- # *** Wordset DOUBLE-EXT
- r'2rot|du<|'
- # *** Wordset EXCEPTION
- r'catch|throw|'
- # *** Wordset EXCEPTION-EXT
- r'abort|abort\"|'
- # *** Wordset FACILITY
- r'at-xy|key\?|page|'
- # *** Wordset FACILITY-EXT
- r'ekey|ekey>char|ekey\?|emit\?|ms|time&date|'
- # *** Wordset FILE
- r'BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|'
- r'FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|'
- r'R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|'
- r'S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|'
- # *** Wordset FILE-EXT
- r'FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|'
- # *** Wordset FLOAT
- r'>float|d>f|'
- r'f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|'
- r'falign|faligned|fconstant|fdepth|fdrop|fdup|'
- r'fliteral|float\+|floats|floor|fmax|fmin|'
- r'fnegate|fover|frot|fround|fswap|fvariable|'
- r'represent|'
- # *** Wordset FLOAT-EXT
- r'df!|df@|dfalign|dfaligned|dfloat\+|'
- r'dfloats|f\*\*|f\.|fabs|facos|facosh|falog|'
- r'fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|'
- r'fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|'
- r'fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|'
- r'set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|'
- r'sfloats|'
- # *** Wordset LOCAL
- r'\(local\)|to|'
- # *** Wordset LOCAL-EXT
- r'locals\||'
- # *** Wordset MEMORY
- r'allocate|free|resize|'
- # *** Wordset SEARCH
- r'definitions|find|forth-wordlist|get-current|'
- r'get-order|search-wordlist|set-current|set-order|'
- r'wordlist|'
- # *** Wordset SEARCH-EXT
- r'also|forth|only|order|previous|'
- # *** Wordset STRING
- r'-trailing|\/string|blank|cmove|cmove>|compare|'
- r'search|sliteral|'
- # *** Wordset TOOLS
- r'.s|dump|see|words|'
- # *** Wordset TOOLS-EXT
- r';code|'
- r'ahead|assembler|bye|code|cs-pick|cs-roll|'
- r'editor|state|\[else\]|\[if\]|\[then\]|'
- # *** Wordset TOOLS-EXT-obsolescent
- r'forget|'
- # Forth 2012
- r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
- r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
- r'name>interpret|name>compile|name>string|'
- r'cfield:|end-structure)(?!\S)', Keyword),
-
- # Numbers
- (r'(\$[0-9A-F]+)', Number.Hex),
- (r'(\#|%|&|\-|\+)?[0-9]+', Number.Integer),
- (r'(\#|%|&|\-|\+)?[0-9.]+', Keyword.Type),
- # amforth specific
- (r'(@i|!i|@e|!e|pause|noop|turnkey|sleep|'
- r'itype|icompare|sp@|sp!|rp@|rp!|up@|up!|'
- r'>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|'
- r'find-name|1ms|'
- r'sp0|rp0|\(evaluate\)|int-trap|int!)(?!\S)',
- Name.Constant),
- # a proposal
- (r'(do-recognizer|r:fail|recognizer:|get-recognizers|'
- r'set-recognizers|r:float|r>comp|r>int|r>post|'
- r'r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|'
- r'rec:num|rec:float|rec:word)(?!\S)', Name.Decorator),
- # defining words. The next word is a new command name
- (r'(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)',
- bygroups(Keyword.Namespace, Text), 'worddef'),
-
- (r'\S+', Name.Function), # Anything else is executed
-
- ],
- 'worddef': [
- (r'\S+', Name.Class, '#pop'),
- ],
- 'stringdef': [
- (r'[^"]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- """Forth uses : COMMAND ; quite a lot in a single line, so we're trying
- to find that."""
- if re.search('\n:[^\n]+;\n', text):
- return 0.3
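
A minimal sketch of the heuristic above, assuming the usual 0.0-1.0 wrapping of analyse_text:

# Illustrative only: a colon definition on its own line satisfies the
# '\n:[^\n]+;\n' search and yields a modest 0.3 confidence.
from pygments.lexers import ForthLexer

print(ForthLexer.analyse_text('\n: SQUARE dup * ;\n'))   # 0.3
print(ForthLexer.analyse_text('just prose'))             # 0.0
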
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/fortran.py b/venv/lib/python3.11/site-packages/pygments/lexers/fortran.py
deleted file mode 100644
index cee254f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/fortran.py
+++ /dev/null
@@ -1,213 +0,0 @@
-"""
- pygments.lexers.fortran
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Fortran languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, include, words, using, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['FortranLexer', 'FortranFixedLexer']
-
-
-class FortranLexer(RegexLexer):
- """
- Lexer for FORTRAN 90 code.
-
- .. versionadded:: 0.10
- """
- name = 'Fortran'
- url = 'https://fortran-lang.org/'
- aliases = ['fortran', 'f90']
- filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
- mimetypes = ['text/x-fortran']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
- # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
- # Logical (?): NOT, AND, OR, EQV, NEQV
-
- # Builtins:
- # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
-
- tokens = {
- 'root': [
- (r'^#.*\n', Comment.Preproc),
- (r'!.*\n', Comment),
- include('strings'),
- include('core'),
- (r'[a-z][\w$]*', Name),
- include('nums'),
- (r'[\s]+', Text.Whitespace),
- ],
- 'core': [
- # Statements
-
- (r'\b(DO)(\s+)(CONCURRENT)\b', bygroups(Keyword, Text.Whitespace, Keyword)),
- (r'\b(GO)(\s*)(TO)\b', bygroups(Keyword, Text.Whitespace, Keyword)),
-
- (words((
- 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE',
- 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND',
- 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE',
- 'CODIMENSION', 'COMMON', 'CONTIGUOUS', 'CONTAINS',
- 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
- 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ELSEIF', 'ENCODE',
- 'END', 'ENDASSOCIATE', 'ENDBLOCK', 'ENDDO', 'ENDENUM', 'ENDFORALL',
- 'ENDFUNCTION', 'ENDIF', 'ENDINTERFACE', 'ENDMODULE', 'ENDPROGRAM',
- 'ENDSELECT', 'ENDSUBMODULE', 'ENDSUBROUTINE', 'ENDTYPE', 'ENDWHERE',
- 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'ERROR STOP', 'EXIT',
- 'EXTENDS', 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
- 'FUNCTION', 'GENERIC', 'IF', 'IMAGES', 'IMPLICIT',
- 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
- 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
- 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'ONLY', 'OPEN',
- 'OPTIONAL', 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT',
- 'PRIVATE', 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
- 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
- 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
- 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
- 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'),
- Keyword),
-
- # Data Types
- (words((
- 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER',
- 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG',
- 'C_SIGNED_CHAR', 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T',
- 'C_INT64_T', 'C_INT_LEAST8_T', 'C_INT_LEAST16_T', 'C_INT_LEAST32_T',
- 'C_INT_LEAST64_T', 'C_INT_FAST8_T', 'C_INT_FAST16_T', 'C_INT_FAST32_T',
- 'C_INT_FAST64_T', 'C_INTMAX_T', 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE',
- 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', 'C_DOUBLE_COMPLEX',
- 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', 'C_FUNPTR'),
- prefix=r'\b', suffix=r'\s*\b'),
- Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
-
- (r'(::)', Keyword.Declaration),
-
- (r'[()\[\],:&%;.]', Punctuation),
- # Intrinsics
- (words((
- 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL',
- 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog',
- 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH',
- 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref',
- 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0',
- 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE',
- 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp',
- 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count',
- 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift',
- 'CSin', 'CSqRt', 'CTime', 'C_Loc', 'C_Associated',
- 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer',
- 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc',
- 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return',
- 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin',
- 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJN', 'DbesY',
- 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF',
- 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DMax',
- 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH',
- 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime',
- 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime',
- 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of',
- 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush',
- 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell',
- 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument',
- 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog',
- 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs',
- 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits',
- 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr',
- 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index',
- 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC',
- 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End',
- 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound',
- 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk',
- 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat',
- 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent',
- 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc',
- 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits',
- 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images',
- 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product',
- 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real',
- 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift',
- 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind',
- 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape',
- 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH',
- 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand',
- 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock',
- 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer',
- 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask',
- 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp',
- 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'),
- Name.Builtin),
-
- # Booleans
- (r'\.(true|false)\.', Name.Builtin),
- # Comparison operators
- (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
- ],
-
- 'strings': [
- (r'"(\\[0-7]+|\\[^0-7]|[^"\\])*"', String.Double),
- (r"'(\\[0-7]+|\\[^0-7]|[^'\\])*'", String.Single),
- ],
-
- 'nums': [
- (r'\d+(?![.e])(_([1-9]|[a-z]\w*))?', Number.Integer),
- (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
- (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
- (r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_([1-9]|[a-z]\w*))?', Number.Float),
- ],
- }
-
-
-class FortranFixedLexer(RegexLexer):
- """
- Lexer for fixed format Fortran.
-
- .. versionadded:: 2.1
- """
- name = 'FortranFixed'
- aliases = ['fortranfixed']
- filenames = ['*.f', '*.F']
-
- flags = re.IGNORECASE
-
- def _lex_fortran(self, match, ctx=None):
- """Lex a line just as free form fortran without line break."""
- lexer = FortranLexer()
- text = match.group(0) + "\n"
- for index, token, value in lexer.get_tokens_unprocessed(text):
- value = value.replace('\n', '')
- if value != '':
- yield index, token, value
-
- tokens = {
- 'root': [
- (r'[C*].*\n', Comment),
- (r'#.*\n', Comment.Preproc),
- (r' {0,4}!.*\n', Comment),
- (r'(.{5})', Name.Label, 'cont-char'),
- (r'.*\n', using(FortranLexer)),
- ],
- 'cont-char': [
- (' ', Text, 'code'),
- ('0', Comment, 'code'),
- ('.', Generic.Strong, 'code'),
- ],
- 'code': [
- (r'(.{66})(.*)(\n)',
- bygroups(_lex_fortran, Comment, Text.Whitespace), 'root'),
- (r'(.*)(\n)', bygroups(_lex_fortran, Text.Whitespace), 'root'),
- default('root'),
- ]
- }
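For context on the FortranFixedLexer removed above: each line is split into the classic fixed-form fields (comment flag in column 1, label in columns 1-5, continuation character in column 6, statement field up to column 72), and the statement field is re-lexed with the free-form FortranLexer via _lex_fortran. A minimal usage sketch, assuming a Pygments install that still ships this lexer:

    from pygments import highlight
    from pygments.lexers import FortranFixedLexer
    from pygments.formatters import TerminalFormatter

    source = (
        "C     CLASSIC FIXED-FORM COMMENT\n"
        "      PROGRAM HELLO\n"
        "      PRINT *, 'HELLO'\n"
        "      END\n"
    )

    # Columns 7-72 of each line are handed to _lex_fortran, which strips
    # the newline and reuses the free-form FortranLexer token stream.
    print(highlight(source, FortranFixedLexer(), TerminalFormatter()))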
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/foxpro.py b/venv/lib/python3.11/site-packages/pygments/lexers/foxpro.py
deleted file mode 100644
index 9d8d951..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/foxpro.py
+++ /dev/null
@@ -1,427 +0,0 @@
-"""
- pygments.lexers.foxpro
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Simple lexer for Microsoft Visual FoxPro source code.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer
-from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
- Name, String
-
-__all__ = ['FoxProLexer']
-
-
-class FoxProLexer(RegexLexer):
- """Lexer for Microsoft Visual FoxPro language.
-
- FoxPro syntax allows to shorten all keywords and function names
- to 4 characters. Shortened forms are not recognized by this lexer.
-
- .. versionadded:: 1.6
- """
-
- name = 'FoxPro'
- aliases = ['foxpro', 'vfp', 'clipper', 'xbase']
- filenames = ['*.PRG', '*.prg']
- mimetype = []
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r';\s*\n', Punctuation), # consume newline
- (r'(^|\n)\s*', Text, 'newline'),
-
- # Square brackets may be used for array indices
- # and for string literals. Look for arrays
- # before matching string literals.
- (r'(?<=\w)\[[0-9, ]+\]', Text),
- (r'\'[^\'\n]*\'|"[^"\n]*"|\[[^]*]\]', String),
- (r'(^\s*\*|&&|&amp;&amp;).*?\n', Comment.Single),
-
- (r'(ABS|ACLASS|ACOPY|ACOS|ADATABASES|ADBOBJECTS|ADDBS|'
- r'ADDPROPERTY|ADEL|ADIR|ADLLS|ADOCKSTATE|AELEMENT|AERROR|'
- r'AEVENTS|AFIELDS|AFONT|AGETCLASS|AGETFILEVERSION|AINS|'
- r'AINSTANCE|ALANGUAGE|ALEN|ALIAS|ALINES|ALLTRIM|'
- r'AMEMBERS|AMOUSEOBJ|ANETRESOURCES|APRINTERS|APROCINFO|'
- r'ASC|ASCAN|ASELOBJ|ASESSIONS|ASIN|ASORT|ASQLHANDLES|'
- r'ASTACKINFO|ASUBSCRIPT|AT|AT_C|ATAGINFO|ATAN|ATC|ATCC|'
- r'ATCLINE|ATLINE|ATN2|AUSED|AVCXCLASSES|BAR|BARCOUNT|'
- r'BARPROMPT|BETWEEN|BINDEVENT|BINTOC|BITAND|BITCLEAR|'
- r'BITLSHIFT|BITNOT|BITOR|BITRSHIFT|BITSET|BITTEST|BITXOR|'
- r'BOF|CANDIDATE|CAPSLOCK|CAST|CDOW|CDX|CEILING|CHR|CHRSAW|'
- r'CHRTRAN|CHRTRANC|CLEARRESULTSET|CMONTH|CNTBAR|CNTPAD|COL|'
- r'COM|Functions|COMARRAY|COMCLASSINFO|COMPOBJ|COMPROP|'
- r'COMRETURNERROR|COS|CPCONVERT|CPCURRENT|CPDBF|CREATEBINARY|'
- r'CREATEOBJECT|CREATEOBJECTEX|CREATEOFFLINE|CTOBIN|CTOD|'
- r'CTOT|CURDIR|CURSORGETPROP|CURSORSETPROP|CURSORTOXML|'
- r'CURVAL|DATE|DATETIME|DAY|DBC|DBF|DBGETPROP|DBSETPROP|'
- r'DBUSED|DDEAbortTrans|DDEAdvise|DDEEnabled|DDEExecute|'
- r'DDEInitiate|DDELastError|DDEPoke|DDERequest|DDESetOption|'
- r'DDESetService|DDESetTopic|DDETerminate|DEFAULTEXT|'
- r'DELETED|DESCENDING|DIFFERENCE|DIRECTORY|DISKSPACE|'
- r'DisplayPath|DMY|DODEFAULT|DOW|DRIVETYPE|DROPOFFLINE|'
- r'DTOC|DTOR|DTOS|DTOT|EDITSOURCE|EMPTY|EOF|ERROR|EVAL(UATE)?|'
- r'EVENTHANDLER|EVL|EXECSCRIPT|EXP|FCHSIZE|FCLOSE|FCOUNT|'
- r'FCREATE|FDATE|FEOF|FERROR|FFLUSH|FGETS|FIELD|FILE|'
- r'FILETOSTR|FILTER|FKLABEL|FKMAX|FLDLIST|FLOCK|FLOOR|'
- r'FONTMETRIC|FOPEN|FOR|FORCEEXT|FORCEPATH|FOUND|FPUTS|'
- r'FREAD|FSEEK|FSIZE|FTIME|FULLPATH|FV|FWRITE|'
- r'GETAUTOINCVALUE|GETBAR|GETCOLOR|GETCP|GETDIR|GETENV|'
- r'GETFILE|GETFLDSTATE|GETFONT|GETINTERFACE|'
- r'GETNEXTMODIFIED|GETOBJECT|GETPAD|GETPEM|GETPICT|'
- r'GETPRINTER|GETRESULTSET|GETWORDCOUNT|GETWORDNUM|'
- r'GETCURSORADAPTER|GOMONTH|HEADER|HOME|HOUR|ICASE|'
- r'IDXCOLLATE|IIF|IMESTATUS|INDBC|INDEXSEEK|INKEY|INLIST|'
- r'INPUTBOX|INSMODE|INT|ISALPHA|ISBLANK|ISCOLOR|ISDIGIT|'
- r'ISEXCLUSIVE|ISFLOCKED|ISLEADBYTE|ISLOWER|ISMEMOFETCHED|'
- r'ISMOUSE|ISNULL|ISPEN|ISREADONLY|ISRLOCKED|'
- r'ISTRANSACTABLE|ISUPPER|JUSTDRIVE|JUSTEXT|JUSTFNAME|'
- r'JUSTPATH|JUSTSTEM|KEY|KEYMATCH|LASTKEY|LEFT|LEFTC|LEN|'
- r'LENC|LIKE|LIKEC|LINENO|LOADPICTURE|LOCFILE|LOCK|LOG|'
- r'LOG10|LOOKUP|LOWER|LTRIM|LUPDATE|MAKETRANSACTABLE|MAX|'
- r'MCOL|MDOWN|MDX|MDY|MEMLINES|MEMORY|MENU|MESSAGE|'
- r'MESSAGEBOX|MIN|MINUTE|MLINE|MOD|MONTH|MRKBAR|MRKPAD|'
- r'MROW|MTON|MWINDOW|NDX|NEWOBJECT|NORMALIZE|NTOM|NUMLOCK|'
- r'NVL|OBJNUM|OBJTOCLIENT|OBJVAR|OCCURS|OEMTOANSI|OLDVAL|'
- r'ON|ORDER|OS|PAD|PADL|PARAMETERS|PAYMENT|PCOL|PCOUNT|'
- r'PEMSTATUS|PI|POPUP|PRIMARY|PRINTSTATUS|PRMBAR|PRMPAD|'
- r'PROGRAM|PROMPT|PROPER|PROW|PRTINFO|PUTFILE|PV|QUARTER|'
- r'RAISEEVENT|RAND|RAT|RATC|RATLINE|RDLEVEL|READKEY|RECCOUNT|'
- r'RECNO|RECSIZE|REFRESH|RELATION|REPLICATE|REQUERY|RGB|'
- r'RGBSCHEME|RIGHT|RIGHTC|RLOCK|ROUND|ROW|RTOD|RTRIM|'
- r'SAVEPICTURE|SCHEME|SCOLS|SEC|SECONDS|SEEK|SELECT|SET|'
- r'SETFLDSTATE|SETRESULTSET|SIGN|SIN|SKPBAR|SKPPAD|SOUNDEX|'
- r'SPACE|SQLCANCEL|SQLCOLUMNS|SQLCOMMIT|SQLCONNECT|'
- r'SQLDISCONNECT|SQLEXEC|SQLGETPROP|SQLIDLEDISCONNECT|'
- r'SQLMORERESULTS|SQLPREPARE|SQLROLLBACK|SQLSETPROP|'
- r'SQLSTRINGCONNECT|SQLTABLES|SQRT|SROWS|STR|STRCONV|'
- r'STREXTRACT|STRTOFILE|STRTRAN|STUFF|STUFFC|SUBSTR|'
- r'SUBSTRC|SYS|SYSMETRIC|TABLEREVERT|TABLEUPDATE|TAG|'
- r'TAGCOUNT|TAGNO|TAN|TARGET|TEXTMERGE|TIME|TRANSFORM|'
- r'TRIM|TTOC|TTOD|TXNLEVEL|TXTWIDTH|TYPE|UNBINDEVENTS|'
- r'UNIQUE|UPDATED|UPPER|USED|VAL|VARREAD|VARTYPE|VERSION|'
- r'WBORDER|WCHILD|WCOLS|WDOCKABLE|WEEK|WEXIST|WFONT|WLAST|'
- r'WLCOL|WLROW|WMAXIMUM|WMINIMUM|WONTOP|WOUTPUT|WPARENT|'
- r'WREAD|WROWS|WTITLE|WVISIBLE|XMLTOCURSOR|XMLUPDATEGRAM|'
- r'YEAR)(?=\s*\()', Name.Function),
-
- (r'_ALIGNMENT|_ASCIICOLS|_ASCIIROWS|_ASSIST|_BEAUTIFY|_BOX|'
- r'_BROWSER|_BUILDER|_CALCMEM|_CALCVALUE|_CLIPTEXT|_CONVERTER|'
- r'_COVERAGE|_CUROBJ|_DBLCLICK|_DIARYDATE|_DOS|_FOXDOC|_FOXREF|'
- r'_GALLERY|_GENGRAPH|_GENHTML|_GENMENU|_GENPD|_GENSCRN|'
- r'_GENXTAB|_GETEXPR|_INCLUDE|_INCSEEK|_INDENT|_LMARGIN|_MAC|'
- r'_MENUDESIGNER|_MLINE|_PADVANCE|_PAGENO|_PAGETOTAL|_PBPAGE|'
- r'_PCOLNO|_PCOPIES|_PDRIVER|_PDSETUP|_PECODE|_PEJECT|_PEPAGE|'
- r'_PLENGTH|_PLINENO|_PLOFFSET|_PPITCH|_PQUALITY|_PRETEXT|'
- r'_PSCODE|_PSPACING|_PWAIT|_RMARGIN|_REPORTBUILDER|'
- r'_REPORTOUTPUT|_REPORTPREVIEW|_SAMPLES|_SCCTEXT|_SCREEN|'
- r'_SHELL|_SPELLCHK|_STARTUP|_TABS|_TALLY|_TASKPANE|_TEXT|'
- r'_THROTTLE|_TOOLBOX|_TOOLTIPTIMEOUT|_TRANSPORT|_TRIGGERLEVEL|'
- r'_UNIX|_VFP|_WINDOWS|_WIZARD|_WRAP', Keyword.Pseudo),
-
- (r'THISFORMSET|THISFORM|THIS', Name.Builtin),
-
- (r'Application|CheckBox|Collection|Column|ComboBox|'
- r'CommandButton|CommandGroup|Container|Control|CursorAdapter|'
- r'Cursor|Custom|DataEnvironment|DataObject|EditBox|'
- r'Empty|Exception|Fields|Files|File|FormSet|Form|FoxCode|'
- r'Grid|Header|Hyperlink|Image|Label|Line|ListBox|Objects|'
- r'OptionButton|OptionGroup|PageFrame|Page|ProjectHook|Projects|'
- r'Project|Relation|ReportListener|Separator|Servers|Server|'
- r'Session|Shape|Spinner|Tables|TextBox|Timer|ToolBar|'
- r'XMLAdapter|XMLField|XMLTable', Name.Class),
-
- (r'm\.[a-z_]\w*', Name.Variable),
- (r'\.(F|T|AND|OR|NOT|NULL)\.|\b(AND|OR|NOT|NULL)\b', Operator.Word),
-
- (r'\.(ActiveColumn|ActiveControl|ActiveForm|ActivePage|'
- r'ActiveProject|ActiveRow|AddLineFeeds|ADOCodePage|Alias|'
- r'Alignment|Align|AllowAddNew|AllowAutoColumnFit|'
- r'AllowCellSelection|AllowDelete|AllowHeaderSizing|'
- r'AllowInsert|AllowModalMessages|AllowOutput|AllowRowSizing|'
- r'AllowSimultaneousFetch|AllowTabs|AllowUpdate|'
- r'AlwaysOnBottom|AlwaysOnTop|Anchor|Application|'
- r'AutoActivate|AutoCenter|AutoCloseTables|AutoComplete|'
- r'AutoCompSource|AutoCompTable|AutoHideScrollBar|'
- r'AutoIncrement|AutoOpenTables|AutoRelease|AutoSize|'
- r'AutoVerbMenu|AutoYield|BackColor|ForeColor|BackStyle|'
- r'BaseClass|BatchUpdateCount|BindControls|BorderColor|'
- r'BorderStyle|BorderWidth|BoundColumn|BoundTo|Bound|'
- r'BreakOnError|BufferModeOverride|BufferMode|'
- r'BuildDateTime|ButtonCount|Buttons|Cancel|Caption|'
- r'Centered|Century|ChildAlias|ChildOrder|ChildTable|'
- r'ClassLibrary|Class|ClipControls|Closable|CLSID|CodePage|'
- r'ColorScheme|ColorSource|ColumnCount|ColumnLines|'
- r'ColumnOrder|Columns|ColumnWidths|CommandClauses|'
- r'Comment|CompareMemo|ConflictCheckCmd|ConflictCheckType|'
- r'ContinuousScroll|ControlBox|ControlCount|Controls|'
- r'ControlSource|ConversionFunc|Count|CurrentControl|'
- r'CurrentDataSession|CurrentPass|CurrentX|CurrentY|'
- r'CursorSchema|CursorSource|CursorStatus|Curvature|'
- r'Database|DataSessionID|DataSession|DataSourceType|'
- r'DataSource|DataType|DateFormat|DateMark|Debug|'
- r'DeclareXMLPrefix|DEClassLibrary|DEClass|DefaultFilePath|'
- r'Default|DefOLELCID|DeleteCmdDataSourceType|DeleteCmdDataSource|'
- r'DeleteCmd|DeleteMark|Description|Desktop|'
- r'Details|DisabledBackColor|DisabledForeColor|'
- r'DisabledItemBackColor|DisabledItemForeColor|'
- r'DisabledPicture|DisableEncode|DisplayCount|'
- r'DisplayValue|Dockable|Docked|DockPosition|'
- r'DocumentFile|DownPicture|DragIcon|DragMode|DrawMode|'
- r'DrawStyle|DrawWidth|DynamicAlignment|DynamicBackColor|'
- r'DynamicForeColor|DynamicCurrentControl|DynamicFontBold|'
- r'DynamicFontItalic|DynamicFontStrikethru|'
- r'DynamicFontUnderline|DynamicFontName|DynamicFontOutline|'
- r'DynamicFontShadow|DynamicFontSize|DynamicInputMask|'
- r'DynamicLineHeight|EditorOptions|Enabled|'
- r'EnableHyperlinks|Encrypted|ErrorNo|Exclude|Exclusive|'
- r'FetchAsNeeded|FetchMemoCmdList|FetchMemoDataSourceType|'
- r'FetchMemoDataSource|FetchMemo|FetchSize|'
- r'FileClassLibrary|FileClass|FillColor|FillStyle|Filter|'
- r'FirstElement|FirstNestedTable|Flags|FontBold|FontItalic|'
- r'FontStrikethru|FontUnderline|FontCharSet|FontCondense|'
- r'FontExtend|FontName|FontOutline|FontShadow|FontSize|'
- r'ForceCloseTag|Format|FormCount|FormattedOutput|Forms|'
- r'FractionDigits|FRXDataSession|FullName|GDIPlusGraphics|'
- r'GridLineColor|GridLines|GridLineWidth|HalfHeightCaption|'
- r'HeaderClassLibrary|HeaderClass|HeaderHeight|Height|'
- r'HelpContextID|HideSelection|HighlightBackColor|'
- r'HighlightForeColor|HighlightStyle|HighlightRowLineWidth|'
- r'HighlightRow|Highlight|HomeDir|Hours|HostName|'
- r'HScrollSmallChange|hWnd|Icon|IncrementalSearch|Increment|'
- r'InitialSelectedAlias|InputMask|InsertCmdDataSourceType|'
- r'InsertCmdDataSource|InsertCmdRefreshCmd|'
- r'InsertCmdRefreshFieldList|InsertCmdRefreshKeyFieldList|'
- r'InsertCmd|Instancing|IntegralHeight|'
- r'Interval|IMEMode|IsAttribute|IsBase64|IsBinary|IsNull|'
- r'IsDiffGram|IsLoaded|ItemBackColor,|ItemData|ItemIDData|'
- r'ItemTips|IXMLDOMElement|KeyboardHighValue|KeyboardLowValue|'
- r'Keyfield|KeyFieldList|KeyPreview|KeySort|LanguageOptions|'
- r'LeftColumn|Left|LineContents|LineNo|LineSlant|LinkMaster|'
- r'ListCount|ListenerType|ListIndex|ListItemID|ListItem|'
- r'List|LockColumnsLeft|LockColumns|LockScreen|MacDesktop|'
- r'MainFile|MapN19_4ToCurrency|MapBinary|MapVarchar|Margin|'
- r'MaxButton|MaxHeight|MaxLeft|MaxLength|MaxRecords|MaxTop|'
- r'MaxWidth|MDIForm|MemberClassLibrary|MemberClass|'
- r'MemoWindow|Message|MinButton|MinHeight|MinWidth|'
- r'MouseIcon|MousePointer|Movable|MoverBars|MultiSelect|'
- r'Name|NestedInto|NewIndex|NewItemID|NextSiblingTable|'
- r'NoCpTrans|NoDataOnLoad|NoData|NullDisplay|'
- r'NumberOfElements|Object|OLEClass|OLEDragMode|'
- r'OLEDragPicture|OLEDropEffects|OLEDropHasData|'
- r'OLEDropMode|OLEDropTextInsertion|OLELCID|'
- r'OLERequestPendingTimeout|OLEServerBusyRaiseError|'
- r'OLEServerBusyTimeout|OLETypeAllowed|OneToMany|'
- r'OpenViews|OpenWindow|Optimize|OrderDirection|Order|'
- r'OutputPageCount|OutputType|PageCount|PageHeight|'
- r'PageNo|PageOrder|Pages|PageTotal|PageWidth|'
- r'PanelLink|Panel|ParentAlias|ParentClass|ParentTable|'
- r'Parent|Partition|PasswordChar|PictureMargin|'
- r'PicturePosition|PictureSpacing|PictureSelectionDisplay|'
- r'PictureVal|Picture|Prepared|'
- r'PolyPoints|PreserveWhiteSpace|PreviewContainer|'
- r'PrintJobName|Procedure|PROCESSID|ProgID|ProjectHookClass|'
- r'ProjectHookLibrary|ProjectHook|QuietMode|'
- r'ReadCycle|ReadLock|ReadMouse|ReadObject|ReadOnly|'
- r'ReadSave|ReadTimeout|RecordMark|RecordSourceType|'
- r'RecordSource|RefreshAlias|'
- r'RefreshCmdDataSourceType|RefreshCmdDataSource|RefreshCmd|'
- r'RefreshIgnoreFieldList|RefreshTimeStamp|RelationalExpr|'
- r'RelativeColumn|RelativeRow|ReleaseType|Resizable|'
- r'RespectCursorCP|RespectNesting|RightToLeft|RotateFlip|'
- r'Rotation|RowColChange|RowHeight|RowSourceType|'
- r'RowSource|ScaleMode|SCCProvider|SCCStatus|ScrollBars|'
- r'Seconds|SelectCmd|SelectedID|'
- r'SelectedItemBackColor|SelectedItemForeColor|Selected|'
- r'SelectionNamespaces|SelectOnEntry|SelLength|SelStart|'
- r'SelText|SendGDIPlusImage|SendUpdates|ServerClassLibrary|'
- r'ServerClass|ServerHelpFile|ServerName|'
- r'ServerProject|ShowTips|ShowInTaskbar|ShowWindow|'
- r'Sizable|SizeBox|SOM|Sorted|Sparse|SpecialEffect|'
- r'SpinnerHighValue|SpinnerLowValue|SplitBar|StackLevel|'
- r'StartMode|StatusBarText|StatusBar|Stretch|StrictDateEntry|'
- r'Style|TabIndex|Tables|TabOrientation|Tabs|TabStop|'
- r'TabStretch|TabStyle|Tag|TerminateRead|Text|Themes|'
- r'ThreadID|TimestampFieldList|TitleBar|ToolTipText|'
- r'TopIndex|TopItemID|Top|TwoPassProcess|TypeLibCLSID|'
- r'TypeLibDesc|TypeLibName|Type|Unicode|UpdatableFieldList|'
- r'UpdateCmdDataSourceType|UpdateCmdDataSource|'
- r'UpdateCmdRefreshCmd|UpdateCmdRefreshFieldList|'
- r'UpdateCmdRefreshKeyFieldList|UpdateCmd|'
- r'UpdateGramSchemaLocation|UpdateGram|UpdateNameList|UpdateType|'
- r'UseCodePage|UseCursorSchema|UseDeDataSource|UseMemoSize|'
- r'UserValue|UseTransactions|UTF8Encoded|Value|VersionComments|'
- r'VersionCompany|VersionCopyright|VersionDescription|'
- r'VersionNumber|VersionProduct|VersionTrademarks|Version|'
- r'VFPXMLProgID|ViewPortHeight|ViewPortLeft|'
- r'ViewPortTop|ViewPortWidth|VScrollSmallChange|View|Visible|'
- r'VisualEffect|WhatsThisButton|WhatsThisHelpID|WhatsThisHelp|'
- r'WhereType|Width|WindowList|WindowState|WindowType|WordWrap|'
- r'WrapCharInCDATA|WrapInCDATA|WrapMemoInCDATA|XMLAdapter|'
- r'XMLConstraints|XMLNameIsXPath|XMLNamespace|XMLName|'
- r'XMLPrefix|XMLSchemaLocation|XMLTable|XMLType|'
- r'XSDfractionDigits|XSDmaxLength|XSDtotalDigits|'
- r'XSDtype|ZoomBox)', Name.Attribute),
-
- (r'\.(ActivateCell|AddColumn|AddItem|AddListItem|AddObject|'
- r'AddProperty|AddTableSchema|AddToSCC|Add|'
- r'ApplyDiffgram|Attach|AutoFit|AutoOpen|Box|Build|'
- r'CancelReport|ChangesToCursor|CheckIn|CheckOut|Circle|'
- r'CleanUp|ClearData|ClearStatus|Clear|CloneObject|CloseTables|'
- r'Close|Cls|CursorAttach|CursorDetach|CursorFill|'
- r'CursorRefresh|DataToClip|DelayedMemoFetch|DeleteColumn|'
- r'Dock|DoMessage|DoScroll|DoStatus|DoVerb|Drag|Draw|Eval|'
- r'GetData|GetDockState|GetFormat|GetKey|GetLatestVersion|'
- r'GetPageHeight|GetPageWidth|Help|Hide|IncludePageInOutput|'
- r'IndexToItemID|ItemIDToIndex|Item|LoadXML|Line|Modify|'
- r'MoveItem|Move|Nest|OLEDrag|OnPreviewClose|OutputPage|'
- r'Point|Print|PSet|Quit|ReadExpression|ReadMethod|'
- r'RecordRefresh|Refresh|ReleaseXML|Release|RemoveFromSCC|'
- r'RemoveItem|RemoveListItem|RemoveObject|Remove|'
- r'Render|Requery|RequestData|ResetToDefault|Reset|Run|'
- r'SaveAsClass|SaveAs|SetAll|SetData|SetFocus|SetFormat|'
- r'SetMain|SetVar|SetViewPort|ShowWhatsThis|Show|'
- r'SupportsListenerType|TextHeight|TextWidth|ToCursor|'
- r'ToXML|UndoCheckOut|Unnest|UpdateStatus|WhatsThisMode|'
- r'WriteExpression|WriteMethod|ZOrder)', Name.Function),
-
- (r'\.(Activate|AdjustObjectSize|AfterBand|AfterBuild|'
- r'AfterCloseTables|AfterCursorAttach|AfterCursorClose|'
- r'AfterCursorDetach|AfterCursorFill|AfterCursorRefresh|'
- r'AfterCursorUpdate|AfterDelete|AfterInsert|'
- r'AfterRecordRefresh|AfterUpdate|AfterDock|AfterReport|'
- r'AfterRowColChange|BeforeBand|BeforeCursorAttach|'
- r'BeforeCursorClose|BeforeCursorDetach|BeforeCursorFill|'
- r'BeforeCursorRefresh|BeforeCursorUpdate|BeforeDelete|'
- r'BeforeInsert|BeforeDock|BeforeOpenTables|'
- r'BeforeRecordRefresh|BeforeReport|BeforeRowColChange|'
- r'BeforeUpdate|Click|dbc_Activate|dbc_AfterAddTable|'
- r'dbc_AfterAppendProc|dbc_AfterCloseTable|dbc_AfterCopyProc|'
- r'dbc_AfterCreateConnection|dbc_AfterCreateOffline|'
- r'dbc_AfterCreateTable|dbc_AfterCreateView|dbc_AfterDBGetProp|'
- r'dbc_AfterDBSetProp|dbc_AfterDeleteConnection|'
- r'dbc_AfterDropOffline|dbc_AfterDropTable|'
- r'dbc_AfterModifyConnection|dbc_AfterModifyProc|'
- r'dbc_AfterModifyTable|dbc_AfterModifyView|dbc_AfterOpenTable|'
- r'dbc_AfterRemoveTable|dbc_AfterRenameConnection|'
- r'dbc_AfterRenameTable|dbc_AfterRenameView|'
- r'dbc_AfterValidateData|dbc_BeforeAddTable|'
- r'dbc_BeforeAppendProc|dbc_BeforeCloseTable|'
- r'dbc_BeforeCopyProc|dbc_BeforeCreateConnection|'
- r'dbc_BeforeCreateOffline|dbc_BeforeCreateTable|'
- r'dbc_BeforeCreateView|dbc_BeforeDBGetProp|'
- r'dbc_BeforeDBSetProp|dbc_BeforeDeleteConnection|'
- r'dbc_BeforeDropOffline|dbc_BeforeDropTable|'
- r'dbc_BeforeModifyConnection|dbc_BeforeModifyProc|'
- r'dbc_BeforeModifyTable|dbc_BeforeModifyView|'
- r'dbc_BeforeOpenTable|dbc_BeforeRemoveTable|'
- r'dbc_BeforeRenameConnection|dbc_BeforeRenameTable|'
- r'dbc_BeforeRenameView|dbc_BeforeValidateData|'
- r'dbc_CloseData|dbc_Deactivate|dbc_ModifyData|dbc_OpenData|'
- r'dbc_PackData|DblClick|Deactivate|Deleted|Destroy|DoCmd|'
- r'DownClick|DragDrop|DragOver|DropDown|ErrorMessage|Error|'
- r'EvaluateContents|GotFocus|Init|InteractiveChange|KeyPress|'
- r'LoadReport|Load|LostFocus|Message|MiddleClick|MouseDown|'
- r'MouseEnter|MouseLeave|MouseMove|MouseUp|MouseWheel|Moved|'
- r'OLECompleteDrag|OLEDragOver|OLEGiveFeedback|OLESetData|'
- r'OLEStartDrag|OnMoveItem|Paint|ProgrammaticChange|'
- r'QueryAddFile|QueryModifyFile|QueryNewFile|QueryRemoveFile|'
- r'QueryRunFile|QueryUnload|RangeHigh|RangeLow|ReadActivate|'
- r'ReadDeactivate|ReadShow|ReadValid|ReadWhen|Resize|'
- r'RightClick|SCCInit|SCCDestroy|Scrolled|Timer|UIEnable|'
- r'UnDock|UnloadReport|Unload|UpClick|Valid|When)', Name.Function),
-
- (r'\s+', Text),
- # everything else is not colored
- (r'.', Text),
- ],
- 'newline': [
- (r'\*.*?$', Comment.Single, '#pop'),
- (r'(ACCEPT|ACTIVATE\s*MENU|ACTIVATE\s*POPUP|ACTIVATE\s*SCREEN|'
- r'ACTIVATE\s*WINDOW|APPEND|APPEND\s*FROM|APPEND\s*FROM\s*ARRAY|'
- r'APPEND\s*GENERAL|APPEND\s*MEMO|ASSIST|AVERAGE|BLANK|BROWSE|'
- r'BUILD\s*APP|BUILD\s*EXE|BUILD\s*PROJECT|CALCULATE|CALL|'
- r'CANCEL|CHANGE|CLEAR|CLOSE|CLOSE\s*MEMO|COMPILE|CONTINUE|'
- r'COPY\s*FILE|COPY\s*INDEXES|COPY\s*MEMO|COPY\s*STRUCTURE|'
- r'COPY\s*STRUCTURE\s*EXTENDED|COPY\s*TAG|COPY\s*TO|'
- r'COPY\s*TO\s*ARRAY|COUNT|CREATE|CREATE\s*COLOR\s*SET|'
- r'CREATE\s*CURSOR|CREATE\s*FROM|CREATE\s*LABEL|CREATE\s*MENU|'
- r'CREATE\s*PROJECT|CREATE\s*QUERY|CREATE\s*REPORT|'
- r'CREATE\s*SCREEN|CREATE\s*TABLE|CREATE\s*VIEW|DDE|'
- r'DEACTIVATE\s*MENU|DEACTIVATE\s*POPUP|DEACTIVATE\s*WINDOW|'
- r'DECLARE|DEFINE\s*BAR|DEFINE\s*BOX|DEFINE\s*MENU|'
- r'DEFINE\s*PAD|DEFINE\s*POPUP|DEFINE\s*WINDOW|DELETE|'
- r'DELETE\s*FILE|DELETE\s*TAG|DIMENSION|DIRECTORY|DISPLAY|'
- r'DISPLAY\s*FILES|DISPLAY\s*MEMORY|DISPLAY\s*STATUS|'
- r'DISPLAY\s*STRUCTURE|DO|EDIT|EJECT|EJECT\s*PAGE|ERASE|'
- r'EXIT|EXPORT|EXTERNAL|FILER|FIND|FLUSH|FUNCTION|GATHER|'
- r'GETEXPR|GO|GOTO|HELP|HIDE\s*MENU|HIDE\s*POPUP|'
- r'HIDE\s*WINDOW|IMPORT|INDEX|INPUT|INSERT|JOIN|KEYBOARD|'
- r'LABEL|LIST|LOAD|LOCATE|LOOP|MENU|MENU\s*TO|MODIFY\s*COMMAND|'
- r'MODIFY\s*FILE|MODIFY\s*GENERAL|MODIFY\s*LABEL|MODIFY\s*MEMO|'
- r'MODIFY\s*MENU|MODIFY\s*PROJECT|MODIFY\s*QUERY|'
- r'MODIFY\s*REPORT|MODIFY\s*SCREEN|MODIFY\s*STRUCTURE|'
- r'MODIFY\s*WINDOW|MOVE\s*POPUP|MOVE\s*WINDOW|NOTE|'
- r'ON\s*APLABOUT|ON\s*BAR|ON\s*ERROR|ON\s*ESCAPE|'
- r'ON\s*EXIT\s*BAR|ON\s*EXIT\s*MENU|ON\s*EXIT\s*PAD|'
- r'ON\s*EXIT\s*POPUP|ON\s*KEY|ON\s*KEY\s*=|ON\s*KEY\s*LABEL|'
- r'ON\s*MACHELP|ON\s*PAD|ON\s*PAGE|ON\s*READERROR|'
- r'ON\s*SELECTION\s*BAR|ON\s*SELECTION\s*MENU|'
- r'ON\s*SELECTION\s*PAD|ON\s*SELECTION\s*POPUP|ON\s*SHUTDOWN|'
- r'PACK|PARAMETERS|PLAY\s*MACRO|POP\s*KEY|POP\s*MENU|'
- r'POP\s*POPUP|PRIVATE|PROCEDURE|PUBLIC|PUSH\s*KEY|'
- r'PUSH\s*MENU|PUSH\s*POPUP|QUIT|READ|READ\s*MENU|RECALL|'
- r'REINDEX|RELEASE|RELEASE\s*MODULE|RENAME|REPLACE|'
- r'REPLACE\s*FROM\s*ARRAY|REPORT|RESTORE\s*FROM|'
- r'RESTORE\s*MACROS|RESTORE\s*SCREEN|RESTORE\s*WINDOW|'
- r'RESUME|RETRY|RETURN|RUN|RUN\s*\/N"|RUNSCRIPT|'
- r'SAVE\s*MACROS|SAVE\s*SCREEN|SAVE\s*TO|SAVE\s*WINDOWS|'
- r'SCATTER|SCROLL|SEEK|SELECT|SET|SET\s*ALTERNATE|'
- r'SET\s*ANSI|SET\s*APLABOUT|SET\s*AUTOSAVE|SET\s*BELL|'
- r'SET\s*BLINK|SET\s*BLOCKSIZE|SET\s*BORDER|SET\s*BRSTATUS|'
- r'SET\s*CARRY|SET\s*CENTURY|SET\s*CLEAR|SET\s*CLOCK|'
- r'SET\s*COLLATE|SET\s*COLOR\s*OF|SET\s*COLOR\s*OF\s*SCHEME|'
- r'SET\s*COLOR\s*SET|SET\s*COLOR\s*TO|SET\s*COMPATIBLE|'
- r'SET\s*CONFIRM|SET\s*CONSOLE|SET\s*CURRENCY|SET\s*CURSOR|'
- r'SET\s*DATE|SET\s*DEBUG|SET\s*DECIMALS|SET\s*DEFAULT|'
- r'SET\s*DELETED|SET\s*DELIMITERS|SET\s*DEVELOPMENT|'
- r'SET\s*DEVICE|SET\s*DISPLAY|SET\s*DOHISTORY|SET\s*ECHO|'
- r'SET\s*ESCAPE|SET\s*EXACT|SET\s*EXCLUSIVE|SET\s*FIELDS|'
- r'SET\s*FILTER|SET\s*FIXED|SET\s*FORMAT|SET\s*FULLPATH|'
- r'SET\s*FUNCTION|SET\s*HEADINGS|SET\s*HELP|SET\s*HELPFILTER|'
- r'SET\s*HOURS|SET\s*INDEX|SET\s*INTENSITY|SET\s*KEY|'
- r'SET\s*KEYCOMP|SET\s*LIBRARY|SET\s*LOCK|SET\s*LOGERRORS|'
- r'SET\s*MACDESKTOP|SET\s*MACHELP|SET\s*MACKEY|SET\s*MARGIN|'
- r'SET\s*MARK\s*OF|SET\s*MARK\s*TO|SET\s*MEMOWIDTH|'
- r'SET\s*MESSAGE|SET\s*MOUSE|SET\s*MULTILOCKS|SET\s*NEAR|'
- r'SET\s*NOCPTRANS|SET\s*NOTIFY|SET\s*ODOMETER|SET\s*OPTIMIZE|'
- r'SET\s*ORDER|SET\s*PALETTE|SET\s*PATH|SET\s*PDSETUP|'
- r'SET\s*POINT|SET\s*PRINTER|SET\s*PROCEDURE|SET\s*READBORDER|'
- r'SET\s*REFRESH|SET\s*RELATION|SET\s*RELATION\s*OFF|'
- r'SET\s*REPROCESS|SET\s*RESOURCE|SET\s*SAFETY|SET\s*SCOREBOARD|'
- r'SET\s*SEPARATOR|SET\s*SHADOWS|SET\s*SKIP|SET\s*SKIP\s*OF|'
- r'SET\s*SPACE|SET\s*STATUS|SET\s*STATUS\s*BAR|SET\s*STEP|'
- r'SET\s*STICKY|SET\s*SYSMENU|SET\s*TALK|SET\s*TEXTMERGE|'
- r'SET\s*TEXTMERGE\s*DELIMITERS|SET\s*TOPIC|SET\s*TRBETWEEN|'
- r'SET\s*TYPEAHEAD|SET\s*UDFPARMS|SET\s*UNIQUE|SET\s*VIEW|'
- r'SET\s*VOLUME|SET\s*WINDOW\s*OF\s*MEMO|SET\s*XCMDFILE|'
- r'SHOW\s*GET|SHOW\s*GETS|SHOW\s*MENU|SHOW\s*OBJECT|'
- r'SHOW\s*POPUP|SHOW\s*WINDOW|SIZE\s*POPUP|SKIP|SORT|'
- r'STORE|SUM|SUSPEND|TOTAL|TYPE|UNLOCK|UPDATE|USE|WAIT|'
- r'ZAP|ZOOM\s*WINDOW|DO\s*CASE|CASE|OTHERWISE|ENDCASE|'
- r'DO\s*WHILE|ENDDO|FOR|ENDFOR|NEXT|IF|ELSE|ENDIF|PRINTJOB|'
- r'ENDPRINTJOB|SCAN|ENDSCAN|TEXT|ENDTEXT|=)',
- Keyword.Reserved, '#pop'),
- (r'#\s*(IF|ELIF|ELSE|ENDIF|DEFINE|IFDEF|IFNDEF|INCLUDE)',
- Comment.Preproc, '#pop'),
- (r'(m\.)?[a-z_]\w*', Name.Variable, '#pop'),
- (r'.', Text, '#pop'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/freefem.py b/venv/lib/python3.11/site-packages/pygments/lexers/freefem.py
deleted file mode 100644
index eab6cbf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/freefem.py
+++ /dev/null
@@ -1,894 +0,0 @@
-"""
- pygments.lexers.freefem
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for FreeFem++ language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.token import Comment, Operator, Keyword, Name
-
-from pygments.lexers.c_cpp import CppLexer
-
-__all__ = ['FreeFemLexer']
-
-
-class FreeFemLexer(CppLexer):
- """
- For FreeFem++ source.
-
- This is an extension of the CppLexer, as the FreeFem++ language is a
- superset of C++.
-
- .. versionadded:: 2.4
- """
-
- name = 'Freefem'
- url = 'https://freefem.org/'
- aliases = ['freefem']
- filenames = ['*.edp']
- mimetypes = ['text/x-freefem']
-
- # Language operators
- operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''}
-
- # types
- types = {'bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline',
- 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm',
- 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap',
- 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'}
-
- # finite element spaces
- fespaces = {'BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT',
- 'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d',
- 'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge',
- 'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc',
- 'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho',
- 'RT2', 'RT2Ortho'}
-
- # preprocessor
- preprocessor = {'ENDIFMACRO', 'include', 'IFMACRO', 'load'}
-
- # Language keywords
- keywords = {
- 'adj',
- 'append',
- 'area',
- 'ARGV',
- 'be',
- 'binary',
- 'BoundaryEdge',
- 'bordermeasure',
- 'CG',
- 'Cholesky',
- 'cin',
- 'cout',
- 'Crout',
- 'default',
- 'diag',
- 'edgeOrientation',
- 'endl',
- 'false',
- 'ffind',
- 'FILE',
- 'find',
- 'fixed',
- 'flush',
- 'GMRES',
- 'good',
- 'hTriangle',
- 'im',
- 'imax',
- 'imin',
- 'InternalEdge',
- 'l1',
- 'l2',
- 'label',
- 'lenEdge',
- 'length',
- 'LINE',
- 'linfty',
- 'LU',
- 'm',
- 'max',
- 'measure',
- 'min',
- 'mpiAnySource',
- 'mpiBAND',
- 'mpiBXOR',
- 'mpiCommWorld',
- 'mpiLAND',
- 'mpiLOR',
- 'mpiLXOR',
- 'mpiMAX',
- 'mpiMIN',
- 'mpiPROD',
- 'mpirank',
- 'mpisize',
- 'mpiSUM',
- 'mpiUndefined',
- 'n',
- 'N',
- 'nbe',
- 'ndof',
- 'ndofK',
- 'noshowbase',
- 'noshowpos',
- 'notaregion',
- 'nt',
- 'nTonEdge',
- 'nuEdge',
- 'nuTriangle',
- 'nv',
- 'P',
- 'pi',
- 'precision',
- 'qf1pE',
- 'qf1pElump',
- 'qf1pT',
- 'qf1pTlump',
- 'qfV1',
- 'qfV1lump',
- 'qf2pE',
- 'qf2pT',
- 'qf2pT4P1',
- 'qfV2',
- 'qf3pE',
- 'qf4pE',
- 'qf5pE',
- 'qf5pT',
- 'qfV5',
- 'qf7pT',
- 'qf9pT',
- 'qfnbpE',
- 'quantile',
- 're',
- 'region',
- 'rfind',
- 'scientific',
- 'searchMethod',
- 'setw',
- 'showbase',
- 'showpos',
- 'sparsesolver',
- 'sum',
- 'tellp',
- 'true',
- 'UMFPACK',
- 'unused',
- 'whoinElement',
- 'verbosity',
- 'version',
- 'volume',
- 'x',
- 'y',
- 'z'
- }
-
- # Functions and classes shipped with the language
- functions = {
- 'abs',
- 'acos',
- 'acosh',
- 'adaptmesh',
- 'adj',
- 'AffineCG',
- 'AffineGMRES',
- 'arg',
- 'asin',
- 'asinh',
- 'assert',
- 'atan',
- 'atan2',
- 'atanh',
- 'atof',
- 'atoi',
- 'BFGS',
- 'broadcast',
- 'buildlayers',
- 'buildmesh',
- 'ceil',
- 'chi',
- 'complexEigenValue',
- 'copysign',
- 'change',
- 'checkmovemesh',
- 'clock',
- 'cmaes',
- 'conj',
- 'convect',
- 'cos',
- 'cosh',
- 'cube',
- 'd',
- 'dd',
- 'dfft',
- 'diffnp',
- 'diffpos',
- 'dimKrylov',
- 'dist',
- 'dumptable',
- 'dx',
- 'dxx',
- 'dxy',
- 'dxz',
- 'dy',
- 'dyx',
- 'dyy',
- 'dyz',
- 'dz',
- 'dzx',
- 'dzy',
- 'dzz',
- 'EigenValue',
- 'emptymesh',
- 'erf',
- 'erfc',
- 'exec',
- 'exit',
- 'exp',
- 'fdim',
- 'floor',
- 'fmax',
- 'fmin',
- 'fmod',
- 'freeyams',
- 'getARGV',
- 'getline',
- 'gmshload',
- 'gmshload3',
- 'gslcdfugaussianP',
- 'gslcdfugaussianQ',
- 'gslcdfugaussianPinv',
- 'gslcdfugaussianQinv',
- 'gslcdfgaussianP',
- 'gslcdfgaussianQ',
- 'gslcdfgaussianPinv',
- 'gslcdfgaussianQinv',
- 'gslcdfgammaP',
- 'gslcdfgammaQ',
- 'gslcdfgammaPinv',
- 'gslcdfgammaQinv',
- 'gslcdfcauchyP',
- 'gslcdfcauchyQ',
- 'gslcdfcauchyPinv',
- 'gslcdfcauchyQinv',
- 'gslcdflaplaceP',
- 'gslcdflaplaceQ',
- 'gslcdflaplacePinv',
- 'gslcdflaplaceQinv',
- 'gslcdfrayleighP',
- 'gslcdfrayleighQ',
- 'gslcdfrayleighPinv',
- 'gslcdfrayleighQinv',
- 'gslcdfchisqP',
- 'gslcdfchisqQ',
- 'gslcdfchisqPinv',
- 'gslcdfchisqQinv',
- 'gslcdfexponentialP',
- 'gslcdfexponentialQ',
- 'gslcdfexponentialPinv',
- 'gslcdfexponentialQinv',
- 'gslcdfexppowP',
- 'gslcdfexppowQ',
- 'gslcdftdistP',
- 'gslcdftdistQ',
- 'gslcdftdistPinv',
- 'gslcdftdistQinv',
- 'gslcdffdistP',
- 'gslcdffdistQ',
- 'gslcdffdistPinv',
- 'gslcdffdistQinv',
- 'gslcdfbetaP',
- 'gslcdfbetaQ',
- 'gslcdfbetaPinv',
- 'gslcdfbetaQinv',
- 'gslcdfflatP',
- 'gslcdfflatQ',
- 'gslcdfflatPinv',
- 'gslcdfflatQinv',
- 'gslcdflognormalP',
- 'gslcdflognormalQ',
- 'gslcdflognormalPinv',
- 'gslcdflognormalQinv',
- 'gslcdfgumbel1P',
- 'gslcdfgumbel1Q',
- 'gslcdfgumbel1Pinv',
- 'gslcdfgumbel1Qinv',
- 'gslcdfgumbel2P',
- 'gslcdfgumbel2Q',
- 'gslcdfgumbel2Pinv',
- 'gslcdfgumbel2Qinv',
- 'gslcdfweibullP',
- 'gslcdfweibullQ',
- 'gslcdfweibullPinv',
- 'gslcdfweibullQinv',
- 'gslcdfparetoP',
- 'gslcdfparetoQ',
- 'gslcdfparetoPinv',
- 'gslcdfparetoQinv',
- 'gslcdflogisticP',
- 'gslcdflogisticQ',
- 'gslcdflogisticPinv',
- 'gslcdflogisticQinv',
- 'gslcdfbinomialP',
- 'gslcdfbinomialQ',
- 'gslcdfpoissonP',
- 'gslcdfpoissonQ',
- 'gslcdfgeometricP',
- 'gslcdfgeometricQ',
- 'gslcdfnegativebinomialP',
- 'gslcdfnegativebinomialQ',
- 'gslcdfpascalP',
- 'gslcdfpascalQ',
- 'gslinterpakima',
- 'gslinterpakimaperiodic',
- 'gslinterpcsplineperiodic',
- 'gslinterpcspline',
- 'gslinterpsteffen',
- 'gslinterplinear',
- 'gslinterppolynomial',
- 'gslranbernoullipdf',
- 'gslranbeta',
- 'gslranbetapdf',
- 'gslranbinomialpdf',
- 'gslranexponential',
- 'gslranexponentialpdf',
- 'gslranexppow',
- 'gslranexppowpdf',
- 'gslrancauchy',
- 'gslrancauchypdf',
- 'gslranchisq',
- 'gslranchisqpdf',
- 'gslranerlang',
- 'gslranerlangpdf',
- 'gslranfdist',
- 'gslranfdistpdf',
- 'gslranflat',
- 'gslranflatpdf',
- 'gslrangamma',
- 'gslrangammaint',
- 'gslrangammapdf',
- 'gslrangammamt',
- 'gslrangammaknuth',
- 'gslrangaussian',
- 'gslrangaussianratiomethod',
- 'gslrangaussianziggurat',
- 'gslrangaussianpdf',
- 'gslranugaussian',
- 'gslranugaussianratiomethod',
- 'gslranugaussianpdf',
- 'gslrangaussiantail',
- 'gslrangaussiantailpdf',
- 'gslranugaussiantail',
- 'gslranugaussiantailpdf',
- 'gslranlandau',
- 'gslranlandaupdf',
- 'gslrangeometricpdf',
- 'gslrangumbel1',
- 'gslrangumbel1pdf',
- 'gslrangumbel2',
- 'gslrangumbel2pdf',
- 'gslranlogistic',
- 'gslranlogisticpdf',
- 'gslranlognormal',
- 'gslranlognormalpdf',
- 'gslranlogarithmicpdf',
- 'gslrannegativebinomialpdf',
- 'gslranpascalpdf',
- 'gslranpareto',
- 'gslranparetopdf',
- 'gslranpoissonpdf',
- 'gslranrayleigh',
- 'gslranrayleighpdf',
- 'gslranrayleightail',
- 'gslranrayleightailpdf',
- 'gslrantdist',
- 'gslrantdistpdf',
- 'gslranlaplace',
- 'gslranlaplacepdf',
- 'gslranlevy',
- 'gslranweibull',
- 'gslranweibullpdf',
- 'gslsfairyAi',
- 'gslsfairyBi',
- 'gslsfairyAiscaled',
- 'gslsfairyBiscaled',
- 'gslsfairyAideriv',
- 'gslsfairyBideriv',
- 'gslsfairyAiderivscaled',
- 'gslsfairyBiderivscaled',
- 'gslsfairyzeroAi',
- 'gslsfairyzeroBi',
- 'gslsfairyzeroAideriv',
- 'gslsfairyzeroBideriv',
- 'gslsfbesselJ0',
- 'gslsfbesselJ1',
- 'gslsfbesselJn',
- 'gslsfbesselY0',
- 'gslsfbesselY1',
- 'gslsfbesselYn',
- 'gslsfbesselI0',
- 'gslsfbesselI1',
- 'gslsfbesselIn',
- 'gslsfbesselI0scaled',
- 'gslsfbesselI1scaled',
- 'gslsfbesselInscaled',
- 'gslsfbesselK0',
- 'gslsfbesselK1',
- 'gslsfbesselKn',
- 'gslsfbesselK0scaled',
- 'gslsfbesselK1scaled',
- 'gslsfbesselKnscaled',
- 'gslsfbesselj0',
- 'gslsfbesselj1',
- 'gslsfbesselj2',
- 'gslsfbesseljl',
- 'gslsfbessely0',
- 'gslsfbessely1',
- 'gslsfbessely2',
- 'gslsfbesselyl',
- 'gslsfbesseli0scaled',
- 'gslsfbesseli1scaled',
- 'gslsfbesseli2scaled',
- 'gslsfbesselilscaled',
- 'gslsfbesselk0scaled',
- 'gslsfbesselk1scaled',
- 'gslsfbesselk2scaled',
- 'gslsfbesselklscaled',
- 'gslsfbesselJnu',
- 'gslsfbesselYnu',
- 'gslsfbesselInuscaled',
- 'gslsfbesselInu',
- 'gslsfbesselKnuscaled',
- 'gslsfbesselKnu',
- 'gslsfbessellnKnu',
- 'gslsfbesselzeroJ0',
- 'gslsfbesselzeroJ1',
- 'gslsfbesselzeroJnu',
- 'gslsfclausen',
- 'gslsfhydrogenicR1',
- 'gslsfdawson',
- 'gslsfdebye1',
- 'gslsfdebye2',
- 'gslsfdebye3',
- 'gslsfdebye4',
- 'gslsfdebye5',
- 'gslsfdebye6',
- 'gslsfdilog',
- 'gslsfmultiply',
- 'gslsfellintKcomp',
- 'gslsfellintEcomp',
- 'gslsfellintPcomp',
- 'gslsfellintDcomp',
- 'gslsfellintF',
- 'gslsfellintE',
- 'gslsfellintRC',
- 'gslsferfc',
- 'gslsflogerfc',
- 'gslsferf',
- 'gslsferfZ',
- 'gslsferfQ',
- 'gslsfhazard',
- 'gslsfexp',
- 'gslsfexpmult',
- 'gslsfexpm1',
- 'gslsfexprel',
- 'gslsfexprel2',
- 'gslsfexpreln',
- 'gslsfexpintE1',
- 'gslsfexpintE2',
- 'gslsfexpintEn',
- 'gslsfexpintE1scaled',
- 'gslsfexpintE2scaled',
- 'gslsfexpintEnscaled',
- 'gslsfexpintEi',
- 'gslsfexpintEiscaled',
- 'gslsfShi',
- 'gslsfChi',
- 'gslsfexpint3',
- 'gslsfSi',
- 'gslsfCi',
- 'gslsfatanint',
- 'gslsffermidiracm1',
- 'gslsffermidirac0',
- 'gslsffermidirac1',
- 'gslsffermidirac2',
- 'gslsffermidiracint',
- 'gslsffermidiracmhalf',
- 'gslsffermidirachalf',
- 'gslsffermidirac3half',
- 'gslsffermidiracinc0',
- 'gslsflngamma',
- 'gslsfgamma',
- 'gslsfgammastar',
- 'gslsfgammainv',
- 'gslsftaylorcoeff',
- 'gslsffact',
- 'gslsfdoublefact',
- 'gslsflnfact',
- 'gslsflndoublefact',
- 'gslsflnchoose',
- 'gslsfchoose',
- 'gslsflnpoch',
- 'gslsfpoch',
- 'gslsfpochrel',
- 'gslsfgammaincQ',
- 'gslsfgammaincP',
- 'gslsfgammainc',
- 'gslsflnbeta',
- 'gslsfbeta',
- 'gslsfbetainc',
- 'gslsfgegenpoly1',
- 'gslsfgegenpoly2',
- 'gslsfgegenpoly3',
- 'gslsfgegenpolyn',
- 'gslsfhyperg0F1',
- 'gslsfhyperg1F1int',
- 'gslsfhyperg1F1',
- 'gslsfhypergUint',
- 'gslsfhypergU',
- 'gslsfhyperg2F0',
- 'gslsflaguerre1',
- 'gslsflaguerre2',
- 'gslsflaguerre3',
- 'gslsflaguerren',
- 'gslsflambertW0',
- 'gslsflambertWm1',
- 'gslsflegendrePl',
- 'gslsflegendreP1',
- 'gslsflegendreP2',
- 'gslsflegendreP3',
- 'gslsflegendreQ0',
- 'gslsflegendreQ1',
- 'gslsflegendreQl',
- 'gslsflegendrePlm',
- 'gslsflegendresphPlm',
- 'gslsflegendrearraysize',
- 'gslsfconicalPhalf',
- 'gslsfconicalPmhalf',
- 'gslsfconicalP0',
- 'gslsfconicalP1',
- 'gslsfconicalPsphreg',
- 'gslsfconicalPcylreg',
- 'gslsflegendreH3d0',
- 'gslsflegendreH3d1',
- 'gslsflegendreH3d',
- 'gslsflog',
- 'gslsflogabs',
- 'gslsflog1plusx',
- 'gslsflog1plusxmx',
- 'gslsfpowint',
- 'gslsfpsiint',
- 'gslsfpsi',
- 'gslsfpsi1piy',
- 'gslsfpsi1int',
- 'gslsfpsi1',
- 'gslsfpsin',
- 'gslsfsynchrotron1',
- 'gslsfsynchrotron2',
- 'gslsftransport2',
- 'gslsftransport3',
- 'gslsftransport4',
- 'gslsftransport5',
- 'gslsfsin',
- 'gslsfcos',
- 'gslsfhypot',
- 'gslsfsinc',
- 'gslsflnsinh',
- 'gslsflncosh',
- 'gslsfanglerestrictsymm',
- 'gslsfanglerestrictpos',
- 'gslsfzetaint',
- 'gslsfzeta',
- 'gslsfzetam1',
- 'gslsfzetam1int',
- 'gslsfhzeta',
- 'gslsfetaint',
- 'gslsfeta',
- 'imag',
- 'int1d',
- 'int2d',
- 'int3d',
- 'intalledges',
- 'intallfaces',
- 'interpolate',
- 'invdiff',
- 'invdiffnp',
- 'invdiffpos',
- 'Isend',
- 'isInf',
- 'isNaN',
- 'isoline',
- 'Irecv',
- 'j0',
- 'j1',
- 'jn',
- 'jump',
- 'lgamma',
- 'LinearCG',
- 'LinearGMRES',
- 'log',
- 'log10',
- 'lrint',
- 'lround',
- 'max',
- 'mean',
- 'medit',
- 'min',
- 'mmg3d',
- 'movemesh',
- 'movemesh23',
- 'mpiAlltoall',
- 'mpiAlltoallv',
- 'mpiAllgather',
- 'mpiAllgatherv',
- 'mpiAllReduce',
- 'mpiBarrier',
- 'mpiGather',
- 'mpiGatherv',
- 'mpiRank',
- 'mpiReduce',
- 'mpiScatter',
- 'mpiScatterv',
- 'mpiSize',
- 'mpiWait',
- 'mpiWaitAny',
- 'mpiWtick',
- 'mpiWtime',
- 'mshmet',
- 'NaN',
- 'NLCG',
- 'on',
- 'plot',
- 'polar',
- 'Post',
- 'pow',
- 'processor',
- 'processorblock',
- 'projection',
- 'randinit',
- 'randint31',
- 'randint32',
- 'random',
- 'randreal1',
- 'randreal2',
- 'randreal3',
- 'randres53',
- 'Read',
- 'readmesh',
- 'readmesh3',
- 'Recv',
- 'rint',
- 'round',
- 'savemesh',
- 'savesol',
- 'savevtk',
- 'seekg',
- 'Sent',
- 'set',
- 'sign',
- 'signbit',
- 'sin',
- 'sinh',
- 'sort',
- 'splitComm',
- 'splitmesh',
- 'sqrt',
- 'square',
- 'srandom',
- 'srandomdev',
- 'Stringification',
- 'swap',
- 'system',
- 'tan',
- 'tanh',
- 'tellg',
- 'tetg',
- 'tetgconvexhull',
- 'tetgreconstruction',
- 'tetgtransfo',
- 'tgamma',
- 'triangulate',
- 'trunc',
- 'Wait',
- 'Write',
- 'y0',
- 'y1',
- 'yn'
- }
-
- # function parameters
- parameters = {
- 'A',
- 'A1',
- 'abserror',
- 'absolute',
- 'aniso',
- 'aspectratio',
- 'B',
- 'B1',
- 'bb',
- 'beginend',
- 'bin',
- 'boundary',
- 'bw',
- 'close',
- 'cmm',
- 'coef',
- 'composante',
- 'cutoff',
- 'datafilename',
- 'dataname',
- 'dim',
- 'distmax',
- 'displacement',
- 'doptions',
- 'dparams',
- 'eps',
- 'err',
- 'errg',
- 'facemerge',
- 'facetcl',
- 'factorize',
- 'file',
- 'fill',
- 'fixedborder',
- 'flabel',
- 'flags',
- 'floatmesh',
- 'floatsol',
- 'fregion',
- 'gradation',
- 'grey',
- 'hmax',
- 'hmin',
- 'holelist',
- 'hsv',
- 'init',
- 'inquire',
- 'inside',
- 'IsMetric',
- 'iso',
- 'ivalue',
- 'keepbackvertices',
- 'label',
- 'labeldown',
- 'labelmid',
- 'labelup',
- 'levelset',
- 'loptions',
- 'lparams',
- 'maxit',
- 'maxsubdiv',
- 'meditff',
- 'mem',
- 'memory',
- 'metric',
- 'mode',
- 'nbarrow',
- 'nbiso',
- 'nbiter',
- 'nbjacoby',
- 'nboffacetcl',
- 'nbofholes',
- 'nbofregions',
- 'nbregul',
- 'nbsmooth',
- 'nbvx',
- 'ncv',
- 'nev',
- 'nomeshgeneration',
- 'normalization',
- 'omega',
- 'op',
- 'optimize',
- 'option',
- 'options',
- 'order',
- 'orientation',
- 'periodic',
- 'power',
- 'precon',
- 'prev',
- 'ps',
- 'ptmerge',
- 'qfe',
- 'qforder',
- 'qft',
- 'qfV',
- 'ratio',
- 'rawvector',
- 'reffacelow',
- 'reffacemid',
- 'reffaceup',
- 'refnum',
- 'reftet',
- 'reftri',
- 'region',
- 'regionlist',
- 'renumv',
- 'rescaling',
- 'ridgeangle',
- 'save',
- 'sigma',
- 'sizeofvolume',
- 'smoothing',
- 'solver',
- 'sparams',
- 'split',
- 'splitin2',
- 'splitpbedge',
- 'stop',
- 'strategy',
- 'swap',
- 'switch',
- 'sym',
- 't',
- 'tgv',
- 'thetamax',
- 'tol',
- 'tolpivot',
- 'tolpivotsym',
- 'transfo',
- 'U2Vc',
- 'value',
- 'varrow',
- 'vector',
- 'veps',
- 'viso',
- 'wait',
- 'width',
- 'withsurfacemesh',
- 'WindowIndex',
- 'which',
- 'zbound'
- }
-
- # deprecated
- deprecated = {'fixeborder'}
-
- # do not highlight
- suppress_highlight = {
- 'alignof',
- 'asm',
- 'constexpr',
- 'decltype',
- 'div',
- 'double',
- 'grad',
- 'mutable',
- 'namespace',
- 'noexcept',
- 'restrict',
- 'static_assert',
- 'template',
- 'this',
- 'thread_local',
- 'typeid',
- 'typename',
- 'using'
- }
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- for index, token, value in CppLexer.get_tokens_unprocessed(self, text, stack):
- if value in self.operators:
- yield index, Operator, value
- elif value in self.types:
- yield index, Keyword.Type, value
- elif value in self.fespaces:
- yield index, Name.Class, value
- elif value in self.preprocessor:
- yield index, Comment.Preproc, value
- elif value in self.keywords:
- yield index, Keyword.Reserved, value
- elif value in self.functions:
- yield index, Name.Function, value
- elif value in self.parameters:
- yield index, Keyword.Pseudo, value
- elif value in self.suppress_highlight:
- yield index, Name, value
- else:
- yield index, token, value
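The get_tokens_unprocessed override above illustrates a common Pygments pattern: subclass an existing lexer and re-tag tokens purely by their text value. A self-contained sketch of the same pattern (MyDialectLexer and its word sets are hypothetical, not part of Pygments):

    from pygments.lexers.c_cpp import CppLexer
    from pygments.token import Keyword, Name

    class MyDialectLexer(CppLexer):          # hypothetical example lexer
        name = 'MyDialect'
        aliases = ['mydialect']

        extra_types = {'mesh', 'fespace'}    # assumed dialect-specific names
        extra_builtins = {'solve', 'plot'}

        def get_tokens_unprocessed(self, text, stack=('root',)):
            # Let the C++ lexer do the heavy lifting, then re-tag by value.
            for index, token, value in CppLexer.get_tokens_unprocessed(self, text, stack):
                if value in self.extra_types:
                    yield index, Keyword.Type, value
                elif value in self.extra_builtins:
                    yield index, Name.Builtin, value
                else:
                    yield index, token, value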
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/func.py b/venv/lib/python3.11/site-packages/pygments/lexers/func.py
deleted file mode 100644
index 871f040..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/func.py
+++ /dev/null
@@ -1,108 +0,0 @@
-"""
- pygments.lexers.func
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for FunC.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Whitespace, Punctuation
-
-__all__ = ['FuncLexer']
-
-
-class FuncLexer(RegexLexer):
- """
- For FunC source code.
- """
-
- name = 'FunC'
- aliases = ['func', 'fc']
- filenames = ['*.fc', '*.func']
-
- # 1. Does not start with "
- # 2. Can start with ` and end with `, containing any characters in between
- # 3. Starts with an underscore, { or }, and has at least one more character after it
- # 4. Starts with a letter and contains letters, numbers and underscores
- identifier = r'(?!")(`([^`]+)`|((?=_)_|(?=\{)\{|(?=\})\}|(?![_`{}]))([^;,\[\]\(\)\s~.]+))'
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
-
- include('keywords'),
- include('strings'),
- include('directives'),
- include('numeric'),
- include('comments'),
- include('storage'),
- include('functions'),
- include('variables'),
-
- (r'[.;(),\[\]~{}]', Punctuation)
- ],
- 'keywords': [
- (words((
- '<=>', '>=', '<=', '!=', '==', '^>>', '~>>',
- '>>', '<<', '/%', '^%', '~%', '^/', '~/', '+=',
- '-=', '*=', '/=', '~/=', '^/=', '%=', '^%=', '<<=',
- '>>=', '~>>=', '^>>=', '&=', '|=', '^=', '^', '=',
- '~', '/', '%', '-', '*', '+','>',
- '<', '&', '|', ':', '?'), prefix=r'(?<=\s)', suffix=r'(?=\s)'),
- Operator),
- (words((
- 'if', 'ifnot',
- 'else', 'elseif', 'elseifnot',
- 'while', 'do', 'until', 'repeat',
- 'return', 'impure', 'method_id',
- 'forall', 'asm', 'inline', 'inline_ref'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Constant),
- ],
- 'directives': [
- (r'#include|#pragma', Keyword, 'directive'),
- ],
- 'directive': [
- include('strings'),
- (r'\s+', Whitespace),
- (r'version|not-version', Keyword),
- (r'(>=|<=|=|>|<|\^)?([0-9]+)(.[0-9]+)?(.[0-9]+)?', Number), # version
- (r';', Text, '#pop')
- ],
- 'strings': [
- (r'\"([^\n\"]+)\"[Hhcusa]?', String),
- ],
- 'numeric': [
- (r'\b(-?(?!_)([\d_]+|0x[\d_a-fA-F]+)|0b[1_0]+)(?<!_)(?=[\s\)\],;])', Number)
- ],
- 'comments': [
- (r';;([^\n]*)', Comment.Singleline),
- (r'\{-', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^-}{]+', Comment.Multiline),
- (r'\{-', Comment.Multiline, '#push'),
- (r'-\}', Comment.Multiline, '#pop'),
- (r'[-}{]', Comment.Multiline),
- ],
- 'storage': [
- (words((
- 'var', 'int', 'slice', 'tuple',
- 'cell', 'builder', 'cont', '_'),
- prefix=r'\b', suffix=r'(?=[\s\(\),\[\]])'),
- Keyword.Type),
- (words(('global', 'const'), prefix=r'\b', suffix=r'\b'), Keyword.Constant),
- ],
- 'variables': [
- (identifier, Name.Variable),
- ],
- 'functions': [
- # identifier followed by (
- (identifier + r'(?=[\(])', Name.Function),
- ]
- }
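The permissive identifier pattern defined near the top of this lexer (backtick-quoted names, leading underscore or brace, otherwise almost any non-delimiter characters) is its most unusual piece. A quick sanity check of the regex on its own, outside Pygments:

    import re

    identifier = (r'(?!")(`([^`]+)`|((?=_)_|(?=\{)\{|(?=\})\}|(?![_`{}]))'
                  r'([^;,\[\]\(\)\s~.]+))')

    for candidate in ('`weird name`', 'get_balance', '_internal', '"not an id"'):
        print(candidate, bool(re.fullmatch(identifier, candidate)))
    # `weird name`, get_balance and _internal match; the quoted string does not.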
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/functional.py b/venv/lib/python3.11/site-packages/pygments/lexers/functional.py
deleted file mode 100644
index 6189dd2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/functional.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
- pygments.lexers.functional
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
- NewLispLexer, ShenLexer
-from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
- KokaLexer
-from pygments.lexers.theorem import CoqLexer
-from pygments.lexers.erlang import ErlangLexer, ErlangShellLexer, \
- ElixirConsoleLexer, ElixirLexer
-from pygments.lexers.ml import SMLLexer, OcamlLexer, OpaLexer
-
-__all__ = []
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/futhark.py b/venv/lib/python3.11/site-packages/pygments/lexers/futhark.py
deleted file mode 100644
index b0efa88..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/futhark.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""
- pygments.lexers.futhark
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Futhark language
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-from pygments import unistring as uni
-
-__all__ = ['FutharkLexer']
-
-
-class FutharkLexer(RegexLexer):
- """
- A Futhark lexer
-
- .. versionadded:: 2.8
- """
- name = 'Futhark'
- url = 'https://futhark-lang.org/'
- aliases = ['futhark']
- filenames = ['*.fut']
- mimetypes = ['text/x-futhark']
-
- num_types = ('i8', 'i16', 'i32', 'i64', 'u8', 'u16', 'u32', 'u64', 'f32', 'f64')
-
- other_types = ('bool', )
-
- reserved = ('if', 'then', 'else', 'def', 'let', 'loop', 'in', 'with',
- 'type', 'type~', 'type^',
- 'val', 'entry', 'for', 'while', 'do', 'case', 'match',
- 'include', 'import', 'module', 'open', 'local', 'assert', '_')
-
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- num_postfix = r'(%s)?' % '|'.join(num_types)
-
- identifier_re = '[a-zA-Z_][a-zA-Z_0-9\']*'
-
- # opstart_re = '+\-\*/%=\!><\|&\^'
-
- tokens = {
- 'root': [
- (r'--(.*?)$', Comment.Single),
- (r'\s+', Whitespace),
- (r'\(\)', Punctuation),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'\b(%s)(?!\')\b' % '|'.join(num_types + other_types), Keyword.Type),
-
- # Identifiers
- (r'#\[([a-zA-Z_\(\) ]*)\]', Comment.Preproc),
- (r'[#!]?(%s\.)*%s' % (identifier_re, identifier_re), Name),
-
- (r'\\', Operator),
- (r'[-+/%=!><|&*^][-+/%=!><|&*^.]*', Operator),
- (r'[][(),:;`{}?.\'~^]', Punctuation),
-
- # Numbers
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*' + num_postfix,
- Number.Float),
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
- r'(_*[pP][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
- (r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*' + num_postfix, Number.Float),
- (r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
- (r'0[bB]_*[01](_*[01])*' + num_postfix, Number.Bin),
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*' + num_postfix, Number.Hex),
- (r'\d(_*\d)*' + num_postfix, Number.Integer),
-
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[[a-zA-Z_\d]*\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- ],
- 'character': [
- # Allows multi-character literals, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
-
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/gcodelexer.py b/venv/lib/python3.11/site-packages/pygments/lexers/gcodelexer.py
deleted file mode 100644
index c3a4a58..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/gcodelexer.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""
- pygments.lexers.gcodelexer
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the G Code Language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Name, Text, Keyword, Number
-
-__all__ = ['GcodeLexer']
-
-
-class GcodeLexer(RegexLexer):
- """
- For gcode source code.
-
- .. versionadded:: 2.9
- """
- name = 'g-code'
- aliases = ['gcode']
- filenames = ['*.gcode']
-
- tokens = {
- 'root': [
- (r';.*\n', Comment),
- (r'^[gmGM]\d{1,4}\s', Name.Builtin), # M or G commands
- (r'([^gGmM])([+-]?\d*[.]?\d+)', bygroups(Keyword, Number)),
- (r'\s', Text.Whitespace),
- (r'.*\n', Text),
- ]
- }
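With only a handful of rules, the G-code lexer's behaviour is easiest to see from its raw token stream. A small sketch:

    from pygments.lexers import GcodeLexer

    line = "G1 X10.5 Y-3 F1500 ; linear move\n"
    for token, value in GcodeLexer().get_tokens(line):
        print(token, repr(value))
    # G1 comes out as Name.Builtin, each word/number pair as Keyword + Number,
    # and the trailing ';' comment as Comment.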
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/gdscript.py b/venv/lib/python3.11/site-packages/pygments/lexers/gdscript.py
deleted file mode 100644
index 0f4f6d4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/gdscript.py
+++ /dev/null
@@ -1,188 +0,0 @@
-"""
- pygments.lexers.gdscript
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for GDScript.
-
- Modified by Daniel J. Ramirez <djrmuv@gmail.com> based on the original
- python.py.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words, \
- combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ["GDScriptLexer"]
-
-
-class GDScriptLexer(RegexLexer):
- """
- For GDScript source code.
- """
-
- name = "GDScript"
- url = 'https://www.godotengine.org'
- aliases = ["gdscript", "gd"]
- filenames = ["*.gd"]
- mimetypes = ["text/x-gdscript", "application/x-gdscript"]
-
- def innerstring_rules(ttype):
- return [
- # the old style '%s' % (...) string formatting
- (r"%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?"
- "[hlL]?[E-GXc-giorsux%]",
- String.Interpol),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r"%", ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- "root": [
- (r"\n", Whitespace),
- (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
- bygroups(Whitespace, String.Affix, String.Doc)),
- (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
- bygroups(Whitespace, String.Affix, String.Doc)),
- (r"[^\S\n]+", Whitespace),
- (r"#.*$", Comment.Single),
- (r"[]{}:(),;[]", Punctuation),
- (r"(\\)(\n)", bygroups(Text, Whitespace)),
- (r"\\", Text),
- (r"(in|and|or|not)\b", Operator.Word),
- (r"!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]",
- Operator),
- include("keywords"),
- (r"(func)(\s+)", bygroups(Keyword, Whitespace), "funcname"),
- (r"(class)(\s+)", bygroups(Keyword, Whitespace), "classname"),
- include("builtins"),
- ('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
- bygroups(String.Affix, String.Double),
- "tdqs"),
- ("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
- bygroups(String.Affix, String.Single),
- "tsqs"),
- ('([rR]|[uUbB][rR]|[rR][uUbB])(")',
- bygroups(String.Affix, String.Double),
- "dqs"),
- ("([rR]|[uUbB][rR]|[rR][uUbB])(')",
- bygroups(String.Affix, String.Single),
- "sqs"),
- ('([uUbB]?)(""")',
- bygroups(String.Affix, String.Double),
- combined("stringescape", "tdqs")),
- ("([uUbB]?)(''')",
- bygroups(String.Affix, String.Single),
- combined("stringescape", "tsqs")),
- ('([uUbB]?)(")',
- bygroups(String.Affix, String.Double),
- combined("stringescape", "dqs")),
- ("([uUbB]?)(')",
- bygroups(String.Affix, String.Single),
- combined("stringescape", "sqs")),
- include("name"),
- include("numbers"),
- ],
- "keywords": [
- (words(("and", "in", "not", "or", "as", "breakpoint", "class",
- "class_name", "extends", "is", "func", "setget", "signal",
- "tool", "const", "enum", "export", "onready", "static",
- "var", "break", "continue", "if", "elif", "else", "for",
- "pass", "return", "match", "while", "remote", "master",
- "puppet", "remotesync", "mastersync", "puppetsync"),
- suffix=r"\b"), Keyword),
- ],
- "builtins": [
- (words(("Color8", "ColorN", "abs", "acos", "asin", "assert", "atan",
- "atan2", "bytes2var", "ceil", "char", "clamp", "convert",
- "cos", "cosh", "db2linear", "decimals", "dectime", "deg2rad",
- "dict2inst", "ease", "exp", "floor", "fmod", "fposmod",
- "funcref", "hash", "inst2dict", "instance_from_id", "is_inf",
- "is_nan", "lerp", "linear2db", "load", "log", "max", "min",
- "nearest_po2", "pow", "preload", "print", "print_stack",
- "printerr", "printraw", "prints", "printt", "rad2deg",
- "rand_range", "rand_seed", "randf", "randi", "randomize",
- "range", "round", "seed", "sign", "sin", "sinh", "sqrt",
- "stepify", "str", "str2var", "tan", "tan", "tanh",
- "type_exist", "typeof", "var2bytes", "var2str", "weakref",
- "yield"), prefix=r"(?<!\.)", suffix=r"\b"),
- Name.Builtin),
- (r"((?<!\.)(self|false|true)|(PI|TAU|NAN|INF)" r")\b",
- Name.Builtin.Pseudo),
- (words(("bool", "int", "float", "String", "NodePath", "Vector2",
- "Rect2", "Transform2D", "Vector3", "Rect3", "Plane", "Quat",
- "Basis", "Transform", "Color", "RID", "Object", "NodePath",
- "Dictionary", "Array", "PackedByteArray", "PackedInt32Array",
- "PackedInt64Array", "PackedFloat32Array", "PackedFloat64Array",
- "PackedStringArray", "PackedVector2Array", "PackedVector3Array",
- "PackedColorArray", "null", "void"),
- prefix=r"(?<!\.)", suffix=r"\b"),
- Name.Builtin.Type),
- ],
- "numbers": [
- (r"(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?", Number.Float),
- (r"\d+[eE][+-]?[0-9]+j?", Number.Float),
- (r"0[xX][a-fA-F0-9]+", Number.Hex),
- (r"\d+j?", Number.Integer),
- ],
- "name": [(r"[a-zA-Z_]\w*", Name)],
- "funcname": [(r"[a-zA-Z_]\w*", Name.Function, "#pop"), default("#pop")],
- "classname": [(r"[a-zA-Z_]\w*", Name.Class, "#pop")],
- "stringescape": [
- (
- r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r"U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})",
- String.Escape,
- )
- ],
- "strings-single": innerstring_rules(String.Single),
- "strings-double": innerstring_rules(String.Double),
- "dqs": [
- (r'"', String.Double, "#pop"),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include("strings-double"),
- ],
- "sqs": [
- (r"'", String.Single, "#pop"),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include("strings-single"),
- ],
- "tdqs": [
- (r'"""', String.Double, "#pop"),
- include("strings-double"),
- (r"\n", Whitespace),
- ],
- "tsqs": [
- (r"'''", String.Single, "#pop"),
- include("strings-single"),
- (r"\n", Whitespace),
- ],
- }
-
- def analyse_text(text):
- score = 0.0
-
- if re.search(
- r"func (_ready|_init|_input|_process|_unhandled_input)", text
- ):
- score += 0.8
-
- if re.search(
- r"(extends |class_name |onready |preload|load|setget|func [^_])",
- text
- ):
- score += 0.4
-
- if re.search(r"(var|const|enum|export|signal|tool)", text):
- score += 0.2
-
- return min(score, 1.0)
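analyse_text is the hook that pygments.lexers.guess_lexer consults when no filename is available; the scores above (0.8 for the lifecycle functions, 0.4 for extends/class_name and friends, 0.2 for common keywords) are capped at 1.0. A short sketch of the heuristic in use:

    from pygments.lexers import guess_lexer

    snippet = 'extends Node\n\nfunc _ready():\n    print("hello")\n'
    # Both the `func _ready` and `extends ` patterns fire, so the score
    # saturates at 1.0 and GDScript should win the guess.
    print(guess_lexer(snippet).name)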
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/go.py b/venv/lib/python3.11/site-packages/pygments/lexers/go.py
deleted file mode 100644
index fe4a184..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/go.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""
- pygments.lexers.go
- ~~~~~~~~~~~~~~~~~~
-
- Lexers for the Google Go language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['GoLexer']
-
-
-class GoLexer(RegexLexer):
- """
- For Go source.
-
- .. versionadded:: 1.2
- """
- name = 'Go'
- url = 'https://go.dev/'
- filenames = ['*.go']
- aliases = ['go', 'golang']
- mimetypes = ['text/x-gosrc']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuations
- (r'//(.*?)$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(import|package)\b', Keyword.Namespace),
- (r'(var|func|struct|map|chan|type|interface|const)\b',
- Keyword.Declaration),
- (words((
- 'break', 'default', 'select', 'case', 'defer', 'go',
- 'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
- 'continue', 'for', 'return'), suffix=r'\b'),
- Keyword),
- (r'(true|false|iota|nil)\b', Keyword.Constant),
- # It seems the builtin types aren't actually keywords, but
- # can be used as functions. So we need two declarations.
- (words((
- 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'int', 'int8', 'int16', 'int32', 'int64',
- 'float', 'float32', 'float64',
- 'complex64', 'complex128', 'byte', 'rune',
- 'string', 'bool', 'error', 'uintptr', 'any', 'comparable',
- 'print', 'println', 'panic', 'recover', 'close', 'complex',
- 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
- 'new', 'make', 'min', 'max', 'clear'), suffix=r'\b(\()'),
- bygroups(Name.Builtin, Punctuation)),
- (words((
- 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'int', 'int8', 'int16', 'int32', 'int64',
- 'float', 'float32', 'float64',
- 'complex64', 'complex128', 'byte', 'rune',
- 'string', 'bool', 'error', 'uintptr', 'any', 'comparable'), suffix=r'\b'),
- Keyword.Type),
- # imaginary_lit
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
- # float_lit
- (r'\d+(\.\d+[eE][+\-]?\d+|'
- r'\.\d*|[eE][+\-]?\d+)', Number.Float),
- (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
- # int_lit
- # -- octal_lit
- (r'0[0-7]+', Number.Oct),
- # -- hex_lit
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- decimal_lit
- (r'(0|[1-9][0-9]*)', Number.Integer),
- # char_lit
- (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
- String.Char),
- # StringLiteral
- # -- raw_string_lit
- (r'`[^`]*`', String),
- # -- interpreted_string_lit
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Tokens
- (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
- r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&]'
- r'|~|\|)', Operator),
- (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
- # identifier
- (r'[^\W\d]\w*', Name.Other),
- ]
- }
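As the comment in the 'root' state notes, Go's builtin type names are not keywords and can also be called like functions, so the lexer declares them twice: once, followed by an opening parenthesis, as Name.Builtin, and once as Keyword.Type. A minimal sketch of the resulting difference on a made-up one-liner:

from pygments.lexers.go import GoLexer
from pygments.token import Keyword, Name

tokens = list(GoLexer().get_tokens('var x int = int(y)'))

# The bare 'int' is a type...
assert (Keyword.Type, 'int') in tokens
# ...while 'int(' is matched by the builtin rule and split by bygroups().
assert (Name.Builtin, 'int') in tokens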
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/grammar_notation.py b/venv/lib/python3.11/site-packages/pygments/lexers/grammar_notation.py
deleted file mode 100644
index 7927133..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/grammar_notation.py
+++ /dev/null
@@ -1,265 +0,0 @@
-"""
- pygments.lexers.grammar_notation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for grammar notations like BNF.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, this, using, words
-from pygments.token import Comment, Keyword, Literal, Name, Number, \
- Operator, Punctuation, String, Text, Whitespace
-
-__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer', 'PegLexer']
-
-
-class BnfLexer(RegexLexer):
- """
- This lexer is for grammar notations that are similar to
- the original BNF.
-
- To cover as many target notations as possible, it makes a few
- deliberate design decisions:
-
- * Terminal symbols are not distinguished.
-
- * Nonterminal symbols are assumed to always be enclosed in
- angle brackets.
-
- * Nonterminal symbols may contain any printable character
- except angle brackets and ASCII 0x20 (space).
- This assumption is for `RBNF <http://www.rfc-base.org/txt/rfc-5511.txt>`_.
-
- * The target notation is assumed not to support comments.
-
- * No operators or punctuation are distinguished except
- `::=`.
-
- These decisions keep the highlighting minimal, but they make the
- lexer applicable to many BNF-like notations.
-
- .. versionadded:: 2.1
- """
-
- name = 'BNF'
- aliases = ['bnf']
- filenames = ['*.bnf']
- mimetypes = ['text/x-bnf']
-
- tokens = {
- 'root': [
- (r'(<)([ -;=?-~]+)(>)',
- bygroups(Punctuation, Name.Class, Punctuation)),
-
- # the only operator
- (r'::=', Operator),
-
- # fallback
- (r'[^<>:]+', Text), # for performance
- (r'.', Text),
- ],
- }
-
-
-class AbnfLexer(RegexLexer):
- """
- Lexer for IETF 7405 ABNF grammars
- (RFC 7405 updates `5234 <http://www.ietf.org/rfc/rfc5234.txt>`_).
-
- .. versionadded:: 2.1
- """
-
- name = 'ABNF'
- url = 'http://www.ietf.org/rfc/rfc7405.txt'
- aliases = ['abnf']
- filenames = ['*.abnf']
- mimetypes = ['text/x-abnf']
-
- _core_rules = (
- 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT',
- 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET',
- 'SP', 'VCHAR', 'WSP')
-
- tokens = {
- 'root': [
- # comment
- (r';.*$', Comment.Single),
-
- # quoted
- # a double quote cannot appear literally here; it is written as '%x22'.
- (r'(%[si])?"[^"]*"', Literal),
-
- # binary (rarely seen in practice)
- (r'%b[01]+\-[01]+\b', Literal), # range
- (r'%b[01]+(\.[01]+)*\b', Literal), # concat
-
- # decimal
- (r'%d[0-9]+\-[0-9]+\b', Literal), # range
- (r'%d[0-9]+(\.[0-9]+)*\b', Literal), # concat
-
- # hexadecimal
- (r'%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b', Literal), # range
- (r'%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b', Literal), # concat
-
- # repetition (<a>*<b>element) including nRule
- (r'\b[0-9]+\*[0-9]+', Operator),
- (r'\b[0-9]+\*', Operator),
- (r'\b[0-9]+', Operator),
- (r'\*', Operator),
-
- # Strictly speaking, these are not keywords but
- # are called `Core Rules'.
- (words(_core_rules, suffix=r'\b'), Keyword),
-
- # nonterminals (ALPHA *(ALPHA / DIGIT / "-"))
- (r'[a-zA-Z][a-zA-Z0-9-]*\b', Name.Class),
-
- # operators
- (r'(=/|=|/)', Operator),
-
- # punctuation
- (r'[\[\]()]', Punctuation),
-
- # fallback
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- }
-
-
-class JsgfLexer(RegexLexer):
- """
- For JSpeech Grammar Format grammars.
-
- .. versionadded:: 2.2
- """
- name = 'JSGF'
- url = 'https://www.w3.org/TR/jsgf/'
- aliases = ['jsgf']
- filenames = ['*.jsgf']
- mimetypes = ['application/jsgf', 'application/x-jsgf', 'text/jsgf']
-
- tokens = {
- 'root': [
- include('comments'),
- include('non-comments'),
- ],
- 'comments': [
- (r'/\*\*(?!/)', Comment.Multiline, 'documentation comment'),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'//.*$', Comment.Single),
- ],
- 'non-comments': [
- (r'\A#JSGF[^;]*', Comment.Preproc),
- (r'\s+', Whitespace),
- (r';', Punctuation),
- (r'[=|()\[\]*+]', Operator),
- (r'/[^/]+/', Number.Float),
- (r'"', String.Double, 'string'),
- (r'\{', String.Other, 'tag'),
- (words(('import', 'public'), suffix=r'\b'), Keyword.Reserved),
- (r'grammar\b', Keyword.Reserved, 'grammar name'),
- (r'(<)(NULL|VOID)(>)',
- bygroups(Punctuation, Name.Builtin, Punctuation)),
- (r'<', Punctuation, 'rulename'),
- (r'\w+|[^\s;=|()\[\]*+/"{<\w]+', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- (r'[^\\"]+', String.Double),
- ],
- 'tag': [
- (r'\}', String.Other, '#pop'),
- (r'\\.', String.Escape),
- (r'[^\\}]+', String.Other),
- ],
- 'grammar name': [
- (r';', Punctuation, '#pop'),
- (r'\s+', Whitespace),
- (r'\.', Punctuation),
- (r'[^;\s.]+', Name.Namespace),
- ],
- 'rulename': [
- (r'>', Punctuation, '#pop'),
- (r'\*', Punctuation),
- (r'\s+', Whitespace),
- (r'([^.>]+)(\s*)(\.)', bygroups(Name.Namespace, Text, Punctuation)),
- (r'[^.>]+', Name.Constant),
- ],
- 'documentation comment': [
- (r'\*/', Comment.Multiline, '#pop'),
- (r'^(\s*)(\*?)(\s*)(@(?:example|see))(\s+)'
- r'([\w\W]*?(?=(?:^\s*\*?\s*@|\*/)))',
- bygroups(Whitespace, Comment.Multiline, Whitespace, Comment.Special,
- Whitespace, using(this, state='example'))),
- (r'(^\s*\*?\s*)(@\S*)',
- bygroups(Comment.Multiline, Comment.Special)),
- (r'[^*\n@]+|\w|\W', Comment.Multiline),
- ],
- 'example': [
- (r'(\n\s*)(\*)', bygroups(Whitespace, Comment.Multiline)),
- include('non-comments'),
- (r'.', Comment.Multiline),
- ],
- }
-
-
-class PegLexer(RegexLexer):
- """
- This lexer is for Parsing Expression Grammars (PEG).
-
- Various implementations of PEG have made different decisions
- regarding the syntax, so let's try to be accommodating:
-
- * `<-`, `←`, `:`, and `=` are all accepted as rule operators.
-
- * Both `|` and `/` are choice operators.
-
- * `^`, `↑`, and `~` are cut operators.
-
- * A single `a-z` character immediately before a string, or
- multiple `a-z` characters following a string, are part of the
- string (e.g., `r"..."` or `"..."ilmsuxa`).
-
- .. versionadded:: 2.6
- """
-
- name = 'PEG'
- url = 'https://bford.info/pub/lang/peg.pdf'
- aliases = ['peg']
- filenames = ['*.peg']
- mimetypes = ['text/x-peg']
-
- tokens = {
- 'root': [
- # Comments
- (r'#.*$', Comment.Single),
-
- # All operators
- (r'<-|[←:=/|&!?*+^↑~]', Operator),
-
- # Other punctuation
- (r'[()]', Punctuation),
-
- # Keywords
- (r'\.', Keyword),
-
- # Character classes
- (r'(\[)([^\]]*(?:\\.[^\]\\]*)*)(\])',
- bygroups(Punctuation, String, Punctuation)),
-
- # Single and double quoted strings (with optional modifiers)
- (r'[a-z]?"[^"\\]*(?:\\.[^"\\]*)*"[a-z]*', String.Double),
- (r"[a-z]?'[^'\\]*(?:\\.[^'\\]*)*'[a-z]*", String.Single),
-
- # Nonterminals are not whitespace, operators, or punctuation
- (r'[^\s<←:=/|&!?*+\^↑~()\[\]"\'#]+', Name.Class),
-
- # Fallback
- (r'.', Text),
- ],
- }
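The PegLexer docstring above lists the syntax variants it tries to accommodate. A minimal sketch (with a made-up grammar fragment) showing that the alternative operators all come out as Operator tokens while nonterminals become Name.Class:

from pygments.lexers.grammar_notation import PegLexer
from pygments.token import Operator, Name

grammar = "Sum <- Product (('+' / '-') Product)*"
tokens = list(PegLexer().get_tokens(grammar))

assert (Operator, '<-') in tokens      # rule operator
assert (Operator, '/') in tokens       # choice operator
assert (Name.Class, 'Sum') in tokens   # nonterminal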
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/graph.py b/venv/lib/python3.11/site-packages/pygments/lexers/graph.py
deleted file mode 100644
index 753df36..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/graph.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""
- pygments.lexers.graph
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for graph query languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, words
-from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
- String, Number, Whitespace
-
-
-__all__ = ['CypherLexer']
-
-
-class CypherLexer(RegexLexer):
- """
- For the Cypher Query Language
-
- (targets the Cypher version in Neo4j 3.3).
-
- .. versionadded:: 2.0
- """
- name = 'Cypher'
- url = 'https://neo4j.com/docs/developer-manual/3.3/cypher/'
- aliases = ['cypher']
- filenames = ['*.cyp', '*.cypher']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- include('clauses'),
- include('keywords'),
- include('relations'),
- include('strings'),
- include('whitespace'),
- include('barewords'),
- include('comment'),
- ],
- 'keywords': [
- (r'(create|order|match|limit|set|skip|start|return|with|where|'
- r'delete|foreach|not|by|true|false)\b', Keyword),
- ],
- 'clauses': [
- # based on https://neo4j.com/docs/cypher-refcard/3.3/
- (r'(create)(\s+)(index|unique)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(drop)(\s+)(constraint|index)(\s+)(on)\b',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(ends)(\s+)(with)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(is)(\s+)(node)(\s+)(key)\b',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(is)(\s+)(null|unique)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(load)(\s+)(csv)(\s+)(from)\b',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(on)(\s+)(match|create)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(optional)(\s+)(match)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(order)(\s+)(by)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(starts)(\s+)(with)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(union)(\s+)(all)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(using)(\s+)(periodic)(\s+)(commit)\b',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(using)(\s+)(index)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(using)(\s+)(range|text|point)(\s+)(index)\b',
- bygroups(Keyword, Whitespace, Name, Whitespace, Keyword)),
- (words((
- 'all', 'any', 'as', 'asc', 'ascending', 'assert', 'call', 'case', 'create',
- 'delete', 'desc', 'descending', 'distinct', 'end', 'fieldterminator',
- 'foreach', 'in', 'limit', 'match', 'merge', 'none', 'not', 'null',
- 'remove', 'return', 'set', 'skip', 'single', 'start', 'then', 'union',
- 'unwind', 'yield', 'where', 'when', 'with', 'collect'), suffix=r'\b'), Keyword),
- ],
- 'relations': [
- (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
- (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
- (r'(-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
- (r'-->|<--|\[|\]', Operator),
- (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
- (r'[.*{}]', Punctuation),
- ],
- 'strings': [
- (r'([\'"])(?:\\[tbnrf\'"\\]|[^\\])*?\1', String),
- (r'`(?:``|[^`])+`', Name.Variable),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'barewords': [
- (r'[a-z]\w*', Name),
- (r'\d+', Number),
- ],
- 'comment': [
- (r'//.*$', Comment.Single),
- ],
- }
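The 'relations' state above captures everything between the brackets of a relationship pattern and re-lexes it with using(this), so names and labels inside -[...]-> get the same treatment as the rest of the query. A minimal sketch with a made-up query:

from pygments.lexers.graph import CypherLexer
from pygments.token import Operator, Name

query = "MATCH (a)-[r:KNOWS]->(b) RETURN a, b"
tokens = list(CypherLexer().get_tokens(query))

assert (Operator, '-[') in tokens    # start of the relationship pattern
assert (Operator, ']->') in tokens   # end of the relationship pattern
assert (Name, 'r') in tokens         # re-lexed inside the brackets via using(this)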
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/graphics.py b/venv/lib/python3.11/site-packages/pygments/lexers/graphics.py
deleted file mode 100644
index 8c112f5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/graphics.py
+++ /dev/null
@@ -1,798 +0,0 @@
-"""
- pygments.lexers.graphics
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for computer graphics and plotting related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups, using, \
- this, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
- Number, Punctuation, String, Whitespace
-
-__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
- 'PovrayLexer', 'HLSLShaderLexer']
-
-
-class GLShaderLexer(RegexLexer):
- """
- GLSL (OpenGL Shader) lexer.
-
- .. versionadded:: 1.1
- """
- name = 'GLSL'
- aliases = ['glsl']
- filenames = ['*.vert', '*.frag', '*.geo']
- mimetypes = ['text/x-glslsrc']
-
- tokens = {
- 'root': [
- (r'#(?:.*\\\n)*.*$', Comment.Preproc),
- (r'//.*$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
- Operator),
- (r'[?:]', Operator), # quick hack for ternary
- (r'\bdefined\b', Operator),
- (r'[;{}(),\[\]]', Punctuation),
- # FIXME when e is present, no decimal point needed
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]*', Number.Hex),
- (r'0[0-7]*', Number.Oct),
- (r'[1-9][0-9]*', Number.Integer),
- (words((
- # Storage qualifiers
- 'attribute', 'const', 'uniform', 'varying',
- 'buffer', 'shared', 'in', 'out',
- # Layout qualifiers
- 'layout',
- # Interpolation qualifiers
- 'flat', 'smooth', 'noperspective',
- # Auxiliary qualifiers
- 'centroid', 'sample', 'patch',
- # Parameter qualifiers. Some double as Storage qualifiers
- 'inout',
- # Precision qualifiers
- 'lowp', 'mediump', 'highp', 'precision',
- # Invariance qualifiers
- 'invariant',
- # Precise qualifiers
- 'precise',
- # Memory qualifiers
- 'coherent', 'volatile', 'restrict', 'readonly', 'writeonly',
- # Statements
- 'break', 'continue', 'do', 'for', 'while', 'switch',
- 'case', 'default', 'if', 'else', 'subroutine',
- 'discard', 'return', 'struct'),
- prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- # Boolean values
- 'true', 'false'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Constant),
- (words((
- # Miscellaneous types
- 'void', 'atomic_uint',
- # Floating-point scalars and vectors
- 'float', 'vec2', 'vec3', 'vec4',
- 'double', 'dvec2', 'dvec3', 'dvec4',
- # Integer scalars and vectors
- 'int', 'ivec2', 'ivec3', 'ivec4',
- 'uint', 'uvec2', 'uvec3', 'uvec4',
- # Boolean scalars and vectors
- 'bool', 'bvec2', 'bvec3', 'bvec4',
- # Matrices
- 'mat2', 'mat3', 'mat4', 'dmat2', 'dmat3', 'dmat4',
- 'mat2x2', 'mat2x3', 'mat2x4', 'dmat2x2', 'dmat2x3', 'dmat2x4',
- 'mat3x2', 'mat3x3', 'mat3x4', 'dmat3x2', 'dmat3x3',
- 'dmat3x4', 'mat4x2', 'mat4x3', 'mat4x4', 'dmat4x2', 'dmat4x3', 'dmat4x4',
- # Floating-point samplers
- 'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube',
- 'sampler1DArray', 'sampler2DArray', 'samplerCubeArray',
- 'sampler2DRect', 'samplerBuffer',
- 'sampler2DMS', 'sampler2DMSArray',
- # Shadow samplers
- 'sampler1DShadow', 'sampler2DShadow', 'samplerCubeShadow',
- 'sampler1DArrayShadow', 'sampler2DArrayShadow',
- 'samplerCubeArrayShadow', 'sampler2DRectShadow',
- # Signed integer samplers
- 'isampler1D', 'isampler2D', 'isampler3D', 'isamplerCube',
- 'isampler1DArray', 'isampler2DArray', 'isamplerCubeArray',
- 'isampler2DRect', 'isamplerBuffer',
- 'isampler2DMS', 'isampler2DMSArray',
- # Unsigned integer samplers
- 'usampler1D', 'usampler2D', 'usampler3D', 'usamplerCube',
- 'usampler1DArray', 'usampler2DArray', 'usamplerCubeArray',
- 'usampler2DRect', 'usamplerBuffer',
- 'usampler2DMS', 'usampler2DMSArray',
- # Floating-point image types
- 'image1D', 'image2D', 'image3D', 'imageCube',
- 'image1DArray', 'image2DArray', 'imageCubeArray',
- 'image2DRect', 'imageBuffer',
- 'image2DMS', 'image2DMSArray',
- # Signed integer image types
- 'iimage1D', 'iimage2D', 'iimage3D', 'iimageCube',
- 'iimage1DArray', 'iimage2DArray', 'iimageCubeArray',
- 'iimage2DRect', 'iimageBuffer',
- 'iimage2DMS', 'iimage2DMSArray',
- # Unsigned integer image types
- 'uimage1D', 'uimage2D', 'uimage3D', 'uimageCube',
- 'uimage1DArray', 'uimage2DArray', 'uimageCubeArray',
- 'uimage2DRect', 'uimageBuffer',
- 'uimage2DMS', 'uimage2DMSArray'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
- (words((
- # Reserved for future use.
- 'common', 'partition', 'active', 'asm', 'class',
- 'union', 'enum', 'typedef', 'template', 'this',
- 'resource', 'goto', 'inline', 'noinline', 'public',
- 'static', 'extern', 'external', 'interface', 'long',
- 'short', 'half', 'fixed', 'unsigned', 'superp', 'input',
- 'output', 'hvec2', 'hvec3', 'hvec4', 'fvec2', 'fvec3',
- 'fvec4', 'sampler3DRect', 'filter', 'sizeof', 'cast',
- 'namespace', 'using'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- # All names beginning with "gl_" are reserved.
- (r'gl_\w*', Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
- (r'\.', Punctuation),
- (r'\s+', Whitespace),
- ],
- }
-
-
-class HLSLShaderLexer(RegexLexer):
- """
- HLSL (Microsoft Direct3D Shader) lexer.
-
- .. versionadded:: 2.3
- """
- name = 'HLSL'
- aliases = ['hlsl']
- filenames = ['*.hlsl', '*.hlsli']
- mimetypes = ['text/x-hlsl']
-
- tokens = {
- 'root': [
- (r'#(?:.*\\\n)*.*$', Comment.Preproc),
- (r'//.*$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
- Operator),
- (r'[?:]', Operator), # quick hack for ternary
- (r'\bdefined\b', Operator),
- (r'[;{}(),.\[\]]', Punctuation),
- # FIXME when e is present, no decimal point needed
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?f?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?f?', Number.Float),
- (r'0[xX][0-9a-fA-F]*', Number.Hex),
- (r'0[0-7]*', Number.Oct),
- (r'[1-9][0-9]*', Number.Integer),
- (r'"', String, 'string'),
- (words((
- 'asm','asm_fragment','break','case','cbuffer','centroid','class',
- 'column_major','compile','compile_fragment','const','continue',
- 'default','discard','do','else','export','extern','for','fxgroup',
- 'globallycoherent','groupshared','if','in','inline','inout',
- 'interface','line','lineadj','linear','namespace','nointerpolation',
- 'noperspective','NULL','out','packoffset','pass','pixelfragment',
- 'point','precise','return','register','row_major','sample',
- 'sampler','shared','stateblock','stateblock_state','static',
- 'struct','switch','tbuffer','technique','technique10',
- 'technique11','texture','typedef','triangle','triangleadj',
- 'uniform','vertexfragment','volatile','while'),
- prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words(('true','false'), prefix=r'\b', suffix=r'\b'),
- Keyword.Constant),
- (words((
- 'auto','catch','char','const_cast','delete','dynamic_cast','enum',
- 'explicit','friend','goto','long','mutable','new','operator',
- 'private','protected','public','reinterpret_cast','short','signed',
- 'sizeof','static_cast','template','this','throw','try','typename',
- 'union','unsigned','using','virtual'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- (words((
- 'dword','matrix','snorm','string','unorm','unsigned','void','vector',
- 'BlendState','Buffer','ByteAddressBuffer','ComputeShader',
- 'DepthStencilState','DepthStencilView','DomainShader',
- 'GeometryShader','HullShader','InputPatch','LineStream',
- 'OutputPatch','PixelShader','PointStream','RasterizerState',
- 'RenderTargetView','RasterizerOrderedBuffer',
- 'RasterizerOrderedByteAddressBuffer',
- 'RasterizerOrderedStructuredBuffer','RasterizerOrderedTexture1D',
- 'RasterizerOrderedTexture1DArray','RasterizerOrderedTexture2D',
- 'RasterizerOrderedTexture2DArray','RasterizerOrderedTexture3D',
- 'RWBuffer','RWByteAddressBuffer','RWStructuredBuffer',
- 'RWTexture1D','RWTexture1DArray','RWTexture2D','RWTexture2DArray',
- 'RWTexture3D','SamplerState','SamplerComparisonState',
- 'StructuredBuffer','Texture1D','Texture1DArray','Texture2D',
- 'Texture2DArray','Texture2DMS','Texture2DMSArray','Texture3D',
- 'TextureCube','TextureCubeArray','TriangleStream','VertexShader'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
- (words((
- 'bool','double','float','int','half','min16float','min10float',
- 'min16int','min12int','min16uint','uint'),
- prefix=r'\b', suffix=r'([1-4](x[1-4])?)?\b'),
- Keyword.Type), # vector and matrix types
- (words((
- 'abort','abs','acos','all','AllMemoryBarrier',
- 'AllMemoryBarrierWithGroupSync','any','AppendStructuredBuffer',
- 'asdouble','asfloat','asin','asint','asuint','asuint','atan',
- 'atan2','ceil','CheckAccessFullyMapped','clamp','clip',
- 'CompileShader','ConsumeStructuredBuffer','cos','cosh','countbits',
- 'cross','D3DCOLORtoUBYTE4','ddx','ddx_coarse','ddx_fine','ddy',
- 'ddy_coarse','ddy_fine','degrees','determinant',
- 'DeviceMemoryBarrier','DeviceMemoryBarrierWithGroupSync','distance',
- 'dot','dst','errorf','EvaluateAttributeAtCentroid',
- 'EvaluateAttributeAtSample','EvaluateAttributeSnapped','exp',
- 'exp2','f16tof32','f32tof16','faceforward','firstbithigh',
- 'firstbitlow','floor','fma','fmod','frac','frexp','fwidth',
- 'GetRenderTargetSampleCount','GetRenderTargetSamplePosition',
- 'GlobalOrderedCountIncrement','GroupMemoryBarrier',
- 'GroupMemoryBarrierWithGroupSync','InterlockedAdd','InterlockedAnd',
- 'InterlockedCompareExchange','InterlockedCompareStore',
- 'InterlockedExchange','InterlockedMax','InterlockedMin',
- 'InterlockedOr','InterlockedXor','isfinite','isinf','isnan',
- 'ldexp','length','lerp','lit','log','log10','log2','mad','max',
- 'min','modf','msad4','mul','noise','normalize','pow','printf',
- 'Process2DQuadTessFactorsAvg','Process2DQuadTessFactorsMax',
- 'Process2DQuadTessFactorsMin','ProcessIsolineTessFactors',
- 'ProcessQuadTessFactorsAvg','ProcessQuadTessFactorsMax',
- 'ProcessQuadTessFactorsMin','ProcessTriTessFactorsAvg',
- 'ProcessTriTessFactorsMax','ProcessTriTessFactorsMin',
- 'QuadReadLaneAt','QuadSwapX','QuadSwapY','radians','rcp',
- 'reflect','refract','reversebits','round','rsqrt','saturate',
- 'sign','sin','sincos','sinh','smoothstep','sqrt','step','tan',
- 'tanh','tex1D','tex1D','tex1Dbias','tex1Dgrad','tex1Dlod',
- 'tex1Dproj','tex2D','tex2D','tex2Dbias','tex2Dgrad','tex2Dlod',
- 'tex2Dproj','tex3D','tex3D','tex3Dbias','tex3Dgrad','tex3Dlod',
- 'tex3Dproj','texCUBE','texCUBE','texCUBEbias','texCUBEgrad',
- 'texCUBElod','texCUBEproj','transpose','trunc','WaveAllBitAnd',
- 'WaveAllMax','WaveAllMin','WaveAllBitOr','WaveAllBitXor',
- 'WaveAllEqual','WaveAllProduct','WaveAllSum','WaveAllTrue',
- 'WaveAnyTrue','WaveBallot','WaveGetLaneCount','WaveGetLaneIndex',
- 'WaveGetOrderedIndex','WaveIsHelperLane','WaveOnce',
- 'WavePrefixProduct','WavePrefixSum','WaveReadFirstLane',
- 'WaveReadLaneAt'),
- prefix=r'\b', suffix=r'\b'),
- Name.Builtin), # built-in functions
- (words((
- 'SV_ClipDistance','SV_ClipDistance0','SV_ClipDistance1',
- 'SV_Culldistance','SV_CullDistance0','SV_CullDistance1',
- 'SV_Coverage','SV_Depth','SV_DepthGreaterEqual',
- 'SV_DepthLessEqual','SV_DispatchThreadID','SV_DomainLocation',
- 'SV_GroupID','SV_GroupIndex','SV_GroupThreadID','SV_GSInstanceID',
- 'SV_InnerCoverage','SV_InsideTessFactor','SV_InstanceID',
- 'SV_IsFrontFace','SV_OutputControlPointID','SV_Position',
- 'SV_PrimitiveID','SV_RenderTargetArrayIndex','SV_SampleIndex',
- 'SV_StencilRef','SV_TessFactor','SV_VertexID',
- 'SV_ViewportArrayIndex'),
- prefix=r'\b', suffix=r'\b'),
- Name.Decorator), # system-value semantics
- (r'\bSV_Target[0-7]?\b', Name.Decorator),
- (words((
- 'allow_uav_condition','branch','call','domain','earlydepthstencil',
- 'fastopt','flatten','forcecase','instance','loop','maxtessfactor',
- 'numthreads','outputcontrolpoints','outputtopology','partitioning',
- 'patchconstantfunc','unroll'),
- prefix=r'\b', suffix=r'\b'),
- Name.Decorator), # attributes
- (r'[a-zA-Z_]\w*', Name),
- (r'\\$', Comment.Preproc), # backslash at end of line -- usually macro continuation
- (r'\s+', Whitespace),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- }
-
-
-class PostScriptLexer(RegexLexer):
- """
- Lexer for PostScript files.
-
- .. versionadded:: 1.4
- """
- name = 'PostScript'
- url = 'https://en.wikipedia.org/wiki/PostScript'
- aliases = ['postscript', 'postscr']
- filenames = ['*.ps', '*.eps']
- mimetypes = ['application/postscript']
-
- delimiter = r'()<>\[\]{}/%\s'
- delimiter_end = r'(?=[%s])' % delimiter
-
- valid_name_chars = r'[^%s]' % delimiter
- valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
-
- tokens = {
- 'root': [
- # All comment types
- (r'^%!.+$', Comment.Preproc),
- (r'%%.*$', Comment.Special),
- (r'(^%.*\n){2,}', Comment.Multiline),
- (r'%.*$', Comment.Single),
-
- # String literals are awkward; enter separate state.
- (r'\(', String, 'stringliteral'),
-
- (r'[{}<>\[\]]', Punctuation),
-
- # Numbers
- (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
- # Slight abuse: use Oct to signify any explicit base system
- (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
- r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
- (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
- + delimiter_end, Number.Float),
- (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
-
- # References
- (r'\/%s' % valid_name, Name.Variable),
-
- # Names
- (valid_name, Name.Function), # Anything else is executed
-
- # These keywords are taken from
- # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>.
- # Is there an authoritative list anywhere that doesn't involve
- # trawling documentation?
-
- (r'(false|true)' + delimiter_end, Keyword.Constant),
-
- # Conditionals / flow control
- (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)'
- + delimiter_end, Keyword.Reserved),
-
- (words((
- 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin',
- 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat',
- 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix',
- 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix',
- 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end',
- 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get',
- 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv',
- 'idtransform', 'index', 'invertmatrix', 'itransform', 'length',
- 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto',
- 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print',
- 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat',
- 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run',
- 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray',
- 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix',
- 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt',
- 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror',
- 'transform', 'translate', 'truncate', 'typecheck', 'undefined',
- 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end),
- Name.Builtin),
-
- (r'\s+', Whitespace),
- ],
-
- 'stringliteral': [
- (r'[^()\\]+', String),
- (r'\\', String.Escape, 'escape'),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
-
- 'escape': [
- (r'[0-8]{3}|n|r|t|b|f|\\|\(|\)', String.Escape, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class AsymptoteLexer(RegexLexer):
- """
- For Asymptote source code.
-
- .. versionadded:: 1.2
- """
- name = 'Asymptote'
- url = 'http://asymptote.sf.net/'
- aliases = ['asymptote', 'asy']
- filenames = ['*.asy']
- mimetypes = ['text/x-asymptote']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
- tokens = {
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
- ],
- 'statements': [
- # simple string (TeX friendly)
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # C style string (with character escapes)
- (r"'", String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
- (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
- r'return|break|continue|struct|typedef|new|access|import|'
- r'unravel|from|include|quote|static|public|private|restricted|'
- r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
- # Since an asy-type-name can also be an asy-function-name,
- # the following rule only emits Keyword.Type when the match is
- # followed by whitespace and a letter (the "(?=\s+[a-zA-Z])" lookahead).
- # Of course, this is not perfect.
- (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
- r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
- r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
- r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
- r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
- r'path3|pen|picture|point|position|projection|real|revolution|'
- r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
- r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
- r'transformation|tree|triangle|trilinear|triple|vector|'
- r'vertex|void)(?=\s+[a-zA-Z])', Keyword.Type),
- # Now the asy-type-names which are not also asy-function-names
- # (unless you define such a function yourself).
- # Perhaps redundant.
- (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
- r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
- r'picture|position|real|revolution|slice|splitface|ticksgridT|'
- r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
- (r'[a-zA-Z_]\w*:(?!:)', Name.Label),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(\{)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'string': [
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'\n', String),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\n', String),
- (r'\\', String), # stray backslash
- ],
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in ASYFUNCNAME:
- token = Name.Function
- elif token is Name and value in ASYVARNAME:
- token = Name.Variable
- yield index, token, value
-
-
-def _shortened(word):
- dpos = word.find('$')
- return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
- for i in range(len(word), dpos, -1))
-
-
-def _shortened_many(*words):
- return '|'.join(map(_shortened, words))
-
-
-class GnuplotLexer(RegexLexer):
- """
- For Gnuplot plotting scripts.
-
- .. versionadded:: 0.11
- """
-
- name = 'Gnuplot'
- url = 'http://gnuplot.info/'
- aliases = ['gnuplot']
- filenames = ['*.plot', '*.plt']
- mimetypes = ['text/x-gnuplot']
-
- tokens = {
- 'root': [
- include('whitespace'),
- (_shortened('bi$nd'), Keyword, 'bind'),
- (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
- (_shortened('f$it'), Keyword, 'fit'),
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
- (r'else\b', Keyword),
- (_shortened('pa$use'), Keyword, 'pause'),
- (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
- (_shortened('sa$ve'), Keyword, 'save'),
- (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
- (_shortened_many('sh$ow', 'uns$et'),
- Keyword, ('noargs', 'optionarg')),
- (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
- 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
- 'pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'sy$stem', 'up$date'),
- Keyword, 'genericargs'),
- (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'test$'),
- Keyword, 'noargs'),
- (r'([a-zA-Z_]\w*)(\s*)(=)',
- bygroups(Name.Variable, Whitespace, Operator), 'genericargs'),
- (r'([a-zA-Z_]\w*)(\s*)(\()(.*?)(\))(\s*)(=)',
- bygroups(Name.Function, Whitespace, Punctuation,
- Text, Punctuation, Whitespace, Operator), 'genericargs'),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
- (r';', Keyword),
- ],
- 'comment': [
- (r'[^\\\n]+', Comment),
- (r'\\\n', Comment),
- (r'\\', Comment),
- # don't add the newline to the Comment token
- default('#pop'),
- ],
- 'whitespace': [
- ('#', Comment, 'comment'),
- (r'[ \t\v\f]+', Whitespace),
- ],
- 'noargs': [
- include('whitespace'),
- # semicolon and newline end the argument list
- (r';', Punctuation, '#pop'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'dqstring': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- (r'\n', Whitespace, '#pop'), # newline ends the string too
- ],
- 'sqstring': [
- (r"''", String), # escaped single quote
- (r"'", String, '#pop'),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # normal backslash
- (r'\n', Whitespace, '#pop'), # newline ends the string too
- ],
- 'genericargs': [
- include('noargs'),
- (r'"', String, 'dqstring'),
- (r"'", String, 'sqstring'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'-?\d+', Number.Integer),
- ('[,.~!%^&*+=|?:<>/-]', Operator),
- (r'[{}()\[\]]', Punctuation),
- (r'(eq|ne)\b', Operator.Word),
- (r'([a-zA-Z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-zA-Z_]\w*', Name),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
- (r'(\\)(\n)', bygroups(Text, Whitespace)),
- ],
- 'optionarg': [
- include('whitespace'),
- (_shortened_many(
- "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der",
- "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta",
- "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign",
- "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid",
- "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle",
- "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale",
- "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin",
- "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot",
- "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics",
- "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics",
- "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput",
- "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot",
- "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze",
- "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs",
- "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le",
- "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta",
- "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel",
- "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs",
- "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs",
- "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs",
- "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs",
- "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs",
- "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs",
- "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs",
- "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange",
- "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange",
- "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis",
- "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'),
- ],
- 'bind': [
- ('!', Keyword, '#pop'),
- (_shortened('all$windows'), Name.Builtin),
- include('genericargs'),
- ],
- 'quit': [
- (r'gnuplot\b', Keyword),
- include('noargs'),
- ],
- 'fit': [
- (r'via\b', Name.Builtin),
- include('plot'),
- ],
- 'if': [
- (r'\)', Punctuation, '#pop'),
- include('genericargs'),
- ],
- 'pause': [
- (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
- (_shortened('key$press'), Name.Builtin),
- include('genericargs'),
- ],
- 'plot': [
- (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
- 'mat$rix', 's$mooth', 'thru$', 't$itle',
- 'not$itle', 'u$sing', 'w$ith'),
- Name.Builtin),
- include('genericargs'),
- ],
- 'save': [
- (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
- Name.Builtin),
- include('genericargs'),
- ],
- }
-
-
-class PovrayLexer(RegexLexer):
- """
- For Persistence of Vision Raytracer files.
-
- .. versionadded:: 0.11
- """
- name = 'POVRay'
- url = 'http://www.povray.org/'
- aliases = ['pov']
- filenames = ['*.pov', '*.inc']
- mimetypes = ['text/x-povray']
-
- tokens = {
- 'root': [
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'//.*$', Comment.Single),
- (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
- (words((
- 'break', 'case', 'debug', 'declare', 'default', 'define', 'else',
- 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef',
- 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render',
- 'statistics', 'switch', 'undef', 'version', 'warning', 'while',
- 'write'), prefix=r'#', suffix=r'\b'),
- Comment.Preproc),
- (words((
- 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout',
- 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle',
- 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma',
- 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation',
- 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples',
- 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size',
- 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map',
- 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock',
- 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat',
- 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh',
- 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping',
- 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction',
- 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity',
- 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent',
- 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false',
- 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor',
- 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif',
- 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold',
- 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex',
- 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate',
- 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard',
- 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like',
- 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map',
- 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level',
- 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar',
- 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves',
- 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open',
- 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3',
- 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment',
- 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm',
- 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color',
- 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow',
- 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red',
- 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt',
- 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave',
- 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere',
- 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral',
- 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str',
- 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys',
- 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3',
- 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold',
- 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit',
- 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type',
- 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps',
- 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength',
- 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light',
- 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width',
- 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference',
- 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe',
- 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism',
- 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid',
- 'text', 'torus', 'triangle', 'union'), suffix=r'\b'),
- Name.Builtin),
- (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- (r'[0-9]*\.[0-9]+', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=.|&]|<=|>=|!=', Operator),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'\s+', Whitespace),
- ]
- }
-
- def analyse_text(text):
- """POVRAY is similar to JSON/C, but the combination of camera and
- light_source is probably not very likely elsewhere. HLSL or GLSL
- are similar (GLSL even has #version), but they miss #declare, and
- light_source/camera are not keywords anywhere else -- it's fair
- to assume though that any POVRAY scene must have a camera and
- lightsource."""
- result = 0
- if '#version' in text:
- result += 0.05
- if '#declare' in text:
- result += 0.05
- if 'camera' in text:
- result += 0.05
- if 'light_source' in text:
- result += 0.1
-
- return result
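The _shortened() helper defined just before GnuplotLexer turns a word with a '$' marker into a regex that accepts every legal Gnuplot abbreviation: the part before the '$' is mandatory, and each longer prefix is an additional alternative. A minimal sketch of the expansion, assuming the private helper is importable from pygments.lexers.graphics as in the module shown above:

import re
from pygments.lexers.graphics import _shortened

pattern = _shortened('se$t')
print(pattern)                                # set\b|se\b

assert re.fullmatch(pattern, 'se')            # shortest allowed form
assert re.fullmatch(pattern, 'set')           # full command name
assert re.fullmatch(pattern, 'sets') is None  # longer words do not match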
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/graphql.py b/venv/lib/python3.11/site-packages/pygments/lexers/graphql.py
deleted file mode 100644
index b17e4a6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/graphql.py
+++ /dev/null
@@ -1,177 +0,0 @@
-"""
- pygments.lexers.graphql
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for GraphQL, an open-source data query and manipulation
- language for APIs.
-
- More information:
- https://graphql.org/
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups, default
-from pygments.token import (Comment, Keyword, Name, Number, Punctuation, String,
- Whitespace)
-
-
-__all__ = ["GraphQLLexer"]
-
-OPERATION_TYPES = ("query", "mutation", "subscription")
-BUILTIN_TYPES = ("Int", "Float", "String", "Boolean", "ID")
-BOOLEAN_VALUES = ("true", "false", "null")
-KEYWORDS = (
- "type",
- "schema",
- "extend",
- "enum",
- "scalar",
- "implements",
- "interface",
- "union",
- "input",
- "directive",
- "QUERY",
- "MUTATION",
- "SUBSCRIPTION",
- "FIELD",
- "FRAGMENT_DEFINITION",
- "FRAGMENT_SPREAD",
- "INLINE_FRAGMENT",
- "SCHEMA",
- "SCALAR",
- "OBJECT",
- "FIELD_DEFINITION",
- "ARGUMENT_DEFINITION",
- "INTERFACE",
- "UNION",
- "ENUM",
- "ENUM_VALUE",
- "INPUT_OBJECT",
- "INPUT_FIELD_DEFINITION",
-)
-
-
-class GraphQLLexer(RegexLexer):
- """
- Lexer for GraphQL syntax
-
- .. versionadded:: 2.16
- """
- name = "GraphQL"
- aliases = ["graphql"]
- filenames = ["*.graphql"]
- url = "https://graphql.org"
-
- tokens = {
- "ignored_tokens": [
- (r"\s+", Whitespace), # Whitespaces
- (r"#.*$", Comment),
- (",", Punctuation), # Insignificant commas
- ],
- "value": [
- include("ignored_tokens"),
- (r"-?\d+(?![.eE])", Number.Integer, "#pop"),
- (
- r"-?\d+(\.\d+)?([eE][+-]?\d+)?",
- Number.Float,
- "#pop",
- ),
- (r'"', String, ("#pop", "string")),
- (words(BOOLEAN_VALUES, suffix=r"\b"), Name.Builtin, "#pop"),
- (r"\$[a-zA-Z_]\w*", Name.Variable, "#pop"),
- (r"[a-zA-Z_]\w*", Name.Constant, "#pop"),
- (r"\[", Punctuation, ("#pop", "list_value")),
- (r"\{", Punctuation, ("#pop", "object_value")),
- ],
- "list_value": [
- include("ignored_tokens"),
- ("]", Punctuation, "#pop"),
- default("value"),
- ],
- "object_value": [
- include("ignored_tokens"),
- (r"[a-zA-Z_]\w*", Name),
- (r":", Punctuation, "value"),
- (r"\}", Punctuation, "#pop"),
- ],
- "string": [
- (r'\\(["\\/bfnrt]|u[a-fA-F0-9]{4})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'"', String, "#pop"),
- ],
- "root": [
- include("ignored_tokens"),
- (words(OPERATION_TYPES, suffix=r"\b"), Keyword, "operation"),
- (words(KEYWORDS, suffix=r"\b"), Keyword),
- (r"\{", Punctuation, "selection_set"),
- (r"fragment\b", Keyword, "fragment_definition"),
- ],
- "operation": [
- include("ignored_tokens"),
- (r"[a-zA-Z_]\w*", Name.Function),
- (r"\(", Punctuation, "variable_definition"),
- (r"\{", Punctuation, ("#pop", "selection_set")),
- ],
- "variable_definition": [
- include("ignored_tokens"),
- (r"\$[a-zA-Z_]\w*", Name.Variable),
- (r"[\]!]", Punctuation),
- (r":", Punctuation, "type"),
- (r"=", Punctuation, "value"),
- (r"\)", Punctuation, "#pop"),
- ],
- "type": [
- include("ignored_tokens"),
- (r"\[", Punctuation),
- (words(BUILTIN_TYPES, suffix=r"\b"), Name.Builtin, "#pop"),
- (r"[a-zA-Z_]\w*", Name.Class, "#pop"),
- ],
- "selection_set": [
- include("ignored_tokens"),
- (r"([a-zA-Z_]\w*)(\s*)(:)", bygroups(Name.Label, Whitespace, Punctuation)),
- (r"[a-zA-Z_]\w*", Name), # Field
- (
- r"(\.\.\.)(\s+)(on)\b",
- bygroups(Punctuation, Whitespace, Keyword),
- "inline_fragment",
- ),
- (r"\.\.\.", Punctuation, "fragment_spread"),
- (r"\(", Punctuation, "arguments"),
- (r"@[a-zA-Z_]\w*", Name.Decorator, "directive"),
- (r"\{", Punctuation, "selection_set"),
- (r"\}", Punctuation, "#pop"),
- ],
- "directive": [
- include("ignored_tokens"),
- (r"\(", Punctuation, ("#pop", "arguments")),
- ],
- "arguments": [
- include("ignored_tokens"),
- (r"[a-zA-Z_]\w*", Name),
- (r":", Punctuation, "value"),
- (r"\)", Punctuation, "#pop"),
- ],
- # Fragments
- "fragment_definition": [
- include("ignored_tokens"),
- (r"[\]!]", Punctuation), # For NamedType
- (r"on\b", Keyword, "type"),
- (r"[a-zA-Z_]\w*", Name.Function),
- (r"@[a-zA-Z_]\w*", Name.Decorator, "directive"),
- (r"\{", Punctuation, ("#pop", "selection_set")),
- ],
- "fragment_spread": [
- include("ignored_tokens"),
- (r"@[a-zA-Z_]\w*", Name.Decorator, "directive"),
- (r"[a-zA-Z_]\w*", Name, "#pop"), # Fragment name
- ],
- "inline_fragment": [
- include("ignored_tokens"),
- (r"[a-zA-Z_]\w*", Name.Class), # Type condition
- (r"@[a-zA-Z_]\w*", Name.Decorator, "directive"),
- (r"\{", Punctuation, ("#pop", "selection_set")),
- ],
- }
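The GraphQL lexer above is almost entirely driven by its state stack: 'query' pushes 'operation', '(' pushes 'variable_definition', '{' swaps into 'selection_set', and so on. A minimal sketch of the token types this yields for a made-up query:

from pygments.lexers.graphql import GraphQLLexer
from pygments.token import Keyword, Name

query = 'query HeroName($ep: Episode) { hero(episode: $ep) { name } }'
tokens = list(GraphQLLexer().get_tokens(query))

assert (Keyword, 'query') in tokens        # 'root' pushes 'operation'
assert (Name.Variable, '$ep') in tokens    # from 'variable_definition'
assert (Name.Class, 'Episode') in tokens   # non-builtin type name
assert (Name, 'hero') in tokens            # a field inside 'selection_set'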
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/graphviz.py b/venv/lib/python3.11/site-packages/pygments/lexers/graphviz.py
deleted file mode 100644
index 1e4ba02..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/graphviz.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""
- pygments.lexers.graphviz
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the DOT language (graphviz).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Keyword, Operator, Name, String, Number, \
- Punctuation, Whitespace
-
-
-__all__ = ['GraphvizLexer']
-
-
-class GraphvizLexer(RegexLexer):
- """
- For graphviz DOT graph description language.
-
- .. versionadded:: 2.8
- """
- name = 'Graphviz'
- url = 'https://www.graphviz.org/doc/info/lang.html'
- aliases = ['graphviz', 'dot']
- filenames = ['*.gv', '*.dot']
- mimetypes = ['text/x-graphviz', 'text/vnd.graphviz']
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(#|//).*?$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(?i)(node|edge|graph|digraph|subgraph|strict)\b', Keyword),
- (r'--|->', Operator),
- (r'[{}[\]:;,]', Punctuation),
- (r'(\b\D\w*)(\s*)(=)(\s*)',
- bygroups(Name.Attribute, Whitespace, Punctuation, Whitespace),
- 'attr_id'),
- (r'\b(n|ne|e|se|s|sw|w|nw|c|_)\b', Name.Builtin),
- (r'\b\D\w*', Name.Tag), # node
- (r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number),
- (r'"(\\"|[^"])*?"', Name.Tag), # quoted node
- (r'<', Punctuation, 'xml'),
- ],
- 'attr_id': [
- (r'\b\D\w*', String, '#pop'),
- (r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number, '#pop'),
- (r'"(\\"|[^"])*?"', String.Double, '#pop'),
- (r'<', Punctuation, ('#pop', 'xml')),
- ],
- 'xml': [
- (r'<', Punctuation, '#push'),
- (r'>', Punctuation, '#pop'),
- (r'\s+', Whitespace),
- (r'[^<>\s]', Name.Tag),
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/gsql.py b/venv/lib/python3.11/site-packages/pygments/lexers/gsql.py
deleted file mode 100755
index 222ee70..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/gsql.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
- pygments.lexers.gsql
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the TigerGraph GSQL graph query language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, words
-from pygments.token import Keyword, Punctuation, Comment, Operator, Name, \
- String, Number, Whitespace
-
-__all__ = ["GSQLLexer"]
-
-
-class GSQLLexer(RegexLexer):
-
- """
- For GSQL queries (version 3.x).
-
- .. versionadded:: 2.10
- """
-
- name = 'GSQL'
- url = 'https://docs.tigergraph.com/dev/gsql-ref'
- aliases = ['gsql']
- filenames = ['*.gsql']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- include('comment'),
- include('keywords'),
- include('clauses'),
- include('accums'),
- include('relations'),
- include('strings'),
- include('whitespace'),
- include('barewords'),
- include('operators'),
- ],
- 'comment': [
- (r'\#.*', Comment.Single),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- ],
- 'keywords': [
- (words((
- 'ACCUM', 'AND', 'ANY', 'API', 'AS', 'ASC', 'AVG', 'BAG', 'BATCH',
- 'BETWEEN', 'BOOL', 'BOTH', 'BREAK', 'BY', 'CASE', 'CATCH', 'COALESCE',
- 'COMPRESS', 'CONTINUE', 'COUNT', 'CREATE', 'DATETIME', 'DATETIME_ADD',
- 'DATETIME_SUB', 'DELETE', 'DESC', 'DISTRIBUTED', 'DO', 'DOUBLE',
- 'EDGE', 'ELSE', 'END', 'ESCAPE', 'EXCEPTION', 'FALSE', 'FILE',
- 'FILTER', 'FLOAT', 'FOREACH', 'FOR', 'FROM', 'GRAPH', 'GROUP',
- 'GSQL_INT_MAX', 'GSQL_INT_MIN', 'GSQL_UINT_MAX', 'HAVING', 'IF',
- 'IN', 'INSERT', 'INT', 'INTERPRET', 'INTERSECT', 'INTERVAL', 'INTO',
- 'IS', 'ISEMPTY', 'JSONARRAY', 'JSONOBJECT', 'LASTHOP', 'LEADING',
- 'LIKE', 'LIMIT', 'LIST', 'LOAD_ACCUM', 'LOG', 'MAP', 'MATCH', 'MAX',
- 'MIN', 'MINUS', 'NOT', 'NOW', 'NULL', 'OFFSET', 'OR', 'ORDER', 'PATH',
- 'PER', 'PINNED', 'POST_ACCUM', 'POST-ACCUM', 'PRIMARY_ID', 'PRINT',
- 'QUERY', 'RAISE', 'RANGE', 'REPLACE', 'RESET_COLLECTION_ACCUM',
- 'RETURN', 'RETURNS', 'RUN', 'SAMPLE', 'SELECT', 'SELECT_VERTEX',
- 'SET', 'SRC', 'STATIC', 'STRING', 'SUM', 'SYNTAX', 'TARGET',
- 'TAGSTGT', 'THEN', 'TO', 'TO_CSV', 'TO_DATETIME', 'TRAILING',
- 'TRIM', 'TRUE', 'TRY', 'TUPLE', 'TYPEDEF', 'UINT', 'UNION', 'UPDATE',
- 'VALUES', 'VERTEX', 'WHEN', 'WHERE', 'WHILE', 'WITH'),
- prefix=r'(?<!\.)', suffix=r'\b'), Keyword),
- ],
- 'clauses': [
- (words(('accum', 'having', 'limit', 'order', 'postAccum', 'sample', 'where')),
- Name.Builtin),
- ],
- 'accums': [
- (words(('andaccum', 'arrayaccum', 'avgaccum', 'bagaccum', 'bitwiseandaccum',
- 'bitwiseoraccum', 'groupbyaccum', 'heapaccum', 'listaccum',
- 'MapAccum', 'maxaccum', 'minaccum', 'oraccum', 'setaccum',
- 'sumaccum')), Name.Builtin),
- ],
- 'relations': [
- (r'(-\s?)(\(.*\:\w?\))(\s?-)', bygroups(Operator, using(this), Operator)),
- (r'->|<-', Operator),
- (r'[.*{}\[\]\<\>\_]', Punctuation),
- ],
- 'strings': [
- (r'"([^"\\]|\\.)*"', String),
- (r'@{1,2}\w+', Name.Variable),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'barewords': [
- (r'[a-z]\w*', Name),
- (r'(\d+\.\d+|\d+)', Number),
- ],
- 'operators': [
- (r'\$|[^0-9|\/|\-](\-\=|\+\=|\*\=|\\\=|\=|\=\=|\=\=\=|'
- r'\+|\-|\*|\\|\+\=|\>|\<)[^\>|\/]', Operator),
- (r'(\||\(|\)|\,|\;|\=|\-|\+|\*|\/|\>|\<|\:)', Operator),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/haskell.py b/venv/lib/python3.11/site-packages/pygments/lexers/haskell.py
deleted file mode 100644
index 2c5fa13..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/haskell.py
+++ /dev/null
@@ -1,871 +0,0 @@
-"""
- pygments.lexers.haskell
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Haskell and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
- default, include, inherit, line_re
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-from pygments import unistring as uni
-
-__all__ = ['HaskellLexer', 'HspecLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
- 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
- 'LiterateCryptolLexer', 'KokaLexer']
-
-
-class HaskellLexer(RegexLexer):
- """
- A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 0.8
- """
- name = 'Haskell'
- url = 'https://www.haskell.org/'
- aliases = ['haskell', 'hs']
- filenames = ['*.hs']
- mimetypes = ['text/x-haskell']
-
- reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
- 'family', 'if', 'in', 'infix[lr]?', 'instance',
- 'let', 'newtype', 'of', 'then', 'type', 'where', '_')
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Whitespace),
- # (r'--\s*|.*$', Comment.Doc),
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r"'[^\\]'", String.Char), # this has to come before the TH quote
- (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
- (r"'?[_" + uni.Ll + r"][\w']*", Name),
- (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
- (r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
- (r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
- (r"(')\([^)]*\)", Keyword.Type), # ..
- (r"(')[:!#$%&*+.\\/<=>?@^|~-]+", Keyword.Type), # promoted type operators
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*', Number.Float),
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
- r'(_*[pP][+-]?\d(_*\d)*)?', Number.Float),
- (r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*', Number.Float),
- (r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?', Number.Float),
- (r'0[bB]_*[01](_*[01])*', Number.Bin),
- (r'0[oO]_*[0-7](_*[0-7])*', Number.Oct),
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*', Number.Hex),
- (r'\d(_*\d)*', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
- (r'\s+', Whitespace),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
- bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Name), '#pop'),
- # import X hiding (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
- bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Punctuation), 'funclist'),
- # import X (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- # import X
- (r'[\w.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
- (r'\s+', Whitespace),
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Whitespace),
- (r'[' + uni.Lu + r']\w*', Keyword.Type),
- (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'\{-', Comment.Multiline, '#push'),
- (r'-\}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'),
- ],
- }
-
-
-class HspecLexer(HaskellLexer):
- """
- A Haskell lexer with support for Hspec constructs.
-
- .. versionadded:: 2.4.0
- """
-
- name = 'Hspec'
- aliases = ['hspec']
- filenames = ['*Spec.hs']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'(it)(\s*)("[^"]*")', bygroups(Text, Whitespace, String.Doc)),
- (r'(describe)(\s*)("[^"]*")', bygroups(Text, Whitespace, String.Doc)),
- (r'(context)(\s*)("[^"]*")', bygroups(Text, Whitespace, String.Doc)),
- inherit,
- ],
- }
-
-
-class IdrisLexer(RegexLexer):
- """
- A lexer for the dependently typed programming language Idris.
-
- Based on the Haskell and Agda Lexer.
-
- .. versionadded:: 2.0
- """
- name = 'Idris'
- url = 'https://www.idris-lang.org/'
- aliases = ['idris', 'idr']
- filenames = ['*.idr']
- mimetypes = ['text/x-idris']
-
- reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
- 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
- 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
- 'total', 'partial',
- 'interface', 'implementation', 'export', 'covering', 'constructor',
- 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
- 'pattern', 'term', 'syntax', 'prefix',
- 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
- 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
-
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
- 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
-
- tokens = {
- 'root': [
- # Comments
- (r'^(\s*)(%%(%s))' % '|'.join(directives),
- bygroups(Whitespace, Keyword.Reserved)),
- (r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Whitespace, Comment.Single)),
- (r'(\s*)(\|{3}.*?)$', bygroups(Whitespace, Comment.Single)),
- (r'(\s*)(\{-)', bygroups(Whitespace, Comment.Multiline), 'comment'),
- # Declaration
- (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
- bygroups(Whitespace, Name.Function, Whitespace, Operator.Word, Whitespace)),
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Whitespace), 'module'),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- (r'[a-z][\w\']*', Text),
- # Special Symbols
- (r'(<-|::|->|=>|=)', Operator.Word), # specials
- (r'([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s(){}]+', Text),
- (r'\s+?', Whitespace), # Whitespace
- ],
- 'module': [
- (r'\s+', Whitespace),
- (r'([A-Z][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Whitespace),
- (r'[A-Z]\w*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- (r'--.*$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'\{-', Comment.Multiline, '#push'),
- (r'-\}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']", String.Char),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop')
- ],
- }
-
-
-class AgdaLexer(RegexLexer):
- """
- For the Agda dependently typed functional programming language and
- proof assistant.
-
- .. versionadded:: 2.0
- """
-
- name = 'Agda'
- url = 'http://wiki.portal.chalmers.se/agda/pmwiki.php'
- aliases = ['agda']
- filenames = ['*.agda']
- mimetypes = ['text/x-agda']
-
- reserved = (
- 'abstract', 'codata', 'coinductive', 'constructor', 'data', 'do',
- 'eta-equality', 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
- 'infixl', 'infixr', 'instance', 'interleaved', 'let', 'macro', 'mutual',
- 'no-eta-equality', 'open', 'overlap', 'pattern', 'postulate', 'primitive',
- 'private', 'quote', 'quoteTerm', 'record', 'renaming', 'rewrite',
- 'syntax', 'tactic', 'unquote', 'unquoteDecl', 'unquoteDef', 'using',
- 'variable', 'where', 'with',
- )
-
- tokens = {
- 'root': [
- # Declaration
- (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
- bygroups(Whitespace, Name.Function, Whitespace,
- Operator.Word, Whitespace)),
- # Comments
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- # Holes
- (r'\{!', Comment.Directive, 'hole'),
- # Lexemes:
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Whitespace),
- 'module'),
- (r'\b(Set|Prop)[\u2080-\u2089]*\b', Keyword.Type),
- # Special Symbols
- (r'(\(|\)|\{|\})', Operator),
- (r'(\.{1,3}|\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s(){}]+', Text),
- (r'\s+?', Whitespace), # Whitespace
- ],
- 'hole': [
- # Holes
- (r'[^!{}]+', Comment.Directive),
- (r'\{!', Comment.Directive, '#push'),
- (r'!\}', Comment.Directive, '#pop'),
- (r'[!{}]', Comment.Directive),
- ],
- 'module': [
- (r'\{-', Comment.Multiline, 'comment'),
- (r'[a-zA-Z][\w.\']*', Name, '#pop'),
- (r'[\W0-9_]+', Text)
- ],
- 'comment': HaskellLexer.tokens['comment'],
- 'character': HaskellLexer.tokens['character'],
- 'string': HaskellLexer.tokens['string'],
- 'escape': HaskellLexer.tokens['escape']
- }
-
-
-class CryptolLexer(RegexLexer):
- """
- FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 2.0
- """
- name = 'Cryptol'
- aliases = ['cryptol', 'cry']
- filenames = ['*.cry']
- mimetypes = ['text/x-cryptol']
-
- reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
- 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
- 'max', 'min', 'module', 'newtype', 'pragma', 'property',
- 'then', 'type', 'where', 'width')
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Whitespace),
- # (r'--\s*|.*$', Comment.Doc),
- (r'//.*$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'^[_a-z][\w\']*', Name.Function),
- (r"'?[_a-z][\w']*", Name),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
- (r'\s+', Whitespace),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([A-Z][\w.]*)(\s+)(as)(\s+)([A-Z][\w.]*)',
- bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Name), '#pop'),
- # import X hiding (functions)
- (r'([A-Z][\w.]*)(\s+)(hiding)(\s+)(\()',
- bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Punctuation), 'funclist'),
- # import X (functions)
- (r'([A-Z][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- # import X
- (r'[\w.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
- (r'\s+', Whitespace),
- (r'([A-Z][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Whitespace),
- (r'[A-Z]\w*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- # TODO: these don't match the comments in docs, remove.
- # (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- # (r'{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- 'comment': [
- # Multiline Comments
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'),
- ],
- }
-
- EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
- 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
- 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
- 'trace'}
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Name.Builtin, value
- else:
- yield index, token, value
-
-
-class LiterateLexer(Lexer):
- """
- Base class for lexers of literate file formats based on LaTeX or Bird-style
- (prefixing each code line with ">").
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
- """
-
- bird_re = re.compile(r'(>[ \t]*)(.*\n)')
-
- def __init__(self, baselexer, **options):
- self.baselexer = baselexer
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- style = self.options.get('litstyle')
- if style is None:
-            style = 'latex' if text.lstrip()[0:1] in '%\\' else 'bird'
-
- code = ''
- insertions = []
- if style == 'bird':
- # bird-style
- for match in line_re.finditer(text):
- line = match.group()
- m = self.bird_re.match(line)
- if m:
- insertions.append((len(code),
- [(0, Comment.Special, m.group(1))]))
- code += m.group(2)
- else:
- insertions.append((len(code), [(0, Text, line)]))
- else:
- # latex-style
- from pygments.lexers.markup import TexLexer
- lxlexer = TexLexer(**self.options)
- codelines = 0
- latex = ''
- for match in line_re.finditer(text):
- line = match.group()
- if codelines:
- if line.lstrip().startswith('\\end{code}'):
- codelines = 0
- latex += line
- else:
- code += line
- elif line.lstrip().startswith('\\begin{code}'):
- codelines = 1
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- latex = ''
- else:
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
-
-
-class LiterateHaskellLexer(LiterateLexer):
- """
- For Literate Haskell (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 0.9
- """
- name = 'Literate Haskell'
- aliases = ['literate-haskell', 'lhaskell', 'lhs']
- filenames = ['*.lhs']
- mimetypes = ['text/x-literate-haskell']
-
- def __init__(self, **options):
- hslexer = HaskellLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateIdrisLexer(LiterateLexer):
- """
- For Literate Idris (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Idris'
- aliases = ['literate-idris', 'lidris', 'lidr']
- filenames = ['*.lidr']
- mimetypes = ['text/x-literate-idris']
-
- def __init__(self, **options):
- hslexer = IdrisLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateAgdaLexer(LiterateLexer):
- """
- For Literate Agda source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Agda'
- aliases = ['literate-agda', 'lagda']
- filenames = ['*.lagda']
- mimetypes = ['text/x-literate-agda']
-
- def __init__(self, **options):
- agdalexer = AgdaLexer(**options)
- LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
-
-
-class LiterateCryptolLexer(LiterateLexer):
- """
- For Literate Cryptol (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Cryptol'
- aliases = ['literate-cryptol', 'lcryptol', 'lcry']
- filenames = ['*.lcry']
- mimetypes = ['text/x-literate-cryptol']
-
- def __init__(self, **options):
- crylexer = CryptolLexer(**options)
- LiterateLexer.__init__(self, crylexer, **options)
-
-
-class KokaLexer(RegexLexer):
- """
- Lexer for the Koka language.
-
- .. versionadded:: 1.6
- """
-
- name = 'Koka'
- url = 'https://koka-lang.github.io/koka/doc/index.html'
- aliases = ['koka']
- filenames = ['*.kk', '*.kki']
- mimetypes = ['text/x-koka']
-
- keywords = [
- 'infix', 'infixr', 'infixl',
- 'type', 'cotype', 'rectype', 'alias',
- 'struct', 'con',
- 'fun', 'function', 'val', 'var',
- 'external',
- 'if', 'then', 'else', 'elif', 'return', 'match',
-        'private', 'public',
- 'module', 'import', 'as',
- 'include', 'inline',
- 'rec',
- 'try', 'yield', 'enum',
- 'interface', 'instance',
- ]
-
- # keywords that are followed by a type
- typeStartKeywords = [
- 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
- ]
-
- # keywords valid in a type
- typekeywords = [
- 'forall', 'exists', 'some', 'with',
- ]
-
- # builtin names and special names
- builtin = [
- 'for', 'while', 'repeat',
- 'foreach', 'foreach-indexed',
- 'error', 'catch', 'finally',
- 'cs', 'js', 'file', 'ref', 'assigned',
- ]
-
- # symbols that can be in an operator
- symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
-
- # symbol boundary: an operator keyword should not be followed by any of these
- sboundary = '(?!' + symbols + ')'
-
- # name boundary: a keyword should not be followed by any of these
- boundary = r'(?![\w/])'
-
- # koka token abstractions
- tokenType = Name.Attribute
- tokenTypeDef = Name.Class
- tokenConstructor = Generic.Emph
-
- # main lexer
- tokens = {
- 'root': [
- include('whitespace'),
-
- # go into type mode
- (r'::?' + sboundary, tokenType, 'type'),
- (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
- 'alias-type'),
- (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
- 'struct-type'),
- ((r'(%s)' % '|'.join(typeStartKeywords)) +
- r'(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
- 'type'),
-
- # special sequences of tokens (we use ?: for non-capturing group as
- # required by 'bygroups')
- (r'(module)(\s+)(interface(?=\s))?(\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Namespace)),
- (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
- r'(?:(\s*)(=)(\s*)(qualified)?(\s*)'
- r'((?:[a-z]\w*/)*[a-z]\w*))?',
- bygroups(Keyword, Whitespace, Name.Namespace, Whitespace, Keyword, Whitespace,
- Keyword, Whitespace, Name.Namespace)),
-
- (r'^(public|private)?(\s+)?(function|fun|val)'
- r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
- (r'^(?:(public|private)(?=\s+external))?((?<!^)\s+)?(external)(\s+)(inline(?=\s))?(\s+)?'
- r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
- (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
- (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
- (r'::?|:=|\->|[=.]' + sboundary, Keyword),
-
- # names
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenConstructor)),
- (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
- (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
- bygroups(Name.Namespace, Name)),
- (r'_\w*', Name.Variable),
-
- # literal string
- (r'@"', String.Double, 'litstring'),
-
- # operators
- (symbols + "|/(?![*/])", Operator),
- (r'`', Operator),
- (r'[{}()\[\];,]', Punctuation),
-
- # literals. No check for literal characters with len > 1
- (r'[0-9]+\.[0-9]+([eE][\-+]?[0-9]+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- (r"'", String.Char, 'char'),
- (r'"', String.Double, 'string'),
- ],
-
- # type started by alias
- 'alias-type': [
- (r'=', Keyword),
- include('type')
- ],
-
- # type started by struct
- 'struct-type': [
- (r'(?=\((?!,*\)))', Punctuation, '#pop'),
- include('type')
- ],
-
- # type started by colon
- 'type': [
- (r'[(\[<]', tokenType, 'type-nested'),
- include('type-content')
- ],
-
- # type nested in brackets: can contain parameters, comma etc.
- 'type-nested': [
- (r'[)\]>]', tokenType, '#pop'),
- (r'[(\[<]', tokenType, 'type-nested'),
- (r',', tokenType),
- (r'([a-z]\w*)(\s*)(:)(?!:)',
- bygroups(Name, Whitespace, tokenType)), # parameter name
- include('type-content')
- ],
-
- # shared contents of a type
- 'type-content': [
- include('whitespace'),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
- (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
- Keyword, '#pop'), # need to match because names overlap...
-
- # kinds
- (r'[EPHVX]' + boundary, tokenType),
-
- # type names
- (r'[a-z][0-9]*(?![\w/])', tokenType),
- (r'_\w*', tokenType.Variable), # Generic.Emph
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenType)),
- (r'((?:[a-z]\w*/)*)([a-z]\w+)',
- bygroups(Name.Namespace, tokenType)),
-
- # type keyword operators
- (r'::|->|[.:|]', tokenType),
-
- # catchall
- default('#pop')
- ],
-
- # comments and literals
- 'whitespace': [
- (r'(\n\s*)(#.*)$', bygroups(Whitespace, Comment.Preproc)),
- (r'\s+', Whitespace),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'//.*$', Comment.Single)
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'litstring': [
- (r'[^"]+', String.Double),
- (r'""', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'string': [
- (r'[^\\"\n]+', String.Double),
- include('escape-sequence'),
- (r'["\n]', String.Double, '#pop'),
- ],
- 'char': [
- (r'[^\\\'\n]+', String.Char),
- include('escape-sequence'),
- (r'[\'\n]', String.Char, '#pop'),
- ],
- 'escape-sequence': [
- (r'\\[nrt\\"\']', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- # Yes, \U literals are 6 hex digits.
- (r'\\U[0-9a-fA-F]{6}', String.Escape)
- ]
- }
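(Aside, not part of the deleted file: the CryptolLexer above re-tags plain Name tokens as Name.Builtin by wrapping RegexLexer.get_tokens_unprocessed. Below is a minimal sketch of the same pattern applied to the HaskellLexer, assuming Pygments is installed; the EXTRA identifier set is invented purely for illustration.)

from pygments.lexers.haskell import HaskellLexer
from pygments.token import Name


class PromotingHaskellLexer(HaskellLexer):
    # Hypothetical identifier set, purely for this example.
    EXTRA = {'fmap', 'traverse'}

    def get_tokens_unprocessed(self, text):
        # Re-tag plain Name tokens listed in EXTRA; pass everything else through.
        for index, token, value in super().get_tokens_unprocessed(text):
            if token is Name and value in self.EXTRA:
                yield index, Name.Builtin, value
            else:
                yield index, token, value


for tok in PromotingHaskellLexer().get_tokens_unprocessed('ys = fmap succ xs'):
    print(tok)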
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/haxe.py b/venv/lib/python3.11/site-packages/pygments/lexers/haxe.py
deleted file mode 100644
index 6e99b10..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/haxe.py
+++ /dev/null
@@ -1,937 +0,0 @@
-"""
- pygments.lexers.haxe
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Haxe and related stuff.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
- default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-__all__ = ['HaxeLexer', 'HxmlLexer']
-
-
-class HaxeLexer(ExtendedRegexLexer):
- """
- For Haxe source code.
-
- .. versionadded:: 1.3
- """
-
- name = 'Haxe'
- url = 'http://haxe.org/'
- aliases = ['haxe', 'hxsl', 'hx']
- filenames = ['*.hx', '*.hxsl']
- mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
-
- # keywords extracted from lexer.mll in the haxe compiler source
- keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
- r'break|return|continue|extends|implements|import|'
- r'switch|case|default|public|private|try|untyped|'
- r'catch|new|this|throw|extern|enum|in|interface|'
- r'cast|override|dynamic|typedef|package|'
- r'inline|using|null|true|false|abstract)\b')
-
- # idtype in lexer.mll
- typeid = r'_*[A-Z]\w*'
-
- # combined ident and dollar and idtype
- ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + r'|_+|\$\w+)'
-
- binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
- r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
- r'/|\-|=>|=)')
-
- # ident except keywords
- ident_no_keyword = r'(?!' + keyword + ')' + ident
-
- flags = re.DOTALL | re.MULTILINE
-
- preproc_stack = []
-
- def preproc_callback(self, match, ctx):
- proc = match.group(2)
-
- if proc == 'if':
- # store the current stack
- self.preproc_stack.append(ctx.stack[:])
- elif proc in ['else', 'elseif']:
- # restore the stack back to right before #if
- if self.preproc_stack:
- ctx.stack = self.preproc_stack[-1][:]
- elif proc == 'end':
- # remove the saved stack of previous #if
- if self.preproc_stack:
- self.preproc_stack.pop()
-
-        # #if and #elseif should be followed by an expr
- if proc in ['if', 'elseif']:
- ctx.stack.append('preproc-expr')
-
-        # #error can optionally be followed by the error msg
- if proc in ['error']:
- ctx.stack.append('preproc-error')
-
- yield match.start(), Comment.Preproc, '#' + proc
- ctx.pos = match.end()
-
- tokens = {
- 'root': [
- include('spaces'),
- include('meta'),
- (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
- (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
- (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
- (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
- (r'(?:enum)\b', Keyword.Declaration, 'enum'),
- (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
-
- # top-level expression
-            # although it is not supported in Haxe, it is common to write
-            # expressions in web pages; the positive lookahead here prevents
-            # an infinite loop at EOF
- (r'(?=.)', Text, 'expr-statement'),
- ],
-
- # space/tab/comment/preproc
- 'spaces': [
- (r'\s+', Whitespace),
- (r'//[^\n\r]*', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
- ],
-
- 'string-single-interpol': [
- (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
- (r'\$\$', String.Escape),
- (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
- include('string-single'),
- ],
-
- 'string-single': [
- (r"'", String.Single, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Single),
- ],
-
- 'string-double': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Double),
- ],
-
- 'string-interpol-close': [
- (r'\$'+ident, String.Interpol),
- (r'\}', String.Interpol, '#pop'),
- ],
-
- 'package': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'import': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\*', Keyword), # wildcard import
- (r'\.', Punctuation, 'import-ident'),
- (r'in', Keyword.Namespace, 'ident'),
- default('#pop'),
- ],
-
- 'import-ident': [
- include('spaces'),
- (r'\*', Keyword, '#pop'), # wildcard import
- (ident, Name.Namespace, '#pop'),
- ],
-
- 'using': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'preproc-error': [
- (r'\s+', Whitespace),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- default('#pop'),
- ],
-
- 'preproc-expr': [
- (r'\s+', Whitespace),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, '#pop'),
-
- # Float
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]+', Number.Float),
- (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- 'preproc-parenthesis': [
- (r'\s+', Whitespace),
- (r'\)', Comment.Preproc, '#pop'),
- default('preproc-expr-in-parenthesis'),
- ],
-
- 'preproc-expr-chain': [
- (r'\s+', Whitespace),
- (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
- default('#pop'),
- ],
-
- # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
- 'preproc-expr-in-parenthesis': [
- (r'\s+', Whitespace),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc,
- ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
-
- # String
- (r"'", String.Single,
- ('#pop', 'preproc-expr-chain', 'string-single')),
- (r'"', String.Double,
- ('#pop', 'preproc-expr-chain', 'string-double')),
- ],
-
- 'abstract': [
- include('spaces'),
- default(('#pop', 'abstract-body', 'abstract-relation',
- 'abstract-opaque', 'type-param-constraint', 'type-name')),
- ],
-
- 'abstract-body': [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'class-body')),
- ],
-
- 'abstract-opaque': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
- default('#pop'),
- ],
-
- 'abstract-relation': [
- include('spaces'),
- (r'(?:to|from)', Keyword.Declaration, 'type'),
- (r',', Punctuation),
- default('#pop'),
- ],
-
- 'meta': [
- include('spaces'),
- (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
- ],
-
- # optional colon
- 'meta-colon': [
- include('spaces'),
- (r':', Name.Decorator, '#pop'),
- default('#pop'),
- ],
-
-        # same as 'ident' but sets the token as Name.Decorator instead of Name
- 'meta-ident': [
- include('spaces'),
- (ident, Name.Decorator, '#pop'),
- ],
-
- 'meta-body': [
- include('spaces'),
- (r'\(', Name.Decorator, ('#pop', 'meta-call')),
- default('#pop'),
- ],
-
- 'meta-call': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- default(('#pop', 'meta-call-sep', 'expr')),
- ],
-
- 'meta-call-sep': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- (r',', Punctuation, ('#pop', 'meta-call')),
- ],
-
- 'typedef': [
- include('spaces'),
- default(('#pop', 'typedef-body', 'type-param-constraint',
- 'type-name')),
- ],
-
- 'typedef-body': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
- ],
-
- 'enum': [
- include('spaces'),
- default(('#pop', 'enum-body', 'bracket-open',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'enum-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
- ],
-
- 'enum-member': [
- include('spaces'),
- (r'\(', Punctuation,
- ('#pop', 'semicolon', 'flag', 'function-param')),
- default(('#pop', 'semicolon', 'flag')),
- ],
-
- 'class': [
- include('spaces'),
- default(('#pop', 'class-body', 'bracket-open', 'extends',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'extends': [
- include('spaces'),
- (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
- (r',', Punctuation), # the comma is made optional here, since haxe2
- # requires the comma but haxe3 does not allow it
- default('#pop'),
- ],
-
- 'bracket-open': [
- include('spaces'),
- (r'\{', Punctuation, '#pop'),
- ],
-
- 'bracket-close': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'class-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (r'(?:static|public|private|override|dynamic|inline|macro)\b',
- Keyword.Declaration),
- default('class-member'),
- ],
-
- 'class-member': [
- include('spaces'),
- (r'(var)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'var')),
- (r'(function)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'class-method')),
- ],
-
- # local function, anonymous or not
- 'function-local': [
- include('spaces'),
- (ident_no_keyword, Name.Function,
- ('#pop', 'optional-expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- default(('#pop', 'optional-expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- ],
-
- 'optional-expr': [
- include('spaces'),
- include('expr'),
- default('#pop'),
- ],
-
- 'class-method': [
- include('spaces'),
- (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
- 'function-param', 'parenthesis-open',
- 'type-param-constraint')),
- ],
-
- # function arguments
- 'function-param': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (ident_no_keyword, Name,
- ('#pop', 'function-param-sep', 'assign', 'flag')),
- ],
-
- 'function-param-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'function-param')),
- ],
-
- 'prop-get-set': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
- default('#pop'),
- ],
-
- 'prop-get-set-opt': [
- include('spaces'),
- (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
- (ident_no_keyword, Text, '#pop'), # custom getter/setter
- ],
-
- 'expr-statement': [
- include('spaces'),
-            # makes the semicolon optional here, just to avoid checking
-            # whether the last token is a bracket or not.
- default(('#pop', 'optional-semicolon', 'expr')),
- ],
-
- 'expr': [
- include('spaces'),
- (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
- 'meta-ident', 'meta-colon')),
- (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
- (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
- (r'(?:static|public|private|override|dynamic|inline)\b',
- Keyword.Declaration),
- (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
- 'function-local')),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
- (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
- (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
- (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
- (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
- (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
- (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
- (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
- (r'(?:if)\b', Keyword, ('#pop', 'if')),
- (r'(?:do)\b', Keyword, ('#pop', 'do')),
- (r'(?:while)\b', Keyword, ('#pop', 'while')),
- (r'(?:for)\b', Keyword, ('#pop', 'for')),
- (r'(?:untyped|throw)\b', Keyword),
- (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
- (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
- (r'(?:continue|break)\b', Keyword, '#pop'),
- (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
- (ident_no_keyword, Name, ('#pop', 'expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
-
- # String
- (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
- (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
-
- # Array
- (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
- ],
-
- 'expr-chain': [
- include('spaces'),
- (r'(?:\+\+|\-\-)', Operator),
- (binop, Operator, ('#pop', 'expr')),
- (r'(?:in)\b', Keyword, ('#pop', 'expr')),
- (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
- (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
- (r'\[', Punctuation, 'array-access'),
- (r'\(', Punctuation, 'call'),
- default('#pop'),
- ],
-
- # macro reification
- 'macro': [
- include('spaces'),
- include('meta'),
- (r':', Punctuation, ('#pop', 'type')),
-
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
- (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')),
- (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
- (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
-
- default(('#pop', 'expr')),
- ],
-
- 'macro-class': [
- (r'\{', Punctuation, ('#pop', 'class-body')),
- include('class')
- ],
-
- # cast can be written as "cast expr" or "cast(expr, type)"
- 'cast': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'cast-type', 'expr')),
- default(('#pop', 'expr')),
- ],
-
- # optionally give a type as the 2nd argument of cast()
- 'cast-type': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- 'catch': [
- include('spaces'),
- (r'(?:catch)\b', Keyword, ('expr', 'function-param',
- 'parenthesis-open')),
- default('#pop'),
- ],
-
- # do-while loop
- 'do': [
- include('spaces'),
- default(('#pop', 'do-while', 'expr')),
- ],
-
- # the while after do
- 'do-while': [
- include('spaces'),
- (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
- 'parenthesis-open')),
- ],
-
- 'while': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'for': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'if': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
- 'parenthesis')),
- ],
-
- 'else': [
- include('spaces'),
- (r'(?:else)\b', Keyword, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- 'switch': [
- include('spaces'),
- default(('#pop', 'switch-body', 'bracket-open', 'expr')),
- ],
-
- 'switch-body': [
- include('spaces'),
- (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'case': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- default(('#pop', 'case-sep', 'case-guard', 'expr')),
- ],
-
- 'case-sep': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'case')),
- ],
-
- 'case-guard': [
- include('spaces'),
- (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
- default('#pop'),
- ],
-
- # optional multiple expr under a case
- 'case-block': [
- include('spaces'),
- (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
- default('#pop'),
- ],
-
- 'new': [
- include('spaces'),
- default(('#pop', 'call', 'parenthesis-open', 'type')),
- ],
-
- 'array-decl': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- default(('#pop', 'array-decl-sep', 'expr')),
- ],
-
- 'array-decl-sep': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'array-decl')),
- ],
-
- 'array-access': [
- include('spaces'),
- default(('#pop', 'array-access-close', 'expr')),
- ],
-
- 'array-access-close': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- ],
-
- 'comma': [
- include('spaces'),
- (r',', Punctuation, '#pop'),
- ],
-
- 'colon': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- ],
-
- 'semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- ],
-
- 'optional-semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- default('#pop'),
- ],
-
-        # identifier that CAN be a Haxe keyword
- 'ident': [
- include('spaces'),
- (ident, Name, '#pop'),
- ],
-
- 'dollar': [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
- default(('#pop', 'expr-chain')),
- ],
-
- 'type-name': [
- include('spaces'),
- (typeid, Name, '#pop'),
- ],
-
- 'type-full-name': [
- include('spaces'),
- (r'\.', Punctuation, 'ident'),
- default('#pop'),
- ],
-
- 'type': [
- include('spaces'),
- (r'\?', Punctuation),
- (ident, Name, ('#pop', 'type-check', 'type-full-name')),
- (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
- (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
- ],
-
- 'type-parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'type')),
- ],
-
- 'type-check': [
- include('spaces'),
- (r'->', Punctuation, ('#pop', 'type')),
- (r'<(?!=)', Punctuation, 'type-param'),
- default('#pop'),
- ],
-
- 'type-struct': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (r'>', Punctuation, ('comma', 'type')),
- (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
- include('class-body'),
- ],
-
- 'type-struct-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-struct')),
- ],
-
- # type-param can be a normal type or a constant literal...
- 'type-param-type': [
- # Float
- (r'\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, '#pop'),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex, '#pop'),
-
- # Array
- (r'\[', Operator, ('#pop', 'array-decl')),
-
- include('type'),
- ],
-
- # type-param part of a type
- # ie. the <A,B> path in Map<A,B>
- 'type-param': [
- include('spaces'),
- default(('#pop', 'type-param-sep', 'type-param-type')),
- ],
-
- 'type-param-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param')),
- ],
-
- # optional type-param that may include constraint
- # ie. <T:Constraint, T2:(ConstraintA,ConstraintB)>
- 'type-param-constraint': [
- include('spaces'),
- (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- default('#pop'),
- ],
-
- 'type-param-constraint-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- ],
-
- # the optional constraint inside type-param
- 'type-param-constraint-flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
- default('#pop'),
- ],
-
- 'type-param-constraint-flag-type': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
- 'type')),
- default(('#pop', 'type')),
- ],
-
- 'type-param-constraint-flag-type-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, 'type'),
- ],
-
-        # a parenthesis expr that contains exactly one expr
- 'parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'flag', 'expr')),
- ],
-
- 'parenthesis-open': [
- include('spaces'),
- (r'\(', Punctuation, '#pop'),
- ],
-
- 'parenthesis-close': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- ],
-
- 'var': [
- include('spaces'),
- (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
- ],
-
- # optional more var decl.
- 'var-sep': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'var')),
- default('#pop'),
- ],
-
- # optional assignment
- 'assign': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- # optional type flag
- 'flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- # colon as part of a ternary operator (?:)
- 'ternary': [
- include('spaces'),
- (r':', Operator, '#pop'),
- ],
-
- # function call
- 'call': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- default(('#pop', 'call-sep', 'expr')),
- ],
-
- # after a call param
- 'call-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'call')),
- ],
-
- # bracket can be block or object
- 'bracket': [
- include('spaces'),
- (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
- ('#pop', 'bracket-check')),
- (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
- (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
- default(('#pop', 'block')),
- ],
-
- 'bracket-check': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
- default(('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
- ],
-
- # code block
- 'block': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default('expr-statement'),
- ],
-
- # object in key-value pairs
- 'object': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
- ],
-
- # a key of an object
- 'ident-or-string': [
- include('spaces'),
- (ident_no_keyword, Name, '#pop'),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- # after a key-value pair in object
- 'object-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'object')),
- ],
-
-
-
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
-
-
-class HxmlLexer(RegexLexer):
- """
- Lexer for haXe build files.
-
- .. versionadded:: 1.6
- """
- name = 'Hxml'
- url = 'https://haxe.org/manual/compiler-usage-hxml.html'
- aliases = ['haxeml', 'hxml']
- filenames = ['*.hxml']
-
- tokens = {
- 'root': [
- # Separator
- (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
- # Compiler switches with one dash
- (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
-            # Compiler switches with two dashes
- (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
- r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
- # Targets and other options that take an argument
- (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
- r'cp|cmd)( +)(.+)',
- bygroups(Punctuation, Keyword, Whitespace, String)),
- # Options that take only numerical arguments
- (r'(-)(swf-version)( +)(\d+)',
- bygroups(Punctuation, Keyword, Whitespace, Number.Integer)),
-            # An option that defines the size, the fps and the background
-            # color of a Flash movie
- (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
- bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
- Punctuation, Number.Integer, Punctuation, Number.Integer,
- Punctuation, Number.Hex)),
-            # Options with two dashes that take arguments
- (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
- r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
-            # Single-line comment; multiline ones are not allowed.
- (r'#.*', Comment.Single)
- ]
- }
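(Aside, not part of the deleted file: a short usage sketch that feeds a made-up hxml snippet through the HxmlLexer defined above, assuming Pygments is installed.)

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.haxe import HxmlLexer

# Invented build-file content; it just exercises the switch/argument rules above.
sample = '''# build the JS target
-cp src
-main Main
--js-namespace demo
-js bin/app.js
'''

print(highlight(sample, HxmlLexer(), TerminalFormatter()))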
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/hdl.py b/venv/lib/python3.11/site-packages/pygments/lexers/hdl.py
deleted file mode 100644
index 319ec93..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/hdl.py
+++ /dev/null
@@ -1,465 +0,0 @@
-"""
- pygments.lexers.hdl
- ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for hardware description languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, include, using, this, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
-
-
-class VerilogLexer(RegexLexer):
- """
- For verilog source code with preprocessor directives.
-
- .. versionadded:: 1.4
- """
- name = 'verilog'
- aliases = ['verilog', 'v']
- filenames = ['*.v']
- mimetypes = ['text/x-verilog']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- (r'^\s*`define', Comment.Preproc, 'macro'),
- (r'\s+', Whitespace),
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'[{}#@]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
- (r'([0-9]+)|(\'b)[01]+', Number.Bin),
- (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
- (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
- (r'\'[01xz]', Number),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;\']', Punctuation),
- (r'`[a-zA-Z_]\w*', Name.Constant),
-
- (r'^(\s*)(package)(\s+)', bygroups(Whitespace, Keyword.Namespace, Text)),
- (r'^(\s*)(import)(\s+)', bygroups(Whitespace, Keyword.Namespace, Text),
- 'import'),
-
- (words((
- 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
- 'assign', 'automatic', 'begin', 'break', 'buf', 'bufif0', 'bufif1',
- 'case', 'casex', 'casez', 'cmos', 'const', 'continue', 'deassign',
- 'default', 'defparam', 'disable', 'do', 'edge', 'else', 'end', 'endcase',
- 'endfunction', 'endgenerate', 'endmodule', 'endpackage', 'endprimitive',
- 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'final', 'for',
- 'force', 'forever', 'fork', 'function', 'generate', 'genvar', 'highz0',
- 'highz1', 'if', 'initial', 'inout', 'input', 'integer', 'join', 'large',
- 'localparam', 'macromodule', 'medium', 'module', 'nand', 'negedge',
- 'nmos', 'nor', 'not', 'notif0', 'notif1', 'or', 'output', 'packed',
- 'parameter', 'pmos', 'posedge', 'primitive', 'pull0', 'pull1',
- 'pulldown', 'pullup', 'rcmos', 'ref', 'release', 'repeat', 'return',
- 'rnmos', 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 'scalared', 'signed',
- 'small', 'specify', 'specparam', 'strength', 'string', 'strong0',
- 'strong1', 'struct', 'table', 'task', 'tran', 'tranif0', 'tranif1',
- 'type', 'typedef', 'unsigned', 'var', 'vectored', 'void', 'wait',
- 'weak0', 'weak1', 'while', 'xnor', 'xor'), suffix=r'\b'),
- Keyword),
-
- (words((
- 'accelerate', 'autoexpand_vectornets', 'celldefine', 'default_nettype',
- 'else', 'elsif', 'endcelldefine', 'endif', 'endprotect', 'endprotected',
- 'expand_vectornets', 'ifdef', 'ifndef', 'include', 'noaccelerate',
- 'noexpand_vectornets', 'noremove_gatenames', 'noremove_netnames',
- 'nounconnected_drive', 'protect', 'protected', 'remove_gatenames',
- 'remove_netnames', 'resetall', 'timescale', 'unconnected_drive',
- 'undef'), prefix=r'`', suffix=r'\b'),
- Comment.Preproc),
-
- (words((
- 'bits', 'bitstoreal', 'bitstoshortreal', 'countdrivers', 'display', 'fclose',
- 'fdisplay', 'finish', 'floor', 'fmonitor', 'fopen', 'fstrobe', 'fwrite',
- 'getpattern', 'history', 'incsave', 'input', 'itor', 'key', 'list', 'log',
- 'monitor', 'monitoroff', 'monitoron', 'nokey', 'nolog', 'printtimescale',
- 'random', 'readmemb', 'readmemh', 'realtime', 'realtobits', 'reset',
- 'reset_count', 'reset_value', 'restart', 'rtoi', 'save', 'scale', 'scope',
- 'shortrealtobits', 'showscopes', 'showvariables', 'showvars', 'sreadmemb',
- 'sreadmemh', 'stime', 'stop', 'strobe', 'time', 'timeformat', 'write'),
- prefix=r'\$', suffix=r'\b'),
- Name.Builtin),
-
- (words((
- 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
- 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
-                'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
- 'shortreal', 'real', 'realtime'), suffix=r'\b'),
- Keyword.Type),
- (r'[a-zA-Z_]\w*:(?!:)', Name.Label),
- (r'\$?[a-zA-Z_]\w*', Name),
- (r'\\(\S+)', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Whitespace, '#pop'),
- ],
- 'import': [
- (r'[\w:]+\*?', Name.Namespace, '#pop')
- ]
- }
-
- def analyse_text(text):
- """Verilog code will use one of reg/wire/assign for sure, and that
- is not common elsewhere."""
- result = 0
- if 'reg' in text:
- result += 0.1
- if 'wire' in text:
- result += 0.1
- if 'assign' in text:
- result += 0.1
-
- return result
-
-
-class SystemVerilogLexer(RegexLexer):
- """
-    Extends the Verilog lexer to recognise all SystemVerilog keywords from the
-    IEEE 1800-2009 standard.
-
- .. versionadded:: 1.5
- """
- name = 'systemverilog'
- aliases = ['systemverilog', 'sv']
- filenames = ['*.sv', '*.svh']
- mimetypes = ['text/x-systemverilog']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- (r'^(\s*)(`define)', bygroups(Whitespace, Comment.Preproc), 'macro'),
- (r'^(\s*)(package)(\s+)', bygroups(Whitespace, Keyword.Namespace, Whitespace)),
- (r'^(\s*)(import)(\s+)', bygroups(Whitespace, Keyword.Namespace, Whitespace), 'import'),
-
- (r'\s+', Whitespace),
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'[{}#@]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-
- (r'([1-9][_0-9]*)?\s*\'[sS]?[bB]\s*[xXzZ?01][_xXzZ?01]*',
- Number.Bin),
- (r'([1-9][_0-9]*)?\s*\'[sS]?[oO]\s*[xXzZ?0-7][_xXzZ?0-7]*',
- Number.Oct),
- (r'([1-9][_0-9]*)?\s*\'[sS]?[dD]\s*[xXzZ?0-9][_xXzZ?0-9]*',
- Number.Integer),
- (r'([1-9][_0-9]*)?\s*\'[sS]?[hH]\s*[xXzZ?0-9a-fA-F][_xXzZ?0-9a-fA-F]*',
- Number.Hex),
-
- (r'\'[01xXzZ]', Number),
- (r'[0-9][_0-9]*', Number.Integer),
-
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (words(('inside', 'dist'), suffix=r'\b'), Operator.Word),
-
- (r'[()\[\],.;\'$]', Punctuation),
- (r'`[a-zA-Z_]\w*', Name.Constant),
-
- (words((
- 'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
- 'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
- 'before', 'begin', 'bind', 'bins', 'binsof', 'break', 'buf',
- 'bufif0', 'bufif1', 'case', 'casex', 'casez', 'cell',
- 'checker', 'clocking', 'cmos', 'config',
- 'constraint', 'context', 'continue', 'cover', 'covergroup',
- 'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design',
- 'disable', 'do', 'edge', 'else', 'end', 'endcase',
- 'endchecker', 'endclocking', 'endconfig', 'endfunction',
- 'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage',
- 'endprimitive', 'endprogram', 'endproperty', 'endsequence',
- 'endspecify', 'endtable', 'endtask', 'enum', 'eventually',
- 'expect', 'export', 'extern', 'final', 'first_match',
- 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function',
- 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff',
- 'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'implements', 'import',
- 'incdir', 'include', 'initial', 'inout', 'input',
- 'instance', 'interconnect', 'interface', 'intersect', 'join',
- 'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
- 'local', 'localparam', 'macromodule', 'matches',
- 'medium', 'modport', 'module', 'nand', 'negedge', 'nettype', 'new', 'nexttime',
- 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
- 'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
- 'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
- 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect',
- 'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase',
- 'randsequence', 'rcmos', 'ref',
- 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
- 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually',
- 's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence',
- 'showcancelled', 'small', 'soft', 'solve',
- 'specify', 'specparam', 'static', 'strong', 'strong0',
- 'strong1', 'struct', 'super', 'sync_accept_on',
- 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
- 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1',
- 'typedef', 'union', 'unique', 'unique0', 'until',
- 'until_with', 'untyped', 'use', 'vectored',
- 'virtual', 'wait', 'wait_order', 'weak', 'weak0',
- 'weak1', 'while', 'wildcard', 'with', 'within',
- 'xnor', 'xor'),
- suffix=r'\b'),
- Keyword),
-
- (r'(class)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(extends)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(endclass\b)(?:(\s*)(:)(\s*)([a-zA-Z_]\w*))?',
- bygroups(Keyword.Declaration, Whitespace, Punctuation, Whitespace, Name.Class)),
-
- (words((
- # Variable types
- 'bit', 'byte', 'chandle', 'const', 'event', 'int', 'integer',
- 'logic', 'longint', 'real', 'realtime', 'reg', 'shortint',
- 'shortreal', 'signed', 'string', 'time', 'type', 'unsigned',
- 'var', 'void',
- # Net types
- 'supply0', 'supply1', 'tri', 'triand', 'trior', 'trireg',
- 'tri0', 'tri1', 'uwire', 'wand', 'wire', 'wor'),
- suffix=r'\b'),
- Keyword.Type),
-
- (words((
- '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine',
- '`default_nettype', '`define', '`else', '`elsif', '`end_keywords',
- '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
- '`line', '`nounconnected_drive', '`pragma', '`resetall',
- '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
- suffix=r'\b'),
- Comment.Preproc),
-
- (words((
- # Simulation control tasks (20.2)
- '$exit', '$finish', '$stop',
- # Simulation time functions (20.3)
- '$realtime', '$stime', '$time',
- # Timescale tasks (20.4)
- '$printtimescale', '$timeformat',
- # Conversion functions
- '$bitstoreal', '$bitstoshortreal', '$cast', '$itor',
- '$realtobits', '$rtoi', '$shortrealtobits', '$signed',
- '$unsigned',
- # Data query functions (20.6)
- '$bits', '$isunbounded', '$typename',
- # Array query functions (20.7)
- '$dimensions', '$high', '$increment', '$left', '$low', '$right',
- '$size', '$unpacked_dimensions',
- # Math functions (20.8)
- '$acos', '$acosh', '$asin', '$asinh', '$atan', '$atan2',
- '$atanh', '$ceil', '$clog2', '$cos', '$cosh', '$exp', '$floor',
- '$hypot', '$ln', '$log10', '$pow', '$sin', '$sinh', '$sqrt',
- '$tan', '$tanh',
- # Bit vector system functions (20.9)
- '$countbits', '$countones', '$isunknown', '$onehot', '$onehot0',
- # Severity tasks (20.10)
- '$info', '$error', '$fatal', '$warning',
- # Assertion control tasks (20.12)
- '$assertcontrol', '$assertfailoff', '$assertfailon',
- '$assertkill', '$assertnonvacuouson', '$assertoff', '$asserton',
- '$assertpassoff', '$assertpasson', '$assertvacuousoff',
- # Sampled value system functions (20.13)
- '$changed', '$changed_gclk', '$changing_gclk', '$falling_gclk',
- '$fell', '$fell_gclk', '$future_gclk', '$past', '$past_gclk',
- '$rising_gclk', '$rose', '$rose_gclk', '$sampled', '$stable',
- '$stable_gclk', '$steady_gclk',
- # Coverage control functions (20.14)
- '$coverage_control', '$coverage_get', '$coverage_get_max',
- '$coverage_merge', '$coverage_save', '$get_coverage',
- '$load_coverage_db', '$set_coverage_db_name',
- # Probabilistic distribution functions (20.15)
- '$dist_chi_square', '$dist_erlang', '$dist_exponential',
- '$dist_normal', '$dist_poisson', '$dist_t', '$dist_uniform',
- '$random',
- # Stochastic analysis tasks and functions (20.16)
- '$q_add', '$q_exam', '$q_full', '$q_initialize', '$q_remove',
- # PLA modeling tasks (20.17)
- '$async$and$array', '$async$and$plane', '$async$nand$array',
- '$async$nand$plane', '$async$nor$array', '$async$nor$plane',
- '$async$or$array', '$async$or$plane', '$sync$and$array',
- '$sync$and$plane', '$sync$nand$array', '$sync$nand$plane',
- '$sync$nor$array', '$sync$nor$plane', '$sync$or$array',
- '$sync$or$plane',
- # Miscellaneous tasks and functions (20.18)
- '$system',
- # Display tasks (21.2)
- '$display', '$displayb', '$displayh', '$displayo', '$monitor',
- '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
- '$monitoron', '$strobe', '$strobeb', '$strobeh', '$strobeo',
- '$write', '$writeb', '$writeh', '$writeo',
- # File I/O tasks and functions (21.3)
- '$fclose', '$fdisplay', '$fdisplayb', '$fdisplayh',
- '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc', '$fgets',
- '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen',
- '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb',
- '$fstrobeh', '$fstrobeo', '$ftell', '$fwrite', '$fwriteb',
- '$fwriteh', '$fwriteo', '$rewind', '$sformat', '$sformatf',
- '$sscanf', '$swrite', '$swriteb', '$swriteh', '$swriteo',
- '$ungetc',
- # Memory load tasks (21.4)
- '$readmemb', '$readmemh',
- # Memory dump tasks (21.5)
- '$writememb', '$writememh',
- # Command line input (21.6)
- '$test$plusargs', '$value$plusargs',
- # VCD tasks (21.7)
- '$dumpall', '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff',
- '$dumpon', '$dumpports', '$dumpportsall', '$dumpportsflush',
- '$dumpportslimit', '$dumpportsoff', '$dumpportson', '$dumpvars',
- ), suffix=r'\b'),
- Name.Builtin),
-
- (r'[a-zA-Z_]\w*:(?!:)', Name.Label),
- (r'\$?[a-zA-Z_]\w*', Name),
- (r'\\(\S+)', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?$', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Whitespace, '#pop'),
- ],
- 'import': [
- (r'[\w:]+\*?', Name.Namespace, '#pop')
- ]
- }
-
-
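For reference, the lexer above plugs into the standard Pygments pipeline like any other; a short usage sketch assuming a stock install (the SystemVerilog one-liner is made up):

from pygments import highlight
from pygments.lexers import SystemVerilogLexer
from pygments.formatters import TerminalFormatter

sv = 'module m; logic [7:0] data; initial $display("hello"); endmodule'
# highlight() runs the token rules defined above and feeds the resulting
# stream to the chosen formatter.
print(highlight(sv, SystemVerilogLexer(), TerminalFormatter()))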
-class VhdlLexer(RegexLexer):
- """
- For VHDL source code.
-
- .. versionadded:: 1.5
- """
- name = 'vhdl'
- aliases = ['vhdl']
- filenames = ['*.vhdl', '*.vhd']
- mimetypes = ['text/x-vhdl']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'--.*?$', Comment.Single),
- (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r"'[a-z_]\w*", Name.Attribute),
- (r'[()\[\],.;\']', Punctuation),
- (r'"[^\n\\"]*"', String),
-
- (r'(library)(\s+)([a-z_]\w*)',
- bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'(use)(\s+)(entity)', bygroups(Keyword, Whitespace, Keyword)),
- (r'(use)(\s+)([a-z_][\w.]*\.)(all)',
- bygroups(Keyword, Whitespace, Name.Namespace, Keyword)),
- (r'(use)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'(std|ieee)(\.[a-z_]\w*)',
- bygroups(Name.Namespace, Name.Namespace)),
- (words(('std', 'ieee', 'work'), suffix=r'\b'),
- Name.Namespace),
- (r'(entity|component)(\s+)([a-z_]\w*)',
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)'
- r'(of)(\s+)([a-z_]\w*)(\s+)(is)',
- bygroups(Keyword, Whitespace, Name.Class, Whitespace, Keyword, Whitespace,
- Name.Class, Whitespace, Keyword)),
- (r'([a-z_]\w*)(:)(\s+)(process|for)',
- bygroups(Name.Class, Operator, Whitespace, Keyword)),
- (r'(end)(\s+)', bygroups(using(this), Whitespace), 'endblock'),
-
- include('types'),
- include('keywords'),
- include('numbers'),
-
- (r'[a-z_]\w*', Name),
- ],
- 'endblock': [
- include('keywords'),
- (r'[a-z_]\w*', Name.Class),
- (r'\s+', Whitespace),
- (r';', Punctuation, '#pop'),
- ],
- 'types': [
- (words((
- 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time',
- 'delay_length', 'natural', 'positive', 'string', 'bit_vector',
- 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector',
- 'std_logic', 'std_logic_vector', 'signed', 'unsigned'), suffix=r'\b'),
- Keyword.Type),
- ],
- 'keywords': [
- (words((
- 'abs', 'access', 'after', 'alias', 'all', 'and',
- 'architecture', 'array', 'assert', 'attribute', 'begin', 'block',
- 'body', 'buffer', 'bus', 'case', 'component', 'configuration',
- 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end',
- 'entity', 'exit', 'file', 'for', 'function', 'generate',
- 'generic', 'group', 'guarded', 'if', 'impure', 'in',
- 'inertial', 'inout', 'is', 'label', 'library', 'linkage',
- 'literal', 'loop', 'map', 'mod', 'nand', 'new',
- 'next', 'nor', 'not', 'null', 'of', 'on',
- 'open', 'or', 'others', 'out', 'package', 'port',
- 'postponed', 'procedure', 'process', 'pure', 'range', 'record',
- 'register', 'reject', 'rem', 'return', 'rol', 'ror', 'select',
- 'severity', 'signal', 'shared', 'sla', 'sll', 'sra',
- 'srl', 'subtype', 'then', 'to', 'transport', 'type',
- 'units', 'until', 'use', 'variable', 'wait', 'when',
- 'while', 'with', 'xnor', 'xor'), suffix=r'\b'),
- Keyword),
- ],
- 'numbers': [
- (r'\d{1,2}#[0-9a-f_]+#?', Number.Integer),
- (r'\d+', Number.Integer),
- (r'(\d+\.\d*|\.\d+|\d+)E[+-]?\d+', Number.Float),
- (r'X"[0-9a-f_]+"', Number.Hex),
- (r'O"[0-7_]+"', Number.Oct),
- (r'B"[01_]+"', Number.Bin),
- ],
- }
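All of the keyword tables in these HDL lexers go through the words() helper, which turns a tuple of literals into a single optimized alternation via pygments.regexopt. A small sketch of that mechanism (the literal tuple here is arbitrary):

from pygments.lexer import words
from pygments.regexopt import regex_opt

w = words(('reg', 'wire', 'assign'), suffix=r'\b')
# words.get() defers to regex_opt(), which folds the literals into one
# grouped pattern; the exact output shape can vary between versions.
print(w.get())
print(regex_opt(['reg', 'wire', 'assign'], suffix=r'\b'))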
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/hexdump.py b/venv/lib/python3.11/site-packages/pygments/lexers/hexdump.py
deleted file mode 100644
index 9672fd4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/hexdump.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""
- pygments.lexers.hexdump
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for hexadecimal dumps.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
-from pygments.token import Name, Number, String, Punctuation, Whitespace
-
-__all__ = ['HexdumpLexer']
-
-
-class HexdumpLexer(RegexLexer):
- """
- For typical hex dump output formats produced by the UNIX and GNU/Linux tools
- ``hexdump``, ``hd``, ``hexcat``, ``od`` and ``xxd``, and the DOS tool ``DEBUG``. For example:
-
- .. sourcecode:: hexdump
-
- 00000000 7f 45 4c 46 02 01 01 00 00 00 00 00 00 00 00 00 |.ELF............|
- 00000010 02 00 3e 00 01 00 00 00 c5 48 40 00 00 00 00 00 |..>......H@.....|
-
- The specific supported formats are the outputs of:
-
- * ``hexdump FILE``
- * ``hexdump -C FILE`` -- the `canonical` format used in the example.
- * ``hd FILE`` -- same as ``hexdump -C FILE``.
- * ``hexcat FILE``
- * ``od -t x1z FILE``
- * ``xxd FILE``
- * ``DEBUG.EXE FILE.COM`` and entering ``d`` at the prompt.
-
- .. versionadded:: 2.1
- """
- name = 'Hexdump'
- aliases = ['hexdump']
-
- hd = r'[0-9A-Ha-h]'
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- include('offset'),
- (r'('+hd+r'{2})(\-)('+hd+r'{2})',
- bygroups(Number.Hex, Punctuation, Number.Hex)),
- (hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\>)(.{16})(\<)$',
- bygroups(Whitespace, Punctuation, String, Punctuation), 'bracket-strings'),
- (r'(\s{2,3})(\|)(.{16})(\|)$',
- bygroups(Whitespace, Punctuation, String, Punctuation), 'piped-strings'),
- (r'(\s{2,3})(\>)(.{1,15})(\<)$',
- bygroups(Whitespace, Punctuation, String, Punctuation)),
- (r'(\s{2,3})(\|)(.{1,15})(\|)$',
- bygroups(Whitespace, Punctuation, String, Punctuation)),
- (r'(\s{2,3})(.{1,15})$', bygroups(Whitespace, String)),
- (r'(\s{2,3})(.{16}|.{20})$', bygroups(Whitespace, String), 'nonpiped-strings'),
- (r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- 'offset': [
- (r'^('+hd+'+)(:)', bygroups(Name.Label, Punctuation), 'offset-mode'),
- (r'^'+hd+'+', Name.Label),
- ],
- 'offset-mode': [
- (r'\s', Whitespace, '#pop'),
- (hd+'+', Name.Label),
- (r':', Punctuation)
- ],
- 'piped-strings': [
- (r'\n', Whitespace),
- include('offset'),
- (hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\|)(.{1,16})(\|)$',
- bygroups(Whitespace, Punctuation, String, Punctuation)),
- (r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- 'bracket-strings': [
- (r'\n', Whitespace),
- include('offset'),
- (hd+r'{2}', Number.Hex),
- (r'(\s{2,3})(\>)(.{1,16})(\<)$',
- bygroups(Whitespace, Punctuation, String, Punctuation)),
- (r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- 'nonpiped-strings': [
- (r'\n', Whitespace),
- include('offset'),
- (r'('+hd+r'{2})(\-)('+hd+r'{2})',
- bygroups(Number.Hex, Punctuation, Number.Hex)),
- (hd+r'{2}', Number.Hex),
- (r'(\s{19,})(.{1,20}?)$', bygroups(Whitespace, String)),
- (r'(\s{2,3})(.{1,20})$', bygroups(Whitespace, String)),
- (r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- }
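A quick way to exercise the rules above is to tokenize a single canonical ``hexdump -C`` line; a sketch assuming a regular Pygments install:

from pygments.lexers import HexdumpLexer

line = '00000000  7f 45 4c 46 02 01 01 00  00 00 00 00 00 00 00 00  |.ELF............|\n'
# The offset comes back as Name.Label, byte pairs as Number.Hex, and the
# ASCII column as String between two Punctuation pipes.
for ttype, value in HexdumpLexer().get_tokens(line):
    print(ttype, repr(value))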
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/html.py b/venv/lib/python3.11/site-packages/pygments/lexers/html.py
deleted file mode 100644
index 27fb753..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/html.py
+++ /dev/null
@@ -1,623 +0,0 @@
-"""
- pygments.lexers.html
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for HTML, XML and related markup.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- default, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation, Whitespace
-from pygments.util import looks_like_xml, html_doctype_matches
-
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.jvm import ScalaLexer
-from pygments.lexers.css import CssLexer, _indentation, _starts_block
-from pygments.lexers.ruby import RubyLexer
-
-__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
- 'ScamlLexer', 'PugLexer', 'UrlEncodedLexer']
-
-
-class HtmlLexer(RegexLexer):
- """
- For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS are highlighted
- by the appropriate lexers.
- """
-
- name = 'HTML'
- url = 'https://html.spec.whatwg.org/'
- aliases = ['html']
- filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
- mimetypes = ['text/html', 'application/xhtml+xml']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- (r'<!--.*?-->', Comment.Multiline),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'(<)(\s*)(script)(\s*)',
- bygroups(Punctuation, Text, Name.Tag, Text),
- ('script-content', 'tag')),
- (r'(<)(\s*)(style)(\s*)',
- bygroups(Punctuation, Text, Name.Tag, Text),
- ('style-content', 'tag')),
- # note: this allows tag names not used in HTML like <x:with-dash>,
- # this is to support yet-unknown template engines and the like
- (r'(<)(\s*)([\w:.-]+)',
- bygroups(Punctuation, Text, Name.Tag), 'tag'),
- (r'(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation)),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'([\w:-]+\s*)(=)(\s*)', bygroups(Name.Attribute, Operator, Text),
- 'attr'),
- (r'[\w:-]+', Name.Attribute),
- (r'(/?)(\s*)(>)', bygroups(Punctuation, Text, Punctuation), '#pop'),
- ],
- 'script-content': [
- (r'(<)(\s*)(/)(\s*)(script)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation), '#pop'),
- (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
- # fallback cases for when there is no closing script tag
- # first look for newline and then go back into root state
- # if that fails just read the rest of the file
- # this is similar to the error handling logic in lexer.py
- (r'.+?\n', using(JavascriptLexer), '#pop'),
- (r'.+', using(JavascriptLexer), '#pop'),
- ],
- 'style-content': [
- (r'(<)(\s*)(/)(\s*)(style)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation), '#pop'),
- (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
- # fallback cases for when there is no closing style tag
- # first look for newline and then go back into root state
- # if that fails just read the rest of the file
- # this is similar to the error handling logic in lexer.py
- (r'.+?\n', using(CssLexer), '#pop'),
- (r'.+', using(CssLexer), '#pop'),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if html_doctype_matches(text):
- return 0.5
-
-
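The script-content and style-content states above hand the embedded code to JavascriptLexer and CssLexer through using(), so their token types appear inline in the HTML stream. A brief sketch of the effect (the document string is invented):

from pygments.lexers import HtmlLexer

doc = '<p>hi</p><script>var x = 1;</script>'
# Tokens for the <script> body come from JavascriptLexer (e.g. 'var' as a
# declaration keyword) rather than being lumped into plain text.
for ttype, value in HtmlLexer().get_tokens(doc):
    print(ttype, repr(value))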
-class DtdLexer(RegexLexer):
- """
- A lexer for DTDs (Document Type Definitions).
-
- .. versionadded:: 1.5
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- name = 'DTD'
- aliases = ['dtd']
- filenames = ['*.dtd']
- mimetypes = ['application/xml-dtd']
-
- tokens = {
- 'root': [
- include('common'),
-
- (r'(<!ELEMENT)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'element'),
- (r'(<!ATTLIST)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'attlist'),
- (r'(<!ENTITY)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Entity), 'entity'),
- (r'(<!NOTATION)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'notation'),
- (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
- bygroups(Keyword, Name.Entity, Text, Keyword)),
-
- (r'(<!DOCTYPE)(\s+)([^>\s]+)',
- bygroups(Keyword, Text, Name.Tag)),
- (r'PUBLIC|SYSTEM', Keyword.Constant),
- (r'[\[\]>]', Keyword),
- ],
-
- 'common': [
- (r'\s+', Text),
- (r'(%|&)[^;]*;', Name.Entity),
- ('<!--', Comment, 'comment'),
- (r'[(|)*,?+]', Operator),
- (r'"[^"]*"', String.Double),
- (r'\'[^\']*\'', String.Single),
- ],
-
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
-
- 'element': [
- include('common'),
- (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Tag),
- (r'>', Keyword, '#pop'),
- ],
-
- 'attlist': [
- include('common'),
- (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
- Keyword.Constant),
- (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
- (r'xml:space|xml:lang', Keyword.Reserved),
- (r'[^>\s|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
-
- 'entity': [
- include('common'),
- (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Entity),
- (r'>', Keyword, '#pop'),
- ],
-
- 'notation': [
- include('common'),
- (r'SYSTEM|PUBLIC', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if not looks_like_xml(text) and \
- ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
- return 0.8
-
-
-class XmlLexer(RegexLexer):
- """
- Generic lexer for XML (eXtensible Markup Language).
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- name = 'XML'
- aliases = ['xml']
- filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
- '*.wsdl', '*.wsf']
- mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
- 'application/rss+xml', 'application/atom+xml']
-
- tokens = {
- 'root': [
- (r'[^<&\s]+', Text),
- (r'[^<&\S]+', Whitespace),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- (r'<!--.*?-->', Comment.Multiline),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'tag': [
- (r'\s+', Whitespace),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- (r'\s+', Whitespace),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if looks_like_xml(text):
- return 0.45 # less than HTML
-
-
-class XsltLexer(XmlLexer):
- """
- A lexer for XSLT.
-
- .. versionadded:: 0.10
- """
-
- name = 'XSLT'
- aliases = ['xslt']
- filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
- mimetypes = ['application/xsl+xml', 'application/xslt+xml']
-
- EXTRA_KEYWORDS = {
- 'apply-imports', 'apply-templates', 'attribute',
- 'attribute-set', 'call-template', 'choose', 'comment',
- 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
- 'for-each', 'if', 'import', 'include', 'key', 'message',
- 'namespace-alias', 'number', 'otherwise', 'output', 'param',
- 'preserve-space', 'processing-instruction', 'sort',
- 'strip-space', 'stylesheet', 'template', 'text', 'transform',
- 'value-of', 'variable', 'when', 'with-param'
- }
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
- m = re.match('</?xsl:([^>]*)/?>?', value)
-
- if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
- yield index, Keyword, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- if looks_like_xml(text) and '<xsl' in text:
- return 0.8
-
-
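The get_tokens_unprocessed override above is the whole point of this subclass: XML tag tokens whose local name appears in EXTRA_KEYWORDS are re-emitted as Keyword, and everything else passes through. A hedged sketch of that effect (the stylesheet fragment is invented):

from pygments.lexers import XsltLexer
from pygments.token import Keyword

src = '<xsl:template match="/"><xsl:value-of select="."/></xsl:template>'
# Tags such as <xsl:template> are expected to come back as Keyword rather
# than Name.Tag; attribute and string tokens are untouched.
print([v for _, t, v in XsltLexer().get_tokens_unprocessed(src) if t is Keyword])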
-class HamlLexer(ExtendedRegexLexer):
- """
- For Haml markup.
-
- .. versionadded:: 1.3
- """
-
- name = 'Haml'
- aliases = ['haml']
- filenames = ['*.haml']
- mimetypes = ['text/x-haml']
-
- flags = re.IGNORECASE
- # Haml can include " |\n" anywhere,
- # which is ignored and used to wrap long lines.
- # To accommodate this, use this custom faux dot instead.
- _dot = r'(?: \|\n(?=.* \|)|.)'
-
- # In certain places, a comma at the end of the line
- # allows line wrapping as well.
- _comma_dot = r'(?:,\s*\n|' + _dot + ')'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'haml-comment-block'), '#pop'),
- (r'(-)(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + r')*?\}', using(RubyLexer)),
- (r'\[' + _dot + r'*?\]', using(RubyLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'haml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
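The _dot pattern defined at the top of this class is what implements the wrapping rule described in its comment: unlike a plain '.', it also consumes a trailing ' |' plus newline when the next line keeps wrapping. A standalone regex sketch, independent of Pygments:

import re

_dot = r'(?: \|\n(?=.* \|)|.)'   # same pattern as in HamlLexer
wrapped = 'some text |\nmore text |\n'
# The lookahead lets the match cross the first newline because the next
# line still ends with ' |'; it stops before the final newline.
print(repr(re.match('(' + _dot + '*)', wrapped).group(1)))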
-
-
-class ScamlLexer(ExtendedRegexLexer):
- """
- For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
-
- .. versionadded:: 1.4
- """
-
- name = 'Scaml'
- aliases = ['scaml']
- filenames = ['*.scaml']
- mimetypes = ['text/x-scaml']
-
- flags = re.IGNORECASE
- # Scaml does not yet support the " |\n" notation to
- # wrap long lines. Once it does, use the custom faux
- # dot instead.
- # _dot = r'(?: \|\n(?=.* \|)|.)'
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + r'*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-class PugLexer(ExtendedRegexLexer):
- """
- For Pug markup.
- Pug is a variant of Scaml, see:
- http://scalate.fusesource.org/documentation/scaml-reference.html
-
- .. versionadded:: 1.4
- """
-
- name = 'Pug'
- aliases = ['pug', 'jade']
- filenames = ['*.pug', '*.jade']
- mimetypes = ['text/x-pug', 'text/x-jade']
-
- flags = re.IGNORECASE
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)), 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- (r'\|', Text, 'eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + r'*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-JadeLexer = PugLexer # compat
-
-
-class UrlEncodedLexer(RegexLexer):
- """
- Lexer for urlencoded (``application/x-www-form-urlencoded``) data.
-
- .. versionadded:: 2.16
- """
-
- name = 'urlencoded'
- aliases = ['urlencoded']
- mimetypes = ['application/x-www-form-urlencoded']
-
- tokens = {
- 'root': [
- ('([^&=]*)(=)([^=&]*)(&?)', bygroups(Name.Tag, Operator, String, Punctuation)),
- ],
- }
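The single root rule above covers the whole format: each key=value pair splits into Name.Tag, Operator, String and Punctuation tokens. A minimal usage sketch (the query string is made up):

from pygments.lexers import UrlEncodedLexer

for ttype, value in UrlEncodedLexer().get_tokens('a=1&b=hello%20world'):
    print(ttype, repr(value))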
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/idl.py b/venv/lib/python3.11/site-packages/pygments/lexers/idl.py
deleted file mode 100644
index c4d8b31..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/idl.py
+++ /dev/null
@@ -1,285 +0,0 @@
-"""
- pygments.lexers.idl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for IDL.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
- String, Whitespace
-
-__all__ = ['IDLLexer']
-
-
-class IDLLexer(RegexLexer):
- """
- Pygments Lexer for IDL (Interactive Data Language).
-
- .. versionadded:: 1.6
- """
- name = 'IDL'
- url = 'https://www.l3harrisgeospatial.com/Software-Technology/IDL'
- aliases = ['idl']
- filenames = ['*.pro']
- mimetypes = ['text/idl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- _RESERVED = (
- 'and', 'begin', 'break', 'case', 'common', 'compile_opt',
- 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
- 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
- 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
- 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
- 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
- 'repeat', 'switch', 'then', 'until', 'while', 'xor')
- """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
-
- _BUILTIN_LIB = (
- 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
- 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
- 'arg_present', 'array_equal', 'array_indices', 'arrow',
- 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
- 'a_correlate', 'bandpass_filter', 'bandreject_filter',
- 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
- 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
- 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
- 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
- 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
- 'bytscl', 'caldat', 'calendar', 'call_external',
- 'call_function', 'call_method', 'call_procedure', 'canny',
- 'catch', 'cd', r'cdf_\w*', 'ceil', 'chebyshev',
- 'check_math',
- 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
- 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
- 'cmyk_convert', 'colorbar', 'colorize_sample',
- 'colormap_applicable', 'colormap_gradient',
- 'colormap_rotation', 'colortable', 'color_convert',
- 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
- 'command_line_args', 'complex', 'complexarr', 'complexround',
- 'compute_mesh_normals', 'cond', 'congrid', 'conj',
- 'constrained_min', 'contour', 'convert_coord', 'convol',
- 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
- 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
- 'create_view', 'crossp', 'crvlength', 'cti_test',
- 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
- 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
- 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
- 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
- 'cw_form', 'cw_fslider', 'cw_light_editor',
- 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
- 'cw_palette_editor', 'cw_palette_editor_get',
- 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
- 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
- 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
- 'define_msgblk', 'define_msgblk_from_file', 'defroi',
- 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
- 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
- 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
- 'dialog_printersetup', 'dialog_printjob',
- 'dialog_read_image', 'dialog_write_image', 'digital_filter',
- 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
- 'dlm_load', 'dlm_register', 'doc_library', 'double',
- 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
- 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
- 'eof', r'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
- 'erode', 'errorplot', 'errplot', 'estimator_filter',
- 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
- 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
- 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
- 'file_dirname', 'file_expand_path', 'file_info',
- 'file_lines', 'file_link', 'file_mkdir', 'file_move',
- 'file_poll_input', 'file_readlink', 'file_same',
- 'file_search', 'file_test', 'file_which', 'findgen',
- 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
- 'fltarr', 'flush', 'format_axis_values', 'free_lun',
- 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
- 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
- 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
- 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
- 'getwindows', 'get_drive_list', 'get_dxf_objects',
- 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
- 'greg2jul', r'grib_\w*', 'grid3', 'griddata',
- 'grid_input', 'grid_tps', 'gs_iter',
- r'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
- 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
- 'hanning', 'hash', r'hdf_\w*', 'heap_free',
- 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
- 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
- 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
- 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
- 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
- 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
- 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
- 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
- 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
- 'image_cont', 'image_statistics', 'imaginary', 'imap',
- 'indgen', 'intarr', 'interpol', 'interpolate',
- 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
- 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
- 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
- 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
- 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
- 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
- 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
- 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
- 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
- 'label_region', 'ladfit', 'laguerre', 'laplacian',
- 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
- 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
- 'la_gm_linear_model', 'la_hqr', 'la_invert',
- 'la_least_squares', 'la_least_square_equality',
- 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
- 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
- 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
- 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
- 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
- 'lngamma', 'lnp_test', 'loadct', 'locale_get',
- 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
- 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
- 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
- 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
- 'map_continents', 'map_grid', 'map_image', 'map_patch',
- 'map_proj_forward', 'map_proj_image', 'map_proj_info',
- 'map_proj_init', 'map_proj_inverse', 'map_set',
- 'matrix_multiply', 'matrix_power', 'max', 'md_test',
- 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
- 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
- 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
- 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
- 'message', 'min', 'min_curve_surf', 'mk_html_help',
- 'modifyct', 'moment', 'morph_close', 'morph_distance',
- 'morph_gradient', 'morph_hitormiss', 'morph_open',
- 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
- r'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
- 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
- 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
- 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
- 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
- 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
- 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
- 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
- 'polar_contour', 'polar_surface', 'poly', 'polyfill',
- 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
- 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
- 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
- 'print', 'printd', 'product', 'profile', 'profiler',
- 'profiles', 'project_vol', 'psafm', 'pseudo',
- 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
- 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
- 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
- 'query_csv', 'query_dicom', 'query_gif', 'query_image',
- 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
- 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
- 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
- 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
- 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
- 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
- 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
- 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
- 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
- 'read_xwd', 'real_part', 'rebin', 'recall_commands',
- 'recon3', 'reduce_colors', 'reform', 'region_grow',
- 'register_cursor', 'regress', 'replicate',
- 'replicate_inplace', 'resolve_all', 'resolve_routine',
- 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
- 'rot', 'rotate', 'round', 'routine_filepath',
- 'routine_info', 'rs_test', 'r_correlate', 'r_test',
- 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
- 'scope_traceback', 'scope_varfetch', 'scope_varname',
- 'search2d', 'search3d', 'sem_create', 'sem_delete',
- 'sem_lock', 'sem_release', 'setenv', 'set_plot',
- 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
- 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
- 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
- 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
- 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
- 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
- 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
- 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
- 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
- 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
- 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
- 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
- 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
- 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
- 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
- 'tanh', 'tek_color', 'temporary', 'tetra_clip',
- 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
- 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
- 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
- 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
- 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
- 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
- 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
- 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
- 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
- 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
- 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
- 'where', 'widget_base', 'widget_button', 'widget_combobox',
- 'widget_control', 'widget_displaycontextmen', 'widget_draw',
- 'widget_droplist', 'widget_event', 'widget_info',
- 'widget_label', 'widget_list', 'widget_propertysheet',
- 'widget_slider', 'widget_tab', 'widget_table',
- 'widget_text', 'widget_tree', 'widget_tree_move',
- 'widget_window', 'wiener_filter', 'window', 'writeu',
- 'write_bmp', 'write_csv', 'write_gif', 'write_image',
- 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
- 'write_png', 'write_ppm', 'write_spr', 'write_srf',
- 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
- 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
- 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
- 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
- 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
- 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
- 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
- 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
- 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
- 'xmtool', 'xobjview', 'xobjview_rotate',
- 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
- 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
- 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
- 'xyouts', 'zoom', 'zoom_24')
- """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
-
- tokens = {
- 'root': [
- (r'(^\s*)(;.*?)(\n)', bygroups(Whitespace, Comment.Single,
- Whitespace)),
- (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
- (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\||\?|:', Operator),
- (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
- (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
- (r'"[^\"]*"', String.Double),
- (r"'[^\']*'", String.Single),
- (r'\b[+\-]?([0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(D|E)?([+\-]?[0-9]+)?\b',
- Number.Float),
- (r'\b\'[+\-]?[0-9A-F]+\'X(U?(S?|L{1,2})|B)\b', Number.Hex),
- (r'\b\'[+\-]?[0-7]+\'O(U?(S?|L{1,2})|B)\b', Number.Oct),
- (r'\b[+\-]?[0-9]+U?L{1,2}\b', Number.Integer.Long),
- (r'\b[+\-]?[0-9]+U?S?\b', Number.Integer),
- (r'\b[+\-]?[0-9]+B\b', Number),
- (r'[ \t]+', Whitespace),
- (r'\n', Whitespace),
- (r'.', Text),
- ]
- }
-
- def analyse_text(text):
- """endelse seems to be unique to IDL, endswitch is rare at least."""
- result = 0
-
- if 'endelse' in text:
- result += 0.2
- if 'endswitch' in text:
- result += 0.01
-
- return result
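Like the other lexers in this diff, IDLLexer registers itself under the aliases above, so it is usually reached through name-based lookup rather than a direct import. A brief sketch, assuming a stock Pygments install (the IDL snippet is invented):

from pygments.lexers import get_lexer_by_name
from pygments.token import Keyword

idl = get_lexer_by_name('idl')
src = 'pro demo\n  if a eq 1 then print, a\nend\n'
# Words from _RESERVED ('pro', 'if', 'eq', 'then', 'end') come out as
# Keyword; 'print' is in _BUILTIN_LIB and becomes Name.Builtin.
print([value for ttype, value in idl.get_tokens(src) if ttype is Keyword])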
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/igor.py b/venv/lib/python3.11/site-packages/pygments/lexers/igor.py
deleted file mode 100644
index b25badb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/igor.py
+++ /dev/null
@@ -1,436 +0,0 @@
-"""
- pygments.lexers.igor
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Igor Pro.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Keyword, Name, String, Whitespace
-
-__all__ = ['IgorLexer']
-
-
-class IgorLexer(RegexLexer):
- """
- Pygments Lexer for Igor Pro procedure files (.ipf).
- See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
-
- .. versionadded:: 2.0
- """
-
- name = 'Igor'
- aliases = ['igor', 'igorpro']
- filenames = ['*.ipf']
- mimetypes = ['text/ipf']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- flowControl = (
- 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
- 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
- 'break', 'continue', 'return', 'AbortOnRTE', 'AbortOnValue'
- )
- types = (
- 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
- 'STRUCT', 'dfref', 'funcref', 'char', 'uchar', 'int16', 'uint16', 'int32',
- 'uint32', 'int64', 'uint64', 'float', 'double'
- )
- keywords = (
- 'override', 'ThreadSafe', 'MultiThread', 'static', 'Proc',
- 'Picture', 'Prompt', 'DoPrompt', 'macro', 'window', 'function', 'end',
- 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu'
- )
- operations = (
- 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio', 'AddMovieFrame',
- 'AddWavesToBoxPlot', 'AddWavesToViolinPlot', 'AdoptFiles', 'APMath', 'Append',
- 'AppendBoxPlot', 'AppendImage', 'AppendLayoutObject', 'AppendMatrixContour',
- 'AppendText', 'AppendToGizmo', 'AppendToGraph', 'AppendToLayout',
- 'AppendToTable', 'AppendViolinPlot', 'AppendXYZContour', 'AutoPositionWindow',
- 'AxonTelegraphFindServers', 'BackgroundInfo', 'Beep', 'BezierToPolygon',
- 'BoundingBall', 'BoxSmooth', 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart',
- 'CheckBox', 'CheckDisplayed', 'ChooseColor', 'Close', 'CloseHelp', 'CloseMovie',
- 'CloseProc', 'ColorScale', 'ColorTab2Wave', 'Concatenate', 'ControlBar',
- 'ControlInfo', 'ControlUpdate', 'ConvertGlobalStringTextEncoding', 'ConvexHull',
- 'Convolve', 'CopyDimLabels', 'CopyFile', 'CopyFolder', 'CopyScales', 'Correlate',
- 'CreateAliasShortcut', 'CreateBrowser', 'Cross', 'CtrlBackground', 'CtrlFIFO',
- 'CtrlNamedBackground', 'Cursor', 'CurveFit', 'CustomControl', 'CWT',
- 'DAQmx_AI_SetupReader', 'DAQmx_AO_SetOutputs', 'DAQmx_CTR_CountEdges',
- 'DAQmx_CTR_OutputPulse', 'DAQmx_CTR_Period', 'DAQmx_CTR_PulseWidth',
- 'DAQmx_DeviceInfo', 'DAQmx_DIO_Config', 'DAQmx_DIO_WriteNewData', 'DAQmx_Scan',
- 'DAQmx_WaveformGen', 'Debugger', 'DebuggerOptions', 'DefaultFont',
- 'DefaultGuiControls', 'DefaultGuiFont', 'DefaultTextEncoding', 'DefineGuide',
- 'DelayUpdate', 'DeleteAnnotations', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
- 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic', 'DisplayProcedure',
- 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow', 'DoXOPIdle', 'DPSS',
- 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine', 'DrawOval', 'DrawPICT',
- 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText', 'DrawUserShape', 'DSPDetrend',
- 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder', 'DWT', 'EdgeStats', 'Edit',
- 'ErrorBars', 'EstimatePeakSizes', 'Execute', 'ExecuteScriptText',
- 'ExperimentInfo', 'ExperimentModified', 'ExportGizmo', 'Extract',
- 'FastGaussTransform', 'FastOp', 'FBinRead', 'FBinWrite', 'FCALL_CallFunction',
- 'FCALL_FreeLibrary', 'FCALL_GetFunctionList', 'FCALL_GetParamTypeList',
- 'FCALL_LoadLibrary', 'FCALL_Version', 'FFT', 'FGetPos', 'FIFOStatus',
- 'FIFO2Wave', 'FilterFIR', 'FilterIIR', 'FindAPeak', 'FindContour',
- 'FindDuplicates', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly',
- 'FindRoots', 'FindSequence', 'FindValue', 'FMaxFlat', 'FPClustering', 'fprintf',
- 'FReadLine', 'FSetPos', 'FStatus', 'FTPCreateDirectory', 'FTPDelete',
- 'FTPDownload', 'FTPUpload', 'FuncFit', 'FuncFitMD', 'GBLoadWave', 'GetAxis',
- 'GetCamera', 'GetFileFolderInfo', 'GetGizmo', 'GetLastUserMenuInfo',
- 'GetMarquee', 'GetMouse', 'GetSelection', 'GetWindow', 'GISCreateVectorLayer',
- 'GISGetRasterInfo', 'GISGetRegisteredFileInfo', 'GISGetVectorLayerInfo',
- 'GISLoadRasterData', 'GISLoadVectorData', 'GISRasterizeVectorData',
- 'GISRegisterFile', 'GISTransformCoords', 'GISUnRegisterFile',
- 'GISWriteFieldData', 'GISWriteGeometryData', 'GISWriteRaster',
- 'GPIBReadBinaryWave2', 'GPIBReadBinary2', 'GPIBReadWave2', 'GPIBRead2',
- 'GPIBWriteBinaryWave2', 'GPIBWriteBinary2', 'GPIBWriteWave2', 'GPIBWrite2',
- 'GPIB2', 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox',
- 'Hanning', 'HCluster', 'HDFInfo', 'HDFReadImage', 'HDFReadSDS', 'HDFReadVset',
- 'HDF5CloseFile', 'HDF5CloseGroup', 'HDF5Control', 'HDF5CreateFile',
- 'HDF5CreateGroup', 'HDF5CreateLink', 'HDF5DimensionScale', 'HDF5Dump',
- 'HDF5DumpErrors', 'HDF5FlushFile', 'HDF5ListAttributes', 'HDF5ListGroup',
- 'HDF5LoadData', 'HDF5LoadGroup', 'HDF5LoadImage', 'HDF5OpenFile',
- 'HDF5OpenGroup', 'HDF5SaveData', 'HDF5SaveGroup', 'HDF5SaveImage',
- 'HDF5UnlinkObject', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
- 'HilbertTransform', 'Histogram', 'ICA', 'IFFT', 'ImageAnalyzeParticles',
- 'ImageBlend', 'ImageBoundaryToMask', 'ImageComposite', 'ImageEdgeDetection',
- 'ImageFileInfo', 'ImageFilter', 'ImageFocus', 'ImageFromXYZ',
- 'ImageGenerateROIMask', 'ImageGLCM', 'ImageHistModification', 'ImageHistogram',
- 'ImageInterpolate', 'ImageLineProfile', 'ImageLoad', 'ImageMorphology',
- 'ImageRegistration', 'ImageRemoveBackground', 'ImageRestore', 'ImageRotate',
- 'ImageSave', 'ImageSeedFill', 'ImageSkeleton3d', 'ImageSnake', 'ImageStats',
- 'ImageThreshold', 'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow',
- 'IndexSort', 'InsertPoints', 'InstantFrequency', 'Integrate', 'IntegrateODE',
- 'Integrate2D', 'Interpolate2', 'Interpolate3D', 'Interp3DPath', 'ITCCloseAll2',
- 'ITCCloseDevice2', 'ITCConfigAllChannels2', 'ITCConfigChannelReset2',
- 'ITCConfigChannelUpload2', 'ITCConfigChannel2', 'ITCFIFOAvailableAll2',
- 'ITCFIFOAvailable2', 'ITCGetAllChannelsConfig2', 'ITCGetChannelConfig2',
- 'ITCGetCurrentDevice2', 'ITCGetDeviceInfo2', 'ITCGetDevices2',
- 'ITCGetErrorString2', 'ITCGetSerialNumber2', 'ITCGetState2', 'ITCGetVersions2',
- 'ITCInitialize2', 'ITCOpenDevice2', 'ITCReadADC2', 'ITCReadDigital2',
- 'ITCReadTimer2', 'ITCSelectDevice2', 'ITCSetDAC2', 'ITCSetGlobals2',
- 'ITCSetModes2', 'ITCSetState2', 'ITCStartAcq2', 'ITCStopAcq2',
- 'ITCUpdateFIFOPositionAll2', 'ITCUpdateFIFOPosition2', 'ITCWriteDigital2',
- 'JCAMPLoadWave', 'JointHistogram', 'JSONXOP_AddTree', 'JSONXOP_AddValue',
- 'JSONXOP_Dump', 'JSONXOP_GetArraySize', 'JSONXOP_GetKeys',
- 'JSONXOP_GetMaxArraySize', 'JSONXOP_GetType', 'JSONXOP_GetValue', 'JSONXOP_New',
- 'JSONXOP_Parse', 'JSONXOP_Release', 'JSONXOP_Remove', 'JSONXOP_Version',
- 'KillBackground', 'KillControl', 'KillDataFolder', 'KillFIFO', 'KillFreeAxis',
- 'KillPath', 'KillPICTs', 'KillStrings', 'KillVariables', 'KillWaves',
- 'KillWindow', 'KMeans', 'Label', 'Layout', 'LayoutPageAction',
- 'LayoutSlideShow', 'Legend', 'LinearFeedbackShiftRegister', 'ListBox',
- 'LoadData', 'LoadPackagePreferences', 'LoadPICT', 'LoadWave', 'Loess',
- 'LombPeriodogram', 'Make', 'MakeIndex', 'MarkPerfTestTime', 'MatrixBalance',
- 'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV', 'MatrixFactor', 'MatrixFilter',
- 'MatrixGaussJ', 'MatrixGLM', 'MatrixInverse', 'MatrixLinearSolve',
- 'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD', 'MatrixLUDTD',
- 'MatrixMultiply', 'MatrixMultiplyAdd', 'MatrixOP', 'MatrixReverseBalance',
- 'MatrixSchur', 'MatrixSolve', 'MatrixSparse', 'MatrixSVBkSub', 'MatrixSVD',
- 'MatrixTranspose', 'MCC_FindServers', 'MeasureStyledText',
- 'MFR_CheckForNewBricklets', 'MFR_CloseResultFile', 'MFR_CreateOverviewTable',
- 'MFR_GetBrickletCount', 'MFR_GetBrickletData', 'MFR_GetBrickletDeployData',
- 'MFR_GetBrickletMetaData', 'MFR_GetBrickletRawData', 'MFR_GetReportTemplate',
- 'MFR_GetResultFileMetaData', 'MFR_GetResultFileName',
- 'MFR_GetVernissageVersion', 'MFR_GetVersion', 'MFR_GetXOPErrorMessage',
- 'MFR_OpenResultFile', 'MLLoadWave', 'Modify', 'ModifyBoxPlot', 'ModifyBrowser',
- 'ModifyCamera', 'ModifyContour', 'ModifyControl', 'ModifyControlList',
- 'ModifyFreeAxis', 'ModifyGizmo', 'ModifyGraph', 'ModifyImage', 'ModifyLayout',
- 'ModifyPanel', 'ModifyProcedure', 'ModifyTable', 'ModifyViolinPlot',
- 'ModifyWaterfall', 'MoveDataFolder', 'MoveFile', 'MoveFolder', 'MoveString',
- 'MoveSubwindow', 'MoveVariable', 'MoveWave', 'MoveWindow', 'MultiTaperPSD',
- 'MultiThreadingControl', 'NC_CloseFile', 'NC_DumpErrors', 'NC_Inquire',
- 'NC_ListAttributes', 'NC_ListObjects', 'NC_LoadData', 'NC_OpenFile',
- 'NeuralNetworkRun', 'NeuralNetworkTrain', 'NewCamera', 'NewDataFolder',
- 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewGizmo', 'NewImage', 'NewLayout',
- 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath', 'NewWaterfall', 'NILoadWave',
- 'NI4882', 'Note', 'Notebook', 'NotebookAction', 'Open', 'OpenHelp',
- 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo',
- 'PauseForUser', 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction',
- 'PlaySound', 'PolygonOp', 'PopupContextualMenu', 'PopupMenu', 'Preferences',
- 'PrimeFactors', 'Print', 'printf', 'PrintGraphs', 'PrintLayout',
- 'PrintNotebook', 'PrintSettings', 'PrintTable', 'Project', 'PulseStats',
- 'PutScrapText', 'pwd', 'Quit', 'RatioFromNumber', 'Redimension', 'Remez',
- 'Remove', 'RemoveContour', 'RemoveFromGizmo', 'RemoveFromGraph',
- 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage', 'RemoveLayoutObjects',
- 'RemovePath', 'Rename', 'RenameDataFolder', 'RenamePath', 'RenamePICT',
- 'RenameWindow', 'ReorderImages', 'ReorderTraces', 'ReplaceText', 'ReplaceWave',
- 'Resample', 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
- 'SaveExperiment', 'SaveGizmoCopy', 'SaveGraphCopy', 'SaveNotebook',
- 'SavePackagePreferences', 'SavePICT', 'SaveTableCopy', 'SetActiveSubwindow',
- 'SetAxis', 'SetBackground', 'SetDashPattern', 'SetDataFolder', 'SetDimLabel',
- 'SetDrawEnv', 'SetDrawLayer', 'SetFileFolderInfo', 'SetFormula',
- 'SetIdlePeriod', 'SetIgorHook', 'SetIgorMenuMode', 'SetIgorOption',
- 'SetMarquee', 'SetProcessSleep', 'SetRandomSeed', 'SetScale', 'SetVariable',
- 'SetWaveLock', 'SetWaveTextEncoding', 'SetWindow', 'ShowIgorMenus', 'ShowInfo',
- 'ShowTools', 'Silent', 'Sleep', 'Slider', 'Smooth', 'SmoothCustom', 'Sort',
- 'SortColumns', 'SoundInRecord', 'SoundInSet', 'SoundInStartChart',
- 'SoundInStatus', 'SoundInStopChart', 'SoundLoadWave', 'SoundSaveWave',
- 'SphericalInterpolate', 'SphericalTriangulate', 'SplitString', 'SplitWave',
- 'sprintf', 'SQLHighLevelOp', 'sscanf', 'Stack', 'StackWindows',
- 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
- 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
- 'StatsCircularCorrelationTest', 'StatsCircularMeans', 'StatsCircularMoments',
- 'StatsCircularTwoSampleTest', 'StatsCochranTest', 'StatsContingencyTable',
- 'StatsDIPTest', 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
- 'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKDE', 'StatsKendallTauTest',
- 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
- 'StatsLinearRegression', 'StatsMultiCorrelationTest', 'StatsNPMCTest',
- 'StatsNPNominalSRTest', 'StatsQuantiles', 'StatsRankCorrelationTest',
- 'StatsResample', 'StatsSample', 'StatsScheffeTest', 'StatsShapiroWilkTest',
- 'StatsSignTest', 'StatsSRTest', 'StatsTTest', 'StatsTukeyTest',
- 'StatsVariancesTest', 'StatsWatsonUSquaredTest', 'StatsWatsonWilliamsTest',
- 'StatsWheelerWatsonTest', 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest',
- 'STFT', 'StructFill', 'StructGet', 'StructPut', 'SumDimension', 'SumSeries',
- 'TabControl', 'Tag', 'TDMLoadData', 'TDMSaveData', 'TextBox', 'TextHistogram',
- 'Text2Bezier', 'ThreadGroupPutDF', 'ThreadStart', 'TickWavesFromAxis', 'Tile',
- 'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid', 'Triangulate3d',
- 'TUFXOP_AcquireLock', 'TUFXOP_Clear', 'TUFXOP_GetStorage', 'TUFXOP_Init',
- 'TUFXOP_ReleaseLock', 'TUFXOP_RunningInMainThread', 'TUFXOP_Version', 'Unwrap',
- 'UnzipFile', 'URLRequest', 'ValDisplay', 'VDTClosePort2', 'VDTGetPortList2',
- 'VDTGetStatus2', 'VDTOpenPort2', 'VDTOperationsPort2', 'VDTReadBinaryWave2',
- 'VDTReadBinary2', 'VDTReadHexWave2', 'VDTReadHex2', 'VDTReadWave2', 'VDTRead2',
- 'VDTTerminalPort2', 'VDTWriteBinaryWave2', 'VDTWriteBinary2',
- 'VDTWriteHexWave2', 'VDTWriteHex2', 'VDTWriteWave2', 'VDTWrite2', 'VDT2',
- 'VISAControl', 'VISARead', 'VISAReadBinary', 'VISAReadBinaryWave',
- 'VISAReadWave', 'VISAWrite', 'VISAWriteBinary', 'VISAWriteBinaryWave',
- 'VISAWriteWave', 'WaveMeanStdv', 'WaveStats', 'WaveTracking', 'WaveTransform',
- 'wfprintf', 'WignerTransform', 'WindowFunction', 'XLLoadWave'
- )
- functions = (
- 'abs', 'acos', 'acosh', 'AddListItem', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD',
- 'alog', 'AnnotationInfo', 'AnnotationList', 'area', 'areaXY', 'asin', 'asinh',
- 'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisLabel', 'AxisList',
- 'AxisValFromPixel', 'AxonTelegraphAGetDataNum', 'AxonTelegraphAGetDataString',
- 'AxonTelegraphAGetDataStruct', 'AxonTelegraphGetDataNum',
- 'AxonTelegraphGetDataString', 'AxonTelegraphGetDataStruct',
- 'AxonTelegraphGetTimeoutMs', 'AxonTelegraphSetTimeoutMs', 'Base64Decode',
- 'Base64Encode', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'beta', 'betai',
- 'BinarySearch', 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise',
- 'cabs', 'CaptureHistory', 'CaptureHistoryStart', 'ceil', 'centerOfMass',
- 'centerOfMassXY', 'cequal', 'char2num', 'chebyshev', 'chebyshevU', 'CheckName',
- 'ChildWindowList', 'CleanupName', 'cmplx', 'cmpstr', 'conj', 'ContourInfo',
- 'ContourNameList', 'ContourNameToWaveRef', 'ContourZ', 'ControlNameList',
- 'ConvertTextEncoding', 'cos', 'cosh', 'cosIntegral', 'cot', 'coth',
- 'CountObjects', 'CountObjectsDFR', 'cpowi', 'CreateDataObjectName',
- 'CreationDate', 'csc', 'csch', 'CsrInfo', 'CsrWave', 'CsrWaveRef', 'CsrXWave',
- 'CsrXWaveRef', 'CTabList', 'DataFolderDir', 'DataFolderExists',
- 'DataFolderList', 'DataFolderRefChanges', 'DataFolderRefsEqual',
- 'DataFolderRefStatus', 'date', 'datetime', 'DateToJulian', 'date2secs',
- 'Dawson', 'defined', 'deltax', 'digamma', 'dilogarithm', 'DimDelta',
- 'DimOffset', 'DimSize', 'ei', 'ellipticE', 'ellipticK', 'enoise', 'equalWaves',
- 'erf', 'erfc', 'erfcw', 'erfcx', 'exists', 'exp', 'expInt', 'expIntegralE1',
- 'expNoise', 'factorial', 'Faddeeva', 'fakedata', 'faverage', 'faverageXY',
- 'fDAQmx_AI_ChannelConfigs', 'fDAQmx_AI_GetReader', 'fDAQmx_AO_UpdateOutputs',
- 'fDAQmx_ConnectTerminals', 'fDAQmx_CTR_Finished', 'fDAQmx_CTR_IsFinished',
- 'fDAQmx_CTR_IsPulseFinished', 'fDAQmx_CTR_ReadCounter',
- 'fDAQmx_CTR_ReadWithOptions', 'fDAQmx_CTR_SetPulseFrequency',
- 'fDAQmx_CTR_Start', 'fDAQmx_DeviceNames', 'fDAQmx_DIO_Finished',
- 'fDAQmx_DIO_PortWidth', 'fDAQmx_DIO_Read', 'fDAQmx_DIO_Write',
- 'fDAQmx_DisconnectTerminals', 'fDAQmx_ErrorString', 'fDAQmx_ExternalCalDate',
- 'fDAQmx_NumAnalogInputs', 'fDAQmx_NumAnalogOutputs', 'fDAQmx_NumCounters',
- 'fDAQmx_NumDIOPorts', 'fDAQmx_ReadChan', 'fDAQmx_ReadNamedChan',
- 'fDAQmx_ResetDevice', 'fDAQmx_ScanGetAvailable', 'fDAQmx_ScanGetNextIndex',
- 'fDAQmx_ScanStart', 'fDAQmx_ScanStop', 'fDAQmx_ScanWait',
- 'fDAQmx_ScanWaitWithTimeout', 'fDAQmx_SelfCalDate', 'fDAQmx_SelfCalibration',
- 'fDAQmx_WaveformStart', 'fDAQmx_WaveformStop', 'fDAQmx_WF_IsFinished',
- 'fDAQmx_WF_WaitUntilFinished', 'fDAQmx_WriteChan', 'FetchURL', 'FindDimLabel',
- 'FindListItem', 'floor', 'FontList', 'FontSizeHeight', 'FontSizeStringWidth',
- 'FresnelCos', 'FresnelSin', 'FuncRefInfo', 'FunctionInfo', 'FunctionList',
- 'FunctionPath', 'gamma', 'gammaEuler', 'gammaInc', 'gammaNoise', 'gammln',
- 'gammp', 'gammq', 'Gauss', 'Gauss1D', 'Gauss2D', 'gcd', 'GeometricMean',
- 'GetBrowserLine', 'GetBrowserSelection', 'GetDataFolder', 'GetDataFolderDFR',
- 'GetDefaultFont', 'GetDefaultFontSize', 'GetDefaultFontStyle', 'GetDimLabel',
- 'GetEnvironmentVariable', 'GetErrMessage', 'GetFormula',
- 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
- 'GetKeyState', 'GetRTErrMessage', 'GetRTError', 'GetRTLocation', 'GetRTLocInfo',
- 'GetRTStackInfo', 'GetScrapText', 'GetUserData', 'GetWavesDataFolder',
- 'GetWavesDataFolderDFR', 'GetWindowBrowserSelection', 'GISGetAllFileFormats',
- 'GISSRefsAreEqual', 'GizmoInfo', 'GizmoScale', 'gnoise', 'GrepList',
- 'GrepString', 'GuideInfo', 'GuideNameList', 'Hash', 'hcsr', 'HDF5AttributeInfo',
- 'HDF5DatasetInfo', 'HDF5LibraryInfo', 'HDF5LinkInfo', 'HDF5TypeInfo', 'hermite',
- 'hermiteGauss', 'HyperGNoise', 'HyperGPFQ', 'HyperG0F1', 'HyperG1F1',
- 'HyperG2F1', 'i', 'IgorInfo', 'IgorVersion', 'imag', 'ImageInfo',
- 'ImageNameList', 'ImageNameToWaveRef', 'IndependentModuleList', 'IndexedDir',
- 'IndexedFile', 'IndexToScale', 'Inf', 'Integrate1D', 'interp', 'Interp2D',
- 'Interp3D', 'inverseERF', 'inverseERFC', 'ItemsInList', 'JacobiCn', 'JacobiSn',
- 'JulianToDate', 'Laguerre', 'LaguerreA', 'LaguerreGauss', 'LambertW',
- 'LayoutInfo', 'leftx', 'LegendreA', 'limit', 'ListMatch', 'ListToTextWave',
- 'ListToWaveRefWave', 'ln', 'log', 'logNormalNoise', 'lorentzianNoise',
- 'LowerStr', 'MacroInfo', 'MacroList', 'MacroPath', 'magsqr', 'MandelbrotPoint',
- 'MarcumQ', 'MatrixCondition', 'MatrixDet', 'MatrixDot', 'MatrixRank',
- 'MatrixTrace', 'max', 'MCC_AutoBridgeBal', 'MCC_AutoFastComp',
- 'MCC_AutoPipetteOffset', 'MCC_AutoSlowComp', 'MCC_AutoWholeCellComp',
- 'MCC_GetBridgeBalEnable', 'MCC_GetBridgeBalResist', 'MCC_GetFastCompCap',
- 'MCC_GetFastCompTau', 'MCC_GetHolding', 'MCC_GetHoldingEnable', 'MCC_GetMode',
- 'MCC_GetNeutralizationCap', 'MCC_GetNeutralizationEnable',
- 'MCC_GetOscKillerEnable', 'MCC_GetPipetteOffset', 'MCC_GetPrimarySignalGain',
- 'MCC_GetPrimarySignalHPF', 'MCC_GetPrimarySignalLPF', 'MCC_GetRsCompBandwidth',
- 'MCC_GetRsCompCorrection', 'MCC_GetRsCompEnable', 'MCC_GetRsCompPrediction',
- 'MCC_GetSecondarySignalGain', 'MCC_GetSecondarySignalLPF', 'MCC_GetSlowCompCap',
- 'MCC_GetSlowCompTau', 'MCC_GetSlowCompTauX20Enable',
- 'MCC_GetSlowCurrentInjEnable', 'MCC_GetSlowCurrentInjLevel',
- 'MCC_GetSlowCurrentInjSetlTime', 'MCC_GetWholeCellCompCap',
- 'MCC_GetWholeCellCompEnable', 'MCC_GetWholeCellCompResist',
- 'MCC_SelectMultiClamp700B', 'MCC_SetBridgeBalEnable', 'MCC_SetBridgeBalResist',
- 'MCC_SetFastCompCap', 'MCC_SetFastCompTau', 'MCC_SetHolding',
- 'MCC_SetHoldingEnable', 'MCC_SetMode', 'MCC_SetNeutralizationCap',
- 'MCC_SetNeutralizationEnable', 'MCC_SetOscKillerEnable', 'MCC_SetPipetteOffset',
- 'MCC_SetPrimarySignalGain', 'MCC_SetPrimarySignalHPF', 'MCC_SetPrimarySignalLPF',
- 'MCC_SetRsCompBandwidth', 'MCC_SetRsCompCorrection', 'MCC_SetRsCompEnable',
- 'MCC_SetRsCompPrediction', 'MCC_SetSecondarySignalGain',
- 'MCC_SetSecondarySignalLPF', 'MCC_SetSlowCompCap', 'MCC_SetSlowCompTau',
- 'MCC_SetSlowCompTauX20Enable', 'MCC_SetSlowCurrentInjEnable',
- 'MCC_SetSlowCurrentInjLevel', 'MCC_SetSlowCurrentInjSetlTime',
- 'MCC_SetTimeoutMs', 'MCC_SetWholeCellCompCap', 'MCC_SetWholeCellCompEnable',
- 'MCC_SetWholeCellCompResist', 'mean', 'median', 'min', 'mod', 'ModDate',
- 'MPFXEMGPeak', 'MPFXExpConvExpPeak', 'MPFXGaussPeak', 'MPFXLorentzianPeak',
- 'MPFXVoigtPeak', 'NameOfWave', 'NaN', 'NewFreeDataFolder', 'NewFreeWave', 'norm',
- 'NormalizeUnicode', 'note', 'NumberByKey', 'numpnts', 'numtype',
- 'NumVarOrDefault', 'num2char', 'num2istr', 'num2str', 'NVAR_Exists',
- 'OperationList', 'PadString', 'PanelResolution', 'ParamIsDefault',
- 'ParseFilePath', 'PathList', 'pcsr', 'Pi', 'PICTInfo', 'PICTList',
- 'PixelFromAxisVal', 'pnt2x', 'poissonNoise', 'poly', 'PolygonArea', 'poly2D',
- 'PossiblyQuoteName', 'ProcedureText', 'ProcedureVersion', 'p2rect', 'qcsr',
- 'real', 'RemoveByKey', 'RemoveEnding', 'RemoveFromList', 'RemoveListItem',
- 'ReplaceNumberByKey', 'ReplaceString', 'ReplaceStringByKey', 'ReplicateString',
- 'rightx', 'round', 'r2polar', 'sawtooth', 'scaleToIndex', 'ScreenResolution',
- 'sec', 'sech', 'Secs2Date', 'Secs2Time', 'SelectNumber', 'SelectString',
- 'SetEnvironmentVariable', 'sign', 'sin', 'sinc', 'sinh', 'sinIntegral',
- 'SortList', 'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath',
- 'SphericalBessJ', 'SphericalBessJD', 'SphericalBessY', 'SphericalBessYD',
- 'SphericalHarmonics', 'SQLAllocHandle', 'SQLAllocStmt',
- 'SQLBinaryWavesToTextWave', 'SQLBindCol', 'SQLBindParameter', 'SQLBrowseConnect',
- 'SQLBulkOperations', 'SQLCancel', 'SQLCloseCursor', 'SQLColAttributeNum',
- 'SQLColAttributeStr', 'SQLColumnPrivileges', 'SQLColumns', 'SQLConnect',
- 'SQLDataSources', 'SQLDescribeCol', 'SQLDescribeParam', 'SQLDisconnect',
- 'SQLDriverConnect', 'SQLDrivers', 'SQLEndTran', 'SQLError', 'SQLExecDirect',
- 'SQLExecute', 'SQLFetch', 'SQLFetchScroll', 'SQLForeignKeys', 'SQLFreeConnect',
- 'SQLFreeEnv', 'SQLFreeHandle', 'SQLFreeStmt', 'SQLGetConnectAttrNum',
- 'SQLGetConnectAttrStr', 'SQLGetCursorName', 'SQLGetDataNum', 'SQLGetDataStr',
- 'SQLGetDescFieldNum', 'SQLGetDescFieldStr', 'SQLGetDescRec',
- 'SQLGetDiagFieldNum', 'SQLGetDiagFieldStr', 'SQLGetDiagRec', 'SQLGetEnvAttrNum',
- 'SQLGetEnvAttrStr', 'SQLGetFunctions', 'SQLGetInfoNum', 'SQLGetInfoStr',
- 'SQLGetStmtAttrNum', 'SQLGetStmtAttrStr', 'SQLGetTypeInfo', 'SQLMoreResults',
- 'SQLNativeSql', 'SQLNumParams', 'SQLNumResultCols', 'SQLNumResultRowsIfKnown',
- 'SQLNumRowsFetched', 'SQLParamData', 'SQLPrepare', 'SQLPrimaryKeys',
- 'SQLProcedureColumns', 'SQLProcedures', 'SQLPutData', 'SQLReinitialize',
- 'SQLRowCount', 'SQLSetConnectAttrNum', 'SQLSetConnectAttrStr',
- 'SQLSetCursorName', 'SQLSetDescFieldNum', 'SQLSetDescFieldStr', 'SQLSetDescRec',
- 'SQLSetEnvAttrNum', 'SQLSetEnvAttrStr', 'SQLSetPos', 'SQLSetStmtAttrNum',
- 'SQLSetStmtAttrStr', 'SQLSpecialColumns', 'SQLStatistics', 'SQLTablePrivileges',
- 'SQLTables', 'SQLTextWaveToBinaryWaves', 'SQLTextWaveTo2DBinaryWave',
- 'SQLUpdateBoundValues', 'SQLXOPCheckState', 'SQL2DBinaryWaveToTextWave', 'sqrt',
- 'StartMSTimer', 'StatsBetaCDF', 'StatsBetaPDF', 'StatsBinomialCDF',
- 'StatsBinomialPDF', 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF',
- 'StatsChiPDF', 'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF',
- 'StatsDExpPDF', 'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF',
- 'StatsEValueCDF', 'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF',
- 'StatsFPDF', 'StatsFriedmanCDF', 'StatsGammaCDF', 'StatsGammaPDF',
- 'StatsGeometricCDF', 'StatsGeometricPDF', 'StatsGEVCDF', 'StatsGEVPDF',
- 'StatsHyperGCDF', 'StatsHyperGPDF', 'StatsInvBetaCDF', 'StatsInvBinomialCDF',
- 'StatsInvCauchyCDF', 'StatsInvChiCDF', 'StatsInvCMSSDCDF', 'StatsInvDExpCDF',
- 'StatsInvEValueCDF', 'StatsInvExpCDF', 'StatsInvFCDF', 'StatsInvFriedmanCDF',
- 'StatsInvGammaCDF', 'StatsInvGeometricCDF', 'StatsInvKuiperCDF',
- 'StatsInvLogisticCDF', 'StatsInvLogNormalCDF', 'StatsInvMaxwellCDF',
- 'StatsInvMooreCDF', 'StatsInvNBinomialCDF', 'StatsInvNCChiCDF', 'StatsInvNCFCDF',
- 'StatsInvNormalCDF', 'StatsInvParetoCDF', 'StatsInvPoissonCDF',
- 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF', 'StatsInvRayleighCDF',
- 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF', 'StatsInvStudentCDF',
- 'StatsInvTopDownCDF', 'StatsInvTriangularCDF', 'StatsInvUsquaredCDF',
- 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF', 'StatsKuiperCDF',
- 'StatsLogisticCDF', 'StatsLogisticPDF', 'StatsLogNormalCDF', 'StatsLogNormalPDF',
- 'StatsMaxwellCDF', 'StatsMaxwellPDF', 'StatsMedian', 'StatsMooreCDF',
- 'StatsNBinomialCDF', 'StatsNBinomialPDF', 'StatsNCChiCDF', 'StatsNCChiPDF',
- 'StatsNCFCDF', 'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF', 'StatsNormalCDF',
- 'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF', 'StatsPermute',
- 'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF', 'StatsPowerNoise',
- 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF', 'StatsRayleighCDF',
- 'StatsRayleighPDF', 'StatsRectangularCDF', 'StatsRectangularPDF', 'StatsRunsCDF',
- 'StatsSpearmanRhoCDF', 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
- 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
- 'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise', 'StatsVonMisesPDF',
- 'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF', 'StatsWeibullPDF',
- 'StopMSTimer', 'StringByKey', 'stringCRC', 'StringFromList', 'StringList',
- 'stringmatch', 'StringToUnsignedByteWave', 'strlen', 'strsearch',
- 'StrVarOrDefault', 'str2num', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists',
- 'TableInfo', 'TagVal', 'TagWaveRef', 'tan', 'tanh', 'TDMAddChannel',
- 'TDMAddGroup', 'TDMAppendDataValues', 'TDMAppendDataValuesTime',
- 'TDMChannelPropertyExists', 'TDMCloseChannel', 'TDMCloseFile', 'TDMCloseGroup',
- 'TDMCreateChannelProperty', 'TDMCreateFile', 'TDMCreateFileProperty',
- 'TDMCreateGroupProperty', 'TDMFilePropertyExists', 'TDMGetChannelPropertyNames',
- 'TDMGetChannelPropertyNum', 'TDMGetChannelPropertyStr',
- 'TDMGetChannelPropertyTime', 'TDMGetChannelPropertyType', 'TDMGetChannels',
- 'TDMGetChannelStringPropertyLen', 'TDMGetDataType', 'TDMGetDataValues',
- 'TDMGetDataValuesTime', 'TDMGetFilePropertyNames', 'TDMGetFilePropertyNum',
- 'TDMGetFilePropertyStr', 'TDMGetFilePropertyTime', 'TDMGetFilePropertyType',
- 'TDMGetFileStringPropertyLen', 'TDMGetGroupPropertyNames',
- 'TDMGetGroupPropertyNum', 'TDMGetGroupPropertyStr', 'TDMGetGroupPropertyTime',
- 'TDMGetGroupPropertyType', 'TDMGetGroups', 'TDMGetGroupStringPropertyLen',
- 'TDMGetLibraryErrorDescription', 'TDMGetNumChannelProperties',
- 'TDMGetNumChannels', 'TDMGetNumDataValues', 'TDMGetNumFileProperties',
- 'TDMGetNumGroupProperties', 'TDMGetNumGroups', 'TDMGroupPropertyExists',
- 'TDMOpenFile', 'TDMOpenFileEx', 'TDMRemoveChannel', 'TDMRemoveGroup',
- 'TDMReplaceDataValues', 'TDMReplaceDataValuesTime', 'TDMSaveFile',
- 'TDMSetChannelPropertyNum', 'TDMSetChannelPropertyStr',
- 'TDMSetChannelPropertyTime', 'TDMSetDataValues', 'TDMSetDataValuesTime',
- 'TDMSetFilePropertyNum', 'TDMSetFilePropertyStr', 'TDMSetFilePropertyTime',
- 'TDMSetGroupPropertyNum', 'TDMSetGroupPropertyStr', 'TDMSetGroupPropertyTime',
- 'TextEncodingCode', 'TextEncodingName', 'TextFile', 'ThreadGroupCreate',
- 'ThreadGroupGetDF', 'ThreadGroupGetDFR', 'ThreadGroupRelease', 'ThreadGroupWait',
- 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks', 'time', 'TraceFromPixel',
- 'TraceInfo', 'TraceNameList', 'TraceNameToWaveRef', 'TrimString', 'trunc',
- 'UniqueName', 'UnPadString', 'UnsetEnvironmentVariable', 'UpperStr', 'URLDecode',
- 'URLEncode', 'VariableList', 'Variance', 'vcsr', 'viAssertIntrSignal',
- 'viAssertTrigger', 'viAssertUtilSignal', 'viClear', 'viClose', 'viDisableEvent',
- 'viDiscardEvents', 'viEnableEvent', 'viFindNext', 'viFindRsrc', 'viGetAttribute',
- 'viGetAttributeString', 'viGpibCommand', 'viGpibControlATN', 'viGpibControlREN',
- 'viGpibPassControl', 'viGpibSendIFC', 'viIn8', 'viIn16', 'viIn32', 'viLock',
- 'viMapAddress', 'viMapTrigger', 'viMemAlloc', 'viMemFree', 'viMoveIn8',
- 'viMoveIn16', 'viMoveIn32', 'viMoveOut8', 'viMoveOut16', 'viMoveOut32', 'viOpen',
- 'viOpenDefaultRM', 'viOut8', 'viOut16', 'viOut32', 'viPeek8', 'viPeek16',
- 'viPeek32', 'viPoke8', 'viPoke16', 'viPoke32', 'viRead', 'viReadSTB',
- 'viSetAttribute', 'viSetAttributeString', 'viStatusDesc', 'viTerminate',
- 'viUnlock', 'viUnmapAddress', 'viUnmapTrigger', 'viUsbControlIn',
- 'viUsbControlOut', 'viVxiCommandQuery', 'viWaitOnEvent', 'viWrite', 'VoigtFunc',
- 'VoigtPeak', 'WaveCRC', 'WaveDataToString', 'WaveDims', 'WaveExists', 'WaveHash',
- 'WaveInfo', 'WaveList', 'WaveMax', 'WaveMin', 'WaveMinAndMax', 'WaveModCount',
- 'WaveName', 'WaveRefIndexed', 'WaveRefIndexedDFR', 'WaveRefsEqual',
- 'WaveRefWaveToList', 'WaveTextEncoding', 'WaveType', 'WaveUnits',
- 'WhichListItem', 'WinList', 'WinName', 'WinRecreation', 'WinType', 'wnoise',
- 'xcsr', 'XWaveName', 'XWaveRefFromTrace', 'x2pnt', 'zcsr', 'ZernikeR',
- 'zeromq_client_connect', 'zeromq_client_recv', 'zeromq_client_send',
- 'zeromq_handler_start', 'zeromq_handler_stop', 'zeromq_pub_bind',
- 'zeromq_pub_send', 'zeromq_server_bind', 'zeromq_server_recv',
- 'zeromq_server_send', 'zeromq_set', 'zeromq_set_logging_template', 'zeromq_stop',
- 'zeromq_sub_add_filter', 'zeromq_sub_connect', 'zeromq_sub_recv',
- 'zeromq_sub_remove_filter', 'zeromq_test_callfunction',
- 'zeromq_test_serializeWave', 'zeta'
- )
-
- tokens = {
- 'root': [
- (r'//.*$', Comment.Single),
- (r'"([^"\\]|\\.)*"', String),
- # Flow Control.
- (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword),
- # Types.
- (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- # Keywords.
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
- # Built-in operations.
- (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class),
- # Built-in functions.
- (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
- # Compiler directives.
- (r'^#(include|pragma|define|undef|ifdef|ifndef|if|elif|else|endif)',
- Name.Decorator),
- (r'\s+', Whitespace),
- (r'[^a-z"/]+$', Text),
- (r'.', Text),
- ],
- }
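
The rule table above routes the long `operations` and `functions` name lists through `words()`, so Igor built-ins come out as distinct token types (`Name.Class` for operations, `Name.Function` for functions). A minimal sketch of observing that with the stock Pygments `IgorLexer` (the class these deleted rules belong to); the exact token stream is indicative only:

    from pygments.lexers import IgorLexer
    from pygments.token import Name

    # 'Make' is listed under operations, 'sin' under functions in the tables above.
    snippet = 'Make/N=10 wave0 = sin(p)\n'
    for tok, value in IgorLexer().get_tokens(snippet):
        if tok in (Name.Class, Name.Function):
            print(tok, repr(value))
    # Expected, roughly: Token.Name.Class 'Make' and Token.Name.Function 'sin'
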
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/inferno.py b/venv/lib/python3.11/site-packages/pygments/lexers/inferno.py
deleted file mode 100644
index ce1fe03..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/inferno.py
+++ /dev/null
@@ -1,96 +0,0 @@
-"""
- pygments.lexers.inferno
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Inferno OS and its related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default
-from pygments.token import Punctuation, Comment, Operator, Keyword, \
- Name, String, Number, Whitespace
-
-__all__ = ['LimboLexer']
-
-
-class LimboLexer(RegexLexer):
- """
- Lexer for the Limbo programming language.
-
- TODO:
- - maybe implement better var declaration highlighting
- - some simple syntax error highlighting
-
- .. versionadded:: 2.0
- """
- name = 'Limbo'
- url = 'http://www.vitanuova.com/inferno/limbo.html'
- aliases = ['limbo']
- filenames = ['*.b']
- mimetypes = ['text/limbo']
-
- tokens = {
- 'whitespace': [
- (r'^(\s*)([a-zA-Z_]\w*:)(\s*\n)',
- bygroups(Whitespace, Name.Label, Whitespace)),
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'#(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\', String), # stray backslash
- ],
- 'statements': [
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])', Number.Float),
- (r'16r[0-9a-fA-F]+', Number.Hex),
- (r'8r[0-7]+', Number.Oct),
- (r'((([1-3]\d)|([2-9]))r)?(\d+)', Number.Integer),
- (r'[()\[\],.]', Punctuation),
- (r'[~!%^&*+=|?:<>/-]|(->)|(<-)|(=>)|(::)', Operator),
- (r'(alt|break|case|continue|cyclic|do|else|exit|'
- r'for|hd|if|implement|import|include|len|load|or|'
- r'pick|return|spawn|tagof|tl|to|while)\b', Keyword),
- (r'(byte|int|big|real|string|array|chan|list|adt'
- r'|fn|ref|of|module|self|type)\b', Keyword.Type),
- (r'(con|iota|nil)\b', Keyword.Constant),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- default('statement'),
- ],
- }
-
- def analyse_text(text):
- # Any limbo module implements something
- if re.search(r'^implement \w+;', text, re.MULTILINE):
- return 0.7
-
-# TODO:
-# - Make lexers for:
-# - asm sources
-# - man pages
-# - mkfiles
-# - module definitions
-# - namespace definitions
-# - shell scripts
-# - maybe keyfiles and fonts
-# they all seem to be quite similar to their equivalents
-# from unix world, so there should not be a lot of problems
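
For reference, a small usage sketch for the LimboLexer deleted above (assuming the copy that ships with Pygments): `analyse_text` is wrapped by Pygments so it can be called on the class, and its 0.7 score for `implement <name>;` is what lets `guess_lexer` pick Limbo for module sources.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import LimboLexer

    source = 'implement Hello;\n\ninclude "sys.m";\n\ninit(nil: ref Draw->Context, argv: list of string)\n{\n}\n'

    # The heuristic above scores text containing "implement <name>;" at 0.7.
    print(LimboLexer.analyse_text(source))            # should print 0.7
    print(highlight(source, LimboLexer(), TerminalFormatter()))
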
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/installers.py b/venv/lib/python3.11/site-packages/pygments/lexers/installers.py
deleted file mode 100644
index dcf8bdf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/installers.py
+++ /dev/null
@@ -1,327 +0,0 @@
-"""
- pygments.lexers.installers
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for installer/packager DSLs and formats.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation, Generic, Number, Whitespace
-
-__all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
- 'DebianControlLexer']
-
-
-class NSISLexer(RegexLexer):
- """
- For NSIS scripts.
-
- .. versionadded:: 1.6
- """
- name = 'NSIS'
- url = 'http://nsis.sourceforge.net/'
- aliases = ['nsis', 'nsi', 'nsh']
- filenames = ['*.nsi', '*.nsh']
- mimetypes = ['text/x-nsis']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'([;#].*)(\n)', bygroups(Comment, Whitespace)),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'str_double'),
- (r'`', String.Backtick, 'str_backtick'),
- include('macro'),
- include('interpol'),
- include('basic'),
- (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
- (r'/[a-z_]\w*', Name.Attribute),
- (r'\s+', Whitespace),
- (r'[\w.]+', Text),
- ],
- 'basic': [
- (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
- bygroups(Whitespace, Keyword, Whitespace, Name.Function)),
- (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
- bygroups(Keyword.Namespace, Punctuation, Name.Function)),
- (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'(\b[ULS]|\B)([!<>=]?=|\<\>?|\>)\B', Operator),
- (r'[|+-]', Operator),
- (r'\\', Punctuation),
- (r'\b(Abort|Add(?:BrandingImage|Size)|'
- r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
- r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
- r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
- r'ComponentText|CopyFiles|CRCCheck|'
- r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
- r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
- r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
- r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
- r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
- r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
- r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
- r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
- r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
- r'InstDirError|LabelAddress|TempFileName)|'
- r'Goto|HideWindow|Icon|'
- r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
- r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
- r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
- r'IsWindow|LangString(?:UP)?|'
- r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
- r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
- r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
- r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
- r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
- r'Return|RMDir|SearchPath|Section(?:Divider|End|'
- r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
- r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
- r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
- r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
- r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
- r'Silent|StaticBkColor)|'
- r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
- r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
- r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
- r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
- r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
- r'XPStyle)\b', Keyword),
- (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
- r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
- r'HK(CC|CR|CU|DD|LM|PD|U)|'
- r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
- r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
- r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
- r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
- r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
- r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
- r'YESNO(?:CANCEL)?)|SET|SHCTX|'
- r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
- r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
- r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
- r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
- r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
- r'true|try|user|zlib)\b', Name.Constant),
- ],
- 'macro': [
- (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
- r'delfilefile|echo(?:message)?|else|endif|error|execute|'
- r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
- r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
- r'warning)\b', Comment.Preproc),
- ],
- 'interpol': [
- (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
- (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
- r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
- r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
- r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
- r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
- r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
- Name.Builtin),
- (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
- (r'\$[a-z_]\w*', Name.Variable),
- ],
- 'str_double': [
- (r'"', String.Double, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'[^"]+', String.Double),
- ],
- 'str_backtick': [
- (r'`', String.Double, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'[^`]+', String.Double),
- ],
- }
-
-
-class RPMSpecLexer(RegexLexer):
- """
- For RPM ``.spec`` files.
-
- .. versionadded:: 1.6
- """
-
- name = 'RPMSpec'
- aliases = ['spec']
- filenames = ['*.spec']
- mimetypes = ['text/x-rpm-spec']
-
- _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
- 'post[a-z]*|trigger[a-z]*|files)')
-
- tokens = {
- 'root': [
- (r'#.*$', Comment),
- include('basic'),
- ],
- 'description': [
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'changelog': [
- (r'\*.*$', Generic.Subheading),
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'basic': [
- include('macro'),
- (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
- r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
- r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
- r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
- bygroups(Generic.Heading, Punctuation, using(this))),
- (r'^%description', Name.Decorator, 'description'),
- (r'^%changelog', Name.Decorator, 'changelog'),
- (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
- (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
- r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
- Keyword),
- include('interpol'),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'string'),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'macro': [
- (r'%define.*$', Comment.Preproc),
- (r'%\{\!\?.*%define.*\}', Comment.Preproc),
- (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
- bygroups(Comment.Preproc, Text)),
- ],
- 'interpol': [
- (r'%\{?__[a-z_]+\}?', Name.Function),
- (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
- (r'%\{\?\w+\}', Name.Variable),
- (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
- (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
- ]
- }
-
-
-class SourcesListLexer(RegexLexer):
- """
- Lexer that highlights debian sources.list files.
-
- .. versionadded:: 0.7
- """
-
- name = 'Debian Sourcelist'
- aliases = ['debsources', 'sourceslist', 'sources.list']
- filenames = ['sources.list']
- mimetypes = ['application/x-debian-sourceslist']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#.*?$', Comment),
- (r'^(deb(?:-src)?)(\s+)',
- bygroups(Keyword, Whitespace), 'distribution')
- ],
- 'distribution': [
- (r'#.*?$', Comment, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\s$[]+', String),
- (r'\[', String.Other, 'escaped-distribution'),
- (r'\$', String),
- (r'\s+', Whitespace, 'components')
- ],
- 'escaped-distribution': [
- (r'\]', String.Other, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\]$]+', String.Other),
- (r'\$', String.Other)
- ],
- 'components': [
- (r'#.*?$', Comment, '#pop:2'),
- (r'$', Text, '#pop:2'),
- (r'\s+', Whitespace),
- (r'\S+', Keyword.Pseudo),
- ]
- }
-
- def analyse_text(text):
- for line in text.splitlines():
- line = line.strip()
- if line.startswith('deb ') or line.startswith('deb-src '):
- return True
-
-
-class DebianControlLexer(RegexLexer):
- """
- Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
-
- .. versionadded:: 0.9
- """
- name = 'Debian Control file'
- url = 'https://www.debian.org/doc/debian-policy/ch-controlfields.html'
- aliases = ['debcontrol', 'control']
- filenames = ['control']
-
- tokens = {
- 'root': [
- (r'^(Description)', Keyword, 'description'),
- (r'^(Maintainer|Uploaders)(:\s*)', bygroups(Keyword, Text),
- 'maintainer'),
- (r'^((?:Build-|Pre-)?Depends(?:-Indep|-Arch)?)(:\s*)',
- bygroups(Keyword, Text), 'depends'),
- (r'^(Recommends|Suggests|Enhances)(:\s*)', bygroups(Keyword, Text),
- 'depends'),
- (r'^((?:Python-)?Version)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
- bygroups(Keyword, Whitespace, String)),
- ],
- 'maintainer': [
- (r'<[^>]+>$', Generic.Strong, '#pop'),
- (r'<[^>]+>', Generic.Strong),
- (r',\n?', Text),
- (r'[^,<]+$', Text, '#pop'),
- (r'[^,<]+', Text),
- ],
- 'description': [
- (r'(.*)(Homepage)(: )(\S+)',
- bygroups(Text, String, Name, Name.Class)),
- (r':.*\n', Generic.Strong),
- (r' .*\n', Text),
- default('#pop'),
- ],
- 'depends': [
- (r'(\$)(\{)(\w+\s*:\s*\w+)(\})',
- bygroups(Operator, Text, Name.Entity, Text)),
- (r'\(', Text, 'depend_vers'),
- (r'\|', Operator),
- (r',\n', Text),
- (r'\n', Text, '#pop'),
- (r'[,\s]', Text),
- (r'[+.a-zA-Z0-9-]+', Name.Function),
- (r'\[.*?\]', Name.Entity),
- ],
- 'depend_vers': [
- (r'\)', Text, '#pop'),
- (r'([><=]+)(\s*)([^)]+)', bygroups(Operator, Text, Number)),
- ]
- }
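
A hedged sketch of how the two Debian lexers above are typically reached: `SourcesListLexer.analyse_text` answers truthily for `deb `/`deb-src ` lines, so `guess_lexer` should usually resolve plain APT source lines to it, while the registered alias `debcontrol` fetches the control-file lexer directly. Output shown is indicative only.

    from pygments.lexers import get_lexer_by_name, guess_lexer

    line = 'deb http://deb.debian.org/debian bookworm main contrib\n'
    print(type(guess_lexer(line)).__name__)   # usually 'SourcesListLexer'

    control = 'Package: hello\nDepends: libc6 (>= 2.34)\n'
    lexer = get_lexer_by_name('debcontrol')
    for tok, value in lexer.get_tokens(control):
        print(tok, repr(value))
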
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/int_fiction.py b/venv/lib/python3.11/site-packages/pygments/lexers/int_fiction.py
deleted file mode 100644
index 4f4d55d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/int_fiction.py
+++ /dev/null
@@ -1,1382 +0,0 @@
-"""
- pygments.lexers.int_fiction
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for interactive fiction languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Generic
-
-__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
- 'Tads3Lexer']
-
-
-class Inform6Lexer(RegexLexer):
- """
- For Inform 6 source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6'
- url = 'http://inform-fiction.org/'
- aliases = ['inform6', 'i6']
- filenames = ['*.inf']
-
- flags = re.MULTILINE | re.DOTALL
-
- _name = r'[a-zA-Z_]\w*'
-
- # Inform 7 maps these four character classes to their ASCII
- # equivalents. To support Inform 6 inclusions within Inform 7,
- # Inform6Lexer maps them too.
- _dash = '\\-\u2010-\u2014'
- _dquote = '"\u201c\u201d'
- _squote = "'\u2018\u2019"
- _newline = '\\n\u0085\u2028\u2029'
-
- tokens = {
- 'root': [
- (r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
- 'directive'),
- default('directive')
- ],
- '_whitespace': [
- (r'\s+', Text),
- (r'![^%s]*' % _newline, Comment.Single)
- ],
- 'default': [
- include('_whitespace'),
- (r'\[', Punctuation, 'many-values'), # Array initialization
- (r':|(?=;)', Punctuation, '#pop'),
- (r'<', Punctuation), # Second angle bracket in an action statement
- default(('expression', '_expression'))
- ],
-
- # Expressions
- '_expression': [
- include('_whitespace'),
- (r'(?=sp\b)', Text, '#pop'),
- (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
- ('#pop', 'value')),
- (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
- (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
- ],
- 'expression': [
- include('_whitespace'),
- (r'\(', Punctuation, ('expression', '_expression')),
- (r'\)', Punctuation, '#pop'),
- (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
- (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
- (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
- (r',', Punctuation, '_expression'),
- (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
- Operator, '_expression'),
- (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
- '_expression'),
- (r'sp\b', Name),
- (r'\?~?', Name.Label, 'label?'),
- (r'[@{]', Error),
- default('#pop')
- ],
- '_assembly-expression': [
- (r'\(', Punctuation, ('#push', '_expression')),
- (r'[\[\]]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, '_expression'),
- (r'sp\b', Keyword.Pseudo),
- (r';', Punctuation, '#pop:3'),
- include('expression')
- ],
- '_for-expression': [
- (r'\)', Punctuation, '#pop:2'),
- (r':', Punctuation, '#pop'),
- include('expression')
- ],
- '_keyword-expression': [
- (r'(from|near|to)\b', Keyword, '_expression'),
- include('expression')
- ],
- '_list-expression': [
- (r',', Punctuation, '#pop'),
- include('expression')
- ],
- '_object-expression': [
- (r'has\b', Keyword.Declaration, '#pop'),
- include('_list-expression')
- ],
-
- # Values
- 'value': [
- include('_whitespace'),
- # Strings
- (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
- (r'([%s])(@\{[0-9a-fA-F]*\})([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'([%s])(@.{2})([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
- # Numbers
- (r'\$[<>]?[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
- Number.Float, '#pop'),
- (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'\$\$[01]+', Number.Bin, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
- # Values prefixed by hashes
- (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
- (r'(#g\$)(%s)' % _name,
- bygroups(Operator, Name.Variable.Global), '#pop'),
- (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
- (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
- (r'#', Name.Builtin, ('#pop', 'system-constant')),
- # System functions
- (words((
- 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
- 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Metaclasses
- (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
- # Veneer routines
- (words((
- 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
- 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
- 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
- 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
- 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
- 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
- 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
- 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
- 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
- prefix='(?i)', suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Other built-in symbols
- (words((
- 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
- 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'DOUBLE_HI_INFINITY',
- 'DOUBLE_HI_NAN', 'DOUBLE_HI_NINFINITY', 'DOUBLE_LO_INFINITY', 'DOUBLE_LO_NAN',
- 'DOUBLE_LO_NINFINITY', 'false', 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY',
- 'GOBJFIELD_CHAIN', 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
- 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
- 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
- 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
- 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
- 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
- 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
- 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
- prefix='(?i)', suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Other values
- (_name, Name, '#pop')
- ],
- 'value?': [
- include('value'),
- default('#pop')
- ],
- # Strings
- 'dictionary-word': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _squote, String.Single),
- (r'[({]', String.Single),
- (r'@\{[0-9a-fA-F]*\}', String.Escape),
- (r'@.{2}', String.Escape),
- (r'[%s]' % _squote, String.Single, '#pop')
- ],
- 'string': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _dquote, String.Double),
- (r'[({]', String.Double),
- (r'\\', String.Escape),
- (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
- (_newline, _newline), String.Escape),
- (r'@(\\\s*[%s]\s*)*[({]((\\\s*[%s]\s*)*[0-9a-zA-Z_])*'
- r'(\\\s*[%s]\s*)*[)}]' % (_newline, _newline, _newline),
- String.Escape),
- (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
- String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- 'plain-string': [
- (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
- (r'[~^({\[\]]', String.Double),
- (r'\\', String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- # Names
- '_constant': [
- include('_whitespace'),
- (_name, Name.Constant, '#pop'),
- include('value')
- ],
- 'constant*': [
- include('_whitespace'),
- (r',', Punctuation),
- (r'=', Punctuation, 'value?'),
- (_name, Name.Constant, 'value?'),
- default('#pop')
- ],
- '_global': [
- include('_whitespace'),
- (_name, Name.Variable.Global, '#pop'),
- include('value')
- ],
- 'label?': [
- include('_whitespace'),
- (_name, Name.Label, '#pop'),
- default('#pop')
- ],
- 'variable?': [
- include('_whitespace'),
- (_name, Name.Variable, '#pop'),
- default('#pop')
- ],
- # Values after hashes
- 'obsolete-dictionary-word': [
- (r'\S\w*', String.Other, '#pop')
- ],
- 'system-constant': [
- include('_whitespace'),
- (_name, Name.Builtin, '#pop')
- ],
-
- # Directives
- 'directive': [
- include('_whitespace'),
- (r'#', Punctuation),
- (r';', Punctuation, '#pop'),
- (r'\[', Punctuation,
- ('default', 'statements', 'locals', 'routine-name?')),
- (words((
- 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
- 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
- 'version'), prefix='(?i)', suffix=r'\b'),
- Keyword, 'default'),
- (r'(?i)(array|global)\b', Keyword,
- ('default', 'directive-keyword?', '_global')),
- (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
- (r'(?i)class\b', Keyword,
- ('object-body', 'duplicates', 'class-name')),
- (r'(?i)(constant|default)\b', Keyword,
- ('default', 'constant*')),
- (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
- (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
- (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
- (r'(?i)import\b', Keyword, 'manifest'),
- (r'(?i)(include|link|origsource)\b', Keyword,
- ('default', 'before-plain-string?')),
- (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
- (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
- (r'(?i)(nearby|object)\b', Keyword,
- ('object-body', '_object-head')),
- (r'(?i)property\b', Keyword,
- ('default', 'alias?', '_constant', 'property-keyword*')),
- (r'(?i)replace\b', Keyword,
- ('default', 'routine-name?', 'routine-name?')),
- (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
- (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
- (r'(?i)trace\b', Keyword,
- ('default', 'trace-keyword?', 'trace-keyword?')),
- (r'(?i)zcharacter\b', Keyword,
- ('default', 'directive-keyword?', 'directive-keyword?')),
- (_name, Name.Class, ('object-body', '_object-head'))
- ],
- # [, Replace, Stub
- 'routine-name?': [
- include('_whitespace'),
- (_name, Name.Function, '#pop'),
- default('#pop')
- ],
- 'locals': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'\*', Punctuation),
- (r'"', String.Double, 'plain-string'),
- (_name, Name.Variable)
- ],
- # Array
- 'many-values': [
- include('_whitespace'),
- (r';', Punctuation),
- (r'\]', Punctuation, '#pop'),
- (r':', Error),
- default(('expression', '_expression'))
- ],
- # Attribute, Property
- 'alias?': [
- include('_whitespace'),
- (r'alias\b', Keyword, ('#pop', '_constant')),
- default('#pop')
- ],
- # Class, Object, Nearby
- 'class-name': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class, '#pop')
- ],
- 'duplicates': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
- default('#pop')
- ],
- '_object-head': [
- (r'[%s]>' % _dash, Punctuation),
- (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
- include('_global')
- ],
- 'object-body': [
- include('_whitespace'),
- (r';', Punctuation, '#pop:2'),
- (r',', Punctuation),
- (r'class\b', Keyword.Declaration, 'class-segment'),
- (r'(has|private|with)\b', Keyword.Declaration),
- (r':', Error),
- default(('_object-expression', '_expression'))
- ],
- 'class-segment': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class),
- default('value')
- ],
- # Extend, Verb
- 'grammar': [
- include('_whitespace'),
- (r'=', Punctuation, ('#pop', 'default')),
- (r'\*', Punctuation, ('#pop', 'grammar-line')),
- default('_directive-keyword')
- ],
- 'grammar-line': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'[/*]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, 'value'),
- (r'(noun|scope)\b', Keyword, '=routine'),
- default('_directive-keyword')
- ],
- '=routine': [
- include('_whitespace'),
- (r'=', Punctuation, 'routine-name?'),
- default('#pop')
- ],
- # Import
- 'manifest': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(?i)global\b', Keyword, '_global'),
- default('_global')
- ],
- # Include, Link, Message
- 'diagnostic': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
- default(('#pop', 'before-plain-string?', 'directive-keyword?'))
- ],
- 'before-plain-string?': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')),
- default('#pop')
- ],
- 'message-string': [
- (r'[~^]+', String.Escape),
- include('plain-string')
- ],
-
- # Keywords used in directives
- '_directive-keyword!': [
- include('_whitespace'),
- (words((
- 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
- 'first', 'has', 'held', 'individual', 'initial', 'initstr', 'last', 'long', 'meta',
- 'multi', 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only',
- 'private', 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table',
- 'terminating', 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
- Keyword, '#pop'),
- (r'static\b', Keyword),
- (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
- ],
- '_directive-keyword': [
- include('_directive-keyword!'),
- include('value')
- ],
- 'directive-keyword?': [
- include('_directive-keyword!'),
- default('#pop')
- ],
- 'property-keyword*': [
- include('_whitespace'),
- (words(('additive', 'individual', 'long'),
- suffix=r'\b(?=(\s*|(![^%s]*[%s]))*[_a-zA-Z])' % (_newline, _newline)),
- Keyword),
- default('#pop')
- ],
- 'trace-keyword?': [
- include('_whitespace'),
- (words((
- 'assembly', 'dictionary', 'expressions', 'lines', 'linker',
- 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
- Keyword, '#pop'),
- default('#pop')
- ],
-
- # Statements
- 'statements': [
- include('_whitespace'),
- (r'\]', Punctuation, '#pop'),
- (r'[;{}]', Punctuation),
- (words((
- 'box', 'break', 'continue', 'default', 'give', 'inversion',
- 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
- 'spaces', 'string', 'until'), suffix=r'\b'),
- Keyword, 'default'),
- (r'(do|else)\b', Keyword),
- (r'(font|style)\b', Keyword,
- ('default', 'miscellaneous-keyword?')),
- (r'for\b', Keyword, ('for', '(?')),
- (r'(if|switch|while)', Keyword,
- ('expression', '_expression', '(?')),
- (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
- (r'objectloop\b', Keyword,
- ('_keyword-expression', 'variable?', '(?')),
- (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
- (r'\.', Name.Label, 'label?'),
- (r'@', Keyword, 'opcode'),
- (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
- (r'<', Punctuation, 'default'),
- (r'move\b', Keyword,
- ('default', '_keyword-expression', '_expression')),
- default(('default', '_keyword-expression', '_expression'))
- ],
- 'miscellaneous-keyword?': [
- include('_whitespace'),
- (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
- Keyword, '#pop'),
- (r'(a|A|an|address|char|name|number|object|property|string|the|'
- r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
- '#pop'),
- (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
- '#pop'),
- default('#pop')
- ],
- '(?': [
- include('_whitespace'),
- (r'\(', Punctuation, '#pop'),
- default('#pop')
- ],
- 'for': [
- include('_whitespace'),
- (r';', Punctuation, ('_for-expression', '_expression')),
- default(('_for-expression', '_expression'))
- ],
- 'print-list': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r':', Error),
- default(('_list-expression', '_expression', '_list-expression', 'form'))
- ],
- 'form': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
- default('#pop')
- ],
-
- # Assembly
- 'opcode': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
- (_name, Keyword, 'operands')
- ],
- 'operands': [
- (r':', Error),
- default(('_assembly-expression', '_expression'))
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # 'in' is either a keyword or an operator.
- # If the token two tokens after 'in' is ')', 'in' is a keyword:
- # objectloop(a in b)
- # Otherwise, it is an operator:
- # objectloop(a in b && true)
- objectloop_queue = []
- objectloop_token_count = -1
- previous_token = None
- for index, token, value in RegexLexer.get_tokens_unprocessed(self,
- text):
- if previous_token is Name.Variable and value == 'in':
- objectloop_queue = [[index, token, value]]
- objectloop_token_count = 2
- elif objectloop_token_count > 0:
- if token not in Comment and token not in Text:
- objectloop_token_count -= 1
- objectloop_queue.append((index, token, value))
- else:
- if objectloop_token_count == 0:
- if objectloop_queue[-1][2] == ')':
- objectloop_queue[0][1] = Keyword
- while objectloop_queue:
- yield objectloop_queue.pop(0)
- objectloop_token_count = -1
- yield index, token, value
- if token not in Comment and token not in Text:
- previous_token = token
- while objectloop_queue:
- yield objectloop_queue.pop(0)
-
- def analyse_text(text):
- """We try to find a keyword which seem relatively common, unfortunately
- there is a decent overlap with Smalltalk keywords otherwise here.."""
- result = 0
- if re.search(r'\borigsource\b', text, re.IGNORECASE):
- result += 0.05
-
- return result
-
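
The `get_tokens_unprocessed` override above is what re-tags `in` as a keyword when it is the head of an `objectloop` range and leaves it as an operator inside a larger condition. A minimal sketch of observing that, assuming the `Inform6Lexer` registered with Pygments; the expected outputs are indicative:

    from pygments.lexers import Inform6Lexer

    lexer = Inform6Lexer()

    def tag_of_in(code):
        # Token type assigned to the bare word 'in' in the given source.
        return next(tok for _, tok, value in lexer.get_tokens_unprocessed(code)
                    if value == 'in')

    # Two tokens after 'in' is ')', so 'in' should come back as a keyword.
    print(tag_of_in('[ Main; objectloop (x in y) print "hi"; ];'))
    # Inside a larger condition it should stay an operator.
    print(tag_of_in('[ Main; objectloop (x in y && true) print "hi"; ];'))
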
-
-class Inform7Lexer(RegexLexer):
- """
- For Inform 7 source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 7'
- url = 'http://inform7.com/'
- aliases = ['inform7', 'i7']
- filenames = ['*.ni', '*.i7x']
-
- flags = re.MULTILINE | re.DOTALL
-
- _dash = Inform6Lexer._dash
- _dquote = Inform6Lexer._dquote
- _newline = Inform6Lexer._newline
- _start = r'\A|(?<=[%s])' % _newline
-
- # There are three variants of Inform 7, differing in how to
- # interpret at signs and braces in I6T. In top-level inclusions, at
- # signs in the first column are inweb syntax. In phrase definitions
- # and use options, tokens in braces are treated as I7. Use options
- # also interpret "{N}".
- tokens = {}
- token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
-
- for level in token_variants:
- tokens[level] = {
- '+i6-root': list(Inform6Lexer.tokens['root']),
- '+i6t-root': [ # For Inform6TemplateLexer
- (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
- ('directive', '+p'))
- ],
- 'root': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]' % _dquote, Generic.Heading,
- ('+main', '+titling', '+titling-string')),
- default(('+main', '+heading?'))
- ],
- '+titling-string': [
- (r'[^%s]+' % _dquote, Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '#pop')
- ],
- '+titling': [
- (r'\[', Comment.Multiline, '+comment'),
- (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
- (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
- Text, ('#pop', '+heading?')),
- (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
- (r'[|%s]' % _newline, Generic.Heading)
- ],
- '+main': [
- (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
- (r'[%s]' % _dquote, String.Double, '+text'),
- (r':', Text, '+phrase-definition'),
- (r'(?i)\bas\b', Text, '+use-option'),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-not-inline'), Punctuation)),
- (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
- (_start, _dquote, _newline), Text, '+heading?'),
- (r'(?i)[a(|%s]' % _newline, Text)
- ],
- '+phrase-definition': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive',
- 'default', 'statements'),
- i6t='+i6t-inline'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+use-option': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-use-option'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+comment': [
- (r'[^\[\]]+', Comment.Multiline),
- (r'\[', Comment.Multiline, '#push'),
- (r'\]', Comment.Multiline, '#pop')
- ],
- '+text': [
- (r'[^\[%s]+' % _dquote, String.Double),
- (r'\[.*?\]', String.Interpol),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- '+heading?': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
- (r'[%s]{1,3}' % _dash, Text),
- (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
- Generic.Heading, '#pop'),
- default('#pop')
- ],
- '+documentation-heading': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(?i)documentation\s+', Text, '+documentation-heading2'),
- default('#pop')
- ],
- '+documentation-heading2': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s' % _dash, Text, '+documentation'),
- default('#pop:2')
- ],
- '+documentation': [
- (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
- (_start, _newline), Generic.Heading),
- (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
- Generic.Subheading),
- (r'((%s)\t.*?[%s])+' % (_start, _newline),
- using(this, state='+main')),
- (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
- (r'\[', Comment.Multiline, '+comment'),
- ],
- '+i6t-not-inline': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p')
- ],
- '+i6t-use-option': [
- include('+i6t-not-inline'),
- (r'(\{)(N)(\})', bygroups(Punctuation, Text, Punctuation))
- ],
- '+i6t-inline': [
- (r'(\{)(\S[^}]*)?(\})',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+i6t': [
- (r'(\{[%s])(![^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Comment.Single, Punctuation)),
- (r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation), '+lines'),
- (r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation)),
- (r'(\(\+)(.*?)(\+\)|\Z)',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+p': [
- (r'[^@]+', Comment.Preproc),
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc, '#pop'),
- (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading),
- (r'@', Comment.Preproc)
- ],
- '+lines': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p'),
- (r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
- (r'![^%s]*' % _newline, Comment.Single),
- (r'(\{)([%s]endlines)(\})' % _dash,
- bygroups(Punctuation, Keyword, Punctuation), '#pop'),
- (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
- ]
- }
- # Inform 7 can include snippets of Inform 6 template language,
- # so all of Inform6Lexer's states are copied here, with
- # modifications to account for template syntax. Inform7Lexer's
- # own states begin with '+' to avoid name conflicts. Some of
- # Inform6Lexer's states begin with '_': these are not modified.
- # They deal with template syntax either by including modified
- # states, or by matching r'' then pushing to modified states.
- for token in Inform6Lexer.tokens:
- if token == 'root':
- continue
- tokens[level][token] = list(Inform6Lexer.tokens[token])
- if not token.startswith('_'):
- tokens[level][token][:0] = [include('+i6t'), include(level)]
-
- def __init__(self, **options):
- level = options.get('i6t', '+i6t-not-inline')
- if level not in self._all_tokens:
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
- RegexLexer.__init__(self, **options)
-
-
-class Inform6TemplateLexer(Inform7Lexer):
- """
- For Inform 6 template code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6 template'
- aliases = ['i6t']
- filenames = ['*.i6t']
-
- def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
- return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
-
-
-class Tads3Lexer(RegexLexer):
- """
- For TADS 3 source code.
- """
-
- name = 'TADS 3'
- aliases = ['tads3']
- filenames = ['*.t']
-
- flags = re.DOTALL | re.MULTILINE
-
- _comment_single = r'(?://(?:[^\\\n]|\\+[\w\W])*$)'
- _comment_multiline = r'(?:/\*(?:[^*]|\*(?!/))*\*/)'
- _escape = (r'(?:\\(?:[\n\\<>"\'^v bnrt]|u[\da-fA-F]{,4}|x[\da-fA-F]{,2}|'
- r'[0-3]?[0-7]{1,2}))')
- _name = r'(?:[_a-zA-Z]\w*)'
- _no_quote = r'(?=\s|\\?>)'
- _operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
- r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
- _ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
- _ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
-
- def _make_string_state(triple, double, verbatim=None, _escape=_escape):
- if verbatim:
- verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
- re.escape(c.upper()))
- for c in verbatim])
- char = r'"' if double else r"'"
- token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
- prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
- tag_state_name = '%sqt' % prefix
- state = []
- if triple:
- state += [
- (r'%s{3,}' % char, token, '#pop'),
- (r'\\%s+' % char, String.Escape),
- (char, token)
- ]
- else:
- state.append((char, token, '#pop'))
- state += [
- include('s/verbatim'),
- (r'[^\\<&{}%s]+' % char, token)
- ]
- if verbatim:
- # This regex can't use `(?i)` because escape sequences are
- # case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
- state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
- (_escape, verbatim),
- Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
- else:
- state += [
- (r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
- (char, char, escaped_quotes, _escape), Comment.Multiline),
- (r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/listing' % prefix, tag_state_name)),
- (r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/xmp' % prefix, tag_state_name)),
- (r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
- (char, char, escaped_quotes, _escape), Name.Tag,
- tag_state_name),
- include('s/entity')
- ]
- state += [
- include('s/escape'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
- (r'[\\&{}<]', token)
- ]
- return state
-
- def _make_tag_state(triple, double, _escape=_escape):
- char = r'"' if double else r"'"
- quantifier = r'{3,}' if triple else r''
- state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
- token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
- return [
- (r'%s%s' % (char, quantifier), token, '#pop:2'),
- (r'(\s|\\\n)+', Text),
- (r'(=)(\\?")', bygroups(Punctuation, String.Double),
- 'dqs/%s' % state_name),
- (r"(=)(\\?')", bygroups(Punctuation, String.Single),
- 'sqs/%s' % state_name),
- (r'=', Punctuation, 'uqs/%s' % state_name),
- (r'\\?>', Name.Tag, '#pop'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
- (r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
- (char, char, escaped_quotes, _escape), Name.Attribute),
- include('s/escape'),
- include('s/verbatim'),
- include('s/entity'),
- (r'[\\{}&]', Name.Attribute)
- ]
-
- def _make_attribute_value_state(terminator, host_triple, host_double,
- _escape=_escape):
- token = (String.Double if terminator == r'"' else
- String.Single if terminator == r"'" else String.Other)
- host_char = r'"' if host_double else r"'"
- host_quantifier = r'{3,}' if host_triple else r''
- host_token = String.Double if host_double else String.Single
- escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
- if host_triple else r'')
- return [
- (r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
- (r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
- token, '#pop'),
- include('s/verbatim'),
- include('s/entity'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (host_char, host_char, escaped_quotes, _escape), String.Interpol),
- (r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
- token),
- include('s/escape'),
- (r'["\'\s&{<}\\]', token)
- ]
-
- tokens = {
- 'root': [
- ('\ufeff', Text),
- (r'\{', Punctuation, 'object-body'),
- (r';+', Punctuation),
- (r'(?=(argcount|break|case|catch|continue|default|definingobj|'
- r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
- r'invokee|local|nil|new|operator|replaced|return|self|switch|'
- r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
- (r'(%s)(%s*)(\()' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation),
- ('block?/root', 'more/parameters', 'main/parameters')),
- include('whitespace'),
- (r'\++', Punctuation),
- (r'[^\s!"%-(*->@-_a-z{-~]+', Error), # Averts an infinite loop
- (r'(?!\Z)', Text, 'main/root')
- ],
- 'main/root': [
- include('main/basic'),
- default(('#pop', 'object-body/no-braces', 'classes', 'class'))
- ],
- 'object-body/no-braces': [
- (r';', Punctuation, '#pop'),
- (r'\{', Punctuation, ('#pop', 'object-body')),
- include('object-body')
- ],
- 'object-body': [
- (r';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (r':', Punctuation, ('classes', 'class')),
- (r'(%s?)(%s*)(\()' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation),
- ('block?', 'more/parameters', 'main/parameters')),
- (r'(%s)(%s*)(\{)' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation), 'block'),
- (r'(%s)(%s*)(:)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace'),
- Punctuation),
- ('object-body/no-braces', 'classes', 'class')),
- include('whitespace'),
- (r'->|%s' % _operator, Punctuation, 'main'),
- default('main/object-body')
- ],
- 'main/object-body': [
- include('main/basic'),
- (r'(%s)(%s*)(=?)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace'),
- Punctuation), ('#pop', 'more', 'main')),
- default('#pop:2')
- ],
- 'block?/root': [
- (r'\{', Punctuation, ('#pop', 'block')),
- include('whitespace'),
- (r'(?=[\[\'"<(:])', Text, # It might be a VerbRule macro.
- ('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
- # It might be a macro like DefineAction.
- default(('#pop', 'object-body/no-braces'))
- ],
- 'block?': [
- (r'\{', Punctuation, ('#pop', 'block')),
- include('whitespace'),
- default('#pop')
- ],
- 'block/basic': [
- (r'[;:]+', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (r'default\b', Keyword.Reserved),
- (r'(%s)(%s*)(:)' % (_name, _ws),
- bygroups(Name.Label, using(this, state='whitespace'),
- Punctuation)),
- include('whitespace')
- ],
- 'block': [
- include('block/basic'),
- (r'(?!\Z)', Text, ('more', 'main'))
- ],
- 'block/embed': [
- (r'>>', String.Interpol, '#pop'),
- include('block/basic'),
- (r'(?!\Z)', Text, ('more/embed', 'main'))
- ],
- 'main/basic': [
- include('whitespace'),
- (r'\(', Punctuation, ('#pop', 'more', 'main')),
- (r'\[', Punctuation, ('#pop', 'more/list', 'main')),
- (r'\{', Punctuation, ('#pop', 'more/inner', 'main/inner',
- 'more/parameters', 'main/parameters')),
- (r'\*|\.{3}', Punctuation, '#pop'),
- (r'(?i)0x[\da-f]+', Number.Hex, '#pop'),
- (r'(\d+\.(?!\.)\d*|\.\d+)([eE][-+]?\d+)?|\d+[eE][-+]?\d+',
- Number.Float, '#pop'),
- (r'0[0-7]+', Number.Oct, '#pop'),
- (r'\d+', Number.Integer, '#pop'),
- (r'"""', String.Double, ('#pop', 'tdqs')),
- (r"'''", String.Single, ('#pop', 'tsqs')),
- (r'"', String.Double, ('#pop', 'dqs')),
- (r"'", String.Single, ('#pop', 'sqs')),
- (r'R"""', String.Regex, ('#pop', 'tdqr')),
- (r"R'''", String.Regex, ('#pop', 'tsqr')),
- (r'R"', String.Regex, ('#pop', 'dqr')),
- (r"R'", String.Regex, ('#pop', 'sqr')),
- # Two-token keywords
- (r'(extern)(%s+)(object\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved)),
- (r'(function|method)(%s*)(\()' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Punctuation),
- ('#pop', 'block?', 'more/parameters', 'main/parameters')),
- (r'(modify)(%s+)(grammar\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved),
- ('#pop', 'object-body/no-braces', ':', 'grammar')),
- (r'(new)(%s+(?=(?:function|method)\b))' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'))),
- (r'(object)(%s+)(template\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved), ('#pop', 'template')),
- (r'(string)(%s+)(template\b)' % _ws,
- bygroups(Keyword, using(this, state='whitespace'),
- Keyword.Reserved), ('#pop', 'function-name')),
- # Keywords
- (r'(argcount|definingobj|invokee|replaced|targetobj|targetprop)\b',
- Name.Builtin, '#pop'),
- (r'(break|continue|goto)\b', Keyword.Reserved, ('#pop', 'label')),
- (r'(case|extern|if|intrinsic|return|static|while)\b',
- Keyword.Reserved),
- (r'catch\b', Keyword.Reserved, ('#pop', 'catch')),
- (r'class\b', Keyword.Reserved,
- ('#pop', 'object-body/no-braces', 'class')),
- (r'(default|do|else|finally|try)\b', Keyword.Reserved, '#pop'),
- (r'(dictionary|property)\b', Keyword.Reserved,
- ('#pop', 'constants')),
- (r'enum\b', Keyword.Reserved, ('#pop', 'enum')),
- (r'export\b', Keyword.Reserved, ('#pop', 'main')),
- (r'(for|foreach)\b', Keyword.Reserved,
- ('#pop', 'more/inner', 'main/inner')),
- (r'(function|method)\b', Keyword.Reserved,
- ('#pop', 'block?', 'function-name')),
- (r'grammar\b', Keyword.Reserved,
- ('#pop', 'object-body/no-braces', 'grammar')),
- (r'inherited\b', Keyword.Reserved, ('#pop', 'inherited')),
- (r'local\b', Keyword.Reserved,
- ('#pop', 'more/local', 'main/local')),
- (r'(modify|replace|switch|throw|transient)\b', Keyword.Reserved,
- '#pop'),
- (r'new\b', Keyword.Reserved, ('#pop', 'class')),
- (r'(nil|true)\b', Keyword.Constant, '#pop'),
- (r'object\b', Keyword.Reserved, ('#pop', 'object-body/no-braces')),
- (r'operator\b', Keyword.Reserved, ('#pop', 'operator')),
- (r'propertyset\b', Keyword.Reserved,
- ('#pop', 'propertyset', 'main')),
- (r'self\b', Name.Builtin.Pseudo, '#pop'),
- (r'template\b', Keyword.Reserved, ('#pop', 'template')),
- # Operators
- (r'(__objref|defined)(%s*)(\()' % _ws,
- bygroups(Operator.Word, using(this, state='whitespace'),
- Operator), ('#pop', 'more/__objref', 'main')),
- (r'delegated\b', Operator.Word),
- # Compiler-defined macros and built-in properties
- (r'(__DATE__|__DEBUG|__LINE__|__FILE__|'
- r'__TADS_MACRO_FORMAT_VERSION|__TADS_SYS_\w*|__TADS_SYSTEM_NAME|'
- r'__TADS_VERSION_MAJOR|__TADS_VERSION_MINOR|__TADS3|__TIME__|'
- r'construct|finalize|grammarInfo|grammarTag|lexicalParent|'
- r'miscVocab|sourceTextGroup|sourceTextGroupName|'
- r'sourceTextGroupOrder|sourceTextOrder)\b', Name.Builtin, '#pop')
- ],
- 'main': [
- include('main/basic'),
- (_name, Name, '#pop'),
- default('#pop')
- ],
- 'more/basic': [
- (r'\(', Punctuation, ('more/list', 'main')),
- (r'\[', Punctuation, ('more', 'main')),
- (r'\.{3}', Punctuation),
- (r'->|\.\.', Punctuation, 'main'),
- (r'(?=;)|[:)\]]', Punctuation, '#pop'),
- include('whitespace'),
- (_operator, Operator, 'main'),
- (r'\?', Operator, ('main', 'more/conditional', 'main')),
- (r'(is|not)(%s+)(in\b)' % _ws,
- bygroups(Operator.Word, using(this, state='whitespace'),
- Operator.Word)),
- (r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
- ],
- 'more': [
- include('more/basic'),
- default('#pop')
- ],
- # Then expression (conditional operator)
- 'more/conditional': [
- (r':(?!:)', Operator, '#pop'),
- include('more')
- ],
- # Embedded expressions
- 'more/embed': [
- (r'>>', String.Interpol, '#pop:2'),
- include('more')
- ],
- # For/foreach loop initializer or short-form anonymous function
- 'main/inner': [
- (r'\(', Punctuation, ('#pop', 'more/inner', 'main/inner')),
- (r'local\b', Keyword.Reserved, ('#pop', 'main/local')),
- include('main')
- ],
- 'more/inner': [
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, 'main/inner'),
- (r'(in|step)\b', Keyword, 'main/inner'),
- include('more')
- ],
- # Local
- 'main/local': [
- (_name, Name.Variable, '#pop'),
- include('whitespace')
- ],
- 'more/local': [
- (r',', Punctuation, 'main/local'),
- include('more')
- ],
- # List
- 'more/list': [
- (r'[,:]', Punctuation, 'main'),
- include('more')
- ],
- # Parameter list
- 'main/parameters': [
- (r'(%s)(%s*)(?=:)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
- (r'(%s)(%s+)(%s)' % (_name, _ws, _name),
- bygroups(Name.Class, using(this, state='whitespace'),
- Name.Variable), '#pop'),
- (r'\[+', Punctuation),
- include('main/basic'),
- (_name, Name.Variable, '#pop'),
- default('#pop')
- ],
- 'more/parameters': [
- (r'(:)(%s*(?=[?=,:)]))' % _ws,
- bygroups(Punctuation, using(this, state='whitespace'))),
- (r'[?\]]+', Punctuation),
- (r'[:)]', Punctuation, ('#pop', 'multimethod?')),
- (r',', Punctuation, 'main/parameters'),
- (r'=', Punctuation, ('more/parameter', 'main')),
- include('more')
- ],
- 'more/parameter': [
- (r'(?=[,)])', Text, '#pop'),
- include('more')
- ],
- 'multimethod?': [
- (r'multimethod\b', Keyword, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
-
- # Statements and expressions
- 'more/__objref': [
- (r',', Punctuation, 'mode'),
- (r'\)', Operator, '#pop'),
- include('more')
- ],
- 'mode': [
- (r'(error|warn)\b', Keyword, '#pop'),
- include('whitespace')
- ],
- 'catch': [
- (r'\(+', Punctuation),
- (_name, Name.Exception, ('#pop', 'variables')),
- include('whitespace')
- ],
- 'enum': [
- include('whitespace'),
- (r'token\b', Keyword, ('#pop', 'constants')),
- default(('#pop', 'constants'))
- ],
- 'grammar': [
- (r'\)+', Punctuation),
- (r'\(', Punctuation, 'grammar-tag'),
- (r':', Punctuation, 'grammar-rules'),
- (_name, Name.Class),
- include('whitespace')
- ],
- 'grammar-tag': [
- include('whitespace'),
- (r'"""([^\\"<]|""?(?!")|\\"+|\\.|<(?!<))+("{3,}|<<)|'
- r'R"""([^\\"]|""?(?!")|\\"+|\\.)+"{3,}|'
- r"'''([^\\'<]|''?(?!')|\\'+|\\.|<(?!<))+('{3,}|<<)|"
- r"R'''([^\\']|''?(?!')|\\'+|\\.)+'{3,}|"
- r'"([^\\"<]|\\.|<(?!<))+("|<<)|R"([^\\"]|\\.)+"|'
- r"'([^\\'<]|\\.|<(?!<))+('|<<)|R'([^\\']|\\.)+'|"
- r"([^)\s\\/]|/(?![/*]))+|\)", String.Other, '#pop')
- ],
- 'grammar-rules': [
- include('string'),
- include('whitespace'),
- (r'(\[)(%s*)(badness)' % _ws,
- bygroups(Punctuation, using(this, state='whitespace'), Keyword),
- 'main'),
- (r'->|%s|[()]' % _operator, Punctuation),
- (_name, Name.Constant),
- default('#pop:2')
- ],
- ':': [
- (r':', Punctuation, '#pop')
- ],
- 'function-name': [
- (r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
- (r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
- (_name, Name.Function, '#pop'),
- include('whitespace')
- ],
- 'inherited': [
- (r'<', Punctuation, ('#pop', 'classes', 'class')),
- include('whitespace'),
- (_name, Name.Class, '#pop'),
- default('#pop')
- ],
- 'operator': [
- (r'negate\b', Operator.Word, '#pop'),
- include('whitespace'),
- (_operator, Operator),
- default('#pop')
- ],
- 'propertyset': [
- (r'\(', Punctuation, ('more/parameters', 'main/parameters')),
- (r'\{', Punctuation, ('#pop', 'object-body')),
- include('whitespace')
- ],
- 'template': [
- (r'(?=;)', Text, '#pop'),
- include('string'),
- (r'inherited\b', Keyword.Reserved),
- include('whitespace'),
- (r'->|\?|%s' % _operator, Punctuation),
- (_name, Name.Variable)
- ],
-
- # Identifiers
- 'class': [
- (r'\*|\.{3}', Punctuation, '#pop'),
- (r'object\b', Keyword.Reserved, '#pop'),
- (r'transient\b', Keyword.Reserved),
- (_name, Name.Class, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
- 'classes': [
- (r'[:,]', Punctuation, 'class'),
- include('whitespace'),
- (r'>', Punctuation, '#pop'),
- default('#pop')
- ],
- 'constants': [
- (r',+', Punctuation),
- (r';', Punctuation, '#pop'),
- (r'property\b', Keyword.Reserved),
- (_name, Name.Constant),
- include('whitespace')
- ],
- 'label': [
- (_name, Name.Label, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
- 'variables': [
- (r',+', Punctuation),
- (r'\)', Punctuation, '#pop'),
- include('whitespace'),
- (_name, Name.Variable)
- ],
-
- # Whitespace and comments
- 'whitespace': [
- (r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
- Comment.Preproc),
- (_comment_single, Comment.Single),
- (_comment_multiline, Comment.Multiline),
- (r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
- ],
-
- # Strings
- 'string': [
- (r'"""', String.Double, 'tdqs'),
- (r"'''", String.Single, 'tsqs'),
- (r'"', String.Double, 'dqs'),
- (r"'", String.Single, 'sqs')
- ],
- 's/escape': [
- (r'\{\{|\}\}|%s' % _escape, String.Escape)
- ],
- 's/verbatim': [
- (r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
- r'first\s+time|one\s+of|only|or|otherwise|'
- r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
- r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
- (r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
- r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
- String.Interpol, ('block/embed', 'more/embed', 'main'))
- ],
- 's/entity': [
- (r'(?i)&(#(x[\da-f]+|\d+)|[a-z][\da-z]*);?', Name.Entity)
- ],
- 'tdqs': _make_string_state(True, True),
- 'tsqs': _make_string_state(True, False),
- 'dqs': _make_string_state(False, True),
- 'sqs': _make_string_state(False, False),
- 'tdqs/listing': _make_string_state(True, True, 'listing'),
- 'tsqs/listing': _make_string_state(True, False, 'listing'),
- 'dqs/listing': _make_string_state(False, True, 'listing'),
- 'sqs/listing': _make_string_state(False, False, 'listing'),
- 'tdqs/xmp': _make_string_state(True, True, 'xmp'),
- 'tsqs/xmp': _make_string_state(True, False, 'xmp'),
- 'dqs/xmp': _make_string_state(False, True, 'xmp'),
- 'sqs/xmp': _make_string_state(False, False, 'xmp'),
-
- # Tags
- 'tdqt': _make_tag_state(True, True),
- 'tsqt': _make_tag_state(True, False),
- 'dqt': _make_tag_state(False, True),
- 'sqt': _make_tag_state(False, False),
- 'dqs/tdqt': _make_attribute_value_state(r'"', True, True),
- 'dqs/tsqt': _make_attribute_value_state(r'"', True, False),
- 'dqs/dqt': _make_attribute_value_state(r'"', False, True),
- 'dqs/sqt': _make_attribute_value_state(r'"', False, False),
- 'sqs/tdqt': _make_attribute_value_state(r"'", True, True),
- 'sqs/tsqt': _make_attribute_value_state(r"'", True, False),
- 'sqs/dqt': _make_attribute_value_state(r"'", False, True),
- 'sqs/sqt': _make_attribute_value_state(r"'", False, False),
- 'uqs/tdqt': _make_attribute_value_state(_no_quote, True, True),
- 'uqs/tsqt': _make_attribute_value_state(_no_quote, True, False),
- 'uqs/dqt': _make_attribute_value_state(_no_quote, False, True),
- 'uqs/sqt': _make_attribute_value_state(_no_quote, False, False),
-
- # Regular expressions
- 'tdqr': [
- (r'[^\\"]+', String.Regex),
- (r'\\"*', String.Regex),
- (r'"{3,}', String.Regex, '#pop'),
- (r'"', String.Regex)
- ],
- 'tsqr': [
- (r"[^\\']+", String.Regex),
- (r"\\'*", String.Regex),
- (r"'{3,}", String.Regex, '#pop'),
- (r"'", String.Regex)
- ],
- 'dqr': [
- (r'[^\\"]+', String.Regex),
- (r'\\"?', String.Regex),
- (r'"', String.Regex, '#pop')
- ],
- 'sqr': [
- (r"[^\\']+", String.Regex),
- (r"\\'?", String.Regex),
- (r"'", String.Regex, '#pop')
- ]
- }
-
- def get_tokens_unprocessed(self, text, **kwargs):
- pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
- if_false_level = 0
- for index, token, value in (
- RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
- if if_false_level == 0: # Not in a false #if
- if (token is Comment.Preproc and
- re.match(r'%sif%s+(0|nil)%s*$\n?' %
- (pp, self._ws_pp, self._ws_pp), value)):
- if_false_level = 1
- else: # In a false #if
- if token is Comment.Preproc:
- if (if_false_level == 1 and
- re.match(r'%sel(if|se)\b' % pp, value)):
- if_false_level = 0
- elif re.match(r'%sif' % pp, value):
- if_false_level += 1
- elif re.match(r'%sendif\b' % pp, value):
- if_false_level -= 1
- else:
- token = Comment
- yield index, token, value
-
- def analyse_text(text):
- """This is a rather generic descriptive language without strong
- identifiers. It looks like a 'GameMainDef' has to be present,
- and/or a 'versionInfo' with an 'IFID' field."""
- result = 0
- if '__TADS' in text or 'GameMainDef' in text:
- result += 0.2
-
- # A 'versionInfo' object with an 'IFID' field is a fairly distinctive pattern in TADS 3 source
- if 'versionInfo' in text and 'IFID' in text:
- result += 0.1
-
- return result
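The analyse_text score above feeds Pygments' lexer guessing. As a minimal sketch of how that heuristic is consumed (assuming only a standard Pygments install; the sample text is illustrative, not real game source, and guess_lexer may still prefer another lexer if one scores higher):

    from pygments.lexers import Tads3Lexer, guess_lexer

    # Illustrative snippet containing the markers the heuristic looks for.
    sample = ('versionInfo: GameID\n'
              '    IFID = "00000000-0000-0000-0000-000000000000"\n'
              ';\n'
              'gameMain: GameMainDef;\n')

    # Pygments wraps analyse_text into a static scorer returning a float in [0, 1].
    print(Tads3Lexer.analyse_text(sample))  # 0.2 (GameMainDef) + 0.1 (versionInfo/IFID)
    print(guess_lexer(sample))              # highest-scoring registered lexer wins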
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/iolang.py b/venv/lib/python3.11/site-packages/pygments/lexers/iolang.py
deleted file mode 100644
index 268fbde..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/iolang.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
- pygments.lexers.iolang
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Io language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Comment, Operator, Keyword, Name, String, Number, \
- Whitespace
-
-__all__ = ['IoLexer']
-
-
-class IoLexer(RegexLexer):
- """
- For Io (a small, prototype-based programming language) source.
-
- .. versionadded:: 0.10
- """
- name = 'Io'
- url = 'http://iolanguage.com/'
- filenames = ['*.io']
- aliases = ['io']
- mimetypes = ['text/x-iosrc']
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- # Comments
- (r'//(.*?)$', Comment.Single),
- (r'#(.*?)$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nestedcomment'),
- # DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Operators
- (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
- Operator),
- # keywords
- (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
- Keyword),
- # constants
- (r'(nil|false|true)\b', Name.Constant),
- # names
- (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
- Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
- # numbers
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'nestedcomment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ]
- }
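The IoLexer deleted here is a compact RegexLexer: a single 'root' state plus a pushed 'nestedcomment' state for /+ ... +/ blocks. A minimal usage sketch, assuming only a standard Pygments install; the snippet is illustrative and chosen to exercise the nested-comment state rather than to be idiomatic Io:

    from pygments import highlight
    from pygments.lexers import IoLexer
    from pygments.formatters import TerminalFormatter

    # The /+ ... +/ block pushes and pops the lexer's 'nestedcomment' state.
    code = ('/+ outer /+ inner +/ still a comment +/\n'
            'greet := method(name, ("Hello, " .. name) println)\n')
    print(highlight(code, IoLexer(), TerminalFormatter()))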
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/j.py b/venv/lib/python3.11/site-packages/pygments/lexers/j.py
deleted file mode 100644
index e99363c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/j.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""
- pygments.lexers.j
- ~~~~~~~~~~~~~~~~~
-
- Lexer for the J programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Whitespace
-
-__all__ = ['JLexer']
-
-
-class JLexer(RegexLexer):
- """
- For J source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'J'
- url = 'http://jsoftware.com/'
- aliases = ['j']
- filenames = ['*.ijs']
- mimetypes = ['text/x-j']
-
- validName = r'\b[a-zA-Z]\w*'
-
- tokens = {
- 'root': [
- # Shebang script
- (r'#!.*$', Comment.Preproc),
-
- # Comments
- (r'NB\..*', Comment.Single),
- (r'(\n+\s*)(Note)', bygroups(Whitespace, Comment.Multiline),
- 'comment'),
- (r'(\s*)(Note.*)', bygroups(Whitespace, Comment.Single)),
-
- # Whitespace
- (r'\s+', Whitespace),
-
- # Strings
- (r"'", String, 'singlequote'),
-
- # Definitions
- (r'0\s+:\s*0', Name.Entity, 'nounDefinition'),
- (r'(noun)(\s+)(define)(\s*)$', bygroups(Name.Entity, Whitespace,
- Name.Entity, Whitespace), 'nounDefinition'),
- (r'([1-4]|13)\s+:\s*0\b',
- Name.Function, 'explicitDefinition'),
- (r'(adverb|conjunction|dyad|monad|verb)(\s+)(define)\b',
- bygroups(Name.Function, Whitespace, Name.Function),
- 'explicitDefinition'),
-
- # Flow Control
- (words(('for_', 'goto_', 'label_'), suffix=validName+r'\.'), Name.Label),
- (words((
- 'assert', 'break', 'case', 'catch', 'catchd',
- 'catcht', 'continue', 'do', 'else', 'elseif',
- 'end', 'fcase', 'for', 'if', 'return',
- 'select', 'throw', 'try', 'while', 'whilst',
- ), suffix=r'\.'), Name.Label),
-
- # Variable Names
- (validName, Name.Variable),
-
- # Standard Library
- (words((
- 'ARGV', 'CR', 'CRLF', 'DEL', 'Debug',
- 'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF',
- 'LF2', 'Note', 'TAB', 'alpha17', 'alpha27',
- 'apply', 'bind', 'boxopen', 'boxxopen', 'bx',
- 'clear', 'cutLF', 'cutopen', 'datatype', 'def',
- 'dfh', 'drop', 'each', 'echo', 'empty',
- 'erase', 'every', 'evtloop', 'exit', 'expand',
- 'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
- 'getenv', 'hfd', 'inv', 'inverse', 'iospath',
- 'isatty', 'isutf8', 'items', 'leaf', 'list',
- 'nameclass', 'namelist', 'names', 'nc',
- 'nl', 'on', 'pick', 'rows',
- 'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
- 'sort', 'split', 'stderr', 'stdin', 'stdout',
- 'table', 'take', 'timespacex', 'timex', 'tmoutput',
- 'toCRLF', 'toHOST', 'toJ', 'tolower', 'toupper',
- 'type', 'ucp', 'ucpcount', 'usleep', 'utf8',
- 'uucp',
- )), Name.Function),
-
- # Copula
- (r'=[.:]', Operator),
-
- # Builtins
- (r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/?]', Operator),
-
- # Short Keywords
- (r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved),
- (r'[aDiLpqsStux]\:', Keyword.Reserved),
- (r'(_[0-9])\:', Keyword.Constant),
-
- # Parens
- (r'\(', Punctuation, 'parentheses'),
-
- # Numbers
- include('numbers'),
- ],
-
- 'comment': [
- (r'[^)]', Comment.Multiline),
- (r'^\)', Comment.Multiline, '#pop'),
- (r'[)]', Comment.Multiline),
- ],
-
- 'explicitDefinition': [
- (r'\b[nmuvxy]\b', Name.Decorator),
- include('root'),
- (r'[^)]', Name),
- (r'^\)', Name.Label, '#pop'),
- (r'[)]', Name),
- ],
-
- 'numbers': [
- (r'\b_{1,2}\b', Number),
- (r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number),
- (r'_?\d+\.(?=\d+)', Number.Float),
- (r'_?\d+x', Number.Integer.Long),
- (r'_?\d+', Number.Integer),
- ],
-
- 'nounDefinition': [
- (r'[^)]+', String),
- (r'^\)', Name.Label, '#pop'),
- (r'[)]', String),
- ],
-
- 'parentheses': [
- (r'\)', Punctuation, '#pop'),
- # include('nounDefinition'),
- include('explicitDefinition'),
- include('root'),
- ],
-
- 'singlequote': [
- (r"[^']+", String),
- (r"''", String),
- (r"'", String, '#pop'),
- ],
- }
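The JLexer deleted here relies on multi-line definition states ('nounDefinition' and 'explicitDefinition') that are closed by a line consisting of a single ')'. A small tokenization sketch, assuming only a standard Pygments install; the J snippet is illustrative:

    from pygments.lexers import JLexer

    # '0 : 0' opens a noun definition; the body is lexed as String until a lone ')'.
    code = ('text =: 0 : 0\n'
            'plain noun body, kept verbatim\n'
            ')\n'
            'NB. trailing comment\n')
    for tok_type, value in JLexer().get_tokens(code):
        print(tok_type, repr(value))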
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/javascript.py b/venv/lib/python3.11/site-packages/pygments/lexers/javascript.py
deleted file mode 100644
index bc5e2e4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/javascript.py
+++ /dev/null
@@ -1,1588 +0,0 @@
-"""
- pygments.lexers.javascript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for JavaScript and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import bygroups, combined, default, do_insertions, include, \
- inherit, Lexer, RegexLexer, this, using, words, line_re
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other, Generic, Whitespace
-from pygments.util import get_bool_opt
-import pygments.unistring as uni
-
-__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
- 'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
- 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer',
- 'NodeConsoleLexer']
-
-JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
- ']|\\\\u[a-fA-F0-9]{4})')
-JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Mn', 'Mc', 'Nd', 'Pc') +
- '\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
-JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
-
-
-class JavascriptLexer(RegexLexer):
- """
- For JavaScript source code.
- """
-
- name = 'JavaScript'
- url = 'https://www.ecma-international.org/publications-and-standards/standards/ecma-262/'
- aliases = ['javascript', 'js']
- filenames = ['*.js', '*.jsm', '*.mjs', '*.cjs']
- mimetypes = ['application/javascript', 'application/x-javascript',
- 'text/x-javascript', 'text/javascript']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Whitespace),
- (r'<!--', Comment),
- (r'//.*?$', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Whitespace, '#pop')
- ],
- 'root': [
- (r'\A#! ?/.*?$', Comment.Hashbang), # recognized by node.js
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
-
- # Numeric literals
- (r'0[bB][01]+n?', Number.Bin),
- (r'0[oO]?[0-7]+n?', Number.Oct), # Browsers support "0o7" and "07" (< ES5) notations
- (r'0[xX][0-9a-fA-F]+n?', Number.Hex),
- (r'[0-9]+n', Number.Integer), # JavaScript BigInt requires an "n" postfix
- # JavaScript doesn't have actual integer literals, so every other
- # numeric literal is handled by the regex below (including "normal"
- # integers)
- (r'(\.[0-9]+|[0-9]+\.[0-9]*|[0-9]+)([eE][-+]?[0-9]+)?', Number.Float),
-
- (r'\.\.\.|=>', Punctuation),
- (r'\+\+|--|~|\?\?=?|\?|:|\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|(?:\*\*|\|\||&&|[-<>+*%&|^/]))=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- (r'(typeof|instanceof|in|void|delete|new)\b', Operator.Word, 'slashstartsregex'),
-
- # Match stuff like: constructor
- (r'\b(constructor|from|as)\b', Keyword.Reserved),
-
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|yield|await|async|this|of|static|export|'
- r'import|debugger|extends|super)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|const|with|function|class)\b', Keyword.Declaration, 'slashstartsregex'),
-
- (r'(abstract|boolean|byte|char|double|enum|final|float|goto|'
- r'implements|int|interface|long|native|package|private|protected|'
- r'public|short|synchronized|throws|transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
-
- (r'(Array|Boolean|Date|BigInt|Function|Math|ArrayBuffer|'
- r'Number|Object|RegExp|String|Promise|Proxy|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|DataView|'
- r'document|window|globalThis|global|Symbol|Intl|'
- r'WeakSet|WeakMap|Set|Map|Reflect|JSON|Atomics|'
- r'Int(?:8|16|32)Array|BigInt64Array|Float32Array|Float64Array|'
- r'Uint8ClampedArray|Uint(?:8|16|32)Array|BigUint64Array)\b', Name.Builtin),
-
- (r'((?:Eval|Internal|Range|Reference|Syntax|Type|URI)?Error)\b', Name.Exception),
-
- # Match stuff like: super(argument, list)
- (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
- bygroups(Keyword, Whitespace), 'slashstartsregex'),
- # Match stuff like: function() {...}
- (r'([a-zA-Z_?.$][\w?.$]*)(?=\(\) \{)', Name.Other, 'slashstartsregex'),
-
- (JS_IDENT, Name.Other),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'`', String.Backtick, 'interp'),
- ],
- 'interp': [
- (r'`', String.Backtick, '#pop'),
- (r'\\.', String.Backtick),
- (r'\$\{', String.Interpol, 'interp-inside'),
- (r'\$', String.Backtick),
- (r'[^`\\$]+', String.Backtick),
- ],
- 'interp-inside': [
- # TODO: should this include single-line comments and allow nesting strings?
- (r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- }
-
-
-class TypeScriptLexer(JavascriptLexer):
- """
- For TypeScript source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'TypeScript'
- url = 'https://www.typescriptlang.org/'
- aliases = ['typescript', 'ts']
- filenames = ['*.ts']
- mimetypes = ['application/x-typescript', 'text/x-typescript']
-
- # Higher priority than the TypoScriptLexer, as TypeScript is far more
- # common these days
- priority = 0.5
-
- tokens = {
- 'root': [
- (r'(abstract|implements|private|protected|public|readonly)\b',
- Keyword, 'slashstartsregex'),
- (r'(enum|interface|override)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'\b(declare|type)\b', Keyword.Reserved),
- # Match variable type keywords
- (r'\b(string|boolean|number)\b', Keyword.Type),
- # Match stuff like: module name {...}
- (r'\b(module)(\s*)([\w?.$]+)(\s*)',
- bygroups(Keyword.Reserved, Whitespace, Name.Other, Whitespace), 'slashstartsregex'),
- # Match stuff like: (function: return type)
- (r'([\w?.$]+)(\s*)(:)(\s*)([\w?.$]+)',
- bygroups(Name.Other, Whitespace, Operator, Whitespace, Keyword.Type)),
- # Match stuff like: Decorators
- (r'@' + JS_IDENT, Keyword.Declaration),
- inherit,
- ],
- }
-
-
-class KalLexer(RegexLexer):
- """
- For Kal source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Kal'
- url = 'http://rzimmerman.github.io/kal'
- aliases = ['kal']
- filenames = ['*.kal']
- mimetypes = ['text/kal', 'application/kal']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Whitespace),
- (r'###[^#].*?###', Comment.Multiline),
- (r'(#(?!##[^#]).*?)(\n)', bygroups(Comment.Single, Whitespace)),
- ],
- 'functiondef': [
- (r'([$a-zA-Z_][\w$]*)(\s*)', bygroups(Name.Function, Whitespace),
- '#pop'),
- include('commentsandwhitespace'),
- ],
- 'classdef': [
- (r'\b(inherits)(\s+)(from)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'([$a-zA-Z_][\w$]*)(?=\s*\n)', Name.Class, '#pop'),
- (r'[$a-zA-Z_][\w$]*\b', Name.Class),
- include('commentsandwhitespace'),
- ],
- 'listcomprehension': [
- (r'\]', Punctuation, '#pop'),
- (r'\b(property|value)\b', Keyword),
- include('root'),
- ],
- 'waitfor': [
- (r'\n', Whitespace, '#pop'),
- (r'\bfrom\b', Keyword),
- include('root'),
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex),
- (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
- Operator),
- (r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
- r'doesnt\s+exist)\b', Operator.Word),
- (r'(\([^()]+\))?(\s*)(>)',
- bygroups(Name.Function, Whitespace, Punctuation)),
- (r'[{(]', Punctuation),
- (r'\[', Punctuation, 'listcomprehension'),
- (r'[})\].,]', Punctuation),
- (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
- (r'\bclass\b', Keyword.Declaration, 'classdef'),
- (r'\b(safe(?=\s))?(\s*)(wait(?=\s))(\s+)(for)\b',
- bygroups(Keyword, Whitespace, Keyword, Whitespace,
- Keyword), 'waitfor'),
- (r'\b(me|this)(\.[$a-zA-Z_][\w.$]*)?\b', Name.Variable.Instance),
- (r'(?<![.$])(run)(\s+)(in)(\s+)(parallel)\b',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(?<![.$])(for)(\s+)(parallel|series)?\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(?<![.$])(except)(\s+)(when)?\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(?<![.$])(fail)(\s+)(with)?\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(?<![.$])(inherits)(\s+)(from)?\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(?<![.$])(for)(\s+)(parallel|series)?\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (words((
- 'in', 'of', 'while', 'until', 'break', 'return', 'continue',
- 'when', 'if', 'unless', 'else', 'otherwise', 'throw', 'raise',
- 'try', 'catch', 'finally', 'new', 'delete', 'typeof',
- 'instanceof', 'super'), prefix=r'(?<![.$])', suffix=r'\b'),
- Keyword),
- (words((
- 'true', 'false', 'yes', 'no', 'on', 'off', 'null', 'nothing',
- 'none', 'NaN', 'Infinity', 'undefined'), prefix=r'(?<![.$])',
- suffix=r'\b'), Keyword.Constant),
- (words((
- 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Math',
- 'Number', 'Object', 'RegExp', 'String', 'decodeURI',
- 'decodeURIComponent', 'encodeURI', 'encodeURIComponent', 'eval',
- 'isFinite', 'isNaN', 'isSafeInteger', 'parseFloat', 'parseInt',
- 'document', 'window', 'globalThis', 'Symbol', 'print'),
- suffix=r'\b'), Name.Builtin),
- (r'([$a-zA-Z_][\w.$]*)(\s*)(:|[+\-*/]?\=)?\b',
- bygroups(Name.Variable, Whitespace, Operator)),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all Kal strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class LiveScriptLexer(RegexLexer):
- """
- For LiveScript source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'LiveScript'
- url = 'https://livescript.net/'
- aliases = ['livescript', 'live-script']
- filenames = ['*.ls']
- mimetypes = ['text/livescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Whitespace),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(#.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- ],
- 'multilineregex': [
- include('commentsandwhitespace'),
- (r'//([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'/', String.Regex),
- (r'[^/#]+', String.Regex)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'//', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'/', Operator, '#pop'),
- default('#pop'),
- ],
- 'root': [
- (r'\A(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
- r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
- (r'\+\+|&&|(?<![.$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
- r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
- r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
- r'[+*`%&|^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![.$])(for|own|in|of|while|until|loop|break|'
- r'return|continue|switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by|const|var|to|til)\b', Keyword,
- 'slashstartsregex'),
- (r'(?<![.$])(true|false|yes|no|on|off|'
- r'null|NaN|Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|'
- r'Number|Object|RegExp|String|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|'
- r'globalThis|Symbol|BigInt)\b', Name.Builtin),
- (r'([$a-zA-Z_][\w.\-:$]*)(\s*)([:=])(\s+)',
- bygroups(Name.Variable, Whitespace, Operator, Whitespace),
- 'slashstartsregex'),
- (r'(@[$a-zA-Z_][\w.\-:$]*)(\s*)([:=])(\s+)',
- bygroups(Name.Variable.Instance, Whitespace, Operator,
- Whitespace),
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][\w-]*', Name.Other, 'slashstartsregex'),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
- (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- (r'\\\S+', String),
- (r'<\[.*?\]>', String),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all LiveScript strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class DartLexer(RegexLexer):
- """
- For Dart source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Dart'
- url = 'http://dart.dev/'
- aliases = ['dart']
- filenames = ['*.dart']
- mimetypes = ['text/x-dart']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('string_literal'),
- (r'#!(.*?)$', Comment.Preproc),
- (r'\b(import|export)\b', Keyword, 'import_decl'),
- (r'\b(library|source|part of|part)\b', Keyword),
- (r'[^\S\n]+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'\b(class|extension|mixin)\b(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'class'),
- (r'\b(as|assert|break|case|catch|const|continue|default|do|else|finally|'
- r'for|if|in|is|new|rethrow|return|super|switch|this|throw|try|while)\b',
- Keyword),
- (r'\b(abstract|async|await|const|covariant|extends|external|factory|final|'
- r'get|implements|late|native|on|operator|required|set|static|sync|typedef|'
- r'var|with|yield)\b', Keyword.Declaration),
- (r'\b(bool|double|dynamic|int|num|Function|Never|Null|Object|String|void)\b',
- Keyword.Type),
- (r'\b(false|null|true)\b', Keyword.Constant),
- (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
- (r'@[a-zA-Z_$]\w*', Name.Decorator),
- (r'[a-zA-Z_$]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[(){}\[\],.;]', Punctuation),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
- (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
- (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
- (r'\n', Whitespace)
- # pseudo-keyword negate intentionally left out
- ],
- 'class': [
- (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
- ],
- 'import_decl': [
- include('string_literal'),
- (r'\s+', Whitespace),
- (r'\b(as|deferred|show|hide)\b', Keyword),
- (r'[a-zA-Z_$]\w*', Name),
- (r'\,', Punctuation),
- (r'\;', Punctuation, '#pop')
- ],
- 'string_literal': [
- # Raw strings.
- (r'r"""([\w\W]*?)"""', String.Double),
- (r"r'''([\w\W]*?)'''", String.Single),
- (r'r"(.*?)"', String.Double),
- (r"r'(.*?)'", String.Single),
- # Normal Strings.
- (r'"""', String.Double, 'string_double_multiline'),
- (r"'''", String.Single, 'string_single_multiline'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single')
- ],
- 'string_common': [
- (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])",
- String.Escape),
- (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
- (r'(\$\{)(.*?)(\})',
- bygroups(String.Interpol, using(this), String.Interpol))
- ],
- 'string_double': [
- (r'"', String.Double, '#pop'),
- (r'[^"$\\\n]+', String.Double),
- include('string_common'),
- (r'\$+', String.Double)
- ],
- 'string_double_multiline': [
- (r'"""', String.Double, '#pop'),
- (r'[^"$\\]+', String.Double),
- include('string_common'),
- (r'(\$|\")+', String.Double)
- ],
- 'string_single': [
- (r"'", String.Single, '#pop'),
- (r"[^'$\\\n]+", String.Single),
- include('string_common'),
- (r'\$+', String.Single)
- ],
- 'string_single_multiline': [
- (r"'''", String.Single, '#pop'),
- (r'[^\'$\\]+', String.Single),
- include('string_common'),
- (r'(\$|\')+', String.Single)
- ]
- }
-
-
-class LassoLexer(RegexLexer):
- """
- For Lasso source code, covering both Lasso 9
- syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
- HTML, use the `LassoHtmlLexer`.
-
- Additional options accepted:
-
- `builtinshighlighting`
- If given and ``True``, highlight builtin types, traits, methods, and
- members (default: ``True``).
- `requiredelimiters`
- If given and ``True``, only highlight code between delimiters as Lasso
- (default: ``False``).
-
- .. versionadded:: 1.6
- """
-
- name = 'Lasso'
- aliases = ['lasso', 'lassoscript']
- filenames = ['*.lasso', '*.lasso[89]']
- alias_filenames = ['*.incl', '*.inc', '*.las']
- mimetypes = ['text/x-lasso']
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
- (r'(?=\[|<)', Other, 'delimiters'),
- (r'\s+', Whitespace),
- default(('delimiters', 'lassofile')),
- ],
- 'delimiters': [
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Comment.Preproc, 'squarebrackets'),
- (r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
- (r'<(!--.*?-->)?', Other),
- (r'[^[<]+', Other),
- ],
- 'nosquarebrackets': [
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Other),
- (r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
- (r'<(!--.*?-->)?', Other),
- (r'[^[<]+', Other),
- ],
- 'noprocess': [
- (r'\[/noprocess\]', Comment.Preproc, '#pop'),
- (r'\[', Other),
- (r'[^[]', Other),
- ],
- 'squarebrackets': [
- (r'\]', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'anglebrackets': [
- (r'\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'lassofile': [
- (r'\]|\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'whitespacecomments': [
- (r'\s+', Whitespace),
- (r'(//.*?)(\s*)$', bygroups(Comment.Single, Whitespace)),
- (r'/\*\*!.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- ],
- 'lasso': [
- # whitespace/comments
- include('whitespacecomments'),
-
- # literals
- (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'(infinity|NaN)\b', Number),
- (r"'", String.Single, 'singlestring'),
- (r'"', String.Double, 'doublestring'),
- (r'`[^`]*`', String.Backtick),
-
- # names
- (r'\$[a-z_][\w.]*', Name.Variable),
- (r'#([a-z_][\w.]*|\d+\b)', Name.Variable.Instance),
- (r"(\.)(\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Whitespace, Name.Variable.Class)),
- (r"(self)(\s*)(->)(\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Whitespace, Operator, Whitespace,
- Name.Variable.Class)),
- (r'(\.\.?)(\s*)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Name.Builtin.Pseudo, Whitespace, Name.Other.Member)),
- (r'(->\\?|&)(\s*)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Operator, Whitespace, Name.Other.Member)),
- (r'(?<!->)(self|inherited|currentcapture|givenblock)\b',
- Name.Builtin.Pseudo),
- (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute),
- (r'(::)(\s*)([a-z_][\w.]*)',
- bygroups(Punctuation, Whitespace, Name.Label)),
- (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
- r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
- r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
- r'Error_InvalidDatabase|Error_InvalidPassword|'
- r'Error_InvalidUsername|Error_ModuleNotFound|'
- r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
- r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
- r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
- r'Error_UpdateError)\b', Name.Exception),
-
- # definitions
- (r'(define)(\s+)([a-z_][\w.]*)(\s*)(=>)(\s*)(type|trait|thread)\b',
- bygroups(Keyword.Declaration, Whitespace, Name.Class,
- Whitespace, Operator, Whitespace, Keyword)),
- (r'(define)(\s+)([a-z_][\w.]*)(\s*)(->)(\s*)([a-z_][\w.]*=?|[-+*/%])',
- bygroups(Keyword.Declaration, Whitespace, Name.Class,
- Whitespace, Operator, Whitespace, Name.Function),
- 'signature'),
- (r'(define)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Function), 'signature'),
- (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*\())', bygroups(Keyword, Whitespace, Name.Function),
- 'signature'),
- (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword, Whitespace, Name.Function)),
-
- # keywords
- (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
- (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
- (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
- r'null|boolean|bytes|keyword|list|locale|queue|set|stack|'
- r'staticarray)\b', Keyword.Type),
- (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Whitespace, Keyword)),
- (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Whitespace, Name)),
- (r'require\b', Keyword, 'requiresection'),
- (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
- (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
- r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
- r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
- r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
- r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|Link_FirstRecord|'
- r'Link_LastGroup|Link_LastRecord|Link_NextGroup|Link_NextRecord|'
- r'Link_PrevGroup|Link_PrevRecord|Log|Loop|Output_None|Portal|'
- r'Private|Protect|Records|Referer|Referrer|Repeating|ResultSet|'
- r'Rows|Search_Args|Search_Arguments|Select|Sort_Args|'
- r'Sort_Arguments|Thread_Atomic|Value_List|While|Abort|Case|Else|'
- r'Fail_If|Fail_IfNot|Fail|If_Empty|If_False|If_Null|If_True|'
- r'Loop_Abort|Loop_Continue|Loop_Count|Params|Params_Up|Return|'
- r'Return_Value|Run_Children|SOAP_DefineTag|SOAP_LastRequest|'
- r'SOAP_LastResponse|Tag_Name|ascending|average|by|define|'
- r'descending|do|equals|frozen|group|handle_failure|import|in|into|'
- r'join|let|match|max|min|on|order|parent|protected|provide|public|'
- r'require|returnhome|skip|split_thread|sum|take|thread|to|trait|'
- r'type|where|with|yield|yieldhome)\b',
- bygroups(Punctuation, Keyword)),
-
- # other
- (r',', Punctuation, 'commamember'),
- (r'(and|or|not)\b', Operator.Word),
- (r'([a-z_][\w.]*)(\s*)(::)(\s*)([a-z_][\w.]*)?(\s*=(?!=))',
- bygroups(Name, Whitespace, Punctuation, Whitespace, Name.Label,
- Operator)),
- (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
- (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
- bygroups(Operator, Operator.Word)),
- (r':=|[-+*/%=<>&|!?\\]+', Operator),
- (r'[{}():;,@^]', Punctuation),
- ],
- 'singlestring': [
- (r"'", String.Single, '#pop'),
- (r"[^'\\]+", String.Single),
- include('escape'),
- (r"\\", String.Single),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- include('escape'),
- (r'\\', String.Double),
- ],
- 'escape': [
- (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:\n\r]+:|'
- r'[abefnrtv?"\'\\]|$)', String.Escape),
- ],
- 'signature': [
- (r'=>', Operator, '#pop'),
- (r'\)', Punctuation, '#pop'),
- (r'[(,]', Punctuation, 'parameter'),
- include('lasso'),
- ],
- 'parameter': [
- (r'\)', Punctuation, '#pop'),
- (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
- (r'\.\.\.', Name.Builtin.Pseudo),
- include('lasso'),
- ],
- 'requiresection': [
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
- (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
- (r'(::)(\s*)([a-z_][\w.]*)',
- bygroups(Punctuation, Whitespace, Name.Label)),
- (r',', Punctuation),
- include('whitespacecomments'),
- ],
- 'requiresignature': [
- (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
- (r'\)', Punctuation, '#pop:2'),
- (r'-?[a-z_][\w.]*', Name.Attribute),
- (r'(::)(\s*)([a-z_][\w.]*)',
- bygroups(Punctuation, Whitespace, Name.Label)),
- (r'\.\.\.', Name.Builtin.Pseudo),
- (r'[(,]', Punctuation),
- include('whitespacecomments'),
- ],
- 'commamember': [
- (r'(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
- Name.Function, 'signature'),
- include('whitespacecomments'),
- default('#pop'),
- ],
- }
-
- def __init__(self, **options):
- self.builtinshighlighting = get_bool_opt(
- options, 'builtinshighlighting', True)
- self.requiredelimiters = get_bool_opt(
- options, 'requiredelimiters', False)
-
- self._builtins = set()
- self._members = set()
- if self.builtinshighlighting:
- from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
- for key, value in BUILTINS.items():
- self._builtins.update(value)
- for key, value in MEMBERS.items():
- self._members.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.requiredelimiters:
- stack.append('delimiters')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if (token is Name.Other and value.lower() in self._builtins or
- token is Name.Other.Member and
- value.lower().rstrip('=') in self._members):
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- rv = 0.0
- if 'bin/lasso9' in text:
- rv += 0.8
- if re.search(r'<\?lasso', text, re.I):
- rv += 0.4
- if re.search(r'local\(', text, re.I):
- rv += 0.4
- return rv
-
-
-class ObjectiveJLexer(RegexLexer):
- """
- For Objective-J source code with preprocessor directives.
-
- .. versionadded:: 1.3
- """
-
- name = 'Objective-J'
- aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
- filenames = ['*.j']
- mimetypes = ['text/x-objective-j']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//[^\n]*\n|/[*](?:[^*]|[*][^/])*[*]/)*'
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # function definition
- (r'^(' + _ws + r'[+-]' + _ws + r')([(a-zA-Z_].*?[^(])(' + _ws + r'\{)',
- bygroups(using(this), using(this, state='function_signature'),
- using(this))),
-
- # class definition
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Whitespace),
- 'classname'),
- (r'(@class|@protocol)(\s*)', bygroups(Keyword, Whitespace),
- 'forward_classname'),
- (r'(\s*)(@end)(\s*)', bygroups(Whitespace, Keyword, Whitespace)),
-
- include('statements'),
- ('[{()}]', Punctuation),
- (';', Punctuation),
- ],
- 'whitespace': [
- (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Whitespace, String.Double)),
- (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Whitespace, String.Double)),
- (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Whitespace, String.Double)),
- (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Whitespace, String.Double)),
-
- (r'#if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
-
- (r'\s+', Whitespace),
- (r'(\\)(\n)',
- bygroups(String.Escape, Whitespace)), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'<!--', Comment),
- ],
- 'slashstartsregex': [
- include('whitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop'),
- ],
- 'badregex': [
- (r'\n', Whitespace, '#pop'),
- ],
- 'statements': [
- (r'(L|@)?"', String, 'string'),
- (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
-
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
- r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
-
- (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-
- (r'(@selector|@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
- r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
-
- (r'(int|long|float|short|double|char|unsigned|signed|void|'
- r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
- Keyword.Type),
-
- (r'(self|super)\b', Name.Builtin),
-
- (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
- r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
- r'SQRT2)\b', Keyword.Constant),
-
- (r'(Array|Boolean|Date|Error|Function|Math|'
- r'Number|Object|RegExp|String|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window|globalThis|Symbol)\b', Name.Builtin),
-
- (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
- bygroups(Name.Function, using(this))),
-
- (r'[$a-zA-Z_]\w*', Name),
- ],
- 'classname': [
- # interface definition that inherits
- (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
- r')([a-zA-Z_]\w*)?',
- bygroups(Name.Class, using(this), Name.Class), '#pop'),
- # interface definition for a category
- (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
- bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
- # simple interface / implementation
- (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
- ],
- 'forward_classname': [
- (r'([a-zA-Z_]\w*)(\s*)(,)(\s*)',
- bygroups(Name.Class, Whitespace, Text, Whitespace), '#push'),
- (r'([a-zA-Z_]\w*)(\s*)(;?)',
- bygroups(Name.Class, Whitespace, Text), '#pop'),
- ],
- 'function_signature': [
- include('whitespace'),
-
- # start of a selector w/ parameters
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), 'function_parameters'),
-
- # no-param function
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), "#pop"),
-
- # no return type given, start of a selector w/ parameters
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups(Name.Function), 'function_parameters'),
-
- # no return type given, no-param function
- (r'([$a-zA-Z_]\w+)', # function name
- bygroups(Name.Function), "#pop"),
-
- default('#pop'),
- ],
- 'function_parameters': [
- include('whitespace'),
-
- # parameters
- (r'(\(' + _ws + ')' # open paren
- r'([^)]+)' # type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # param name
- bygroups(using(this), Keyword.Type, using(this), Text)),
-
- # one piece of a selector name
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- Name.Function),
-
- # smallest possible selector piece
- (r'(:)', Name.Function),
-
- # var args
- (r'(,' + _ws + r'\.\.\.)', using(this)),
-
- # param name
- (r'([$a-zA-Z_]\w+)', Text),
- ],
- 'expression': [
- (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
- Punctuation)),
- (r'(\))', Punctuation, "#pop"),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace), '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Whitespace),
- (r'\n', Whitespace, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'(.*?)(\n)', bygroups(Comment, Whitespace)),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*@import\s+[<"]', text, re.MULTILINE):
- # special directive found in most Objective-J files
- return True
- return False
-
-
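The analyse_text hook just above is what feeds Pygments' guess_lexer ranking: a file containing an @import <...> directive scores high enough that automatic detection is likely to land on the Objective-J lexer. A minimal sketch of that path, assuming a Pygments installation that still ships ObjectiveJLexer; the one-line snippet is purely illustrative:

    from pygments.lexers import guess_lexer

    snippet = '@import <Foundation/Foundation.j>\n'  # illustrative Objective-J source
    lexer = guess_lexer(snippet)
    print(lexer.name)  # expected to report the Objective-J lexer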
-class CoffeeScriptLexer(RegexLexer):
- """
- For CoffeeScript source code.
-
- .. versionadded:: 1.3
- """
-
- name = 'CoffeeScript'
- url = 'http://coffeescript.org'
- aliases = ['coffeescript', 'coffee-script', 'coffee']
- filenames = ['*.coffee']
- mimetypes = ['text/coffeescript']
-
- _operator_re = (
- r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
- r'\|\||\\(?=\n)|'
- r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|\^/])=?')
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Whitespace),
- (r'###[^#].*?###', Comment.Multiline),
- (r'(#(?!##[^#]).*?)(\n)', bygroups(Comment.Single, Whitespace)),
- ],
- 'multilineregex': [
- (r'[^/#]+', String.Regex),
- (r'///([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'#\{', String.Interpol, 'interpoling_string'),
- (r'[/#]', String.Regex),
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'///', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
-            # This isn't really about guarding against mishighlighting
-            # well-formed code; it just prevents an infinite loop between
-            # the 'root' and 'slashstartsregex' states.
- (r'/', Operator, '#pop'),
- default('#pop'),
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'\A(?=\s|/)', Text, 'slashstartsregex'),
- (_operator_re, Operator, 'slashstartsregex'),
- (r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![.$])(for|own|in|of|while|until|'
- r'loop|break|return|continue|'
- r'switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
- (r'(?<![.$])(true|false|yes|no|on|off|null|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|'
- r'Number|Object|RegExp|String|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|globalThis|Symbol)\b',
- Name.Builtin),
- (r'([$a-zA-Z_][\w.:$]*)(\s*)([:=])(\s+)',
- bygroups(Name.Variable, Whitespace, Operator, Whitespace),
- 'slashstartsregex'),
- (r'(@[$a-zA-Z_][\w.:$]*)(\s*)([:=])(\s+)',
- bygroups(Name.Variable.Instance, Whitespace, Operator, Whitespace),
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][\w$]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
-            # Note that all CoffeeScript strings are multi-line;
-            # hash marks, quotes and backslashes must be parsed one at a time.
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
-            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class MaskLexer(RegexLexer):
- """
- For Mask markup.
-
- .. versionadded:: 2.0
- """
- name = 'Mask'
- url = 'https://github.com/atmajs/MaskJS'
- aliases = ['mask']
- filenames = ['*.mask']
- mimetypes = ['text/x-mask']
-
- flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'[{};>]', Punctuation),
- (r"'''", String, 'string-trpl-single'),
- (r'"""', String, 'string-trpl-double'),
- (r"'", String, 'string-single'),
- (r'"', String, 'string-double'),
- (r'([\w-]+)', Name.Tag, 'node'),
- (r'([^.#;{>\s]+)', Name.Class, 'node'),
- (r'(#[\w-]+)', Name.Function, 'node'),
- (r'(\.[\w-]+)', Name.Variable.Class, 'node')
- ],
- 'string-base': [
- (r'\\.', String.Escape),
- (r'~\[', String.Interpol, 'interpolation'),
- (r'.', String.Single),
- ],
- 'string-single': [
- (r"'", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-double': [
- (r'"', String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-single': [
- (r"'''", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-double': [
- (r'"""', String.Single, '#pop'),
- include('string-base')
- ],
- 'interpolation': [
- (r'\]', String.Interpol, '#pop'),
- (r'(\s*)(:)', bygroups(Whitespace, String.Interpol), 'expression'),
- (r'(\s*)(\w+)(:)', bygroups(Whitespace, Name.Other, Punctuation)),
- (r'[^\]]+', String.Interpol)
- ],
- 'expression': [
- (r'[^\]]+', using(JavascriptLexer), '#pop')
- ],
- 'node': [
- (r'\s+', Whitespace),
- (r'\.', Name.Variable.Class, 'node-class'),
- (r'\#', Name.Function, 'node-id'),
- (r'(style)([ \t]*)(=)',
- bygroups(Name.Attribute, Whitespace, Operator),
- 'node-attr-style-value'),
- (r'([\w:-]+)([ \t]*)(=)',
- bygroups(Name.Attribute, Whitespace, Operator),
- 'node-attr-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'[>{;]', Punctuation, '#pop')
- ],
- 'node-class': [
- (r'[\w-]+', Name.Variable.Class),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-id': [
- (r'[\w-]+', Name.Function),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-attr-value': [
- (r'\s+', Whitespace),
- (r'\w+', Name.Variable, '#pop'),
- (r"'", String, 'string-single-pop2'),
- (r'"', String, 'string-double-pop2'),
- default('#pop')
- ],
- 'node-attr-style-value': [
- (r'\s+', Whitespace),
- (r"'", String.Single, 'css-single-end'),
- (r'"', String.Single, 'css-double-end'),
- include('node-attr-value')
- ],
- 'css-base': [
- (r'\s+', Whitespace),
- (r";", Punctuation),
- (r"[\w\-]+\s*:", Name.Builtin)
- ],
- 'css-single-end': [
- include('css-base'),
- (r"'", String.Single, '#pop:2'),
- (r"[^;']+", Name.Entity)
- ],
- 'css-double-end': [
- include('css-base'),
- (r'"', String.Single, '#pop:2'),
- (r'[^;"]+', Name.Entity)
- ],
- 'string-single-pop2': [
- (r"'", String.Single, '#pop:2'),
- include('string-base')
- ],
- 'string-double-pop2': [
- (r'"', String.Single, '#pop:2'),
- include('string-base')
- ],
- }
-
-
-class EarlGreyLexer(RegexLexer):
- """
-    For Earl Grey source code.
-
-    .. versionadded:: 2.1
- """
-
- name = 'Earl Grey'
- aliases = ['earl-grey', 'earlgrey', 'eg']
- filenames = ['*.eg']
- mimetypes = ['text/x-earl-grey']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- include('control'),
- (r'[^\S\n]+', Text),
- (r'(;;.*)(\n)', bygroups(Comment, Whitespace)),
- (r'[\[\]{}:(),;]', Punctuation),
- (r'(\\)(\n)', bygroups(String.Escape, Whitespace)),
- (r'\\', Text),
- include('errors'),
- (words((
- 'with', 'where', 'when', 'and', 'not', 'or', 'in',
- 'as', 'of', 'is'),
- prefix=r'(?<=\s|\[)', suffix=r'(?![\w$\-])'),
- Operator.Word),
- (r'[*@]?->', Name.Function),
- (r'[+\-*/~^<>%&|?!@#.]*=', Operator.Word),
- (r'\.{2,3}', Operator.Word), # Range Operator
- (r'([+*/~^<>&|?!]+)|([#\-](?=\s))|@@+(?=\s)|=+', Operator),
- (r'(?<![\w$\-])(var|let)(?:[^\w$])', Keyword.Declaration),
- include('keywords'),
- include('builtins'),
- include('assignment'),
- (r'''(?x)
- (?:()([a-zA-Z$_](?:[\w$\-]*[\w$])?)|
- (?<=[\s{\[(])(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?))
- (?=.*%)''',
- bygroups(Punctuation, Name.Tag, Punctuation, Name.Class.Start), 'dbs'),
- (r'[rR]?`', String.Backtick, 'bt'),
- (r'[rR]?```', String.Backtick, 'tbt'),
- (r'(?<=[\s\[{(,;])\.([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
- r'(?=[\s\]}),;])', String.Symbol),
- include('nested'),
- (r'(?:[rR]|[rR]\.[gmi]{1,3})?"', String, combined('stringescape', 'dqs')),
- (r'(?:[rR]|[rR]\.[gmi]{1,3})?\'', String, combined('stringescape', 'sqs')),
- (r'"""', String, combined('stringescape', 'tdqs')),
- include('tuple'),
- include('import_paths'),
- include('name'),
- include('numbers'),
- ],
- 'dbs': [
- (r'(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?)(?=[.\[\s])',
- bygroups(Punctuation, Name.Class.DBS)),
- (r'(\[)([\^#][a-zA-Z$_](?:[\w$\-]*[\w$])?)(\])',
- bygroups(Punctuation, Name.Entity.DBS, Punctuation)),
- (r'\s+', Whitespace),
- (r'%', Operator.DBS, '#pop'),
- ],
- 'import_paths': [
- (r'(?<=[\s:;,])(\.{1,3}(?:[\w\-]*/)*)(\w(?:[\w\-]*\w)*)(?=[\s;,])',
- bygroups(Text.Whitespace, Text)),
- ],
- 'assignment': [
- (r'(\.)?([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
- r'(?=\s+[+\-*/~^<>%&|?!@#.]*\=\s)',
- bygroups(Punctuation, Name.Variable))
- ],
- 'errors': [
- (words(('Error', 'TypeError', 'ReferenceError'),
- prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Name.Exception),
- (r'''(?x)
- (?<![\w$])
- E\.[\w$](?:[\w$\-]*[\w$])?
- (?:\.[\w$](?:[\w$\-]*[\w$])?)*
- (?=[({\[?!\s])''',
- Name.Exception),
- ],
- 'control': [
- (r'''(?x)
- ([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?!\n)\s+
- (?!and|as|each\*|each|in|is|mod|of|or|when|where|with)
- (?=(?:[+\-*/~^<>%&|?!@#.])?[a-zA-Z$_](?:[\w$-]*[\w$])?)''',
- Keyword.Control),
- (r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(?!\n)(\s+)(?=[\'"\d{\[(])',
- bygroups(Keyword.Control, Whitespace)),
- (r'''(?x)
- (?:
- (?<=[%=])|
- (?<=[=\-]>)|
- (?<=with|each|with)|
- (?<=each\*|where)
- )(\s+)
- ([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
- bygroups(Whitespace, Keyword.Control, Punctuation)),
- (r'''(?x)
- (?<![+\-*/~^<>%&|?!@#.])(\s+)
- ([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
- bygroups(Whitespace, Keyword.Control, Punctuation)),
- ],
- 'nested': [
- (r'''(?x)
- (?<=[\w$\]})])(\.)
- ([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?=\s+with(?:\s|\n))''',
- bygroups(Punctuation, Name.Function)),
- (r'''(?x)
- (?<!\s)(\.)
- ([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?=[}\]).,;:\s])''',
- bygroups(Punctuation, Name.Field)),
- (r'''(?x)
- (?<=[\w$\]})])(\.)
- ([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?=[\[{(:])''',
- bygroups(Punctuation, Name.Function)),
- ],
- 'keywords': [
- (words((
- 'each', 'each*', 'mod', 'await', 'break', 'chain',
- 'continue', 'elif', 'expr-value', 'if', 'match',
- 'return', 'yield', 'pass', 'else', 'require', 'var',
- 'let', 'async', 'method', 'gen'),
- prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Keyword.Pseudo),
- (words(('this', 'self', '@'),
- prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
- Keyword.Constant),
- (words((
- 'Function', 'Object', 'Array', 'String', 'Number',
- 'Boolean', 'ErrorFactory', 'ENode', 'Promise'),
- prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
- Keyword.Type),
- ],
- 'builtins': [
- (words((
- 'send', 'object', 'keys', 'items', 'enumerate', 'zip',
- 'product', 'neighbours', 'predicate', 'equal',
- 'nequal', 'contains', 'repr', 'clone', 'range',
- 'getChecker', 'get-checker', 'getProperty', 'get-property',
- 'getProjector', 'get-projector', 'consume', 'take',
- 'promisify', 'spawn', 'constructor'),
- prefix=r'(?<![\w\-#.])', suffix=r'(?![\w\-.])'),
- Name.Builtin),
- (words((
- 'true', 'false', 'null', 'undefined'),
- prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Name.Constant),
- ],
- 'name': [
- (r'@([a-zA-Z$_](?:[\w$-]*[\w$])?)', Name.Variable.Instance),
- (r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(\+\+|\-\-)?',
- bygroups(Name.Symbol, Operator.Word))
- ],
- 'tuple': [
- (r'#[a-zA-Z_][\w\-]*(?=[\s{(,;])', Name.Namespace)
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, '#pop'),
- include('root')
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'[^\\\'"]', String),
- (r'[\'"\\]', String),
- (r'\n', String) # All strings are multiline in EG
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape),
- (r'\{', String.Interpol, 'interpoling_string'),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- ],
- 'bt': [
- (r'`', String.Backtick, '#pop'),
- (r'(?<!`)\n', String.Backtick),
- (r'\^=?', String.Escape),
- (r'.+', String.Backtick),
- ],
- 'tbt': [
- (r'```', String.Backtick, '#pop'),
- (r'\n', String.Backtick),
- (r'\^=?', String.Escape),
- (r'[^`]+', String.Backtick),
- ],
- 'numbers': [
- (r'\d+\.(?!\.)\d*([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'8r[0-7]+', Number.Oct),
- (r'2r[01]+', Number.Bin),
- (r'16r[a-fA-F0-9]+', Number.Hex),
- (r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
- Number.Radix),
- (r'\d+', Number.Integer)
- ],
- }
-
-
-class JuttleLexer(RegexLexer):
- """
- For Juttle source code.
-
- .. versionadded:: 2.2
- """
-
- name = 'Juttle'
- url = 'http://juttle.github.io/'
- aliases = ['juttle']
- filenames = ['*.juttle']
- mimetypes = ['application/juttle', 'application/x-juttle',
- 'text/x-juttle', 'text/juttle']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r':\d{2}:\d{2}:\d{2}(\.\d*)?:', String.Moment),
- (r':(now|beginning|end|forever|yesterday|today|tomorrow|'
- r'(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
- (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?'
- r'(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
- (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|'
- r'day|week|month|year)[s]?'
- r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|'
- r'day|week|month|year)[s]?)'
- r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(import|return|continue|if|else)\b', Keyword, 'slashstartsregex'),
- (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration,
- 'slashstartsregex'),
- (r'(batch|emit|filter|head|join|keep|pace|pass|put|read|reduce|remove|'
- r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b',
- Keyword.Reserved),
- (r'(true|false|null|Infinity)\b', Keyword.Constant),
- (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b',
- Name.Builtin),
- (JS_IDENT, Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
-
- }
-
-
-class NodeConsoleLexer(Lexer):
- """
- For parsing within an interactive Node.js REPL, such as:
-
- .. sourcecode:: nodejsrepl
-
- > let a = 3
- undefined
- > a
- 3
- > let b = '4'
- undefined
- > b
- '4'
- > b == a
- false
-
-    .. versionadded:: 2.10
- """
- name = 'Node.js REPL console session'
- aliases = ['nodejsrepl', ]
- mimetypes = ['text/x-nodejsrepl', ]
-
- def get_tokens_unprocessed(self, text):
- jslexer = JavascriptLexer(**self.options)
-
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:1]),
- (1, Whitespace, line[1:2])]))
-
- curcode += line[2:]
- elif line.startswith('...'):
-                # Node nests the '...' continuation prompt according to depth
- code = line.lstrip('.')
- lead = len(line) - len(code)
-
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:lead])]))
-
- curcode += code
- else:
- if curcode:
- yield from do_insertions(insertions,
- jslexer.get_tokens_unprocessed(curcode))
-
- curcode = ''
- insertions = []
-
- yield from do_insertions([],
- jslexer.get_tokens_unprocessed(line))
-
- if curcode:
- yield from do_insertions(insertions,
- jslexer.get_tokens_unprocessed(curcode))
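The NodeConsoleLexer above leans on do_insertions to interleave prompt tokens with JavaScript tokens, and callers normally reach it through Pygments' public helpers rather than instantiating it directly. A minimal usage sketch, assuming an installation that still registers the 'nodejsrepl' alias; the transcript is made up:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    transcript = "> let a = 3\nundefined\n> a\n3\n"  # illustrative REPL session
    lexer = get_lexer_by_name('nodejsrepl')          # alias declared in the class above
    print(highlight(transcript, lexer, TerminalFormatter()))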
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/jmespath.py b/venv/lib/python3.11/site-packages/pygments/lexers/jmespath.py
deleted file mode 100644
index 74aa572..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/jmespath.py
+++ /dev/null
@@ -1,68 +0,0 @@
-"""
- pygments.lexers.jmespath
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the JMESPath language
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
-from pygments.token import String, Punctuation, Whitespace, Name, Operator, \
- Number, Literal, Keyword
-
-__all__ = ['JMESPathLexer']
-
-
-class JMESPathLexer(RegexLexer):
- """
- For JMESPath queries.
- """
- name = 'JMESPath'
- url = 'https://jmespath.org'
- filenames = ['*.jp']
- aliases = ['jmespath', 'jp']
-
- tokens = {
- 'string': [
- (r"'(\\(.|\n)|[^'\\])*'", String),
- ],
- 'punctuation': [
- (r'(\[\?|[\.\*\[\],:\(\)\{\}\|])', Punctuation),
- ],
- 'ws': [
- (r" |\t|\n|\r", Whitespace)
- ],
- "dq-identifier": [
- (r'[^\\"]+', Name.Variable),
- (r'\\"', Name.Variable),
- (r'.', Punctuation, '#pop'),
- ],
- 'identifier': [
- (r'(&)?(")', bygroups(Name.Variable, Punctuation), 'dq-identifier'),
- (r'(")?(&?[A-Za-z][A-Za-z0-9_-]*)(")?', bygroups(Punctuation, Name.Variable, Punctuation)),
- ],
- 'root': [
- include('ws'),
- include('string'),
- (r'(==|!=|<=|>=|<|>|&&|\|\||!)', Operator),
- include('punctuation'),
- (r'@', Name.Variable.Global),
- (r'(&?[A-Za-z][A-Za-z0-9_]*)(\()', bygroups(Name.Function, Punctuation)),
- (r'(&)(\()', bygroups(Name.Variable, Punctuation)),
- include('identifier'),
- (r'-?\d+', Number),
- (r'`', Literal, 'literal'),
- ],
- 'literal': [
- include('ws'),
- include('string'),
- include('punctuation'),
- (r'(false|true|null)\b', Keyword.Constant),
- include('identifier'),
- (r'-?\d+\.?\d*([eE][-+]\d+)?', Number),
- (r'\\`', Literal),
- (r'`', Literal, '#pop'),
- ]
- }
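The JMESPath lexer defines no analyse_text hook, so it is normally selected explicitly by alias, class, or filename. A minimal sketch of inspecting its token stream, assuming Pygments still exposes JMESPathLexer; the query is invented for illustration:

    from pygments.lexers import JMESPathLexer

    query = "people[?age > `21`].name"  # illustrative JMESPath query
    for token_type, value in JMESPathLexer().get_tokens(query):
        print(token_type, repr(value))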
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/jslt.py b/venv/lib/python3.11/site-packages/pygments/lexers/jslt.py
deleted file mode 100644
index 0d79f8b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/jslt.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""
- pygments.lexers.jslt
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the JSLT language
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, combined, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Whitespace
-
-
-__all__ = ['JSLTLexer']
-
-
-_WORD_END = r'(?=[^0-9A-Z_a-z-])'
-
-
-class JSLTLexer(RegexLexer):
- """
- For JSLT source.
-
- .. versionadded:: 2.10
- """
- name = 'JSLT'
- url = 'https://github.com/schibsted/jslt'
- filenames = ['*.jslt']
- aliases = ['jslt']
- mimetypes = ['text/x-jslt']
-
- tokens = {
- 'root': [
- (r'[\t\n\f\r ]+', Whitespace),
- (r'//.*(\n|\Z)', Comment.Single),
- (r'-?(0|[1-9][0-9]*)', Number.Integer),
-            (r'-?(0|[1-9][0-9]*)(\.[0-9]+)?([Ee][+-]?[0-9]+)', Number.Float),
- (r'"([^"\\]|\\.)*"', String.Double),
- (r'[(),:\[\]{}]', Punctuation),
- (r'(!=|[<=>]=?)', Operator),
- (r'[*+/|-]', Operator),
- (r'\.', Operator),
- (words(('import',), suffix=_WORD_END), Keyword.Namespace, combined('import-path', 'whitespace')),
- (words(('as',), suffix=_WORD_END), Keyword.Namespace, combined('import-alias', 'whitespace')),
- (words(('let',), suffix=_WORD_END), Keyword.Declaration, combined('constant', 'whitespace')),
- (words(('def',), suffix=_WORD_END), Keyword.Declaration, combined('function', 'whitespace')),
- (words(('false', 'null', 'true'), suffix=_WORD_END), Keyword.Constant),
- (words(('else', 'for', 'if'), suffix=_WORD_END), Keyword),
- (words(('and', 'or'), suffix=_WORD_END), Operator.Word),
- (words((
- 'all', 'any', 'array', 'boolean', 'capture', 'ceiling',
- 'contains', 'ends-with', 'error', 'flatten', 'floor',
- 'format-time', 'from-json', 'get-key', 'hash-int', 'index-of',
- 'is-array', 'is-boolean', 'is-decimal', 'is-integer',
- 'is-number', 'is-object', 'is-string', 'join', 'lowercase',
- 'max', 'min', 'mod', 'not', 'now', 'number', 'parse-time',
- 'parse-url', 'random', 'replace', 'round', 'sha256-hex', 'size',
- 'split', 'starts-with', 'string', 'sum', 'test', 'to-json',
- 'trim', 'uppercase', 'zip', 'zip-with-index', 'fallback'), suffix=_WORD_END),
- Name.Builtin),
- (r'[A-Z_a-z][0-9A-Z_a-z-]*:[A-Z_a-z][0-9A-Z_a-z-]*', Name.Function),
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name),
- (r'\$[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable),
- ],
- 'constant': [
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable, 'root'),
- ],
- 'function': [
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Function, combined('function-parameter-list', 'whitespace')),
- ],
- 'function-parameter-list': [
- (r'\(', Punctuation, combined('function-parameters', 'whitespace')),
- ],
- 'function-parameters': [
- (r',', Punctuation),
- (r'\)', Punctuation, 'root'),
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable),
- ],
- 'import-path': [
- (r'"([^"]|\\.)*"', String.Symbol, 'root'),
- ],
- 'import-alias': [
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Namespace, 'root'),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- ],
- 'whitespace': [
- (r'[\t\n\f\r ]+', Whitespace),
- (r'//.*(\n|\Z)', Comment.Single),
- ]
- }
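Because the class above registers the '*.jslt' filename pattern, the usual entry point is filename-based lookup rather than a direct import. A minimal sketch under that assumption; the file name and snippet are hypothetical:

    from pygments import highlight
    from pygments.lexers import get_lexer_for_filename
    from pygments.formatters import HtmlFormatter

    source = 'let name = .user.name\n{ "greeting": "hello " + $name }'  # illustrative JSLT
    lexer = get_lexer_for_filename('transform.jslt')  # hypothetical file name
    print(highlight(source, lexer, HtmlFormatter()))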
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/jsonnet.py b/venv/lib/python3.11/site-packages/pygments/lexers/jsonnet.py
deleted file mode 100644
index 3905f88..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/jsonnet.py
+++ /dev/null
@@ -1,168 +0,0 @@
-"""
- pygments.lexers.jsonnet
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Jsonnet data templating language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import include, RegexLexer, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
-
-__all__ = ['JsonnetLexer']
-
-jsonnet_token = r'[^\W\d]\w*'
-jsonnet_function_token = jsonnet_token + r'(?=\()'
-
-
-def string_rules(quote_mark):
- return [
- (r"[^{}\\]".format(quote_mark), String),
- (r"\\.", String.Escape),
- (quote_mark, String, '#pop'),
- ]
-
-
-def quoted_field_name(quote_mark):
- return [
- (r'([^{quote}\\]|\\.)*{quote}'.format(quote=quote_mark),
- Name.Variable, 'field_separator')
- ]
-
-
-class JsonnetLexer(RegexLexer):
- """Lexer for Jsonnet source code."""
-
- name = 'Jsonnet'
- aliases = ['jsonnet']
- filenames = ['*.jsonnet', '*.libsonnet']
- url = "https://jsonnet.org"
- tokens = {
- # Not used by itself
- '_comments': [
- (r'(//|#).*\n', Comment.Single),
- (r'/\*\*([^/]|/(?!\*))*\*/', String.Doc),
- (r'/\*([^/]|/(?!\*))*\*/', Comment),
- ],
- 'root': [
- include('_comments'),
- (r"@'.*'", String),
- (r'@".*"', String),
- (r"'", String, 'singlestring'),
- (r'"', String, 'doublestring'),
- (r'\|\|\|(.|\n)*\|\|\|', String),
- # Jsonnet has no integers, only an IEEE754 64-bit float
-            (r'[+-]?[0-9]+(\.[0-9]+)?', Number.Float),
- # Omit : despite spec because it appears to be used as a field
- # separator
- (r'[!$~+\-&|^=<>*/%]', Operator),
- (r'\{', Punctuation, 'object'),
- (r'\[', Punctuation, 'array'),
- (r'local\b', Keyword, ('local_name')),
- (r'assert\b', Keyword, 'assert'),
- (words([
- 'assert', 'else', 'error', 'false', 'for', 'if', 'import',
- 'importstr', 'in', 'null', 'tailstrict', 'then', 'self',
- 'super', 'true',
- ], suffix=r'\b'), Keyword),
- (r'\s+', Whitespace),
- (r'function(?=\()', Keyword, 'function_params'),
- (r'std\.' + jsonnet_function_token, Name.Builtin, 'function_args'),
- (jsonnet_function_token, Name.Function, 'function_args'),
- (jsonnet_token, Name.Variable),
- (r'[\.()]', Punctuation),
- ],
- 'singlestring': string_rules("'"),
- 'doublestring': string_rules('"'),
- 'array': [
- (r',', Punctuation),
- (r'\]', Punctuation, '#pop'),
- include('root'),
- ],
- 'local_name': [
- (jsonnet_function_token, Name.Function, 'function_params'),
- (jsonnet_token, Name.Variable),
- (r'\s+', Whitespace),
- ('(?==)', Whitespace, ('#pop', 'local_value')),
- ],
- 'local_value': [
- (r'=', Operator),
- (r';', Punctuation, '#pop'),
- include('root'),
- ],
- 'assert': [
- (r':', Punctuation),
- (r';', Punctuation, '#pop'),
- include('root'),
- ],
- 'function_params': [
- (jsonnet_token, Name.Variable),
- (r'\(', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'\s+', Whitespace),
- (r'=', Operator, 'function_param_default'),
- ],
- 'function_args': [
- (r'\(', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'\s+', Whitespace),
- include('root'),
- ],
- 'object': [
- (r'\s+', Whitespace),
- (r'local\b', Keyword, 'object_local_name'),
- (r'assert\b', Keyword, 'object_assert'),
- (r'\[', Operator, 'field_name_expr'),
- (fr'(?={jsonnet_token})', Text, 'field_name'),
- (r'\}', Punctuation, '#pop'),
- (r'"', Name.Variable, 'double_field_name'),
- (r"'", Name.Variable, 'single_field_name'),
- include('_comments'),
- ],
- 'field_name': [
- (jsonnet_function_token, Name.Function,
- ('field_separator', 'function_params')
- ),
- (jsonnet_token, Name.Variable, 'field_separator'),
- ],
- 'double_field_name': quoted_field_name('"'),
- 'single_field_name': quoted_field_name("'"),
- 'field_name_expr': [
- (r'\]', Operator, 'field_separator'),
- include('root'),
- ],
- 'function_param_default': [
- (r'(?=[,\)])', Whitespace, '#pop'),
- include('root'),
- ],
- 'field_separator': [
- (r'\s+', Whitespace),
- (r'\+?::?:?', Punctuation, ('#pop', '#pop', 'field_value')),
- include('_comments'),
- ],
- 'field_value': [
- (r',', Punctuation, '#pop'),
- (r'\}', Punctuation, '#pop:2'),
- include('root'),
- ],
- 'object_assert': [
- (r':', Punctuation),
- (r',', Punctuation, '#pop'),
- include('root'),
- ],
- 'object_local_name': [
- (jsonnet_token, Name.Variable, ('#pop', 'object_local_value')),
- (r'\s+', Whitespace),
- ],
- 'object_local_value': [
- (r'=', Operator),
- (r',', Punctuation, '#pop'),
- (r'\}', Punctuation, '#pop:2'),
- include('root'),
- ],
- }
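The string_rules and quoted_field_name helpers above only assemble rule lists for the tokens table; the lexer class is the module's sole public entry point. A minimal usage sketch, assuming Pygments still exposes JsonnetLexer; the snippet is invented:

    from pygments import highlight
    from pygments.lexers import JsonnetLexer
    from pygments.formatters import TerminalFormatter

    snippet = 'local greeting = "hello";\n{ message: greeting + " world" }'  # illustrative Jsonnet
    print(highlight(snippet, JsonnetLexer(), TerminalFormatter()))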
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/jsx.py b/venv/lib/python3.11/site-packages/pygments/lexers/jsx.py
deleted file mode 100644
index 90cecc0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/jsx.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""
- pygments.lexers.jsx
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for JSX (React).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import bygroups, default, include, inherit
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.token import Name, Operator, Punctuation, String, Text, \
- Whitespace
-
-__all__ = ['JsxLexer']
-
-
-class JsxLexer(JavascriptLexer):
- """For JavaScript Syntax Extension (JSX).
-
- .. versionadded:: 2.17
- """
-
- name = "JSX"
- aliases = ["jsx", "react"]
- filenames = ["*.jsx", "*.react"]
- mimetypes = ["text/jsx", "text/typescript-jsx"]
- url = "https://facebook.github.io/jsx/"
-
- flags = re.MULTILINE | re.DOTALL
-
- # Use same tokens as `JavascriptLexer`, but with tags and attributes support
- tokens = {
- "root": [
- include("jsx"),
- inherit,
- ],
- "jsx": [
- (r"</?>", Punctuation), # JSXFragment <>|</>
- (r"(<)(\w+)(\.?)", bygroups(Punctuation, Name.Tag, Punctuation), "tag"),
- (
- r"(</)(\w+)(>)",
- bygroups(Punctuation, Name.Tag, Punctuation),
- ),
- (
- r"(</)(\w+)",
- bygroups(Punctuation, Name.Tag),
- "fragment",
- ), # Same for React.Context
- ],
- "tag": [
- (r"\s+", Whitespace),
- (r"([\w-]+)(\s*)(=)(\s*)", bygroups(Name.Attribute, Whitespace, Operator, Whitespace), "attr"),
- (r"[{}]+", Punctuation),
- (r"[\w\.]+", Name.Attribute),
- (r"(/?)(\s*)(>)", bygroups(Punctuation, Text, Punctuation), "#pop"),
- ],
- "fragment": [
- (r"(.)(\w+)", bygroups(Punctuation, Name.Attribute)),
- (r"(>)", bygroups(Punctuation), "#pop"),
- ],
- "attr": [
- (r"\{", Punctuation, "expression"),
- (r'".*?"', String, "#pop"),
- (r"'.*?'", String, "#pop"),
- default("#pop"),
- ],
- "expression": [
- (r"\{", Punctuation, "#push"),
- (r"\}", Punctuation, "#pop"),
- include("root"),
- ],
- }
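JsxLexer subclasses JavascriptLexer and splices its extra 'jsx', 'tag', 'fragment', 'attr' and 'expression' states in with inherit, so plain JavaScript keeps lexing exactly as before and only tags and attributes are new. A minimal sketch, assuming the 'jsx' alias is still registered; the component is made up:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    component = 'const App = () => <div className="app">hello</div>;'  # illustrative JSX
    print(highlight(component, get_lexer_by_name('jsx'), HtmlFormatter()))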
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/julia.py b/venv/lib/python3.11/site-packages/pygments/lexers/julia.py
deleted file mode 100644
index 9975ca0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/julia.py
+++ /dev/null
@@ -1,294 +0,0 @@
-"""
- pygments.lexers.julia
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Julia language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
- words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-from pygments.util import shebang_matches
-from pygments.lexers._julia_builtins import OPERATORS_LIST, DOTTED_OPERATORS_LIST, \
- KEYWORD_LIST, BUILTIN_LIST, LITERAL_LIST
-
-__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
-
-# see https://docs.julialang.org/en/v1/manual/variables/#Allowed-Variable-Names
-allowed_variable = \
- '(?:[a-zA-Z_\u00A1-\U0010ffff][a-zA-Z_0-9!\u00A1-\U0010ffff]*)'
-# see https://github.com/JuliaLang/julia/blob/master/src/flisp/julia_opsuffs.h
-operator_suffixes = r'[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*'
-
-class JuliaLexer(RegexLexer):
- """
- For Julia source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Julia'
- url = 'https://julialang.org/'
- aliases = ['julia', 'jl']
- filenames = ['*.jl']
- mimetypes = ['text/x-julia', 'application/x-julia']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'[^\S\n]+', Whitespace),
- (r'#=', Comment.Multiline, "blockcomment"),
- (r'#.*$', Comment),
- (r'[\[\](),;]', Punctuation),
-
- # symbols
- # intercept range expressions first
- (r'(' + allowed_variable + r')(\s*)(:)(' + allowed_variable + ')',
- bygroups(Name, Whitespace, Operator, Name)),
- # then match :name which does not follow closing brackets, digits, or the
- # ::, <:, and :> operators
- (r'(?<![\]):<>\d.])(:' + allowed_variable + ')', String.Symbol),
-
- # type assertions - excludes expressions like ::typeof(sin) and ::avec[1]
- (r'(?<=::)(\s*)(' + allowed_variable + r')\b(?![(\[])',
- bygroups(Whitespace, Keyword.Type)),
- # type comparisons
- # - MyType <: A or MyType >: A
- ('(' + allowed_variable + r')(\s*)([<>]:)(\s*)(' + allowed_variable + r')\b(?![(\[])',
- bygroups(Keyword.Type, Whitespace, Operator, Whitespace, Keyword.Type)),
- # - <: B or >: B
- (r'([<>]:)(\s*)(' + allowed_variable + r')\b(?![(\[])',
- bygroups(Operator, Whitespace, Keyword.Type)),
- # - A <: or A >:
- (r'\b(' + allowed_variable + r')(\s*)([<>]:)',
- bygroups(Keyword.Type, Whitespace, Operator)),
-
- # operators
- # Suffixes aren't actually allowed on all operators, but we'll ignore that
- # since those cases are invalid Julia code.
- (words([*OPERATORS_LIST, *DOTTED_OPERATORS_LIST],
- suffix=operator_suffixes), Operator),
- (words(['.' + o for o in DOTTED_OPERATORS_LIST],
- suffix=operator_suffixes), Operator),
- (words(['...', '..']), Operator),
-
- # NOTE
- # Patterns below work only for definition sites and thus hardly reliable.
- #
- # functions
- # (r'(function)(\s+)(' + allowed_variable + ')',
- # bygroups(Keyword, Text, Name.Function)),
-
- # chars
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
- r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
-
- # try to match trailing transpose
- (r'(?<=[.\w)\]])(\'' + operator_suffixes + ')+', Operator),
-
- # raw strings
- (r'(raw)(""")', bygroups(String.Affix, String), 'tqrawstring'),
- (r'(raw)(")', bygroups(String.Affix, String), 'rawstring'),
- # regular expressions
- (r'(r)(""")', bygroups(String.Affix, String.Regex), 'tqregex'),
- (r'(r)(")', bygroups(String.Affix, String.Regex), 'regex'),
- # other strings
- (r'(' + allowed_variable + ')?(""")',
- bygroups(String.Affix, String), 'tqstring'),
- (r'(' + allowed_variable + ')?(")',
- bygroups(String.Affix, String), 'string'),
-
- # backticks
- (r'(' + allowed_variable + ')?(```)',
- bygroups(String.Affix, String.Backtick), 'tqcommand'),
- (r'(' + allowed_variable + ')?(`)',
- bygroups(String.Affix, String.Backtick), 'command'),
-
- # type names
- # - names that begin a curly expression
- ('(' + allowed_variable + r')(\{)',
- bygroups(Keyword.Type, Punctuation), 'curly'),
- # - names as part of bare 'where'
- (r'(where)(\s+)(' + allowed_variable + ')',
- bygroups(Keyword, Whitespace, Keyword.Type)),
- # - curly expressions in general
- (r'(\{)', Punctuation, 'curly'),
- # - names as part of type declaration
- (r'(abstract|primitive)([ \t]+)(type\b)([\s()]+)(' +
- allowed_variable + r')',
- bygroups(Keyword, Whitespace, Keyword, Text, Keyword.Type)),
- (r'(mutable(?=[ \t]))?([ \t]+)?(struct\b)([\s()]+)(' +
- allowed_variable + r')',
- bygroups(Keyword, Whitespace, Keyword, Text, Keyword.Type)),
-
- # macros
- (r'@' + allowed_variable, Name.Decorator),
- (words([*OPERATORS_LIST, '..', '.', *DOTTED_OPERATORS_LIST],
- prefix='@', suffix=operator_suffixes), Name.Decorator),
-
- # keywords
- (words(KEYWORD_LIST, suffix=r'\b'), Keyword),
- # builtin types
- (words(BUILTIN_LIST, suffix=r'\b'), Keyword.Type),
- # builtin literals
- (words(LITERAL_LIST, suffix=r'\b'), Name.Builtin),
-
- # names
- (allowed_variable, Name),
-
- # numbers
- (r'(\d+((_\d+)+)?\.(?!\.)(\d+((_\d+)+)?)?|\.\d+((_\d+)+)?)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+((_\d+)+)?[eEf][+-]?[0-9]+', Number.Float),
- (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?(\.([a-fA-F0-9]+((_[a-fA-F0-9]+)+)?)?)?p[+-]?\d+', Number.Float),
- (r'0b[01]+((_[01]+)+)?', Number.Bin),
- (r'0o[0-7]+((_[0-7]+)+)?', Number.Oct),
- (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?', Number.Hex),
- (r'\d+((_\d+)+)?', Number.Integer),
-
- # single dot operator matched last to permit e.g. ".1" as a float
- (words(['.']), Operator),
- ],
-
- "blockcomment": [
- (r'[^=#]', Comment.Multiline),
- (r'#=', Comment.Multiline, '#push'),
- (r'=#', Comment.Multiline, '#pop'),
- (r'[=#]', Comment.Multiline),
- ],
-
- 'curly': [
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (allowed_variable, Keyword.Type),
- include('root'),
- ],
-
- 'tqrawstring': [
- (r'"""', String, '#pop'),
- (r'([^"]|"[^"][^"])+', String),
- ],
- 'rawstring': [
- (r'"', String, '#pop'),
- (r'\\"', String.Escape),
- (r'([^"\\]|\\[^"])+', String),
- ],
-
- # Interpolation is defined as "$" followed by the shortest full
- # expression, which is something we can't parse. Include the most
- # common cases here: $word, and $(paren'd expr).
- 'interp': [
- (r'\$' + allowed_variable, String.Interpol),
- (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
- ],
- 'in-intp': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
-
- 'string': [
- (r'(")(' + allowed_variable + r'|\d+)?',
- bygroups(String, String.Affix), '#pop'),
- # FIXME: This escape pattern is not perfect.
- (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
- include('interp'),
- # @printf and @sprintf formats
- (r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]',
- String.Interpol),
- (r'[^"$%\\]+', String),
- (r'.', String),
- ],
- 'tqstring': [
- (r'(""")(' + allowed_variable + r'|\d+)?',
- bygroups(String, String.Affix), '#pop'),
- (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
- include('interp'),
- (r'[^"$%\\]+', String),
- (r'.', String),
- ],
-
- 'regex': [
- (r'(")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
- (r'\\"', String.Regex),
- (r'[^\\"]+', String.Regex),
- ],
-
- 'tqregex': [
- (r'(""")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
- (r'[^"]+', String.Regex),
- ],
-
- 'command': [
- (r'(`)(' + allowed_variable + r'|\d+)?',
- bygroups(String.Backtick, String.Affix), '#pop'),
- (r'\\[`$]', String.Escape),
- include('interp'),
- (r'[^\\`$]+', String.Backtick),
- (r'.', String.Backtick),
- ],
- 'tqcommand': [
- (r'(```)(' + allowed_variable + r'|\d+)?',
- bygroups(String.Backtick, String.Affix), '#pop'),
- (r'\\\$', String.Escape),
- include('interp'),
- (r'[^\\`$]+', String.Backtick),
- (r'.', String.Backtick),
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'julia')
-
-
-class JuliaConsoleLexer(Lexer):
- """
- For Julia console sessions. Modeled after MatlabSessionLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Julia console'
- aliases = ['jlcon', 'julia-repl']
-
- def get_tokens_unprocessed(self, text):
- jllexer = JuliaLexer(**self.options)
- start = 0
- curcode = ''
- insertions = []
- output = False
- error = False
-
- for line in text.splitlines(keepends=True):
- if line.startswith('julia>'):
- insertions.append((len(curcode), [(0, Generic.Prompt, line[:6])]))
- curcode += line[6:]
- output = False
- error = False
- elif line.startswith('help?>') or line.startswith('shell>'):
- yield start, Generic.Prompt, line[:6]
- yield start + 6, Text, line[6:]
- output = False
- error = False
- elif line.startswith(' ') and not output:
- insertions.append((len(curcode), [(0, Whitespace, line[:6])]))
- curcode += line[6:]
- else:
- if curcode:
- yield from do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- if line.startswith('ERROR: ') or error:
- yield start, Generic.Error, line
- error = True
- else:
- yield start, Generic.Output, line
- output = True
- start += len(line)
-
- if curcode:
- yield from do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode))
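JuliaLexer.analyse_text above only checks for a julia shebang, so automatic detection is mainly useful for script files, while REPL transcripts go through the separate 'jlcon' alias handled by JuliaConsoleLexer. A minimal sketch of both paths, assuming a Pygments installation that still ships these lexers; the inputs are made up:

    from pygments.lexers import guess_lexer, get_lexer_by_name

    script = '#!/usr/bin/env julia\nprintln("hello")\n'  # illustrative script with shebang
    print(guess_lexer(script).name)                      # expected to resolve to the Julia lexer

    session = 'julia> 1 + 1\n2\n'                        # illustrative REPL transcript
    for token_type, value in get_lexer_by_name('jlcon').get_tokens(session):
        print(token_type, repr(value))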
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/jvm.py b/venv/lib/python3.11/site-packages/pygments/lexers/jvm.py
deleted file mode 100644
index 9b4f8d6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/jvm.py
+++ /dev/null
@@ -1,1820 +0,0 @@
-"""
- pygments.lexers.jvm
- ~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for JVM languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-from pygments.util import shebang_matches
-from pygments import unistring as uni
-
-__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
- 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
- 'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
- 'PigLexer', 'GoloLexer', 'JasminLexer', 'SarlLexer']
-
-
-class JavaLexer(RegexLexer):
- """
- For Java source code.
- """
-
- name = 'Java'
- url = 'https://www.oracle.com/technetwork/java/'
- aliases = ['java']
- filenames = ['*.java']
- mimetypes = ['text/x-java']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (r'(^\s*)((?:(?:public|private|protected|static|strictfp)(?:\s+))*)(record)\b',
- bygroups(Whitespace, using(this), Keyword.Declaration), 'class'),
- (r'[^\S\n]+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline),
- # keywords: go before method names to avoid lexing "throw new XYZ"
- # as a method signature
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
- Keyword),
- # method names
- (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
- r'((?:[^\W\d]|\$)[\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Punctuation)),
- (r'@[^\W\d][\w.]*', Name.Decorator),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|sealed|static|strictfp|super|synchronized|throws|'
- r'transient|volatile|yield)\b', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'import'),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)\b', Keyword.Declaration, 'class'),
- (r'(var)(\s+)', bygroups(Keyword.Declaration, Whitespace), 'var'),
- (r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Whitespace),
- 'import'),
- (r'"""\n', String, 'multiline_string'),
- (r'"', String, 'string'),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
- Name.Attribute)),
- (r'^(\s*)(default)(:)', bygroups(Whitespace, Keyword, Punctuation)),
- (r'^(\s*)((?:[^\W\d]|\$)[\w$]*)(:)', bygroups(Whitespace, Name.Label,
- Punctuation)),
- (r'([^\W\d]|\$)[\w$]*', Name),
- (r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
- r'\.[0-9][0-9_]*)'
- r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
- r'[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|'
- r'[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|'
- r'0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|'
- r'([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)'
- r'[pP][+\-]?[0-9][0-9_]*[fFdD]?', Number.Float),
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?', Number.Hex),
- (r'0[bB][01][01_]*[lL]?', Number.Bin),
- (r'0[0-7_]+[lL]?', Number.Oct),
- (r'0|[1-9][0-9_]*[lL]?', Number.Integer),
- (r'[~^*!%&\[\]<>|+=/?-]', Operator),
- (r'[{}();:.,]', Punctuation),
- (r'\n', Whitespace)
- ],
- 'class': [
- (r'\s+', Text),
- (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
- ],
- 'var': [
- (r'([^\W\d]|\$)[\w$]*', Name, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- 'multiline_string': [
- (r'"""', String, '#pop'),
- (r'"', String),
- include('string')
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r'\\\\', String), # Escaped backslash
- (r'\\"', String), # Escaped quote
- (r'\\', String), # Bare backslash
- (r'"', String, '#pop'), # Closing quote
- ],
- }
-
-
-class AspectJLexer(JavaLexer):
- """
- For AspectJ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'AspectJ'
- url = 'http://www.eclipse.org/aspectj/'
- aliases = ['aspectj']
- filenames = ['*.aj']
- mimetypes = ['text/x-aspectj']
-
- aj_keywords = {
- 'aspect', 'pointcut', 'privileged', 'call', 'execution',
- 'initialization', 'preinitialization', 'handler', 'get', 'set',
- 'staticinitialization', 'target', 'args', 'within', 'withincode',
- 'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
- 'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
- 'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
- 'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
- 'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
- 'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
- }
- aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
- aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.aj_keywords:
- yield index, Keyword, value
- elif token is Name.Label and value in self.aj_inter_type:
- yield index, Keyword, value[:-1]
- yield index, Operator, value[-1]
- elif token is Name.Decorator and value in self.aj_inter_type_annotation:
- yield index, Keyword, value
- else:
- yield index, token, value
-
-
-class ScalaLexer(RegexLexer):
- """
- For Scala source code.
- """
-
- name = 'Scala'
- url = 'http://www.scala-lang.org'
- aliases = ['scala']
- filenames = ['*.scala']
- mimetypes = ['text/x-scala']
-
- flags = re.MULTILINE | re.DOTALL
-
- opchar = '[!#%&*\\-\\/:?@^' + uni.combine('Sm', 'So') + ']'
- letter = '[_\\$' + uni.combine('Ll', 'Lu', 'Lo', 'Nl', 'Lt') + ']'
- upperLetter = '[' + uni.combine('Lu', 'Lt') + ']'
- letterOrDigit = '(?:%s|[0-9])' % letter
- letterOrDigitNoDollarSign = '(?:%s|[0-9])' % letter.replace('\\$', '')
- alphaId = '%s+' % letter
- simpleInterpolatedVariable = '%s%s*' % (letter, letterOrDigitNoDollarSign)
- idrest = '%s%s*(?:(?<=_)%s+)?' % (letter, letterOrDigit, opchar)
- idUpper = '%s%s*(?:(?<=_)%s+)?' % (upperLetter, letterOrDigit, opchar)
- plainid = '(?:%s|%s+)' % (idrest, opchar)
- backQuotedId = r'`[^`]+`'
- anyId = r'(?:%s|%s)' % (plainid, backQuotedId)
- notStartOfComment = r'(?!//|/\*)'
- endOfLineMaybeWithComment = r'(?=\s*(//|$))'
-
- keywords = (
- 'new', 'return', 'throw', 'classOf', 'isInstanceOf', 'asInstanceOf',
- 'else', 'if', 'then', 'do', 'while', 'for', 'yield', 'match', 'case',
- 'catch', 'finally', 'try'
- )
-
- operators = (
- '<%', '=:=', '<:<', '<%<', '>:', '<:', '=', '==', '!=', '<=', '>=',
- '<>', '<', '>', '<-', '←', '->', '→', '=>', '⇒', '?', '@', '|', '-',
- '+', '*', '%', '~', '\\'
- )
-
- storage_modifiers = (
- 'private', 'protected', 'synchronized', '@volatile', 'abstract',
- 'final', 'lazy', 'sealed', 'implicit', 'override', '@transient',
- '@native'
- )
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- include('script-header'),
- include('imports'),
- include('exports'),
- include('storage-modifiers'),
- include('annotations'),
- include('using'),
- include('declarations'),
- include('inheritance'),
- include('extension'),
- include('end'),
- include('constants'),
- include('strings'),
- include('symbols'),
- include('singleton-type'),
- include('inline'),
- include('quoted'),
- include('keywords'),
- include('operators'),
- include('punctuation'),
- include('names'),
- ],
-
- # Includes:
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- ],
- 'script-header': [
- (r'^#!([^\n]*)$', Comment.Hashbang),
- ],
- 'imports': [
- (r'\b(import)(\s+)', bygroups(Keyword, Whitespace), 'import-path'),
- ],
- 'exports': [
- (r'\b(export)(\s+)(given)(\s+)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace), 'export-path'),
- (r'\b(export)(\s+)', bygroups(Keyword, Whitespace), 'export-path'),
- ],
- 'storage-modifiers': [
- (words(storage_modifiers, prefix=r'\b', suffix=r'\b'), Keyword),
- # Only highlight soft modifiers if they are eventually followed by
- # the correct keyword. Note that soft modifiers can be followed by a
- # sequence of regular modifiers; [a-z\s]* skips those, and we just
- # check that the soft modifier is applied to a supported statement.
- (r'\b(transparent|opaque|infix|open|inline)\b(?=[a-z\s]*\b'
- r'(def|val|var|given|type|class|trait|object|enum)\b)', Keyword),
- ],
- 'annotations': [
- (r'@%s' % idrest, Name.Decorator),
- ],
- 'using': [
- # using is a soft keyword, can only be used in the first position of
- # a parameter or argument list.
- (r'(\()(\s*)(using)(\s)', bygroups(Punctuation, Whitespace, Keyword, Whitespace)),
- ],
- 'declarations': [
- (r'\b(def)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
- bygroups(Keyword, Whitespace, Name.Function)),
- (r'\b(trait)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(?:(case)(\s+))?(class|object|enum)\b(\s*)%s(%s)?' %
- (notStartOfComment, anyId),
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class)),
- (r'(?<!\.)\b(type)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(val|var)\b', Keyword.Declaration),
- (r'\b(package)(\s+)(object)\b(\s*)%s(%s)?' %
- (notStartOfComment, anyId),
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Namespace)),
- (r'\b(package)(\s+)', bygroups(Keyword, Whitespace), 'package'),
- (r'\b(given)\b(\s*)(%s)' % idUpper,
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(given)\b(\s*)(%s)?' % anyId,
- bygroups(Keyword, Whitespace, Name)),
- ],
- 'inheritance': [
- (r'\b(extends|with|derives)\b(\s*)'
- r'(%s|%s|(?=\([^\)]+=>)|(?=%s)|(?="))?' %
- (idUpper, backQuotedId, plainid),
- bygroups(Keyword, Whitespace, Name.Class)),
- ],
- 'extension': [
- (r'\b(extension)(\s+)(?=[\[\(])', bygroups(Keyword, Whitespace)),
- ],
- 'end': [
- # end is a soft keyword, should only be highlighted in certain cases
- (r'\b(end)(\s+)(if|while|for|match|new|extension|val|var)\b',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'\b(end)(\s+)(%s)%s' % (idUpper, endOfLineMaybeWithComment),
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(end)(\s+)(%s|%s)?%s' %
- (backQuotedId, plainid, endOfLineMaybeWithComment),
- bygroups(Keyword, Whitespace, Name.Namespace)),
- ],
- 'punctuation': [
- (r'[{}()\[\];,.]', Punctuation),
- (r'(?<!:):(?!:)', Punctuation),
- ],
- 'keywords': [
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- ],
- 'operators': [
- (r'(%s{2,})(\s+)' % opchar, bygroups(Operator, Whitespace)),
- (r'/(?![/*])', Operator),
- (words(operators), Operator),
- (r'(?<!%s)(!|&&|\|\|)(?!%s)' % (opchar, opchar), Operator),
- ],
- 'constants': [
- (r'\b(this|super)\b', Name.Builtin.Pseudo),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'0[xX][0-9a-fA-F_]*', Number.Hex),
- (r'([0-9][0-9_]*\.[0-9][0-9_]*|\.[0-9][0-9_]*)'
- r'([eE][+-]?[0-9][0-9_]*)?[fFdD]?', Number.Float),
- (r'[0-9]+([eE][+-]?[0-9]+)?[fFdD]', Number.Float),
- (r'[0-9]+([eE][+-]?[0-9]+)[fFdD]?', Number.Float),
- (r'[0-9]+[lL]', Number.Integer.Long),
- (r'[0-9]+', Number.Integer),
- (r'""".*?"""(?!")', String),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"(')(\\.)(')", bygroups(String.Char, String.Escape, String.Char)),
- (r"'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- ],
- "strings": [
- (r'[fs]"""', String, 'interpolated-string-triple'),
- (r'[fs]"', String, 'interpolated-string'),
- (r'raw"(\\\\|\\"|[^"])*"', String),
- ],
- 'symbols': [
- (r"('%s)(?!')" % plainid, String.Symbol),
- ],
- 'singleton-type': [
- (r'(\.)(type)\b', bygroups(Punctuation, Keyword)),
- ],
- 'inline': [
- # inline is a soft modifier, only highlighted if followed by if,
- # match or parameters.
- (r'\b(inline)(?=\s+(%s|%s)\s*:)' % (plainid, backQuotedId),
- Keyword),
- (r'\b(inline)\b(?=(?:.(?!\b(?:val|def|given)\b))*\b(if|match)\b)',
- Keyword),
- ],
- 'quoted': [
- # '{...} or ${...}
- (r"['$]\{(?!')", Punctuation),
- # '[...]
- (r"'\[(?!')", Punctuation),
- ],
- 'names': [
- (idUpper, Name.Class),
- (anyId, Name),
- ],
-
- # States
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'import-path': [
- (r'(?<=[\n;:])', Text, '#pop'),
- include('comments'),
- (r'\b(given)\b', Keyword),
- include('qualified-name'),
- (r'\{', Punctuation, 'import-path-curly-brace'),
- ],
- 'import-path-curly-brace': [
- include('whitespace'),
- include('comments'),
- (r'\b(given)\b', Keyword),
- (r'=>', Operator),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'[\[\]]', Punctuation),
- include('qualified-name'),
- ],
- 'export-path': [
- (r'(?<=[\n;:])', Text, '#pop'),
- include('comments'),
- include('qualified-name'),
- (r'\{', Punctuation, 'export-path-curly-brace'),
- ],
- 'export-path-curly-brace': [
- include('whitespace'),
- include('comments'),
- (r'=>', Operator),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation),
- include('qualified-name'),
- ],
- 'package': [
- (r'(?<=[\n;])', Text, '#pop'),
- (r':', Punctuation, '#pop'),
- include('comments'),
- include('qualified-name'),
- ],
- 'interpolated-string-triple': [
- (r'"""(?!")', String, '#pop'),
- (r'"', String),
- include('interpolated-string-common'),
- ],
- 'interpolated-string': [
- (r'"', String, '#pop'),
- include('interpolated-string-common'),
- ],
- 'interpolated-string-brace': [
- (r'\}', String.Interpol, '#pop'),
- (r'\{', Punctuation, 'interpolated-string-nested-brace'),
- include('root'),
- ],
- 'interpolated-string-nested-brace': [
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
-
- # Helpers
- 'qualified-name': [
- (idUpper, Name.Class),
- (r'(%s)(\.)' % anyId, bygroups(Name.Namespace, Punctuation)),
- (r'\.', Punctuation),
- (anyId, Name),
- (r'[^\S\n]+', Whitespace),
- ],
- 'interpolated-string-common': [
- (r'[^"$\\]+', String),
- (r'\$\$', String.Escape),
- (r'(\$)(%s)' % simpleInterpolatedVariable,
- bygroups(String.Interpol, Name)),
- (r'\$\{', String.Interpol, 'interpolated-string-brace'),
- (r'\\.', String),
- ],
- }
-
-
-class GosuLexer(RegexLexer):
- """
- For Gosu source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Gosu'
- aliases = ['gosu']
- filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
- mimetypes = ['text/x-gosu']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Operator)),
- (r'[^\S\n]+', Whitespace),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
- r'index|while|do|continue|break|return|try|catch|finally|this|'
- r'throw|new|switch|case|default|eval|super|outer|classpath|'
- r'using)\b', Keyword),
- (r'(var|delegate|construct|function|private|internal|protected|'
- r'public|abstract|override|final|static|extends|transient|'
- r'implements|represents|readonly)\b', Keyword.Declaration),
- (r'(property)(\s+)(get|set)?', bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration)),
- (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace)),
- (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
- (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(uses)(\s+)([\w.]+\*?)',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- (r'"', String, 'string'),
- (r'(\??[.#])([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'(:)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_$]\w*', Name),
- (r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\n', Whitespace)
- ],
- 'templateText': [
- (r'(\\<)|(\\\$)', String),
- (r'(<%@\s+)(extends|params)',
- bygroups(Operator, Name.Decorator), 'stringTemplate'),
- (r'<%!--.*?--%>', Comment.Multiline),
- (r'(<%)|(<%=)', Operator, 'stringTemplate'),
- (r'\$\{', Operator, 'stringTemplateShorthand'),
- (r'.', String)
- ],
- 'string': [
- (r'"', String, '#pop'),
- include('templateText')
- ],
- 'stringTemplate': [
- (r'"', String, 'string'),
- (r'%>', Operator, '#pop'),
- include('root')
- ],
- 'stringTemplateShorthand': [
- (r'"', String, 'string'),
- (r'\{', Operator, 'stringTemplateShorthand'),
- (r'\}', Operator, '#pop'),
- include('root')
- ],
- }
-
-
-class GosuTemplateLexer(Lexer):
- """
- For Gosu templates.
-
- .. versionadded:: 1.5
- """
-
- name = 'Gosu Template'
- aliases = ['gst']
- filenames = ['*.gst']
- mimetypes = ['text/x-gosu-template']
-
- def get_tokens_unprocessed(self, text):
- lexer = GosuLexer()
- stack = ['templateText']
- yield from lexer.get_tokens_unprocessed(text, stack)
-
-
-class GroovyLexer(RegexLexer):
- """
- For Groovy source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Groovy'
- url = 'https://groovy-lang.org/'
- aliases = ['groovy']
- filenames = ['*.groovy','*.gradle']
- mimetypes = ['text/x-groovy']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # Groovy allows a file to start with a shebang
- (r'#!(.*?)$', Comment.Preproc, 'base'),
- default('base'),
- ],
- 'base': [
- (r'[^\S\n]+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline),
- # keywords: go before method names to avoid lexing "throw new XYZ"
- # as a method signature
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
- Keyword),
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'('
- r'[a-zA-Z_]\w*' # method name
- r'|"(?:\\\\|\\[^\\]|[^"\\])*"' # or double-quoted method name
- r"|'(?:\\\\|\\[^\\]|[^'\\])*'" # or single-quoted method name
- r')'
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Operator)),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Whitespace),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'import'),
- (r'""".*?"""', String.Double),
- (r"'''.*?'''", String.Single),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'\$/((?!/\$).)*/\$', String),
- (r'/(\\\\|\\[^\\]|[^/\\])*/', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Whitespace)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'groovy')
-
-
-class IokeLexer(RegexLexer):
- """
-    For Ioke (a strongly typed, dynamic,
-    prototype-based programming language) source.
-
- .. versionadded:: 1.4
- """
- name = 'Ioke'
- url = 'https://ioke.org/'
- filenames = ['*.ik']
- aliases = ['ioke', 'ik']
- mimetypes = ['text/x-iokesrc']
- tokens = {
- 'interpolatableText': [
- (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
- r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
- (r'#\{', Punctuation, 'textInterpolationRoot')
- ],
-
- 'text': [
- (r'(?<!\\)"', String, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String)
- ],
-
- 'documentation': [
- (r'(?<!\\)"', String.Doc, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String.Doc)
- ],
-
- 'textInterpolationRoot': [
- (r'\}', Punctuation, '#pop'),
- include('root')
- ],
-
- 'slashRegexp': [
- (r'(?<!\\)/[im-psux]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\/', String.Regex),
- (r'[^/]', String.Regex)
- ],
-
- 'squareRegexp': [
- (r'(?<!\\)][im-psux]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\]', String.Regex),
- (r'[^\]]', String.Regex)
- ],
-
- 'squareText': [
- (r'(?<!\\)]', String, '#pop'),
- include('interpolatableText'),
- (r'[^\]]', String)
- ],
-
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
-
- # Comments
- (r';(.*?)\n', Comment),
- (r'\A#!(.*?)\n', Comment),
-
- # Regexps
- (r'#/', String.Regex, 'slashRegexp'),
- (r'#r\[', String.Regex, 'squareRegexp'),
-
- # Symbols
- (r':[\w!:?]+', String.Symbol),
- (r'[\w!:?]+:(?![\w!?])', String.Other),
- (r':"(\\\\|\\[^\\]|[^"\\])*"', String.Symbol),
-
- # Documentation
- (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
- r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
- r'|(?<=dsyntax\())(\s*)"', String.Doc, 'documentation'),
-
- # Text
- (r'"', String, 'text'),
- (r'#\[', String, 'squareText'),
-
- # Mimic
- (r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
-
- # Assignment
- (r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
- Name.Variable),
-
- # keywords
- (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
- r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
- r'with)(?![\w!:?])', Keyword.Reserved),
-
- # Origin
- (r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
-
- # Base
- (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
- r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
- r'(?![\w!:?])', Keyword),
-
- # Ground
- (r'(stackTraceAsText)(?![\w!:?])', Keyword),
-
- # DefaultBehaviour Literals
- (r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Case
- (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
- r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Reflection
- (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
- r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
- r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
- r'(?![\w!:?])', Keyword),
-
- # DefaultBehaviour Aspects
- (r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour
- (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
- r'(?![\w!:?])', Keyword),
- (r'(use|destructuring)', Keyword.Reserved),
-
- # DefaultBehavior BaseBehavior
- (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
- r'documentation|identity|removeCell!|undefineCell)'
- r'(?![\w!:?])', Keyword),
-
- # DefaultBehavior Internal
- (r'(internal:compositeRegexp|internal:concatenateText|'
- r'internal:createDecimal|internal:createNumber|'
- r'internal:createRegexp|internal:createText)'
- r'(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Conditions
- (r'(availableRestarts|bind|error\!|findRestart|handle|'
- r'invokeRestart|rescue|restart|signal\!|warn\!)'
- r'(?![\w!:?])', Keyword.Reserved),
-
- # constants
- (r'(nil|false|true)(?![\w!:?])', Name.Constant),
-
- # names
- (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
- r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
- r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
- r'Conditions|Definitions|FlowControl|Internal|Literals|'
- r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
- r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
- r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
- r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
- r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
- r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
-
- # functions
- ('(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
- 'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
- '(?![\\w!:?])', Name.Function),
-
- # Numbers
- (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
- (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'-?\d+', Number.Integer),
-
- (r'#\(', Punctuation),
-
- # Operators
- (r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
- r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
- r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
- r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
- r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
- r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
- r'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
- (r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
- Operator),
-
- # Punctuation
- (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
-
- # kinds
- (r'[A-Z][\w!:?]*', Name.Class),
-
- # default cellnames
- (r'[a-z_][\w!:?]*', Name)
- ]
- }
-
-
-class ClojureLexer(RegexLexer):
- """
- Lexer for Clojure source code.
-
- .. versionadded:: 0.11
- """
- name = 'Clojure'
- url = 'http://clojure.org/'
- aliases = ['clojure', 'clj']
- filenames = ['*.clj', '*.cljc']
- mimetypes = ['text/x-clojure', 'application/x-clojure']
-
- special_forms = (
- '.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
- )
-
-    # It's safe to consider 'ns' a declaration because it defines a new
-    # namespace.
- declarations = (
- 'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
- 'defstruct', 'defonce', 'declare', 'definline', 'definterface',
- 'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
- )
-
- builtins = (
- '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
- 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
- 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
- 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
- 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
- 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
- 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
- 'butlast', 'byte', 'cast', 'char', 'children', 'class',
- 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
- 'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
- 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
- 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
- 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
- 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
- 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
- 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
- 'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
- 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
- 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
- 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
- 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
- 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
- 'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
- 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
- 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
- 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
- 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
- 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
- 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
- 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
- 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
- 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
- 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
- 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
- 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
- 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
- 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
- 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
- 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
- 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
- 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
- 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
- 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
- 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
- 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
- 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
- 'vector?', 'when', 'when-first', 'when-let', 'when-not',
- 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
- 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
-
- # valid names for identifiers
-    # well, a name just can't consist entirely of digits,
-    # but this should be good enough for now
-
-    # TODO: '/' should split keywords/symbols into namespace/rest,
-    # but that's hard, so just pretend '/' is part of the name
- valid_name = r'(?!#)[\w!$%*+<=>?/.#|-]+'
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r',+', Text),
- (r'\s+', Whitespace),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+/\d+', Number),
- (r'-?\d+', Number.Integer),
- (r'0x-?[abcdef\d]+', Number.Hex),
-
- # strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
-
- # keywords
- (r'::?#?' + valid_name, String.Symbol),
-
- # special operators
- (r'~@|[`\'#^~&@]', Operator),
-
- # highlight the special forms
- (words(special_forms, suffix=' '), Keyword),
-
-            # Technically, only the special forms are 'keywords'. The problem
-            # is that treating only them as keywords would force things like
-            # 'defn' and 'ns' to be highlighted as builtins, which looks ugly
-            # and weird in most styles. So, as a compromise, we highlight
-            # them as Keyword.Declaration.
- (words(declarations, suffix=' '), Keyword.Declaration),
-
- # highlight the builtins
- (words(builtins, suffix=' '), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Clojure accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Clojure accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- ],
- }
-
-
-class ClojureScriptLexer(ClojureLexer):
- """
- Lexer for ClojureScript source code.
-
- .. versionadded:: 2.0
- """
- name = 'ClojureScript'
- url = 'http://clojure.org/clojurescript'
- aliases = ['clojurescript', 'cljs']
- filenames = ['*.cljs']
- mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
-
-
-class TeaLangLexer(RegexLexer):
- """
- For Tea source code. Only used within a
- TeaTemplateLexer.
-
- .. versionadded:: 1.5
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Operator)),
- (r'[^\S\n]+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
- (r'(and|break|else|foreach|if|in|not|or|reverse)\b',
- Keyword),
- (r'(as|call|define)\b', Keyword.Declaration),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(template)(\s+)', bygroups(Keyword.Declaration, Whitespace), 'template'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'import'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_\$]\w*', Name),
- (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Whitespace)
- ],
- 'template': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class CeylonLexer(RegexLexer):
- """
- For Ceylon source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Ceylon'
- url = 'http://ceylon-lang.org/'
- aliases = ['ceylon']
- filenames = ['*.ceylon']
- mimetypes = ['text/x-ceylon']
-
- flags = re.MULTILINE | re.DOTALL
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Operator)),
- (r'[^\S\n]+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
- r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
- r'annotation|sealed)\b', Name.Decorator),
- (r'(break|case|catch|continue|else|finally|for|in|'
- r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
- r'nonempty|then|outer|assert|let)\b', Keyword),
- (r'(abstracts|extends|satisfies|'
- r'super|given|of|out|assign)\b', Keyword.Declaration),
- (r'(function|value|void|new)\b',
- Keyword.Type),
- (r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Whitespace)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface|object|alias)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'import'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
- (r'(\.)([a-z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
- (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
- Number.Float),
- (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
- (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
- Number.Float),
- (r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
- (r'#[0-9a-fA-F]+', Number.Hex),
- (r'\$([01]{4})(_[01]{4})+', Number.Bin),
- (r'\$[01]+', Number.Bin),
- (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
- (r'[0-9]+[kMGTP]?', Number.Integer),
- (r'\n', Whitespace)
- ],
- 'class': [
- (r'[A-Za-z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[a-z][\w.]*',
- Name.Namespace, '#pop')
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
-
-
-class KotlinLexer(RegexLexer):
- """
- For Kotlin source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Kotlin'
- url = 'http://kotlinlang.org/'
- aliases = ['kotlin']
- filenames = ['*.kt', '*.kts']
- mimetypes = ['text/x-kotlin']
-
- flags = re.MULTILINE | re.DOTALL
-
- kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
- 'Mn', 'Mc') + ']*')
-
- kt_space_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
- 'Mn', 'Mc', 'Zs')
- + r'\'~!%^&*()+=|\[\]:;,.<>/\?-]*')
-
- kt_id = '(' + kt_name + '|`' + kt_space_name + '`)'
-
- modifiers = (r'actual|abstract|annotation|companion|const|crossinline|'
- r'data|enum|expect|external|final|infix|inline|inner|'
- r'internal|lateinit|noinline|open|operator|override|private|'
- r'protected|public|sealed|suspend|tailrec|value')
-
- tokens = {
- 'root': [
- # Whitespaces
- (r'[^\S\n]+', Whitespace),
- (r'\s+', Whitespace),
- (r'\\$', String.Escape), # line continuation
- (r'\n', Whitespace),
- # Comments
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'^(#!/.+?)(\n)', bygroups(Comment.Single, Whitespace)), # shebang for kotlin scripts
- (r'/[*].*?[*]/', Comment.Multiline),
- # Keywords
- (r'as\?', Keyword),
- (r'(as|break|by|catch|constructor|continue|do|dynamic|else|finally|'
- r'get|for|if|init|[!]*in|[!]*is|out|reified|return|set|super|this|'
- r'throw|try|typealias|typeof|vararg|when|where|while)\b', Keyword),
- (r'it\b', Name.Builtin),
- # Built-in types
- (words(('Boolean?', 'Byte?', 'Char?', 'Double?', 'Float?',
- 'Int?', 'Long?', 'Short?', 'String?', 'Any?', 'Unit?')), Keyword.Type),
- (words(('Boolean', 'Byte', 'Char', 'Double', 'Float',
- 'Int', 'Long', 'Short', 'String', 'Any', 'Unit'), suffix=r'\b'), Keyword.Type),
- # Constants
- (r'(true|false|null)\b', Keyword.Constant),
- # Imports
- (r'(package|import)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Namespace)),
- # Dot access
- (r'(\?\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
- (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation, Name.Attribute)),
- # Annotations
- (r'@[^\W\d][\w.]*', Name.Decorator),
- # Labels
- (r'[^\W\d][\w.]+@', Name.Decorator),
- # Object expression
- (r'(object)(\s+)(:)(\s+)', bygroups(Keyword, Whitespace, Punctuation, Whitespace), 'class'),
- # Types
- (r'((?:(?:' + modifiers + r'|fun)\s+)*)(class|interface|object)(\s+)',
- bygroups(using(this, state='modifiers'), Keyword.Declaration, Whitespace), 'class'),
- # Variables
- (r'(var|val)(\s+)(\()', bygroups(Keyword.Declaration, Whitespace, Punctuation),
- 'destructuring_assignment'),
- (r'((?:(?:' + modifiers + r')\s+)*)(var|val)(\s+)',
- bygroups(using(this, state='modifiers'), Keyword.Declaration, Whitespace), 'variable'),
- # Functions
- (r'((?:(?:' + modifiers + r')\s+)*)(fun)(\s+)',
- bygroups(using(this, state='modifiers'), Keyword.Declaration, Whitespace), 'function'),
- # Operators
- (r'::|!!|\?[:.]', Operator),
- (r'[~^*!%&\[\]<>|+=/?-]', Operator),
- # Punctuation
- (r'[{}();:.,]', Punctuation),
- # Strings
- (r'"""', String, 'multiline_string'),
- (r'"', String, 'string'),
- (r"'\\.'|'[^\\]'", String.Char),
- # Numbers
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- # Identifiers
- (r'' + kt_id + r'((\?[^.])?)', Name) # additionally handle nullable types
- ],
- 'class': [
- (kt_id, Name.Class, '#pop')
- ],
- 'variable': [
- (kt_id, Name.Variable, '#pop')
- ],
- 'destructuring_assignment': [
- (r',', Punctuation),
- (r'\s+', Whitespace),
- (kt_id, Name.Variable),
- (r'(:)(\s+)(' + kt_id + ')', bygroups(Punctuation, Whitespace, Name)),
- (r'<', Operator, 'generic'),
- (r'\)', Punctuation, '#pop')
- ],
- 'function': [
- (r'<', Operator, 'generic'),
- (r'' + kt_id + r'(\.)' + kt_id, bygroups(Name, Punctuation, Name.Function), '#pop'),
- (kt_id, Name.Function, '#pop')
- ],
- 'generic': [
- (r'(>)(\s*)', bygroups(Operator, Whitespace), '#pop'),
- (r':', Punctuation),
- (r'(reified|out|in)\b', Keyword),
- (r',', Punctuation),
- (r'\s+', Whitespace),
- (kt_id, Name)
- ],
- 'modifiers': [
- (r'\w+', Keyword.Declaration),
- (r'\s+', Whitespace),
- default('#pop')
- ],
- 'string': [
- (r'"', String, '#pop'),
- include('string_common')
- ],
- 'multiline_string': [
- (r'"""', String, '#pop'),
- (r'"', String),
- include('string_common')
- ],
- 'string_common': [
- (r'\\\\', String), # escaped backslash
- (r'\\"', String), # escaped quote
- (r'\\', String), # bare backslash
- (r'\$\{', String.Interpol, 'interpolation'),
- (r'(\$)(\w+)', bygroups(String.Interpol, Name)),
- (r'[^\\"$]+', String)
- ],
- 'interpolation': [
- (r'"', String),
- (r'\$\{', String.Interpol, 'interpolation'),
- (r'\{', Punctuation, 'scope'),
- (r'\}', String.Interpol, '#pop'),
- include('root')
- ],
- 'scope': [
- (r'\{', Punctuation, 'scope'),
- (r'\}', Punctuation, '#pop'),
- include('root')
- ]
- }
-
-
-class XtendLexer(RegexLexer):
- """
- For Xtend source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Xtend'
- url = 'https://www.eclipse.org/xtend/'
- aliases = ['xtend']
- filenames = ['*.xtend']
- mimetypes = ['text/x-xtend']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_$][\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Operator)),
- (r'[^\S\n]+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
- r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
- Keyword),
- (r'(def|abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Whitespace),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'import'),
- (r"(''')", String, 'template'),
- (r'(\u00BB)', String, 'template'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Whitespace)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- 'template': [
- (r"'''", String, '#pop'),
- (r'\u00AB', String, '#pop'),
- (r'.', String)
- ],
- }
-
-
-class PigLexer(RegexLexer):
- """
- For Pig Latin source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pig'
- url = 'https://pig.apache.org/'
- aliases = ['pig']
- filenames = ['*.pig']
- mimetypes = ['text/x-pig']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'--.*', Comment),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'\\$', String.Escape),
- (r'\\', Text),
- (r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
- include('keywords'),
- include('types'),
- include('builtins'),
- include('punct'),
- include('operators'),
- (r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Whitespace),
- (r'([a-z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Whitespace, Punctuation)),
- (r'[()#:]', Text),
- (r'[^(:#\'")\s]+', Text),
- (r'\S+\s+', Text) # TODO: make tests pass without \s+
- ],
- 'keywords': [
- (r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
- r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
-             r'eval|exec|explain|filter|flatten|foreach|full|generate|group|'
- r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
- r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
- r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
- r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
- r'stream|through|union|using|void)\b', Keyword)
- ],
- 'builtins': [
- (r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
- r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
- r'TOKENIZE)\b', Name.Builtin)
- ],
- 'types': [
- (r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
- r'int|long|tuple)\b', Keyword.Type)
- ],
- 'punct': [
- (r'[;(){}\[\]]', Punctuation),
- ],
- 'operators': [
- (r'[#=,./%+\-?]', Operator),
- (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
- (r'(==|<=|<|>=|>|!=)', Operator),
- ],
- }
-
-
-class GoloLexer(RegexLexer):
- """
- For Golo source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Golo'
- url = 'http://golo-lang.org/'
- filenames = ['*.golo']
- aliases = ['golo']
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Whitespace),
-
- (r'#.*$', Comment),
-
- (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
- Operator),
- (r'(?<=[^-])(-)(?=[^-])', Operator),
-
- (r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
- (r'[]{}|(),[]', Punctuation),
-
- (r'(module|import)(\s+)',
- bygroups(Keyword.Namespace, Whitespace),
- 'modname'),
- (r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
- (r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
-
- (r'(let|var)(\s+)',
- bygroups(Keyword.Declaration, Whitespace),
- 'varname'),
- (r'(struct)(\s+)',
- bygroups(Keyword.Declaration, Whitespace),
- 'structname'),
- (r'(function)(\s+)',
- bygroups(Keyword.Declaration, Whitespace),
- 'funcname'),
-
- (r'(null|true|false)\b', Keyword.Constant),
- (r'(augment|pimp'
- r'|if|else|case|match|return'
- r'|case|when|then|otherwise'
- r'|while|for|foreach'
- r'|try|catch|finally|throw'
- r'|local'
- r'|continue|break)\b', Keyword),
-
- (r'(map|array|list|set|vector|tuple)(\[)',
- bygroups(Name.Builtin, Punctuation)),
- (r'(print|println|readln|raise|fun'
- r'|asInterfaceInstance)\b', Name.Builtin),
- (r'(`?[a-zA-Z_][\w$]*)(\()',
- bygroups(Name.Function, Punctuation)),
-
- (r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'-?\d[\d_]*L', Number.Integer.Long),
- (r'-?\d[\d_]*', Number.Integer),
-
- (r'`?[a-zA-Z_][\w$]*', Name),
- (r'@[a-zA-Z_][\w$.]*', Name.Decorator),
-
- (r'"""', String, combined('stringescape', 'triplestring')),
- (r'"', String, combined('stringescape', 'doublestring')),
- (r"'", String, combined('stringescape', 'singlestring')),
- (r'----((.|\n)*?)----', String.Doc)
-
- ],
-
- 'funcname': [
- (r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
- ],
- 'modname': [
- (r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
- ],
- 'structname': [
- (r'`?[\w.]+\*?', Name.Class, '#pop')
- ],
- 'varname': [
- (r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
- ],
- 'string': [
- (r'[^\\\'"\n]+', String),
- (r'[\'"\\]', String)
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'triplestring': [
- (r'"""', String, '#pop'),
- include('string'),
- (r'\n', String),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- include('string'),
- ],
- 'singlestring': [
- (r"'", String, '#pop'),
- include('string'),
- ],
- 'operators': [
- (r'[#=,./%+\-?]', Operator),
- (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
- (r'(==|<=|<|>=|>|!=)', Operator),
- ],
- }
-
-
-class JasminLexer(RegexLexer):
- """
- For Jasmin assembly code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Jasmin'
- url = 'http://jasmin.sourceforge.net/'
- aliases = ['jasmin', 'jasminxt']
- filenames = ['*.j']
-
- _whitespace = r' \n\t\r'
- _ws = r'(?:[%s]+)' % _whitespace
- _separator = r'%s:=' % _whitespace
- _break = r'(?=[%s]|$)' % _separator
- _name = r'[^%s]+' % _separator
- _unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
-
- tokens = {
- 'default': [
- (r'\n', Whitespace, '#pop'),
- (r"'", String.Single, ('#pop', 'quote')),
- (r'"', String.Double, 'string'),
- (r'=', Punctuation),
- (r':', Punctuation, 'label'),
- (_ws, Whitespace),
- (r';.*', Comment.Single),
- (r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
- (r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
- (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
- r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
- (r'\$%s' % _name, Name.Variable),
-
- # Directives
- (r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
- (r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
- r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
- r'annotation|bridge|class|default|enum|field|final|fpstrict|'
- r'interface|native|private|protected|public|signature|static|'
- r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
- Keyword.Reserved),
- (r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
- (r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
- r'invisibleparam|outer|visible|visibleparam)%s' % _break,
- Keyword.Reserved, 'class/convert-dots'),
- (r'\.field%s' % _break, Keyword.Reserved,
- ('descriptor/convert-dots', 'field')),
- (r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
- 'no-verification'),
- (r'\.method%s' % _break, Keyword.Reserved, 'method'),
- (r'\.set%s' % _break, Keyword.Reserved, 'var'),
- (r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
- (r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
- (r'is%s' % _break, Keyword.Reserved,
- ('descriptor/convert-dots', 'var')),
- (r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
- (r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
-
- # Instructions
- (words((
- 'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
- 'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
- 'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
- 'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
- 'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
- 'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
- 'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
- 'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
- 'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
- 'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
- 'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
- 'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
- 'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
- 'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
- 'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
- 'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
- 'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
- 'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
- 'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
- 'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
- 'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
- 'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
- (r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
- Keyword.Reserved, 'class/no-dots'),
- (r'invoke(dynamic|interface|nonvirtual|special|'
- r'static|virtual)%s' % _break, Keyword.Reserved,
- 'invocation'),
- (r'(getfield|putfield)%s' % _break, Keyword.Reserved,
- ('descriptor/no-dots', 'field')),
- (r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
- ('descriptor/no-dots', 'static')),
- (words((
- 'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
- 'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
- 'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
- 'ifnull', 'jsr', 'jsr_w'), suffix=_break),
- Keyword.Reserved, 'label'),
- (r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
- 'descriptor/convert-dots'),
- (r'tableswitch%s' % _break, Keyword.Reserved, 'table')
- ],
- 'quote': [
- (r"'", String.Single, '#pop'),
- (r'\\u[\da-fA-F]{4}', String.Escape),
- (r"[^'\\]+", String.Single)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
- String.Escape),
- (r'[^"\\]+', String.Double)
- ],
- 'root': [
- (r'\n+', Whitespace),
- (r"'", String.Single, 'quote'),
- include('default'),
- (r'(%s)([ \t\r]*)(:)' % _name,
- bygroups(Name.Label, Whitespace, Punctuation)),
- (_name, String.Other)
- ],
- 'annotation': [
- (r'\n', Whitespace, ('#pop', 'annotation-body')),
- (r'default%s' % _break, Keyword.Reserved,
- ('#pop', 'annotation-default')),
- include('default')
- ],
- 'annotation-body': [
- (r'\n+', Whitespace),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- (_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
- ],
- 'annotation-default': [
- (r'\n+', Whitespace),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- default(('annotation-items', 'descriptor/no-dots'))
- ],
- 'annotation-items': [
- (r"'", String.Single, 'quote'),
- include('default'),
- (_name, String.Other)
- ],
- 'caught-exception': [
- (r'all%s' % _break, Keyword, '#pop'),
- include('exception')
- ],
- 'class/convert-dots': [
- include('default'),
- (r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class), '#pop')
- ],
- 'class/no-dots': [
- include('default'),
- (r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class), '#pop')
- ],
- 'descriptor/convert-dots': [
- include('default'),
- (r'\[+', Punctuation),
- (r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
- default('#pop')
- ],
- 'descriptor/no-dots': [
- include('default'),
- (r'\[+', Punctuation),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
- default('#pop')
- ],
- 'descriptors/convert-dots': [
- (r'\)', Punctuation, '#pop'),
- default('descriptor/convert-dots')
- ],
- 'enclosing-method': [
- (_ws, Whitespace),
- (r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
- default(('#pop', 'class/convert-dots'))
- ],
- 'exception': [
- include('default'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Exception), '#pop')
- ],
- 'field': [
- (r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
- include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
- '#pop')
- ],
- 'invocation': [
- include('default'),
- (r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
- ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
- 'descriptor/convert-dots'))
- ],
- 'label': [
- include('default'),
- (_name, Name.Label, '#pop')
- ],
- 'method': [
- include('default'),
- (r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
- ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
- 'descriptor/convert-dots'))
- ],
- 'no-verification': [
- (r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
- include('default')
- ],
- 'static': [
- include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
- ],
- 'table': [
- (r'\n+', Whitespace),
- (r'default%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- (_name, Name.Label)
- ],
- 'var': [
- include('default'),
- (_name, Name.Variable, '#pop')
- ],
- 'verification': [
- include('default'),
- (r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
- _break, Keyword, '#pop'),
- (r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
- (r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
- ]
- }
-
- def analyse_text(text):
- score = 0
- if re.search(r'^\s*\.class\s', text, re.MULTILINE):
- score += 0.5
- if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
- score += 0.3
- if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
- r'inner|interface|limit|set|signature|stack)\b', text,
- re.MULTILINE):
- score += 0.6
- return min(score, 1.0)
-
-
-class SarlLexer(RegexLexer):
- """
- For SARL source code.
-
- .. versionadded:: 2.4
- """
-
- name = 'SARL'
- url = 'http://www.sarl.io'
- aliases = ['sarl']
- filenames = ['*.sarl']
- mimetypes = ['text/x-sarl']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_$][\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Whitespace, Operator)),
- (r'[^\S\n]+', Whitespace),
- (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(as|break|case|catch|default|do|else|extends|extension|finally|'
- r'fires|for|if|implements|instanceof|new|on|requires|return|super|'
- r'switch|throw|throws|try|typeof|uses|while|with)\b',
- Keyword),
- (r'(abstract|def|dispatch|final|native|override|private|protected|'
- r'public|static|strictfp|synchronized|transient|val|var|volatile)\b',
- Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace)),
- (r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant),
- (r'(agent|annotation|artifact|behavior|capacity|class|enum|event|'
- r'interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Whitespace),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'import'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Whitespace)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
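For reference, the JVM-family lexers removed above (GosuLexer through SarlLexer) are all registered under the aliases listed in their aliases attributes, so they are normally reached through the public Pygments API rather than imported from this module directly. A minimal sketch, assuming Pygments is installed; the 'kotlin' alias comes from KotlinLexer above, and the sample source string is illustrative only:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    # Resolve a lexer by one of the aliases declared above, then highlight a snippet.
    kotlin_src = 'fun main() { println("hello") }'  # illustrative sample, not taken from the diff
    lexer = get_lexer_by_name('kotlin')
    print(highlight(kotlin_src, lexer, TerminalFormatter()))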
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/kuin.py b/venv/lib/python3.11/site-packages/pygments/lexers/kuin.py
deleted file mode 100644
index aeb9cad..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/kuin.py
+++ /dev/null
@@ -1,333 +0,0 @@
-"""
- pygments.lexers.kuin
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Kuin language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, using, this, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['KuinLexer']
-
-
-class KuinLexer(RegexLexer):
- """
- For Kuin source code.
-
- .. versionadded:: 2.9
- """
- name = 'Kuin'
- url = 'https://github.com/kuina/Kuin'
- aliases = ['kuin']
- filenames = ['*.kn']
-
- tokens = {
- 'root': [
- include('statement'),
- ],
- 'statement': [
- # Whitespace / Comment
- include('whitespace'),
-
- # Block-statement
- (r'(\+?)([ \t]*)(\*?)([ \t]*)(\bfunc)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)',
- bygroups(Keyword,Whitespace, Keyword, Whitespace, Keyword,
- using(this), Name.Function), 'func_'),
- (r'\b(class)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)',
- bygroups(Keyword, using(this), Name.Class), 'class_'),
- (r'\b(enum)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)',
- bygroups(Keyword, using(this), Name.Constant), 'enum_'),
- (r'\b(block)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'block_'),
- (r'\b(ifdef)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'ifdef_'),
- (r'\b(if)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'if_'),
- (r'\b(switch)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'switch_'),
- (r'\b(while)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'while_'),
- (r'\b(for)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'for_'),
- (r'\b(foreach)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'foreach_'),
- (r'\b(try)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?',
- bygroups(Keyword, using(this), Name.Other), 'try_'),
-
- # Line-statement
- (r'\b(do)\b', Keyword, 'do'),
- (r'(\+?[ \t]*\bvar)\b', Keyword, 'var'),
- (r'\b(const)\b', Keyword, 'const'),
- (r'\b(ret)\b', Keyword, 'ret'),
- (r'\b(throw)\b', Keyword, 'throw'),
- (r'\b(alias)\b', Keyword, 'alias'),
- (r'\b(assert)\b', Keyword, 'assert'),
- (r'\|', Text, 'continued_line'),
- (r'[ \t]*\n', Whitespace),
- ],
-
- # Whitespace / Comment
- 'whitespace': [
-            (r'^([ \t]*)(;.*)', bygroups(Whitespace, Comment.Single)),
- (r'[ \t]+(?![; \t])', Whitespace),
- (r'\{', Comment.Multiline, 'multiline_comment'),
- ],
- 'multiline_comment': [
- (r'\{', Comment.Multiline, 'multiline_comment'),
- (r'(?:\s*;.*|[^{}\n]+)', Comment.Multiline),
- (r'\n', Comment.Multiline),
- (r'\}', Comment.Multiline, '#pop'),
- ],
-
- # Block-statement
- 'func_': [
- include('expr'),
- (r'\n', Whitespace, 'func'),
- ],
- 'func': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(func)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- ],
- 'class_': [
- include('expr'),
- (r'\n', Whitespace, 'class'),
- ],
- 'class': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(class)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- ],
- 'enum_': [
- include('expr'),
- (r'\n', Whitespace, 'enum'),
- ],
- 'enum': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(enum)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('expr'),
- (r'\n', Whitespace),
- ],
- 'block_': [
- include('expr'),
- (r'\n', Whitespace, 'block'),
- ],
- 'block': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(block)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'ifdef_': [
- include('expr'),
- (r'\n', Whitespace, 'ifdef'),
- ],
- 'ifdef': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(ifdef)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('rls', 'dbg'), prefix=r'\b', suffix=r'\b'),
- Keyword.Constant, 'ifdef_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'ifdef_sp': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'if_': [
- include('expr'),
- (r'\n', Whitespace, 'if'),
- ],
- 'if': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(if)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('elif', 'else'), prefix=r'\b', suffix=r'\b'), Keyword, 'if_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'if_sp': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'switch_': [
- include('expr'),
- (r'\n', Whitespace, 'switch'),
- ],
- 'switch': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(switch)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('case', 'default', 'to'), prefix=r'\b', suffix=r'\b'),
- Keyword, 'switch_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'switch_sp': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'while_': [
- include('expr'),
- (r'\n', Whitespace, 'while'),
- ],
- 'while': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(while)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'for_': [
- include('expr'),
- (r'\n', Whitespace, 'for'),
- ],
- 'for': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(for)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'foreach_': [
- include('expr'),
- (r'\n', Whitespace, 'foreach'),
- ],
- 'foreach': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(foreach)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'try_': [
- include('expr'),
- (r'\n', Whitespace, 'try'),
- ],
- 'try': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(try)\b',
- bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('catch', 'finally', 'to'), prefix=r'\b', suffix=r'\b'),
- Keyword, 'try_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'try_sp': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
-
- # Line-statement
- 'break': [
- (r'\b(break)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)',
- bygroups(Keyword, using(this), Name.Other)),
- ],
- 'skip': [
- (r'\b(skip)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)',
- bygroups(Keyword, using(this), Name.Other)),
- ],
- 'alias': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'assert': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'const': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'do': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'ret': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'throw': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'var': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
- 'continued_line': [
- include('expr'),
- (r'\n', Whitespace, '#pop'),
- ],
-
- 'expr': [
- # Whitespace / Comment
- include('whitespace'),
-
- # Punctuation
- (r'\(', Punctuation,),
- (r'\)', Punctuation,),
- (r'\[', Punctuation,),
- (r'\]', Punctuation,),
- (r',', Punctuation),
-
- # Keyword
- (words((
- 'true', 'false', 'null', 'inf'
- ), prefix=r'\b', suffix=r'\b'), Keyword.Constant),
- (words((
- 'me'
- ), prefix=r'\b', suffix=r'\b'), Keyword),
- (words((
- 'bit16', 'bit32', 'bit64', 'bit8', 'bool',
- 'char', 'class', 'dict', 'enum', 'float', 'func',
- 'int', 'list', 'queue', 'stack'
- ), prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- # Number
-            (r'\b[0-9]\.[0-9]+(?!\.)(?:e[\+-][0-9]+)?\b', Number.Float),
- (r'\b2#[01]+(?:b(?:8|16|32|64))?\b', Number.Bin),
- (r'\b8#[0-7]+(?:b(?:8|16|32|64))?\b', Number.Oct),
- (r'\b16#[0-9A-F]+(?:b(?:8|16|32|64))?\b', Number.Hex),
- (r'\b[0-9]+(?:b(?:8|16|32|64))?\b', Number.Decimal),
-
- # String / Char
- (r'"', String.Double, 'string'),
- (r"'(?:\\.|.)+?'", String.Char),
-
- # Operator
- (r'(?:\.|\$(?:>|<)?)', Operator),
- (r'(?:\^)', Operator),
- (r'(?:\+|-|!|##?)', Operator),
- (r'(?:\*|/|%)', Operator),
- (r'(?:~)', Operator),
- (r'(?:(?:=|<>)(?:&|\$)?|<=?|>=?)', Operator),
- (r'(?:&)', Operator),
- (r'(?:\|)', Operator),
- (r'(?:\?)', Operator),
- (r'(?::(?::|\+|-|\*|/|%|\^|~)?)', Operator),
-
- # Identifier
- (r"\b([a-zA-Z_][0-9a-zA-Z_]*)(?=@)\b", Name),
- (r"(@)?\b([a-zA-Z_][0-9a-zA-Z_]*)\b",
- bygroups(Name.Other, Name.Variable)),
- ],
-
- # String
- 'string': [
- (r'(?:\\[^{\n]|[^"\\])+', String.Double),
- (r'\\\{', String.Double, 'toStrInString'),
- (r'"', String.Double, '#pop'),
- ],
- 'toStrInString': [
- include('expr'),
- (r'\}', String.Double, '#pop'),
- ],
- }
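As a quick way to exercise the 'kuin' alias registered above without a formatter, one could stream raw tokens instead; a minimal sketch, assuming Pygments is installed, with a made-up Kuin snippet:

    from pygments.lexers import get_lexer_by_name

    # Stream the (token type, value) pairs produced by the lexer.
    kuin_src = 'func main()\n  do @print("hello")\nend func\n'  # made-up Kuin sample
    for token_type, value in get_lexer_by_name('kuin').get_tokens(kuin_src):
        print(token_type, repr(value))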
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/kusto.py b/venv/lib/python3.11/site-packages/pygments/lexers/kusto.py
deleted file mode 100644
index 9f30fd6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/kusto.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
- pygments.lexers.kusto
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Kusto Query Language (KQL).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import (Comment, Keyword, Name, Number, Punctuation,
- String, Whitespace)
-
-__all__ = ["KustoLexer"]
-
-# Although these all seem to be keywords
-# https://github.com/microsoft/Kusto-Query-Language/blob/master/src/Kusto.Language/Syntax/SyntaxFacts.cs
-# it appears that only the ones with tags here
-# https://github.com/microsoft/Kusto-Query-Language/blob/master/src/Kusto.Language/Parser/QueryGrammar.cs
-# are highlighted in the Azure portal log query editor.
-KUSTO_KEYWORDS = [
- 'and', 'as', 'between', 'by', 'consume', 'contains', 'containscs', 'count',
- 'distinct', 'evaluate', 'extend', 'facet', 'filter', 'find', 'fork',
- 'getschema', 'has', 'invoke', 'join', 'limit', 'lookup', 'make-series',
- 'matches regex', 'mv-apply', 'mv-expand', 'notcontains', 'notcontainscs',
- '!contains', '!has', '!startswith', 'on', 'or', 'order', 'parse', 'parse-where',
- 'parse-kv', 'partition', 'print', 'project', 'project-away', 'project-keep',
- 'project-rename', 'project-reorder', 'range', 'reduce', 'regex', 'render',
- 'sample', 'sample-distinct', 'scan', 'search', 'serialize', 'sort', 'startswith',
- 'summarize', 'take', 'top', 'top-hitters', 'top-nested', 'typeof', 'union',
- 'where', 'bool', 'date', 'datetime', 'int', 'long', 'real', 'string', 'time'
-]
-
-# From
-# https://github.com/microsoft/Kusto-Query-Language/blob/master/src/Kusto.Language/Syntax/SyntaxFacts.cs
-KUSTO_PUNCTUATION = [
- "(", ")", "[", "]", "{", "}", "|", "<|", "+", "-", "*", "/",
-    "%", "..", "!", "<", "<=", ">", ">=", "=", "==", "!=", "<>",
- ":", ";", ",", "=~", "!~", "?", "=>",
-]
-
-
-class KustoLexer(RegexLexer):
- """For Kusto Query Language source code.
-
- .. versionadded:: 2.17
- """
-
- name = "Kusto"
- aliases = ["kql", "kusto"]
-    filenames = ["*.kql", "*.kusto", "*.csl"]
- url = "https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query"
-
- tokens = {
- "root": [
- (r"\s+", Whitespace),
- (words(KUSTO_KEYWORDS, suffix=r"\b"), Keyword),
- (r"//.*", Comment),
- (words(KUSTO_PUNCTUATION), Punctuation),
- (r"[^\W\d]\w*", Name),
- # Numbers can take the form 1, .1, 1., 1.1, 1.1111, etc.
- (r"\d+[.]\d*|[.]\d+", Number.Float),
- (r"\d+", Number.Integer),
- (r"'", String, "single_string"),
- (r'"', String, "double_string"),
- (r"@'", String, "single_verbatim"),
- (r'@"', String, "double_verbatim"),
- (r"```", String, "multi_string"),
- ],
- "single_string": [
- (r"'", String, "#pop"),
- (r"\\.", String.Escape),
- (r"[^'\\]+", String),
- ],
- "double_string": [
- (r'"', String, "#pop"),
- (r"\\.", String.Escape),
- (r'[^"\\]+', String),
- ],
- "single_verbatim": [
- (r"'", String, "#pop"),
- (r"[^']+", String),
- ],
- "double_verbatim": [
- (r'"', String, "#pop"),
- (r'[^"]+', String),
- ],
- "multi_string": [
- (r"[^`]+", String),
- (r"```", String, "#pop"),
- (r"`", String),
- ],
- }
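The KustoLexer above registers both 'kql' and 'kusto' aliases, so either name resolves through the lexer registry; a minimal sketch producing HTML output, assuming Pygments is installed (the query text is illustrative only):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    kql = 'StormEvents | where State == "TEXAS" | take 10'  # illustrative query
    print(highlight(kql, get_lexer_by_name('kql'), HtmlFormatter(nowrap=True)))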
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ldap.py b/venv/lib/python3.11/site-packages/pygments/lexers/ldap.py
deleted file mode 100644
index a669f79..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ldap.py
+++ /dev/null
@@ -1,157 +0,0 @@
-"""
- pygments.lexers.ldap
- ~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for LDAP.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-
-from pygments.lexer import RegexLexer, bygroups, default
-from pygments.token import Operator, Comment, Keyword, Literal, Name, String, \
- Number, Punctuation, Whitespace, Escape
-
-__all__ = ['LdifLexer', 'LdaprcLexer']
-
-
-class LdifLexer(RegexLexer):
-
- """
- Lexer for LDIF
-
- .. versionadded:: 2.17
- """
-
- name = 'LDIF'
- aliases = ['ldif']
- filenames = ['*.ldif']
- mimetypes = ["text/x-ldif"]
- url = "https://datatracker.ietf.org/doc/html/rfc2849"
-
- tokens = {
- 'root': [
- (r'\s*\n', Whitespace),
- (r'(-)(\n)', bygroups(Punctuation, Whitespace)),
- (r'(#.*)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'(version)(:)([ \t]*)(.*)([ \t]*\n)', bygroups(Keyword,
- Punctuation, Whitespace, Number.Integer, Whitespace)),
- (r'(control)(:)([ \t]*)([\.0-9]+)([ \t]+)((?:true|false)?)([ \t]*)',
- bygroups(Keyword, Punctuation, Whitespace, Name.Other, Whitespace, Keyword, Whitespace), "after-control"),
- (r'(deleteoldrdn)(:)([ \n]*)([0-1]+)([ \t]*\n)',
- bygroups(Keyword, Punctuation, Whitespace, Number, Whitespace)),
- (r'(add|delete|replace)(::?)(\s*)(.*)([ \t]*\n)', bygroups(
- Keyword, Punctuation, Whitespace, Name.Attribute, Whitespace)),
- (r'(changetype)(:)([ \t]*)([a-z]*)([ \t]*\n)',
- bygroups(Keyword, Punctuation, Whitespace, Keyword, Whitespace)),
- (r'(dn|newrdn)(::)', bygroups(Keyword, Punctuation), "base64-dn"),
- (r'(dn|newrdn)(:)', bygroups(Keyword, Punctuation), "dn"),
- (r'(objectclass)(:)([ \t]*)([^ \t\n]*)([ \t]*\n)', bygroups(
- Keyword, Punctuation, Whitespace, Name.Class, Whitespace)),
- (r'([a-zA-Z]*|[0-9][0-9\.]*[0-9])(;)',
- bygroups(Name.Attribute, Punctuation), "property"),
- (r'([a-zA-Z]*|[0-9][0-9\.]*[0-9])(:<)',
- bygroups(Name.Attribute, Punctuation), "url"),
- (r'([a-zA-Z]*|[0-9][0-9\.]*[0-9])(::?)',
- bygroups(Name.Attribute, Punctuation), "value"),
- ],
- "after-control": [
- (r":<", Punctuation, ("#pop", "url")),
- (r"::?", Punctuation, ("#pop", "value")),
- default("#pop"),
- ],
- 'property': [
- (r'([-a-zA-Z0-9]*)(;)', bygroups(Name.Property, Punctuation)),
- (r'([-a-zA-Z0-9]*)(:<)',
- bygroups(Name.Property, Punctuation), ("#pop", "url")),
- (r'([-a-zA-Z0-9]*)(::?)',
- bygroups(Name.Property, Punctuation), ("#pop", "value")),
- ],
- 'value': [
- (r'(\s*)([^\n]+\S)(\n )',
- bygroups(Whitespace, String, Whitespace)),
- (r'(\s*)([^\n]+\S)(\n)',
- bygroups(Whitespace, String, Whitespace), "#pop"),
- ],
- 'url': [
- (r'([ \t]*)(\S*)([ \t]*\n )',
- bygroups(Whitespace, Comment.PreprocFile, Whitespace)),
- (r'([ \t]*)(\S*)([ \t]*\n)', bygroups(Whitespace,
- Comment.PreprocFile, Whitespace), "#pop"),
- ],
- "dn": [
- (r'([ \t]*)([-a-zA-Z0-9\.]+)(=)', bygroups(Whitespace,
- Name.Attribute, Operator), ("#pop", "dn-value")),
- ],
- "dn-value": [
- (r'\\[^\n]', Escape),
- (r',', Punctuation, ("#pop", "dn")),
- (r'\+', Operator, ("#pop", "dn")),
- (r'[^,\+\n]+', String),
- (r'\n ', Whitespace),
- (r'\n', Whitespace, "#pop"),
- ],
- "base64-dn": [
- (r'([ \t]*)([^ \t\n][^ \t\n]*[^\n])([ \t]*\n )',
- bygroups(Whitespace, Name, Whitespace)),
- (r'([ \t]*)([^ \t\n][^ \t\n]*[^\n])([ \t]*\n)',
- bygroups(Whitespace, Name, Whitespace), "#pop"),
- ]
- }
-
-
-class LdaprcLexer(RegexLexer):
- """
- Lexer for OpenLDAP configuration files.
-
- .. versionadded:: 2.17
- """
-
- name = 'LDAP configuration file'
- aliases = ['ldapconf', 'ldaprc']
- filenames = ['.ldaprc', 'ldaprc', 'ldap.conf']
- mimetypes = ["text/x-ldapconf"]
- url = 'https://www.openldap.org/software//man.cgi?query=ldap.conf&sektion=5&apropos=0&manpath=OpenLDAP+2.4-Release'
-
- _sasl_keywords = r'SASL_(?:MECH|REALM|AUTHCID|AUTHZID|CBINDING)'
- _tls_keywords = r'TLS_(?:CACERT|CACERTDIR|CERT|ECNAME|KEY|CIPHER_SUITE|PROTOCOL_MIN|RANDFILE|CRLFILE)'
- _literal_keywords = rf'(?:URI|SOCKET_BIND_ADDRESSES|{_sasl_keywords}|{_tls_keywords})'
- _boolean_keywords = r'GSSAPI_(?:ALLOW_REMOTE_PRINCIPAL|ENCRYPT|SIGN)|REFERRALS|SASL_NOCANON'
- _integer_keywords = r'KEEPALIVE_(?:IDLE|PROBES|INTERVAL)|NETWORK_TIMEOUT|PORT|SIZELIMIT|TIMELIMIT|TIMEOUT'
- _secprops = r'none|noanonymous|noplain|noactive|nodict|forwardsec|passcred|(?:minssf|maxssf|maxbufsize)=\d+'
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'#.*', Comment.Single),
- (r'\s+', Whitespace),
- (rf'({_boolean_keywords})(\s+)(on|true|yes|off|false|no)$',
- bygroups(Keyword, Whitespace, Keyword.Constant)),
- (rf'({_integer_keywords})(\s+)(\d+)',
- bygroups(Keyword, Whitespace, Number.Integer)),
- (r'(VERSION)(\s+)(2|3)', bygroups(Keyword, Whitespace, Number.Integer)),
- # Constants
- (r'(DEREF)(\s+)(never|searching|finding|always)',
- bygroups(Keyword, Whitespace, Keyword.Constant)),
- (rf'(SASL_SECPROPS)(\s+)((?:{_secprops})(?:,{_secprops})*)',
- bygroups(Keyword, Whitespace, Keyword.Constant)),
- (r'(SASL_CBINDING)(\s+)(none|tls-unique|tls-endpoint)',
- bygroups(Keyword, Whitespace, Keyword.Constant)),
- (r'(TLS_REQ(?:CERT|SAN))(\s+)(allow|demand|hard|never|try)',
- bygroups(Keyword, Whitespace, Keyword.Constant)),
- (r'(TLS_CRLCHECK)(\s+)(none|peer|all)',
- bygroups(Keyword, Whitespace, Keyword.Constant)),
- # Literals
- (r'(BASE|BINDDN)(\s+)(\S+)$',
- bygroups(Keyword, Whitespace, Literal)),
- # Accepts hostname with or without port.
- (r'(HOST)(\s+)([a-z0-9]+)((?::(\d+))?)',
- bygroups(Keyword, Whitespace, Literal, Number.Integer)),
- (rf'({_literal_keywords})(\s+)(\S+)$',
- bygroups(Keyword, Whitespace, Literal)),
- ],
- }
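Likewise for the LDIF lexer removed below-deleted ldap.py, a small sketch assuming Pygments 2.17 or newer; the LDIF record is invented for illustration:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import LdifLexer

# A made-up LDIF record used only to exercise the lexer.
ldif = """dn: cn=Jane Doe,dc=example,dc=com
changetype: add
objectclass: inetOrgPerson
cn: Jane Doe
"""

# Emit an HTML fragment with <span> tags carrying the token classes.
print(highlight(ldif, LdifLexer(), HtmlFormatter()))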
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/lean.py b/venv/lib/python3.11/site-packages/pygments/lexers/lean.py
deleted file mode 100644
index d16cd73..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/lean.py
+++ /dev/null
@@ -1,122 +0,0 @@
-"""
- pygments.lexers.lean
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Lean theorem prover.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, default, words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-__all__ = ['Lean3Lexer']
-
-class Lean3Lexer(RegexLexer):
- """
- For the Lean 3 theorem prover.
-
- .. versionadded:: 2.0
- """
- name = 'Lean'
- url = 'https://leanprover-community.github.io/lean3'
- aliases = ['lean', 'lean3']
- filenames = ['*.lean']
- mimetypes = ['text/x-lean', 'text/x-lean3']
-
- tokens = {
- 'expression': [
- (r'\s+', Text),
- (r'/--', String.Doc, 'docstring'),
- (r'/-', Comment, 'comment'),
- (r'--.*?$', Comment.Single),
- (words((
- 'forall', 'fun', 'Pi', 'from', 'have', 'show', 'assume', 'suffices',
- 'let', 'if', 'else', 'then', 'in', 'with', 'calc', 'match',
- 'do'
- ), prefix=r'\b', suffix=r'\b'), Keyword),
- (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error),
- (words(('Sort', 'Prop', 'Type'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words((
- '(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',',
- )), Operator),
- (r'[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]'
- r'[.A-Za-z_\'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079'
- r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name),
- (r'0x[A-Za-z0-9]+', Number.Integer),
- (r'0b[01]+', Number.Integer),
- (r'\d+', Number.Integer),
- (r'"', String.Double, 'string'),
- (r"'(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4})|.)'", String.Char),
- (r'[~?][a-z][\w\']*:', Name.Variable),
- (r'\S', Name.Builtin.Pseudo),
- ],
- 'root': [
- (words((
- 'import', 'renaming', 'hiding',
- 'namespace',
- 'local',
- 'private', 'protected', 'section',
- 'include', 'omit', 'section',
- 'protected', 'export',
- 'open',
- 'attribute',
- ), prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words((
- 'lemma', 'theorem', 'def', 'definition', 'example',
- 'axiom', 'axioms', 'constant', 'constants',
- 'universe', 'universes',
- 'inductive', 'coinductive', 'structure', 'extends',
- 'class', 'instance',
- 'abbreviation',
-
- 'noncomputable theory',
-
- 'noncomputable', 'mutual', 'meta',
-
- 'attribute',
-
- 'parameter', 'parameters',
- 'variable', 'variables',
-
- 'reserve', 'precedence',
- 'postfix', 'prefix', 'notation', 'infix', 'infixl', 'infixr',
-
- 'begin', 'by', 'end',
-
- 'set_option',
- 'run_cmd',
- ), prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
- (r'@\[', Keyword.Declaration, 'attribute'),
- (words((
- '#eval', '#check', '#reduce', '#exit',
- '#print', '#help',
- ), suffix=r'\b'), Keyword),
- include('expression')
- ],
- 'attribute': [
- (r'\]', Keyword.Declaration, '#pop'),
- include('expression'),
- ],
- 'comment': [
- (r'[^/-]', Comment.Multiline),
- (r'/-', Comment.Multiline, '#push'),
- (r'-/', Comment.Multiline, '#pop'),
- (r'[/-]', Comment.Multiline)
- ],
- 'docstring': [
- (r'[^/-]', String.Doc),
- (r'-/', String.Doc, '#pop'),
- (r'[/-]', String.Doc)
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- (r"(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4}))", String.Escape),
- ('"', String.Double, '#pop'),
- ],
- }
-
-LeanLexer = Lean3Lexer
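The Lean 3 lexer removed above can also be looked up by alias; a sketch assuming a Pygments release where the 'lean' / 'lean3' aliases are registered as declared in the deleted file:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

# One-line Lean 3 sample; lexing does not require it to type-check.
lean_src = "theorem add_zero' (n : nat) : n + 0 = n := rfl"

# 'lean3' resolves to Lean3Lexer per the aliases declared above.
print(highlight(lean_src, get_lexer_by_name("lean3"), TerminalFormatter()))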
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/lilypond.py b/venv/lib/python3.11/site-packages/pygments/lexers/lilypond.py
deleted file mode 100644
index 6b4ed20..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/lilypond.py
+++ /dev/null
@@ -1,226 +0,0 @@
-"""
- pygments.lexers.lilypond
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for LilyPond.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import bygroups, default, inherit, words
-from pygments.lexers.lisp import SchemeLexer
-from pygments.lexers._lilypond_builtins import (
- keywords, pitch_language_names, clefs, scales, repeat_types, units,
- chord_modifiers, pitches, music_functions, dynamics, articulations,
- music_commands, markup_commands, grobs, translators, contexts,
- context_properties, grob_properties, scheme_functions, paper_variables,
- header_variables
-)
-from pygments.token import Token
-
-__all__ = ["LilyPondLexer"]
-
-# In LilyPond, (unquoted) name tokens only contain letters, hyphens,
-# and underscores, where hyphens and underscores must not start or end
-# a name token.
-#
-# Note that many of the entities listed as LilyPond built-in keywords
-# (in file `_lilypond_builtins.py`) are only valid if surrounded by
-# double quotes, for example, 'hufnagel-fa1'. This means that
-# `NAME_END_RE` doesn't apply to such entities in valid LilyPond code.
-NAME_END_RE = r"(?=\d|[^\w\-]|[\-_][\W\d])"
-
-def builtin_words(names, backslash, suffix=NAME_END_RE):
- prefix = r"[\-_^]?"
- if backslash == "mandatory":
- prefix += r"\\"
- elif backslash == "optional":
- prefix += r"\\?"
- else:
- assert backslash == "disallowed"
- return words(names, prefix, suffix)
-
-
-class LilyPondLexer(SchemeLexer):
- """
- Lexer for input to LilyPond, a text-based music typesetter.
-
- .. important::
-
- This lexer is meant to be used in conjunction with the ``lilypond`` style.
-
- .. versionadded:: 2.11
- """
- name = 'LilyPond'
- url = 'https://lilypond.org'
- aliases = ['lilypond']
- filenames = ['*.ly']
- mimetypes = []
-
- flags = re.DOTALL | re.MULTILINE
-
- # Because parsing LilyPond input is very tricky (and in fact
- # impossible without executing LilyPond when there is Scheme
- # code in the file), this lexer does not try to recognize
- # lexical modes. Instead, it catches the most frequent pieces
- # of syntax, and, above all, knows about many kinds of builtins.
-
- # In order to parse embedded Scheme, this lexer subclasses the SchemeLexer.
- # It redefines the 'root' state entirely, and adds a rule for #{ #}
- # to the 'value' state. The latter is used to parse a Scheme expression
- # after #.
-
- def get_tokens_unprocessed(self, text):
- """Highlight Scheme variables as LilyPond builtins when applicable."""
- for index, token, value in super().get_tokens_unprocessed(text):
- if token is Token.Name.Function or token is Token.Name.Variable:
- if value in scheme_functions:
- token = Token.Name.Builtin.SchemeFunction
- elif token is Token.Name.Builtin:
- token = Token.Name.Builtin.SchemeBuiltin
- yield index, token, value
-
- tokens = {
- "root": [
- # Whitespace.
- (r"\s+", Token.Text.Whitespace),
-
- # Multi-line comments. These are non-nestable.
- (r"%\{.*?%\}", Token.Comment.Multiline),
-
- # Simple comments.
- (r"%.*?$", Token.Comment.Single),
-
- # End of embedded LilyPond in Scheme.
- (r"#\}", Token.Punctuation, "#pop"),
-
- # Embedded Scheme, starting with # ("delayed"),
- # or $ (immediate). #@ and $@ are the lesser-known
- # "list splicing operators".
- (r"[#$]@?", Token.Punctuation, "value"),
-
- # Any kind of punctuation:
- # - sequential music: { },
- # - parallel music: << >>,
- # - voice separator: << \\ >>,
- # - chord: < >,
- # - bar check: |,
- # - dot in nested properties: \revert NoteHead.color,
- # - equals sign in assignments and lists for various commands:
- # \override Stem.color = red,
- # - comma as alternative syntax for lists: \time 3,3,2 4/4,
- # - colon in tremolos: c:32,
- # - double hyphen and underscore in lyrics: li -- ly -- pond __
- # (which must be preceded by ASCII whitespace)
- (r"""(?x)
- \\\\
- | (?<= \s ) (?: -- | __ )
- | [{}<>=.,:|]
- """, Token.Punctuation),
-
- # Pitches, with optional octavation marks, octave check,
- # and forced or cautionary accidental.
- (words(pitches, suffix=r"=?[',]*!?\??" + NAME_END_RE), Token.Pitch),
-
- # Strings, optionally with direction specifier.
- (r'[\-_^]?"', Token.String, "string"),
-
- # Numbers.
- (r"-?\d+\.\d+", Token.Number.Float), # 5. and .5 are not allowed
- (r"-?\d+/\d+", Token.Number.Fraction),
- # Integers, or durations with optional augmentation dots.
- # We have no way to distinguish these, so we highlight
- # them all as numbers.
- #
- # Normally, there is a space before the integer (being an
- # argument to a music function), which we check here. The
- # case without a space is handled below (as a fingering
- # number).
- (r"""(?x)
- (?<= \s ) -\d+
- | (?: (?: \d+ | \\breve | \\longa | \\maxima )
- \.* )
- """, Token.Number),
- # The '*' that separates a duration from its multiplier (the multiplier is highlighted as a fraction above).
- (r"\*", Token.Number),
-
- # Ties, slurs, manual beams.
- (r"[~()[\]]", Token.Name.Builtin.Articulation),
-
- # Predefined articulation shortcuts. A direction specifier is
- # required here.
- (r"[\-_^][>^_!.\-+]", Token.Name.Builtin.Articulation),
-
- # Fingering numbers, string numbers.
- (r"[\-_^]?\\?\d+", Token.Name.Builtin.Articulation),
-
- # Builtins.
- (builtin_words(keywords, "mandatory"), Token.Keyword),
- (builtin_words(pitch_language_names, "disallowed"), Token.Name.PitchLanguage),
- (builtin_words(clefs, "disallowed"), Token.Name.Builtin.Clef),
- (builtin_words(scales, "mandatory"), Token.Name.Builtin.Scale),
- (builtin_words(repeat_types, "disallowed"), Token.Name.Builtin.RepeatType),
- (builtin_words(units, "mandatory"), Token.Number),
- (builtin_words(chord_modifiers, "disallowed"), Token.ChordModifier),
- (builtin_words(music_functions, "mandatory"), Token.Name.Builtin.MusicFunction),
- (builtin_words(dynamics, "mandatory"), Token.Name.Builtin.Dynamic),
- # Those like slurs that don't take a backslash are covered above.
- (builtin_words(articulations, "mandatory"), Token.Name.Builtin.Articulation),
- (builtin_words(music_commands, "mandatory"), Token.Name.Builtin.MusicCommand),
- (builtin_words(markup_commands, "mandatory"), Token.Name.Builtin.MarkupCommand),
- (builtin_words(grobs, "disallowed"), Token.Name.Builtin.Grob),
- (builtin_words(translators, "disallowed"), Token.Name.Builtin.Translator),
- # Optional backslash because of \layout { \context { \Score ... } }.
- (builtin_words(contexts, "optional"), Token.Name.Builtin.Context),
- (builtin_words(context_properties, "disallowed"), Token.Name.Builtin.ContextProperty),
- (builtin_words(grob_properties, "disallowed"),
- Token.Name.Builtin.GrobProperty,
- "maybe-subproperties"),
- # Optional backslashes here because output definitions are wrappers
- # around modules. Concretely, you can do, e.g.,
- # \paper { oddHeaderMarkup = \evenHeaderMarkup }
- (builtin_words(paper_variables, "optional"), Token.Name.Builtin.PaperVariable),
- (builtin_words(header_variables, "optional"), Token.Name.Builtin.HeaderVariable),
-
- # Other backslashed-escaped names (like dereferencing a
- # music variable), possibly with a direction specifier.
- (r"[\-_^]?\\.+?" + NAME_END_RE, Token.Name.BackslashReference),
-
- # Definition of a variable. Support assignments to alist keys
- # (myAlist.my-key.my-nested-key = \markup \spam \eggs).
- (r"""(?x)
- (?: [^\W\d] | - )+
- (?= (?: [^\W\d] | [\-.] )* \s* = )
- """, Token.Name.Lvalue),
-
- # Virtually everything can appear in markup mode, so we highlight
- # as text. Try to get a complete word, or we might wrongly lex
- # a suffix that happens to be a builtin as a builtin (e.g., "myStaff").
- (r"([^\W\d]|-)+?" + NAME_END_RE, Token.Text),
- (r".", Token.Text),
- ],
- "string": [
- (r'"', Token.String, "#pop"),
- (r'\\.', Token.String.Escape),
- (r'[^\\"]+', Token.String),
- ],
- "value": [
- # Scan a LilyPond value, then pop back since we had a
- # complete expression.
- (r"#\{", Token.Punctuation, ("#pop", "root")),
- inherit,
- ],
- # Grob subproperties are undeclared and it would be tedious
- # to maintain them by hand. Instead, this state allows recognizing
- # everything that looks like a-known-property.foo.bar-baz as
- # one single property name.
- "maybe-subproperties": [
- (r"\s+", Token.Text.Whitespace),
- (r"(\.)((?:[^\W\d]|-)+?)" + NAME_END_RE,
- bygroups(Token.Punctuation, Token.Name.Builtin.GrobProperty)),
- default("#pop"),
- ]
- }
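Per the docstring above, the LilyPond lexer is meant to be paired with the 'lilypond' style; a sketch assuming Pygments 2.11 or newer, which ships both:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import LilyPondLexer

# A tiny LilyPond fragment; content is arbitrary.
ly = r"""\relative c' { c4 d e f | g1 \bar "|." }"""

# The style option selects the matching 'lilypond' color scheme.
print(highlight(ly, LilyPondLexer(), HtmlFormatter(style="lilypond")))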
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/lisp.py b/venv/lib/python3.11/site-packages/pygments/lexers/lisp.py
deleted file mode 100644
index 966b606..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/lisp.py
+++ /dev/null
@@ -1,2848 +0,0 @@
-"""
- pygments.lexers.lisp
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Lispy languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Error, Whitespace
-
-from pygments.lexers.python import PythonLexer
-
-from pygments.lexers._scheme_builtins import scheme_keywords, scheme_builtins
-
-__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
- 'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer',
- 'XtlangLexer', 'FennelLexer']
-
-
-class SchemeLexer(RegexLexer):
- """
- A Scheme lexer.
-
- This parser is checked with pastes from the LISP pastebin
- at http://paste.lisp.org/ to cover as much syntax as possible.
-
- It supports the full Scheme syntax as defined in R5RS.
-
- .. versionadded:: 0.6
- """
- name = 'Scheme'
- url = 'http://www.scheme-reports.org/'
- aliases = ['scheme', 'scm']
- filenames = ['*.scm', '*.ss']
- mimetypes = ['text/x-scheme', 'application/x-scheme']
-
- flags = re.DOTALL | re.MULTILINE
-
- # valid names for identifiers
- # strictly, the only restriction is that a name may not consist entirely
- # of digits, but this regex should be good enough for now
- valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
-
- # Use within verbose regexes
- token_end = r'''
- (?=
- \s # whitespace
- | ; # comment
- | \#[;|!] # fancy comments
- | [)\]] # end delimiters
- | $ # end of file
- )
- '''
-
- # Recognizing builtins.
- def get_tokens_unprocessed(self, text):
- for index, token, value in super().get_tokens_unprocessed(text):
- if token is Name.Function or token is Name.Variable:
- if value in scheme_keywords:
- yield index, Keyword, value
- elif value in scheme_builtins:
- yield index, Name.Builtin, value
- else:
- yield index, token, value
- else:
- yield index, token, value
-
- # Scheme has funky syntactic rules for numbers. These are all
- # valid number literals: 5.0e55|14, 14/13, -1+5j, +1@5, #b110,
- # #o#Iinf.0-nan.0i. This is adapted from the formal grammar given
- # in http://www.r6rs.org/final/r6rs.pdf, section 4.2.1. Take a
- # deep breath ...
-
- # It would be simpler if we could just not bother about invalid
- # numbers like #b35. But 'abcdef' without a #x prefix must not be
- # parsed as a number.
-
- number_rules = {}
- for base in (2, 8, 10, 16):
- if base == 2:
- digit = r'[01]'
- radix = r'( \#[bB] )'
- elif base == 8:
- digit = r'[0-7]'
- radix = r'( \#[oO] )'
- elif base == 10:
- digit = r'[0-9]'
- radix = r'( (\#[dD])? )'
- elif base == 16:
- digit = r'[0-9a-fA-F]'
- radix = r'( \#[xX] )'
-
- # Radix, optional exactness indicator.
- prefix = rf'''
- (
- {radix} (\#[iIeE])?
- | \#[iIeE] {radix}
- )
- '''
-
- # Simple unsigned number or fraction.
- ureal = rf'''
- (
- {digit}+
- ( / {digit}+ )?
- )
- '''
-
- # Add decimal numbers.
- if base == 10:
- decimal = r'''
- (
- # Decimal part
- (
- [0-9]+ ([.][0-9]*)?
- | [.][0-9]+
- )
-
- # Optional exponent
- (
- [eEsSfFdDlL] [+-]? [0-9]+
- )?
-
- # Optional mantissa width
- (
- \|[0-9]+
- )?
- )
- '''
- ureal = rf'''
- (
- {decimal} (?!/)
- | {ureal}
- )
- '''
-
- naninf = r'(nan.0|inf.0)'
-
- real = rf'''
- (
- [+-] {naninf} # Sign mandatory
- | [+-]? {ureal} # Sign optional
- )
- '''
-
- complex_ = rf'''
- (
- {real}? [+-] ({naninf}|{ureal})? i
- | {real} (@ {real})?
-
- )
- '''
-
- num = rf'''(?x)
- (
- {prefix}
- {complex_}
- )
- # Need to ensure we have a full token. 1+ is not a
- # number followed by something else, but a function
- # name.
- {token_end}
- '''
-
- number_rules[base] = num
-
- # If you have a headache now, say thanks to RnRS editors.
-
- # Doing it this way is simpler than splitting the number(10)
- # regex in a floating-point and a no-floating-point version.
- def decimal_cb(self, match):
- if '.' in match.group():
- token_type = Number.Float # includes [+-](inf|nan).0
- else:
- token_type = Number.Integer
- yield match.start(), token_type, match.group()
-
- # --
-
- # The 'scheme-root' state parses as many expressions as needed, always
- # delegating to the 'scheme-value' state. The latter parses one complete
- # expression and immediately pops back. This is needed for the LilyPondLexer.
- # When LilyPond encounters a #, it starts parsing embedded Scheme code, and
- # returns to normal syntax after one expression. We implement this
- # by letting the LilyPondLexer subclass the SchemeLexer. When it finds
- # the #, the LilyPondLexer goes to the 'value' state, which then pops back
- # to LilyPondLexer. The 'root' state of the SchemeLexer merely delegates the
- # work to 'scheme-root'; this is so that LilyPondLexer can inherit
- # 'scheme-root' and redefine 'root'.
-
- tokens = {
- 'root': [
- default('scheme-root'),
- ],
- 'scheme-root': [
- default('value'),
- ],
- 'value': [
- # the comments
- # and going to the end of the line
- (r';.*?$', Comment.Single),
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
- # commented form (entire sexpr following)
- (r'#;[([]', Comment, 'commented-form'),
- # commented datum
- (r'#;', Comment, 'commented-datum'),
- # signifies that the program text that follows is written with the
- # lexical and datum syntax described in r6rs
- (r'#!r6rs', Comment),
-
- # whitespaces - usually not relevant
- (r'\s+', Whitespace),
-
- # numbers
- (number_rules[2], Number.Bin, '#pop'),
- (number_rules[8], Number.Oct, '#pop'),
- (number_rules[10], decimal_cb, '#pop'),
- (number_rules[16], Number.Hex, '#pop'),
-
- # strings, symbols, keywords and characters
- (r'"', String, 'string'),
- (r"'" + valid_name, String.Symbol, "#pop"),
- (r'#:' + valid_name, Keyword.Declaration, '#pop'),
- (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char, "#pop"),
-
- # constants
- (r'(#t|#f)', Name.Constant, '#pop'),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # first variable in a quoted string like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable, '#pop'),
- (r"(?<=#\()" + valid_name, Name.Variable, '#pop'),
-
- # Functions -- note that this also catches variables
- # defined in let/let*, but there is little that can
- # be done about it.
- (r'(?<=\()' + valid_name, Name.Function, '#pop'),
-
- # find the remaining variables
- (valid_name, Name.Variable, '#pop'),
-
- # the famous parentheses!
-
- # Push scheme-root to enter a state that will parse as many things
- # as needed in the parentheses.
- (r'[([]', Punctuation, 'scheme-root'),
- # Pop one 'value', one 'scheme-root', and yet another 'value', so
- # we get back to a state parsing expressions as needed in the
- # enclosing context.
- (r'[)\]]', Punctuation, '#pop:3'),
- ],
- 'multiline-comment': [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form': [
- (r'[([]', Comment, '#push'),
- (r'[)\]]', Comment, '#pop'),
- (r'[^()[\]]+', Comment),
- ],
- 'commented-datum': [
- (rf'(?x).*?{token_end}', Comment, '#pop'),
- ],
- 'string': [
- # Pops back from 'string', and pops 'value' as well.
- ('"', String, '#pop:2'),
- # Hex escape sequences, R6RS-style.
- (r'\\x[0-9a-fA-F]+;', String.Escape),
- # We try R6RS style first, but fall back to Guile-style.
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- # Other special escape sequences implemented by Guile.
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\U[0-9a-fA-F]{6}', String.Escape),
- # Escape sequences are not overly standardized. Recognizing
- # a single character after the backslash should be good enough.
- # NB: we have DOTALL.
- (r'\\.', String.Escape),
- # The rest
- (r'[^\\"]+', String),
- ]
- }
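To see the effect of the get_tokens_unprocessed override above, a quick token dump (a sketch, assuming any Pygments release with this SchemeLexer): 'define' should come back as a Keyword token and '+' as Name.Builtin.

from pygments.lexers import SchemeLexer

# Dump the token stream for a small expression; the override promotes
# recognized names to Keyword (e.g. 'define') and Name.Builtin (e.g. '+').
for tok_type, value in SchemeLexer().get_tokens("(define (inc x) (+ x 1))"):
    print(tok_type, repr(value))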
-
-
-class CommonLispLexer(RegexLexer):
- """
- A Common Lisp lexer.
-
- .. versionadded:: 0.9
- """
- name = 'Common Lisp'
- url = 'https://lisp-lang.org/'
- aliases = ['common-lisp', 'cl', 'lisp']
- filenames = ['*.cl', '*.lisp']
- mimetypes = ['text/x-common-lisp']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- # couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
-
- # symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
-
- def __init__(self, **options):
- from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
- SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
- BUILTIN_TYPES, BUILTIN_CLASSES
- self.builtin_function = BUILTIN_FUNCTIONS
- self.special_forms = SPECIAL_FORMS
- self.macros = MACROS
- self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
- self.declarations = DECLARATIONS
- self.builtin_types = BUILTIN_TYPES
- self.builtin_classes = BUILTIN_CLASSES
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in self.builtin_function:
- yield index, Name.Builtin, value
- continue
- if value in self.special_forms:
- yield index, Keyword, value
- continue
- if value in self.macros:
- yield index, Name.Builtin, value
- continue
- if value in self.lambda_list_keywords:
- yield index, Keyword, value
- continue
- if value in self.declarations:
- yield index, Keyword, value
- continue
- if value in self.builtin_types:
- yield index, Keyword.Type, value
- continue
- if value in self.builtin_classes:
- yield index, Name.Class, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- default('body'),
- ],
- 'multiline-comment': [
- (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form': [
- (r'\(', Comment.Preproc, '#push'),
- (r'\)', Comment.Preproc, '#pop'),
- (r'[^()]+', Comment.Preproc),
- ],
- 'body': [
- # whitespace
- (r'\s+', Whitespace),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
-
- # encoding comment (?)
- (r'#\d*Y.*$', Comment.Special),
-
- # strings and characters
- (r'"(\\.|\\\n|[^"\\])*"', String),
- # quoting
- (r":" + symbol, String.Symbol),
- (r"::" + symbol, String.Symbol),
- (r":#" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
- terminated, Number.Float),
-
- # sharpsign strings and characters
- (r"#\\." + terminated, String.Char),
- (r"#\\" + symbol, String.Char),
-
- # vector
- (r'#\(', Operator, 'body'),
-
- # bitstring
- (r'#\d*\*[01]*', Literal.Other),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read-time and load-time evaluation
- (r'#[.,]', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#b[+-]?[01]+(/[01]+)?', Number.Bin),
-
- # octal rational
- (r'#o[+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#x[+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
-
- # complex
- (r'(#c)(\()', bygroups(Number, Punctuation), 'body'),
-
- # array
- (r'(#\d+a)(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # structure
- (r'(#s)(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # path
- (r'#p?"(\\.|[^"])*"', Literal.Other),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # read-time comment
- (r'#+nil' + terminated + r'\s*\(', Comment.Preproc, 'commented-form'),
-
- # read-time conditional
- (r'#[+-]', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
- (r'\*' + symbol + r'\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- }
-
- def analyse_text(text):
- """Competes with Visual Prolog on *.cl"""
- # This is a *really* good indicator (and not conflicting with Visual Prolog)
- # '(defun ' first on a line
- # section keyword alone on line e.g. 'clauses'
- if re.search(r'^\s*\(defun\s', text):
- return 0.8
- else:
- return 0
-
-
-class HyLexer(RegexLexer):
- """
- Lexer for Hy source code.
-
- .. versionadded:: 2.0
- """
- name = 'Hy'
- url = 'http://hylang.org/'
- aliases = ['hylang']
- filenames = ['*.hy']
- mimetypes = ['text/x-hy', 'application/x-hy']
-
- special_forms = (
- 'cond', 'for', '->', '->>', 'car',
- 'cdr', 'first', 'rest', 'let', 'when', 'unless',
- 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
- ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
- 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
- 'foreach', 'while',
- 'eval-and-compile', 'eval-when-compile'
- )
-
- declarations = (
- 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
- )
-
- hy_builtins = ()
-
- hy_core = (
- 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
- 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
- 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
- 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
- )
-
- builtins = hy_builtins + hy_core
-
- # valid names for identifiers
- # strictly, the only restriction is that a name may not consist entirely
- # of digits, but this regex should be good enough for now
- valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+'
-
- def _multi_escape(entries):
- return words(entries, suffix=' ')
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r',+', Text),
- (r'\s+', Whitespace),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
-
- # strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
- (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
-
- # keywords
- (r'::?' + valid_name, String.Symbol),
-
- # special operators
- (r'~@|[`\'#^~&@]', Operator),
-
- include('py-keywords'),
- include('py-builtins'),
-
- # highlight the special forms
- (_multi_escape(special_forms), Keyword),
-
- # Technically, only the special forms are 'keywords'. The problem
- # is that only treating them as keywords means that things like
- # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
- # and weird for most styles. So, as a compromise we're going to
- # highlight them as Keyword.Declarations.
- (_multi_escape(declarations), Keyword.Declaration),
-
- # highlight the builtins
- (_multi_escape(builtins), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Hy accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Hy accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
-
- ],
- 'py-keywords': PythonLexer.tokens['keywords'],
- 'py-builtins': PythonLexer.tokens['builtins'],
- }
-
- def analyse_text(text):
- if '(import ' in text or '(defn ' in text:
- return 0.9
-
-
-class RacketLexer(RegexLexer):
- """
- Lexer for Racket source code (formerly
- known as PLT Scheme).
-
- .. versionadded:: 1.6
- """
-
- name = 'Racket'
- url = 'http://racket-lang.org/'
- aliases = ['racket', 'rkt']
- filenames = ['*.rkt', '*.rktd', '*.rktl']
- mimetypes = ['text/x-racket', 'application/x-racket']
-
- # Generated by example.rkt
- _keywords = (
- '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
- '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
- '#%printing-module-begin', '#%provide', '#%require',
- '#%stratified-body', '#%top', '#%top-interaction',
- '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i',
- '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract',
- 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*',
- 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin',
- 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m',
- 'case-lambda', 'class', 'class*', 'class-field-accessor',
- 'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
- 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
- 'cond', 'cons/dc', 'contract', 'contract-out', 'contract-struct',
- 'contracted', 'define', 'define-compound-unit',
- 'define-compound-unit/infer', 'define-contract-struct',
- 'define-custom-hash-types', 'define-custom-set-types',
- 'define-for-syntax', 'define-local-member-name', 'define-logger',
- 'define-match-expander', 'define-member-name',
- 'define-module-boundary-contract', 'define-namespace-anchor',
- 'define-opt/c', 'define-sequence-syntax', 'define-serializable-class',
- 'define-serializable-class*', 'define-signature',
- 'define-signature-form', 'define-struct', 'define-struct/contract',
- 'define-struct/derived', 'define-syntax', 'define-syntax-rule',
- 'define-syntaxes', 'define-unit', 'define-unit-binding',
- 'define-unit-from-context', 'define-unit/contract',
- 'define-unit/new-import-export', 'define-unit/s', 'define-values',
- 'define-values-for-export', 'define-values-for-syntax',
- 'define-values/invoke-unit', 'define-values/invoke-unit/infer',
- 'define/augment', 'define/augment-final', 'define/augride',
- 'define/contract', 'define/final-prop', 'define/match',
- 'define/overment', 'define/override', 'define/override-final',
- 'define/private', 'define/public', 'define/public-final',
- 'define/pubment', 'define/subexpression-pos-prop',
- 'define/subexpression-pos-prop/name', 'delay', 'delay/idle',
- 'delay/name', 'delay/strict', 'delay/sync', 'delay/thread', 'do',
- 'else', 'except', 'except-in', 'except-out', 'export', 'extends',
- 'failure-cont', 'false', 'false/c', 'field', 'field-bound?', 'file',
- 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
- 'for*/async', 'for*/first', 'for*/fold', 'for*/fold/derived',
- 'for*/hash', 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list',
- 'for*/lists', 'for*/mutable-set', 'for*/mutable-seteq',
- 'for*/mutable-seteqv', 'for*/or', 'for*/product', 'for*/set',
- 'for*/seteq', 'for*/seteqv', 'for*/stream', 'for*/sum', 'for*/vector',
- 'for*/weak-set', 'for*/weak-seteq', 'for*/weak-seteqv', 'for-label',
- 'for-meta', 'for-syntax', 'for-template', 'for/and', 'for/async',
- 'for/first', 'for/fold', 'for/fold/derived', 'for/hash', 'for/hasheq',
- 'for/hasheqv', 'for/last', 'for/list', 'for/lists', 'for/mutable-set',
- 'for/mutable-seteq', 'for/mutable-seteqv', 'for/or', 'for/product',
- 'for/set', 'for/seteq', 'for/seteqv', 'for/stream', 'for/sum',
- 'for/vector', 'for/weak-set', 'for/weak-seteq', 'for/weak-seteqv',
- 'gen:custom-write', 'gen:dict', 'gen:equal+hash', 'gen:set',
- 'gen:stream', 'generic', 'get-field', 'hash/dc', 'if', 'implies',
- 'import', 'include', 'include-at/relative-to',
- 'include-at/relative-to/reader', 'include/reader', 'inherit',
- 'inherit-field', 'inherit/inner', 'inherit/super', 'init',
- 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
- 'instantiate', 'interface', 'interface*', 'invariant-assertion',
- 'invoke-unit', 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*',
- 'let*-values', 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc',
- 'let/ec', 'letrec', 'letrec-syntax', 'letrec-syntaxes',
- 'letrec-syntaxes+values', 'letrec-values', 'lib', 'link', 'local',
- 'local-require', 'log-debug', 'log-error', 'log-fatal', 'log-info',
- 'log-warning', 'match', 'match*', 'match*/derived', 'match-define',
- 'match-define-values', 'match-lambda', 'match-lambda*',
- 'match-lambda**', 'match-let', 'match-let*', 'match-let*-values',
- 'match-let-values', 'match-letrec', 'match-letrec-values',
- 'match/derived', 'match/values', 'member-name-key', 'mixin', 'module',
- 'module*', 'module+', 'nand', 'new', 'nor', 'object-contract',
- 'object/c', 'only', 'only-in', 'only-meta-in', 'open', 'opt/c', 'or',
- 'overment', 'overment*', 'override', 'override*', 'override-final',
- 'override-final*', 'parameterize', 'parameterize*',
- 'parameterize-break', 'parametric->/c', 'place', 'place*',
- 'place/context', 'planet', 'prefix', 'prefix-in', 'prefix-out',
- 'private', 'private*', 'prompt-tag/c', 'protect-out', 'provide',
- 'provide-signature-elements', 'provide/contract', 'public', 'public*',
- 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
- 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
- 'quote-syntax/prune', 'recontract-out', 'recursive-contract',
- 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
- 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
- 'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
- 'set-field!', 'shared', 'stream', 'stream*', 'stream-cons', 'struct',
- 'struct*', 'struct-copy', 'struct-field-index', 'struct-out',
- 'struct/c', 'struct/ctc', 'struct/dc', 'submod', 'super',
- 'super-instantiate', 'super-make-object', 'super-new', 'syntax',
- 'syntax-case', 'syntax-case*', 'syntax-id-rules', 'syntax-rules',
- 'syntax/loc', 'tag', 'this', 'this%', 'thunk', 'thunk*', 'time',
- 'unconstrained-domain->', 'unit', 'unit-from-context', 'unit/c',
- 'unit/new-import-export', 'unit/s', 'unless', 'unquote',
- 'unquote-splicing', 'unsyntax', 'unsyntax-splicing', 'values/drop',
- 'when', 'with-continuation-mark', 'with-contract',
- 'with-contract-continuation-mark', 'with-handlers', 'with-handlers*',
- 'with-method', 'with-syntax', 'λ'
- )
-
- # Generated by example.rkt
- _builtins = (
- '*', '*list/c', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c',
- '>', '>/c', '>=', '>=/c', 'abort-current-continuation', 'abs',
- 'absolute-path?', 'acos', 'add-between', 'add1', 'alarm-evt',
- 'always-evt', 'and/c', 'andmap', 'angle', 'any/c', 'append', 'append*',
- 'append-map', 'apply', 'argmax', 'argmin', 'arithmetic-shift',
- 'arity-at-least', 'arity-at-least-value', 'arity-at-least?',
- 'arity-checking-wrapper', 'arity-includes?', 'arity=?',
- 'arrow-contract-info', 'arrow-contract-info-accepts-arglist',
- 'arrow-contract-info-chaperone-procedure',
- 'arrow-contract-info-check-first-order', 'arrow-contract-info?',
- 'asin', 'assf', 'assoc', 'assq', 'assv', 'atan',
- 'bad-number-of-results', 'banner', 'base->-doms/c', 'base->-rngs/c',
- 'base->?', 'between/c', 'bitwise-and', 'bitwise-bit-field',
- 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', 'bitwise-xor',
- 'blame-add-car-context', 'blame-add-cdr-context', 'blame-add-context',
- 'blame-add-missing-party', 'blame-add-nth-arg-context',
- 'blame-add-range-context', 'blame-add-unknown-context',
- 'blame-context', 'blame-contract', 'blame-fmt->-string',
- 'blame-missing-party?', 'blame-negative', 'blame-original?',
- 'blame-positive', 'blame-replace-negative', 'blame-source',
- 'blame-swap', 'blame-swapped?', 'blame-update', 'blame-value',
- 'blame?', 'boolean=?', 'boolean?', 'bound-identifier=?', 'box',
- 'box-cas!', 'box-immutable', 'box-immutable/c', 'box/c', 'box?',
- 'break-enabled', 'break-parameterization?', 'break-thread',
- 'build-chaperone-contract-property', 'build-compound-type-name',
- 'build-contract-property', 'build-flat-contract-property',
- 'build-list', 'build-path', 'build-path/convention-type',
- 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
- 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
- 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
- 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
- 'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
- 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
- 'bytes-converter?', 'bytes-copy', 'bytes-copy!',
- 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
- 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
- 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
- 'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
- 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
- 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
- 'call-in-nested-thread', 'call-with-atomic-output-file',
- 'call-with-break-parameterization',
- 'call-with-composable-continuation', 'call-with-continuation-barrier',
- 'call-with-continuation-prompt', 'call-with-current-continuation',
- 'call-with-default-reading-parameterization',
- 'call-with-escape-continuation', 'call-with-exception-handler',
- 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
- 'call-with-input-bytes', 'call-with-input-file',
- 'call-with-input-file*', 'call-with-input-string',
- 'call-with-output-bytes', 'call-with-output-file',
- 'call-with-output-file*', 'call-with-output-string',
- 'call-with-parameterization', 'call-with-semaphore',
- 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
- 'call/ec', 'car', 'cartesian-product', 'cdaaar', 'cdaadr', 'cdaar',
- 'cdadar', 'cdaddr', 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar',
- 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get',
- 'channel-put', 'channel-put-evt', 'channel-put-evt?',
- 'channel-try-get', 'channel/c', 'channel?', 'chaperone-box',
- 'chaperone-channel', 'chaperone-continuation-mark-key',
- 'chaperone-contract-property?', 'chaperone-contract?', 'chaperone-evt',
- 'chaperone-hash', 'chaperone-hash-set', 'chaperone-of?',
- 'chaperone-procedure', 'chaperone-procedure*', 'chaperone-prompt-tag',
- 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
- 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
- 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
- 'char-downcase', 'char-foldcase', 'char-general-category',
- 'char-graphic?', 'char-in', 'char-in/c', 'char-iso-control?',
- 'char-lower-case?', 'char-numeric?', 'char-punctuation?',
- 'char-ready?', 'char-symbolic?', 'char-title-case?', 'char-titlecase',
- 'char-upcase', 'char-upper-case?', 'char-utf-8-length',
- 'char-whitespace?', 'char<=?', 'char<?', 'char=?', 'char>=?', 'char>?',
- 'char?', 'check-duplicate-identifier', 'check-duplicates',
- 'checked-procedure-check-and-extract', 'choice-evt',
- 'class->interface', 'class-info', 'class-seal', 'class-unseal',
- 'class?', 'cleanse-path', 'close-input-port', 'close-output-port',
- 'coerce-chaperone-contract', 'coerce-chaperone-contracts',
- 'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
- 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
- 'collection-file-path', 'collection-path', 'combinations', 'compile',
- 'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
- 'compile-enforce-module-constants', 'compile-syntax',
- 'compiled-expression-recompile', 'compiled-expression?',
- 'compiled-module-expression?', 'complete-path?', 'complex?', 'compose',
- 'compose1', 'conjoin', 'conjugate', 'cons', 'cons/c', 'cons?', 'const',
- 'continuation-mark-key/c', 'continuation-mark-key?',
- 'continuation-mark-set->context', 'continuation-mark-set->list',
- 'continuation-mark-set->list*', 'continuation-mark-set-first',
- 'continuation-mark-set?', 'continuation-marks',
- 'continuation-prompt-available?', 'continuation-prompt-tag?',
- 'continuation?', 'contract-continuation-mark-key',
- 'contract-custom-write-property-proc', 'contract-exercise',
- 'contract-first-order', 'contract-first-order-passes?',
- 'contract-late-neg-projection', 'contract-name', 'contract-proc',
- 'contract-projection', 'contract-property?',
- 'contract-random-generate', 'contract-random-generate-fail',
- 'contract-random-generate-fail?',
- 'contract-random-generate-get-current-environment',
- 'contract-random-generate-stash', 'contract-random-generate/choose',
- 'contract-stronger?', 'contract-struct-exercise',
- 'contract-struct-generate', 'contract-struct-late-neg-projection',
- 'contract-struct-list-contract?', 'contract-val-first-projection',
- 'contract?', 'convert-stream', 'copy-directory/files', 'copy-file',
- 'copy-port', 'cos', 'cosh', 'count', 'current-blame-format',
- 'current-break-parameterization', 'current-code-inspector',
- 'current-command-line-arguments', 'current-compile',
- 'current-compiled-file-roots', 'current-continuation-marks',
- 'current-contract-region', 'current-custodian', 'current-directory',
- 'current-directory-for-user', 'current-drive',
- 'current-environment-variables', 'current-error-port', 'current-eval',
- 'current-evt-pseudo-random-generator',
- 'current-force-delete-permissions', 'current-future',
- 'current-gc-milliseconds', 'current-get-interaction-input-port',
- 'current-inexact-milliseconds', 'current-input-port',
- 'current-inspector', 'current-library-collection-links',
- 'current-library-collection-paths', 'current-load',
- 'current-load-extension', 'current-load-relative-directory',
- 'current-load/use-compiled', 'current-locale', 'current-logger',
- 'current-memory-use', 'current-milliseconds',
- 'current-module-declare-name', 'current-module-declare-source',
- 'current-module-name-resolver', 'current-module-path-for-load',
- 'current-namespace', 'current-output-port', 'current-parameterization',
- 'current-plumber', 'current-preserved-thread-cell-values',
- 'current-print', 'current-process-milliseconds', 'current-prompt-read',
- 'current-pseudo-random-generator', 'current-read-interaction',
- 'current-reader-guard', 'current-readtable', 'current-seconds',
- 'current-security-guard', 'current-subprocess-custodian-mode',
- 'current-thread', 'current-thread-group',
- 'current-thread-initial-stack-size',
- 'current-write-relative-directory', 'curry', 'curryr',
- 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory',
- 'custodian-managed-list', 'custodian-memory-accounting-available?',
- 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
- 'custom-print-quotable-accessor', 'custom-print-quotable?',
- 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?',
- 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?',
- 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month',
- 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year',
- 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal',
- 'default-continuation-prompt-tag', 'degrees->radians',
- 'delete-directory', 'delete-directory/files', 'delete-file',
- 'denominator', 'dict->list', 'dict-can-functional-set?',
- 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy',
- 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?',
- 'dict-implements/c', 'dict-implements?', 'dict-iter-contract',
- 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next',
- 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map',
- 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
- 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
- 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
- 'dict?', 'directory-exists?', 'directory-list', 'disjoin', 'display',
- 'display-lines', 'display-lines-to-file', 'display-to-file',
- 'displayln', 'double-flonum?', 'drop', 'drop-common-prefix',
- 'drop-right', 'dropf', 'dropf-right', 'dump-memory-stats',
- 'dup-input-port', 'dup-output-port', 'dynamic->*', 'dynamic-get-field',
- 'dynamic-object/c', 'dynamic-place', 'dynamic-place*',
- 'dynamic-require', 'dynamic-require-for-syntax', 'dynamic-send',
- 'dynamic-set-field!', 'dynamic-wind', 'eighth', 'empty',
- 'empty-sequence', 'empty-stream', 'empty?',
- 'environment-variables-copy', 'environment-variables-names',
- 'environment-variables-ref', 'environment-variables-set!',
- 'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
- 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val',
- 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val',
- 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code',
- 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error',
- 'error-display-handler', 'error-escape-handler',
- 'error-print-context-length', 'error-print-source-location',
- 'error-print-width', 'error-value->string-handler', 'eval',
- 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?',
- 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?',
- 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round',
- 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit',
- 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message',
- 'exn:break', 'exn:break-continuation', 'exn:break:hang-up',
- 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?',
- 'exn:break?', 'exn:fail', 'exn:fail:contract',
- 'exn:fail:contract:arity', 'exn:fail:contract:arity?',
- 'exn:fail:contract:blame', 'exn:fail:contract:blame-object',
- 'exn:fail:contract:blame?', 'exn:fail:contract:continuation',
- 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero',
- 'exn:fail:contract:divide-by-zero?',
- 'exn:fail:contract:non-fixnum-result',
- 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable',
- 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?',
- 'exn:fail:contract?', 'exn:fail:filesystem',
- 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno',
- 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists',
- 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module',
- 'exn:fail:filesystem:missing-module-path',
- 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version',
- 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
- 'exn:fail:network', 'exn:fail:network:errno',
- 'exn:fail:network:errno-errno', 'exn:fail:network:errno?',
- 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?',
- 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read',
- 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
- 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?',
- 'exn:fail:syntax', 'exn:fail:syntax-exprs',
- 'exn:fail:syntax:missing-module',
- 'exn:fail:syntax:missing-module-path',
- 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound',
- 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported',
- 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
- 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor',
- 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
- 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
- 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
- 'explode-path', 'expt', 'externalizable<%>', 'failure-result/c',
- 'false?', 'field-names', 'fifth', 'file->bytes', 'file->bytes-lines',
- 'file->lines', 'file->list', 'file->string', 'file->value',
- 'file-exists?', 'file-name-from-path', 'file-or-directory-identity',
- 'file-or-directory-modify-seconds', 'file-or-directory-permissions',
- 'file-position', 'file-position*', 'file-size',
- 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
- 'filename-extension', 'filesystem-change-evt',
- 'filesystem-change-evt-cancel', 'filesystem-change-evt?',
- 'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
- 'filter-read-input-port', 'find-executable-path', 'find-files',
- 'find-library-collection-links', 'find-library-collection-paths',
- 'find-relative-path', 'find-system-path', 'findf', 'first',
- 'first-or/c', 'fixnum?', 'flat-contract', 'flat-contract-predicate',
- 'flat-contract-property?', 'flat-contract?', 'flat-named-contract',
- 'flatten', 'floating-point-bytes->real', 'flonum?', 'floor',
- 'flush-output', 'fold-files', 'foldl', 'foldr', 'for-each', 'force',
- 'format', 'fourth', 'fprintf', 'free-identifier=?',
- 'free-label-identifier=?', 'free-template-identifier=?',
- 'free-transformer-identifier=?', 'fsemaphore-count', 'fsemaphore-post',
- 'fsemaphore-try-wait?', 'fsemaphore-wait', 'fsemaphore?', 'future',
- 'future?', 'futures-enabled?', 'gcd', 'generate-member-key',
- 'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
- 'get-output-bytes', 'get-output-string', 'get-preference',
- 'get/build-late-neg-projection', 'get/build-val-first-projection',
- 'getenv', 'global-port-print-handler', 'group-by', 'group-execute-bit',
- 'group-read-bit', 'group-write-bit', 'guard-evt', 'handle-evt',
- 'handle-evt?', 'has-blame?', 'has-contract?', 'hash', 'hash->list',
- 'hash-clear', 'hash-clear!', 'hash-copy', 'hash-copy-clear',
- 'hash-count', 'hash-empty?', 'hash-eq?', 'hash-equal?', 'hash-eqv?',
- 'hash-for-each', 'hash-has-key?', 'hash-iterate-first',
- 'hash-iterate-key', 'hash-iterate-key+value', 'hash-iterate-next',
- 'hash-iterate-pair', 'hash-iterate-value', 'hash-keys', 'hash-map',
- 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
- 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
- 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
- 'hash?', 'hasheq', 'hasheqv', 'identifier-binding',
- 'identifier-binding-symbol', 'identifier-label-binding',
- 'identifier-prune-lexical-context',
- 'identifier-prune-to-source-module',
- 'identifier-remove-from-definition-context',
- 'identifier-template-binding', 'identifier-transformer-binding',
- 'identifier?', 'identity', 'if/c', 'imag-part', 'immutable?',
- 'impersonate-box', 'impersonate-channel',
- 'impersonate-continuation-mark-key', 'impersonate-hash',
- 'impersonate-hash-set', 'impersonate-procedure',
- 'impersonate-procedure*', 'impersonate-prompt-tag',
- 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
- 'impersonator-ephemeron', 'impersonator-of?',
- 'impersonator-prop:application-mark', 'impersonator-prop:blame',
- 'impersonator-prop:contracted',
- 'impersonator-property-accessor-procedure?', 'impersonator-property?',
- 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
- 'in-bytes-lines', 'in-combinations', 'in-cycle', 'in-dict',
- 'in-dict-keys', 'in-dict-pairs', 'in-dict-values', 'in-directory',
- 'in-hash', 'in-hash-keys', 'in-hash-pairs', 'in-hash-values',
- 'in-immutable-hash', 'in-immutable-hash-keys',
- 'in-immutable-hash-pairs', 'in-immutable-hash-values',
- 'in-immutable-set', 'in-indexed', 'in-input-port-bytes',
- 'in-input-port-chars', 'in-lines', 'in-list', 'in-mlist',
- 'in-mutable-hash', 'in-mutable-hash-keys', 'in-mutable-hash-pairs',
- 'in-mutable-hash-values', 'in-mutable-set', 'in-naturals',
- 'in-parallel', 'in-permutations', 'in-port', 'in-producer', 'in-range',
- 'in-sequences', 'in-set', 'in-slice', 'in-stream', 'in-string',
- 'in-syntax', 'in-value', 'in-values*-sequence', 'in-values-sequence',
- 'in-vector', 'in-weak-hash', 'in-weak-hash-keys', 'in-weak-hash-pairs',
- 'in-weak-hash-values', 'in-weak-set', 'inexact->exact',
- 'inexact-real?', 'inexact?', 'infinite?', 'input-port-append',
- 'input-port?', 'inspector?', 'instanceof/c', 'integer->char',
- 'integer->integer-bytes', 'integer-bytes->integer', 'integer-in',
- 'integer-length', 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
- 'interface->method-names', 'interface-extension?', 'interface?',
- 'internal-definition-context-binding-identifiers',
- 'internal-definition-context-introduce',
- 'internal-definition-context-seal', 'internal-definition-context?',
- 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
- 'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
- 'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
- 'list*', 'list*of', 'list->bytes', 'list->mutable-set',
- 'list->mutable-seteq', 'list->mutable-seteqv', 'list->set',
- 'list->seteq', 'list->seteqv', 'list->string', 'list->vector',
- 'list->weak-set', 'list->weak-seteq', 'list->weak-seteqv',
- 'list-contract?', 'list-prefix?', 'list-ref', 'list-set', 'list-tail',
- 'list-update', 'list/c', 'list?', 'listen-port-number?', 'listof',
- 'load', 'load-extension', 'load-on-demand-enabled', 'load-relative',
- 'load-relative-extension', 'load/cd', 'load/use-compiled',
- 'local-expand', 'local-expand/capture-lifts',
- 'local-transformer-expand', 'local-transformer-expand/capture-lifts',
- 'locale-string-encoding', 'log', 'log-all-levels', 'log-level-evt',
- 'log-level?', 'log-max-level', 'log-message', 'log-receiver?',
- 'logger-name', 'logger?', 'magnitude', 'make-arity-at-least',
- 'make-base-empty-namespace', 'make-base-namespace', 'make-bytes',
- 'make-channel', 'make-chaperone-contract',
- 'make-continuation-mark-key', 'make-continuation-prompt-tag',
- 'make-contract', 'make-custodian', 'make-custodian-box',
- 'make-custom-hash', 'make-custom-hash-types', 'make-custom-set',
- 'make-custom-set-types', 'make-date', 'make-date*',
- 'make-derived-parameter', 'make-directory', 'make-directory*',
- 'make-do-sequence', 'make-empty-namespace',
- 'make-environment-variables', 'make-ephemeron', 'make-exn',
- 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate',
- 'make-exn:fail', 'make-exn:fail:contract',
- 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame',
- 'make-exn:fail:contract:continuation',
- 'make-exn:fail:contract:divide-by-zero',
- 'make-exn:fail:contract:non-fixnum-result',
- 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
- 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists',
- 'make-exn:fail:filesystem:missing-module',
- 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
- 'make-exn:fail:network:errno', 'make-exn:fail:object',
- 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
- 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
- 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module',
- 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported',
- 'make-exn:fail:user', 'make-file-or-directory-link',
- 'make-flat-contract', 'make-fsemaphore', 'make-generic',
- 'make-handle-get-preference-locked', 'make-hash',
- 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder',
- 'make-hasheqv', 'make-hasheqv-placeholder',
- 'make-immutable-custom-hash', 'make-immutable-hash',
- 'make-immutable-hasheq', 'make-immutable-hasheqv',
- 'make-impersonator-property', 'make-input-port',
- 'make-input-port/read-to-peek', 'make-inspector',
- 'make-keyword-procedure', 'make-known-char-range-list',
- 'make-limited-input-port', 'make-list', 'make-lock-file-name',
- 'make-log-receiver', 'make-logger', 'make-mixin-contract',
- 'make-mutable-custom-set', 'make-none/c', 'make-object',
- 'make-output-port', 'make-parameter', 'make-parent-directory*',
- 'make-phantom-bytes', 'make-pipe', 'make-pipe-with-specials',
- 'make-placeholder', 'make-plumber', 'make-polar', 'make-prefab-struct',
- 'make-primitive-class', 'make-proj-contract',
- 'make-pseudo-random-generator', 'make-reader-graph', 'make-readtable',
- 'make-rectangular', 'make-rename-transformer',
- 'make-resolved-module-path', 'make-security-guard', 'make-semaphore',
- 'make-set!-transformer', 'make-shared-bytes', 'make-sibling-inspector',
- 'make-special-comment', 'make-srcloc', 'make-string',
- 'make-struct-field-accessor', 'make-struct-field-mutator',
- 'make-struct-type', 'make-struct-type-property',
- 'make-syntax-delta-introducer', 'make-syntax-introducer',
- 'make-temporary-file', 'make-tentative-pretty-print-output-port',
- 'make-thread-cell', 'make-thread-group', 'make-vector',
- 'make-weak-box', 'make-weak-custom-hash', 'make-weak-custom-set',
- 'make-weak-hash', 'make-weak-hasheq', 'make-weak-hasheqv',
- 'make-will-executor', 'map', 'match-equality-test',
- 'matches-arity-exactly?', 'max', 'mcar', 'mcdr', 'mcons', 'member',
- 'member-name-key-hash-code', 'member-name-key=?', 'member-name-key?',
- 'memf', 'memq', 'memv', 'merge-input', 'method-in-interface?', 'min',
- 'mixin-contract', 'module->exports', 'module->imports',
- 'module->language-info', 'module->namespace',
- 'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
- 'module-compiled-imports', 'module-compiled-language-info',
- 'module-compiled-name', 'module-compiled-submodules',
- 'module-declared?', 'module-path-index-join',
- 'module-path-index-resolve', 'module-path-index-split',
- 'module-path-index-submodule', 'module-path-index?', 'module-path?',
- 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?',
- 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th',
- 'nack-guard-evt', 'namespace-anchor->empty-namespace',
- 'namespace-anchor->namespace', 'namespace-anchor?',
- 'namespace-attach-module', 'namespace-attach-module-declaration',
- 'namespace-base-phase', 'namespace-mapped-symbols',
- 'namespace-module-identifier', 'namespace-module-registry',
- 'namespace-require', 'namespace-require/constant',
- 'namespace-require/copy', 'namespace-require/expansion-time',
- 'namespace-set-variable-value!', 'namespace-symbol->identifier',
- 'namespace-syntax-introduce', 'namespace-undefine-variable!',
- 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
- 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
- 'new-∀/c', 'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
- 'non-empty-string?', 'none/c', 'normal-case-path', 'normalize-arity',
- 'normalize-path', 'normalized-arity?', 'not', 'not/c', 'null', 'null?',
- 'number->string', 'number?', 'numerator', 'object%', 'object->vector',
- 'object-info', 'object-interface', 'object-method-arity-includes?',
- 'object-name', 'object-or-false=?', 'object=?', 'object?', 'odd?',
- 'one-of/c', 'open-input-bytes', 'open-input-file',
- 'open-input-output-file', 'open-input-string', 'open-output-bytes',
- 'open-output-file', 'open-output-nowhere', 'open-output-string',
- 'or/c', 'order-of-magnitude', 'ormap', 'other-execute-bit',
- 'other-read-bit', 'other-write-bit', 'output-port?', 'pair?',
- 'parameter-procedure=?', 'parameter/c', 'parameter?',
- 'parameterization?', 'parse-command-line', 'partition', 'path->bytes',
- 'path->complete-path', 'path->directory-path', 'path->string',
- 'path-add-suffix', 'path-convention-type', 'path-element->bytes',
- 'path-element->string', 'path-element?', 'path-for-some-system?',
- 'path-list-string->path-list', 'path-only', 'path-replace-suffix',
- 'path-string?', 'path<?', 'path?', 'pathlist-closure', 'peek-byte',
- 'peek-byte-or-special', 'peek-bytes', 'peek-bytes!', 'peek-bytes!-evt',
- 'peek-bytes-avail!', 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
- 'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
- 'peek-char-or-special', 'peek-string', 'peek-string!',
- 'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
- 'permutations', 'phantom-bytes?', 'pi', 'pi.f', 'pipe-content-length',
- 'place-break', 'place-channel', 'place-channel-get',
- 'place-channel-put', 'place-channel-put/get', 'place-channel?',
- 'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
- 'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
- 'placeholder-get', 'placeholder-set!', 'placeholder?',
- 'plumber-add-flush!', 'plumber-flush-all',
- 'plumber-flush-handle-remove!', 'plumber-flush-handle?', 'plumber?',
- 'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
- 'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
- 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
- 'port-counts-lines?', 'port-display-handler', 'port-file-identity',
- 'port-file-unlock', 'port-next-location', 'port-number?',
- 'port-print-handler', 'port-progress-evt',
- 'port-provides-progress-evts?', 'port-read-handler',
- 'port-try-file-lock?', 'port-write-handler', 'port-writes-atomic?',
- 'port-writes-special?', 'port?', 'positive?', 'predicate/c',
- 'prefab-key->struct-type', 'prefab-key?', 'prefab-struct-key',
- 'preferences-lock-file-mode', 'pregexp', 'pregexp?', 'pretty-display',
- 'pretty-format', 'pretty-print', 'pretty-print-.-symbol-without-bars',
- 'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
- 'pretty-print-current-style-table', 'pretty-print-depth',
- 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
- 'pretty-print-handler', 'pretty-print-newline',
- 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook',
- 'pretty-print-print-hook', 'pretty-print-print-line',
- 'pretty-print-remap-stylable', 'pretty-print-show-inexactness',
- 'pretty-print-size-hook', 'pretty-print-style-table?',
- 'pretty-printing', 'pretty-write', 'primitive-closure?',
- 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression',
- 'print-boolean-long-form', 'print-box', 'print-graph',
- 'print-hash-table', 'print-mpair-curly-braces',
- 'print-pair-curly-braces', 'print-reader-abbreviations',
- 'print-struct', 'print-syntax-width', 'print-unreadable',
- 'print-vector-length', 'printable/c', 'printable<%>', 'printf',
- 'println', 'procedure->method', 'procedure-arity',
- 'procedure-arity-includes/c', 'procedure-arity-includes?',
- 'procedure-arity?', 'procedure-closure-contents-eq?',
- 'procedure-extract-target', 'procedure-keywords',
- 'procedure-reduce-arity', 'procedure-reduce-keyword-arity',
- 'procedure-rename', 'procedure-result-arity', 'procedure-specialize',
- 'procedure-struct-type?', 'procedure?', 'process', 'process*',
- 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
- 'promise-forced?', 'promise-running?', 'promise/c', 'promise/name?',
- 'promise?', 'prop:arity-string', 'prop:arrow-contract',
- 'prop:arrow-contract-get-info', 'prop:arrow-contract?', 'prop:blame',
- 'prop:chaperone-contract', 'prop:checked-procedure', 'prop:contract',
- 'prop:contracted', 'prop:custom-print-quotable', 'prop:custom-write',
- 'prop:dict', 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
- 'prop:exn:missing-module', 'prop:exn:srclocs',
- 'prop:expansion-contexts', 'prop:flat-contract',
- 'prop:impersonator-of', 'prop:input-port',
- 'prop:liberal-define-context', 'prop:object-name',
- 'prop:opt-chaperone-contract', 'prop:opt-chaperone-contract-get-test',
- 'prop:opt-chaperone-contract?', 'prop:orc-contract',
- 'prop:orc-contract-get-subcontracts', 'prop:orc-contract?',
- 'prop:output-port', 'prop:place-location', 'prop:procedure',
- 'prop:recursive-contract', 'prop:recursive-contract-unroll',
- 'prop:recursive-contract?', 'prop:rename-transformer', 'prop:sequence',
- 'prop:set!-transformer', 'prop:stream', 'proper-subset?',
- 'pseudo-random-generator->vector', 'pseudo-random-generator-vector?',
- 'pseudo-random-generator?', 'put-preferences', 'putenv', 'quotient',
- 'quotient/remainder', 'radians->degrees', 'raise',
- 'raise-argument-error', 'raise-arguments-error', 'raise-arity-error',
- 'raise-blame-error', 'raise-contract-error', 'raise-mismatch-error',
- 'raise-not-cons-blame-error', 'raise-range-error',
- 'raise-result-error', 'raise-syntax-error', 'raise-type-error',
- 'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
- 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box',
- 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph',
- 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote',
- 'read-accept-reader', 'read-byte', 'read-byte-or-special',
- 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
- 'read-bytes-avail!*', 'read-bytes-avail!-evt',
- 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
- 'read-bytes-line-evt', 'read-case-sensitive', 'read-cdot', 'read-char',
- 'read-char-or-special', 'read-curly-brace-as-paren',
- 'read-curly-brace-with-tag', 'read-decimal-as-inexact',
- 'read-eval-print-loop', 'read-language', 'read-line', 'read-line-evt',
- 'read-on-demand-source', 'read-square-bracket-as-paren',
- 'read-square-bracket-with-tag', 'read-string', 'read-string!',
- 'read-string!-evt', 'read-string-evt', 'read-syntax',
- 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
- 'readtable?', 'real->decimal-string', 'real->double-flonum',
- 'real->floating-point-bytes', 'real->single-flonum', 'real-in',
- 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port',
- 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt',
- 'regexp-match-exact?', 'regexp-match-peek',
- 'regexp-match-peek-immediate', 'regexp-match-peek-positions',
- 'regexp-match-peek-positions*',
- 'regexp-match-peek-positions-immediate',
- 'regexp-match-peek-positions-immediate/end',
- 'regexp-match-peek-positions/end', 'regexp-match-positions',
- 'regexp-match-positions*', 'regexp-match-positions/end',
- 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind',
- 'regexp-quote', 'regexp-replace', 'regexp-replace*',
- 'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
- 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
- 'relocate-output-port', 'remainder', 'remf', 'remf*', 'remove',
- 'remove*', 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
- 'rename-contract', 'rename-file-or-directory',
- 'rename-transformer-target', 'rename-transformer?', 'replace-evt',
- 'reroot-path', 'resolve-path', 'resolved-module-path-name',
- 'resolved-module-path?', 'rest', 'reverse', 'round', 'second',
- 'seconds->date', 'security-guard?', 'semaphore-peek-evt',
- 'semaphore-peek-evt?', 'semaphore-post', 'semaphore-try-wait?',
- 'semaphore-wait', 'semaphore-wait/enable-break', 'semaphore?',
- 'sequence->list', 'sequence->stream', 'sequence-add-between',
- 'sequence-andmap', 'sequence-append', 'sequence-count',
- 'sequence-filter', 'sequence-fold', 'sequence-for-each',
- 'sequence-generate', 'sequence-generate*', 'sequence-length',
- 'sequence-map', 'sequence-ormap', 'sequence-ref', 'sequence-tail',
- 'sequence/c', 'sequence?', 'set', 'set!-transformer-procedure',
- 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
- 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
- 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
- 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?',
- 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
- 'set-member?', 'set-mutable?', 'set-phantom-bytes!',
- 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
- 'set-some-basic-contracts!', 'set-subtract', 'set-subtract!',
- 'set-symmetric-difference', 'set-symmetric-difference!', 'set-union',
- 'set-union!', 'set-weak?', 'set/c', 'set=?', 'set?', 'seteq', 'seteqv',
- 'seventh', 'sgn', 'shared-bytes', 'shell-execute', 'shrink-path-wrt',
- 'shuffle', 'simple-form-path', 'simplify-path', 'sin',
- 'single-flonum?', 'sinh', 'sixth', 'skip-projection-wrapper?', 'sleep',
- 'some-system-path->string', 'sort', 'special-comment-value',
- 'special-comment?', 'special-filter-input-port', 'split-at',
- 'split-at-right', 'split-common-prefix', 'split-path', 'splitf-at',
- 'splitf-at-right', 'sqr', 'sqrt', 'srcloc', 'srcloc->string',
- 'srcloc-column', 'srcloc-line', 'srcloc-position', 'srcloc-source',
- 'srcloc-span', 'srcloc?', 'stop-after', 'stop-before', 'stream->list',
- 'stream-add-between', 'stream-andmap', 'stream-append', 'stream-count',
- 'stream-empty?', 'stream-filter', 'stream-first', 'stream-fold',
- 'stream-for-each', 'stream-length', 'stream-map', 'stream-ormap',
- 'stream-ref', 'stream-rest', 'stream-tail', 'stream/c', 'stream?',
- 'string', 'string->bytes/latin-1', 'string->bytes/locale',
- 'string->bytes/utf-8', 'string->immutable-string', 'string->keyword',
- 'string->list', 'string->number', 'string->path',
- 'string->path-element', 'string->some-system-path', 'string->symbol',
- 'string->uninterned-symbol', 'string->unreadable-symbol',
- 'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
- 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-contains?',
- 'string-copy', 'string-copy!', 'string-downcase',
- 'string-environment-variable-name?', 'string-fill!', 'string-foldcase',
- 'string-join', 'string-len/c', 'string-length', 'string-locale-ci<?',
- 'string-locale-ci=?', 'string-locale-ci>?', 'string-locale-downcase',
- 'string-locale-upcase', 'string-locale<?', 'string-locale=?',
- 'string-locale>?', 'string-no-nuls?', 'string-normalize-nfc',
- 'string-normalize-nfd', 'string-normalize-nfkc',
- 'string-normalize-nfkd', 'string-normalize-spaces', 'string-port?',
- 'string-prefix?', 'string-ref', 'string-replace', 'string-set!',
- 'string-split', 'string-suffix?', 'string-titlecase', 'string-trim',
- 'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
- 'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
- 'struct-accessor-procedure?', 'struct-constructor-procedure?',
- 'struct-info', 'struct-mutator-procedure?',
- 'struct-predicate-procedure?', 'struct-type-info',
- 'struct-type-make-constructor', 'struct-type-make-predicate',
- 'struct-type-property-accessor-procedure?', 'struct-type-property/c',
- 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
- 'struct:arrow-contract-info', 'struct:date', 'struct:date*',
- 'struct:exn', 'struct:exn:break', 'struct:exn:break:hang-up',
- 'struct:exn:break:terminate', 'struct:exn:fail',
- 'struct:exn:fail:contract', 'struct:exn:fail:contract:arity',
- 'struct:exn:fail:contract:blame',
- 'struct:exn:fail:contract:continuation',
- 'struct:exn:fail:contract:divide-by-zero',
- 'struct:exn:fail:contract:non-fixnum-result',
- 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
- 'struct:exn:fail:filesystem:errno',
- 'struct:exn:fail:filesystem:exists',
- 'struct:exn:fail:filesystem:missing-module',
- 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
- 'struct:exn:fail:network:errno', 'struct:exn:fail:object',
- 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
- 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
- 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module',
- 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported',
- 'struct:exn:fail:user', 'struct:srcloc',
- 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
- 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
- 'subprocess-kill', 'subprocess-pid', 'subprocess-status',
- 'subprocess-wait', 'subprocess?', 'subset?', 'substring', 'suggest/c',
- 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
- 'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
- 'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
- 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-debug-info',
- 'syntax-disarm', 'syntax-e', 'syntax-line',
- 'syntax-local-bind-syntaxes', 'syntax-local-certifier',
- 'syntax-local-context', 'syntax-local-expand-expression',
- 'syntax-local-get-shadower', 'syntax-local-identifier-as-binding',
- 'syntax-local-introduce', 'syntax-local-lift-context',
- 'syntax-local-lift-expression', 'syntax-local-lift-module',
- 'syntax-local-lift-module-end-declaration',
- 'syntax-local-lift-provide', 'syntax-local-lift-require',
- 'syntax-local-lift-values-expression',
- 'syntax-local-make-definition-context',
- 'syntax-local-make-delta-introducer',
- 'syntax-local-module-defined-identifiers',
- 'syntax-local-module-exports',
- 'syntax-local-module-required-identifiers', 'syntax-local-name',
- 'syntax-local-phase-level', 'syntax-local-submodules',
- 'syntax-local-transforming-module-provides?', 'syntax-local-value',
- 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
- 'syntax-property', 'syntax-property-preserved?',
- 'syntax-property-symbol-keys', 'syntax-protect', 'syntax-rearm',
- 'syntax-recertify', 'syntax-shift-phase-level', 'syntax-source',
- 'syntax-source-module', 'syntax-span', 'syntax-taint',
- 'syntax-tainted?', 'syntax-track-origin',
- 'syntax-transforming-module-expression?',
- 'syntax-transforming-with-lifts?', 'syntax-transforming?', 'syntax/c',
- 'syntax?', 'system', 'system*', 'system*/exit-code',
- 'system-big-endian?', 'system-idle-evt', 'system-language+country',
- 'system-library-subpath', 'system-path-convention-type', 'system-type',
- 'system/exit-code', 'tail-marks-match?', 'take', 'take-common-prefix',
- 'take-right', 'takef', 'takef-right', 'tan', 'tanh',
- 'tcp-abandon-port', 'tcp-accept', 'tcp-accept-evt',
- 'tcp-accept-ready?', 'tcp-accept/enable-break', 'tcp-addresses',
- 'tcp-close', 'tcp-connect', 'tcp-connect/enable-break', 'tcp-listen',
- 'tcp-listener?', 'tcp-port?', 'tentative-pretty-print-port-cancel',
- 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
- 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
- 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
- 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive',
- 'thread-receive-evt', 'thread-resume', 'thread-resume-evt',
- 'thread-rewind-receive', 'thread-running?', 'thread-send',
- 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive',
- 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
- 'touch', 'transplant-input-port', 'transplant-output-port', 'true',
- 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close',
- 'udp-connect!', 'udp-connected?', 'udp-multicast-interface',
- 'udp-multicast-join-group!', 'udp-multicast-leave-group!',
- 'udp-multicast-loopback?', 'udp-multicast-set-interface!',
- 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!',
- 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!',
- 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break',
- 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt',
- 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt',
- 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox',
- 'uncaught-exception-handler', 'unit?', 'unspecified-dom',
- 'unsupplied-arg?', 'use-collection-link-paths',
- 'use-compiled-file-paths', 'use-user-specific-search-paths',
- 'user-execute-bit', 'user-read-bit', 'user-write-bit', 'value-blame',
- 'value-contract', 'values', 'variable-reference->empty-namespace',
- 'variable-reference->module-base-phase',
- 'variable-reference->module-declaration-inspector',
- 'variable-reference->module-path-index',
- 'variable-reference->module-source', 'variable-reference->namespace',
- 'variable-reference->phase',
- 'variable-reference->resolved-module-path',
- 'variable-reference-constant?', 'variable-reference?', 'vector',
- 'vector->immutable-vector', 'vector->list',
- 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!',
- 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin',
- 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop',
- 'vector-drop-right', 'vector-fill!', 'vector-filter',
- 'vector-filter-not', 'vector-immutable', 'vector-immutable/c',
- 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!',
- 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref',
- 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!',
- 'vector-split-at', 'vector-split-at-right', 'vector-take',
- 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version',
- 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set',
- 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?',
- 'will-register', 'will-try-execute', 'with-input-from-bytes',
- 'with-input-from-file', 'with-input-from-string',
- 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string',
- 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow',
- 'wrapped-extra-arg-arrow-extra-neg-party-argument',
- 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?',
- 'writable<%>', 'write', 'write-byte', 'write-bytes',
- 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
- 'write-bytes-avail/enable-break', 'write-char', 'write-special',
- 'write-special-avail*', 'write-special-evt', 'write-string',
- 'write-to-file', 'writeln', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a',
- '~e', '~r', '~s', '~v'
- )
-
- _opening_parenthesis = r'[([{]'
- _closing_parenthesis = r'[)\]}]'
- _delimiters = r'()[\]{}",\'`;\s'
- _symbol = r'(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
- _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
- _exponent = r'(?:[defls][-+]?\d+)'
- _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
- _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
- r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
- _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
- _exponent)
- _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
- _inexact_special = r'(?:(?:inf|nan)\.[0f])'
- _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
- _inexact_special)
- _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
-
- tokens = {
- 'root': [
- (_closing_parenthesis, Error),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'datum': [
- (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
- (r';[^\n\r\x85\u2028\u2029]*', Comment.Single),
- (r'#\|', Comment.Multiline, 'block-comment'),
-
- # Whitespaces
- (r'(?u)\s+', Whitespace),
-
- # Numbers: Keep in mind Racket reader hash prefixes, which
- # can denote the base or the type. These don't map neatly
- # onto Pygments token types; some judgment calls here.
-
- # #d or no prefix
- (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
- Number.Integer, '#pop'),
- (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
- (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
- (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
- (_exact_decimal_prefix, _inexact_normal_no_hashes,
- _inexact_normal_no_hashes, _inexact_normal_no_hashes,
- _delimiters), Number, '#pop'),
-
- # Inexact without explicit #i
- (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
- (_inexact_real, _inexact_unsigned, _inexact_unsigned,
- _inexact_real, _inexact_real, _delimiters), Number.Float,
- '#pop'),
-
- # The remaining extflonums
- (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
- (_inexact_simple, _delimiters), Number.Float, '#pop'),
-
- # #b
- (r'(?iu)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
-
- # #o
- (r'(?iu)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
-
- # #x
- (r'(?iu)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
-
- # #i is always inexact, i.e. float
- (r'(?iu)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
-
- # Strings and characters
- (r'#?"', String.Double, ('#pop', 'string')),
- (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
- (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
- (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
- (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
-
- # Constants
- (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
-
- # Keyword argument names (e.g. #:keyword)
- (r'#:%s' % _symbol, Keyword.Declaration, '#pop'),
-
- # Reader extensions
- (r'(#lang |#!)(\S+)',
- bygroups(Keyword.Namespace, Name.Namespace)),
- (r'#reader', Keyword.Namespace, 'quoted-datum'),
-
- # Other syntax
- (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
- (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
- Operator, ('#pop', 'quoted-datum'))
- ],
- 'datum*': [
- (r'`|,@?', Operator),
- (_symbol, String.Symbol, '#pop'),
- (r'[|\\]', Error),
- default('#pop')
- ],
- 'list': [
- (_closing_parenthesis, Punctuation, '#pop')
- ],
- 'unquoted-datum': [
- include('datum'),
- (r'quote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quoted-datum')),
- (r'`', Operator, ('#pop', 'quasiquoted-datum')),
- (r'quasiquote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quasiquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
- (words(_keywords, suffix='(?=[%s])' % _delimiters),
- Keyword, '#pop'),
- (words(_builtins, suffix='(?=[%s])' % _delimiters),
- Name.Builtin, '#pop'),
- (_symbol, Name, '#pop'),
- include('datum*')
- ],
- 'unquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'quasiquoted-datum': [
- include('datum'),
- (r',@?', Operator, ('#pop', 'unquoted-datum')),
- (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'unquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
- include('datum*')
- ],
- 'quasiquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quasiquoted-datum')
- ],
- 'quoted-datum': [
- include('datum'),
- (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
- include('datum*')
- ],
- 'quoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quoted-datum')
- ],
- 'block-comment': [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^#|]+|.', Comment.Multiline)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
- r'U[\da-fA-F]{1,8}|.)', String.Escape),
- (r'[^\\"]+', String.Double)
- ]
- }
-
-
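The number-handling rules in the 'datum' state above involve, as the inline comment notes, some judgment calls. For orientation while reading this removal, here is a minimal standalone sketch (not part of the deleted file; it assumes only an ordinary Pygments installation, and the sample literals are illustrative) showing how those rules classify a few Racket numbers:

    from pygments.lexers import RacketLexer
    from pygments.token import Number

    lexer = RacketLexer()
    for src in ('42', '#xFF', '#b1010', '3.14', '1/3', '+inf.0'):
        # Ignore whitespace-only tokens (get_tokens appends a trailing newline);
        # each literal should come back as a single token in the Number hierarchy.
        toks = [(tok, val) for tok, val in lexer.get_tokens(src) if val.strip()]
        print(src, '->', toks)
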
-class NewLispLexer(RegexLexer):
- """
- For newLISP source code (version 10.3.0).
-
- .. versionadded:: 1.5
- """
-
- name = 'NewLisp'
- url = 'http://www.newlisp.org/'
- aliases = ['newlisp']
- filenames = ['*.lsp', '*.nl', '*.kif']
- mimetypes = ['text/x-newlisp', 'application/x-newlisp']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- # list of built-in functions for newLISP version 10.3
- builtins = (
- '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
- '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
- '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
- '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
- 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'append-file',
- 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
- 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
- 'base64-enc', 'bayes-query', 'bayes-train', 'begin',
- 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback',
- 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
- 'close', 'command-event', 'cond', 'cons', 'constant',
- 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
- 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
- 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
- 'def-new', 'default', 'define-macro', 'define',
- 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
- 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
- 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
- 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
- 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
- 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
- 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
- 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
- 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
- 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
- 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
- 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
- 'last', 'legal?', 'length', 'let', 'letex', 'letn',
- 'list?', 'list', 'load', 'local', 'log', 'lookup',
- 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
- 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
- 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
- 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
- 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
- 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
- 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
- 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
- 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
- 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
- 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
- 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
- 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
- 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
- 'read-key', 'read-line', 'read-utf8', 'reader-event',
- 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
- 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
- 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
- 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
- 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
- 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
- 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
- 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
- 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
- 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
- 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
- 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
- 'write', 'write-char', 'write-file', 'write-line',
- 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
- )
-
- # valid names
- valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
-
- tokens = {
- 'root': [
- # shebang
- (r'#!(.*?)$', Comment.Preproc),
- # comments starting with semicolon
- (r';.*$', Comment.Single),
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # whitespace
- (r'\s+', Whitespace),
-
- # strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-
- # braces
- (r'\{', String, "bracestring"),
-
- # [text] ... [/text] delimited strings
- (r'\[text\]*', String, "tagstring"),
-
- # 'special' operators...
- (r"('|:)", Operator),
-
- # highlight the builtins
- (words(builtins, suffix=r'\b'),
- Keyword),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Variable),
-
- # the remaining variables
- (valid_name, String.Symbol),
-
- # parentheses
- (r'(\(|\))', Punctuation),
- ],
-
- # braced strings...
- 'bracestring': [
- (r'\{', String, "#push"),
- (r'\}', String, "#pop"),
- ('[^{}]+', String),
- ],
-
- # tagged [text]...[/text] delimited strings...
- 'tagstring': [
- (r'(?s)(.*?)(\[/text\])', String, '#pop'),
- ],
- }
-
-
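The 'tagstring' state above is what keeps newLISP's [text] ... [/text] blocks intact as string literals. A minimal standalone sketch (again not part of the deleted file, assuming only a stock Pygments install; the snippet itself is made up) makes the behaviour visible:

    from pygments.lexers import NewLispLexer
    from pygments.token import String

    code = "(set 'doc [text]spans\nmultiple lines[/text])"
    for tok, val in NewLispLexer().get_tokens(code):
        if tok is String:      # the [text] opener and the tagged body are plain String
            print(repr(val))   # the [/text] terminator stays inside the matched value
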
-class EmacsLispLexer(RegexLexer):
- """
- An ELisp lexer, parsing a stream and outputting the tokens
- needed to highlight elisp code.
-
- .. versionadded:: 2.1
- """
- name = 'EmacsLisp'
- aliases = ['emacs-lisp', 'elisp', 'emacs']
- filenames = ['*.el']
- mimetypes = ['text/x-elisp', 'application/x-elisp']
-
- flags = re.MULTILINE
-
- # couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\]\'\n,;`])' # whitespace or terminating macro characters
-
- # symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
-
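For orientation, the three patterns just defined can be exercised on their own. The following standalone sketch is not part of the class or the deleted file; it merely mirrors the definitions above to show the kinds of names the symbol regex accepts:

    import re

    nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
    constituent = nonmacro + '|[#.:]'
    symbol = re.compile(r'((?:%s)(?:%s)*)' % (nonmacro, constituent))

    for s in ('setq', 'string-to-number', '1+', 'with-current-buffer', '*scratch*'):
        m = symbol.match(s)
        print(s, '->', m.group(1) if m else None)   # each name matches in full
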
- macros = {
- 'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
- 'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
- 'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
- 'cl-defsubst', 'cl-deftype', 'cl-defun', 'cl-destructuring-bind',
- 'cl-do', 'cl-do*', 'cl-do-all-symbols', 'cl-do-symbols', 'cl-dolist',
- 'cl-dotimes', 'cl-ecase', 'cl-etypecase', 'eval-when', 'cl-eval-when', 'cl-flet',
- 'cl-flet*', 'cl-function', 'cl-incf', 'cl-labels', 'cl-letf',
- 'cl-letf*', 'cl-load-time-value', 'cl-locally', 'cl-loop',
- 'cl-macrolet', 'cl-multiple-value-bind', 'cl-multiple-value-setq',
- 'cl-progv', 'cl-psetf', 'cl-psetq', 'cl-pushnew', 'cl-remf',
- 'cl-return', 'cl-return-from', 'cl-rotatef', 'cl-shiftf',
- 'cl-symbol-macrolet', 'cl-tagbody', 'cl-the', 'cl-typecase',
- 'combine-after-change-calls', 'condition-case-unless-debug', 'decf',
- 'declaim', 'declare', 'declare-function', 'def-edebug-spec',
- 'defadvice', 'defclass', 'defcustom', 'defface', 'defgeneric',
- 'defgroup', 'define-advice', 'define-alternatives',
- 'define-compiler-macro', 'define-derived-mode', 'define-generic-mode',
- 'define-global-minor-mode', 'define-globalized-minor-mode',
- 'define-minor-mode', 'define-modify-macro',
- 'define-obsolete-face-alias', 'define-obsolete-function-alias',
- 'define-obsolete-variable-alias', 'define-setf-expander',
- 'define-skeleton', 'defmacro', 'defmethod', 'defsetf', 'defstruct',
- 'defsubst', 'deftheme', 'deftype', 'defun', 'defvar-local',
- 'delay-mode-hooks', 'destructuring-bind', 'do', 'do*',
- 'do-all-symbols', 'do-symbols', 'dolist', 'dont-compile', 'dotimes',
- 'dotimes-with-progress-reporter', 'ecase', 'ert-deftest', 'etypecase',
- 'eval-and-compile', 'eval-when-compile', 'flet', 'ignore-errors',
- 'incf', 'labels', 'lambda', 'letrec', 'lexical-let', 'lexical-let*',
- 'loop', 'multiple-value-bind', 'multiple-value-setq', 'noreturn',
- 'oref', 'oref-default', 'oset', 'oset-default', 'pcase',
- 'pcase-defmacro', 'pcase-dolist', 'pcase-exhaustive', 'pcase-let',
- 'pcase-let*', 'pop', 'psetf', 'psetq', 'push', 'pushnew', 'remf',
- 'return', 'rotatef', 'rx', 'save-match-data', 'save-selected-window',
- 'save-window-excursion', 'setf', 'setq-local', 'shiftf',
- 'track-mouse', 'typecase', 'unless', 'use-package', 'when',
- 'while-no-input', 'with-case-table', 'with-category-table',
- 'with-coding-priority', 'with-current-buffer', 'with-demoted-errors',
- 'with-eval-after-load', 'with-file-modes', 'with-local-quit',
- 'with-output-to-string', 'with-output-to-temp-buffer',
- 'with-parsed-tramp-file-name', 'with-selected-frame',
- 'with-selected-window', 'with-silent-modifications', 'with-slots',
- 'with-syntax-table', 'with-temp-buffer', 'with-temp-file',
- 'with-temp-message', 'with-timeout', 'with-tramp-connection-property',
- 'with-tramp-file-property', 'with-tramp-progress-reporter',
- 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
- 'return-from',
- }
-
- special_forms = {
- 'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
- 'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
- 'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
- 'save-restriction', 'setq', 'setq-default', 'subr-arity',
- 'unwind-protect', 'while',
- }
-
- builtin_function = {
- '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
- 'Snarf-documentation', 'abort-recursive-edit', 'abs',
- 'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
- 'active-minibuffer-window', 'add-face-text-property',
- 'add-name-to-file', 'add-text-properties', 'all-completions',
- 'append', 'apply', 'apropos-internal', 'aref', 'arrayp', 'aset',
- 'ash', 'asin', 'assoc', 'assoc-string', 'assq', 'atan', 'atom',
- 'autoload', 'autoload-do-load', 'backtrace', 'backtrace--locals',
- 'backtrace-debug', 'backtrace-eval', 'backtrace-frame',
- 'backward-char', 'backward-prefix-chars', 'barf-if-buffer-read-only',
- 'base64-decode-region', 'base64-decode-string',
- 'base64-encode-region', 'base64-encode-string', 'beginning-of-line',
- 'bidi-find-overridden-directionality', 'bidi-resolved-levels',
- 'bitmap-spec-p', 'bobp', 'bolp', 'bool-vector',
- 'bool-vector-count-consecutive', 'bool-vector-count-population',
- 'bool-vector-exclusive-or', 'bool-vector-intersection',
- 'bool-vector-not', 'bool-vector-p', 'bool-vector-set-difference',
- 'bool-vector-subsetp', 'bool-vector-union', 'boundp',
- 'buffer-base-buffer', 'buffer-chars-modified-tick',
- 'buffer-enable-undo', 'buffer-file-name', 'buffer-has-markers-at',
- 'buffer-list', 'buffer-live-p', 'buffer-local-value',
- 'buffer-local-variables', 'buffer-modified-p', 'buffer-modified-tick',
- 'buffer-name', 'buffer-size', 'buffer-string', 'buffer-substring',
- 'buffer-substring-no-properties', 'buffer-swap-text', 'bufferp',
- 'bury-buffer-internal', 'byte-code', 'byte-code-function-p',
- 'byte-to-position', 'byte-to-string', 'byteorder',
- 'call-interactively', 'call-last-kbd-macro', 'call-process',
- 'call-process-region', 'cancel-kbd-macro-events', 'capitalize',
- 'capitalize-region', 'capitalize-word', 'car', 'car-less-than-car',
- 'car-safe', 'case-table-p', 'category-docstring',
- 'category-set-mnemonics', 'category-table', 'category-table-p',
- 'ccl-execute', 'ccl-execute-on-string', 'ccl-program-p', 'cdr',
- 'cdr-safe', 'ceiling', 'char-after', 'char-before',
- 'char-category-set', 'char-charset', 'char-equal', 'char-or-string-p',
- 'char-resolve-modifiers', 'char-syntax', 'char-table-extra-slot',
- 'char-table-p', 'char-table-parent', 'char-table-range',
- 'char-table-subtype', 'char-to-string', 'char-width', 'characterp',
- 'charset-after', 'charset-id-internal', 'charset-plist',
- 'charset-priority-list', 'charsetp', 'check-coding-system',
- 'check-coding-systems-region', 'clear-buffer-auto-save-failure',
- 'clear-charset-maps', 'clear-face-cache', 'clear-font-cache',
- 'clear-image-cache', 'clear-string', 'clear-this-command-keys',
- 'close-font', 'clrhash', 'coding-system-aliases',
- 'coding-system-base', 'coding-system-eol-type', 'coding-system-p',
- 'coding-system-plist', 'coding-system-priority-list',
- 'coding-system-put', 'color-distance', 'color-gray-p',
- 'color-supported-p', 'combine-after-change-execute',
- 'command-error-default-function', 'command-remapping', 'commandp',
- 'compare-buffer-substrings', 'compare-strings',
- 'compare-window-configurations', 'completing-read',
- 'compose-region-internal', 'compose-string-internal',
- 'composition-get-gstring', 'compute-motion', 'concat', 'cons',
- 'consp', 'constrain-to-field', 'continue-process',
- 'controlling-tty-p', 'coordinates-in-window-p', 'copy-alist',
- 'copy-category-table', 'copy-file', 'copy-hash-table', 'copy-keymap',
- 'copy-marker', 'copy-sequence', 'copy-syntax-table', 'copysign',
- 'cos', 'current-active-maps', 'current-bidi-paragraph-direction',
- 'current-buffer', 'current-case-table', 'current-column',
- 'current-global-map', 'current-idle-time', 'current-indentation',
- 'current-input-mode', 'current-local-map', 'current-message',
- 'current-minor-mode-maps', 'current-time', 'current-time-string',
- 'current-time-zone', 'current-window-configuration',
- 'cygwin-convert-file-name-from-windows',
- 'cygwin-convert-file-name-to-windows', 'daemon-initialized',
- 'daemonp', 'dbus--init-bus', 'dbus-get-unique-name',
- 'dbus-message-internal', 'debug-timer-check', 'declare-equiv-charset',
- 'decode-big5-char', 'decode-char', 'decode-coding-region',
- 'decode-coding-string', 'decode-sjis-char', 'decode-time',
- 'default-boundp', 'default-file-modes', 'default-printer-name',
- 'default-toplevel-value', 'default-value', 'define-category',
- 'define-charset-alias', 'define-charset-internal',
- 'define-coding-system-alias', 'define-coding-system-internal',
- 'define-fringe-bitmap', 'define-hash-table-test', 'define-key',
- 'define-prefix-command', 'delete',
- 'delete-all-overlays', 'delete-and-extract-region', 'delete-char',
- 'delete-directory-internal', 'delete-field', 'delete-file',
- 'delete-frame', 'delete-other-windows-internal', 'delete-overlay',
- 'delete-process', 'delete-region', 'delete-terminal',
- 'delete-window-internal', 'delq', 'describe-buffer-bindings',
- 'describe-vector', 'destroy-fringe-bitmap', 'detect-coding-region',
- 'detect-coding-string', 'ding', 'directory-file-name',
- 'directory-files', 'directory-files-and-attributes', 'discard-input',
- 'display-supports-face-attributes-p', 'do-auto-save', 'documentation',
- 'documentation-property', 'downcase', 'downcase-region',
- 'downcase-word', 'draw-string', 'dump-colors', 'dump-emacs',
- 'dump-face', 'dump-frame-glyph-matrix', 'dump-glyph-matrix',
- 'dump-glyph-row', 'dump-redisplay-history', 'dump-tool-bar-row',
- 'elt', 'emacs-pid', 'encode-big5-char', 'encode-char',
- 'encode-coding-region', 'encode-coding-string', 'encode-sjis-char',
- 'encode-time', 'end-kbd-macro', 'end-of-line', 'eobp', 'eolp', 'eq',
- 'eql', 'equal', 'equal-including-properties', 'erase-buffer',
- 'error-message-string', 'eval', 'eval-buffer', 'eval-region',
- 'event-convert-list', 'execute-kbd-macro', 'exit-recursive-edit',
- 'exp', 'expand-file-name', 'expt', 'external-debugging-output',
- 'face-attribute-relative-p', 'face-attributes-as-vector', 'face-font',
- 'fboundp', 'fceiling', 'fetch-bytecode', 'ffloor',
- 'field-beginning', 'field-end', 'field-string',
- 'field-string-no-properties', 'file-accessible-directory-p',
- 'file-acl', 'file-attributes', 'file-attributes-lessp',
- 'file-directory-p', 'file-executable-p', 'file-exists-p',
- 'file-locked-p', 'file-modes', 'file-name-absolute-p',
- 'file-name-all-completions', 'file-name-as-directory',
- 'file-name-completion', 'file-name-directory',
- 'file-name-nondirectory', 'file-newer-than-file-p', 'file-readable-p',
- 'file-regular-p', 'file-selinux-context', 'file-symlink-p',
-        'file-system-info', 'file-writable-p',
- 'fillarray', 'find-charset-region', 'find-charset-string',
- 'find-coding-systems-region-internal', 'find-composition-internal',
- 'find-file-name-handler', 'find-font', 'find-operation-coding-system',
- 'float', 'float-time', 'floatp', 'floor', 'fmakunbound',
- 'following-char', 'font-at', 'font-drive-otf', 'font-face-attributes',
- 'font-family-list', 'font-get', 'font-get-glyphs',
- 'font-get-system-font', 'font-get-system-normal-font', 'font-info',
- 'font-match-p', 'font-otf-alternates', 'font-put',
- 'font-shape-gstring', 'font-spec', 'font-variation-glyphs',
- 'font-xlfd-name', 'fontp', 'fontset-font', 'fontset-info',
- 'fontset-list', 'fontset-list-all', 'force-mode-line-update',
- 'force-window-update', 'format', 'format-mode-line',
- 'format-network-address', 'format-time-string', 'forward-char',
- 'forward-comment', 'forward-line', 'forward-word',
- 'frame-border-width', 'frame-bottom-divider-width',
- 'frame-can-run-window-configuration-change-hook', 'frame-char-height',
- 'frame-char-width', 'frame-face-alist', 'frame-first-window',
- 'frame-focus', 'frame-font-cache', 'frame-fringe-width', 'frame-list',
- 'frame-live-p', 'frame-or-buffer-changed-p', 'frame-parameter',
- 'frame-parameters', 'frame-pixel-height', 'frame-pixel-width',
- 'frame-pointer-visible-p', 'frame-right-divider-width',
- 'frame-root-window', 'frame-scroll-bar-height',
- 'frame-scroll-bar-width', 'frame-selected-window', 'frame-terminal',
- 'frame-text-cols', 'frame-text-height', 'frame-text-lines',
- 'frame-text-width', 'frame-total-cols', 'frame-total-lines',
- 'frame-visible-p', 'framep', 'frexp', 'fringe-bitmaps-at-pos',
- 'fround', 'fset', 'ftruncate', 'funcall', 'funcall-interactively',
- 'function-equal', 'functionp', 'gap-position', 'gap-size',
- 'garbage-collect', 'gc-status', 'generate-new-buffer-name', 'get',
- 'get-buffer', 'get-buffer-create', 'get-buffer-process',
- 'get-buffer-window', 'get-byte', 'get-char-property',
- 'get-char-property-and-overlay', 'get-file-buffer', 'get-file-char',
- 'get-internal-run-time', 'get-load-suffixes', 'get-pos-property',
- 'get-process', 'get-screen-color', 'get-text-property',
- 'get-unicode-property-internal', 'get-unused-category',
- 'get-unused-iso-final-char', 'getenv-internal', 'gethash',
- 'gfile-add-watch', 'gfile-rm-watch', 'global-key-binding',
- 'gnutls-available-p', 'gnutls-boot', 'gnutls-bye', 'gnutls-deinit',
- 'gnutls-error-fatalp', 'gnutls-error-string', 'gnutls-errorp',
- 'gnutls-get-initstage', 'gnutls-peer-status',
- 'gnutls-peer-status-warning-describe', 'goto-char', 'gpm-mouse-start',
- 'gpm-mouse-stop', 'group-gid', 'group-real-gid',
- 'handle-save-session', 'handle-switch-frame', 'hash-table-count',
- 'hash-table-p', 'hash-table-rehash-size',
- 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
- 'hash-table-weakness', 'iconify-frame', 'identity', 'image-flush',
- 'image-mask-p', 'image-metadata', 'image-size', 'imagemagick-types',
- 'imagep', 'indent-to', 'indirect-function', 'indirect-variable',
- 'init-image-library', 'inotify-add-watch', 'inotify-rm-watch',
- 'input-pending-p', 'insert', 'insert-and-inherit',
- 'insert-before-markers', 'insert-before-markers-and-inherit',
- 'insert-buffer-substring', 'insert-byte', 'insert-char',
- 'insert-file-contents', 'insert-startup-screen', 'int86',
- 'integer-or-marker-p', 'integerp', 'interactive-form', 'intern',
- 'intern-soft', 'internal--track-mouse', 'internal-char-font',
- 'internal-complete-buffer', 'internal-copy-lisp-face',
- 'internal-default-process-filter',
- 'internal-default-process-sentinel', 'internal-describe-syntax-value',
- 'internal-event-symbol-parse-modifiers',
- 'internal-face-x-get-resource', 'internal-get-lisp-face-attribute',
- 'internal-lisp-face-attribute-values', 'internal-lisp-face-empty-p',
- 'internal-lisp-face-equal-p', 'internal-lisp-face-p',
- 'internal-make-lisp-face', 'internal-make-var-non-special',
- 'internal-merge-in-global-face',
- 'internal-set-alternative-font-family-alist',
- 'internal-set-alternative-font-registry-alist',
- 'internal-set-font-selection-order',
- 'internal-set-lisp-face-attribute',
- 'internal-set-lisp-face-attribute-from-resource',
- 'internal-show-cursor', 'internal-show-cursor-p', 'interrupt-process',
- 'invisible-p', 'invocation-directory', 'invocation-name', 'isnan',
- 'iso-charset', 'key-binding', 'key-description',
- 'keyboard-coding-system', 'keymap-parent', 'keymap-prompt', 'keymapp',
- 'keywordp', 'kill-all-local-variables', 'kill-buffer', 'kill-emacs',
- 'kill-local-variable', 'kill-process', 'last-nonminibuffer-frame',
- 'lax-plist-get', 'lax-plist-put', 'ldexp', 'length',
- 'libxml-parse-html-region', 'libxml-parse-xml-region',
- 'line-beginning-position', 'line-end-position', 'line-pixel-height',
- 'list', 'list-fonts', 'list-system-processes', 'listp', 'load',
- 'load-average', 'local-key-binding', 'local-variable-if-set-p',
- 'local-variable-p', 'locale-info', 'locate-file-internal',
- 'lock-buffer', 'log', 'logand', 'logb', 'logior', 'lognot', 'logxor',
- 'looking-at', 'lookup-image', 'lookup-image-map', 'lookup-key',
- 'lower-frame', 'lsh', 'macroexpand', 'make-bool-vector',
- 'make-byte-code', 'make-category-set', 'make-category-table',
- 'make-char', 'make-char-table', 'make-directory-internal',
- 'make-frame-invisible', 'make-frame-visible', 'make-hash-table',
- 'make-indirect-buffer', 'make-keymap', 'make-list',
- 'make-local-variable', 'make-marker', 'make-network-process',
- 'make-overlay', 'make-serial-process', 'make-sparse-keymap',
- 'make-string', 'make-symbol', 'make-symbolic-link', 'make-temp-name',
- 'make-terminal-frame', 'make-variable-buffer-local',
- 'make-variable-frame-local', 'make-vector', 'makunbound',
- 'map-char-table', 'map-charset-chars', 'map-keymap',
- 'map-keymap-internal', 'mapatoms', 'mapc', 'mapcar', 'mapconcat',
- 'maphash', 'mark-marker', 'marker-buffer', 'marker-insertion-type',
- 'marker-position', 'markerp', 'match-beginning', 'match-data',
- 'match-end', 'matching-paren', 'max', 'max-char', 'md5', 'member',
- 'memory-info', 'memory-limit', 'memory-use-counts', 'memq', 'memql',
-        'menu-bar-menu-at-x-y', 'menu-or-popup-active-p',
-        'merge-face-attribute', 'message',
- 'message-box', 'message-or-box', 'min',
- 'minibuffer-completion-contents', 'minibuffer-contents',
- 'minibuffer-contents-no-properties', 'minibuffer-depth',
- 'minibuffer-prompt', 'minibuffer-prompt-end',
- 'minibuffer-selected-window', 'minibuffer-window', 'minibufferp',
- 'minor-mode-key-binding', 'mod', 'modify-category-entry',
- 'modify-frame-parameters', 'modify-syntax-entry',
- 'mouse-pixel-position', 'mouse-position', 'move-overlay',
- 'move-point-visually', 'move-to-column', 'move-to-window-line',
- 'msdos-downcase-filename', 'msdos-long-file-names', 'msdos-memget',
- 'msdos-memput', 'msdos-mouse-disable', 'msdos-mouse-enable',
- 'msdos-mouse-init', 'msdos-mouse-p', 'msdos-remember-default-colors',
- 'msdos-set-keyboard', 'msdos-set-mouse-buttons',
- 'multibyte-char-to-unibyte', 'multibyte-string-p', 'narrow-to-region',
- 'natnump', 'nconc', 'network-interface-info',
- 'network-interface-list', 'new-fontset', 'newline-cache-check',
- 'next-char-property-change', 'next-frame', 'next-overlay-change',
- 'next-property-change', 'next-read-file-uses-dialog-p',
- 'next-single-char-property-change', 'next-single-property-change',
- 'next-window', 'nlistp', 'nreverse', 'nth', 'nthcdr', 'null',
- 'number-or-marker-p', 'number-to-string', 'numberp',
- 'open-dribble-file', 'open-font', 'open-termscript',
- 'optimize-char-table', 'other-buffer', 'other-window-for-scrolling',
- 'overlay-buffer', 'overlay-end', 'overlay-get', 'overlay-lists',
- 'overlay-properties', 'overlay-put', 'overlay-recenter',
- 'overlay-start', 'overlayp', 'overlays-at', 'overlays-in',
- 'parse-partial-sexp', 'play-sound-internal', 'plist-get',
- 'plist-member', 'plist-put', 'point', 'point-marker', 'point-max',
- 'point-max-marker', 'point-min', 'point-min-marker',
- 'pos-visible-in-window-p', 'position-bytes', 'posix-looking-at',
- 'posix-search-backward', 'posix-search-forward', 'posix-string-match',
- 'posn-at-point', 'posn-at-x-y', 'preceding-char',
- 'prefix-numeric-value', 'previous-char-property-change',
- 'previous-frame', 'previous-overlay-change',
- 'previous-property-change', 'previous-single-char-property-change',
- 'previous-single-property-change', 'previous-window', 'prin1',
- 'prin1-to-string', 'princ', 'print', 'process-attributes',
- 'process-buffer', 'process-coding-system', 'process-command',
- 'process-connection', 'process-contact', 'process-datagram-address',
- 'process-exit-status', 'process-filter', 'process-filter-multibyte-p',
- 'process-id', 'process-inherit-coding-system-flag', 'process-list',
- 'process-mark', 'process-name', 'process-plist',
- 'process-query-on-exit-flag', 'process-running-child-p',
- 'process-send-eof', 'process-send-region', 'process-send-string',
- 'process-sentinel', 'process-status', 'process-tty-name',
- 'process-type', 'processp', 'profiler-cpu-log',
- 'profiler-cpu-running-p', 'profiler-cpu-start', 'profiler-cpu-stop',
- 'profiler-memory-log', 'profiler-memory-running-p',
- 'profiler-memory-start', 'profiler-memory-stop', 'propertize',
- 'purecopy', 'put', 'put-text-property',
- 'put-unicode-property-internal', 'puthash', 'query-font',
- 'query-fontset', 'quit-process', 'raise-frame', 'random', 'rassoc',
- 'rassq', 're-search-backward', 're-search-forward', 'read',
- 'read-buffer', 'read-char', 'read-char-exclusive',
- 'read-coding-system', 'read-command', 'read-event',
- 'read-from-minibuffer', 'read-from-string', 'read-function',
- 'read-key-sequence', 'read-key-sequence-vector',
- 'read-no-blanks-input', 'read-non-nil-coding-system', 'read-string',
- 'read-variable', 'recent-auto-save-p', 'recent-doskeys',
- 'recent-keys', 'recenter', 'recursion-depth', 'recursive-edit',
- 'redirect-debugging-output', 'redirect-frame-focus', 'redisplay',
- 'redraw-display', 'redraw-frame', 'regexp-quote', 'region-beginning',
- 'region-end', 'register-ccl-program', 'register-code-conversion-map',
- 'remhash', 'remove-list-of-text-properties', 'remove-text-properties',
- 'rename-buffer', 'rename-file', 'replace-match',
- 'reset-this-command-lengths', 'resize-mini-window-internal',
- 'restore-buffer-modified-p', 'resume-tty', 'reverse', 'round',
- 'run-hook-with-args', 'run-hook-with-args-until-failure',
- 'run-hook-with-args-until-success', 'run-hook-wrapped', 'run-hooks',
- 'run-window-configuration-change-hook', 'run-window-scroll-functions',
- 'safe-length', 'scan-lists', 'scan-sexps', 'scroll-down',
- 'scroll-left', 'scroll-other-window', 'scroll-right', 'scroll-up',
- 'search-backward', 'search-forward', 'secure-hash', 'select-frame',
- 'select-window', 'selected-frame', 'selected-window',
- 'self-insert-command', 'send-string-to-terminal', 'sequencep',
- 'serial-process-configure', 'set', 'set-buffer',
- 'set-buffer-auto-saved', 'set-buffer-major-mode',
- 'set-buffer-modified-p', 'set-buffer-multibyte', 'set-case-table',
- 'set-category-table', 'set-char-table-extra-slot',
- 'set-char-table-parent', 'set-char-table-range', 'set-charset-plist',
- 'set-charset-priority', 'set-coding-system-priority',
- 'set-cursor-size', 'set-default', 'set-default-file-modes',
- 'set-default-toplevel-value', 'set-file-acl', 'set-file-modes',
- 'set-file-selinux-context', 'set-file-times', 'set-fontset-font',
- 'set-frame-height', 'set-frame-position', 'set-frame-selected-window',
- 'set-frame-size', 'set-frame-width', 'set-fringe-bitmap-face',
- 'set-input-interrupt-mode', 'set-input-meta-mode', 'set-input-mode',
- 'set-keyboard-coding-system-internal', 'set-keymap-parent',
- 'set-marker', 'set-marker-insertion-type', 'set-match-data',
- 'set-message-beep', 'set-minibuffer-window',
- 'set-mouse-pixel-position', 'set-mouse-position',
- 'set-network-process-option', 'set-output-flow-control',
- 'set-process-buffer', 'set-process-coding-system',
- 'set-process-datagram-address', 'set-process-filter',
- 'set-process-filter-multibyte',
- 'set-process-inherit-coding-system-flag', 'set-process-plist',
- 'set-process-query-on-exit-flag', 'set-process-sentinel',
- 'set-process-window-size', 'set-quit-char',
- 'set-safe-terminal-coding-system-internal', 'set-screen-color',
- 'set-standard-case-table', 'set-syntax-table',
- 'set-terminal-coding-system-internal', 'set-terminal-local-value',
- 'set-terminal-parameter', 'set-text-properties', 'set-time-zone-rule',
- 'set-visited-file-modtime', 'set-window-buffer',
- 'set-window-combination-limit', 'set-window-configuration',
- 'set-window-dedicated-p', 'set-window-display-table',
- 'set-window-fringes', 'set-window-hscroll', 'set-window-margins',
- 'set-window-new-normal', 'set-window-new-pixel',
- 'set-window-new-total', 'set-window-next-buffers',
- 'set-window-parameter', 'set-window-point', 'set-window-prev-buffers',
- 'set-window-redisplay-end-trigger', 'set-window-scroll-bars',
- 'set-window-start', 'set-window-vscroll', 'setcar', 'setcdr',
- 'setplist', 'show-face-resources', 'signal', 'signal-process', 'sin',
- 'single-key-description', 'skip-chars-backward', 'skip-chars-forward',
- 'skip-syntax-backward', 'skip-syntax-forward', 'sleep-for', 'sort',
- 'sort-charsets', 'special-variable-p', 'split-char',
- 'split-window-internal', 'sqrt', 'standard-case-table',
- 'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
- 'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
- 'string=', 'string<', 'string>', 'string-as-multibyte',
- 'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
- 'string-collate-lessp', 'string-equal', 'string-greaterp',
- 'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
- 'string-match', 'string-to-char', 'string-to-multibyte',
- 'string-to-number', 'string-to-syntax', 'string-to-unibyte',
- 'string-width', 'stringp', 'subr-name', 'subrp',
- 'subst-char-in-region', 'substitute-command-keys',
- 'substitute-in-file-name', 'substring', 'substring-no-properties',
- 'suspend-emacs', 'suspend-tty', 'suspicious-object', 'sxhash',
- 'symbol-function', 'symbol-name', 'symbol-plist', 'symbol-value',
- 'symbolp', 'syntax-table', 'syntax-table-p', 'system-groups',
- 'system-move-file-to-trash', 'system-name', 'system-users', 'tan',
- 'terminal-coding-system', 'terminal-list', 'terminal-live-p',
- 'terminal-local-value', 'terminal-name', 'terminal-parameter',
- 'terminal-parameters', 'terpri', 'test-completion',
- 'text-char-description', 'text-properties-at', 'text-property-any',
- 'text-property-not-all', 'this-command-keys',
- 'this-command-keys-vector', 'this-single-command-keys',
- 'this-single-command-raw-keys', 'time-add', 'time-less-p',
- 'time-subtract', 'tool-bar-get-system-style', 'tool-bar-height',
- 'tool-bar-pixel-width', 'top-level', 'trace-redisplay',
- 'trace-to-stderr', 'translate-region-internal', 'transpose-regions',
- 'truncate', 'try-completion', 'tty-display-color-cells',
- 'tty-display-color-p', 'tty-no-underline',
- 'tty-suppress-bold-inverse-default-colors', 'tty-top-frame',
- 'tty-type', 'type-of', 'undo-boundary', 'unencodable-char-position',
- 'unhandled-file-name-directory', 'unibyte-char-to-multibyte',
- 'unibyte-string', 'unicode-property-table-internal', 'unify-charset',
- 'unintern', 'unix-sync', 'unlock-buffer', 'upcase', 'upcase-initials',
- 'upcase-initials-region', 'upcase-region', 'upcase-word',
- 'use-global-map', 'use-local-map', 'user-full-name',
- 'user-login-name', 'user-real-login-name', 'user-real-uid',
- 'user-uid', 'variable-binding-locus', 'vconcat', 'vector',
- 'vector-or-char-table-p', 'vectorp', 'verify-visited-file-modtime',
- 'vertical-motion', 'visible-frame-list', 'visited-file-modtime',
- 'w16-get-clipboard-data', 'w16-selection-exists-p',
- 'w16-set-clipboard-data', 'w32-battery-status',
- 'w32-default-color-map', 'w32-define-rgb-color',
- 'w32-display-monitor-attributes-list', 'w32-frame-menu-bar-size',
- 'w32-frame-rect', 'w32-get-clipboard-data',
- 'w32-get-codepage-charset', 'w32-get-console-codepage',
- 'w32-get-console-output-codepage', 'w32-get-current-locale-id',
- 'w32-get-default-locale-id', 'w32-get-keyboard-layout',
- 'w32-get-locale-info', 'w32-get-valid-codepages',
- 'w32-get-valid-keyboard-layouts', 'w32-get-valid-locale-ids',
- 'w32-has-winsock', 'w32-long-file-name', 'w32-reconstruct-hot-key',
- 'w32-register-hot-key', 'w32-registered-hot-keys',
- 'w32-selection-exists-p', 'w32-send-sys-command',
- 'w32-set-clipboard-data', 'w32-set-console-codepage',
- 'w32-set-console-output-codepage', 'w32-set-current-locale',
- 'w32-set-keyboard-layout', 'w32-set-process-priority',
- 'w32-shell-execute', 'w32-short-file-name', 'w32-toggle-lock-key',
- 'w32-unload-winsock', 'w32-unregister-hot-key', 'w32-window-exists-p',
- 'w32notify-add-watch', 'w32notify-rm-watch',
- 'waiting-for-user-input-p', 'where-is-internal', 'widen',
- 'widget-apply', 'widget-get', 'widget-put',
- 'window-absolute-pixel-edges', 'window-at', 'window-body-height',
- 'window-body-width', 'window-bottom-divider-width', 'window-buffer',
- 'window-combination-limit', 'window-configuration-frame',
- 'window-configuration-p', 'window-dedicated-p',
- 'window-display-table', 'window-edges', 'window-end', 'window-frame',
- 'window-fringes', 'window-header-line-height', 'window-hscroll',
- 'window-inside-absolute-pixel-edges', 'window-inside-edges',
- 'window-inside-pixel-edges', 'window-left-child',
- 'window-left-column', 'window-line-height', 'window-list',
- 'window-list-1', 'window-live-p', 'window-margins',
- 'window-minibuffer-p', 'window-mode-line-height', 'window-new-normal',
- 'window-new-pixel', 'window-new-total', 'window-next-buffers',
- 'window-next-sibling', 'window-normal-size', 'window-old-point',
- 'window-parameter', 'window-parameters', 'window-parent',
- 'window-pixel-edges', 'window-pixel-height', 'window-pixel-left',
- 'window-pixel-top', 'window-pixel-width', 'window-point',
- 'window-prev-buffers', 'window-prev-sibling',
- 'window-redisplay-end-trigger', 'window-resize-apply',
- 'window-resize-apply-total', 'window-right-divider-width',
- 'window-scroll-bar-height', 'window-scroll-bar-width',
- 'window-scroll-bars', 'window-start', 'window-system',
- 'window-text-height', 'window-text-pixel-size', 'window-text-width',
- 'window-top-child', 'window-top-line', 'window-total-height',
- 'window-total-width', 'window-use-time', 'window-valid-p',
- 'window-vscroll', 'windowp', 'write-char', 'write-region',
- 'x-backspace-delete-keys-p', 'x-change-window-property',
- 'x-change-window-property', 'x-close-connection',
- 'x-close-connection', 'x-create-frame', 'x-create-frame',
- 'x-delete-window-property', 'x-delete-window-property',
- 'x-disown-selection-internal', 'x-display-backing-store',
- 'x-display-backing-store', 'x-display-color-cells',
- 'x-display-color-cells', 'x-display-grayscale-p',
- 'x-display-grayscale-p', 'x-display-list', 'x-display-list',
- 'x-display-mm-height', 'x-display-mm-height', 'x-display-mm-width',
- 'x-display-mm-width', 'x-display-monitor-attributes-list',
- 'x-display-pixel-height', 'x-display-pixel-height',
- 'x-display-pixel-width', 'x-display-pixel-width', 'x-display-planes',
- 'x-display-planes', 'x-display-save-under', 'x-display-save-under',
- 'x-display-screens', 'x-display-screens', 'x-display-visual-class',
- 'x-display-visual-class', 'x-family-fonts', 'x-file-dialog',
- 'x-file-dialog', 'x-file-dialog', 'x-focus-frame', 'x-frame-geometry',
- 'x-frame-geometry', 'x-get-atom-name', 'x-get-resource',
- 'x-get-selection-internal', 'x-hide-tip', 'x-hide-tip',
- 'x-list-fonts', 'x-load-color-file', 'x-menu-bar-open-internal',
- 'x-menu-bar-open-internal', 'x-open-connection', 'x-open-connection',
- 'x-own-selection-internal', 'x-parse-geometry', 'x-popup-dialog',
- 'x-popup-menu', 'x-register-dnd-atom', 'x-select-font',
- 'x-select-font', 'x-selection-exists-p', 'x-selection-owner-p',
- 'x-send-client-message', 'x-server-max-request-size',
- 'x-server-max-request-size', 'x-server-vendor', 'x-server-vendor',
- 'x-server-version', 'x-server-version', 'x-show-tip', 'x-show-tip',
- 'x-synchronize', 'x-synchronize', 'x-uses-old-gtk-dialog',
- 'x-window-property', 'x-window-property', 'x-wm-set-size-hint',
- 'xw-color-defined-p', 'xw-color-defined-p', 'xw-color-values',
- 'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
- 'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
- 'forward-point',
- }
-
- builtin_function_highlighted = {
- 'defvaralias', 'provide', 'require',
- 'with-no-warnings', 'define-widget', 'with-electric-help',
- 'throw', 'defalias', 'featurep'
- }
-
- lambda_list_keywords = {
- '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
- '&rest', '&whole',
- }
-
- error_keywords = {
- 'cl-assert', 'cl-check-type', 'error', 'signal',
- 'user-error', 'warn',
- }
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in EmacsLispLexer.builtin_function:
- yield index, Name.Function, value
- continue
- if value in EmacsLispLexer.special_forms:
- yield index, Keyword, value
- continue
- if value in EmacsLispLexer.error_keywords:
- yield index, Name.Exception, value
- continue
- if value in EmacsLispLexer.builtin_function_highlighted:
- yield index, Name.Builtin, value
- continue
- if value in EmacsLispLexer.macros:
- yield index, Name.Builtin, value
- continue
- if value in EmacsLispLexer.lambda_list_keywords:
- yield index, Keyword.Pseudo, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- default('body'),
- ],
- 'body': [
- # whitespace
- (r'\s+', Whitespace),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # strings and characters
- (r'"', String, 'string'),
- (r'\?([^\\]|\\.)', String.Char),
- # quoting
- (r":" + symbol, Name.Builtin),
- (r"::" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
- terminated, Number.Float),
-
- # vectors
- (r'\[|\]', Punctuation),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read syntax for char tables
- (r'#\^\^?', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
-
- # octal rational
- (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.|:)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
- (r'\*' + symbol + r'\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'#\(', Operator, 'body'),
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- 'string': [
- (r'[^"\\`]+', String),
- (r'`%s\'' % symbol, String.Symbol),
- (r'`', String),
- (r'\\.', String),
- (r'\\\n', String),
- (r'"', String, '#pop'),
- ],
- }
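The get_tokens_unprocessed override above is the interesting part of this lexer: it post-processes the output of the regex rules and promotes plain Name.Variable tokens to Name.Function, Keyword, Name.Builtin, Name.Exception or Keyword.Pseudo according to the word sets. A minimal, illustrative sketch of exercising that pass through the public Pygments API; the sample input and the snippet are assumptions added for illustration, not part of the deleted file:

from pygments.lexers import EmacsLispLexer

# Run a tiny Emacs Lisp form through the lexer and inspect which token
# types the symbol names receive after the reclassification pass.
sample = "(defvaralias 'foo 'bar)\n(nconc xs ys)"
for token_type, value in EmacsLispLexer().get_tokens(sample):
    if not value.isspace():
        print(token_type, repr(value))

Names found in builtin_function come out as Name.Function, names from builtin_function_highlighted as Name.Builtin (unless they also appear in builtin_function, which is checked first), and unknown symbols stay Name.Variable.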
-
-
-class ShenLexer(RegexLexer):
- """
- Lexer for Shen source code.
-
- .. versionadded:: 2.1
- """
- name = 'Shen'
- url = 'http://shenlanguage.org/'
- aliases = ['shen']
- filenames = ['*.shen']
- mimetypes = ['text/x-shen', 'application/x-shen']
-
- DECLARATIONS = (
- 'datatype', 'define', 'defmacro', 'defprolog', 'defcc',
- 'synonyms', 'declare', 'package', 'type', 'function',
- )
-
- SPECIAL_FORMS = (
- 'lambda', 'get', 'let', 'if', 'cases', 'cond', 'put', 'time', 'freeze',
- 'value', 'load', '$', 'protect', 'or', 'and', 'not', 'do', 'output',
- 'prolog?', 'trap-error', 'error', 'make-string', '/.', 'set', '@p',
- '@s', '@v',
- )
-
- BUILTINS = (
- '==', '=', '*', '+', '-', '/', '<', '>', '>=', '<=', '<-address',
- '<-vector', 'abort', 'absvector', 'absvector?', 'address->', 'adjoin',
- 'append', 'arity', 'assoc', 'bind', 'boolean?', 'bound?', 'call', 'cd',
- 'close', 'cn', 'compile', 'concat', 'cons', 'cons?', 'cut', 'destroy',
- 'difference', 'element?', 'empty?', 'enable-type-theory',
- 'error-to-string', 'eval', 'eval-kl', 'exception', 'explode', 'external',
- 'fail', 'fail-if', 'file', 'findall', 'fix', 'fst', 'fwhen', 'gensym',
- 'get-time', 'hash', 'hd', 'hdstr', 'hdv', 'head', 'identical',
- 'implementation', 'in', 'include', 'include-all-but', 'inferences',
- 'input', 'input+', 'integer?', 'intern', 'intersection', 'is', 'kill',
- 'language', 'length', 'limit', 'lineread', 'loaded', 'macro', 'macroexpand',
- 'map', 'mapcan', 'maxinferences', 'mode', 'n->string', 'nl', 'nth', 'null',
- 'number?', 'occurrences', 'occurs-check', 'open', 'os', 'out', 'port',
- 'porters', 'pos', 'pr', 'preclude', 'preclude-all-but', 'print', 'profile',
- 'profile-results', 'ps', 'quit', 'read', 'read+', 'read-byte', 'read-file',
- 'read-file-as-bytelist', 'read-file-as-string', 'read-from-string',
- 'release', 'remove', 'return', 'reverse', 'run', 'save', 'set',
- 'simple-error', 'snd', 'specialise', 'spy', 'step', 'stinput', 'stoutput',
- 'str', 'string->n', 'string->symbol', 'string?', 'subst', 'symbol?',
- 'systemf', 'tail', 'tc', 'tc?', 'thaw', 'tl', 'tlstr', 'tlv', 'track',
- 'tuple?', 'undefmacro', 'unify', 'unify!', 'union', 'unprofile',
- 'unspecialise', 'untrack', 'variable?', 'vector', 'vector->', 'vector?',
- 'verified', 'version', 'warn', 'when', 'write-byte', 'write-to-file',
- 'y-or-n?',
- )
-
- BUILTINS_ANYWHERE = ('where', 'skip', '>>', '_', '!', '<e>', '<!>')
-
- MAPPINGS = {s: Keyword for s in DECLARATIONS}
- MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
- MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
-
- valid_symbol_chars = r'[\w!$%*+,<=>?/.\'@&#:-]'
- valid_name = '%s+' % valid_symbol_chars
- symbol_name = r'[a-z!$%%*+,<=>?/.\'@&#_-]%s*' % valid_symbol_chars
- variable = r'[A-Z]%s*' % valid_symbol_chars
-
- tokens = {
- 'string': [
- (r'"', String, '#pop'),
- (r'c#\d{1,3};', String.Escape),
- (r'~[ARS%]', String.Interpol),
- (r'(?s).', String),
- ],
-
- 'root': [
- (r'(?s)\\\*.*?\*\\', Comment.Multiline), # \* ... *\
- (r'\\\\.*', Comment.Single), # \\ ...
- (r'\s+', Whitespace),
- (r'_{5,}', Punctuation),
- (r'={5,}', Punctuation),
- (r'(;|:=|\||--?>|<--?)', Punctuation),
- (r'(:-|:|\{|\})', Literal),
- (r'[+-]*\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'[+-]*\d+', Number.Integer),
- (r'"', String, 'string'),
- (variable, Name.Variable),
- (r'(true|false|<>|\[\])', Keyword.Pseudo),
- (symbol_name, Literal),
- (r'(\[|\]|\(|\))', Punctuation),
- ],
- }
-
- def get_tokens_unprocessed(self, text):
- tokens = RegexLexer.get_tokens_unprocessed(self, text)
- tokens = self._process_symbols(tokens)
- tokens = self._process_declarations(tokens)
- return tokens
-
- def _relevant(self, token):
- return token not in (Text, Whitespace, Comment.Single, Comment.Multiline)
-
- def _process_declarations(self, tokens):
- opening_paren = False
- for index, token, value in tokens:
- yield index, token, value
- if self._relevant(token):
- if opening_paren and token == Keyword and value in self.DECLARATIONS:
- declaration = value
- yield from self._process_declaration(declaration, tokens)
- opening_paren = value == '(' and token == Punctuation
-
- def _process_symbols(self, tokens):
- opening_paren = False
- for index, token, value in tokens:
- if opening_paren and token in (Literal, Name.Variable):
- token = self.MAPPINGS.get(value, Name.Function)
- elif token == Literal and value in self.BUILTINS_ANYWHERE:
- token = Name.Builtin
- opening_paren = value == '(' and token == Punctuation
- yield index, token, value
-
- def _process_declaration(self, declaration, tokens):
- for index, token, value in tokens:
- if self._relevant(token):
- break
- yield index, token, value
-
- if declaration == 'datatype':
- prev_was_colon = False
- token = Keyword.Type if token == Literal else token
- yield index, token, value
- for index, token, value in tokens:
- if prev_was_colon and token == Literal:
- token = Keyword.Type
- yield index, token, value
- if self._relevant(token):
- prev_was_colon = token == Literal and value == ':'
- elif declaration == 'package':
- token = Name.Namespace if token == Literal else token
- yield index, token, value
- elif declaration == 'define':
- token = Name.Function if token == Literal else token
- yield index, token, value
- for index, token, value in tokens:
- if self._relevant(token):
- break
- yield index, token, value
- if value == '{' and token == Literal:
- yield index, Punctuation, value
- for index, token, value in self._process_signature(tokens):
- yield index, token, value
- else:
- yield index, token, value
- else:
- token = Name.Function if token == Literal else token
- yield index, token, value
-
- return
-
- def _process_signature(self, tokens):
- for index, token, value in tokens:
- if token == Literal and value == '}':
- yield index, Punctuation, value
- return
- elif token in (Literal, Name.Function):
- token = Name.Variable if value.istitle() else Keyword.Type
- yield index, token, value
-
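ShenLexer layers two generator passes over RegexLexer.get_tokens_unprocessed: _process_symbols maps the symbol that follows an opening paren onto Keyword / Name.Builtin / Name.Function, and _process_declarations then re-types the names introduced by define, datatype, package and the other declarations. A small check of that pipeline; the sample program is invented for illustration and the expectations are hedged:

from pygments.lexers import ShenLexer
from pygments.token import Keyword, Name

code = "(define double\n  X -> (+ X X))"
for tok, value in ShenLexer().get_tokens(code):
    if tok in (Keyword, Name.Function, Name.Variable, Name.Builtin):
        print(tok, value)

# Expected, roughly: 'define' as Keyword, 'double' as Name.Function
# (assigned by _process_declaration), 'X' as Name.Variable and
# '+' as Name.Builtin.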
-
-class CPSALexer(RegexLexer):
- """
- A lexer for the CPSA language, as of CPSA version 2.2.12.

-
- .. versionadded:: 2.1
- """
- name = 'CPSA'
- aliases = ['cpsa']
- filenames = ['*.cpsa']
- mimetypes = []
-
- # list of known keywords and builtins taken from vim 6.4 scheme.vim
- # syntax file.
- _keywords = (
- 'herald', 'vars', 'defmacro', 'include', 'defprotocol', 'defrole',
- 'defskeleton', 'defstrand', 'deflistener', 'non-orig', 'uniq-orig',
- 'pen-non-orig', 'precedes', 'trace', 'send', 'recv', 'name', 'text',
- 'skey', 'akey', 'data', 'mesg',
- )
- _builtins = (
- 'cat', 'enc', 'hash', 'privk', 'pubk', 'invk', 'ltk', 'gen', 'exp',
- )
-
- # valid names for identifiers
- # names simply cannot consist entirely of numbers,
- # but this should be good enough for now
- valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'\s+', Whitespace),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- # support for uncommon kinds of numbers -
- # have to figure out what the characters mean
- # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
- # strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
-
- # constants
- (r'(#t|#f)', Name.Constant),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- (words(_keywords, suffix=r'\b'), Keyword),
-
- # first variable in a quoted form like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable),
- (r"(?<=#\()" + valid_name, Name.Variable),
-
- # highlight the builtins
- (words(_builtins, prefix=r'(?<=\()', suffix=r'\b'), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- (r'(\[|\])', Punctuation),
- ],
- }
-
-
-class XtlangLexer(RegexLexer):
- """An xtlang lexer for the Extempore programming environment.
-
- This is a mixture of Scheme and xtlang, really. Keyword lists are
- taken from the Extempore Emacs mode
- (https://github.com/extemporelang/extempore-emacs-mode)
-
- .. versionadded:: 2.2
- """
- name = 'xtlang'
- url = 'http://extempore.moso.com.au'
- aliases = ['extempore']
- filenames = ['*.xtm']
- mimetypes = []
-
- common_keywords = (
- 'lambda', 'define', 'if', 'else', 'cond', 'and',
- 'or', 'let', 'begin', 'set!', 'map', 'for-each',
- )
- scheme_keywords = (
- 'do', 'delay', 'quasiquote', 'unquote', 'unquote-splicing', 'eval',
- 'case', 'let*', 'letrec', 'quote',
- )
- xtlang_bind_keywords = (
- 'bind-func', 'bind-val', 'bind-lib', 'bind-type', 'bind-alias',
- 'bind-poly', 'bind-dylib', 'bind-lib-func', 'bind-lib-val',
- )
- xtlang_keywords = (
- 'letz', 'memzone', 'cast', 'convert', 'dotimes', 'doloop',
- )
- common_functions = (
- '*', '+', '-', '/', '<', '<=', '=', '>', '>=', '%', 'abs', 'acos',
- 'angle', 'append', 'apply', 'asin', 'assoc', 'assq', 'assv',
- 'atan', 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar',
- 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr', 'cadar',
- 'caddar', 'cadddr', 'caddr', 'cadr', 'car', 'cdaaar',
- 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr',
- 'cddr', 'cdr', 'ceiling', 'cons', 'cos', 'floor', 'length',
- 'list', 'log', 'max', 'member', 'min', 'modulo', 'not',
- 'reverse', 'round', 'sin', 'sqrt', 'substring', 'tan',
- 'println', 'random', 'null?', 'callback', 'now',
- )
- scheme_functions = (
- 'call-with-current-continuation', 'call-with-input-file',
- 'call-with-output-file', 'call-with-values', 'call/cc',
- 'char->integer', 'char-alphabetic?', 'char-ci<=?', 'char-ci<?',
- 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
- 'char-lower-case?', 'char-numeric?', 'char-ready?',
- 'char-upcase', 'char-upper-case?', 'char-whitespace?',
- 'char<=?', 'char<?', 'char=?', 'char>=?', 'char>?', 'char?',
- 'close-input-port', 'close-output-port', 'complex?',
- 'current-input-port', 'current-output-port', 'denominator',
- 'display', 'dynamic-wind', 'eof-object?', 'eq?', 'equal?',
- 'eqv?', 'even?', 'exact->inexact', 'exact?', 'exp', 'expt',
- 'force', 'gcd', 'imag-part', 'inexact->exact', 'inexact?',
- 'input-port?', 'integer->char', 'integer?',
- 'interaction-environment', 'lcm', 'list->string',
- 'list->vector', 'list-ref', 'list-tail', 'list?', 'load',
- 'magnitude', 'make-polar', 'make-rectangular', 'make-string',
- 'make-vector', 'memq', 'memv', 'negative?', 'newline',
- 'null-environment', 'number->string', 'number?',
- 'numerator', 'odd?', 'open-input-file', 'open-output-file',
- 'output-port?', 'pair?', 'peek-char', 'port?', 'positive?',
- 'procedure?', 'quotient', 'rational?', 'rationalize', 'read',
- 'read-char', 'real-part', 'real?',
- 'remainder', 'scheme-report-environment', 'set-car!', 'set-cdr!',
- 'string', 'string->list', 'string->number', 'string->symbol',
- 'string-append', 'string-ci<=?', 'string-ci<?', 'string-ci=?',
- 'string-ci>=?', 'string-ci>?', 'string-copy', 'string-fill!',
- 'string-length', 'string-ref', 'string-set!', 'string<=?',
- 'string<?', 'string=?', 'string>=?', 'string>?', 'string?',
- 'symbol->string', 'symbol?', 'transcript-off', 'transcript-on',
- 'truncate', 'values', 'vector', 'vector->list', 'vector-fill!',
- 'vector-length', 'vector?',
- 'with-input-from-file', 'with-output-to-file', 'write',
- 'write-char', 'zero?',
- )
- xtlang_functions = (
- 'toString', 'afill!', 'pfill!', 'tfill!', 'tbind', 'vfill!',
- 'array-fill!', 'pointer-fill!', 'tuple-fill!', 'vector-fill!', 'free',
- 'array', 'tuple', 'list', '~', 'cset!', 'cref', '&', 'bor',
- 'ang-names', '<<', '>>', 'nil', 'printf', 'sprintf', 'null', 'now',
- 'pset!', 'pref-ptr', 'vset!', 'vref', 'aset!', 'aref', 'aref-ptr',
- 'tset!', 'tref', 'tref-ptr', 'salloc', 'halloc', 'zalloc', 'alloc',
- 'schedule', 'exp', 'log', 'sin', 'cos', 'tan', 'asin', 'acos', 'atan',
- 'sqrt', 'expt', 'floor', 'ceiling', 'truncate', 'round',
- 'llvm_printf', 'push_zone', 'pop_zone', 'memzone', 'callback',
- 'llvm_sprintf', 'make-array', 'array-set!', 'array-ref',
- 'array-ref-ptr', 'pointer-set!', 'pointer-ref', 'pointer-ref-ptr',
- 'stack-alloc', 'heap-alloc', 'zone-alloc', 'make-tuple', 'tuple-set!',
- 'tuple-ref', 'tuple-ref-ptr', 'closure-set!', 'closure-ref', 'pref',
- 'pdref', 'impc_null', 'bitcast', 'void', 'ifret', 'ret->', 'clrun->',
- 'make-env-zone', 'make-env', '<>', 'dtof', 'ftod', 'i1tof',
- 'i1tod', 'i1toi8', 'i1toi32', 'i1toi64', 'i8tof', 'i8tod',
- 'i8toi1', 'i8toi32', 'i8toi64', 'i32tof', 'i32tod', 'i32toi1',
- 'i32toi8', 'i32toi64', 'i64tof', 'i64tod', 'i64toi1',
- 'i64toi8', 'i64toi32',
- )
-
- # valid names for Scheme identifiers (names cannot consist fully
- # of numbers, but this should be good enough for now)
- valid_scheme_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
-
- # valid characters in xtlang names & types
- valid_xtlang_name = r'[\w.!-]+'
- valid_xtlang_type = r'[]{}[\w<>,*/|!-]+'
-
- tokens = {
- # keep track of when we're exiting the xtlang form
- 'xtlang': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
-
- (r'(?<=bind-func\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-val\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-type\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-alias\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-poly\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-lib\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-dylib\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-lib-func\s)' + valid_xtlang_name, Name.Function),
- (r'(?<=bind-lib-val\s)' + valid_xtlang_name, Name.Function),
-
- # type annotations
- (r':' + valid_xtlang_type, Keyword.Type),
-
- # types
- (r'(<' + valid_xtlang_type + r'>|\|' + valid_xtlang_type + r'\||/' +
- valid_xtlang_type + r'/|' + valid_xtlang_type + r'\*)\**',
- Keyword.Type),
-
- # keywords
- (words(xtlang_keywords, prefix=r'(?<=\()'), Keyword),
-
- # builtins
- (words(xtlang_functions, prefix=r'(?<=\()'), Name.Function),
-
- include('common'),
-
- # variables
- (valid_xtlang_name, Name.Variable),
- ],
- 'scheme': [
- # quoted symbols
- (r"'" + valid_scheme_name, String.Symbol),
-
- # char literals
- (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # keywords
- (words(scheme_keywords, prefix=r'(?<=\()'), Keyword),
-
- # builtins
- (words(scheme_functions, prefix=r'(?<=\()'), Name.Function),
-
- include('common'),
-
- # variables
- (valid_scheme_name, Name.Variable),
- ],
- # common to both xtlang and Scheme
- 'common': [
- # comments
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'\s+', Whitespace),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
-
- # binary/oct/hex literals
- (r'(#b|#o|#x)[\d.]+', Number),
-
- # strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-
- # true/false constants
- (r'(#t|#f)', Name.Constant),
-
- # keywords
- (words(common_keywords, prefix=r'(?<=\()'), Keyword),
-
- # builtins
- (words(common_functions, prefix=r'(?<=\()'), Name.Function),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- ],
- 'root': [
- # go into xtlang mode
- (words(xtlang_bind_keywords, prefix=r'(?<=\()', suffix=r'\b'),
- Keyword, 'xtlang'),
-
- include('scheme')
- ],
- }
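The root state of XtlangLexer hands ordinary forms to the 'scheme' rules and only enters the 'xtlang' state after one of the bind-* keywords; inside that state, parentheses push and pop it again, so the lexer falls back to the Scheme rules once the bind form closes. A rough usage sketch, using the alias declared above; the sample program is made up for illustration:

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('extempore')   # alias declared by XtlangLexer
code = '(bind-func my_add\n  (lambda (a:i64 b:i64) (+ a b)))'
for tok, value in lexer.get_tokens(code):
    print(tok, repr(value))

# 'bind-func' should come out as Keyword and push the xtlang state,
# 'my_add' as Name.Function via the (?<=bind-func\s) rule, and the
# ':i64' annotations as Keyword.Type.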
-
-
-class FennelLexer(RegexLexer):
- """A lexer for the Fennel programming language.
-
- Fennel compiles to Lua, so all the Lua builtins are recognized as well
- as the special forms that are particular to the Fennel compiler.
-
- .. versionadded:: 2.3
- """
- name = 'Fennel'
- url = 'https://fennel-lang.org'
- aliases = ['fennel', 'fnl']
- filenames = ['*.fnl']
-
- # this list is current as of Fennel version 0.10.0.
- special_forms = (
- '#', '%', '*', '+', '-', '->', '->>', '-?>', '-?>>', '.', '..',
- '/', '//', ':', '<', '<=', '=', '>', '>=', '?.', '^', 'accumulate',
- 'and', 'band', 'bnot', 'bor', 'bxor', 'collect', 'comment', 'do', 'doc',
- 'doto', 'each', 'eval-compiler', 'for', 'hashfn', 'icollect', 'if',
- 'import-macros', 'include', 'length', 'let', 'lshift', 'lua',
- 'macrodebug', 'match', 'not', 'not=', 'or', 'partial', 'pick-args',
- 'pick-values', 'quote', 'require-macros', 'rshift', 'set',
- 'set-forcibly!', 'tset', 'values', 'when', 'while', 'with-open', '~='
- )
-
- declarations = (
- 'fn', 'global', 'lambda', 'local', 'macro', 'macros', 'var', 'λ'
- )
-
- builtins = (
- '_G', '_VERSION', 'arg', 'assert', 'bit32', 'collectgarbage',
- 'coroutine', 'debug', 'dofile', 'error', 'getfenv',
- 'getmetatable', 'io', 'ipairs', 'load', 'loadfile', 'loadstring',
- 'math', 'next', 'os', 'package', 'pairs', 'pcall', 'print',
- 'rawequal', 'rawget', 'rawlen', 'rawset', 'require', 'select',
- 'setfenv', 'setmetatable', 'string', 'table', 'tonumber',
- 'tostring', 'type', 'unpack', 'xpcall'
- )
-
-    # based on the Scheme definition, but disallowing leading digits,
-    # commas, and the @ character.
- valid_name = r'[a-zA-Z_!$%&*+/:<=>?^~|-][\w!$%&*+/:<=>?^~|\.-]*'
-
- tokens = {
- 'root': [
- # the only comment form is a semicolon; goes to the end of the line
- (r';.*$', Comment.Single),
-
- (r',+', Text),
- (r'\s+', Whitespace),
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
-
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-
- (r'(true|false|nil)', Name.Constant),
-
- # these are technically strings, but it's worth visually
- # distinguishing them because their intent is different
- # from regular strings.
- (r':' + valid_name, String.Symbol),
-
- # special forms are keywords
- (words(special_forms, suffix=' '), Keyword),
- # these are ... even more special!
- (words(declarations, suffix=' '), Keyword.Declaration),
- # lua standard library are builtins
- (words(builtins, suffix=' '), Name.Builtin),
- # special-case the vararg symbol
- (r'\.\.\.', Name.Variable),
- # regular identifiers
- (valid_name, Name.Variable),
-
- # all your normal paired delimiters for your programming enjoyment
- (r'(\(|\))', Punctuation),
- (r'(\[|\])', Punctuation),
- (r'(\{|\})', Punctuation),
-
- # the # symbol is shorthand for a lambda function
- (r'#', Punctuation),
- ]
- }
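Like the other lexers in this file, the Fennel lexer is reachable through the ordinary Pygments entry points. A minimal usage sketch; the sample snippet and the formatter choice are illustrative only:

from pygments import highlight
from pygments.lexers import FennelLexer
from pygments.formatters import TerminalFormatter

src = '(fn greet [name] (print (.. "hello " name)))'
print(highlight(src, FennelLexer(), TerminalFormatter()))

Note that the special forms, declarations and builtins above are matched with suffix=' ', so they are only highlighted when immediately followed by a space.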
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/macaulay2.py b/venv/lib/python3.11/site-packages/pygments/lexers/macaulay2.py
deleted file mode 100644
index a624890..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/macaulay2.py
+++ /dev/null
@@ -1,1755 +0,0 @@
-"""
- pygments.lexers.macaulay2
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Macaulay2.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Keyword, Name, String, Text
-
-__all__ = ['Macaulay2Lexer']
-
-# Auto-generated for Macaulay2-1.22. Do not modify this file manually.
-
-M2KEYWORDS = (
- "and",
- "break",
- "catch",
- "continue",
- "do",
- "elapsedTime",
- "elapsedTiming",
- "else",
- "for",
- "from",
- "global",
- "if",
- "in",
- "list",
- "local",
- "new",
- "not",
- "of",
- "or",
- "return",
- "shield",
- "SPACE",
- "step",
- "symbol",
- "then",
- "threadVariable",
- "throw",
- "time",
- "timing",
- "to",
- "try",
- "when",
- "while",
- "xor"
- )
-
-M2DATATYPES = (
- "Adjacent",
- "AffineVariety",
- "Analyzer",
- "ANCHOR",
- "AngleBarList",
- "Array",
- "AssociativeExpression",
- "Bag",
- "BasicList",
- "BettiTally",
- "BinaryOperation",
- "BLOCKQUOTE",
- "BODY",
- "BOLD",
- "Boolean",
- "BR",
- "BUTTON",
- "CacheFunction",
- "CacheTable",
- "CC",
- "CDATA",
- "ChainComplex",
- "ChainComplexMap",
- "CODE",
- "CoherentSheaf",
- "Command",
- "COMMENT",
- "CompiledFunction",
- "CompiledFunctionBody",
- "CompiledFunctionClosure",
- "ComplexField",
- "Constant",
- "Database",
- "DD",
- "Descent",
- "Describe",
- "Dictionary",
- "DirectSum",
- "DIV",
- "Divide",
- "DL",
- "DocumentTag",
- "DT",
- "Eliminate",
- "EM",
- "EngineRing",
- "Equation",
- "ExampleItem",
- "Expression",
- "File",
- "FilePosition",
- "FractionField",
- "Function",
- "FunctionApplication",
- "FunctionBody",
- "FunctionClosure",
- "GaloisField",
- "GeneralOrderedMonoid",
- "GlobalDictionary",
- "GradedModule",
- "GradedModuleMap",
- "GroebnerBasis",
- "GroebnerBasisOptions",
- "HashTable",
- "HEAD",
- "HEADER1",
- "HEADER2",
- "HEADER3",
- "HEADER4",
- "HEADER5",
- "HEADER6",
- "HeaderType",
- "Holder",
- "HR",
- "HREF",
- "HTML",
- "Hybrid",
- "Hypertext",
- "HypertextContainer",
- "HypertextParagraph",
- "HypertextVoid",
- "Ideal",
- "IMG",
- "ImmutableType",
- "INDENT",
- "IndeterminateNumber",
- "IndexedVariable",
- "IndexedVariableTable",
- "InexactField",
- "InexactFieldFamily",
- "InexactNumber",
- "InfiniteNumber",
- "INPUT",
- "IntermediateMarkUpType",
- "ITALIC",
- "Iterator",
- "KBD",
- "Keyword",
- "LABEL",
- "LATER",
- "LI",
- "LINK",
- "List",
- "LITERAL",
- "LocalDictionary",
- "LowerBound",
- "Manipulator",
- "MapExpression",
- "MarkUpType",
- "Matrix",
- "MatrixExpression",
- "MENU",
- "META",
- "MethodFunction",
- "MethodFunctionBinary",
- "MethodFunctionSingle",
- "MethodFunctionWithOptions",
- "Minus",
- "Module",
- "Monoid",
- "MonoidElement",
- "MonomialIdeal",
- "MultigradedBettiTally",
- "MutableHashTable",
- "MutableList",
- "MutableMatrix",
- "Net",
- "NetFile",
- "Nothing",
- "Number",
- "NumberedVerticalList",
- "OL",
- "OneExpression",
- "Option",
- "OptionTable",
- "OrderedMonoid",
- "Package",
- "PARA",
- "Parenthesize",
- "Parser",
- "Partition",
- "PolynomialRing",
- "Power",
- "PRE",
- "Product",
- "ProductOrder",
- "Program",
- "ProgramRun",
- "ProjectiveHilbertPolynomial",
- "ProjectiveVariety",
- "Pseudocode",
- "QQ",
- "QuotientRing",
- "RealField",
- "Resolution",
- "Ring",
- "RingElement",
- "RingFamily",
- "RingMap",
- "RowExpression",
- "RR",
- "RRi",
- "SAMP",
- "SCRIPT",
- "ScriptedFunctor",
- "SelfInitializingType",
- "Sequence",
- "Set",
- "SheafExpression",
- "SheafOfRings",
- "SMALL",
- "SPAN",
- "SparseMonomialVectorExpression",
- "SparseVectorExpression",
- "String",
- "STRONG",
- "STYLE",
- "SUB",
- "Subscript",
- "SUBSECTION",
- "Sum",
- "SumOfTwists",
- "SUP",
- "Superscript",
- "Symbol",
- "SymbolBody",
- "TABLE",
- "Table",
- "Tally",
- "Task",
- "TD",
- "TensorProduct",
- "TestInput",
- "TEX",
- "TH",
- "Thing",
- "Time",
- "TITLE",
- "TO",
- "TO2",
- "TOH",
- "TR",
- "TT",
- "Type",
- "UL",
- "URL",
- "VAR",
- "Variety",
- "Vector",
- "VectorExpression",
- "VerticalList",
- "VirtualTally",
- "VisibleList",
- "WrapperType",
- "ZeroExpression",
- "ZZ"
- )
-
-M2FUNCTIONS = (
- "about",
- "abs",
- "accumulate",
- "acos",
- "acosh",
- "acot",
- "acoth",
- "addCancelTask",
- "addDependencyTask",
- "addEndFunction",
- "addHook",
- "addStartFunction",
- "addStartTask",
- "adjoint",
- "agm",
- "alarm",
- "all",
- "ambient",
- "analyticSpread",
- "ancestor",
- "ancestors",
- "andP",
- "ann",
- "annihilator",
- "antipode",
- "any",
- "append",
- "applicationDirectory",
- "apply",
- "applyKeys",
- "applyPairs",
- "applyTable",
- "applyValues",
- "apropos",
- "arXiv",
- "ascii",
- "asin",
- "asinh",
- "ass",
- "assert",
- "associatedGradedRing",
- "associatedPrimes",
- "atan",
- "atan2",
- "atanh",
- "atEndOfFile",
- "autoload",
- "baseFilename",
- "baseName",
- "baseRing",
- "basis",
- "beginDocumentation",
- "benchmark",
- "BesselJ",
- "BesselY",
- "Beta",
- "betti",
- "between",
- "binomial",
- "borel",
- "cacheValue",
- "cancelTask",
- "capture",
- "ceiling",
- "centerString",
- "chainComplex",
- "changeBase",
- "char",
- "characters",
- "charAnalyzer",
- "check",
- "checkDegrees",
- "chi",
- "class",
- "clean",
- "clearEcho",
- "code",
- "codim",
- "coefficient",
- "coefficientRing",
- "coefficients",
- "cohomology",
- "coimage",
- "coker",
- "cokernel",
- "collectGarbage",
- "columnAdd",
- "columnate",
- "columnMult",
- "columnPermute",
- "columnRankProfile",
- "columnSwap",
- "combine",
- "commandInterpreter",
- "commonest",
- "commonRing",
- "comodule",
- "complement",
- "complete",
- "components",
- "compose",
- "compositions",
- "compress",
- "concatenate",
- "conductor",
- "cone",
- "conjugate",
- "connectionCount",
- "constParser",
- "content",
- "contract",
- "conwayPolynomial",
- "copy",
- "copyDirectory",
- "copyFile",
- "cos",
- "cosh",
- "cot",
- "cotangentSheaf",
- "coth",
- "cover",
- "coverMap",
- "cpuTime",
- "createTask",
- "csc",
- "csch",
- "currentColumnNumber",
- "currentDirectory",
- "currentPosition",
- "currentRowNumber",
- "currentTime",
- "deadParser",
- "debug",
- "debugError",
- "decompose",
- "deepSplice",
- "default",
- "degree",
- "degreeGroup",
- "degreeLength",
- "degrees",
- "degreesMonoid",
- "degreesRing",
- "delete",
- "demark",
- "denominator",
- "depth",
- "describe",
- "det",
- "determinant",
- "diagonalMatrix",
- "diameter",
- "dictionary",
- "diff",
- "difference",
- "Digamma",
- "dim",
- "directSum",
- "disassemble",
- "discriminant",
- "dismiss",
- "distinguished",
- "divideByVariable",
- "doc",
- "document",
- "drop",
- "dual",
- "eagonNorthcott",
- "echoOff",
- "echoOn",
- "eigenvalues",
- "eigenvectors",
- "eint",
- "elements",
- "eliminate",
- "End",
- "endPackage",
- "entries",
- "erase",
- "erf",
- "erfc",
- "error",
- "euler",
- "eulers",
- "even",
- "EXAMPLE",
- "examples",
- "exec",
- "exp",
- "expectedReesIdeal",
- "expm1",
- "exponents",
- "export",
- "exportFrom",
- "exportMutable",
- "expression",
- "extend",
- "exteriorPower",
- "factor",
- "Fano",
- "fileExecutable",
- "fileExists",
- "fileLength",
- "fileMode",
- "fileReadable",
- "fileTime",
- "fileWritable",
- "fillMatrix",
- "findFiles",
- "findHeft",
- "findProgram",
- "findSynonyms",
- "first",
- "firstkey",
- "fittingIdeal",
- "flagLookup",
- "flatten",
- "flattenRing",
- "flip",
- "floor",
- "fold",
- "forceGB",
- "fork",
- "format",
- "formation",
- "frac",
- "fraction",
- "frames",
- "fromDividedPowers",
- "fromDual",
- "functionBody",
- "futureParser",
- "Gamma",
- "gb",
- "gbRemove",
- "gbSnapshot",
- "gcd",
- "gcdCoefficients",
- "gcdLLL",
- "GCstats",
- "genera",
- "generateAssertions",
- "generator",
- "generators",
- "genericMatrix",
- "genericSkewMatrix",
- "genericSymmetricMatrix",
- "gens",
- "genus",
- "get",
- "getc",
- "getChangeMatrix",
- "getenv",
- "getGlobalSymbol",
- "getNetFile",
- "getNonUnit",
- "getPrimeWithRootOfUnity",
- "getSymbol",
- "getWWW",
- "GF",
- "globalAssign",
- "globalAssignFunction",
- "globalAssignment",
- "globalReleaseFunction",
- "gradedModule",
- "gradedModuleMap",
- "gramm",
- "graphIdeal",
- "graphRing",
- "Grassmannian",
- "groebnerBasis",
- "groupID",
- "hash",
- "hashTable",
- "heft",
- "height",
- "hermite",
- "hilbertFunction",
- "hilbertPolynomial",
- "hilbertSeries",
- "hold",
- "Hom",
- "homogenize",
- "homology",
- "homomorphism",
- "hooks",
- "horizontalJoin",
- "html",
- "httpHeaders",
- "hypertext",
- "icFracP",
- "icFractions",
- "icMap",
- "icPIdeal",
- "ideal",
- "idealizer",
- "identity",
- "image",
- "imaginaryPart",
- "importFrom",
- "independentSets",
- "index",
- "indices",
- "inducedMap",
- "inducesWellDefinedMap",
- "info",
- "input",
- "insert",
- "installAssignmentMethod",
- "installedPackages",
- "installHilbertFunction",
- "installMethod",
- "installMinprimes",
- "installPackage",
- "instance",
- "instances",
- "integralClosure",
- "integrate",
- "intersect",
- "intersectInP",
- "intersection",
- "interval",
- "inverse",
- "inverseErf",
- "inversePermutation",
- "inverseRegularizedBeta",
- "inverseRegularizedGamma",
- "inverseSystem",
- "irreducibleCharacteristicSeries",
- "irreducibleDecomposition",
- "isAffineRing",
- "isANumber",
- "isBorel",
- "isc",
- "isCanceled",
- "isCommutative",
- "isConstant",
- "isDirectory",
- "isDirectSum",
- "isEmpty",
- "isField",
- "isFinite",
- "isFinitePrimeField",
- "isFreeModule",
- "isGlobalSymbol",
- "isHomogeneous",
- "isIdeal",
- "isInfinite",
- "isInjective",
- "isInputFile",
- "isIsomorphic",
- "isIsomorphism",
- "isLinearType",
- "isListener",
- "isLLL",
- "isMember",
- "isModule",
- "isMonomialIdeal",
- "isMutable",
- "isNormal",
- "isOpen",
- "isOutputFile",
- "isPolynomialRing",
- "isPrimary",
- "isPrime",
- "isPrimitive",
- "isPseudoprime",
- "isQuotientModule",
- "isQuotientOf",
- "isQuotientRing",
- "isReady",
- "isReal",
- "isReduction",
- "isRegularFile",
- "isRing",
- "isSkewCommutative",
- "isSorted",
- "isSquareFree",
- "isStandardGradedPolynomialRing",
- "isSubmodule",
- "isSubquotient",
- "isSubset",
- "isSupportedInZeroLocus",
- "isSurjective",
- "isTable",
- "isUnit",
- "isWellDefined",
- "isWeylAlgebra",
- "iterator",
- "jacobian",
- "jacobianDual",
- "join",
- "ker",
- "kernel",
- "kernelLLL",
- "kernelOfLocalization",
- "keys",
- "kill",
- "koszul",
- "last",
- "lcm",
- "leadCoefficient",
- "leadComponent",
- "leadMonomial",
- "leadTerm",
- "left",
- "length",
- "letterParser",
- "lift",
- "liftable",
- "limitFiles",
- "limitProcesses",
- "lines",
- "linkFile",
- "listForm",
- "listSymbols",
- "LLL",
- "lngamma",
- "load",
- "loadPackage",
- "localDictionaries",
- "localize",
- "locate",
- "log",
- "log1p",
- "lookup",
- "lookupCount",
- "LUdecomposition",
- "M2CODE",
- "makeDirectory",
- "makeDocumentTag",
- "makePackageIndex",
- "makeS2",
- "map",
- "markedGB",
- "match",
- "mathML",
- "matrix",
- "max",
- "maxPosition",
- "member",
- "memoize",
- "memoizeClear",
- "memoizeValues",
- "merge",
- "mergePairs",
- "method",
- "methodOptions",
- "methods",
- "midpoint",
- "min",
- "mingens",
- "mingle",
- "minimalBetti",
- "minimalPresentation",
- "minimalPrimes",
- "minimalReduction",
- "minimize",
- "minimizeFilename",
- "minors",
- "minPosition",
- "minPres",
- "minprimes",
- "minus",
- "mkdir",
- "mod",
- "module",
- "modulo",
- "monoid",
- "monomialCurveIdeal",
- "monomialIdeal",
- "monomials",
- "monomialSubideal",
- "moveFile",
- "multidegree",
- "multidoc",
- "multigraded",
- "multiplicity",
- "mutable",
- "mutableIdentity",
- "mutableMatrix",
- "nanosleep",
- "needs",
- "needsPackage",
- "net",
- "netList",
- "newClass",
- "newCoordinateSystem",
- "newNetFile",
- "newPackage",
- "newRing",
- "next",
- "nextkey",
- "nextPrime",
- "NNParser",
- "nonspaceAnalyzer",
- "norm",
- "normalCone",
- "notImplemented",
- "nullhomotopy",
- "nullParser",
- "nullSpace",
- "number",
- "numcols",
- "numColumns",
- "numerator",
- "numeric",
- "numericInterval",
- "numgens",
- "numRows",
- "numrows",
- "odd",
- "oeis",
- "ofClass",
- "on",
- "openDatabase",
- "openDatabaseOut",
- "openFiles",
- "openIn",
- "openInOut",
- "openListener",
- "openOut",
- "openOutAppend",
- "optionalSignParser",
- "options",
- "optP",
- "orP",
- "override",
- "pack",
- "package",
- "packageTemplate",
- "pad",
- "pager",
- "pairs",
- "parent",
- "part",
- "partition",
- "partitions",
- "parts",
- "pdim",
- "peek",
- "permanents",
- "permutations",
- "pfaffians",
- "pivots",
- "plus",
- "poincare",
- "poincareN",
- "polarize",
- "poly",
- "position",
- "positions",
- "power",
- "powermod",
- "precision",
- "preimage",
- "prepend",
- "presentation",
- "pretty",
- "primaryComponent",
- "primaryDecomposition",
- "print",
- "printerr",
- "printString",
- "processID",
- "product",
- "profile",
- "Proj",
- "projectiveHilbertPolynomial",
- "promote",
- "protect",
- "prune",
- "pseudocode",
- "pseudoRemainder",
- "pushForward",
- "QQParser",
- "QRDecomposition",
- "quotient",
- "quotientRemainder",
- "radical",
- "radicalContainment",
- "random",
- "randomKRationalPoint",
- "randomMutableMatrix",
- "rank",
- "read",
- "readDirectory",
- "readlink",
- "readPackage",
- "realPart",
- "realpath",
- "recursionDepth",
- "reducedRowEchelonForm",
- "reduceHilbert",
- "reductionNumber",
- "reesAlgebra",
- "reesAlgebraIdeal",
- "reesIdeal",
- "regex",
- "regexQuote",
- "registerFinalizer",
- "regSeqInIdeal",
- "regularity",
- "regularizedBeta",
- "regularizedGamma",
- "relations",
- "relativizeFilename",
- "remainder",
- "remove",
- "removeDirectory",
- "removeFile",
- "removeLowestDimension",
- "reorganize",
- "replace",
- "res",
- "reshape",
- "resolution",
- "resultant",
- "reverse",
- "right",
- "ring",
- "ringFromFractions",
- "roots",
- "rotate",
- "round",
- "rowAdd",
- "rowMult",
- "rowPermute",
- "rowRankProfile",
- "rowSwap",
- "rsort",
- "run",
- "runHooks",
- "runLengthEncode",
- "runProgram",
- "same",
- "saturate",
- "scan",
- "scanKeys",
- "scanLines",
- "scanPairs",
- "scanValues",
- "schedule",
- "schreyerOrder",
- "Schubert",
- "searchPath",
- "sec",
- "sech",
- "seeParsing",
- "select",
- "selectInSubring",
- "selectVariables",
- "separate",
- "separateRegexp",
- "sequence",
- "serialNumber",
- "set",
- "setEcho",
- "setGroupID",
- "setIOExclusive",
- "setIOSynchronized",
- "setIOUnSynchronized",
- "setRandomSeed",
- "setup",
- "setupEmacs",
- "sheaf",
- "sheafHom",
- "show",
- "showHtml",
- "showTex",
- "simpleDocFrob",
- "sin",
- "singularLocus",
- "sinh",
- "size",
- "size2",
- "sleep",
- "smithNormalForm",
- "solve",
- "someTerms",
- "sort",
- "sortColumns",
- "source",
- "span",
- "Spec",
- "specialFiber",
- "specialFiberIdeal",
- "splice",
- "splitWWW",
- "sqrt",
- "stack",
- "stacksProject",
- "standardForm",
- "standardPairs",
- "stashValue",
- "status",
- "style",
- "sub",
- "sublists",
- "submatrix",
- "submatrixByDegrees",
- "subquotient",
- "subsets",
- "substitute",
- "substring",
- "subtable",
- "sum",
- "super",
- "support",
- "SVD",
- "switch",
- "sylvesterMatrix",
- "symbolBody",
- "symlinkDirectory",
- "symlinkFile",
- "symmetricAlgebra",
- "symmetricAlgebraIdeal",
- "symmetricKernel",
- "symmetricPower",
- "synonym",
- "SYNOPSIS",
- "syz",
- "syzygyScheme",
- "table",
- "take",
- "tally",
- "tan",
- "tangentCone",
- "tangentSheaf",
- "tanh",
- "target",
- "taskResult",
- "temporaryFileName",
- "tensor",
- "tensorAssociativity",
- "terminalParser",
- "terms",
- "TEST",
- "testHunekeQuestion",
- "tests",
- "tex",
- "texMath",
- "times",
- "toAbsolutePath",
- "toCC",
- "toDividedPowers",
- "toDual",
- "toExternalString",
- "toField",
- "toList",
- "toLower",
- "top",
- "topCoefficients",
- "topComponents",
- "toRR",
- "toRRi",
- "toSequence",
- "toString",
- "toUpper",
- "trace",
- "transpose",
- "trim",
- "truncate",
- "truncateOutput",
- "tutorial",
- "ultimate",
- "unbag",
- "uncurry",
- "undocumented",
- "uniform",
- "uninstallAllPackages",
- "uninstallPackage",
- "unique",
- "uniquePermutations",
- "unsequence",
- "unstack",
- "urlEncode",
- "use",
- "userSymbols",
- "utf8",
- "utf8check",
- "utf8substring",
- "validate",
- "value",
- "values",
- "variety",
- "vars",
- "vector",
- "versalEmbedding",
- "wait",
- "wedgeProduct",
- "weightRange",
- "whichGm",
- "width",
- "wikipedia",
- "wrap",
- "youngest",
- "zero",
- "zeta",
- "ZZParser"
- )
-
-M2CONSTANTS = (
- "AbstractToricVarieties",
- "Acknowledgement",
- "AdditionalPaths",
- "AdjointIdeal",
- "AfterEval",
- "AfterNoPrint",
- "AfterPrint",
- "AInfinity",
- "AlgebraicSplines",
- "Algorithm",
- "Alignment",
- "AllCodimensions",
- "allowableThreads",
- "AnalyzeSheafOnP1",
- "applicationDirectorySuffix",
- "argument",
- "Ascending",
- "AssociativeAlgebras",
- "Authors",
- "AuxiliaryFiles",
- "backtrace",
- "Bareiss",
- "BaseFunction",
- "baseRings",
- "BaseRow",
- "BasisElementLimit",
- "Bayer",
- "BeforePrint",
- "BeginningMacaulay2",
- "Benchmark",
- "BernsteinSato",
- "Bertini",
- "BettiCharacters",
- "BGG",
- "BIBasis",
- "Binary",
- "Binomial",
- "BinomialEdgeIdeals",
- "Binomials",
- "BKZ",
- "blockMatrixForm",
- "Body",
- "BoijSoederberg",
- "Book3264Examples",
- "BooleanGB",
- "Boxes",
- "Browse",
- "Bruns",
- "cache",
- "CacheExampleOutput",
- "CallLimit",
- "CannedExample",
- "CatalanConstant",
- "Caveat",
- "CellularResolutions",
- "Center",
- "Certification",
- "ChainComplexExtras",
- "ChainComplexOperations",
- "ChangeMatrix",
- "CharacteristicClasses",
- "CheckDocumentation",
- "Chordal",
- "Classic",
- "clearAll",
- "clearOutput",
- "close",
- "closeIn",
- "closeOut",
- "ClosestFit",
- "Code",
- "CodimensionLimit",
- "CodingTheory",
- "CoefficientRing",
- "Cofactor",
- "CohenEngine",
- "CohenTopLevel",
- "CohomCalg",
- "CoincidentRootLoci",
- "commandLine",
- "compactMatrixForm",
- "Complement",
- "CompleteIntersection",
- "CompleteIntersectionResolutions",
- "Complexes",
- "ConductorElement",
- "Configuration",
- "ConformalBlocks",
- "Consequences",
- "Constants",
- "Contributors",
- "ConvexInterface",
- "ConwayPolynomials",
- "copyright",
- "Core",
- "CorrespondenceScrolls",
- "CotangentSchubert",
- "Cremona",
- "currentFileDirectory",
- "currentFileName",
- "currentLayout",
- "currentPackage",
- "Cyclotomic",
- "Date",
- "dd",
- "DebuggingMode",
- "debuggingMode",
- "debugLevel",
- "DecomposableSparseSystems",
- "Decompose",
- "Default",
- "defaultPrecision",
- "Degree",
- "DegreeGroup",
- "DegreeLift",
- "DegreeLimit",
- "DegreeMap",
- "DegreeOrder",
- "DegreeRank",
- "Degrees",
- "Dense",
- "Density",
- "Depth",
- "Descending",
- "Description",
- "DeterminantalRepresentations",
- "DGAlgebras",
- "dictionaryPath",
- "DiffAlg",
- "Dispatch",
- "DivideConquer",
- "DividedPowers",
- "Divisor",
- "Dmodules",
- "docExample",
- "docTemplate",
- "Down",
- "Dynamic",
- "EagonResolution",
- "EdgeIdeals",
- "edit",
- "EigenSolver",
- "EisenbudHunekeVasconcelos",
- "Elimination",
- "EliminationMatrices",
- "EllipticCurves",
- "EllipticIntegrals",
- "Email",
- "end",
- "endl",
- "Engine",
- "engineDebugLevel",
- "EngineTests",
- "EnumerationCurves",
- "environment",
- "EquivariantGB",
- "errorDepth",
- "EulerConstant",
- "Example",
- "ExampleFiles",
- "ExampleSystems",
- "Exclude",
- "exit",
- "Ext",
- "ExteriorIdeals",
- "ExteriorModules",
- "false",
- "FastMinors",
- "FastNonminimal",
- "FGLM",
- "fileDictionaries",
- "fileExitHooks",
- "FileName",
- "FindOne",
- "FiniteFittingIdeals",
- "First",
- "FirstPackage",
- "FlatMonoid",
- "Flexible",
- "flush",
- "FollowLinks",
- "ForeignFunctions",
- "FormalGroupLaws",
- "Format",
- "FourierMotzkin",
- "FourTiTwo",
- "fpLLL",
- "FrobeniusThresholds",
- "FunctionFieldDesingularization",
- "GBDegrees",
- "gbTrace",
- "GenerateAssertions",
- "Generic",
- "GenericInitialIdeal",
- "GeometricDecomposability",
- "gfanInterface",
- "Givens",
- "GKMVarieties",
- "GLex",
- "Global",
- "GlobalAssignHook",
- "globalAssignmentHooks",
- "GlobalHookStore",
- "GlobalReleaseHook",
- "Gorenstein",
- "GradedLieAlgebras",
- "GraphicalModels",
- "GraphicalModelsMLE",
- "Graphics",
- "Graphs",
- "GRevLex",
- "GroebnerStrata",
- "GroebnerWalk",
- "GroupLex",
- "GroupRevLex",
- "GTZ",
- "Hadamard",
- "handleInterrupts",
- "HardDegreeLimit",
- "Heading",
- "Headline",
- "Heft",
- "Height",
- "help",
- "Hermite",
- "Hermitian",
- "HH",
- "hh",
- "HigherCIOperators",
- "HighestWeights",
- "Hilbert",
- "HodgeIntegrals",
- "HolonomicSystems",
- "homeDirectory",
- "HomePage",
- "Homogeneous",
- "Homogeneous2",
- "HomotopyLieAlgebra",
- "HorizontalSpace",
- "HyperplaneArrangements",
- "id",
- "IgnoreExampleErrors",
- "ii",
- "incomparable",
- "Increment",
- "indeterminate",
- "Index",
- "indexComponents",
- "infinity",
- "InfoDirSection",
- "infoHelp",
- "Inhomogeneous",
- "Inputs",
- "InstallPrefix",
- "IntegralClosure",
- "interpreterDepth",
- "Intersection",
- "InvariantRing",
- "InverseMethod",
- "Inverses",
- "InverseSystems",
- "Invertible",
- "InvolutiveBases",
- "Isomorphism",
- "Item",
- "Iterate",
- "Jacobian",
- "Jets",
- "Join",
- "JSON",
- "Jupyter",
- "K3Carpets",
- "K3Surfaces",
- "Keep",
- "KeepFiles",
- "KeepZeroes",
- "Key",
- "Keywords",
- "Kronecker",
- "KustinMiller",
- "lastMatch",
- "LatticePolytopes",
- "Layout",
- "Left",
- "LengthLimit",
- "Lex",
- "LexIdeals",
- "Licenses",
- "LieTypes",
- "Limit",
- "Linear",
- "LinearAlgebra",
- "LinearTruncations",
- "lineNumber",
- "listLocalSymbols",
- "listUserSymbols",
- "LLLBases",
- "loadDepth",
- "LoadDocumentation",
- "loadedFiles",
- "loadedPackages",
- "Local",
- "LocalRings",
- "LongPolynomial",
- "M0nbar",
- "Macaulay2Doc",
- "MakeDocumentation",
- "MakeHTML",
- "MakeInfo",
- "MakeLinks",
- "MakePDF",
- "MapleInterface",
- "Markov",
- "MatchingFields",
- "Matroids",
- "maxAllowableThreads",
- "maxExponent",
- "MaximalRank",
- "MaxReductionCount",
- "MCMApproximations",
- "MergeTeX",
- "minExponent",
- "MinimalGenerators",
- "MinimalMatrix",
- "minimalPresentationMap",
- "minimalPresentationMapInv",
- "MinimalPrimes",
- "Minimize",
- "MinimumVersion",
- "Miura",
- "MixedMultiplicity",
- "ModuleDeformations",
- "MonodromySolver",
- "Monomial",
- "MonomialAlgebras",
- "MonomialIntegerPrograms",
- "MonomialOrbits",
- "MonomialOrder",
- "Monomials",
- "MonomialSize",
- "MultiGradedRationalMap",
- "MultiplicitySequence",
- "MultiplierIdeals",
- "MultiplierIdealsDim2",
- "MultiprojectiveVarieties",
- "NAGtypes",
- "Name",
- "Nauty",
- "NautyGraphs",
- "NCAlgebra",
- "NCLex",
- "NewFromMethod",
- "newline",
- "NewMethod",
- "NewOfFromMethod",
- "NewOfMethod",
- "nil",
- "Node",
- "NoetherianOperators",
- "NoetherNormalization",
- "NonminimalComplexes",
- "NoPrint",
- "Normaliz",
- "NormalToricVarieties",
- "notify",
- "NTL",
- "null",
- "nullaryMethods",
- "NumericalAlgebraicGeometry",
- "NumericalCertification",
- "NumericalImplicitization",
- "NumericalLinearAlgebra",
- "NumericalSchubertCalculus",
- "NumericSolutions",
- "OldPolyhedra",
- "OldToricVectorBundles",
- "OnlineLookup",
- "OO",
- "oo",
- "ooo",
- "oooo",
- "OpenMath",
- "operatorAttributes",
- "OptionalComponentsPresent",
- "Options",
- "Order",
- "order",
- "OutputDictionary",
- "Outputs",
- "PackageCitations",
- "PackageDictionary",
- "PackageExports",
- "PackageImports",
- "PackageTemplate",
- "PairLimit",
- "PairsRemaining",
- "Parametrization",
- "Parsing",
- "path",
- "PencilsOfQuadrics",
- "Permanents",
- "PHCpack",
- "PhylogeneticTrees",
- "pi",
- "PieriMaps",
- "PlaneCurveSingularities",
- "Points",
- "Polyhedra",
- "Polymake",
- "PolyominoIdeals",
- "Posets",
- "Position",
- "PositivityToricBundles",
- "POSIX",
- "Postfix",
- "Pre",
- "Precision",
- "Prefix",
- "prefixDirectory",
- "prefixPath",
- "PrimaryDecomposition",
- "PrimaryTag",
- "PrimitiveElement",
- "Print",
- "printingAccuracy",
- "printingLeadLimit",
- "printingPrecision",
- "printingSeparator",
- "printingTimeLimit",
- "printingTrailLimit",
- "printWidth",
- "Probability",
- "profileSummary",
- "programPaths",
- "Projective",
- "Prune",
- "PruneComplex",
- "pruningMap",
- "PseudomonomialPrimaryDecomposition",
- "Pullback",
- "PushForward",
- "Python",
- "QthPower",
- "Quasidegrees",
- "QuaternaryQuartics",
- "QuillenSuslin",
- "quit",
- "Quotient",
- "Radical",
- "RadicalCodim1",
- "RaiseError",
- "RandomCanonicalCurves",
- "RandomComplexes",
- "RandomCurves",
- "RandomCurvesOverVerySmallFiniteFields",
- "RandomGenus14Curves",
- "RandomIdeals",
- "RandomMonomialIdeals",
- "RandomObjects",
- "RandomPlaneCurves",
- "RandomPoints",
- "RandomSpaceCurves",
- "Range",
- "RationalMaps",
- "RationalPoints",
- "RationalPoints2",
- "ReactionNetworks",
- "RealFP",
- "RealQP",
- "RealQP1",
- "RealRoots",
- "RealRR",
- "RealXD",
- "recursionLimit",
- "Reduce",
- "ReesAlgebra",
- "References",
- "ReflexivePolytopesDB",
- "Regularity",
- "RelativeCanonicalResolution",
- "Reload",
- "RemakeAllDocumentation",
- "RerunExamples",
- "ResidualIntersections",
- "ResLengthThree",
- "ResolutionsOfStanleyReisnerRings",
- "restart",
- "Result",
- "Resultants",
- "returnCode",
- "Reverse",
- "RevLex",
- "Right",
- "rootPath",
- "rootURI",
- "RunDirectory",
- "RunExamples",
- "RunExternalM2",
- "SagbiGbDetection",
- "Saturation",
- "Schubert2",
- "SchurComplexes",
- "SchurFunctors",
- "SchurRings",
- "scriptCommandLine",
- "SCSCP",
- "SectionRing",
- "SeeAlso",
- "SegreClasses",
- "SemidefiniteProgramming",
- "Seminormalization",
- "SeparateExec",
- "Serialization",
- "sheafExt",
- "ShimoyamaYokoyama",
- "showClassStructure",
- "showStructure",
- "showUserStructure",
- "SimpleDoc",
- "SimplicialComplexes",
- "SimplicialDecomposability",
- "SimplicialPosets",
- "SimplifyFractions",
- "SizeLimit",
- "SkewCommutative",
- "SlackIdeals",
- "SLnEquivariantMatrices",
- "SLPexpressions",
- "Sort",
- "SortStrategy",
- "SourceCode",
- "SourceRing",
- "SpaceCurves",
- "SparseResultants",
- "SpechtModule",
- "SpecialFanoFourfolds",
- "SpectralSequences",
- "SRdeformations",
- "Standard",
- "StartWithOneMinor",
- "StatePolytope",
- "StatGraphs",
- "stderr",
- "stdio",
- "StopBeforeComputation",
- "stopIfError",
- "StopIteration",
- "StopWithMinimalGenerators",
- "Strategy",
- "Strict",
- "StronglyStableIdeals",
- "Style",
- "SubalgebraBases",
- "Subnodes",
- "SubringLimit",
- "subscript",
- "Sugarless",
- "SumsOfSquares",
- "SuperLinearAlgebra",
- "superscript",
- "SVDComplexes",
- "SwitchingFields",
- "SymbolicPowers",
- "SymmetricPolynomials",
- "Synopsis",
- "Syzygies",
- "SyzygyLimit",
- "SyzygyMatrix",
- "SyzygyRows",
- "TangentCone",
- "TateOnProducts",
- "TensorComplexes",
- "Test",
- "testExample",
- "TestIdeals",
- "TeXmacs",
- "Text",
- "ThinSincereQuivers",
- "ThreadedGB",
- "Threshold",
- "Topcom",
- "topLevelMode",
- "Tor",
- "TorAlgebra",
- "Toric",
- "ToricInvariants",
- "ToricTopology",
- "ToricVectorBundles",
- "Torsion",
- "TotalPairs",
- "Tree",
- "TriangularSets",
- "Triangulations",
- "Tries",
- "Trim",
- "Triplets",
- "Tropical",
- "true",
- "Truncate",
- "Truncations",
- "TSpreadIdeals",
- "TypicalValue",
- "typicalValues",
- "Undo",
- "Unique",
- "Units",
- "Unmixed",
- "Up",
- "UpdateOnly",
- "UpperTriangular",
- "Usage",
- "UseCachedExampleOutput",
- "UseHilbertFunction",
- "UserMode",
- "UseSyzygies",
- "Variable",
- "VariableBaseName",
- "Variables",
- "Vasconcelos",
- "VectorFields",
- "VectorGraphics",
- "Verbose",
- "Verbosity",
- "Verify",
- "VersalDeformations",
- "Version",
- "version",
- "VerticalSpace",
- "viewHelp",
- "VirtualResolutions",
- "Visualize",
- "WebApp",
- "Weights",
- "WeylAlgebra",
- "WeylGroups",
- "WhitneyStratifications",
- "Wrap",
- "XML"
- )
-
-class Macaulay2Lexer(RegexLexer):
- """Lexer for Macaulay2, a software system for research in algebraic geometry."""
-
- name = 'Macaulay2'
- url = 'https://macaulay2.com/'
- aliases = ['macaulay2']
- filenames = ['*.m2']
-
- tokens = {
- 'root': [
- (r'--.*$', Comment.Single),
- (r'-\*', Comment.Multiline, 'block comment'),
- (r'"', String, 'quote string'),
- (r'///', String, 'slash string'),
- (words(M2KEYWORDS, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(M2DATATYPES, prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (words(M2FUNCTIONS, prefix=r'\b', suffix=r'\b'), Name.Function),
- (words(M2CONSTANTS, prefix=r'\b', suffix=r'\b'), Name.Constant),
- (r'\s+', Text.Whitespace),
- (r'.', Text)
- ],
- 'block comment' : [
- (r'[^*-]+', Comment.Multiline),
- (r'\*-', Comment.Multiline, '#pop'),
- (r'[*-]', Comment.Multiline)
- ],
- 'quote string' : [
- (r'[^\\"]+', String),
- (r'"', String, '#pop'),
- (r'\\"?', String),
- ],
- 'slash string' : [
- (r'[^/]+', String),
- (r'(//)+(?!/)', String),
- (r'/(//)+(?!/)', String, '#pop'),
- (r'/', String)
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/make.py b/venv/lib/python3.11/site-packages/pygments/lexers/make.py
deleted file mode 100644
index 0f54ab6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/make.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""
- pygments.lexers.make
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Makefiles and similar.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
- do_insertions, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation, Whitespace
-from pygments.lexers.shell import BashLexer
-
-__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']
-
-
-class MakefileLexer(Lexer):
- """
- Lexer for BSD and GNU make extensions (lenient enough to handle both in
- the same file even).
-
- *Rewritten in Pygments 0.10.*
- """
-
- name = 'Makefile'
- aliases = ['make', 'makefile', 'mf', 'bsdmake']
- filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
- mimetypes = ['text/x-makefile']
-
- r_special = re.compile(
- r'^(?:'
- # BSD Make
- r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
- # GNU Make
- r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
- # GNU Automake
- r'\s*(if|else|endif))(?=\s)')
- r_comment = re.compile(r'^\s*@?#')
-
- def get_tokens_unprocessed(self, text):
- ins = []
- lines = text.splitlines(keepends=True)
- done = ''
- lex = BaseMakefileLexer(**self.options)
- backslashflag = False
- for line in lines:
- if self.r_special.match(line) or backslashflag:
- ins.append((len(done), [(0, Comment.Preproc, line)]))
- backslashflag = line.strip().endswith('\\')
- elif self.r_comment.match(line):
- ins.append((len(done), [(0, Comment, line)]))
- else:
- done += line
- yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
-
- def analyse_text(text):
- # Many makefiles have $(BIG_CAPS) style variables
- if re.search(r'\$\([A-Z_]+\)', text):
- return 0.1
-
-
-class BaseMakefileLexer(RegexLexer):
- """
- Lexer for simple Makefiles (no preprocessing).
-
- .. versionadded:: 0.10
- """
-
- name = 'Base Makefile'
- aliases = ['basemake']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- # recipes (need to allow spaces because of expandtabs)
- (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
- # special variables
- (r'\$[<@$+%?|*]', Keyword),
- (r'\s+', Whitespace),
- (r'#.*?\n', Comment),
- (r'((?:un)?export)(\s+)(?=[\w${}\t -]+\n)',
- bygroups(Keyword, Whitespace), 'export'),
- (r'(?:un)?export\s+', Keyword),
- # assignment
- (r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
- bygroups(
- Name.Variable, Whitespace, Operator, Whitespace,
- using(BashLexer))),
- # strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- # targets
- (r'([^\n:]+)(:+)([ \t]*)', bygroups(
- Name.Function, Operator, Whitespace),
- 'block-header'),
- # expansions
- (r'\$\(', Keyword, 'expansion'),
- ],
- 'expansion': [
- (r'[^\w$().-]+', Text),
- (r'[\w.-]+', Name.Variable),
- (r'\$', Keyword),
- (r'\(', Keyword, '#push'),
- (r'\)', Keyword, '#pop'),
- ],
- 'export': [
- (r'[\w${}-]+', Name.Variable),
- (r'\n', Text, '#pop'),
- (r'\s+', Whitespace),
- ],
- 'block-header': [
- (r'[,|]', Punctuation),
- (r'#.*?\n', Comment, '#pop'),
- (r'\\\n', Text), # line continuation
- (r'\$\(', Keyword, 'expansion'),
- (r'[a-zA-Z_]+', Name),
- (r'\n', Whitespace, '#pop'),
- (r'.', Text),
- ],
- }
-
-
-class CMakeLexer(RegexLexer):
- """
- Lexer for CMake files.
-
- .. versionadded:: 1.2
- """
- name = 'CMake'
- url = 'https://cmake.org/documentation/'
- aliases = ['cmake']
- filenames = ['*.cmake', 'CMakeLists.txt']
- mimetypes = ['text/x-cmake']
-
- tokens = {
- 'root': [
- # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
- # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
- # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
- # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
- # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
- # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
- # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
- # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
- # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
- # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
- # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
- # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
- # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
- # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
- # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
- # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
- # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
- # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
- # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
- # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
- # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
- # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
- # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
- # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
- # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
- # r'COUNTARGS)\b', Name.Builtin, 'args'),
- (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Whitespace,
- Punctuation), 'args'),
- include('keywords'),
- include('ws')
- ],
- 'args': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
- (r'(\$ENV\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
- (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
- (r'(?s)".*?"', String.Double),
- (r'\\\S+', String),
- (r'\[(?P<level>=*)\[[\w\W]*?\](?P=level)\]', String.Multiline),
- (r'[^)$"# \t\n]+', String),
- (r'\n', Whitespace), # explicitly legal
- include('keywords'),
- include('ws')
- ],
- 'string': [
-
- ],
- 'keywords': [
- (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
- r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
- ],
- 'ws': [
- (r'[ \t]+', Whitespace),
- (r'#\[(?P<level>=*)\[[\w\W]*?\](?P=level)\]', Comment),
- (r'#.*\n', Comment),
- ]
- }
-
- def analyse_text(text):
- exp = (
- r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*'
- r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*'
- r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*'
- r'(#[^\n]*)?$'
- )
- if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
- return 0.8
- return 0.0
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/markup.py b/venv/lib/python3.11/site-packages/pygments/lexers/markup.py
deleted file mode 100644
index bb4c7ce..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/markup.py
+++ /dev/null
@@ -1,1550 +0,0 @@
-"""
- pygments.lexers.markup
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for non-HTML markup languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexers.html import XmlLexer
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.css import CssLexer
-from pygments.lexers.lilypond import LilyPondLexer
-from pygments.lexers.data import JsonLexer
-
-from pygments.lexer import RegexLexer, DelegatingLexer, include, bygroups, \
- using, this, do_insertions, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other, Whitespace
-from pygments.util import get_bool_opt, ClassNotFound
-
-__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
- 'MozPreprocHashLexer', 'MozPreprocPercentLexer',
- 'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
- 'MozPreprocCssLexer', 'MarkdownLexer', 'TiddlyWiki5Lexer', 'WikitextLexer']
-
-
-class BBCodeLexer(RegexLexer):
- """
- A lexer that highlights BBCode(-like) syntax.
-
- .. versionadded:: 0.6
- """
-
- name = 'BBCode'
- aliases = ['bbcode']
- mimetypes = ['text/x-bbcode']
-
- tokens = {
- 'root': [
- (r'[^[]+', Text),
- # tag/end tag begin
- (r'\[/?\w+', Keyword, 'tag'),
- # stray bracket
- (r'\[', Text),
- ],
- 'tag': [
- (r'\s+', Text),
- # attribute with value
- (r'(\w+)(=)("?[^\s"\]]+"?)',
- bygroups(Name.Attribute, Operator, String)),
- # tag argument (a la [color=green])
- (r'(=)("?[^\s"\]]+"?)',
- bygroups(Operator, String)),
- # tag end
- (r'\]', Keyword, '#pop'),
- ],
- }
-
-
-class MoinWikiLexer(RegexLexer):
- """
- For MoinMoin (and Trac) Wiki markup.
-
- .. versionadded:: 0.7
- """
-
- name = 'MoinMoin/Trac Wiki markup'
- aliases = ['trac-wiki', 'moin']
- filenames = []
- mimetypes = ['text/x-trac-wiki']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
- (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
- # Titles
- (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
- bygroups(Generic.Heading, using(this), Generic.Heading, String)),
- # Literal code blocks, with optional shebang
- (r'(\{\{\{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
- (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
- # Lists
- (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
- (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
- # Other Formatting
- (r'\[\[\w+.*?\]\]', Keyword), # Macro
- (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
- bygroups(Keyword, String, Keyword)), # Link
- (r'^----+$', Keyword), # Horizontal rules
- (r'[^\n\'\[{!_~^,|]+', Text),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'codeblock': [
- (r'\}\}\}', Name.Builtin, '#pop'),
- # these blocks are allowed to be nested in Trac, but not MoinMoin
- (r'\{\{\{', Text, '#push'),
- (r'[^{}]+', Comment.Preproc), # slurp boring text
- (r'.', Comment.Preproc), # allow loose { or }
- ],
- }
-
-
-class RstLexer(RegexLexer):
- """
- For reStructuredText markup.
-
- .. versionadded:: 0.7
-
- Additional options accepted:
-
- `handlecodeblocks`
- Highlight the contents of ``.. sourcecode:: language``,
- ``.. code:: language`` and ``.. code-block:: language``
- directives with a lexer for the given language (default:
- ``True``).
-
- .. versionadded:: 0.8
- """
- name = 'reStructuredText'
- url = 'https://docutils.sourceforge.io/rst.html'
- aliases = ['restructuredtext', 'rst', 'rest']
- filenames = ['*.rst', '*.rest']
- mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
- flags = re.MULTILINE
-
- def _handle_sourcecode(self, match):
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), Punctuation, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator.Word, match.group(3)
- yield match.start(4), Punctuation, match.group(4)
- yield match.start(5), Text, match.group(5)
- yield match.start(6), Keyword, match.group(6)
- yield match.start(7), Text, match.group(7)
-
- # lookup lexer if wanted and existing
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group(6).strip())
- except ClassNotFound:
- pass
- indention = match.group(8)
- indention_size = len(indention)
- code = (indention + match.group(9) + match.group(10) + match.group(11))
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(8), String, code
- return
-
- # highlight the lines with the lexer.
- ins = []
- codelines = code.splitlines(True)
- code = ''
- for line in codelines:
- if len(line) > indention_size:
- ins.append((len(code), [(0, Text, line[:indention_size])]))
- code += line[indention_size:]
- else:
- code += line
- yield from do_insertions(ins, lexer.get_tokens_unprocessed(code))
-
- # from docutils.parsers.rst.states
- closers = '\'")]}>\u2019\u201d\xbb!?'
- unicode_delimiters = '\u2010\u2011\u2012\u2013\u2014\u00a0'
- end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
- % (re.escape(unicode_delimiters),
- re.escape(closers)))
-
- tokens = {
- 'root': [
- # Heading with overline
- (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
- r'(.+)(\n)(\1)(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading,
- Text, Generic.Heading, Text)),
- # Plain heading
- (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
- r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading, Text)),
- # Bulleted lists
- (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered lists
- (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered, but keep words at BOL from becoming lists
- (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Line blocks
- (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
- bygroups(Text, Operator, using(this, state='inline'))),
- # Sourcecode directives
- (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
- r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*)?\n)+)',
- _handle_sourcecode),
- # A directive
- (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
- using(this, state='inline'))),
- # A reference target
- (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A footnote/citation target
- (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A substitution def
- (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
- Punctuation, Text, using(this, state='inline'))),
- # Comments
- (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
- # Field list marker
- (r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
- bygroups(Text, Name.Class, Text)),
- # Definition list
- (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
- bygroups(using(this, state='inline'), using(this, state='inline'))),
- # Code blocks
- (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*)?\n)+)',
- bygroups(String.Escape, Text, String, String, Text, String)),
- include('inline'),
- ],
- 'inline': [
- (r'\\.', Text), # escape
- (r'``', String, 'literal'), # code
- (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
- bygroups(String, String.Interpol, String)),
- (r'`.+?`__?', String), # reference
- (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
- bygroups(Name.Variable, Name.Attribute)), # role
- (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
- bygroups(Name.Attribute, Name.Variable)), # role (content first)
- (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
- (r'\*.+?\*', Generic.Emph), # Emphasis
- (r'\[.*?\]_', String), # Footnote or citation
- (r'<.+?>', Name.Tag), # Hyperlink
- (r'[^\\\n\[*`:]+', Text),
- (r'.', Text),
- ],
- 'literal': [
- (r'[^`]+', String),
- (r'``' + end_string_suffix, String, '#pop'),
- (r'`', String),
- ]
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
-
- def analyse_text(text):
- if text[:2] == '..' and text[2:3] != '.':
- return 0.3
- p1 = text.find("\n")
- p2 = text.find("\n", p1 + 1)
- if (p2 > -1 and # has two lines
- p1 * 2 + 1 == p2 and # they are the same length
- text[p1+1] in '-=' and # the next line both starts and ends with
- text[p1+1] == text[p2-1]): # ...a sufficiently high header
- return 0.5
-
-
-class TexLexer(RegexLexer):
- """
- Lexer for the TeX and LaTeX typesetting languages.
- """
-
- name = 'TeX'
- aliases = ['tex', 'latex']
- filenames = ['*.tex', '*.aux', '*.toc']
- mimetypes = ['text/x-tex', 'text/x-latex']
-
- tokens = {
- 'general': [
- (r'%.*?\n', Comment),
- (r'[{}]', Name.Builtin),
- (r'[&_^]', Name.Builtin),
- ],
- 'root': [
- (r'\\\[', String.Backtick, 'displaymath'),
- (r'\\\(', String, 'inlinemath'),
- (r'\$\$', String.Backtick, 'displaymath'),
- (r'\$', String, 'inlinemath'),
- (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
- (r'\\$', Keyword),
- include('general'),
- (r'[^\\$%&_^{}]+', Text),
- ],
- 'math': [
- (r'\\([a-zA-Z]+|.)', Name.Variable),
- include('general'),
- (r'[0-9]+', Number),
- (r'[-=!+*/()\[\]]', Operator),
- (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
- ],
- 'inlinemath': [
- (r'\\\)', String, '#pop'),
- (r'\$', String, '#pop'),
- include('math'),
- ],
- 'displaymath': [
- (r'\\\]', String, '#pop'),
- (r'\$\$', String, '#pop'),
- (r'\$', Name.Builtin),
- include('math'),
- ],
- 'command': [
- (r'\[.*?\]', Name.Attribute),
- (r'\*', Keyword),
- default('#pop'),
- ],
- }
-
- def analyse_text(text):
- for start in ("\\documentclass", "\\input", "\\documentstyle",
- "\\relax"):
- if text[:len(start)] == start:
- return True
-
-
-class GroffLexer(RegexLexer):
- """
- Lexer for the (g)roff typesetting language, supporting groff
- extensions. Mainly useful for highlighting manpage sources.
-
- .. versionadded:: 0.6
- """
-
- name = 'Groff'
- aliases = ['groff', 'nroff', 'man']
- filenames = ['*.[1-9]', '*.man', '*.1p', '*.3pm']
- mimetypes = ['application/x-troff', 'text/troff']
-
- tokens = {
- 'root': [
- (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
- (r'\.', Punctuation, 'request'),
- # Regular characters, slurp till we find a backslash or newline
- (r'[^\\\n]+', Text, 'textline'),
- default('textline'),
- ],
- 'textline': [
- include('escapes'),
- (r'[^\\\n]+', Text),
- (r'\n', Text, '#pop'),
- ],
- 'escapes': [
- # groff has many ways to write escapes.
- (r'\\"[^\n]*', Comment),
- (r'\\[fn]\w', String.Escape),
- (r'\\\(.{2}', String.Escape),
- (r'\\.\[.*\]', String.Escape),
- (r'\\.', String.Escape),
- (r'\\\n', Text, 'request'),
- ],
- 'request': [
- (r'\n', Text, '#pop'),
- include('escapes'),
- (r'"[^\n"]+"', String.Double),
- (r'\d+', Number),
- (r'\S+', String),
- (r'\s+', Text),
- ],
- }
-
- def analyse_text(text):
- if text[:1] != '.':
- return False
- if text[:3] == '.\\"':
- return True
- if text[:4] == '.TH ':
- return True
- if text[1:3].isalnum() and text[3].isspace():
- return 0.9
-
-
-class MozPreprocHashLexer(RegexLexer):
- """
- Lexer for Mozilla Preprocessor files (with '#' as the marker).
-
- Other data is left untouched.
-
- .. versionadded:: 2.0
- """
- name = 'mozhashpreproc'
- aliases = [name]
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^#', Comment.Preproc, ('expr', 'exprstart')),
- (r'.+', Other),
- ],
- 'exprstart': [
- (r'(literal)(.*)', bygroups(Comment.Preproc, Text), '#pop:2'),
- (words((
- 'define', 'undef', 'if', 'ifdef', 'ifndef', 'else', 'elif',
- 'elifdef', 'elifndef', 'endif', 'expand', 'filter', 'unfilter',
- 'include', 'includesubst', 'error')),
- Comment.Preproc, '#pop'),
- ],
- 'expr': [
- (words(('!', '!=', '==', '&&', '||')), Operator),
- (r'(defined)(\()', bygroups(Keyword, Punctuation)),
- (r'\)', Punctuation),
- (r'[0-9]+', Number.Decimal),
- (r'__\w+?__', Name.Variable),
- (r'@\w+?@', Name.Class),
- (r'\w+', Name),
- (r'\n', Text, '#pop'),
- (r'\s+', Text),
- (r'\S', Punctuation),
- ],
- }
-
-
-class MozPreprocPercentLexer(MozPreprocHashLexer):
- """
- Lexer for Mozilla Preprocessor files (with '%' as the marker).
-
- Other data is left untouched.
-
- .. versionadded:: 2.0
- """
- name = 'mozpercentpreproc'
- aliases = [name]
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^%', Comment.Preproc, ('expr', 'exprstart')),
- (r'.+', Other),
- ],
- }
-
-
-class MozPreprocXulLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `XmlLexer`.
-
- .. versionadded:: 2.0
- """
- name = "XUL+mozpreproc"
- aliases = ['xul+mozpreproc']
- filenames = ['*.xul.in']
- mimetypes = []
-
- def __init__(self, **options):
- super().__init__(XmlLexer, MozPreprocHashLexer, **options)
-
-
-class MozPreprocJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `JavascriptLexer`.
-
- .. versionadded:: 2.0
- """
- name = "Javascript+mozpreproc"
- aliases = ['javascript+mozpreproc']
- filenames = ['*.js.in']
- mimetypes = []
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, MozPreprocHashLexer, **options)
-
-
-class MozPreprocCssLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `CssLexer`.
-
- .. versionadded:: 2.0
- """
- name = "CSS+mozpreproc"
- aliases = ['css+mozpreproc']
- filenames = ['*.css.in']
- mimetypes = []
-
- def __init__(self, **options):
- super().__init__(CssLexer, MozPreprocPercentLexer, **options)
-
-
-class MarkdownLexer(RegexLexer):
- """
- For Markdown markup.
-
- .. versionadded:: 2.2
- """
- name = 'Markdown'
- url = 'https://daringfireball.net/projects/markdown/'
- aliases = ['markdown', 'md']
- filenames = ['*.md', '*.markdown']
- mimetypes = ["text/x-markdown"]
- flags = re.MULTILINE
-
- def _handle_codeblock(self, match):
- from pygments.lexers import get_lexer_by_name
-
- yield match.start('initial'), String.Backtick, match.group('initial')
- yield match.start('lang'), String.Backtick, match.group('lang')
- if match.group('afterlang') is not None:
- yield match.start('whitespace'), Whitespace, match.group('whitespace')
- yield match.start('extra'), Text, match.group('extra')
- yield match.start('newline'), Whitespace, match.group('newline')
-
- # lookup lexer if wanted and existing
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group('lang').strip())
- except ClassNotFound:
- pass
- code = match.group('code')
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start('code'), String, code
- else:
- # FIXME: aren't the offsets wrong?
- yield from do_insertions([], lexer.get_tokens_unprocessed(code))
-
- yield match.start('terminator'), String.Backtick, match.group('terminator')
-
- tokens = {
- 'root': [
- # heading with '#' prefix (atx-style)
- (r'(^#[^#].+)(\n)', bygroups(Generic.Heading, Text)),
- # subheading with '#' prefix (atx-style)
- (r'(^#{2,6}[^#].+)(\n)', bygroups(Generic.Subheading, Text)),
- # heading with '=' underlines (Setext-style)
- (r'^(.+)(\n)(=+)(\n)', bygroups(Generic.Heading, Text, Generic.Heading, Text)),
- # subheading with '-' underlines (Setext-style)
- (r'^(.+)(\n)(-+)(\n)', bygroups(Generic.Subheading, Text, Generic.Subheading, Text)),
- # task list
- (r'^(\s*)([*-] )(\[[ xX]\])( .+\n)',
- bygroups(Whitespace, Keyword, Keyword, using(this, state='inline'))),
- # bulleted list
- (r'^(\s*)([*-])(\s)(.+\n)',
- bygroups(Whitespace, Keyword, Whitespace, using(this, state='inline'))),
- # numbered list
- (r'^(\s*)([0-9]+\.)( .+\n)',
- bygroups(Whitespace, Keyword, using(this, state='inline'))),
- # quote
- (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
- # code block fenced by 3 backticks
- (r'^(\s*```\n[\w\W]*?^\s*```$\n)', String.Backtick),
- # code block with language
- # Some tools include extra stuff after the language name, just
- # highlight that as text. For example: https://docs.enola.dev/use/execmd
- (r'''(?x)
- ^(?P<initial>\s*```)
- (?P<lang>[\w\-]+)
- (?P<afterlang>
- (?P<whitespace>[^\S\n]+)
- (?P<extra>.*))?
- (?P<newline>\n)
- (?P<code>(.|\n)*?)
- (?P<terminator>^\s*```$\n)
- ''',
- _handle_codeblock),
-
- include('inline'),
- ],
- 'inline': [
- # escape
- (r'\\.', Text),
- # inline code
- (r'([^`]?)(`[^`\n]+`)', bygroups(Text, String.Backtick)),
- # warning: the following rules eat outer tags.
- # eg. **foo _bar_ baz** => foo and baz are not recognized as bold
- # bold fenced by '**'
- (r'([^\*]?)(\*\*[^* \n][^*\n]*\*\*)', bygroups(Text, Generic.Strong)),
- # bold fenced by '__'
- (r'([^_]?)(__[^_ \n][^_\n]*__)', bygroups(Text, Generic.Strong)),
- # italics fenced by '*'
- (r'([^\*]?)(\*[^* \n][^*\n]*\*)', bygroups(Text, Generic.Emph)),
- # italics fenced by '_'
- (r'([^_]?)(_[^_ \n][^_\n]*_)', bygroups(Text, Generic.Emph)),
- # strikethrough
- (r'([^~]?)(~~[^~ \n][^~\n]*~~)', bygroups(Text, Generic.Deleted)),
- # mentions and topics (twitter and github stuff)
- (r'[@#][\w/:]+', Name.Entity),
- # (image?) links eg: ![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png)
- (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))',
- bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)),
- # reference-style links, e.g.:
- # [an example][id]
- # [id]: http://example.com/
- (r'(\[)([^]]+)(\])(\[)([^]]*)(\])',
- bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)),
- (r'^(\s*\[)([^]]*)(\]:\s*)(.+)',
- bygroups(Text, Name.Label, Text, Name.Attribute)),
-
- # general text, must come last!
- (r'[^\\\s]+', Text),
- (r'.', Text),
- ],
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
-
-
-class TiddlyWiki5Lexer(RegexLexer):
- """
- For TiddlyWiki5 markup.
-
- .. versionadded:: 2.7
- """
- name = 'tiddler'
- url = 'https://tiddlywiki.com/#TiddlerFiles'
- aliases = ['tid']
- filenames = ['*.tid']
- mimetypes = ["text/vnd.tiddlywiki"]
- flags = re.MULTILINE
-
- def _handle_codeblock(self, match):
- """
- match args: 1:backticks, 2:lang_name, 3:newline, 4:code, 5:backticks
- """
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), String, match.group(1)
- yield match.start(2), String, match.group(2)
- yield match.start(3), Text, match.group(3)
-
- # lookup lexer if wanted and existing
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group(2).strip())
- except ClassNotFound:
- pass
- code = match.group(4)
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(4), String, code
- return
-
- yield from do_insertions([], lexer.get_tokens_unprocessed(code))
-
- yield match.start(5), String, match.group(5)
-
- def _handle_cssblock(self, match):
- """
- match args: 1:style tag 2:newline, 3:code, 4:closing style tag
- """
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), String, match.group(1)
- yield match.start(2), String, match.group(2)
-
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name('css')
- except ClassNotFound:
- pass
- code = match.group(3)
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(3), String, code
- return
-
- yield from do_insertions([], lexer.get_tokens_unprocessed(code))
-
- yield match.start(4), String, match.group(4)
-
- tokens = {
- 'root': [
- # title in metadata section
- (r'^(title)(:\s)(.+\n)', bygroups(Keyword, Text, Generic.Heading)),
- # headings
- (r'^(!)([^!].+\n)', bygroups(Generic.Heading, Text)),
- (r'^(!{2,6})(.+\n)', bygroups(Generic.Subheading, Text)),
- # bulleted or numbered lists or single-line block quotes
- # (can be mixed)
- (r'^(\s*)([*#>]+)(\s*)(.+\n)',
- bygroups(Text, Keyword, Text, using(this, state='inline'))),
- # multi-line block quotes
- (r'^(<<<.*\n)([\w\W]*?)(^<<<.*$)', bygroups(String, Text, String)),
- # table header
- (r'^(\|.*?\|h)$', bygroups(Generic.Strong)),
- # table footer or caption
- (r'^(\|.*?\|[cf])$', bygroups(Generic.Emph)),
- # table class
- (r'^(\|.*?\|k)$', bygroups(Name.Tag)),
- # definitions
- (r'^(;.*)$', bygroups(Generic.Strong)),
- # text block
- (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
- # code block with language
- (r'^(```)(\w+)(\n)([\w\W]*?)(^```$)', _handle_codeblock),
- # CSS style block
- (r'^(<style>)(\n)([\w\W]*?)(^</style>$)', _handle_cssblock),
-
- include('keywords'),
- include('inline'),
- ],
- 'keywords': [
- (words((
- '\\define', '\\end', 'caption', 'created', 'modified', 'tags',
- 'title', 'type'), prefix=r'^', suffix=r'\b'),
- Keyword),
- ],
- 'inline': [
- # escape
- (r'\\.', Text),
- # created or modified date
- (r'\d{17}', Number.Integer),
- # italics
- (r'(\s)(//[^/]+//)((?=\W|\n))',
- bygroups(Text, Generic.Emph, Text)),
- # superscript
- (r'(\s)(\^\^[^\^]+\^\^)', bygroups(Text, Generic.Emph)),
- # subscript
- (r'(\s)(,,[^,]+,,)', bygroups(Text, Generic.Emph)),
- # underscore
- (r'(\s)(__[^_]+__)', bygroups(Text, Generic.Strong)),
- # bold
- (r"(\s)(''[^']+'')((?=\W|\n))",
- bygroups(Text, Generic.Strong, Text)),
- # strikethrough
- (r'(\s)(~~[^~]+~~)((?=\W|\n))',
- bygroups(Text, Generic.Deleted, Text)),
- # TiddlyWiki variables
- (r'<<[^>]+>>', Name.Tag),
- (r'\$\$[^$]+\$\$', Name.Tag),
- (r'\$\([^)]+\)\$', Name.Tag),
- # TiddlyWiki style or class
- (r'^@@.*$', Name.Tag),
- # HTML tags
- (r'</?[^>]+>', Name.Tag),
- # inline code
- (r'`[^`]+`', String.Backtick),
- # HTML escaped symbols
- (r'&\S*?;', String.Regex),
- # Wiki links
- (r'(\[{2})([^]\|]+)(\]{2})', bygroups(Text, Name.Tag, Text)),
- # External links
- (r'(\[{2})([^]\|]+)(\|)([^]\|]+)(\]{2})',
- bygroups(Text, Name.Tag, Text, Name.Attribute, Text)),
- # Transclusion
- (r'(\{{2})([^}]+)(\}{2})', bygroups(Text, Name.Tag, Text)),
- # URLs
- (r'(\b.?.?tps?://[^\s"]+)', bygroups(Name.Attribute)),
-
- # general text, must come last!
- (r'[\w]+', Text),
- (r'.', Text)
- ],
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
-
-
-class WikitextLexer(RegexLexer):
- """
- For MediaWiki Wikitext.
-
- Parsing Wikitext is tricky, and results vary between different MediaWiki
- installations, so we only highlight common syntaxes (built-in or from
- popular extensions), and also assume templates produce no unbalanced
- syntaxes.
-
- .. versionadded:: 2.15
- """
- name = 'Wikitext'
- url = 'https://www.mediawiki.org/wiki/Wikitext'
- aliases = ['wikitext', 'mediawiki']
- filenames = []
- mimetypes = ['text/x-wiki']
- flags = re.MULTILINE
-
- def nowiki_tag_rules(tag_name):
- return [
- (r'(?i)(</)({})(\s*)(>)'.format(tag_name), bygroups(Punctuation,
- Name.Tag, Whitespace, Punctuation), '#pop'),
- include('entity'),
- include('text'),
- ]
-
- def plaintext_tag_rules(tag_name):
- return [
- (r'(?si)(.*?)(</)({})(\s*)(>)'.format(tag_name), bygroups(Text,
- Punctuation, Name.Tag, Whitespace, Punctuation), '#pop'),
- ]
-
- def delegate_tag_rules(tag_name, lexer):
- return [
- (r'(?i)(</)({})(\s*)(>)'.format(tag_name), bygroups(Punctuation,
- Name.Tag, Whitespace, Punctuation), '#pop'),
- (r'(?si).+?(?=</{}\s*>)'.format(tag_name), using(lexer)),
- ]
-
- def text_rules(token):
- return [
- (r'\w+', token),
- (r'[^\S\n]+', token),
- (r'(?s).', token),
- ]
-
- def handle_syntaxhighlight(self, match, ctx):
- from pygments.lexers import get_lexer_by_name
-
- attr_content = match.group()
- start = 0
- index = 0
- while True:
- index = attr_content.find('>', start)
- # Exclude comment end (-->)
- if attr_content[index-2:index] != '--':
- break
- start = index + 1
-
- if index == -1:
- # No tag end
- yield from self.get_tokens_unprocessed(attr_content, stack=['root', 'attr'])
- return
- attr = attr_content[:index]
- yield from self.get_tokens_unprocessed(attr, stack=['root', 'attr'])
- yield match.start(3) + index, Punctuation, '>'
-
- lexer = None
- content = attr_content[index+1:]
- lang_match = re.findall(r'\blang=("|\'|)(\w+)(\1)', attr)
-
- if len(lang_match) >= 1:
- # Pick the last match in case of multiple matches
- lang = lang_match[-1][1]
- try:
- lexer = get_lexer_by_name(lang)
- except ClassNotFound:
- pass
-
- if lexer is None:
- yield match.start() + index + 1, Text, content
- else:
- yield from lexer.get_tokens_unprocessed(content)
-
- def handle_score(self, match, ctx):
- attr_content = match.group()
- start = 0
- index = 0
- while True:
- index = attr_content.find('>', start)
- # Exclude comment end (-->)
- if attr_content[index-2:index] != '--':
- break
- start = index + 1
-
- if index == -1:
- # No tag end
- yield from self.get_tokens_unprocessed(attr_content, stack=['root', 'attr'])
- return
- attr = attr_content[:index]
- content = attr_content[index+1:]
- yield from self.get_tokens_unprocessed(attr, stack=['root', 'attr'])
- yield match.start(3) + index, Punctuation, '>'
-
- lang_match = re.findall(r'\blang=("|\'|)(\w+)(\1)', attr)
- # Pick the last match in case of multiple matches
- lang = lang_match[-1][1] if len(lang_match) >= 1 else 'lilypond'
-
- if lang == 'lilypond': # Case sensitive
- yield from LilyPondLexer().get_tokens_unprocessed(content)
- else: # ABC
- # FIXME: Use ABC lexer in the future
- yield match.start() + index + 1, Text, content
-
- # a-z removed to prevent linter from complaining, REMEMBER to use (?i)
- title_char = r' %!"$&\'()*,\-./0-9:;=?@A-Z\\\^_`~+\u0080-\uFFFF'
- nbsp_char = r'(?:\t|&nbsp;|&\#0*160;|&\#[Xx]0*[Aa]0;|[ \xA0\u1680\u2000-\u200A\u202F\u205F\u3000])'
- link_address = r'(?:[0-9.]+|\[[0-9a-f:.]+\]|[^\x00-\x20"<>\[\]\x7F\xA0\u1680\u2000-\u200A\u202F\u205F\u3000\uFFFD])'
- link_char_class = r'[^\x00-\x20"<>\[\]\x7F\xA0\u1680\u2000-\u200A\u202F\u205F\u3000\uFFFD]'
- double_slashes_i = {
- '__FORCETOC__', '__NOCONTENTCONVERT__', '__NOCC__', '__NOEDITSECTION__', '__NOGALLERY__',
- '__NOTITLECONVERT__', '__NOTC__', '__NOTOC__', '__TOC__',
- }
- double_slashes = {
- '__EXPECTUNUSEDCATEGORY__', '__HIDDENCAT__', '__INDEX__', '__NEWSECTIONLINK__',
- '__NOINDEX__', '__NONEWSECTIONLINK__', '__STATICREDIRECT__', '__NOGLOBAL__',
- '__DISAMBIG__', '__EXPECTED_UNCONNECTED_PAGE__',
- }
- protocols = {
- 'bitcoin:', 'ftp://', 'ftps://', 'geo:', 'git://', 'gopher://', 'http://', 'https://',
- 'irc://', 'ircs://', 'magnet:', 'mailto:', 'mms://', 'news:', 'nntp://', 'redis://',
- 'sftp://', 'sip:', 'sips:', 'sms:', 'ssh://', 'svn://', 'tel:', 'telnet://', 'urn:',
- 'worldwind://', 'xmpp:', '//',
- }
- non_relative_protocols = protocols - {'//'}
- html_tags = {
- 'abbr', 'b', 'bdi', 'bdo', 'big', 'blockquote', 'br', 'caption', 'center', 'cite', 'code',
- 'data', 'dd', 'del', 'dfn', 'div', 'dl', 'dt', 'em', 'font', 'h1', 'h2', 'h3', 'h4', 'h5',
- 'h6', 'hr', 'i', 'ins', 'kbd', 'li', 'link', 'mark', 'meta', 'ol', 'p', 'q', 'rb', 'rp',
- 'rt', 'rtc', 'ruby', 's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup',
- 'table', 'td', 'th', 'time', 'tr', 'tt', 'u', 'ul', 'var', 'wbr',
- }
- parser_tags = {
- 'graph', 'charinsert', 'rss', 'chem', 'categorytree', 'nowiki', 'inputbox', 'math',
- 'hiero', 'score', 'pre', 'ref', 'translate', 'imagemap', 'templatestyles', 'languages',
- 'noinclude', 'mapframe', 'section', 'poem', 'syntaxhighlight', 'includeonly', 'tvar',
- 'onlyinclude', 'templatedata', 'langconvert', 'timeline', 'dynamicpagelist', 'gallery',
- 'maplink', 'ce', 'references',
- }
- variant_langs = {
- # ZhConverter.php
- 'zh', 'zh-hans', 'zh-hant', 'zh-cn', 'zh-hk', 'zh-mo', 'zh-my', 'zh-sg', 'zh-tw',
- # WuuConverter.php
- 'wuu', 'wuu-hans', 'wuu-hant',
- # UzConverter.php
- 'uz', 'uz-latn', 'uz-cyrl',
- # TlyConverter.php
- 'tly', 'tly-cyrl',
- # TgConverter.php
- 'tg', 'tg-latn',
- # SrConverter.php
- 'sr', 'sr-ec', 'sr-el',
- # ShiConverter.php
- 'shi', 'shi-tfng', 'shi-latn',
- # ShConverter.php
- 'sh-latn', 'sh-cyrl',
- # KuConverter.php
- 'ku', 'ku-arab', 'ku-latn',
- # KkConverter.php
- 'kk', 'kk-cyrl', 'kk-latn', 'kk-arab', 'kk-kz', 'kk-tr', 'kk-cn',
- # IuConverter.php
- 'iu', 'ike-cans', 'ike-latn',
- # GanConverter.php
- 'gan', 'gan-hans', 'gan-hant',
- # EnConverter.php
- 'en', 'en-x-piglatin',
- # CrhConverter.php
- 'crh', 'crh-cyrl', 'crh-latn',
- # BanConverter.php
- 'ban', 'ban-bali', 'ban-x-dharma', 'ban-x-palmleaf', 'ban-x-pku',
- }
- magic_vars_i = {
- 'ARTICLEPATH', 'INT', 'PAGEID', 'SCRIPTPATH', 'SERVER', 'SERVERNAME', 'STYLEPATH',
- }
- magic_vars = {
- '!', '=', 'BASEPAGENAME', 'BASEPAGENAMEE', 'CASCADINGSOURCES', 'CONTENTLANGUAGE',
- 'CONTENTLANG', 'CURRENTDAY', 'CURRENTDAY2', 'CURRENTDAYNAME', 'CURRENTDOW', 'CURRENTHOUR',
- 'CURRENTMONTH', 'CURRENTMONTH2', 'CURRENTMONTH1', 'CURRENTMONTHABBREV', 'CURRENTMONTHNAME',
- 'CURRENTMONTHNAMEGEN', 'CURRENTTIME', 'CURRENTTIMESTAMP', 'CURRENTVERSION', 'CURRENTWEEK',
- 'CURRENTYEAR', 'DIRECTIONMARK', 'DIRMARK', 'FULLPAGENAME', 'FULLPAGENAMEE', 'LOCALDAY',
- 'LOCALDAY2', 'LOCALDAYNAME', 'LOCALDOW', 'LOCALHOUR', 'LOCALMONTH', 'LOCALMONTH2',
- 'LOCALMONTH1', 'LOCALMONTHABBREV', 'LOCALMONTHNAME', 'LOCALMONTHNAMEGEN', 'LOCALTIME',
- 'LOCALTIMESTAMP', 'LOCALWEEK', 'LOCALYEAR', 'NAMESPACE', 'NAMESPACEE', 'NAMESPACENUMBER',
- 'NUMBEROFACTIVEUSERS', 'NUMBEROFADMINS', 'NUMBEROFARTICLES', 'NUMBEROFEDITS',
- 'NUMBEROFFILES', 'NUMBEROFPAGES', 'NUMBEROFUSERS', 'PAGELANGUAGE', 'PAGENAME', 'PAGENAMEE',
- 'REVISIONDAY', 'REVISIONDAY2', 'REVISIONID', 'REVISIONMONTH', 'REVISIONMONTH1',
- 'REVISIONSIZE', 'REVISIONTIMESTAMP', 'REVISIONUSER', 'REVISIONYEAR', 'ROOTPAGENAME',
- 'ROOTPAGENAMEE', 'SITENAME', 'SUBJECTPAGENAME', 'ARTICLEPAGENAME', 'SUBJECTPAGENAMEE',
- 'ARTICLEPAGENAMEE', 'SUBJECTSPACE', 'ARTICLESPACE', 'SUBJECTSPACEE', 'ARTICLESPACEE',
- 'SUBPAGENAME', 'SUBPAGENAMEE', 'TALKPAGENAME', 'TALKPAGENAMEE', 'TALKSPACE', 'TALKSPACEE',
- }
- parser_functions_i = {
- 'ANCHORENCODE', 'BIDI', 'CANONICALURL', 'CANONICALURLE', 'FILEPATH', 'FORMATNUM',
- 'FULLURL', 'FULLURLE', 'GENDER', 'GRAMMAR', 'INT', r'\#LANGUAGE', 'LC', 'LCFIRST', 'LOCALURL',
- 'LOCALURLE', 'NS', 'NSE', 'PADLEFT', 'PADRIGHT', 'PAGEID', 'PLURAL', 'UC', 'UCFIRST',
- 'URLENCODE',
- }
- parser_functions = {
- 'BASEPAGENAME', 'BASEPAGENAMEE', 'CASCADINGSOURCES', 'DEFAULTSORT', 'DEFAULTSORTKEY',
- 'DEFAULTCATEGORYSORT', 'FULLPAGENAME', 'FULLPAGENAMEE', 'NAMESPACE', 'NAMESPACEE',
- 'NAMESPACENUMBER', 'NUMBERINGROUP', 'NUMINGROUP', 'NUMBEROFACTIVEUSERS', 'NUMBEROFADMINS',
- 'NUMBEROFARTICLES', 'NUMBEROFEDITS', 'NUMBEROFFILES', 'NUMBEROFPAGES', 'NUMBEROFUSERS',
- 'PAGENAME', 'PAGENAMEE', 'PAGESINCATEGORY', 'PAGESINCAT', 'PAGESIZE', 'PROTECTIONEXPIRY',
- 'PROTECTIONLEVEL', 'REVISIONDAY', 'REVISIONDAY2', 'REVISIONID', 'REVISIONMONTH',
- 'REVISIONMONTH1', 'REVISIONTIMESTAMP', 'REVISIONUSER', 'REVISIONYEAR', 'ROOTPAGENAME',
- 'ROOTPAGENAMEE', 'SUBJECTPAGENAME', 'ARTICLEPAGENAME', 'SUBJECTPAGENAMEE',
- 'ARTICLEPAGENAMEE', 'SUBJECTSPACE', 'ARTICLESPACE', 'SUBJECTSPACEE', 'ARTICLESPACEE',
- 'SUBPAGENAME', 'SUBPAGENAMEE', 'TALKPAGENAME', 'TALKPAGENAMEE', 'TALKSPACE', 'TALKSPACEE',
- 'INT', 'DISPLAYTITLE', 'PAGESINNAMESPACE', 'PAGESINNS',
- }
-
- tokens = {
- 'root': [
- # Redirects
- (r"""(?xi)
- (\A\s*?)(\#REDIRECT:?) # may contain a colon
- (\s+)(\[\[) (?=[^\]\n]* \]\]$)
- """,
- bygroups(Whitespace, Keyword, Whitespace, Punctuation), 'redirect-inner'),
- # Subheadings
- (r'^(={2,6})(.+?)(\1)(\s*$\n)',
- bygroups(Generic.Subheading, Generic.Subheading, Generic.Subheading, Whitespace)),
- # Headings
- (r'^(=.+?=)(\s*$\n)',
- bygroups(Generic.Heading, Whitespace)),
- # Double-slashed magic words
- (words(double_slashes_i, prefix=r'(?i)'), Name.Function.Magic),
- (words(double_slashes), Name.Function.Magic),
- # Raw URLs
- (r'(?i)\b(?:{}){}{}*'.format('|'.join(protocols),
- link_address, link_char_class), Name.Label),
- # Magic links
- (r'\b(?:RFC|PMID){}+[0-9]+\b'.format(nbsp_char),
- Name.Function.Magic),
- (r"""(?x)
- \bISBN {nbsp_char}
- (?: 97[89] {nbsp_dash}? )?
- (?: [0-9] {nbsp_dash}? ){{9}} # escape format()
- [0-9Xx]\b
- """.format(nbsp_char=nbsp_char, nbsp_dash=f'(?:-|{nbsp_char})'), Name.Function.Magic),
- include('list'),
- include('inline'),
- include('text'),
- ],
- 'redirect-inner': [
- (r'(\]\])(\s*?\n)', bygroups(Punctuation, Whitespace), '#pop'),
- (r'(\#)([^#]*?)', bygroups(Punctuation, Name.Label)),
- (r'(?i)[{}]+'.format(title_char), Name.Tag),
- ],
- 'list': [
- # Description lists
- (r'^;', Keyword, 'dt'),
- # Ordered lists, unordered lists and indents
- (r'^[#:*]+', Keyword),
- # Horizontal rules
- (r'^-{4,}', Keyword),
- ],
- 'inline': [
- # Signatures
- (r'~{3,5}', Keyword),
- # Entities
- include('entity'),
- # Bold & italic
- (r"('')(''')(?!')", bygroups(Generic.Emph,
- Generic.EmphStrong), 'inline-italic-bold'),
- (r"'''(?!')", Generic.Strong, 'inline-bold'),
- (r"''(?!')", Generic.Emph, 'inline-italic'),
- # Comments & parameters & templates
- include('replaceable'),
- # Media links
- (
- r"""(?xi)
- (\[\[)
- (File|Image) (:)
- ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | <!--[\s\S]*?--> )*)
- (?: (\#) ([%s]*?) )?
- """ % (title_char, f'{title_char}#'),
- bygroups(Punctuation, Name.Namespace, Punctuation,
- using(this, state=['wikilink-name']), Punctuation, Name.Label),
- 'medialink-inner'
- ),
- # Wikilinks
- (
- r"""(?xi)
- (\[\[)(?!%s) # Should not contain URLs
- (?: ([%s]*) (:))?
- ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | <!--[\s\S]*?--> )*?)
- (?: (\#) ([%s]*?) )?
- (\]\])
- """ % ('|'.join(protocols), title_char.replace('/', ''),
- title_char, f'{title_char}#'),
- bygroups(Punctuation, Name.Namespace, Punctuation,
- using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation)
- ),
- (
- r"""(?xi)
- (\[\[)(?!%s)
- (?: ([%s]*) (:))?
- ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | <!--[\s\S]*?--> )*?)
- (?: (\#) ([%s]*?) )?
- (\|)
- """ % ('|'.join(protocols), title_char.replace('/', ''),
- title_char, f'{title_char}#'),
- bygroups(Punctuation, Name.Namespace, Punctuation,
- using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation),
- 'wikilink-inner'
- ),
- # External links
- (
- r"""(?xi)
- (\[)
- ((?:{}) {} {}*)
- (\s*)
- """.format('|'.join(protocols), link_address, link_char_class),
- bygroups(Punctuation, Name.Label, Whitespace),
- 'extlink-inner'
- ),
- # Tables
- (r'^(:*)(\s*?)(\{\|)([^\n]*)$', bygroups(Keyword,
- Whitespace, Punctuation, using(this, state=['root', 'attr'])), 'table'),
- # HTML tags
- (r'(?i)(<)({})\b'.format('|'.join(html_tags)),
- bygroups(Punctuation, Name.Tag), 'tag-inner-ordinary'),
- (r'(?i)(</)({})\b(\s*)(>)'.format('|'.join(html_tags)),
- bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
- # <nowiki>
- (r'(?i)(<)(nowiki)\b', bygroups(Punctuation,
- Name.Tag), ('tag-nowiki', 'tag-inner')),
- # <pre>
- (r'(?i)(<)(pre)\b', bygroups(Punctuation,
- Name.Tag), ('tag-pre', 'tag-inner')),
- # <categorytree>
- (r'(?i)(<)(categorytree)\b', bygroups(
- Punctuation, Name.Tag), ('tag-categorytree', 'tag-inner')),
- # <hiero>
- (r'(?i)(<)(hiero)\b', bygroups(Punctuation,
- Name.Tag), ('tag-hiero', 'tag-inner')),
- # <math>
- (r'(?i)(<)(math)\b', bygroups(Punctuation,
- Name.Tag), ('tag-math', 'tag-inner')),
- # <chem>
- (r'(?i)(<)(chem)\b', bygroups(Punctuation,
- Name.Tag), ('tag-chem', 'tag-inner')),
- # <ce>
- (r'(?i)(<)(ce)\b', bygroups(Punctuation,
- Name.Tag), ('tag-ce', 'tag-inner')),
- # <charinsert>
- (r'(?i)(<)(charinsert)\b', bygroups(
- Punctuation, Name.Tag), ('tag-charinsert', 'tag-inner')),
- # <templatedata>
- (r'(?i)(<)(templatedata)\b', bygroups(
- Punctuation, Name.Tag), ('tag-templatedata', 'tag-inner')),
- # <gallery>
- (r'(?i)(<)(gallery)\b', bygroups(
- Punctuation, Name.Tag), ('tag-gallery', 'tag-inner')),
- # <graph>
- (r'(?i)(<)(graph)\b', bygroups(
- Punctuation, Name.Tag), ('tag-graph', 'tag-inner')),
- # <dynamicpagelist>
- (r'(?i)(<)(dynamicpagelist)\b', bygroups(
- Punctuation, Name.Tag), ('tag-dynamicpagelist', 'tag-inner')),
- # <inputbox>
- (r'(?i)(<)(inputbox)\b', bygroups(
- Punctuation, Name.Tag), ('tag-inputbox', 'tag-inner')),
- # <rss>
- (r'(?i)(<)(rss)\b', bygroups(
- Punctuation, Name.Tag), ('tag-rss', 'tag-inner')),
- # <imagemap>
- (r'(?i)(<)(imagemap)\b', bygroups(
- Punctuation, Name.Tag), ('tag-imagemap', 'tag-inner')),
- # <syntaxhighlight>
- (r'(?i)(</)(syntaxhighlight)\b(\s*)(>)',
- bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
- (r'(?si)(<)(syntaxhighlight)\b([^>]*?(?<!/)>.*?)(?=</\2\s*>)',
- bygroups(Punctuation, Name.Tag, handle_syntaxhighlight)),
- # <syntaxhighlight>: Fallback case for self-closing tags
- (r'(?i)(<)(syntaxhighlight)\b(\s*?)((?:[^>]|-->)*?)(/\s*?(?<!--)>)', bygroups(
- Punctuation, Name.Tag, Whitespace, using(this, state=['root', 'attr']), Punctuation)),
- # <source>
- (r'(?i)(</)(source)\b(\s*)(>)',
- bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
- (r'(?si)(<)(source)\b([^>]*?(?<!/)>.*?)(?=</\2\s*>)',
- bygroups(Punctuation, Name.Tag, handle_syntaxhighlight)),
- # <source>: Fallback case for self-closing tags
- (r'(?i)(<)(source)\b(\s*?)((?:[^>]|-->)*?)(/\s*?(?<!--)>)', bygroups(
- Punctuation, Name.Tag, Whitespace, using(this, state=['root', 'attr']), Punctuation)),
- # <score>
- (r'(?i)(</)(score)\b(\s*)(>)',
- bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
- (r'(?si)(<)(score)\b([^>]*?(?<!/)>.*?)(?=</\2\s*>)',
- bygroups(Punctuation, Name.Tag, handle_score)),
- # <score>: Fallback case for self-closing tags
- (r'(?i)(<)(score)\b(\s*?)((?:[^>]|-->)*?)(/\s*?(?<!--)>)', bygroups(
- Punctuation, Name.Tag, Whitespace, using(this, state=['root', 'attr']), Punctuation)),
- # Other parser tags
- (r'(?i)(<)({})\b'.format('|'.join(parser_tags)),
- bygroups(Punctuation, Name.Tag), 'tag-inner-ordinary'),
- (r'(?i)(</)({})\b(\s*)(>)'.format('|'.join(parser_tags)),
- bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
- # LanguageConverter markups
- (
- r"""(?xi)
- (-\{{) # Use {{ to escape format()
- ([^|]) (\|)
- (?:
- (?: ([^;]*?) (=>))?
- (\s* (?:{variants}) \s*) (:)
- )?
- """.format(variants='|'.join(variant_langs)),
- bygroups(Punctuation, Keyword, Punctuation,
- using(this, state=['root', 'lc-raw']),
- Operator, Name.Label, Punctuation),
- 'lc-inner'
- ),
- # LanguageConverter markups: composite conversion grammar
- (
- r"""(?xi)
- (-\{)
- ([a-z\s;-]*?) (\|)
- """,
- bygroups(Punctuation,
- using(this, state=['root', 'lc-flag']),
- Punctuation),
- 'lc-raw'
- ),
- # LanguageConverter markups: fallbacks
- (
- r"""(?xi)
- (-\{{) (?!\{{) # Use {{ to escape format()
- (?: (\s* (?:{variants}) \s*) (:))?
- """.format(variants='|'.join(variant_langs)),
- bygroups(Punctuation, Name.Label, Punctuation),
- 'lc-inner'
- ),
- ],
- 'wikilink-name': [
- include('replaceable'),
- (r'[^{<]+', Name.Tag),
- (r'(?s).', Name.Tag),
- ],
- 'wikilink-inner': [
- # Quit in case of another wikilink
- (r'(?=\[\[)', Punctuation, '#pop'),
- (r'\]\]', Punctuation, '#pop'),
- include('inline'),
- include('text'),
- ],
- 'medialink-inner': [
- (r'\]\]', Punctuation, '#pop'),
- (r'(\|)([^\n=|]*)(=)',
- bygroups(Punctuation, Name.Attribute, Operator)),
- (r'\|', Punctuation),
- include('inline'),
- include('text'),
- ],
- 'quote-common': [
- # Quit in case of link/template endings
- (r'(?=\]\]|\{\{|\}\})', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- ],
- 'inline-italic': [
- include('quote-common'),
- (r"('')(''')(?!')", bygroups(Generic.Emph,
- Generic.Strong), ('#pop', 'inline-bold')),
- (r"'''(?!')", Generic.EmphStrong, ('#pop', 'inline-italic-bold')),
- (r"''(?!')", Generic.Emph, '#pop'),
- include('inline'),
- include('text-italic'),
- ],
- 'inline-bold': [
- include('quote-common'),
- (r"(''')('')(?!')", bygroups(
- Generic.Strong, Generic.Emph), ('#pop', 'inline-italic')),
- (r"'''(?!')", Generic.Strong, '#pop'),
- (r"''(?!')", Generic.EmphStrong, ('#pop', 'inline-bold-italic')),
- include('inline'),
- include('text-bold'),
- ],
- 'inline-bold-italic': [
- include('quote-common'),
- (r"('')(''')(?!')", bygroups(Generic.EmphStrong,
- Generic.Strong), '#pop'),
- (r"'''(?!')", Generic.EmphStrong, ('#pop', 'inline-italic')),
- (r"''(?!')", Generic.EmphStrong, ('#pop', 'inline-bold')),
- include('inline'),
- include('text-bold-italic'),
- ],
- 'inline-italic-bold': [
- include('quote-common'),
- (r"(''')('')(?!')", bygroups(
- Generic.EmphStrong, Generic.Emph), '#pop'),
- (r"'''(?!')", Generic.EmphStrong, ('#pop', 'inline-italic')),
- (r"''(?!')", Generic.EmphStrong, ('#pop', 'inline-bold')),
- include('inline'),
- include('text-bold-italic'),
- ],
- 'lc-flag': [
- (r'\s+', Whitespace),
- (r';', Punctuation),
- *text_rules(Keyword),
- ],
- 'lc-inner': [
- (
- r"""(?xi)
- (;)
- (?: ([^;]*?) (=>))?
- (\s* (?:{variants}) \s*) (:)
- """.format(variants='|'.join(variant_langs)),
- bygroups(Punctuation, using(this, state=['root', 'lc-raw']),
- Operator, Name.Label, Punctuation)
- ),
- (r';?\s*?\}-', Punctuation, '#pop'),
- include('inline'),
- include('text'),
- ],
- 'lc-raw': [
- (r'\}-', Punctuation, '#pop'),
- include('inline'),
- include('text'),
- ],
- 'replaceable': [
- # Comments
- (r'<!--[\s\S]*?(?:-->|\Z)', Comment.Multiline),
- # Parameters
- (
- r"""(?x)
- (\{{3})
- ([^|]*?)
- (?=\}{3}|\|)
- """,
- bygroups(Punctuation, Name.Variable),
- 'parameter-inner',
- ),
- # Magic variables
- (r'(?i)(\{\{)(\s*)(%s)(\s*)(\}\})' % '|'.join(magic_vars_i),
- bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
- (r'(\{\{)(\s*)(%s)(\s*)(\}\})' % '|'.join(magic_vars),
- bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
- # Parser functions & templates
- (r'\{\{', Punctuation, 'template-begin-space'),
- # <tvar> legacy syntax
- (r'(?i)(<)(tvar)\b(\|)([^>]*?)(>)', bygroups(Punctuation,
- Name.Tag, Punctuation, String, Punctuation)),
- (r'</>', Punctuation, '#pop'),
- # <tvar>
- (r'(?i)(<)(tvar)\b', bygroups(Punctuation, Name.Tag), 'tag-inner-ordinary'),
- (r'(?i)(</)(tvar)\b(\s*)(>)',
- bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
- ],
- 'parameter-inner': [
- (r'\}{3}', Punctuation, '#pop'),
- (r'\|', Punctuation),
- include('inline'),
- include('text'),
- ],
- 'template-begin-space': [
- # Templates allow line breaks at the beginning, and due to how MediaWiki handles
- # comments, an extra state is required to handle things like {{\n<!---->\n name}}
- (r'<!--[\s\S]*?(?:-->|\Z)', Comment.Multiline),
- (r'\s+', Whitespace),
- # Parser functions
- (
- r'(?i)(\#[%s]*?|%s)(:)' % (title_char,
- '|'.join(parser_functions_i)),
- bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
- ),
- (
- r'(%s)(:)' % ('|'.join(parser_functions)),
- bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
- ),
- # Templates
- (
- r'(?i)([%s]*?)(:)' % title_char,
- bygroups(Name.Namespace, Punctuation), ('#pop', 'template-name')
- ),
- default(('#pop', 'template-name'),),
- ],
- 'template-name': [
- (r'(\s*?)(\|)', bygroups(Text, Punctuation), ('#pop', 'template-inner')),
- (r'\}\}', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- include('replaceable'),
- *text_rules(Name.Tag),
- ],
- 'template-inner': [
- (r'\}\}', Punctuation, '#pop'),
- (r'\|', Punctuation),
- (
- r"""(?x)
- (?<=\|)
- ( (?: (?! \{\{ | \}\} )[^=\|<])*? ) # Exclude templates and tags
- (=)
- """,
- bygroups(Name.Label, Operator)
- ),
- include('inline'),
- include('text'),
- ],
- 'table': [
- # Use [ \t\n\r\0\x0B] instead of \s to follow PHP trim() behavior
- # Endings
- (r'^([ \t\n\r\0\x0B]*?)(\|\})',
- bygroups(Whitespace, Punctuation), '#pop'),
- # Table rows
- (r'^([ \t\n\r\0\x0B]*?)(\|-+)(.*)$', bygroups(Whitespace, Punctuation,
- using(this, state=['root', 'attr']))),
- # Captions
- (
- r"""(?x)
- ^([ \t\n\r\0\x0B]*?)(\|\+)
- # Exclude links, template and tags
- (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|) )?
- (.*?)$
- """,
- bygroups(Whitespace, Punctuation, using(this, state=[
- 'root', 'attr']), Punctuation, Generic.Heading),
- ),
- # Table data
- (
- r"""(?x)
- ( ^(?:[ \t\n\r\0\x0B]*?)\| | \|\| )
- (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|)(?!\|) )?
- """,
- bygroups(Punctuation, using(this, state=[
- 'root', 'attr']), Punctuation),
- ),
- # Table headers
- (
- r"""(?x)
- ( ^(?:[ \t\n\r\0\x0B]*?)! )
- (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|)(?!\|) )?
- """,
- bygroups(Punctuation, using(this, state=[
- 'root', 'attr']), Punctuation),
- 'table-header',
- ),
- include('list'),
- include('inline'),
- include('text'),
- ],
- 'table-header': [
- # Requires another state for || handling inside headers
- (r'\n', Text, '#pop'),
- (
- r"""(?x)
- (!!|\|\|)
- (?:
- ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )
- (\|)(?!\|)
- )?
- """,
- bygroups(Punctuation, using(this, state=[
- 'root', 'attr']), Punctuation)
- ),
- *text_rules(Generic.Subheading),
- ],
- 'entity': [
- (r'&\S*?;', Name.Entity),
- ],
- 'dt': [
- (r'\n', Text, '#pop'),
- include('inline'),
- (r':', Keyword, '#pop'),
- include('text'),
- ],
- 'extlink-inner': [
- (r'\]', Punctuation, '#pop'),
- include('inline'),
- include('text'),
- ],
- 'nowiki-ish': [
- include('entity'),
- include('text'),
- ],
- 'attr': [
- include('replaceable'),
- (r'\s+', Whitespace),
- (r'(=)(\s*)(")', bygroups(Operator, Whitespace, String.Double), 'attr-val-2'),
- (r"(=)(\s*)(')", bygroups(Operator, Whitespace, String.Single), 'attr-val-1'),
- (r'(=)(\s*)', bygroups(Operator, Whitespace), 'attr-val-0'),
- (r'[\w:-]+', Name.Attribute),
-
- ],
- 'attr-val-0': [
- (r'\s', Whitespace, '#pop'),
- include('replaceable'),
- *text_rules(String),
- ],
- 'attr-val-1': [
- (r"'", String.Single, '#pop'),
- include('replaceable'),
- *text_rules(String.Single),
- ],
- 'attr-val-2': [
- (r'"', String.Double, '#pop'),
- include('replaceable'),
- *text_rules(String.Double),
- ],
- 'tag-inner-ordinary': [
- (r'/?\s*>', Punctuation, '#pop'),
- include('tag-attr'),
- ],
- 'tag-inner': [
- # Return to root state for self-closing tags
- (r'/\s*>', Punctuation, '#pop:2'),
- (r'\s*>', Punctuation, '#pop'),
- include('tag-attr'),
- ],
- # The states below are just like their non-tag variants; the key difference is
- # that they forcibly quit when encountering tag closing markup
- 'tag-attr': [
- include('replaceable'),
- (r'\s+', Whitespace),
- (r'(=)(\s*)(")', bygroups(Operator,
- Whitespace, String.Double), 'tag-attr-val-2'),
- (r"(=)(\s*)(')", bygroups(Operator,
- Whitespace, String.Single), 'tag-attr-val-1'),
- (r'(=)(\s*)', bygroups(Operator, Whitespace), 'tag-attr-val-0'),
- (r'[\w:-]+', Name.Attribute),
-
- ],
- 'tag-attr-val-0': [
- (r'\s', Whitespace, '#pop'),
- (r'/?>', Punctuation, '#pop:2'),
- include('replaceable'),
- *text_rules(String),
- ],
- 'tag-attr-val-1': [
- (r"'", String.Single, '#pop'),
- (r'/?>', Punctuation, '#pop:2'),
- include('replaceable'),
- *text_rules(String.Single),
- ],
- 'tag-attr-val-2': [
- (r'"', String.Double, '#pop'),
- (r'/?>', Punctuation, '#pop:2'),
- include('replaceable'),
- *text_rules(String.Double),
- ],
- 'tag-nowiki': nowiki_tag_rules('nowiki'),
- 'tag-pre': nowiki_tag_rules('pre'),
- 'tag-categorytree': plaintext_tag_rules('categorytree'),
- 'tag-dynamicpagelist': plaintext_tag_rules('dynamicpagelist'),
- 'tag-hiero': plaintext_tag_rules('hiero'),
- 'tag-inputbox': plaintext_tag_rules('inputbox'),
- 'tag-imagemap': plaintext_tag_rules('imagemap'),
- 'tag-charinsert': plaintext_tag_rules('charinsert'),
- 'tag-timeline': plaintext_tag_rules('timeline'),
- 'tag-gallery': plaintext_tag_rules('gallery'),
- 'tag-graph': plaintext_tag_rules('graph'),
- 'tag-rss': plaintext_tag_rules('rss'),
- 'tag-math': delegate_tag_rules('math', TexLexer),
- 'tag-chem': delegate_tag_rules('chem', TexLexer),
- 'tag-ce': delegate_tag_rules('ce', TexLexer),
- 'tag-templatedata': delegate_tag_rules('templatedata', JsonLexer),
- 'text-italic': text_rules(Generic.Emph),
- 'text-bold': text_rules(Generic.Strong),
- 'text-bold-italic': text_rules(Generic.EmphStrong),
- 'text': text_rules(Text),
- }
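For context, the delegate_tag_rules entries above hand the body of tags such as <math> and <templatedata> to another lexer (TexLexer, JsonLexer). A minimal, self-contained analogue of that delegation pattern, assuming Pygments is installed; the class and name below are hypothetical illustrations, not the deleted lexer itself:

    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexer import RegexLexer, bygroups, using
    from pygments.lexers import TexLexer
    from pygments.token import Name, Text

    class TinyMathTagLexer(RegexLexer):
        # Hypothetical, illustration only: delegate a <math>...</math> body to TexLexer,
        # keeping the surrounding tags as markup tokens.
        name = 'TinyMathTag'
        tokens = {
            'root': [
                (r'(<math>)(.*?)(</math>)',
                 bygroups(Name.Tag, using(TexLexer), Name.Tag)),
                (r'[^<]+', Text),
                (r'<', Text),
            ],
        }

    print(highlight(r'x is <math>\frac{a}{b}</math>.', TinyMathTagLexer(), NullFormatter()))

NullFormatter simply echoes the token text, so this only demonstrates that the embedded TeX is tokenized by the delegate lexer.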
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/math.py b/venv/lib/python3.11/site-packages/pygments/lexers/math.py
deleted file mode 100644
index 530f853..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/math.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
- pygments.lexers.math
- ~~~~~~~~~~~~~~~~~~~~
-
- Just export lexers that were contained in this module.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.python import NumPyLexer
-from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
- OctaveLexer, ScilabLexer
-from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
-from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
-from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
-from pygments.lexers.idl import IDLLexer
-from pygments.lexers.algebra import MuPADLexer
-
-__all__ = []
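The module removed here was only a backwards-compatibility shim: every lexer it re-exported still lives in its canonical module and remains resolvable through the package registry. A minimal sketch, assuming Pygments is installed:

    from pygments.lexers.matlab import MatlabLexer   # canonical home, not pygments.lexers.math
    from pygments.lexers import get_lexer_by_name

    # The registry resolves the same class by its alias.
    print(type(get_lexer_by_name('matlab')).__name__)   # MatlabLexer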
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/matlab.py b/venv/lib/python3.11/site-packages/pygments/lexers/matlab.py
deleted file mode 100644
index 753a6ef..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/matlab.py
+++ /dev/null
@@ -1,3308 +0,0 @@
-"""
- pygments.lexers.matlab
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Matlab and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \
- do_insertions, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-from pygments.lexers import _scilab_builtins
-
-__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
-
-
-class MatlabLexer(RegexLexer):
- """
- For Matlab source code.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab'
- aliases = ['matlab']
- filenames = ['*.m']
- mimetypes = ['text/matlab']
-
- _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\'
-
- tokens = {
- 'expressions': [
- # operators:
- (_operators, Operator),
-
- # numbers (must come before punctuation to handle `.5`; cannot use
- # `\b` due to e.g. `5. + .5`). The negative lookahead on operators
- # avoids including the dot in `1./x` (the dot is part of `./`).
- (r'(?<!\w)((\d+\.\d+)|(\d*\.\d+)|(\d+\.(?!%s)))'
- r'([eEf][+-]?\d+)?(?!\w)' % _operators, Number.Float),
- (r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
- (r'\b\d+\b', Number.Integer),
-
- # punctuation:
- (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
- (r'=|:|;', Punctuation),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
-
- (r'"(""|[^"])*"', String),
-
- (r'(?<![\w)\].])\'', String, 'string'),
- (r'[a-zA-Z_]\w*', Name),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'root': [
- # A line starting with '!' is sent as a system command. Not sure what
- # label to use...
- (r'^!.*', String.Other),
- (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
- (r'%.*$', Comment),
- (r'(\s*^\s*)(function)\b', bygroups(Whitespace, Keyword), 'deffunc'),
- (r'(\s*^\s*)(properties)(\s+)(\()',
- bygroups(Whitespace, Keyword, Whitespace, Punctuation),
- ('defprops', 'propattrs')),
- (r'(\s*^\s*)(properties)\b',
- bygroups(Whitespace, Keyword), 'defprops'),
-
- # from 'iskeyword' on version 9.4 (R2018a):
- # Check that there is no preceding dot, as keywords are valid field
- # names.
- (words(('break', 'case', 'catch', 'classdef', 'continue',
- 'dynamicprops', 'else', 'elseif', 'end', 'for', 'function',
- 'global', 'if', 'methods', 'otherwise', 'parfor',
- 'persistent', 'return', 'spmd', 'switch',
- 'try', 'while'),
- prefix=r'(?<!\.)(\s*)(', suffix=r')\b'),
- bygroups(Whitespace, Keyword)),
-
- (
- words(
- [
- # See https://mathworks.com/help/matlab/referencelist.html
- # Below data from 2021-02-10T18:24:08Z
- # for Matlab release R2020b
- "BeginInvoke",
- "COM",
- "Combine",
- "CombinedDatastore",
- "EndInvoke",
- "Execute",
- "FactoryGroup",
- "FactorySetting",
- "Feval",
- "FunctionTestCase",
- "GetCharArray",
- "GetFullMatrix",
- "GetVariable",
- "GetWorkspaceData",
- "GraphPlot",
- "H5.close",
- "H5.garbage_collect",
- "H5.get_libversion",
- "H5.open",
- "H5.set_free_list_limits",
- "H5A.close",
- "H5A.create",
- "H5A.delete",
- "H5A.get_info",
- "H5A.get_name",
- "H5A.get_space",
- "H5A.get_type",
- "H5A.iterate",
- "H5A.open",
- "H5A.open_by_idx",
- "H5A.open_by_name",
- "H5A.read",
- "H5A.write",
- "H5D.close",
- "H5D.create",
- "H5D.get_access_plist",
- "H5D.get_create_plist",
- "H5D.get_offset",
- "H5D.get_space",
- "H5D.get_space_status",
- "H5D.get_storage_size",
- "H5D.get_type",
- "H5D.open",
- "H5D.read",
- "H5D.set_extent",
- "H5D.vlen_get_buf_size",
- "H5D.write",
- "H5DS.attach_scale",
- "H5DS.detach_scale",
- "H5DS.get_label",
- "H5DS.get_num_scales",
- "H5DS.get_scale_name",
- "H5DS.is_scale",
- "H5DS.iterate_scales",
- "H5DS.set_label",
- "H5DS.set_scale",
- "H5E.clear",
- "H5E.get_major",
- "H5E.get_minor",
- "H5E.walk",
- "H5F.close",
- "H5F.create",
- "H5F.flush",
- "H5F.get_access_plist",
- "H5F.get_create_plist",
- "H5F.get_filesize",
- "H5F.get_freespace",
- "H5F.get_info",
- "H5F.get_mdc_config",
- "H5F.get_mdc_hit_rate",
- "H5F.get_mdc_size",
- "H5F.get_name",
- "H5F.get_obj_count",
- "H5F.get_obj_ids",
- "H5F.is_hdf5",
- "H5F.mount",
- "H5F.open",
- "H5F.reopen",
- "H5F.set_mdc_config",
- "H5F.unmount",
- "H5G.close",
- "H5G.create",
- "H5G.get_info",
- "H5G.open",
- "H5I.dec_ref",
- "H5I.get_file_id",
- "H5I.get_name",
- "H5I.get_ref",
- "H5I.get_type",
- "H5I.inc_ref",
- "H5I.is_valid",
- "H5L.copy",
- "H5L.create_external",
- "H5L.create_hard",
- "H5L.create_soft",
- "H5L.delete",
- "H5L.exists",
- "H5L.get_info",
- "H5L.get_name_by_idx",
- "H5L.get_val",
- "H5L.iterate",
- "H5L.iterate_by_name",
- "H5L.move",
- "H5L.visit",
- "H5L.visit_by_name",
- "H5ML.compare_values",
- "H5ML.get_constant_names",
- "H5ML.get_constant_value",
- "H5ML.get_function_names",
- "H5ML.get_mem_datatype",
- "H5O.close",
- "H5O.copy",
- "H5O.get_comment",
- "H5O.get_comment_by_name",
- "H5O.get_info",
- "H5O.link",
- "H5O.open",
- "H5O.open_by_idx",
- "H5O.set_comment",
- "H5O.set_comment_by_name",
- "H5O.visit",
- "H5O.visit_by_name",
- "H5P.all_filters_avail",
- "H5P.close",
- "H5P.close_class",
- "H5P.copy",
- "H5P.create",
- "H5P.equal",
- "H5P.exist",
- "H5P.fill_value_defined",
- "H5P.get",
- "H5P.get_alignment",
- "H5P.get_alloc_time",
- "H5P.get_attr_creation_order",
- "H5P.get_attr_phase_change",
- "H5P.get_btree_ratios",
- "H5P.get_char_encoding",
- "H5P.get_chunk",
- "H5P.get_chunk_cache",
- "H5P.get_class",
- "H5P.get_class_name",
- "H5P.get_class_parent",
- "H5P.get_copy_object",
- "H5P.get_create_intermediate_group",
- "H5P.get_driver",
- "H5P.get_edc_check",
- "H5P.get_external",
- "H5P.get_external_count",
- "H5P.get_family_offset",
- "H5P.get_fapl_core",
- "H5P.get_fapl_family",
- "H5P.get_fapl_multi",
- "H5P.get_fclose_degree",
- "H5P.get_fill_time",
- "H5P.get_fill_value",
- "H5P.get_filter",
- "H5P.get_filter_by_id",
- "H5P.get_gc_references",
- "H5P.get_hyper_vector_size",
- "H5P.get_istore_k",
- "H5P.get_layout",
- "H5P.get_libver_bounds",
- "H5P.get_link_creation_order",
- "H5P.get_link_phase_change",
- "H5P.get_mdc_config",
- "H5P.get_meta_block_size",
- "H5P.get_multi_type",
- "H5P.get_nfilters",
- "H5P.get_nprops",
- "H5P.get_sieve_buf_size",
- "H5P.get_size",
- "H5P.get_sizes",
- "H5P.get_small_data_block_size",
- "H5P.get_sym_k",
- "H5P.get_userblock",
- "H5P.get_version",
- "H5P.isa_class",
- "H5P.iterate",
- "H5P.modify_filter",
- "H5P.remove_filter",
- "H5P.set",
- "H5P.set_alignment",
- "H5P.set_alloc_time",
- "H5P.set_attr_creation_order",
- "H5P.set_attr_phase_change",
- "H5P.set_btree_ratios",
- "H5P.set_char_encoding",
- "H5P.set_chunk",
- "H5P.set_chunk_cache",
- "H5P.set_copy_object",
- "H5P.set_create_intermediate_group",
- "H5P.set_deflate",
- "H5P.set_edc_check",
- "H5P.set_external",
- "H5P.set_family_offset",
- "H5P.set_fapl_core",
- "H5P.set_fapl_family",
- "H5P.set_fapl_log",
- "H5P.set_fapl_multi",
- "H5P.set_fapl_sec2",
- "H5P.set_fapl_split",
- "H5P.set_fapl_stdio",
- "H5P.set_fclose_degree",
- "H5P.set_fill_time",
- "H5P.set_fill_value",
- "H5P.set_filter",
- "H5P.set_fletcher32",
- "H5P.set_gc_references",
- "H5P.set_hyper_vector_size",
- "H5P.set_istore_k",
- "H5P.set_layout",
- "H5P.set_libver_bounds",
- "H5P.set_link_creation_order",
- "H5P.set_link_phase_change",
- "H5P.set_mdc_config",
- "H5P.set_meta_block_size",
- "H5P.set_multi_type",
- "H5P.set_nbit",
- "H5P.set_scaleoffset",
- "H5P.set_shuffle",
- "H5P.set_sieve_buf_size",
- "H5P.set_sizes",
- "H5P.set_small_data_block_size",
- "H5P.set_sym_k",
- "H5P.set_userblock",
- "H5R.create",
- "H5R.dereference",
- "H5R.get_name",
- "H5R.get_obj_type",
- "H5R.get_region",
- "H5S.close",
- "H5S.copy",
- "H5S.create",
- "H5S.create_simple",
- "H5S.extent_copy",
- "H5S.get_select_bounds",
- "H5S.get_select_elem_npoints",
- "H5S.get_select_elem_pointlist",
- "H5S.get_select_hyper_blocklist",
- "H5S.get_select_hyper_nblocks",
- "H5S.get_select_npoints",
- "H5S.get_select_type",
- "H5S.get_simple_extent_dims",
- "H5S.get_simple_extent_ndims",
- "H5S.get_simple_extent_npoints",
- "H5S.get_simple_extent_type",
- "H5S.is_simple",
- "H5S.offset_simple",
- "H5S.select_all",
- "H5S.select_elements",
- "H5S.select_hyperslab",
- "H5S.select_none",
- "H5S.select_valid",
- "H5S.set_extent_none",
- "H5S.set_extent_simple",
- "H5T.array_create",
- "H5T.close",
- "H5T.commit",
- "H5T.committed",
- "H5T.copy",
- "H5T.create",
- "H5T.detect_class",
- "H5T.enum_create",
- "H5T.enum_insert",
- "H5T.enum_nameof",
- "H5T.enum_valueof",
- "H5T.equal",
- "H5T.get_array_dims",
- "H5T.get_array_ndims",
- "H5T.get_class",
- "H5T.get_create_plist",
- "H5T.get_cset",
- "H5T.get_ebias",
- "H5T.get_fields",
- "H5T.get_inpad",
- "H5T.get_member_class",
- "H5T.get_member_index",
- "H5T.get_member_name",
- "H5T.get_member_offset",
- "H5T.get_member_type",
- "H5T.get_member_value",
- "H5T.get_native_type",
- "H5T.get_nmembers",
- "H5T.get_norm",
- "H5T.get_offset",
- "H5T.get_order",
- "H5T.get_pad",
- "H5T.get_precision",
- "H5T.get_sign",
- "H5T.get_size",
- "H5T.get_strpad",
- "H5T.get_super",
- "H5T.get_tag",
- "H5T.insert",
- "H5T.is_variable_str",
- "H5T.lock",
- "H5T.open",
- "H5T.pack",
- "H5T.set_cset",
- "H5T.set_ebias",
- "H5T.set_fields",
- "H5T.set_inpad",
- "H5T.set_norm",
- "H5T.set_offset",
- "H5T.set_order",
- "H5T.set_pad",
- "H5T.set_precision",
- "H5T.set_sign",
- "H5T.set_size",
- "H5T.set_strpad",
- "H5T.set_tag",
- "H5T.vlen_create",
- "H5Z.filter_avail",
- "H5Z.get_filter_info",
- "Inf",
- "KeyValueDatastore",
- "KeyValueStore",
- "MException",
- "MException.last",
- "MaximizeCommandWindow",
- "MemoizedFunction",
- "MinimizeCommandWindow",
- "NET",
- "NET.Assembly",
- "NET.GenericClass",
- "NET.NetException",
- "NET.addAssembly",
- "NET.convertArray",
- "NET.createArray",
- "NET.createGeneric",
- "NET.disableAutoRelease",
- "NET.enableAutoRelease",
- "NET.invokeGenericMethod",
- "NET.isNETSupported",
- "NET.setStaticProperty",
- "NaN",
- "NaT",
- "OperationResult",
- "PutCharArray",
- "PutFullMatrix",
- "PutWorkspaceData",
- "PythonEnvironment",
- "Quit",
- "RandStream",
- "ReleaseCompatibilityException",
- "ReleaseCompatibilityResults",
- "Remove",
- "RemoveAll",
- "Setting",
- "SettingsGroup",
- "TallDatastore",
- "Test",
- "TestResult",
- "Tiff",
- "TransformedDatastore",
- "ValueIterator",
- "VersionResults",
- "VideoReader",
- "VideoWriter",
- "abs",
- "accumarray",
- "acos",
- "acosd",
- "acosh",
- "acot",
- "acotd",
- "acoth",
- "acsc",
- "acscd",
- "acsch",
- "actxGetRunningServer",
- "actxserver",
- "add",
- "addCause",
- "addCorrection",
- "addFile",
- "addFolderIncludingChildFiles",
- "addGroup",
- "addLabel",
- "addPath",
- "addReference",
- "addSetting",
- "addShortcut",
- "addShutdownFile",
- "addStartupFile",
- "addStyle",
- "addToolbarExplorationButtons",
- "addboundary",
- "addcats",
- "addedge",
- "addevent",
- "addlistener",
- "addmulti",
- "addnode",
- "addpath",
- "addpoints",
- "addpref",
- "addprop",
- "addsample",
- "addsampletocollection",
- "addtodate",
- "addts",
- "addvars",
- "adjacency",
- "airy",
- "align",
- "alim",
- "all",
- "allchild",
- "alpha",
- "alphaShape",
- "alphaSpectrum",
- "alphaTriangulation",
- "alphamap",
- "alphanumericBoundary",
- "alphanumericsPattern",
- "amd",
- "analyzeCodeCompatibility",
- "ancestor",
- "angle",
- "animatedline",
- "annotation",
- "ans",
- "any",
- "appdesigner",
- "append",
- "area",
- "arguments",
- "array2table",
- "array2timetable",
- "arrayDatastore",
- "arrayfun",
- "asFewOfPattern",
- "asManyOfPattern",
- "ascii",
- "asec",
- "asecd",
- "asech",
- "asin",
- "asind",
- "asinh",
- "assert",
- "assignin",
- "atan",
- "atan2",
- "atan2d",
- "atand",
- "atanh",
- "audiodevinfo",
- "audiodevreset",
- "audioinfo",
- "audioplayer",
- "audioread",
- "audiorecorder",
- "audiowrite",
- "autumn",
- "axes",
- "axis",
- "axtoolbar",
- "axtoolbarbtn",
- "balance",
- "bandwidth",
- "bar",
- "bar3",
- "bar3h",
- "barh",
- "barycentricToCartesian",
- "base2dec",
- "batchStartupOptionUsed",
- "bctree",
- "beep",
- "bench",
- "besselh",
- "besseli",
- "besselj",
- "besselk",
- "bessely",
- "beta",
- "betainc",
- "betaincinv",
- "betaln",
- "between",
- "bfsearch",
- "bicg",
- "bicgstab",
- "bicgstabl",
- "biconncomp",
- "bin2dec",
- "binary",
- "binscatter",
- "bitand",
- "bitcmp",
- "bitget",
- "bitnot",
- "bitor",
- "bitset",
- "bitshift",
- "bitxor",
- "blanks",
- "ble",
- "blelist",
- "blkdiag",
- "bluetooth",
- "bluetoothlist",
- "bone",
- "boundary",
- "boundaryFacets",
- "boundaryshape",
- "boundingbox",
- "bounds",
- "box",
- "boxchart",
- "brighten",
- "brush",
- "bsxfun",
- "bubblechart",
- "bubblechart3",
- "bubblelegend",
- "bubblelim",
- "bubblesize",
- "builddocsearchdb",
- "builtin",
- "bvp4c",
- "bvp5c",
- "bvpget",
- "bvpinit",
- "bvpset",
- "bvpxtend",
- "caldays",
- "caldiff",
- "calendar",
- "calendarDuration",
- "calllib",
- "calmonths",
- "calquarters",
- "calweeks",
- "calyears",
- "camdolly",
- "cameratoolbar",
- "camlight",
- "camlookat",
- "camorbit",
- "campan",
- "campos",
- "camproj",
- "camroll",
- "camtarget",
- "camup",
- "camva",
- "camzoom",
- "canUseGPU",
- "canUseParallelPool",
- "cart2pol",
- "cart2sph",
- "cartesianToBarycentric",
- "caseInsensitivePattern",
- "caseSensitivePattern",
- "cast",
- "cat",
- "categorical",
- "categories",
- "caxis",
- "cd",
- "cdf2rdf",
- "cdfepoch",
- "cdfinfo",
- "cdflib",
- "cdfread",
- "ceil",
- "cell",
- "cell2mat",
- "cell2struct",
- "cell2table",
- "celldisp",
- "cellfun",
- "cellplot",
- "cellstr",
- "centrality",
- "centroid",
- "cgs",
- "char",
- "characterListPattern",
- "characteristic",
- "checkcode",
- "chol",
- "cholupdate",
- "choose",
- "chooseContextMenu",
- "circshift",
- "circumcenter",
- "cla",
- "clabel",
- "class",
- "classUnderlying",
- "clc",
- "clear",
- "clearAllMemoizedCaches",
- "clearPersonalValue",
- "clearTemporaryValue",
- "clearpoints",
- "clearvars",
- "clf",
- "clibArray",
- "clibConvertArray",
- "clibIsNull",
- "clibIsReadOnly",
- "clibRelease",
- "clibgen.buildInterface",
- "clibgen.generateLibraryDefinition",
- "clipboard",
- "clock",
- "clone",
- "close",
- "closeFile",
- "closereq",
- "cmap2gray",
- "cmpermute",
- "cmunique",
- "codeCompatibilityReport",
- "colamd",
- "collapse",
- "colon",
- "colorbar",
- "colorcube",
- "colormap",
- "colororder",
- "colperm",
- "com.mathworks.engine.MatlabEngine",
- "com.mathworks.matlab.types.CellStr",
- "com.mathworks.matlab.types.Complex",
- "com.mathworks.matlab.types.HandleObject",
- "com.mathworks.matlab.types.Struct",
- "combine",
- "comet",
- "comet3",
- "compan",
- "compass",
- "complex",
- "compose",
- "computer",
- "comserver",
- "cond",
- "condeig",
- "condensation",
- "condest",
- "coneplot",
- "configureCallback",
- "configureTerminator",
- "conj",
- "conncomp",
- "containers.Map",
- "contains",
- "containsrange",
- "contour",
- "contour3",
- "contourc",
- "contourf",
- "contourslice",
- "contrast",
- "conv",
- "conv2",
- "convertCharsToStrings",
- "convertContainedStringsToChars",
- "convertStringsToChars",
- "convertTo",
- "convertvars",
- "convexHull",
- "convhull",
- "convhulln",
- "convn",
- "cool",
- "copper",
- "copyHDU",
- "copyfile",
- "copygraphics",
- "copyobj",
- "corrcoef",
- "cos",
- "cosd",
- "cosh",
- "cospi",
- "cot",
- "cotd",
- "coth",
- "count",
- "countcats",
- "cov",
- "cplxpair",
- "cputime",
- "createCategory",
- "createFile",
- "createImg",
- "createLabel",
- "createTbl",
- "criticalAlpha",
- "cross",
- "csc",
- "cscd",
- "csch",
- "ctranspose",
- "cummax",
- "cummin",
- "cumprod",
- "cumsum",
- "cumtrapz",
- "curl",
- "currentProject",
- "cylinder",
- "daspect",
- "dataTipInteraction",
- "dataTipTextRow",
- "datacursormode",
- "datastore",
- "datatip",
- "date",
- "datenum",
- "dateshift",
- "datestr",
- "datetick",
- "datetime",
- "datevec",
- "day",
- "days",
- "dbclear",
- "dbcont",
- "dbdown",
- "dbmex",
- "dbquit",
- "dbstack",
- "dbstatus",
- "dbstep",
- "dbstop",
- "dbtype",
- "dbup",
- "dde23",
- "ddeget",
- "ddensd",
- "ddesd",
- "ddeset",
- "deblank",
- "dec2base",
- "dec2bin",
- "dec2hex",
- "decic",
- "decomposition",
- "deconv",
- "deg2rad",
- "degree",
- "del2",
- "delaunay",
- "delaunayTriangulation",
- "delaunayn",
- "delete",
- "deleteCol",
- "deleteFile",
- "deleteHDU",
- "deleteKey",
- "deleteRecord",
- "deleteRows",
- "delevent",
- "delimitedTextImportOptions",
- "delsample",
- "delsamplefromcollection",
- "demo",
- "descriptor",
- "det",
- "details",
- "detectImportOptions",
- "detrend",
- "deval",
- "dfsearch",
- "diag",
- "dialog",
- "diary",
- "diff",
- "diffuse",
- "digitBoundary",
- "digitsPattern",
- "digraph",
- "dir",
- "disableDefaultInteractivity",
- "discretize",
- "disp",
- "display",
- "dissect",
- "distances",
- "dither",
- "divergence",
- "dmperm",
- "doc",
- "docsearch",
- "dos",
- "dot",
- "double",
- "drag",
- "dragrect",
- "drawnow",
- "dsearchn",
- "duration",
- "dynamicprops",
- "echo",
- "echodemo",
- "echotcpip",
- "edgeAttachments",
- "edgecount",
- "edges",
- "edit",
- "eig",
- "eigs",
- "ellipj",
- "ellipke",
- "ellipsoid",
- "empty",
- "enableDefaultInteractivity",
- "enableLegacyExplorationModes",
- "enableNETfromNetworkDrive",
- "enableservice",
- "endsWith",
- "enumeration",
- "eomday",
- "eps",
- "eq",
- "equilibrate",
- "erase",
- "eraseBetween",
- "erf",
- "erfc",
- "erfcinv",
- "erfcx",
- "erfinv",
- "error",
- "errorbar",
- "errordlg",
- "etime",
- "etree",
- "etreeplot",
- "eval",
- "evalc",
- "evalin",
- "event.ClassInstanceEvent",
- "event.DynamicPropertyEvent",
- "event.EventData",
- "event.PropertyEvent",
- "event.hasListener",
- "event.listener",
- "event.proplistener",
- "eventlisteners",
- "events",
- "exceltime",
- "exist",
- "exit",
- "exp",
- "expand",
- "expint",
- "expm",
- "expm1",
- "export",
- "export2wsdlg",
- "exportapp",
- "exportgraphics",
- "exportsetupdlg",
- "extract",
- "extractAfter",
- "extractBefore",
- "extractBetween",
- "eye",
- "ezpolar",
- "faceNormal",
- "factor",
- "factorial",
- "false",
- "fclose",
- "fcontour",
- "feather",
- "featureEdges",
- "feof",
- "ferror",
- "feval",
- "fewerbins",
- "fft",
- "fft2",
- "fftn",
- "fftshift",
- "fftw",
- "fgetl",
- "fgets",
- "fieldnames",
- "figure",
- "figurepalette",
- "fileDatastore",
- "fileMode",
- "fileName",
- "fileattrib",
- "filemarker",
- "fileparts",
- "fileread",
- "filesep",
- "fill",
- "fill3",
- "fillmissing",
- "filloutliers",
- "filter",
- "filter2",
- "fimplicit",
- "fimplicit3",
- "find",
- "findCategory",
- "findEvent",
- "findFile",
- "findLabel",
- "findall",
- "findedge",
- "findfigs",
- "findgroups",
- "findnode",
- "findobj",
- "findprop",
- "finish",
- "fitsdisp",
- "fitsinfo",
- "fitsread",
- "fitswrite",
- "fix",
- "fixedWidthImportOptions",
- "flag",
- "flintmax",
- "flip",
- "flipedge",
- "fliplr",
- "flipud",
- "floor",
- "flow",
- "flush",
- "fmesh",
- "fminbnd",
- "fminsearch",
- "fopen",
- "format",
- "fplot",
- "fplot3",
- "fprintf",
- "frame2im",
- "fread",
- "freeBoundary",
- "freqspace",
- "frewind",
- "fscanf",
- "fseek",
- "fsurf",
- "ftell",
- "ftp",
- "full",
- "fullfile",
- "func2str",
- "function_handle",
- "functions",
- "functiontests",
- "funm",
- "fwrite",
- "fzero",
- "gallery",
- "gamma",
- "gammainc",
- "gammaincinv",
- "gammaln",
- "gather",
- "gca",
- "gcbf",
- "gcbo",
- "gcd",
- "gcf",
- "gcmr",
- "gco",
- "genpath",
- "geoaxes",
- "geobasemap",
- "geobubble",
- "geodensityplot",
- "geolimits",
- "geoplot",
- "geoscatter",
- "geotickformat",
- "get",
- "getAColParms",
- "getAxes",
- "getBColParms",
- "getColName",
- "getColType",
- "getColorbar",
- "getConstantValue",
- "getEqColType",
- "getFileFormats",
- "getHDUnum",
- "getHDUtype",
- "getHdrSpace",
- "getImgSize",
- "getImgType",
- "getLayout",
- "getLegend",
- "getMockHistory",
- "getNumCols",
- "getNumHDUs",
- "getNumInputs",
- "getNumInputsImpl",
- "getNumOutputs",
- "getNumOutputsImpl",
- "getNumRows",
- "getOpenFiles",
- "getProfiles",
- "getPropertyGroupsImpl",
- "getReport",
- "getTimeStr",
- "getVersion",
- "getabstime",
- "getappdata",
- "getaudiodata",
- "getdatasamples",
- "getdatasamplesize",
- "getenv",
- "getfield",
- "getframe",
- "getinterpmethod",
- "getnext",
- "getpinstatus",
- "getpixelposition",
- "getplayer",
- "getpoints",
- "getpref",
- "getqualitydesc",
- "getrangefromclass",
- "getsamples",
- "getsampleusingtime",
- "gettimeseriesnames",
- "gettsafteratevent",
- "gettsafterevent",
- "gettsatevent",
- "gettsbeforeatevent",
- "gettsbeforeevent",
- "gettsbetweenevents",
- "getvaropts",
- "ginput",
- "gmres",
- "gobjects",
- "gplot",
- "grabcode",
- "gradient",
- "graph",
- "gray",
- "grid",
- "griddata",
- "griddatan",
- "griddedInterpolant",
- "groot",
- "groupcounts",
- "groupfilter",
- "groupsummary",
- "grouptransform",
- "gsvd",
- "gtext",
- "guidata",
- "guide",
- "guihandles",
- "gunzip",
- "gzip",
- "h5create",
- "h5disp",
- "h5info",
- "h5read",
- "h5readatt",
- "h5write",
- "h5writeatt",
- "hadamard",
- "handle",
- "hankel",
- "hasFactoryValue",
- "hasFrame",
- "hasGroup",
- "hasPersonalValue",
- "hasSetting",
- "hasTemporaryValue",
- "hasdata",
- "hasnext",
- "hdfan",
- "hdfdf24",
- "hdfdfr8",
- "hdfh",
- "hdfhd",
- "hdfhe",
- "hdfhx",
- "hdfinfo",
- "hdfml",
- "hdfpt",
- "hdfread",
- "hdfv",
- "hdfvf",
- "hdfvh",
- "hdfvs",
- "head",
- "heatmap",
- "height",
- "help",
- "helpdlg",
- "hess",
- "hex2dec",
- "hex2num",
- "hgexport",
- "hggroup",
- "hgtransform",
- "hidden",
- "highlight",
- "hilb",
- "histcounts",
- "histcounts2",
- "histogram",
- "histogram2",
- "hms",
- "hold",
- "holes",
- "home",
- "horzcat",
- "hot",
- "hour",
- "hours",
- "hover",
- "hsv",
- "hsv2rgb",
- "hypot",
- "i",
- "ichol",
- "idealfilter",
- "idivide",
- "ifft",
- "ifft2",
- "ifftn",
- "ifftshift",
- "ilu",
- "im2double",
- "im2frame",
- "im2gray",
- "im2java",
- "imag",
- "image",
- "imageDatastore",
- "imagesc",
- "imapprox",
- "imfinfo",
- "imformats",
- "imgCompress",
- "import",
- "importdata",
- "imread",
- "imresize",
- "imshow",
- "imtile",
- "imwrite",
- "inShape",
- "incenter",
- "incidence",
- "ind2rgb",
- "ind2sub",
- "indegree",
- "inedges",
- "infoImpl",
- "inmem",
- "inner2outer",
- "innerjoin",
- "inpolygon",
- "input",
- "inputParser",
- "inputdlg",
- "inputname",
- "insertATbl",
- "insertAfter",
- "insertBTbl",
- "insertBefore",
- "insertCol",
- "insertImg",
- "insertRows",
- "int16",
- "int2str",
- "int32",
- "int64",
- "int8",
- "integral",
- "integral2",
- "integral3",
- "interp1",
- "interp2",
- "interp3",
- "interpft",
- "interpn",
- "interpstreamspeed",
- "intersect",
- "intmax",
- "intmin",
- "inv",
- "invhilb",
- "ipermute",
- "iqr",
- "isCompressedImg",
- "isConnected",
- "isDiscreteStateSpecificationMutableImpl",
- "isDone",
- "isDoneImpl",
- "isInactivePropertyImpl",
- "isInputComplexityMutableImpl",
- "isInputDataTypeMutableImpl",
- "isInputSizeMutableImpl",
- "isInterior",
- "isKey",
- "isLoaded",
- "isLocked",
- "isMATLABReleaseOlderThan",
- "isPartitionable",
- "isShuffleable",
- "isStringScalar",
- "isTunablePropertyDataTypeMutableImpl",
- "isUnderlyingType",
- "isa",
- "isaUnderlying",
- "isappdata",
- "isbanded",
- "isbetween",
- "iscalendarduration",
- "iscategorical",
- "iscategory",
- "iscell",
- "iscellstr",
- "ischange",
- "ischar",
- "iscolumn",
- "iscom",
- "isdag",
- "isdatetime",
- "isdiag",
- "isdst",
- "isduration",
- "isempty",
- "isenum",
- "isequal",
- "isequaln",
- "isevent",
- "isfield",
- "isfile",
- "isfinite",
- "isfloat",
- "isfolder",
- "isgraphics",
- "ishandle",
- "ishermitian",
- "ishold",
- "ishole",
- "isinf",
- "isinteger",
- "isinterface",
- "isinterior",
- "isisomorphic",
- "isjava",
- "iskeyword",
- "isletter",
- "islocalmax",
- "islocalmin",
- "islogical",
- "ismac",
- "ismatrix",
- "ismember",
- "ismembertol",
- "ismethod",
- "ismissing",
- "ismultigraph",
- "isnan",
- "isnat",
- "isnumeric",
- "isobject",
- "isocaps",
- "isocolors",
- "isomorphism",
- "isonormals",
- "isordinal",
- "isosurface",
- "isoutlier",
- "ispc",
- "isplaying",
- "ispref",
- "isprime",
- "isprop",
- "isprotected",
- "isreal",
- "isrecording",
- "isregular",
- "isrow",
- "isscalar",
- "issimplified",
- "issorted",
- "issortedrows",
- "isspace",
- "issparse",
- "isstring",
- "isstrprop",
- "isstruct",
- "isstudent",
- "issymmetric",
- "istable",
- "istall",
- "istimetable",
- "istril",
- "istriu",
- "isundefined",
- "isunix",
- "isvalid",
- "isvarname",
- "isvector",
- "isweekend",
- "j",
- "javaArray",
- "javaMethod",
- "javaMethodEDT",
- "javaObject",
- "javaObjectEDT",
- "javaaddpath",
- "javachk",
- "javaclasspath",
- "javarmpath",
- "jet",
- "join",
- "jsondecode",
- "jsonencode",
- "juliandate",
- "keyboard",
- "keys",
- "kron",
- "labeledge",
- "labelnode",
- "lag",
- "laplacian",
- "lastwarn",
- "layout",
- "lcm",
- "ldl",
- "leapseconds",
- "legend",
- "legendre",
- "length",
- "letterBoundary",
- "lettersPattern",
- "lib.pointer",
- "libfunctions",
- "libfunctionsview",
- "libisloaded",
- "libpointer",
- "libstruct",
- "license",
- "light",
- "lightangle",
- "lighting",
- "lin2mu",
- "line",
- "lineBoundary",
- "lines",
- "linkaxes",
- "linkdata",
- "linkprop",
- "linsolve",
- "linspace",
- "listModifiedFiles",
- "listRequiredFiles",
- "listdlg",
- "listener",
- "listfonts",
- "load",
- "loadObjectImpl",
- "loadlibrary",
- "loadobj",
- "localfunctions",
- "log",
- "log10",
- "log1p",
- "log2",
- "logical",
- "loglog",
- "logm",
- "logspace",
- "lookAheadBoundary",
- "lookBehindBoundary",
- "lookfor",
- "lower",
- "ls",
- "lscov",
- "lsqminnorm",
- "lsqnonneg",
- "lsqr",
- "lu",
- "magic",
- "makehgtform",
- "makima",
- "mapreduce",
- "mapreducer",
- "maskedPattern",
- "mat2cell",
- "mat2str",
- "matches",
- "matchpairs",
- "material",
- "matfile",
- "matlab.System",
- "matlab.addons.disableAddon",
- "matlab.addons.enableAddon",
- "matlab.addons.install",
- "matlab.addons.installedAddons",
- "matlab.addons.isAddonEnabled",
- "matlab.addons.toolbox.installToolbox",
- "matlab.addons.toolbox.installedToolboxes",
- "matlab.addons.toolbox.packageToolbox",
- "matlab.addons.toolbox.toolboxVersion",
- "matlab.addons.toolbox.uninstallToolbox",
- "matlab.addons.uninstall",
- "matlab.apputil.create",
- "matlab.apputil.getInstalledAppInfo",
- "matlab.apputil.install",
- "matlab.apputil.package",
- "matlab.apputil.run",
- "matlab.apputil.uninstall",
- "matlab.codetools.requiredFilesAndProducts",
- "matlab.engine.FutureResult",
- "matlab.engine.MatlabEngine",
- "matlab.engine.connect_matlab",
- "matlab.engine.engineName",
- "matlab.engine.find_matlab",
- "matlab.engine.isEngineShared",
- "matlab.engine.shareEngine",
- "matlab.engine.start_matlab",
- "matlab.exception.JavaException",
- "matlab.exception.PyException",
- "matlab.graphics.chartcontainer.ChartContainer",
- "matlab.graphics.chartcontainer.mixin.Colorbar",
- "matlab.graphics.chartcontainer.mixin.Legend",
- "matlab.io.Datastore",
- "matlab.io.datastore.BlockedFileSet",
- "matlab.io.datastore.DsFileReader",
- "matlab.io.datastore.DsFileSet",
- "matlab.io.datastore.FileSet",
- "matlab.io.datastore.FileWritable",
- "matlab.io.datastore.FoldersPropertyProvider",
- "matlab.io.datastore.HadoopLocationBased",
- "matlab.io.datastore.Partitionable",
- "matlab.io.datastore.Shuffleable",
- "matlab.io.hdf4.sd",
- "matlab.io.hdfeos.gd",
- "matlab.io.hdfeos.sw",
- "matlab.io.saveVariablesToScript",
- "matlab.lang.OnOffSwitchState",
- "matlab.lang.correction.AppendArgumentsCorrection",
- "matlab.lang.correction.ConvertToFunctionNotationCorrection",
- "matlab.lang.correction.ReplaceIdentifierCorrection",
- "matlab.lang.makeUniqueStrings",
- "matlab.lang.makeValidName",
- "matlab.mex.MexHost",
- "matlab.mixin.Copyable",
- "matlab.mixin.CustomDisplay",
- "matlab.mixin.Heterogeneous",
- "matlab.mixin.SetGet",
- "matlab.mixin.SetGetExactNames",
- "matlab.mixin.util.PropertyGroup",
- "matlab.mock.AnyArguments",
- "matlab.mock.InteractionHistory",
- "matlab.mock.InteractionHistory.forMock",
- "matlab.mock.MethodCallBehavior",
- "matlab.mock.PropertyBehavior",
- "matlab.mock.PropertyGetBehavior",
- "matlab.mock.PropertySetBehavior",
- "matlab.mock.TestCase",
- "matlab.mock.actions.AssignOutputs",
- "matlab.mock.actions.DoNothing",
- "matlab.mock.actions.Invoke",
- "matlab.mock.actions.ReturnStoredValue",
- "matlab.mock.actions.StoreValue",
- "matlab.mock.actions.ThrowException",
- "matlab.mock.constraints.Occurred",
- "matlab.mock.constraints.WasAccessed",
- "matlab.mock.constraints.WasCalled",
- "matlab.mock.constraints.WasSet",
- "matlab.net.ArrayFormat",
- "matlab.net.QueryParameter",
- "matlab.net.URI",
- "matlab.net.base64decode",
- "matlab.net.base64encode",
- "matlab.net.http.AuthInfo",
- "matlab.net.http.AuthenticationScheme",
- "matlab.net.http.Cookie",
- "matlab.net.http.CookieInfo",
- "matlab.net.http.Credentials",
- "matlab.net.http.Disposition",
- "matlab.net.http.HTTPException",
- "matlab.net.http.HTTPOptions",
- "matlab.net.http.HeaderField",
- "matlab.net.http.LogRecord",
- "matlab.net.http.MediaType",
- "matlab.net.http.Message",
- "matlab.net.http.MessageBody",
- "matlab.net.http.MessageType",
- "matlab.net.http.ProgressMonitor",
- "matlab.net.http.ProtocolVersion",
- "matlab.net.http.RequestLine",
- "matlab.net.http.RequestMessage",
- "matlab.net.http.RequestMethod",
- "matlab.net.http.ResponseMessage",
- "matlab.net.http.StartLine",
- "matlab.net.http.StatusClass",
- "matlab.net.http.StatusCode",
- "matlab.net.http.StatusLine",
- "matlab.net.http.field.AcceptField",
- "matlab.net.http.field.AuthenticateField",
- "matlab.net.http.field.AuthenticationInfoField",
- "matlab.net.http.field.AuthorizationField",
- "matlab.net.http.field.ContentDispositionField",
- "matlab.net.http.field.ContentLengthField",
- "matlab.net.http.field.ContentLocationField",
- "matlab.net.http.field.ContentTypeField",
- "matlab.net.http.field.CookieField",
- "matlab.net.http.field.DateField",
- "matlab.net.http.field.GenericField",
- "matlab.net.http.field.GenericParameterizedField",
- "matlab.net.http.field.HTTPDateField",
- "matlab.net.http.field.IntegerField",
- "matlab.net.http.field.LocationField",
- "matlab.net.http.field.MediaRangeField",
- "matlab.net.http.field.SetCookieField",
- "matlab.net.http.field.URIReferenceField",
- "matlab.net.http.io.BinaryConsumer",
- "matlab.net.http.io.ContentConsumer",
- "matlab.net.http.io.ContentProvider",
- "matlab.net.http.io.FileConsumer",
- "matlab.net.http.io.FileProvider",
- "matlab.net.http.io.FormProvider",
- "matlab.net.http.io.GenericConsumer",
- "matlab.net.http.io.GenericProvider",
- "matlab.net.http.io.ImageConsumer",
- "matlab.net.http.io.ImageProvider",
- "matlab.net.http.io.JSONConsumer",
- "matlab.net.http.io.JSONProvider",
- "matlab.net.http.io.MultipartConsumer",
- "matlab.net.http.io.MultipartFormProvider",
- "matlab.net.http.io.MultipartProvider",
- "matlab.net.http.io.StringConsumer",
- "matlab.net.http.io.StringProvider",
- "matlab.perftest.FixedTimeExperiment",
- "matlab.perftest.FrequentistTimeExperiment",
- "matlab.perftest.TestCase",
- "matlab.perftest.TimeExperiment",
- "matlab.perftest.TimeResult",
- "matlab.project.Project",
- "matlab.project.convertDefinitionFiles",
- "matlab.project.createProject",
- "matlab.project.deleteProject",
- "matlab.project.loadProject",
- "matlab.project.rootProject",
- "matlab.settings.FactoryGroup.createToolboxGroup",
- "matlab.settings.SettingsFileUpgrader",
- "matlab.settings.loadSettingsCompatibilityResults",
- "matlab.settings.mustBeIntegerScalar",
- "matlab.settings.mustBeLogicalScalar",
- "matlab.settings.mustBeNumericScalar",
- "matlab.settings.mustBeStringScalar",
- "matlab.settings.reloadFactoryFile",
- "matlab.system.mixin.FiniteSource",
- "matlab.tall.blockMovingWindow",
- "matlab.tall.movingWindow",
- "matlab.tall.reduce",
- "matlab.tall.transform",
- "matlab.test.behavior.Missing",
- "matlab.ui.componentcontainer.ComponentContainer",
- "matlab.uitest.TestCase",
- "matlab.uitest.TestCase.forInteractiveUse",
- "matlab.uitest.unlock",
- "matlab.unittest.Test",
- "matlab.unittest.TestCase",
- "matlab.unittest.TestResult",
- "matlab.unittest.TestRunner",
- "matlab.unittest.TestSuite",
- "matlab.unittest.constraints.BooleanConstraint",
- "matlab.unittest.constraints.Constraint",
- "matlab.unittest.constraints.Tolerance",
- "matlab.unittest.diagnostics.ConstraintDiagnostic",
- "matlab.unittest.diagnostics.Diagnostic",
- "matlab.unittest.fixtures.Fixture",
- "matlab.unittest.measurement.DefaultMeasurementResult",
- "matlab.unittest.measurement.MeasurementResult",
- "matlab.unittest.measurement.chart.ComparisonPlot",
- "matlab.unittest.plugins.OutputStream",
- "matlab.unittest.plugins.Parallelizable",
- "matlab.unittest.plugins.QualifyingPlugin",
- "matlab.unittest.plugins.TestRunnerPlugin",
- "matlab.wsdl.createWSDLClient",
- "matlab.wsdl.setWSDLToolPath",
- "matlabRelease",
- "matlabrc",
- "matlabroot",
- "max",
- "maxflow",
- "maxk",
- "mean",
- "median",
- "memmapfile",
- "memoize",
- "memory",
- "mergecats",
- "mergevars",
- "mesh",
- "meshc",
- "meshgrid",
- "meshz",
- "meta.ArrayDimension",
- "meta.DynamicProperty",
- "meta.EnumeratedValue",
- "meta.FixedDimension",
- "meta.MetaData",
- "meta.UnrestrictedDimension",
- "meta.Validation",
- "meta.abstractDetails",
- "meta.class",
- "meta.class.fromName",
- "meta.event",
- "meta.method",
- "meta.package",
- "meta.package.fromName",
- "meta.package.getAllPackages",
- "meta.property",
- "metaclass",
- "methods",
- "methodsview",
- "mex",
- "mexext",
- "mexhost",
- "mfilename",
- "mget",
- "milliseconds",
- "min",
- "mink",
- "minres",
- "minspantree",
- "minute",
- "minutes",
- "mislocked",
- "missing",
- "mkdir",
- "mkpp",
- "mldivide",
- "mlintrpt",
- "mlock",
- "mmfileinfo",
- "mod",
- "mode",
- "month",
- "more",
- "morebins",
- "movAbsHDU",
- "movNamHDU",
- "movRelHDU",
- "move",
- "movefile",
- "movegui",
- "movevars",
- "movie",
- "movmad",
- "movmax",
- "movmean",
- "movmedian",
- "movmin",
- "movprod",
- "movstd",
- "movsum",
- "movvar",
- "mpower",
- "mput",
- "mrdivide",
- "msgbox",
- "mtimes",
- "mu2lin",
- "multibandread",
- "multibandwrite",
- "munlock",
- "mustBeA",
- "mustBeFile",
- "mustBeFinite",
- "mustBeFloat",
- "mustBeFolder",
- "mustBeGreaterThan",
- "mustBeGreaterThanOrEqual",
- "mustBeInRange",
- "mustBeInteger",
- "mustBeLessThan",
- "mustBeLessThanOrEqual",
- "mustBeMember",
- "mustBeNegative",
- "mustBeNonNan",
- "mustBeNonempty",
- "mustBeNonmissing",
- "mustBeNonnegative",
- "mustBeNonpositive",
- "mustBeNonsparse",
- "mustBeNonzero",
- "mustBeNonzeroLengthText",
- "mustBeNumeric",
- "mustBeNumericOrLogical",
- "mustBePositive",
- "mustBeReal",
- "mustBeScalarOrEmpty",
- "mustBeText",
- "mustBeTextScalar",
- "mustBeUnderlyingType",
- "mustBeValidVariableName",
- "mustBeVector",
- "namedPattern",
- "namedargs2cell",
- "namelengthmax",
- "nargin",
- "narginchk",
- "nargout",
- "nargoutchk",
- "native2unicode",
- "nccreate",
- "ncdisp",
- "nchoosek",
- "ncinfo",
- "ncread",
- "ncreadatt",
- "ncwrite",
- "ncwriteatt",
- "ncwriteschema",
- "ndgrid",
- "ndims",
- "nearest",
- "nearestNeighbor",
- "nearestvertex",
- "neighbors",
- "netcdf.abort",
- "netcdf.close",
- "netcdf.copyAtt",
- "netcdf.create",
- "netcdf.defDim",
- "netcdf.defGrp",
- "netcdf.defVar",
- "netcdf.defVarChunking",
- "netcdf.defVarDeflate",
- "netcdf.defVarFill",
- "netcdf.defVarFletcher32",
- "netcdf.delAtt",
- "netcdf.endDef",
- "netcdf.getAtt",
- "netcdf.getChunkCache",
- "netcdf.getConstant",
- "netcdf.getConstantNames",
- "netcdf.getVar",
- "netcdf.inq",
- "netcdf.inqAtt",
- "netcdf.inqAttID",
- "netcdf.inqAttName",
- "netcdf.inqDim",
- "netcdf.inqDimID",
- "netcdf.inqDimIDs",
- "netcdf.inqFormat",
- "netcdf.inqGrpName",
- "netcdf.inqGrpNameFull",
- "netcdf.inqGrpParent",
- "netcdf.inqGrps",
- "netcdf.inqLibVers",
- "netcdf.inqNcid",
- "netcdf.inqUnlimDims",
- "netcdf.inqVar",
- "netcdf.inqVarChunking",
- "netcdf.inqVarDeflate",
- "netcdf.inqVarFill",
- "netcdf.inqVarFletcher32",
- "netcdf.inqVarID",
- "netcdf.inqVarIDs",
- "netcdf.open",
- "netcdf.putAtt",
- "netcdf.putVar",
- "netcdf.reDef",
- "netcdf.renameAtt",
- "netcdf.renameDim",
- "netcdf.renameVar",
- "netcdf.setChunkCache",
- "netcdf.setDefaultFormat",
- "netcdf.setFill",
- "netcdf.sync",
- "newline",
- "newplot",
- "nextpow2",
- "nexttile",
- "nnz",
- "nonzeros",
- "norm",
- "normalize",
- "normest",
- "notify",
- "now",
- "nsidedpoly",
- "nthroot",
- "nufft",
- "nufftn",
- "null",
- "num2cell",
- "num2hex",
- "num2ruler",
- "num2str",
- "numArgumentsFromSubscript",
- "numRegions",
- "numboundaries",
- "numedges",
- "numel",
- "numnodes",
- "numpartitions",
- "numsides",
- "nzmax",
- "ode113",
- "ode15i",
- "ode15s",
- "ode23",
- "ode23s",
- "ode23t",
- "ode23tb",
- "ode45",
- "odeget",
- "odeset",
- "odextend",
- "onCleanup",
- "ones",
- "open",
- "openDiskFile",
- "openFile",
- "openProject",
- "openfig",
- "opengl",
- "openvar",
- "optimget",
- "optimset",
- "optionalPattern",
- "ordeig",
- "orderfields",
- "ordqz",
- "ordschur",
- "orient",
- "orth",
- "outdegree",
- "outedges",
- "outerjoin",
- "overlaps",
- "overlapsrange",
- "pack",
- "pad",
- "padecoef",
- "pagectranspose",
- "pagemtimes",
- "pagetranspose",
- "pan",
- "panInteraction",
- "parallelplot",
- "pareto",
- "parquetDatastore",
- "parquetinfo",
- "parquetread",
- "parquetwrite",
- "partition",
- "parula",
- "pascal",
- "patch",
- "path",
- "pathsep",
- "pathtool",
- "pattern",
- "pause",
- "pbaspect",
- "pcg",
- "pchip",
- "pcode",
- "pcolor",
- "pdepe",
- "pdeval",
- "peaks",
- "perimeter",
- "perl",
- "perms",
- "permute",
- "pi",
- "pie",
- "pie3",
- "pink",
- "pinv",
- "planerot",
- "play",
- "playblocking",
- "plot",
- "plot3",
- "plotbrowser",
- "plotedit",
- "plotmatrix",
- "plottools",
- "plus",
- "pointLocation",
- "pol2cart",
- "polaraxes",
- "polarbubblechart",
- "polarhistogram",
- "polarplot",
- "polarscatter",
- "poly",
- "polyarea",
- "polybuffer",
- "polyder",
- "polyeig",
- "polyfit",
- "polyint",
- "polyshape",
- "polyval",
- "polyvalm",
- "posixtime",
- "possessivePattern",
- "pow2",
- "ppval",
- "predecessors",
- "prefdir",
- "preferences",
- "press",
- "preview",
- "primes",
- "print",
- "printdlg",
- "printopt",
- "printpreview",
- "prism",
- "processInputSpecificationChangeImpl",
- "processTunedPropertiesImpl",
- "prod",
- "profile",
- "propedit",
- "properties",
- "propertyeditor",
- "psi",
- "publish",
- "pwd",
- "pyargs",
- "pyenv",
- "qmr",
- "qr",
- "qrdelete",
- "qrinsert",
- "qrupdate",
- "quad2d",
- "quadgk",
- "quarter",
- "questdlg",
- "quit",
- "quiver",
- "quiver3",
- "qz",
- "rad2deg",
- "rand",
- "randi",
- "randn",
- "randperm",
- "rank",
- "rat",
- "rats",
- "rbbox",
- "rcond",
- "read",
- "readATblHdr",
- "readBTblHdr",
- "readCard",
- "readCol",
- "readFrame",
- "readImg",
- "readKey",
- "readKeyCmplx",
- "readKeyDbl",
- "readKeyLongLong",
- "readKeyLongStr",
- "readKeyUnit",
- "readRecord",
- "readall",
- "readcell",
- "readline",
- "readlines",
- "readmatrix",
- "readstruct",
- "readtable",
- "readtimetable",
- "readvars",
- "real",
- "reallog",
- "realmax",
- "realmin",
- "realpow",
- "realsqrt",
- "record",
- "recordblocking",
- "rectangle",
- "rectint",
- "recycle",
- "reducepatch",
- "reducevolume",
- "refresh",
- "refreshSourceControl",
- "refreshdata",
- "regexp",
- "regexpPattern",
- "regexpi",
- "regexprep",
- "regexptranslate",
- "regionZoomInteraction",
- "regions",
- "registerevent",
- "regmatlabserver",
- "rehash",
- "relationaloperators",
- "release",
- "releaseImpl",
- "reload",
- "rem",
- "remove",
- "removeCategory",
- "removeFile",
- "removeGroup",
- "removeLabel",
- "removePath",
- "removeReference",
- "removeSetting",
- "removeShortcut",
- "removeShutdownFile",
- "removeStartupFile",
- "removeStyle",
- "removeToolbarExplorationButtons",
- "removecats",
- "removets",
- "removevars",
- "rename",
- "renamecats",
- "renamevars",
- "rendererinfo",
- "reordercats",
- "reordernodes",
- "repelem",
- "replace",
- "replaceBetween",
- "repmat",
- "resample",
- "rescale",
- "reset",
- "resetImpl",
- "reshape",
- "residue",
- "restoredefaultpath",
- "resume",
- "rethrow",
- "retime",
- "reverse",
- "rgb2gray",
- "rgb2hsv",
- "rgb2ind",
- "rgbplot",
- "ribbon",
- "rlim",
- "rmappdata",
- "rmboundary",
- "rmdir",
- "rmedge",
- "rmfield",
- "rmholes",
- "rmmissing",
- "rmnode",
- "rmoutliers",
- "rmpath",
- "rmpref",
- "rmprop",
- "rmslivers",
- "rng",
- "roots",
- "rosser",
- "rot90",
- "rotate",
- "rotate3d",
- "rotateInteraction",
- "round",
- "rowfun",
- "rows2vars",
- "rref",
- "rsf2csf",
- "rtickangle",
- "rtickformat",
- "rticklabels",
- "rticks",
- "ruler2num",
- "rulerPanInteraction",
- "run",
- "runChecks",
- "runperf",
- "runtests",
- "save",
- "saveObjectImpl",
- "saveas",
- "savefig",
- "saveobj",
- "savepath",
- "scale",
- "scatter",
- "scatter3",
- "scatteredInterpolant",
- "scatterhistogram",
- "schur",
- "scroll",
- "sec",
- "secd",
- "sech",
- "second",
- "seconds",
- "semilogx",
- "semilogy",
- "sendmail",
- "serialport",
- "serialportlist",
- "set",
- "setBscale",
- "setCompressionType",
- "setDTR",
- "setHCompScale",
- "setHCompSmooth",
- "setProperties",
- "setRTS",
- "setTileDim",
- "setTscale",
- "setabstime",
- "setappdata",
- "setcats",
- "setdiff",
- "setenv",
- "setfield",
- "setinterpmethod",
- "setpixelposition",
- "setpref",
- "settimeseriesnames",
- "settings",
- "setuniformtime",
- "setup",
- "setupImpl",
- "setvaropts",
- "setvartype",
- "setxor",
- "sgtitle",
- "shading",
- "sheetnames",
- "shg",
- "shiftdim",
- "shortestpath",
- "shortestpathtree",
- "showplottool",
- "shrinkfaces",
- "shuffle",
- "sign",
- "simplify",
- "sin",
- "sind",
- "single",
- "sinh",
- "sinpi",
- "size",
- "slice",
- "smooth3",
- "smoothdata",
- "snapnow",
- "sort",
- "sortboundaries",
- "sortregions",
- "sortrows",
- "sortx",
- "sorty",
- "sound",
- "soundsc",
- "spalloc",
- "sparse",
- "spaugment",
- "spconvert",
- "spdiags",
- "specular",
- "speye",
- "spfun",
- "sph2cart",
- "sphere",
- "spinmap",
- "spline",
- "split",
- "splitapply",
- "splitlines",
- "splitvars",
- "spones",
- "spparms",
- "sprand",
- "sprandn",
- "sprandsym",
- "sprank",
- "spreadsheetDatastore",
- "spreadsheetImportOptions",
- "spring",
- "sprintf",
- "spy",
- "sqrt",
- "sqrtm",
- "squeeze",
- "ss2tf",
- "sscanf",
- "stack",
- "stackedplot",
- "stairs",
- "standardizeMissing",
- "start",
- "startat",
- "startsWith",
- "startup",
- "std",
- "stem",
- "stem3",
- "step",
- "stepImpl",
- "stlread",
- "stlwrite",
- "stop",
- "str2double",
- "str2func",
- "str2num",
- "strcat",
- "strcmp",
- "strcmpi",
- "stream2",
- "stream3",
- "streamline",
- "streamparticles",
- "streamribbon",
- "streamslice",
- "streamtube",
- "strfind",
- "string",
- "strings",
- "strip",
- "strjoin",
- "strjust",
- "strlength",
- "strncmp",
- "strncmpi",
- "strrep",
- "strsplit",
- "strtok",
- "strtrim",
- "struct",
- "struct2cell",
- "struct2table",
- "structfun",
- "sub2ind",
- "subgraph",
- "subplot",
- "subsasgn",
- "subscribe",
- "subsindex",
- "subspace",
- "subsref",
- "substruct",
- "subtitle",
- "subtract",
- "subvolume",
- "successors",
- "sum",
- "summary",
- "summer",
- "superclasses",
- "surf",
- "surf2patch",
- "surface",
- "surfaceArea",
- "surfc",
- "surfl",
- "surfnorm",
- "svd",
- "svds",
- "svdsketch",
- "swapbytes",
- "swarmchart",
- "swarmchart3",
- "sylvester",
- "symamd",
- "symbfact",
- "symmlq",
- "symrcm",
- "synchronize",
- "sysobjupdate",
- "system",
- "table",
- "table2array",
- "table2cell",
- "table2struct",
- "table2timetable",
- "tabularTextDatastore",
- "tail",
- "tall",
- "tallrng",
- "tan",
- "tand",
- "tanh",
- "tar",
- "tcpclient",
- "tempdir",
- "tempname",
- "testsuite",
- "tetramesh",
- "texlabel",
- "text",
- "textBoundary",
- "textscan",
- "textwrap",
- "tfqmr",
- "thetalim",
- "thetatickformat",
- "thetaticklabels",
- "thetaticks",
- "thingSpeakRead",
- "thingSpeakWrite",
- "throw",
- "throwAsCaller",
- "tic",
- "tiledlayout",
- "time",
- "timeit",
- "timeofday",
- "timer",
- "timerange",
- "timerfind",
- "timerfindall",
- "timeseries",
- "timetable",
- "timetable2table",
- "timezones",
- "title",
- "toc",
- "todatenum",
- "toeplitz",
- "toolboxdir",
- "topkrows",
- "toposort",
- "trace",
- "transclosure",
- "transform",
- "translate",
- "transpose",
- "transreduction",
- "trapz",
- "treelayout",
- "treeplot",
- "triangulation",
- "tril",
- "trimesh",
- "triplot",
- "trisurf",
- "triu",
- "true",
- "tscollection",
- "tsdata.event",
- "tsearchn",
- "turbo",
- "turningdist",
- "type",
- "typecast",
- "tzoffset",
- "uialert",
- "uiaxes",
- "uibutton",
- "uibuttongroup",
- "uicheckbox",
- "uiconfirm",
- "uicontextmenu",
- "uicontrol",
- "uidatepicker",
- "uidropdown",
- "uieditfield",
- "uifigure",
- "uigauge",
- "uigetdir",
- "uigetfile",
- "uigetpref",
- "uigridlayout",
- "uihtml",
- "uiimage",
- "uiknob",
- "uilabel",
- "uilamp",
- "uilistbox",
- "uimenu",
- "uint16",
- "uint32",
- "uint64",
- "uint8",
- "uiopen",
- "uipanel",
- "uiprogressdlg",
- "uipushtool",
- "uiputfile",
- "uiradiobutton",
- "uiresume",
- "uisave",
- "uisetcolor",
- "uisetfont",
- "uisetpref",
- "uislider",
- "uispinner",
- "uistack",
- "uistyle",
- "uiswitch",
- "uitab",
- "uitabgroup",
- "uitable",
- "uitextarea",
- "uitogglebutton",
- "uitoggletool",
- "uitoolbar",
- "uitree",
- "uitreenode",
- "uiwait",
- "uminus",
- "underlyingType",
- "underlyingValue",
- "unicode2native",
- "union",
- "unique",
- "uniquetol",
- "unix",
- "unloadlibrary",
- "unmesh",
- "unmkpp",
- "unregisterallevents",
- "unregisterevent",
- "unstack",
- "unsubscribe",
- "untar",
- "unwrap",
- "unzip",
- "update",
- "updateDependencies",
- "uplus",
- "upper",
- "usejava",
- "userpath",
- "validateFunctionSignaturesJSON",
- "validateInputsImpl",
- "validatePropertiesImpl",
- "validateattributes",
- "validatecolor",
- "validatestring",
- "values",
- "vander",
- "var",
- "varargin",
- "varargout",
- "varfun",
- "vartype",
- "vecnorm",
- "ver",
- "verLessThan",
- "version",
- "vertcat",
- "vertexAttachments",
- "vertexNormal",
- "view",
- "viewmtx",
- "visdiff",
- "volume",
- "volumebounds",
- "voronoi",
- "voronoiDiagram",
- "voronoin",
- "wait",
- "waitbar",
- "waitfor",
- "waitforbuttonpress",
- "warndlg",
- "warning",
- "waterfall",
- "web",
- "weboptions",
- "webread",
- "websave",
- "webwrite",
- "week",
- "weekday",
- "what",
- "which",
- "whitespaceBoundary",
- "whitespacePattern",
- "who",
- "whos",
- "width",
- "wildcardPattern",
- "wilkinson",
- "winopen",
- "winqueryreg",
- "winter",
- "withinrange",
- "withtol",
- "wordcloud",
- "write",
- "writeChecksum",
- "writeCol",
- "writeComment",
- "writeDate",
- "writeHistory",
- "writeImg",
- "writeKey",
- "writeKeyUnit",
- "writeVideo",
- "writeall",
- "writecell",
- "writeline",
- "writematrix",
- "writestruct",
- "writetable",
- "writetimetable",
- "xcorr",
- "xcov",
- "xlabel",
- "xlim",
- "xline",
- "xmlread",
- "xmlwrite",
- "xor",
- "xslt",
- "xtickangle",
- "xtickformat",
- "xticklabels",
- "xticks",
- "year",
- "years",
- "ylabel",
- "ylim",
- "yline",
- "ymd",
- "ytickangle",
- "ytickformat",
- "yticklabels",
- "yticks",
- "yyaxis",
- "yyyymmdd",
- "zeros",
- "zip",
- "zlabel",
- "zlim",
- "zoom",
- "zoomInteraction",
- "ztickangle",
- "ztickformat",
- "zticklabels",
- "zticks",
- ],
- prefix=r"(?<!\.)(", # Exclude field names
- suffix=r")\b"
- ),
- Name.Builtin
- ),
-
- # line continuation with following comment:
- (r'(\.\.\.)(.*)$', bygroups(Keyword, Comment)),
-
- # command form:
- # "How MATLAB Recognizes Command Syntax" specifies that an operator
- # is recognized if it is either surrounded by spaces or by no
- # spaces on both sides (this allows distinguishing `cd ./foo` from
- # `cd ./ foo`). Here, the regex checks that the first word in the
- # line is followed by <spaces> and then not by
- # (equal | open-parenthesis | <operator><space> | <space>).
- (r'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|%s\s|\s)' % _operators,
- bygroups(Whitespace, Name, Whitespace), 'commandargs'),
-
- include('expressions')
- ],
- 'blockcomment': [
- (r'^\s*%\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'deffunc': [
- (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
- (r'(\s*)([a-zA-Z_]\w*)',
- bygroups(Whitespace, Name.Function), '#pop'),
- ],
- 'propattrs': [
- (r'(\w+)(\s*)(=)(\s*)(\d+)',
- bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
- Number)),
- (r'(\w+)(\s*)(=)(\s*)([a-zA-Z]\w*)',
- bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
- Keyword)),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'defprops': [
- (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
- (r'%.*$', Comment),
- (r'(?<!\.)end\b', Keyword, '#pop'),
- include('expressions'),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'commandargs': [
- # If an equal sign or other operator is encountered, this
- # isn't a command. It might be a variable assignment or
- # comparison operation with multiple spaces before the
- # equal sign or operator
- (r"=", Punctuation, '#pop'),
- (_operators, Operator, '#pop'),
- (r"[ \t]+", Whitespace),
- ("'[^']*'", String),
- (r"[^';\s]+", String),
- (";", Punctuation, '#pop'),
- default('#pop'),
- ]
- }
-
- def analyse_text(text):
- # function declaration.
- first_non_comment = next((line for line in text.splitlines()
- if not re.match(r'^\s*%', line)), '').strip()
- if (first_non_comment.startswith('function')
- and '{' not in first_non_comment):
- return 1.
- # comment
- elif re.search(r'^\s*%', text, re.M):
- return 0.2
- # system cmd
- elif re.search(r'^!\w+', text, re.M):
- return 0.2
-
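The analyse_text heuristic above feeds Pygments' lexer guessing: guess_lexer() scores a snippet with every lexer's analyse_text and keeps the best match. A minimal sketch, assuming Pygments is installed (the exact guess can vary between releases):

    from pygments.lexers import guess_lexer
    from pygments.lexers.matlab import MatlabLexer

    code = "function y = square(x)\n    y = x.^2;\nend\n"

    # A leading function declaration without '{' scores 1.0 in the heuristic above.
    print(MatlabLexer.analyse_text(code))
    print(type(guess_lexer(code)).__name__)   # likely MatlabLexer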
-
-line_re = re.compile('.*?\n')
-
-
-class MatlabSessionLexer(Lexer):
- """
- For Matlab sessions. Modeled after PythonConsoleLexer.
- Contributed by Ken Schutte <kschutte@csail.mit.edu>.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab session'
- aliases = ['matlabsession']
-
- def get_tokens_unprocessed(self, text):
- mlexer = MatlabLexer(**self.options)
-
- curcode = ''
- insertions = []
- continuation = False
-
- for match in line_re.finditer(text):
- line = match.group()
-
- if line.startswith('>> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:3])]))
- curcode += line[3:]
-
- elif line.startswith('>>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:2])]))
- curcode += line[2:]
-
- elif line.startswith('???'):
-
- idx = len(curcode)
-
- # without this, the error is shown on the same line as before...?
- # line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
- elif continuation and insertions:
- # line_start is the length of the most recent prompt symbol
- line_start = len(insertions[-1][-1][-1])
- # Treat leading spaces of the same length as the prompt as a generic prompt.
- # This keeps code aligned when prompts are stripped, e.g. by some JavaScript
- if line.startswith(' '*line_start):
- insertions.append(
- (len(curcode), [(0, Generic.Prompt, line[:line_start])]))
- curcode += line[line_start:]
- else:
- curcode += line
- else:
- if curcode:
- yield from do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
-
- yield match.start(), Generic.Output, line
-
- # Does not allow continuation if a comment follows the ellipsis.
- # Continues any line that ends with ..., even comments (lines starting with %)
- if line.strip().endswith('...'):
- continuation = True
- else:
- continuation = False
-
- if curcode: # or item:
- yield from do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode))
-
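The session lexer above splits a transcript into prompts, re-lexed Matlab code, and plain output via do_insertions. A minimal sketch, assuming Pygments is installed:

    from pygments.lexers.matlab import MatlabSessionLexer
    from pygments.token import Generic

    session = ">> x = 1 + 1\n\nx =\n\n     2\n\n"

    # Prompt and output chunks come back under the Generic token hierarchy;
    # everything after '>> ' is tokenized by MatlabLexer and is skipped here.
    for tok, value in MatlabSessionLexer().get_tokens(session):
        if tok in Generic:
            print(tok, repr(value))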
-
-class OctaveLexer(RegexLexer):
- """
- For GNU Octave source code.
-
- .. versionadded:: 1.5
- """
- name = 'Octave'
- url = 'https://www.gnu.org/software/octave/index'
- aliases = ['octave']
- filenames = ['*.m']
- mimetypes = ['text/octave']
-
- # These lists are generated automatically.
- # Run the following in a bash shell:
- #
- # First dump all of the Octave manual into a plain text file:
- #
- # $ info octave --subnodes -o octave-manual
- #
- # Now grep through it:
-
- # for i in \
- # "Built-in Function" "Command" "Function File" \
- # "Loadable Function" "Mapping Function";
- # do
- # perl -e '@name = qw('"$i"');
- # print lc($name[0]),"_kw = [\n"';
- #
- # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
- # octave-manual | sort | uniq ;
- # echo "]" ;
- # echo;
- # done
-
- # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
-
- builtin_kw = (
- "addlistener", "addpath", "addproperty", "all",
- "and", "any", "argnames", "argv", "assignin",
- "atexit", "autoload",
- "available_graphics_toolkits", "beep_on_error",
- "bitand", "bitmax", "bitor", "bitshift", "bitxor",
- "cat", "cell", "cellstr", "char", "class", "clc",
- "columns", "command_line_path",
- "completion_append_char", "completion_matches",
- "complex", "confirm_recursive_rmdir", "cputime",
- "crash_dumps_octave_core", "ctranspose", "cumprod",
- "cumsum", "debug_on_error", "debug_on_interrupt",
- "debug_on_warning", "default_save_options",
- "dellistener", "diag", "diff", "disp",
- "doc_cache_file", "do_string_escapes", "double",
- "drawnow", "e", "echo_executing_commands", "eps",
- "eq", "errno", "errno_list", "error", "eval",
- "evalin", "exec", "exist", "exit", "eye", "false",
- "fclear", "fclose", "fcntl", "fdisp", "feof",
- "ferror", "feval", "fflush", "fgetl", "fgets",
- "fieldnames", "file_in_loadpath", "file_in_path",
- "filemarker", "filesep", "find_dir_in_path",
- "fixed_point_format", "fnmatch", "fopen", "fork",
- "formula", "fprintf", "fputs", "fread", "freport",
- "frewind", "fscanf", "fseek", "fskipl", "ftell",
- "functions", "fwrite", "ge", "genpath", "get",
- "getegid", "getenv", "geteuid", "getgid",
- "getpgrp", "getpid", "getppid", "getuid", "glob",
- "gt", "gui_mode", "history_control",
- "history_file", "history_size",
- "history_timestamp_format_string", "home",
- "horzcat", "hypot", "ifelse",
- "ignore_function_time_stamp", "inferiorto",
- "info_file", "info_program", "inline", "input",
- "intmax", "intmin", "ipermute",
- "is_absolute_filename", "isargout", "isbool",
- "iscell", "iscellstr", "ischar", "iscomplex",
- "isempty", "isfield", "isfloat", "isglobal",
- "ishandle", "isieee", "isindex", "isinteger",
- "islogical", "ismatrix", "ismethod", "isnull",
- "isnumeric", "isobject", "isreal",
- "is_rooted_relative_filename", "issorted",
- "isstruct", "isvarname", "kbhit", "keyboard",
- "kill", "lasterr", "lasterror", "lastwarn",
- "ldivide", "le", "length", "link", "linspace",
- "logical", "lstat", "lt", "make_absolute_filename",
- "makeinfo_program", "max_recursion_depth", "merge",
- "methods", "mfilename", "minus", "mislocked",
- "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
- "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
- "munlock", "nargin", "nargout",
- "native_float_format", "ndims", "ne", "nfields",
- "nnz", "norm", "not", "numel", "nzmax",
- "octave_config_info", "octave_core_file_limit",
- "octave_core_file_name",
- "octave_core_file_options", "ones", "or",
- "output_max_field_width", "output_precision",
- "page_output_immediately", "page_screen_output",
- "path", "pathsep", "pause", "pclose", "permute",
- "pi", "pipe", "plus", "popen", "power",
- "print_empty_dimensions", "printf",
- "print_struct_array_contents", "prod",
- "program_invocation_name", "program_name",
- "putenv", "puts", "pwd", "quit", "rats", "rdivide",
- "readdir", "readlink", "read_readline_init_file",
- "realmax", "realmin", "rehash", "rename",
- "repelems", "re_read_readline_init_file", "reset",
- "reshape", "resize", "restoredefaultpath",
- "rethrow", "rmdir", "rmfield", "rmpath", "rows",
- "save_header_format_string", "save_precision",
- "saving_history", "scanf", "set", "setenv",
- "shell_cmd", "sighup_dumps_octave_core",
- "sigterm_dumps_octave_core", "silent_functions",
- "single", "size", "size_equal", "sizemax",
- "sizeof", "sleep", "source", "sparse_auto_mutate",
- "split_long_rows", "sprintf", "squeeze", "sscanf",
- "stat", "stderr", "stdin", "stdout", "strcmp",
- "strcmpi", "string_fill_char", "strncmp",
- "strncmpi", "struct", "struct_levels_to_print",
- "strvcat", "subsasgn", "subsref", "sum", "sumsq",
- "superiorto", "suppress_verbose_help_message",
- "symlink", "system", "tic", "tilde_expand",
- "times", "tmpfile", "tmpnam", "toc", "toupper",
- "transpose", "true", "typeinfo", "umask", "uminus",
- "uname", "undo_string_escapes", "unlink", "uplus",
- "upper", "usage", "usleep", "vec", "vectorize",
- "vertcat", "waitpid", "warning", "warranty",
- "whos_line_format", "yes_or_no", "zeros",
- "inf", "Inf", "nan", "NaN")
-
- command_kw = ("close", "load", "who", "whos")
-
- function_kw = (
- "accumarray", "accumdim", "acosd", "acotd",
- "acscd", "addtodate", "allchild", "ancestor",
- "anova", "arch_fit", "arch_rnd", "arch_test",
- "area", "arma_rnd", "arrayfun", "ascii", "asctime",
- "asecd", "asind", "assert", "atand",
- "autoreg_matrix", "autumn", "axes", "axis", "bar",
- "barh", "bartlett", "bartlett_test", "beep",
- "betacdf", "betainv", "betapdf", "betarnd",
- "bicgstab", "bicubic", "binary", "binocdf",
- "binoinv", "binopdf", "binornd", "bitcmp",
- "bitget", "bitset", "blackman", "blanks",
- "blkdiag", "bone", "box", "brighten", "calendar",
- "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
- "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
- "chisquare_test_homogeneity",
- "chisquare_test_independence", "circshift", "cla",
- "clabel", "clf", "clock", "cloglog", "closereq",
- "colon", "colorbar", "colormap", "colperm",
- "comet", "common_size", "commutation_matrix",
- "compan", "compare_versions", "compass",
- "computer", "cond", "condest", "contour",
- "contourc", "contourf", "contrast", "conv",
- "convhull", "cool", "copper", "copyfile", "cor",
- "corrcoef", "cor_test", "cosd", "cotd", "cov",
- "cplxpair", "cross", "cscd", "cstrcat", "csvread",
- "csvwrite", "ctime", "cumtrapz", "curl", "cut",
- "cylinder", "date", "datenum", "datestr",
- "datetick", "datevec", "dblquad", "deal",
- "deblank", "deconv", "delaunay", "delaunayn",
- "delete", "demo", "detrend", "diffpara", "diffuse",
- "dir", "discrete_cdf", "discrete_inv",
- "discrete_pdf", "discrete_rnd", "display",
- "divergence", "dlmwrite", "dos", "dsearch",
- "dsearchn", "duplication_matrix", "durbinlevinson",
- "ellipsoid", "empirical_cdf", "empirical_inv",
- "empirical_pdf", "empirical_rnd", "eomday",
- "errorbar", "etime", "etreeplot", "example",
- "expcdf", "expinv", "expm", "exppdf", "exprnd",
- "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
- "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
- "factorial", "fail", "fcdf", "feather", "fftconv",
- "fftfilt", "fftshift", "figure", "fileattrib",
- "fileparts", "fill", "findall", "findobj",
- "findstr", "finv", "flag", "flipdim", "fliplr",
- "flipud", "fpdf", "fplot", "fractdiff", "freqz",
- "freqz_plot", "frnd", "fsolve",
- "f_test_regression", "ftp", "fullfile", "fzero",
- "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
- "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
- "geoinv", "geopdf", "geornd", "getfield", "ginput",
- "glpk", "gls", "gplot", "gradient",
- "graphics_toolkit", "gray", "grid", "griddata",
- "griddatan", "gtext", "gunzip", "gzip", "hadamard",
- "hamming", "hankel", "hanning", "hggroup",
- "hidden", "hilb", "hist", "histc", "hold", "hot",
- "hotelling_test", "housh", "hsv", "hurst",
- "hygecdf", "hygeinv", "hygepdf", "hygernd",
- "idivide", "ifftshift", "image", "imagesc",
- "imfinfo", "imread", "imshow", "imwrite", "index",
- "info", "inpolygon", "inputname", "interpft",
- "interpn", "intersect", "invhilb", "iqr", "isa",
- "isdefinite", "isdir", "is_duplicate_entry",
- "isequal", "isequalwithequalnans", "isfigure",
- "ishermitian", "ishghandle", "is_leap_year",
- "isletter", "ismac", "ismember", "ispc", "isprime",
- "isprop", "isscalar", "issquare", "isstrprop",
- "issymmetric", "isunix", "is_valid_file_id",
- "isvector", "jet", "kendall",
- "kolmogorov_smirnov_cdf",
- "kolmogorov_smirnov_test", "kruskal_wallis_test",
- "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
- "laplace_pdf", "laplace_rnd", "legend", "legendre",
- "license", "line", "linkprop", "list_primes",
- "loadaudio", "loadobj", "logistic_cdf",
- "logistic_inv", "logistic_pdf", "logistic_rnd",
- "logit", "loglog", "loglogerr", "logm", "logncdf",
- "logninv", "lognpdf", "lognrnd", "logspace",
- "lookfor", "ls_command", "lsqnonneg", "magic",
- "mahalanobis", "manova", "matlabroot",
- "mcnemar_test", "mean", "meansq", "median", "menu",
- "mesh", "meshc", "meshgrid", "meshz", "mexext",
- "mget", "mkpp", "mode", "moment", "movefile",
- "mpoles", "mput", "namelengthmax", "nargchk",
- "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
- "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
- "nonzeros", "normcdf", "normest", "norminv",
- "normpdf", "normrnd", "now", "nthroot", "null",
- "ocean", "ols", "onenormest", "optimget",
- "optimset", "orderfields", "orient", "orth",
- "pack", "pareto", "parseparams", "pascal", "patch",
- "pathdef", "pcg", "pchip", "pcolor", "pcr",
- "peaks", "periodogram", "perl", "perms", "pie",
- "pink", "planerot", "playaudio", "plot",
- "plotmatrix", "plotyy", "poisscdf", "poissinv",
- "poisspdf", "poissrnd", "polar", "poly",
- "polyaffine", "polyarea", "polyderiv", "polyfit",
- "polygcd", "polyint", "polyout", "polyreduce",
- "polyval", "polyvalm", "postpad", "powerset",
- "ppder", "ppint", "ppjumps", "ppplot", "ppval",
- "pqpnonneg", "prepad", "primes", "print",
- "print_usage", "prism", "probit", "qp", "qqplot",
- "quadcc", "quadgk", "quadl", "quadv", "quiver",
- "qzhess", "rainbow", "randi", "range", "rank",
- "ranks", "rat", "reallog", "realpow", "realsqrt",
- "record", "rectangle_lw", "rectangle_sw",
- "rectint", "refresh", "refreshdata",
- "regexptranslate", "repmat", "residue", "ribbon",
- "rindex", "roots", "rose", "rosser", "rotdim",
- "rref", "run", "run_count", "rundemos", "run_test",
- "runtests", "saveas", "saveaudio", "saveobj",
- "savepath", "scatter", "secd", "semilogx",
- "semilogxerr", "semilogy", "semilogyerr",
- "setaudio", "setdiff", "setfield", "setxor",
- "shading", "shift", "shiftdim", "sign_test",
- "sinc", "sind", "sinetone", "sinewave", "skewness",
- "slice", "sombrero", "sortrows", "spaugment",
- "spconvert", "spdiags", "spearman", "spectral_adf",
- "spectral_xdf", "specular", "speed", "spencer",
- "speye", "spfun", "sphere", "spinmap", "spline",
- "spones", "sprand", "sprandn", "sprandsym",
- "spring", "spstats", "spy", "sqp", "stairs",
- "statistics", "std", "stdnormal_cdf",
- "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
- "stem", "stft", "strcat", "strchr", "strjust",
- "strmatch", "strread", "strsplit", "strtok",
- "strtrim", "strtrunc", "structfun", "studentize",
- "subplot", "subsindex", "subspace", "substr",
- "substruct", "summer", "surf", "surface", "surfc",
- "surfl", "surfnorm", "svds", "swapbytes",
- "sylvester_matrix", "symvar", "synthesis", "table",
- "tand", "tar", "tcdf", "tempdir", "tempname",
- "test", "text", "textread", "textscan", "tinv",
- "title", "toeplitz", "tpdf", "trace", "trapz",
- "treelayout", "treeplot", "triangle_lw",
- "triangle_sw", "tril", "trimesh", "triplequad",
- "triplot", "trisurf", "triu", "trnd", "tsearchn",
- "t_test", "t_test_regression", "type", "unidcdf",
- "unidinv", "unidpdf", "unidrnd", "unifcdf",
- "unifinv", "unifpdf", "unifrnd", "union", "unique",
- "unix", "unmkpp", "unpack", "untabify", "untar",
- "unwrap", "unzip", "u_test", "validatestring",
- "vander", "var", "var_test", "vech", "ver",
- "version", "view", "voronoi", "voronoin",
- "waitforbuttonpress", "wavread", "wavwrite",
- "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
- "welch_test", "what", "white", "whitebg",
- "wienrnd", "wilcoxon_test", "wilkinson", "winter",
- "xlabel", "xlim", "ylabel", "yulewalker", "zip",
- "zlabel", "z_test")
-
- loadable_kw = (
- "airy", "amd", "balance", "besselh", "besseli",
- "besselj", "besselk", "bessely", "bitpack",
- "bsxfun", "builtin", "ccolamd", "cellfun",
- "cellslices", "chol", "choldelete", "cholinsert",
- "cholinv", "cholshift", "cholupdate", "colamd",
- "colloc", "convhulln", "convn", "csymamd",
- "cummax", "cummin", "daspk", "daspk_options",
- "dasrt", "dasrt_options", "dassl", "dassl_options",
- "dbclear", "dbdown", "dbstack", "dbstatus",
- "dbstop", "dbtype", "dbup", "dbwhere", "det",
- "dlmread", "dmperm", "dot", "eig", "eigs",
- "endgrent", "endpwent", "etree", "fft", "fftn",
- "fftw", "filter", "find", "full", "gcd",
- "getgrent", "getgrgid", "getgrnam", "getpwent",
- "getpwnam", "getpwuid", "getrusage", "givens",
- "gmtime", "gnuplot_binary", "hess", "ifft",
- "ifftn", "inv", "isdebugmode", "issparse", "kron",
- "localtime", "lookup", "lsode", "lsode_options",
- "lu", "luinc", "luupdate", "matrix_type", "max",
- "min", "mktime", "pinv", "qr", "qrdelete",
- "qrinsert", "qrshift", "qrupdate", "quad",
- "quad_options", "qz", "rand", "rande", "randg",
- "randn", "randp", "randperm", "rcond", "regexp",
- "regexpi", "regexprep", "schur", "setgrent",
- "setpwent", "sort", "spalloc", "sparse", "spparms",
- "sprank", "sqrtm", "strfind", "strftime",
- "strptime", "strrep", "svd", "svd_driver", "syl",
- "symamd", "symbfact", "symrcm", "time", "tsearch",
- "typecast", "urlread", "urlwrite")
-
- mapping_kw = (
- "abs", "acos", "acosh", "acot", "acoth", "acsc",
- "acsch", "angle", "arg", "asec", "asech", "asin",
- "asinh", "atan", "atanh", "beta", "betainc",
- "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
- "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
- "erfcx", "erfinv", "exp", "finite", "fix", "floor",
- "fmod", "gamma", "gammainc", "gammaln", "imag",
- "isalnum", "isalpha", "isascii", "iscntrl",
- "isdigit", "isfinite", "isgraph", "isinf",
- "islower", "isna", "isnan", "isprint", "ispunct",
- "isspace", "isupper", "isxdigit", "lcm", "lgamma",
- "log", "lower", "mod", "real", "rem", "round",
- "roundb", "sec", "sech", "sign", "sin", "sinh",
- "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
-
- builtin_consts = (
- "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
- "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
- "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
- "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
- "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
- "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
- "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
- "WSTOPSIG", "WTERMSIG", "WUNTRACED")
-
- tokens = {
- 'root': [
- (r'%\{\s*\n', Comment.Multiline, 'percentblockcomment'),
- (r'#\{\s*\n', Comment.Multiline, 'hashblockcomment'),
- (r'[%#].*$', Comment),
- (r'^\s*function\b', Keyword, 'deffunc'),
-
- # from 'iskeyword' on hg changeset 8cc154f45e37
- (words((
- '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef',
- 'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch',
- 'end_unwind_protect', 'endclassdef', 'endevents', 'endfor',
- 'endfunction', 'endif', 'endmethods', 'endproperties', 'endswitch',
- 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if',
- 'methods', 'otherwise', 'persistent', 'properties', 'return',
- 'set', 'static', 'switch', 'try', 'until', 'unwind_protect',
- 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
- Keyword),
-
- (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
- suffix=r'\b'), Name.Builtin),
-
- (words(builtin_consts, suffix=r'\b'), Name.Constant),
-
- # operators in Octave but not Matlab:
- (r'-=|!=|!|/=|--', Operator),
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators in Octave but not Matlab requiring escape for re:
- (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-
- # punctuation:
- (r'[\[\](){}:@.,]', Punctuation),
- (r'=|:|;', Punctuation),
-
- (r'"[^"]*"', String),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
- (r'(?<![\w)\].])\'', String, 'string'),
-
- (r'[a-zA-Z_]\w*', Name),
- (r'\s+', Text),
- (r'.', Text),
- ],
- 'percentblockcomment': [
- (r'^\s*%\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'hashblockcomment': [
- (r'^\s*#\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
- (r'(\s*)([a-zA-Z_]\w*)',
- bygroups(Whitespace, Name.Function), '#pop'),
- ],
- }
-
- def analyse_text(text):
- """Octave is quite hard to spot, and it looks like Matlab as well."""
- return 0
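-
- # Since analyse_text() cannot tell Octave from Matlab, callers that know the
- # language can select this lexer explicitly. A minimal sketch (illustrative,
- # not part of this module):
- #
- #     from pygments import highlight
- #     from pygments.lexers import get_lexer_by_name
- #     from pygments.formatters import HtmlFormatter
- #
- #     code = "x = linspace(0, 2*pi); plot(x, sin(x));"
- #     html = highlight(code, get_lexer_by_name("octave"), HtmlFormatter())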
-
-
-class ScilabLexer(RegexLexer):
- """
- For Scilab source code.
-
- .. versionadded:: 1.5
- """
- name = 'Scilab'
- url = 'https://www.scilab.org/'
- aliases = ['scilab']
- filenames = ['*.sci', '*.sce', '*.tst']
- mimetypes = ['text/scilab']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
- (r'^\s*function\b', Keyword, 'deffunc'),
-
- (words((
- '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
- 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
- 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
- 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
- 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
- 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
- Keyword),
-
- (words(_scilab_builtins.functions_kw +
- _scilab_builtins.commands_kw +
- _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
-
- (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
-
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
- # punctuation:
- (r'[\[\](){}@.,=:;]+', Punctuation),
-
- (r'"[^"]*"', String),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
- (r'(?<![\w)\].])\'', String, 'string'),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- (r'[a-zA-Z_]\w*', Name),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- (r'.', String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
- (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
- ],
- }
-
- # the following is needed to distinguish Scilab and GAP .tst files
- def analyse_text(text):
- score = 0.0
-
- # Scilab comments (don't appear in e.g. GAP code)
- if re.search(r"^\s*//", text):
- score += 0.1
- if re.search(r"^\s*/\*", text):
- score += 0.1
-
- return min(score, 1.0)
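-
- # Illustrative note: pygments.lexers.guess_lexer_for_filename() consults
- # analyse_text() to break ties, so for a *.tst file that begins with "//"
- # comments this score nudges the guess toward Scilab rather than GAP.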
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/maxima.py b/venv/lib/python3.11/site-packages/pygments/lexers/maxima.py
deleted file mode 100644
index 4c6dc79..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/maxima.py
+++ /dev/null
@@ -1,85 +0,0 @@
-"""
- pygments.lexers.maxima
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the computer algebra system Maxima.
-
- Derived from pygments/lexers/algebra.py.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['MaximaLexer']
-
-class MaximaLexer(RegexLexer):
- """
- A Maxima lexer.
- Derived from pygments.lexers.MuPADLexer.
-
- .. versionadded:: 2.11
- """
- name = 'Maxima'
- url = 'http://maxima.sourceforge.net'
- aliases = ['maxima', 'macsyma']
- filenames = ['*.mac', '*.max']
-
- keywords = ('if', 'then', 'else', 'elseif',
- 'do', 'while', 'repeat', 'until',
- 'for', 'from', 'to', 'downto', 'step', 'thru')
-
- constants = ('%pi', '%e', '%phi', '%gamma', '%i',
- 'und', 'ind', 'infinity', 'inf', 'minf',
- 'true', 'false', 'unknown', 'done')
-
- operators = (r'.', r':', r'=', r'#',
- r'+', r'-', r'*', r'/', r'^',
- r'@', r'>', r'<', r'|', r'!', r"'")
-
- operator_words = ('and', 'or', 'not')
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'[,;$]', Punctuation),
- (words (constants), Name.Constant),
- (words (keywords), Keyword),
- (words (operators), Operator),
- (words (operator_words), Operator.Word),
- (r'''(?x)
- ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
- (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
- bygroups(Name.Function, Text.Whitespace, Punctuation)),
- (r'''(?x)
- (?:[a-zA-Z_#%][\w#%]*|`[^`]*`)
- (?:::[a-zA-Z_#%][\w#%]*|`[^`]*`)*''', Name.Variable),
- (r'[-+]?(\d*\.\d+([bdefls][-+]?\d+)?|\d+(\.\d*)?[bdefls][-+]?\d+)', Number.Float),
- (r'[-+]?\d+', Number.Integer),
- (r'\s+', Text.Whitespace),
- (r'.', Text)
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ]
- }
-
- def analyse_text (text):
- strength = 0.0
- # Input expression terminator.
- if re.search (r'\$\s*$', text, re.MULTILINE):
- strength += 0.05
- # Function definition operator.
- if ':=' in text:
- strength += 0.02
- return strength
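-
- # Illustrative example: an input such as "f(x) := x^2$" contains both the
- # ":=" definition operator and the "$" terminator, so it would score 0.07 here.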
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/meson.py b/venv/lib/python3.11/site-packages/pygments/lexers/meson.py
deleted file mode 100644
index f74f719..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/meson.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""
- pygments.lexers.meson
- ~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexer for the Meson build system
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Name, Number, Punctuation, Operator, \
- Keyword, String, Whitespace
-
-__all__ = ['MesonLexer']
-
-
-class MesonLexer(RegexLexer):
- """Meson language lexer.
-
- The grammar definition used to transcribe the syntax was retrieved from
- https://mesonbuild.com/Syntax.html#grammar for version 0.58.
- Some of those definitions are transcribed imprecisely, so the Meson++
- implementation was also checked: https://github.com/dcbaker/meson-plus-plus.
-
- .. versionadded:: 2.10
- """
-
- # TODO String interpolation @VARNAME@ inner matches
- # TODO keyword_arg: value inner matches
-
- name = 'Meson'
- url = 'https://mesonbuild.com/'
- aliases = ['meson', 'meson.build']
- filenames = ['meson.build', 'meson_options.txt']
- mimetypes = ['text/x-meson']
-
- tokens = {
- 'root': [
- (r'#.*?$', Comment),
- (r"'''.*'''", String.Single),
- (r'[1-9][0-9]*', Number.Integer),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- include('string'),
- include('keywords'),
- include('expr'),
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
- (r'\s+', Whitespace),
- ],
- 'string': [
- (r"[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}", String),
- (r"'.*?(?<!\\)(\\\\)*?'", String),
- ],
- 'keywords': [
- (words((
- 'if',
- 'elif',
- 'else',
- 'endif',
- 'foreach',
- 'endforeach',
- 'break',
- 'continue',
- ),
- suffix=r'\b'), Keyword),
- ],
- 'expr': [
- (r'(in|and|or|not)\b', Operator.Word),
- (r'(\*=|/=|%=|\+=|-=|==|!=|\+|-|=)', Operator),
- (r'[\[\]{}:().,?]', Punctuation),
- (words(('true', 'false'), suffix=r'\b'), Keyword.Constant),
- include('builtins'),
- (words((
- 'meson',
- 'build_machine',
- 'host_machine',
- 'target_machine',
- ),
- suffix=r'\b'), Name.Variable.Magic),
- ],
- 'builtins': [
- # This list was extracted from the v0.58 reference manual
- (words((
- 'add_global_arguments',
- 'add_global_link_arguments',
- 'add_languages',
- 'add_project_arguments',
- 'add_project_link_arguments',
- 'add_test_setup',
- 'assert',
- 'benchmark',
- 'both_libraries',
- 'build_target',
- 'configuration_data',
- 'configure_file',
- 'custom_target',
- 'declare_dependency',
- 'dependency',
- 'disabler',
- 'environment',
- 'error',
- 'executable',
- 'files',
- 'find_library',
- 'find_program',
- 'generator',
- 'get_option',
- 'get_variable',
- 'include_directories',
- 'install_data',
- 'install_headers',
- 'install_man',
- 'install_subdir',
- 'is_disabler',
- 'is_variable',
- 'jar',
- 'join_paths',
- 'library',
- 'message',
- 'project',
- 'range',
- 'run_command',
- 'set_variable',
- 'shared_library',
- 'shared_module',
- 'static_library',
- 'subdir',
- 'subdir_done',
- 'subproject',
- 'summary',
- 'test',
- 'vcs_tag',
- 'warning',
- ),
- prefix=r'(?<!\.)',
- suffix=r'\b'), Name.Builtin),
- (r'(?<!\.)import\b', Name.Namespace),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/mime.py b/venv/lib/python3.11/site-packages/pygments/lexers/mime.py
deleted file mode 100644
index 8bf16f7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/mime.py
+++ /dev/null
@@ -1,210 +0,0 @@
-"""
- pygments.lexers.mime
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Multipurpose Internet Mail Extensions (MIME) data.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.lexers import get_lexer_for_mimetype
-from pygments.token import Text, Name, String, Operator, Comment, Other
-from pygments.util import get_int_opt, ClassNotFound
-
-__all__ = ["MIMELexer"]
-
-
-class MIMELexer(RegexLexer):
- """
- Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
- designed to process nested multipart data.
-
- It assumes that the given data contains both header and body (and is
- split at an empty line). If no valid header is found, the entire data
- is treated as the body.
-
- Additional options accepted:
-
- `MIME-max-level`
- Max recursion level for nested MIME structures. Any negative number
- is treated as unlimited. (default: -1)
-
- `Content-Type`
- Treat the data as a specific content type. Useful when the header is
- missing; otherwise this lexer tries to parse it from the header.
- (default: `text/plain`)
-
- `Multipart-Boundary`
- Set the default multipart boundary delimiter. This option is only used
- when `Content-Type` is `multipart` and the header is missing; otherwise
- the boundary is parsed from the header. (default: None)
-
- `Content-Transfer-Encoding`
- Treat the data as using a specific transfer encoding; otherwise this
- lexer tries to parse it from the header. (default: None)
-
- .. versionadded:: 2.5
- """
-
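- # A minimal usage sketch for the options above (illustrative only). Note that
- # __init__ below reads the underscore-spelled keys "Content_Type" and
- # "Content_Transfer_Encoding", while "Multipart-Boundary" and "MIME-max-level"
- # keep their hyphens:
- #
- #     from pygments.lexers.mime import MIMELexer
- #     lexer = MIMELexer(**{"MIME-max-level": 2,
- #                          "Content_Type": "multipart/mixed",
- #                          "Multipart-Boundary": "frontier"})
-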
- name = "MIME"
- aliases = ["mime"]
- mimetypes = ["multipart/mixed",
- "multipart/related",
- "multipart/alternative"]
-
- def __init__(self, **options):
- super().__init__(**options)
- self.boundary = options.get("Multipart-Boundary")
- self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
- self.content_type = options.get("Content_Type", "text/plain")
- self.max_nested_level = get_int_opt(options, "MIME-max-level", -1)
-
- def get_header_tokens(self, match):
- field = match.group(1)
-
- if field.lower() in self.attention_headers:
- yield match.start(1), Name.Tag, field + ":"
- yield match.start(2), Text.Whitespace, match.group(2)
-
- pos = match.end(2)
- body = match.group(3)
- for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())):
- yield pos + i, t, v
-
- else:
- yield match.start(), Comment, match.group()
-
- def get_body_tokens(self, match):
- pos_body_start = match.start()
- entire_body = match.group()
-
- # skip first newline
- if entire_body[0] == '\n':
- yield pos_body_start, Text.Whitespace, '\n'
- pos_body_start = pos_body_start + 1
- entire_body = entire_body[1:]
-
- # if it is not a multipart
- if not self.content_type.startswith("multipart") or not self.boundary:
- for i, t, v in self.get_bodypart_tokens(entire_body):
- yield pos_body_start + i, t, v
- return
-
- # find boundary
- bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
- bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
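- # e.g. with boundary "frontier" this matches the part separator "--frontier"
- # and the closing delimiter "--frontier--" at the start of a line.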
-
- # some data has prefix text before first boundary
- m = bdry_matcher.search(entire_body)
- if m:
- pos_part_start = pos_body_start + m.end()
- pos_iter_start = lpos_end = m.end()
- yield pos_body_start, Text, entire_body[:m.start()]
- yield pos_body_start + lpos_end, String.Delimiter, m.group()
- else:
- pos_part_start = pos_body_start
- pos_iter_start = 0
-
- # process tokens of each body part
- for m in bdry_matcher.finditer(entire_body, pos_iter_start):
- # bodypart
- lpos_start = pos_part_start - pos_body_start
- lpos_end = m.start()
- part = entire_body[lpos_start:lpos_end]
- for i, t, v in self.get_bodypart_tokens(part):
- yield pos_part_start + i, t, v
-
- # boundary
- yield pos_body_start + lpos_end, String.Delimiter, m.group()
- pos_part_start = pos_body_start + m.end()
-
- # some data has suffix text after last boundary
- lpos_start = pos_part_start - pos_body_start
- if lpos_start != len(entire_body):
- yield pos_part_start, Text, entire_body[lpos_start:]
-
- def get_bodypart_tokens(self, text):
- # return the text as-is if:
- # * there is no content
- # * no content type is specified
- # * the content transfer encoding is not readable
- # * the maximum recursion depth has been exceeded
- if not text.strip() or not self.content_type:
- return [(0, Other, text)]
-
- cte = self.content_transfer_encoding
- if cte and cte not in {"8bit", "7bit", "quoted-printable"}:
- return [(0, Other, text)]
-
- if self.max_nested_level == 0:
- return [(0, Other, text)]
-
- # get lexer
- try:
- lexer = get_lexer_for_mimetype(self.content_type)
- except ClassNotFound:
- return [(0, Other, text)]
-
- if isinstance(lexer, type(self)):
- lexer.max_nested_level = self.max_nested_level - 1
-
- return lexer.get_tokens_unprocessed(text)
-
- def store_content_type(self, match):
- self.content_type = match.group(1)
-
- prefix_len = match.start(1) - match.start(0)
- yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len]
- yield match.start(1), Name.Label, match.group(2)
- yield match.end(2), String.Delimiter, '/'
- yield match.start(3), Name.Label, match.group(3)
-
- def get_content_type_subtokens(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Text.Whitespace, match.group(2)
- yield match.start(3), Name.Attribute, match.group(3)
- yield match.start(4), Operator, match.group(4)
- yield match.start(5), String, match.group(5)
-
- if match.group(3).lower() == "boundary":
- boundary = match.group(5).strip()
- if boundary[0] == '"' and boundary[-1] == '"':
- boundary = boundary[1:-1]
- self.boundary = boundary
-
- def store_content_transfer_encoding(self, match):
- self.content_transfer_encoding = match.group(0).lower()
- yield match.start(0), Name.Constant, match.group(0)
-
- attention_headers = {"content-type", "content-transfer-encoding"}
-
- tokens = {
- "root": [
- (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens),
- (r"^$[\s\S]+", get_body_tokens),
- ],
- "header": [
- # folding
- (r"\n[ \t]", Text.Whitespace),
- (r"\n(?![ \t])", Text.Whitespace, "#pop"),
- ],
- "content-type": [
- include("header"),
- (
- r"^\s*((multipart|application|audio|font|image|model|text|video"
- r"|message)/([\w-]+))",
- store_content_type,
- ),
- (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))',
- get_content_type_subtokens),
- (r';[ \t]*\n(?![ \t])', Text, '#pop'),
- ],
- "content-transfer-encoding": [
- include("header"),
- (r"([\w-]+)", store_content_transfer_encoding),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/minecraft.py b/venv/lib/python3.11/site-packages/pygments/lexers/minecraft.py
deleted file mode 100644
index 11faa00..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/minecraft.py
+++ /dev/null
@@ -1,394 +0,0 @@
-"""
- pygments.lexers.minecraft
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Minecraft related languages.
-
- SNBT. A data communication format used in Minecraft.
- wiki: https://minecraft.wiki/w/NBT_format
-
- MCFunction. The Function file for Minecraft Data packs and Add-ons.
- official: https://learn.microsoft.com/en-us/minecraft/creator/documents/functionsintroduction
- wiki: https://minecraft.wiki/w/Function
-
- MCSchema. A kind of data Schema for Minecraft Add-on Development.
- official: https://learn.microsoft.com/en-us/minecraft/creator/reference/content/schemasreference/
- community example: https://www.mcbe-dev.net/addons/data-driven/manifest.html
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, default, include, bygroups
-from pygments.token import Comment, Keyword, Literal, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
-
-__all__ = ['SNBTLexer', 'MCFunctionLexer', 'MCSchemaLexer']
-
-
-class SNBTLexer(RegexLexer):
- """Lexer for stringified NBT, a data format used in Minecraft
-
- .. versionadded:: 2.12.0
- """
-
- name = "SNBT"
- url = "https://minecraft.wiki/w/NBT_format"
- aliases = ["snbt"]
- filenames = ["*.snbt"]
- mimetypes = ["text/snbt"]
-
- tokens = {
- "root": [
- # We only look for the opening curly bracket here, since square brackets
- # are only valid in NBT pathing (which is an mcfunction concept).
- (r"\{", Punctuation, "compound"),
- (r"[^\{]+", Text),
- ],
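- # Illustrative example: a payload like {id:"minecraft:stone",Count:1b} enters
- # "compound" at the opening brace; keys, punctuation and literals are then
- # matched by the states below.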
-
- "whitespace": [
- (r"\s+", Whitespace),
- ],
-
- "operators": [
- (r"[,:;]", Punctuation),
- ],
-
- "literals": [
- (r"(true|false)", Keyword.Constant),
- (r"-?\d+[eE]-?\d+", Number.Float),
- (r"-?\d*\.\d+[fFdD]?", Number.Float),
- (r"-?\d+[bBsSlLfFdD]?", Number.Integer),
-
- # Separate states for both types of strings so they don't entangle
- (r'"', String.Double, "literals.string_double"),
- (r"'", String.Single, "literals.string_single"),
- ],
- "literals.string_double": [
- (r"\\.", String.Escape),
- (r'[^\\"\n]+', String.Double),
- (r'"', String.Double, "#pop"),
- ],
- "literals.string_single": [
- (r"\\.", String.Escape),
- (r"[^\\'\n]+", String.Single),
- (r"'", String.Single, "#pop"),
- ],
-
- "compound": [
- # this handles unquoted SNBT keys
- # note: quoted (stringified) keys still work
- (r"[A-Z_a-z]+", Name.Attribute),
- include("operators"),
- include("whitespace"),
- include("literals"),
- (r"\{", Punctuation, "#push"),
- (r"\[", Punctuation, "list"),
- (r"\}", Punctuation, "#pop"),
- ],
-
- "list": [
- (r"[A-Z_a-z]+", Name.Attribute),
- include("literals"),
- include("operators"),
- include("whitespace"),
- (r"\[", Punctuation, "#push"),
- (r"\{", Punctuation, "compound"),
- (r"\]", Punctuation, "#pop"),
- ],
- }
-
-
-class MCFunctionLexer(RegexLexer):
- """Lexer for the mcfunction scripting language used in Minecraft
- Modelled somewhat after the `GitHub mcfunction grammar <https://github.com/Arcensoth/language-mcfunction>`_.
-
- .. versionadded:: 2.12.0
- """
-
- name = "MCFunction"
- url = "https://minecraft.wiki/w/Commands"
- aliases = ["mcfunction", "mcf"]
- filenames = ["*.mcfunction"]
- mimetypes = ["text/mcfunction"]
-
- # Used to denote the start of a block comment, borrowed from GitHub's mcfunction grammar
- _block_comment_prefix = "[>!]"
-
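- # e.g. a line starting with "#>" or "#!" opens a block comment whose first
- # line is emphasized; subsequent lines starting with a plain "#" are lexed as
- # normal block-comment lines (see the "comments" states below).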
- tokens = {
- "root": [
- include("names"),
- include("comments"),
- include("literals"),
- include("whitespace"),
- include("property"),
- include("operators"),
- include("selectors"),
- ],
-
- "names": [
- # The start of a command (either beginning of line OR after the run keyword)
- # We don't encode a list of keywords since mods, plugins, or even pre-processors
- # may add new commands, so we have a 'close-enough' regex which catches them.
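- # e.g. in "execute as @a run say hi" both "execute" (start of line) and
- # "say" (after "run") are caught by the two rules below (illustrative).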
- (r"^(\s*)([a-z_]+)", bygroups(Whitespace, Name.Builtin)),
- (r"(?<=run)\s+[a-z_]+", Name.Builtin),
-
- # UUID
- (r"\b[0-9a-fA-F]+(?:-[0-9a-fA-F]+){4}\b", Name.Variable),
- include("resource-name"),
- # normal command names and scoreboards
- # there is no way to tell them apart, unfortunately
- (r"[A-Za-z_][\w.#%$]+", Keyword.Constant),
- (r"[#%$][\w.#%$]+", Name.Variable.Magic),
- ],
-
- "resource-name": [
- # resource names have to be lowercase
- (r"#?[a-z_][a-z_.-]*:[a-z0-9_./-]+", Name.Function),
- # similar to the above, except the `:` is optional
- # a `/` must be present "somewhere"
- (r"#?[a-z0-9_\.\-]+\/[a-z0-9_\.\-\/]+", Name.Function),
- ],
-
- "whitespace": [
- (r"\s+", Whitespace),
- ],
-
- "comments": [
- (rf"^\s*(#{_block_comment_prefix})", Comment.Multiline,
- ("comments.block", "comments.block.emphasized")),
- (r"#.*$", Comment.Single),
- ],
- "comments.block": [
- (rf"^\s*#{_block_comment_prefix}", Comment.Multiline,
- "comments.block.emphasized"),
- (r"^\s*#", Comment.Multiline, "comments.block.normal"),
- default("#pop"),
- ],
- "comments.block.normal": [
- include("comments.block.special"),
- (r"\S+", Comment.Multiline),
- (r"\n", Text, "#pop"),
- include("whitespace"),
- ],
- "comments.block.emphasized": [
- include("comments.block.special"),
- (r"\S+", String.Doc),
- (r"\n", Text, "#pop"),
- include("whitespace"),
- ],
- "comments.block.special": [
- # Params
- (r"@\S+", Name.Decorator),
-
- include("resource-name"),
-
- # Scoreboard player names
- (r"[#%$][\w.#%$]+", Name.Variable.Magic),
- ],
-
- "operators": [
- (r"[\-~%^?!+*<>\\/|&=.]", Operator),
- ],
-
- "literals": [
- (r"\.\.", Literal),
- (r"(true|false)", Keyword.Pseudo),
-
- # these are like unquoted strings and appear in many places
- (r"[A-Za-z_]+", Name.Variable.Class),
-
- (r"[0-7]b", Number.Byte),
- (r"[+-]?\d*\.?\d+([eE]?[+-]?\d+)?[df]?\b", Number.Float),
- (r"[+-]?\d+\b", Number.Integer),
- (r'"', String.Double, "literals.string-double"),
- (r"'", String.Single, "literals.string-single"),
- ],
- "literals.string-double": [
- (r"\\.", String.Escape),
- (r'[^\\"\n]+', String.Double),
- (r'"', String.Double, "#pop"),
- ],
- "literals.string-single": [
- (r"\\.", String.Escape),
- (r"[^\\'\n]+", String.Single),
- (r"'", String.Single, "#pop"),
- ],
-
- "selectors": [
- (r"@[a-z]", Name.Variable),
- ],
-
-
- ## Generic Property Container
- # There are several, differing instances where the language accepts
- # specific contained keys or contained key, value pairings.
- #
- # Property Maps:
- # - Starts with either `[` or `{`
- # - Key and value separated by `:` or `=`
- # - Delimited by `,`
- #
- # Property Lists:
- # - Starts with `[`
- # - Delimited by `,`
- #
- # For simplicity, these patterns match a generic, nestable structure
- # which follows a key/value pattern. For plain lists, there are only keys.
- # This allows some "illegal" structures, but we accept those for the
- # sake of simplicity.
- #
- # Examples:
- # - `[facing=up, powered=true]` (blockstate)
- # - `[name="hello world", nbt={key: 1b}]` (selector + nbt)
- # - `[{"text": "value"}, "literal"]` (json)
- ##
- "property": [
- # This state gets included in root and also several substates
- # We do this to shortcut the starting of new properties
- # within other properties. Lists can have sublists and compounds
- # and values can start a new property (see the `difficult_1.txt`
- # snippet).
- (r"\{", Punctuation, ("property.curly", "property.key")),
- (r"\[", Punctuation, ("property.square", "property.key")),
- ],
- "property.curly": [
- include("whitespace"),
- include("property"),
- (r"\}", Punctuation, "#pop"),
- ],
- "property.square": [
- include("whitespace"),
- include("property"),
- (r"\]", Punctuation, "#pop"),
-
- # lists can have sequences of items
- (r",", Punctuation),
- ],
- "property.key": [
- include("whitespace"),
-
- # resource names (for advancements)
- # the `:` can be omitted to default to the `minecraft:` namespace
- # if `:` appears in the name, check for a following equals sign
- (r"#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+(?=\s*\=)", Name.Attribute, "property.delimiter"),
- (r"#?[a-z_][a-z0-9_\.\-/]+", Name.Attribute, "property.delimiter"),
-
- # unquoted NBT key
- (r"[A-Za-z_\-\+]+", Name.Attribute, "property.delimiter"),
-
- # quoted JSON or NBT key
- (r'"', Name.Attribute, "property.delimiter", "literals.string-double"),
- (r"'", Name.Attribute, "property.delimiter", "literals.string-single"),
-
- # index for a list
- (r"-?\d+", Number.Integer, "property.delimiter"),
-
- default("#pop"),
- ],
- "property.key.string-double": [
- (r"\\.", String.Escape),
- (r'[^\\"\n]+', Name.Attribute),
- (r'"', Name.Attribute, "#pop"),
- ],
- "property.key.string-single": [
- (r"\\.", String.Escape),
- (r"[^\\'\n]+", Name.Attribute),
- (r"'", Name.Attribute, "#pop"),
- ],
- "property.delimiter": [
- include("whitespace"),
-
- (r"[:=]!?", Punctuation, "property.value"),
- (r",", Punctuation),
-
- default("#pop"),
- ],
- "property.value": [
- include("whitespace"),
-
- # unquoted resource names are valid literals here
- (r"#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+", Name.Tag),
- (r"#?[a-z_][a-z0-9_\.\-/]+", Name.Tag),
-
- include("literals"),
- include("property"),
-
- default("#pop"),
- ],
- }
-
-
-class MCSchemaLexer(RegexLexer):
- """Lexer for Minecraft Add-ons data Schemas, an interface structure standard used in Minecraft
-
- .. versionadded:: 2.14.0
- """
-
- name = 'MCSchema'
- url = 'https://learn.microsoft.com/en-us/minecraft/creator/reference/content/schemasreference/'
- aliases = ['mcschema']
- filenames = ['*.mcschema']
- mimetypes = ['text/mcschema']
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Whitespace),
- (r'//.*?$', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Whitespace, '#pop')
- ],
- 'singlestring': [
- (r'\\.', String.Escape),
- (r"'", String.Single, '#pop'),
- (r"[^\\']+", String.Single),
- ],
- 'doublestring': [
- (r'\\.', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'[^\\"]+', String.Double),
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
-
- # keywords for optional word and field types
- (r'(?<=: )opt', Operator.Word),
- (r'(?<=\s)[\w-]*(?=(\s+"|\n))', Keyword.Declaration),
-
- # numeric literals
- (r'0[bB][01]+', Number.Bin),
- (r'0[oO]?[0-7]+', Number.Oct),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'(\.\d+|\d+\.\d*|\d+)([eE][-+]?\d+)?', Number.Float),
-
- # possible punctuations
- (r'\.\.\.|=>', Punctuation),
- (r'\+\+|--|~|\?\?=?|\?|:|\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|(?:\*\*|\|\||&&|[-<>+*%&|^/]))=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- # strings
- (r"'", String.Single, 'singlestring'),
- (r'"', String.Double, 'doublestring'),
-
- # title line
- (r'[\w-]*?(?=:\{?\n)', String.Symbol),
- # title line with a version code, formatted
- # `major.minor.patch-prerelease+buildmeta`
- (r'([\w-]*?)(:)(\d+)(?:(\.)(\d+)(?:(\.)(\d+)(?:(\-)((?:[^\W_]|-)*(?:\.(?:[^\W_]|-)*)*))?(?:(\+)((?:[^\W_]|-)+(?:\.(?:[^\W_]|-)+)*))?)?)?(?=:\{?\n)', bygroups(String.Symbol, Operator, Number.Integer, Operator, Number.Integer, Operator, Number.Integer, Operator, String, Operator, String)),
-
- (r'.*\n', Text),
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/mips.py b/venv/lib/python3.11/site-packages/pygments/lexers/mips.py
deleted file mode 100644
index 257605d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/mips.py
+++ /dev/null
@@ -1,128 +0,0 @@
-"""
- pygments.lexers.mips
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for MIPS assembly.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Whitespace, Comment, String, Keyword, Name, Text
-
-__all__ = ["MIPSLexer"]
-
-
-class MIPSLexer(RegexLexer):
- """
- A MIPS Assembly Lexer.
-
- Based on the Emacs major mode by hlissner:
- https://github.com/hlissner/emacs-mips-mode
- """
-
- name = 'MIPS'
- aliases = ['mips']
- # TODO: add '*.s' and '*.asm', which will require designing an analyse_text
- # method for this lexer and refactoring those from Gas and Nasm in order to
- # have relatively reliable detection
- filenames = ['*.mips', '*.MIPS']
-
- keywords = [
- # Arithmetic instructions
- "add", "sub", "subu", "addi", "subi", "addu", "addiu",
- # Multiplication/division
- "mul", "mult", "multu", "mulu", "madd", "maddu", "msub", "msubu", "div", "divu",
- # Bitwise operations
- "and", "or", "nor", "xor", "andi", "ori", "xori", "clo", "clz",
- # Shifts
- "sll", "srl", "sllv", "srlv", "sra", "srav",
- # Comparisons
- "slt", "sltu", "slti", "sltiu",
- # Move data
- "mfhi", "mthi", "mflo", "mtlo", "movn", "movz", "movf", "movt",
- # Jump
- "j", "jal", "jalr", "jr",
- # branch
- "bc1f", "bc1t", "beq", "bgez", "bgezal", "bgtz", "blez", "bltzal", "bltz", "bne",
- # Load
- "lui", "lb", "lbu", "lh", "lhu", "lw", "lwcl", "lwl", "lwr",
- # Store
- "sb", "sh", "sw", "swl", "swr", # coproc: swc1 sdc1
- # Concurrent load/store
- "ll", "sc",
- # Trap handling
- "teq", "teqi", "tne", "tneqi", "tge", "tgeu", "tgei", "tgeiu", "tlt", "tltu", "tlti",
- "tltiu",
- # Exception / Interrupt
- "eret", "break", "bop", "syscall",
- # --- Floats -----------------------------------------------------
- # Arithmetic
- "add.s", "add.d", "sub.s", "sub.d", "mul.s", "mul.d", "div.s", "div.d", "neg.d",
- "neg.s",
- # Comparison
- "c.e.d", "c.e.s", "c.le.d", "c.le.s", "c.lt.s", "c.lt.d", # "c.gt.s", "c.gt.d",
- "madd.s", "madd.d", "msub.s", "msub.d",
- # Move Floats
- "mov.d", "move.s", "movf.d", "movf.s", "movt.d", "movt.s", "movn.d", "movn.s",
- "movnzd", "movz.s", "movz.d",
- # Conversion
- "cvt.d.s", "cvt.d.w", "cvt.s.d", "cvt.s.w", "cvt.w.d", "cvt.w.s", "trunc.w.d",
- "trunc.w.s",
- # Math
- "abs.s", "abs.d", "sqrt.s", "sqrt.d", "ceil.w.d", "ceil.w.s", "floor.w.d",
- "floor.w.s", "round.w.d", "round.w.s",
- ]
-
- pseudoinstructions = [
- # Arithmetic & logical
- "rem", "remu", "mulo", "mulou", "abs", "neg", "negu", "not", "rol", "ror",
- # branches
- "b", "beqz", "bge", "bgeu", "bgt", "bgtu", "ble", "bleu", "blt", "bltu", "bnez",
- # loads
- "la", "li", "ld", "ulh", "ulhu", "ulw",
- # Store
- "sd", "ush", "usw",
- # move
- "move", # coproc: "mfc1.d",
- # comparisons
- "sgt", "sgtu", "sge", "sgeu", "sle", "sleu", "sne", "seq",
- # --- Floats -----------------------------------------------------
- # load-store
- "l.d", "l.s", "s.d", "s.s",
- ]
-
- directives = [
- ".align", ".ascii", ".asciiz", ".byte", ".data", ".double", ".extern", ".float",
- ".globl", ".half", ".kdata", ".ktext", ".space", ".text", ".word",
- ]
-
- deprecated = [
- "beql", "bnel", "bgtzl", "bgezl", "bltzl", "blezl", "bltzall", "bgezall",
- ]
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#.*', Comment),
- (r'"', String, 'string'),
- (r'-?[0-9]+?', Keyword.Constant),
- (r'\w*:', Name.Function),
- (words(deprecated, suffix=r'\b'), Keyword.Pseudo), # need warning face
- (words(pseudoinstructions, suffix=r'\b'), Name.Variable),
- (words(keywords, suffix=r'\b'), Keyword),
- (r'[slm][ftwd]c[0-9]([.]d)?', Keyword),
- (r'\$(f?[0-2][0-9]|f?3[01]|[ft]?[0-9]|[vk][01]|a[0-3]|s[0-7]|[gsf]p|ra|at|zero)',
- Keyword.Type),
- (words(directives, suffix=r'\b'), Name.Entity), # Preprocessor?
- (r':|,|;|\{|\}|=>|@|\$|=', Name.Builtin),
- (r'\w+', Text),
- (r'.', Text),
- ],
- 'string': [
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'[^\\"]+', String),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ml.py b/venv/lib/python3.11/site-packages/pygments/lexers/ml.py
deleted file mode 100644
index 3dfa6d9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ml.py
+++ /dev/null
@@ -1,960 +0,0 @@
-"""
- pygments.lexers.ml
- ~~~~~~~~~~~~~~~~~~
-
- Lexers for ML family languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer', 'ReasonLexer', 'FStarLexer']
-
-
-class SMLLexer(RegexLexer):
- """
- For the Standard ML language.
-
- .. versionadded:: 1.5
- """
-
- name = 'Standard ML'
- aliases = ['sml']
- filenames = ['*.sml', '*.sig', '*.fun']
- mimetypes = ['text/x-standardml', 'application/x-standardml']
-
- alphanumid_reserved = {
- # Core
- 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
- 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
- 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
- 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
- # Modules
- 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
- 'struct', 'structure', 'where',
- }
-
- symbolicid_reserved = {
- # Core
- ':', r'\|', '=', '=>', '->', '#',
- # Modules
- ':>',
- }
-
- nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
-
- alphanumid_re = r"[a-zA-Z][\w']*"
- symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
-
- # A character constant is a sequence of the form #s, where s is a string
- # constant denoting a string of size one character. This setup just parses
- # the entire string as either a String.Double or a String.Char (depending
- # on the argument), even if the String.Char is an erroneous
- # multiple-character string.
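- # e.g. #"a" is lexed as a character constant here, and so is the erroneous
- # multi-character #"ab" -- this lexer does not reject it.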
- def stringy(whatkind):
- return [
- (r'[^"\\]', whatkind),
- (r'\\[\\"abtnvfr]', String.Escape),
- # Control-character notation is used for codes < 32,
- # where \^@ == \000
- (r'\\\^[\x40-\x5e]', String.Escape),
- # Docs say 'decimal digits'
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\\s+\\', String.Interpol),
- (r'"', whatkind, '#pop'),
- ]
-
- # Callbacks for distinguishing tokens and reserved words
- def long_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved:
- token = Error
- else:
- token = Name.Namespace
- yield match.start(1), token, match.group(1)
- yield match.start(2), Punctuation, match.group(2)
-
- def end_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved:
- token = Error
- elif match.group(1) in self.symbolicid_reserved:
- token = Error
- else:
- token = Name
- yield match.start(1), token, match.group(1)
-
- def id_callback(self, match):
- str = match.group(1)
- if str in self.alphanumid_reserved:
- token = Keyword.Reserved
- elif str in self.symbolicid_reserved:
- token = Punctuation
- else:
- token = Name
- yield match.start(1), token, str
-
- tokens = {
- # Whitespace and comments are (almost) everywhere
- 'whitespace': [
- (r'\s+', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
-
- 'delimiters': [
- # This lexer treats these delimiters specially:
- # Delimiters define scopes, and the scope is how the meaning of
- # the `|' is resolved - is it a case/handle expression, or function
- # definition by cases? (This is not how the Definition works, but
- # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
- (r'\(|\[|\{', Punctuation, 'main'),
- (r'\)|\]|\}', Punctuation, '#pop'),
- (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
- (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
- (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
- ],
-
- 'core': [
- # Punctuation that doesn't overlap symbolic identifiers
- (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
- Punctuation),
-
- # Special constants: strings, floats, numbers in decimal and hex
- (r'#"', String.Char, 'char'),
- (r'"', String.Double, 'string'),
- (r'~?0x[0-9a-fA-F]+', Number.Hex),
- (r'0wx[0-9a-fA-F]+', Number.Hex),
- (r'0w\d+', Number.Integer),
- (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
- (r'~?\d+\.\d+', Number.Float),
- (r'~?\d+[eE]~?\d+', Number.Float),
- (r'~?\d+', Number.Integer),
-
- # Labels
- (r'#\s*[1-9][0-9]*', Name.Label),
- (r'#\s*(%s)' % alphanumid_re, Name.Label),
- (r'#\s+(%s)' % symbolicid_re, Name.Label),
- # Some reserved words trigger a special, local lexer state change
- (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
- (r'\b(exception)\b(?!\')', Keyword.Reserved, 'ename'),
- (r'\b(functor|include|open|signature|structure)\b(?!\')',
- Keyword.Reserved, 'sname'),
- (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
-
- # Regular identifiers, long and otherwise
- (r'\'[\w\']*', Name.Decorator),
- (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
- (r'(%s)' % alphanumid_re, id_callback),
- (r'(%s)' % symbolicid_re, id_callback),
- ],
- 'dotted': [
- (r'(%s)(\.)' % alphanumid_re, long_id_callback),
- (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
- (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
- (r'\s+', Error),
- (r'\S+', Error),
- ],
-
-
- # Main parser (prevents errors in files that have scoping errors)
- 'root': [
- default('main')
- ],
-
- # In this scope, I expect '|' to not be followed by a function name,
- # and I expect 'and' to be followed by a binding site
- 'main': [
- include('whitespace'),
-
- # Special behavior of val/and/fun
- (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
- (r'\b(fun)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main-fun', 'fname')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # In this scope, I expect '|' and 'and' to be followed by a function
- 'main-fun': [
- include('whitespace'),
-
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- # Special behavior of val/and/fun
- (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
- (r'\b(val)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main', 'vname')),
-
- # Special behavior of '|' and '|'-manipulating keywords
- (r'\|', Punctuation, 'fname'),
- (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Character and string parsers
- 'char': stringy(String.Char),
- 'string': stringy(String.Double),
-
- 'breakout': [
- (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
- ],
-
- # Dealing with what comes after module system keywords
- 'sname': [
- include('whitespace'),
- include('breakout'),
-
- (r'(%s)' % alphanumid_re, Name.Namespace),
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
- 'fname': [
- include('whitespace'),
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
-
- # Ignore interesting function declarations like "fun (x + y) = ..."
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'val' (or 'and') keyword
- 'vname': [
- include('whitespace'),
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
-
- # Ignore interesting patterns like 'val (x, y)'
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'type' (or 'and') keyword
- 'tname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # A type binding includes most identifiers
- 'typbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Dealing with what comes after the 'datatype' (or 'and') keyword
- 'dname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'(=)(\s*)(datatype)',
- bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
- (r'=(?!%s)' % symbolicid_re, Punctuation,
- ('#pop', 'datbind', 'datcon')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # common case - A | B | C of int
- 'datbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
- (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- (r'(\|)(\s*)(%s)' % alphanumid_re,
- bygroups(Punctuation, Text, Name.Class)),
- (r'(\|)(\s+)(%s)' % symbolicid_re,
- bygroups(Punctuation, Text, Name.Class)),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Dealing with what comes after an exception
- 'ename': [
- include('whitespace'),
-
- (r'(and\b)(\s+)(%s)' % alphanumid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'(and\b)(\s*)(%s)' % symbolicid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
- (r'(%s)|(%s)' % (alphanumid_re, symbolicid_re), Name.Class),
-
- default('#pop'),
- ],
-
- 'datcon': [
- include('whitespace'),
- (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Series of type variables
- 'tyvarseq': [
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- (r'\'[\w\']*', Name.Decorator),
- (alphanumid_re, Name),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (symbolicid_re, Name),
- ],
-
- 'comment': [
- (r'[^(*)]', Comment.Multiline),
- (r'\(\*', Comment.Multiline, '#push'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[(*)]', Comment.Multiline),
- ],
- }
-
-
-class OcamlLexer(RegexLexer):
- """
- For the OCaml language.
-
- .. versionadded:: 0.7
- """
-
- name = 'OCaml'
- url = 'https://ocaml.org/'
- aliases = ['ocaml']
- filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
- mimetypes = ['text/x-ocaml']
-
- keywords = (
- 'and', 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- 'downto', 'else', 'end', 'exception', 'external', 'false',
- 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'val', 'virtual', 'when', 'while', 'with',
- )
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
- r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
- '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
- )
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ('asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'\(\*(?![)])', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class OpaLexer(RegexLexer):
- """
- Lexer for the Opa language.
-
- .. versionadded:: 1.5
- """
-
- name = 'Opa'
- aliases = ['opa']
- filenames = ['*.opa']
- mimetypes = ['text/x-opa']
-
- # most of these aren't strictly keywords
- # but if you color only real keywords, you might just
- # as well not color anything
- keywords = (
- 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
- 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
- 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
- 'type', 'val', 'with', 'xml_parser',
- )
-
- # matches both stuff and `stuff`
- ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
-
- op_re = r'[.=\-<>,@~%/+?*&^!]'
- punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
- # because they are also used for inserts
-
- tokens = {
- # copied from the caml lexer, should be adapted
- 'escape-sequence': [
- (r'\\[\\"\'ntr}]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
-
- # factorizing these rules, because they are inserted many times
- 'comments': [
- (r'/\*', Comment, 'nested-comment'),
- (r'//.*?$', Comment),
- ],
- 'comments-and-spaces': [
- include('comments'),
- (r'\s+', Text),
- ],
-
- 'root': [
- include('comments-and-spaces'),
- # keywords
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- # directives
- # we could parse the actual set of directives instead of anything
- # starting with @, but this is troublesome
- # because it needs to be adjusted all the time
- # and assuming we parse only sources that compile, it is useless
- (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
-
- # number literals
- (r'-?\.[\d]+([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+\.\d*([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+[eE][+\-]?\d+', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'\d+', Number.Integer),
- # color literals
- (r'#[\da-fA-F]{3,6}', Number.Integer),
-
- # string literals
- (r'"', String.Double, 'string'),
- # char literal, should be checked because this is the regexp from
- # the caml lexer
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
- String.Char),
-
- # this is meant to deal with embedded exprs in strings:
- # every time we find a '}' we pop a state, so that if we were
- # inside a string we are back in the string state;
- # as a consequence, we must also push a state every time we find
- # a '{', or else we would get errors when parsing e.g. {}
- # (a short usage sketch follows this lexer's token table)
- (r'\{', Operator, '#push'),
- (r'\}', Operator, '#pop'),
-
- # html literals
- # this is much stricter than the actual parser,
- # since a<b would not be parsed as html here,
- # but then again the actual parser is far too lax, and we
- # can't hope to be as tolerant
- (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
-
- # db path
- # matching the '[_]' in '/a[_]' because it is part
- # of the syntax of db path definitions;
- # unfortunately, I don't know how to match the ']' in
- # /a[1], so this is somewhat inconsistent
- (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
- # putting the same color on <- as on db path, since
- # it can be used only to mean Db.write
- (r'<-(?!'+op_re+r')', Name.Variable),
-
- # 'modules'
- # although modules are not distinguished by their names as in caml,
- # the standard library seems to follow the convention that only
- # modules are capitalized
- (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
-
- # operators
- # = has a special role because it is the only way
- # to syntactically distinguish binding constructions;
- # unfortunately, this colors the equal sign in {x=2} too
- (r'=(?!'+op_re+r')', Keyword),
- (r'(%s)+' % op_re, Operator),
- (r'(%s)+' % punc_re, Operator),
-
- # coercions
- (r':', Operator, 'type'),
- # type variables
- # we need this rule because we don't parse type definitions
- # specially, so in "type t('a) = ...", "'a" is parsed by 'root'
- ("'"+ident_re, Keyword.Type),
-
- # id literal, #something, or #{expr}
- (r'#'+ident_re, String.Single),
- (r'#(?=\{)', String.Single),
-
- # identifiers
- # this avoids coloring the '2' in 'a2' as an integer
- (ident_re, Text),
-
- # default, not sure if that is needed or not
- # (r'.', Text),
- ],
-
- # it is quite painful to have to parse types to know where they end
- # this is the general rule for a type
- # a type is either:
- # * -> ty
- # * type-with-slash
- # * type-with-slash -> ty
- # * type-with-slash (, type-with-slash)+ -> ty
- #
- # the code in here is pretty convoluted, but it would roughly
- # translate into the following caml (a worked example follows
- # this lexer's token table):
- # let rec type stream =
- # match stream with
- # | [< "->"; stream >] -> type stream
- # | [< ""; stream >] ->
- # type_with_slash stream
- # type_lhs_1 stream;
- # and type_1 stream = ...
- 'type': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type),
- default(('#pop', 'type-lhs-1', 'type-with-slash')),
- ],
-
- # parses all the atomic or closed constructions in the syntax of type
- # expressions: record types, tuple types, type constructors, basic type
- # and type variables
- 'type-1': [
- include('comments-and-spaces'),
- (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (r'~?\{', Keyword.Type, ('#pop', 'type-record')),
- (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (ident_re, Keyword.Type, '#pop'),
- ("'"+ident_re, Keyword.Type),
- # this case is not in the syntax but sometimes
- # we think we are parsing types when in fact we are parsing
- # some css, so we just pop the states until we get back into
- # the root state
- default('#pop'),
- ],
-
- # type-with-slash is either:
- # * type-1
- # * type-1 (/ type-1)+
- 'type-with-slash': [
- include('comments-and-spaces'),
- default(('#pop', 'slash-type-1', 'type-1')),
- ],
- 'slash-type-1': [
- include('comments-and-spaces'),
- ('/', Keyword.Type, ('#pop', 'type-1')),
- # same remark as above
- default('#pop'),
- ],
-
- # we go in this state after having parsed a type-with-slash
- # while trying to parse a type
- # and at this point we must determine if we are parsing an arrow
- # type (in which case we must continue parsing) or not (in which
- # case we stop)
- 'type-lhs-1': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
- default('#pop'),
- ],
- 'type-arrow': [
- include('comments-and-spaces'),
- # the lookahead here allows f(x : int, y : float -> truc)
- # to be parsed correctly
- (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- # same remark as above
- default('#pop'),
- ],
-
- # no need to do precise parsing for tuples and records
- # because they are closed constructions, so we can simply
- # find the closing delimiter
- # note that this would not work if the source contained
- # identifiers like `{)` (although it could be patched
- # to support them)
- 'type-tuple': [
- include('comments-and-spaces'),
- (r'[^()/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\(', Keyword.Type, '#push'),
- (r'\)', Keyword.Type, '#pop'),
- ],
- 'type-record': [
- include('comments-and-spaces'),
- (r'[^{}/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\{', Keyword.Type, '#push'),
- (r'\}', Keyword.Type, '#pop'),
- ],
-
- # 'type-tuple': [
- # include('comments-and-spaces'),
- # (r'\)', Keyword.Type, '#pop'),
- # default(('#pop', 'type-tuple-1', 'type-1')),
- # ],
- # 'type-tuple-1': [
- # include('comments-and-spaces'),
- # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
- # (r',', Keyword.Type, 'type-1'),
- # ],
- # 'type-record':[
- # include('comments-and-spaces'),
- # (r'\}', Keyword.Type, '#pop'),
- # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
- # ],
- # 'type-record-field-expr': [
- #
- # ],
-
- 'nested-comment': [
- (r'[^/*]+', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[/*]', Comment),
- ],
-
- # the copy pasting between string and single-string
- # is kinda sad. Is there a way to avoid that??
- 'string': [
- (r'[^\\"{]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\{', Operator, 'root'),
- include('escape-sequence'),
- ],
- 'single-string': [
- (r'[^\\\'{]+', String.Double),
- (r'\'', String.Double, '#pop'),
- (r'\{', Operator, 'root'),
- include('escape-sequence'),
- ],
-
- # all the html stuff
- # can't really reuse some existing html parser
- # because we must be able to parse embedded expressions
-
- # we are in this state after someone parsed the '<' that
- # started the html literal
- 'html-open-tag': [
- (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- # we are in this state after someone parsed the '</' that
- # started the end of the closing tag
- 'html-end-tag': [
- # this is a star, because </> is allowed
- (r'[\w\-:]*>', String.Single, '#pop'),
- ],
-
- # we are in this state after having parsed '<ident(:ident)?'
- # we thus parse a possibly empty list of attributes
- 'html-attr': [
- (r'\s+', Text),
- (r'[\w\-:]+=', String.Single, 'html-attr-value'),
- (r'/>', String.Single, '#pop'),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- 'html-attr-value': [
- (r"'", String.Single, ('#pop', 'single-string')),
- (r'"', String.Single, ('#pop', 'string')),
- (r'#'+ident_re, String.Single, '#pop'),
- (r'#(?=\{)', String.Single, ('#pop', 'root')),
- (r'[^"\'{`=<>]+', String.Single, '#pop'),
- (r'\{', Operator, ('#pop', 'root')), # this is a tail call!
- ],
-
- # we should probably deal with '\' escapes here
- 'html-content': [
- (r'<!--', Comment, 'html-comment'),
- (r'</', String.Single, ('#pop', 'html-end-tag')),
- (r'<', String.Single, 'html-open-tag'),
- (r'\{', Operator, 'root'),
- (r'[^<{]+', String.Single),
- ],
-
- 'html-comment': [
- (r'-->', Comment, '#pop'),
- (r'[^\-]+|-', Comment),
- ],
- }
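A minimal usage sketch of the embedded-expression handling described in the
'root' and 'string' states above (assuming a Pygments installation that still
ships OpaLexer): tokenising a string literal containing "{name}" shows the
lexer leaving the string state at '{' and re-entering it at '}'.

    from pygments.lexers import OpaLexer

    code = 'greeting = "hello {name}!"'
    for token_type, value in OpaLexer().get_tokens(code):
        print(token_type, repr(value))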
-
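In the same spirit, the type states sketched in the grammar comment above can
be observed on a coercion (the Opa line below is made up purely for
illustration); apart from whitespace, everything after the ':' should come out
as Keyword.Type.

    from pygments.lexers import OpaLexer
    from pygments.token import Keyword

    code = 'f : int, string -> bool'
    type_tokens = [v for t, v in OpaLexer().get_tokens(code) if t in Keyword.Type]
    print(type_tokens)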
-
-class ReasonLexer(RegexLexer):
- """
- For the ReasonML language.
-
- .. versionadded:: 2.6
- """
-
- name = 'ReasonML'
- url = 'https://reasonml.github.io/'
- aliases = ['reasonml', 'reason']
- filenames = ['*.re', '*.rei']
- mimetypes = ['text/x-reasonml']
-
- keywords = (
- 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
- 'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
- 'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
- 'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
- 'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'val', 'virtual', 'when', 'while', 'with',
- )
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
- r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
- '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
- )
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or')
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'//.*?\n', Comment.Single),
- (r'\/\*(?!/)', Comment.Multiline, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword),
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'\/\*', Comment.Multiline, '#push'),
- (r'\*\/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class FStarLexer(RegexLexer):
- """
- For the F* language.
-
- .. versionadded:: 2.7
- """
-
- name = 'FStar'
- url = 'https://www.fstar-lang.org/'
- aliases = ['fstar']
- filenames = ['*.fst', '*.fsti']
- mimetypes = ['text/x-fstar']
-
- keywords = (
- 'abstract', 'attributes', 'noeq', 'unopteq', 'and',
- 'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures',
- 'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if',
- 'in', 'include', 'inline', 'inline_for_extraction', 'irreducible',
- 'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract',
- 'of', 'open', 'opaque', 'private', 'range_of', 'reifiable',
- 'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect',
- 'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable',
- 'val', 'when', 'with', 'not'
- )
- decl_keywords = ('let', 'rec')
- assume_keywords = ('assume', 'admit', 'assert', 'calc')
- keyopts = (
- r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
- r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
- r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
- r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
- r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
- )
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'\(\*(?![)])', Comment, 'comment'),
- (r'\/\/.+$', Comment),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception),
- (r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
- (r"\`([\w\'.]+)\`", Operator.Word), # for infix applications
- (r"\`", Keyword), # for quoting
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/modeling.py b/venv/lib/python3.11/site-packages/pygments/lexers/modeling.py
deleted file mode 100644
index e247680..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/modeling.py
+++ /dev/null
@@ -1,369 +0,0 @@
-"""
- pygments.lexers.modeling
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for modeling languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers import _stan_builtins
-
-__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
-
-
-class ModelicaLexer(RegexLexer):
- """
- For Modelica source code.
-
- .. versionadded:: 1.1
- """
- name = 'Modelica'
- url = 'http://www.modelica.org/'
- aliases = ['modelica']
- filenames = ['*.mo']
- mimetypes = ['text/x-modelica']
-
- flags = re.DOTALL | re.MULTILINE
-
- _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
-
- tokens = {
- 'whitespace': [
- (r'[\s\ufeff]+', Text),
- (r'//[^\n]*\n?', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'root': [
- include('whitespace'),
- (r'"', String.Double, 'string'),
- (r'[()\[\]{},;]+', Punctuation),
- (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
- (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
- (r'\d+', Number.Integer),
- (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
- r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
- r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
- r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
- r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
- r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
- r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
- r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
- r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
- r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
- r'transpose|vector|zeros)\b', Name.Builtin),
- (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
- r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
- r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
- r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
- r'output|parameter|partial|protected|public|pure|redeclare|'
- r'replaceable|return|stream|then|when|while)\b',
- Keyword.Reserved),
- (r'(and|not|or)\b', Operator.Word),
- (r'(block|class|connector|end|function|model|operator|package|'
- r'record|type)\b', Keyword.Reserved, 'class'),
- (r'(false|true)\b', Keyword.Constant),
- (r'within\b', Keyword.Reserved, 'package-prefix'),
- (_name, Name)
- ],
- 'class': [
- include('whitespace'),
- (r'(function|record)\b', Keyword.Reserved),
- (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
- (_name, Name.Class, '#pop'),
- default('#pop')
- ],
- 'package-prefix': [
- include('whitespace'),
- (_name, Name.Namespace, '#pop'),
- default('#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\[\'"?\\abfnrtv]', String.Escape),
- (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
- using(HtmlLexer)),
- (r'<|\\?[^"\\<]+', String.Double)
- ]
- }
-
-
-class BugsLexer(RegexLexer):
- """
- Pygments Lexer for OpenBugs and WinBugs
- models.
-
- .. versionadded:: 1.6
- """
-
- name = 'BUGS'
- aliases = ['bugs', 'winbugs', 'openbugs']
- filenames = ['*.bug']
-
- _FUNCTIONS = (
- # Scalar functions
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
- 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
- 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
- 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
- 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
- 'trunc',
- # Vector functions
- 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
- 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
- 'sd', 'sort', 'sum',
- # Special
- 'D', 'I', 'F', 'T', 'C')
- """ OpenBUGS built-in functions
-
- From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
-
- This also includes
-
- - T, C, I : Truncation and censoring.
- ``T`` and ``C`` are in OpenBUGS; ``I`` is in WinBUGS.
- - D : ODE
- - F : Functional http://www.openbugs.info/Examples/Functionals.html
-
- """
-
- _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
- 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
- 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
- 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
- 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
- 'dmt', 'dwish')
- """ OpenBUGS built-in distributions
-
- Functions from
- http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
- """
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'comments': [
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model)(\s+)(\{)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # Reserved Words
- (r'(for|in)(?![\w.])', Keyword.Reserved),
- # Built-in Functions
- (r'(%s)(?=\s*\()'
- % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
- Name.Builtin),
- # Regular variable names
- (r'[A-Za-z][\w.]*', Name),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- # Punctuation
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
- # Infix and prefix operators
- (r'\+|-|\*|/', Operator),
- # Block
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r"^\s*model\s*{", text, re.M):
- return 0.7
- else:
- return 0.0
-
-
-class JagsLexer(RegexLexer):
- """
- Pygments Lexer for JAGS.
-
- .. versionadded:: 1.6
- """
-
- name = 'JAGS'
- aliases = ['jags']
- filenames = ['*.jag', '*.bug']
-
- # JAGS
- _FUNCTIONS = (
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cos', 'cosh', 'cloglog',
- 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
- 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
- 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
- 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
- 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
- # Truncation/Censoring (should I include)
- 'T', 'I')
- # Distributions with density, probability and quantile functions
- _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
- ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
- 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
- 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
- # Other distributions without density and probability
- _OTHER_DISTRIBUTIONS = (
- 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
- 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
- 'dnbinom', 'dweibull', 'ddirich')
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'names': [
- # Regular variable names
- (r'[a-zA-Z][\w.]*\b', Name),
- ],
- 'comments': [
- # do not use stateful comments
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model|data)(\s+)(\{)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- (r'var(?![\w.])', Keyword.Declaration),
- # Reserved Words
- (r'(for|in)(?![\w.])', Keyword.Reserved),
- # Builtins
- # Need to use lookahead because . is a valid char
- (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
- + _DISTRIBUTIONS
- + _OTHER_DISTRIBUTIONS),
- Name.Builtin),
- # Names
- include('names'),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- (r'<-|~', Operator),
- # JAGS includes many more operators than OpenBUGS
- (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*model\s*\{', text, re.M):
- if re.search(r'^\s*data\s*\{', text, re.M):
- return 0.9
- elif re.search(r'^\s*var', text, re.M):
- return 0.9
- else:
- return 0.3
- else:
- return 0
-
-
-class StanLexer(RegexLexer):
- """Pygments Lexer for Stan models.
-
- The Stan modeling language is specified in the *Stan Modeling Language
- User's Guide and Reference Manual, v2.17.0*,
- `pdf <https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf>`__.
-
- .. versionadded:: 1.6
- """
-
- name = 'Stan'
- aliases = ['stan']
- filenames = ['*.stan']
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'(//|#).*$', Comment.Single),
- ],
- 'root': [
- (r'"[^"]*"', String),
- # Comments
- include('comments'),
- # Whitespace
- include('whitespace'),
- # Block start
- (r'(%s)(\s*)(\{)' %
- r'|'.join(('functions', 'data', r'transformed\s+?data',
- 'parameters', r'transformed\s+parameters',
- 'model', r'generated\s+quantities')),
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # target keyword
- (r'target\s*\+=', Keyword),
- # Reserved Words
- (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
- # Truncation
- (r'T(?=\s*\[)', Keyword),
- # Data types
- (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
- # < should be punctuation, but elsewhere I can't tell if it is in
- # a range constraint
- (r'(<)(\s*)(upper|lower|offset|multiplier)(\s*)(=)',
- bygroups(Operator, Whitespace, Keyword, Whitespace, Punctuation)),
- (r'(,)(\s*)(upper)(\s*)(=)',
- bygroups(Punctuation, Whitespace, Keyword, Whitespace, Punctuation)),
- # Punctuation
- (r"[;,\[\]()]", Punctuation),
- # Builtin
- (r'(%s)(?=\s*\()' % '|'.join(_stan_builtins.FUNCTIONS), Name.Builtin),
- (r'(~)(\s*)(%s)(?=\s*\()' % '|'.join(_stan_builtins.DISTRIBUTIONS),
- bygroups(Operator, Whitespace, Name.Builtin)),
- # Special names ending in __, like lp__
- (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
- (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
- # user-defined functions
- (r'[A-Za-z]\w*(?=\s*\()', Name.Function),
- # Imaginary Literals
- (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?i', Number.Float),
- (r'\.[0-9]+([eE][+-]?[0-9]+)?i', Number.Float),
- (r'[0-9]+i', Number.Float),
- # Real Literals
- (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?', Number.Float),
- (r'\.[0-9]+([eE][+-]?[0-9]+)?', Number.Float),
- # Integer Literals
- (r'[0-9]+', Number.Integer),
- # Regular variable names
- (r'[A-Za-z]\w*\b', Name),
- # Assignment operators
- (r'<-|(?:\+|-|\.?/|\.?\*|=)?=|~', Operator),
- # Infix, prefix and postfix operators (and = )
- (r"\+|-|\.?\*|\.?/|\\|'|\.?\^|!=?|<=?|>=?|\|\||&&|%|\?|:|%/%|!", Operator),
- # Block delimiters
- (r'[{}]', Punctuation),
- # Distribution |
- (r'\|', Punctuation)
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*parameters\s*\{', text, re.M):
- return 1.0
- else:
- return 0.0
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/modula2.py b/venv/lib/python3.11/site-packages/pygments/lexers/modula2.py
deleted file mode 100644
index 8bd4765..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/modula2.py
+++ /dev/null
@@ -1,1580 +0,0 @@
-"""
- pygments.lexers.modula2
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Multi-Dialect Lexer for Modula-2.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
- String, Number, Punctuation, Error
-
-__all__ = ['Modula2Lexer']
-
-
-# Multi-Dialect Modula-2 Lexer
-class Modula2Lexer(RegexLexer):
- """
- For Modula-2 source code.
-
- The Modula-2 lexer supports several dialects. By default, it operates in
- fallback mode, recognising the *combined* literals, punctuation symbols
- and operators of all supported dialects, and the *combined* reserved words
- and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
- differentiating between library defined identifiers.
-
- To select a specific dialect, a dialect option may be passed
- or a dialect tag may be embedded into a source file.
-
- Dialect Options:
-
- `m2pim`
- Select PIM Modula-2 dialect.
- `m2iso`
- Select ISO Modula-2 dialect.
- `m2r10`
- Select Modula-2 R10 dialect.
- `objm2`
- Select Objective Modula-2 dialect.
-
- The PIM and ISO dialect options may be qualified with a language extension.
-
- Language Extensions:
-
- `+aglet`
- Select Aglet Modula-2 extensions, available with m2iso.
- `+gm2`
- Select GNU Modula-2 extensions, available with m2pim.
- `+p1`
- Select p1 Modula-2 extensions, available with m2iso.
- `+xds`
- Select XDS Modula-2 extensions, available with m2iso.
-
-
- Passing a Dialect Option via Unix Commandline Interface
-
- Dialect options may be passed to the lexer using the `dialect` key.
- Only one such option should be passed. If multiple dialect options are
- passed, the first valid option is used and any subsequent options are ignored.
-
- Examples:
-
- `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
- Use ISO dialect to render input to HTML output
- `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
- Use ISO dialect with p1 extensions to render input to RTF output
-
-
- Embedding a Dialect Option within a source file
-
- A dialect option may be embedded in a source file in the form of a dialect
- tag, a specially formatted comment that specifies a dialect option.
-
- Dialect Tag EBNF::
-
- dialectTag :
- OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
-
- dialectOption :
- 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
- 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
-
- Prefix : '!' ;
-
- OpeningCommentDelim : '(*' ;
-
- ClosingCommentDelim : '*)' ;
-
- No whitespace is permitted between the tokens of a dialect tag.
-
- In the event that a source file contains multiple dialect tags, the first
- tag that contains a valid dialect option will be used and any subsequent
- dialect tags will be ignored. Ideally, a dialect tag should be placed
- at the beginning of a source file.
-
- An embedded dialect tag overrides a dialect option set via command line.
-
- Examples:
-
- ``(*!m2r10*) DEFINITION MODULE Foobar; ...``
- Use Modula2 R10 dialect to render this source file.
- ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...``
- Use PIM dialect with GNU extensions to render this source file.
-
-
- Algol Publication Mode:
-
- In Algol publication mode, source text is rendered for publication of
- algorithms in scientific papers and academic texts, following the format
- of the Revised Algol-60 Language Report. It is activated by passing
- one of two corresponding styles as an option:
-
- `algol`
- render reserved words lowercase underline boldface
- and builtins lowercase boldface italic
- `algol_nu`
- render reserved words lowercase boldface (no underlining)
- and builtins lowercase boldface italic
-
- The lexer automatically performs the required lowercase conversion when
- this mode is activated.
-
- Example:
-
- ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input``
- Render input file in Algol publication mode to LaTeX output.
-
-
- Rendering Mode of First Class ADT Identifiers:
-
- The rendering of standard library first class ADT identifiers is controlled
- by option flag "treat_stdlib_adts_as_builtins".
-
- When this option is turned on, standard library ADT identifiers are rendered
- as builtins. When it is turned off, they are rendered as ordinary library
- identifiers.
-
- `treat_stdlib_adts_as_builtins` (default: On)
-
- The option is useful for dialects that support ADTs as first class objects
- and provide ADTs in the standard library that would otherwise be built-in.
-
- At present, only Modula-2 R10 supports library ADTs as first class objects
- and therefore, no ADT identifiers are defined for any other dialects.
-
- Example:
-
- ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
- Render standard library ADTs as ordinary library types.
-
- .. versionadded:: 1.3
-
- .. versionchanged:: 2.1
- Added multi-dialect support.
- """
- name = 'Modula-2'
- url = 'http://www.modula2.org/'
- aliases = ['modula2', 'm2']
- filenames = ['*.def', '*.mod']
- mimetypes = ['text/x-modula2']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'dialecttags': [
- # PIM Dialect Tag
- (r'\(\*!m2pim\*\)', Comment.Special),
- # ISO Dialect Tag
- (r'\(\*!m2iso\*\)', Comment.Special),
- # M2R10 Dialect Tag
- (r'\(\*!m2r10\*\)', Comment.Special),
- # ObjM2 Dialect Tag
- (r'\(\*!objm2\*\)', Comment.Special),
- # Aglet Extensions Dialect Tag
- (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
- # GNU Extensions Dialect Tag
- (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
- # p1 Extensions Dialect Tag
- (r'\(\*!m2iso\+p1\*\)', Comment.Special),
- # XDS Extensions Dialect Tag
- (r'\(\*!m2iso\+xds\*\)', Comment.Special),
- ],
- 'identifiers': [
- (r'([a-zA-Z_$][\w$]*)', Name),
- ],
- 'prefixed_number_literals': [
- #
- # Base-2, whole number
- (r'0b[01]+(\'[01]+)*', Number.Bin),
- #
- # Base-16, whole number
- (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
- ],
- 'plain_number_literals': [
- #
- # Base-10, real number with exponent
- (r'[0-9]+(\'[0-9]+)*' # integral part
- r'\.[0-9]+(\'[0-9]+)*' # fractional part
- r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent
- Number.Float),
- #
- # Base-10, real number without exponent
- (r'[0-9]+(\'[0-9]+)*' # integral part
- r'\.[0-9]+(\'[0-9]+)*', # fractional part
- Number.Float),
- #
- # Base-10, whole number
- (r'[0-9]+(\'[0-9]+)*', Number.Integer),
- ],
- 'suffixed_number_literals': [
- #
- # Base-8, whole number
- (r'[0-7]+B', Number.Oct),
- #
- # Base-8, character code
- (r'[0-7]+C', Number.Oct),
- #
- # Base-16, number
- (r'[0-9A-F]+H', Number.Hex),
- ],
- 'string_literals': [
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ],
- 'digraph_operators': [
- # Dot Product Operator
- (r'\*\.', Operator),
- # Array Concatenation Operator
- (r'\+>', Operator), # M2R10 + ObjM2
- # Inequality Operator
- (r'<>', Operator), # ISO + PIM
- # Less-Or-Equal, Subset
- (r'<=', Operator),
- # Greater-Or-Equal, Superset
- (r'>=', Operator),
- # Identity Operator
- (r'==', Operator), # M2R10 + ObjM2
- # Type Conversion Operator
- (r'::', Operator), # M2R10 + ObjM2
- # Assignment Symbol
- (r':=', Operator),
- # Postfix Increment Mutator
- (r'\+\+', Operator), # M2R10 + ObjM2
- # Postfix Decrement Mutator
- (r'--', Operator), # M2R10 + ObjM2
- ],
- 'unigraph_operators': [
- # Arithmetic Operators
- (r'[+-]', Operator),
- (r'[*/]', Operator),
- # ISO 80000-2 compliant Set Difference Operator
- (r'\\', Operator), # M2R10 + ObjM2
- # Relational Operators
- (r'[=#<>]', Operator),
- # Dereferencing Operator
- (r'\^', Operator),
- # Dereferencing Operator Synonym
- (r'@', Operator), # ISO
- # Logical AND Operator Synonym
- (r'&', Operator), # PIM + ISO
- # Logical NOT Operator Synonym
- (r'~', Operator), # PIM + ISO
- # Smalltalk Message Prefix
- (r'`', Operator), # ObjM2
- ],
- 'digraph_punctuation': [
- # Range Constructor
- (r'\.\.', Punctuation),
- # Opening Chevron Bracket
- (r'<<', Punctuation), # M2R10 + ISO
- # Closing Chevron Bracket
- (r'>>', Punctuation), # M2R10 + ISO
- # Blueprint Punctuation
- (r'->', Punctuation), # M2R10 + ISO
- # Distinguish |# and # in M2 R10
- (r'\|#', Punctuation),
- # Distinguish ## and # in M2 R10
- (r'##', Punctuation),
- # Distinguish |* and * in M2 R10
- (r'\|\*', Punctuation),
- ],
- 'unigraph_punctuation': [
- # Common Punctuation
- (r'[()\[\]{},.:;|]', Punctuation),
- # Case Label Separator Synonym
- (r'!', Punctuation), # ISO
- # Blueprint Punctuation
- (r'\?', Punctuation), # M2R10 + ObjM2
- ],
- 'comments': [
- # Single Line Comment
- (r'^//.*?\n', Comment.Single), # M2R10 + ObjM2
- # Block Comment
- (r'\(\*([^$].*?)\*\)', Comment.Multiline),
- # Template Block Comment
- (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2
- ],
- 'pragmas': [
- # ISO Style Pragmas
- (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2
- # Pascal Style Pragmas
- (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM
- ],
- 'root': [
- include('whitespace'),
- include('dialecttags'),
- include('pragmas'),
- include('comments'),
- include('identifiers'),
- include('suffixed_number_literals'), # PIM + ISO
- include('prefixed_number_literals'), # M2R10 + ObjM2
- include('plain_number_literals'),
- include('string_literals'),
- include('digraph_punctuation'),
- include('digraph_operators'),
- include('unigraph_punctuation'),
- include('unigraph_operators'),
- ]
- }
-
-# C o m m o n D a t a s e t s
-
- # Common Reserved Words Dataset
- common_reserved_words = (
- # 37 common reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
- 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
- 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
- 'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
- 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
- )
-
- # Common Builtins Dataset
- common_builtins = (
- # 16 common builtins
- 'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
- 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
- 'TRUE',
- )
-
- # Common Pseudo-Module Builtins Dataset
- common_pseudo_builtins = (
- # 4 common pseudo builtins
- 'ADDRESS', 'BYTE', 'WORD', 'ADR'
- )
-
-# P I M M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for PIM Modula-2
- pim_lexemes_to_reject = (
- '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
- '+>', '->', '<<', '>>', '|#', '##',
- )
-
- # PIM Modula-2 Additional Reserved Words Dataset
- pim_additional_reserved_words = (
- # 3 additional reserved words
- 'EXPORT', 'QUALIFIED', 'WITH',
- )
-
- # PIM Modula-2 Additional Builtins Dataset
- pim_additional_builtins = (
- # 16 additional builtins
- 'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
- 'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
- )
-
- # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
- pim_additional_pseudo_builtins = (
- # 5 additional pseudo builtins
- 'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
- )
-
-# I S O M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for ISO Modula-2
- iso_lexemes_to_reject = (
- '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
- '<<', '>>', '|#', '##',
- )
-
- # ISO Modula-2 Additional Reserved Words Dataset
- iso_additional_reserved_words = (
- # 9 additional reserved words (ISO 10514-1)
- 'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
- 'REM', 'RETRY', 'WITH',
- # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
- 'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
- 'REVEAL', 'TRACED', 'UNSAFEGUARDED',
- )
-
- # ISO Modula-2 Additional Builtins Dataset
- iso_additional_builtins = (
- # 26 additional builtins (ISO 10514-1)
- 'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
- 'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE', 'LENGTH',
- 'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
- 'TRUNC', 'UNINTERRUPTIBLE', 'VAL',
- # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
- 'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
- )
-
- # ISO Modula-2 Additional Pseudo-Module Builtins Dataset
- iso_additional_pseudo_builtins = (
- # 14 additional builtins (SYSTEM)
- 'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
- 'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
- 'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
- # 13 additional builtins (COROUTINES)
- 'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
- 'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
- 'NEWCOROUTINE', 'PROT', 'TRANSFER',
- # 9 additional builtins (EXCEPTIONS)
- 'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
- 'ExceptionSource', 'GetMessage', 'IsCurrentSource',
- 'IsExceptionalExecution', 'RAISE',
- # 3 additional builtins (TERMINATION)
- 'TERMINATION', 'IsTerminating', 'HasHalted',
- # 4 additional builtins (M2EXCEPTION)
- 'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
- 'indexException', 'rangeException', 'caseSelectException',
- 'invalidLocation', 'functionException', 'wholeValueException',
- 'wholeDivException', 'realValueException', 'realDivException',
- 'complexValueException', 'complexDivException', 'protException',
- 'sysException', 'coException', 'exException',
- )
-
-# M o d u l a - 2 R 1 0 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for Modula-2 R10
- m2r10_lexemes_to_reject = (
- '!', '`', '@', '$', '%', '&', '<>',
- )
-
- # Modula-2 R10 reserved words in addition to the common set
- m2r10_additional_reserved_words = (
- # 12 additional reserved words
- 'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
- 'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
- # 2 additional reserved words with symbolic assembly option
- 'ASM', 'REG',
- )
-
- # Modula-2 R10 builtins in addition to the common set
- m2r10_additional_builtins = (
- # 26 additional builtins
- 'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
- 'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
- 'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
- 'UNICHAR', 'WRITE', 'WRITEF',
- )
-
- # Modula-2 R10 Additional Pseudo-Module Builtins Dataset
- m2r10_additional_pseudo_builtins = (
- # 13 additional builtins (TPROPERTIES)
- 'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
- 'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
- 'TMAXEXP', 'TMINEXP',
- # 4 additional builtins (CONVERSION)
- 'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
- # 35 additional builtins (UNSAFE)
- 'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
- 'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
- 'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
- 'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
- 'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
- # 11 additional builtins (ATOMIC)
- 'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
- 'BWNAND', 'BWOR', 'BWXOR',
- # 7 additional builtins (COMPILER)
- 'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
- 'HASH',
- # 5 additional builtins (ASSEMBLER)
- 'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
- )
-
-# O b j e c t i v e M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for Objective Modula-2
- objm2_lexemes_to_reject = (
- '!', '$', '%', '&', '<>',
- )
-
- # Objective Modula-2 Extensions
- # reserved words in addition to Modula-2 R10
- objm2_additional_reserved_words = (
- # 16 additional reserved words
- 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
- 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
- 'SUPER', 'TRY',
- )
-
- # Objective Modula-2 Extensions
- # builtins in addition to Modula-2 R10
- objm2_additional_builtins = (
- # 3 additional builtins
- 'OBJECT', 'NO', 'YES',
- )
-
- # Objective Modula-2 Extensions
- # pseudo-module builtins in addition to Modula-2 R10
- objm2_additional_pseudo_builtins = (
- # None
- )
-
-# A g l e t M o d u l a - 2 D a t a s e t s
-
- # Aglet Extensions
- # reserved words in addition to ISO Modula-2
- aglet_additional_reserved_words = (
- # None
- )
-
- # Aglet Extensions
- # builtins in addition to ISO Modula-2
- aglet_additional_builtins = (
- # 9 additional builtins
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
- )
-
- # Aglet Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- aglet_additional_pseudo_builtins = (
- # None
- )
-
-# G N U M o d u l a - 2 D a t a s e t s
-
- # GNU Extensions
- # reserved words in addition to PIM Modula-2
- gm2_additional_reserved_words = (
- # 10 additional reserved words
- 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
- '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
- )
-
- # GNU Extensions
- # builtins in addition to PIM Modula-2
- gm2_additional_builtins = (
- # 21 additional builtins
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
- 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
- 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
- )
-
- # GNU Extensions
- # pseudo-module builtins in addition to PIM Modula-2
- gm2_additional_pseudo_builtins = (
- # None
- )
-
-# p 1 M o d u l a - 2 D a t a s e t s
-
- # p1 Extensions
- # reserved words in addition to ISO Modula-2
- p1_additional_reserved_words = (
- # None
- )
-
- # p1 Extensions
- # builtins in addition to ISO Modula-2
- p1_additional_builtins = (
- # None
- )
-
- # p1 Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- p1_additional_pseudo_builtins = (
- # 1 additional builtin
- 'BCD',
- )
-
-# X D S M o d u l a - 2 D a t a s e t s
-
- # XDS Extensions
- # reserved words in addition to ISO Modula-2
- xds_additional_reserved_words = (
- # 1 additional reserved word
- 'SEQ',
- )
-
- # XDS Extensions
- # builtins in addition to ISO Modula-2
- xds_additional_builtins = (
- # 9 additional builtins
- 'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
- 'LONGCARD', 'SHORTCARD', 'SHORTINT',
- )
-
- # XDS Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- xds_additional_pseudo_builtins = (
- # 22 additional builtins (SYSTEM)
- 'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
- 'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
- 'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void',
- # 3 additional builtins (COMPILER)
- 'COMPILER', 'OPTION', 'EQUATION'
- )
-
-# P I M S t a n d a r d L i b r a r y D a t a s e t s
-
- # PIM Modula-2 Standard Library Modules Dataset
- pim_stdlib_module_identifiers = (
- 'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
- )
-
- # PIM Modula-2 Standard Library Types Dataset
- pim_stdlib_type_identifiers = (
- 'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
- 'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
- 'DirectoryCommand',
- )
-
- # PIM Modula-2 Standard Library Procedures Dataset
- pim_stdlib_proc_identifiers = (
- 'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
- 'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
- 'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
- 'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
- 'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
- 'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
- 'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
- 'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
- 'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
- 'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE',
- )
-
- # PIM Modula-2 Standard Library Variables Dataset
- pim_stdlib_var_identifiers = (
- 'Done', 'termCH', 'in', 'out'
- )
-
- # PIM Modula-2 Standard Library Constants Dataset
- pim_stdlib_const_identifiers = (
- 'EOL',
- )
-
-# I S O S t a n d a r d L i b r a r y D a t a s e t s
-
- # ISO Modula-2 Standard Library Modules Dataset
- iso_stdlib_module_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Types Dataset
- iso_stdlib_type_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Procedures Dataset
- iso_stdlib_proc_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Variables Dataset
- iso_stdlib_var_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Constants Dataset
- iso_stdlib_const_identifiers = (
- # TO DO
- )
-
-# M 2 R 1 0 S t a n d a r d L i b r a r y D a t a s e t s
-
- # Modula-2 R10 Standard Library ADTs Dataset
- m2r10_stdlib_adt_identifiers = (
- 'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
- 'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
- 'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
- 'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
- 'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
- 'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
- 'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
- 'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
- 'INT64', 'INT128', 'STRING', 'UNISTRING',
- )
-
- # Modula-2 R10 Standard Library Blueprints Dataset
- m2r10_stdlib_blueprint_identifiers = (
- 'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
- 'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
- 'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
- 'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
- 'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
- 'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
- 'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
- )
-
- # Modula-2 R10 Standard Library Modules Dataset
- m2r10_stdlib_module_identifiers = (
- 'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
- 'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
- 'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
- 'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
- 'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
- )
-
- # Modula-2 R10 Standard Library Types Dataset
- m2r10_stdlib_type_identifiers = (
- 'File', 'Status',
- # TO BE COMPLETED
- )
-
- # Modula-2 R10 Standard Library Procedures Dataset
- m2r10_stdlib_proc_identifiers = (
- 'ALLOCATE', 'DEALLOCATE', 'SIZE',
- # TO BE COMPLETED
- )
-
- # Modula-2 R10 Standard Library Variables Dataset
- m2r10_stdlib_var_identifiers = (
- 'stdIn', 'stdOut', 'stdErr',
- )
-
- # Modula-2 R10 Standard Library Constants Dataset
- m2r10_stdlib_const_identifiers = (
- 'pi', 'tau',
- )
-
-# D i a l e c t s
-
- # Dialect modes
- dialects = (
- 'unknown',
- 'm2pim', 'm2iso', 'm2r10', 'objm2',
- 'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
- )
-
-# D a t a b a s e s
-
- # Lexemes to Mark as Errors Database
- lexemes_to_reject_db = {
- # Lexemes to reject for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Lexemes to reject for PIM Modula-2
- 'm2pim': (
- pim_lexemes_to_reject,
- ),
- # Lexemes to reject for ISO Modula-2
- 'm2iso': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for Modula-2 R10
- 'm2r10': (
- m2r10_lexemes_to_reject,
- ),
- # Lexemes to reject for Objective Modula-2
- 'objm2': (
- objm2_lexemes_to_reject,
- ),
- # Lexemes to reject for Aglet Modula-2
- 'm2iso+aglet': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for GNU Modula-2
- 'm2pim+gm2': (
- pim_lexemes_to_reject,
- ),
- # Lexemes to reject for p1 Modula-2
- 'm2iso+p1': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for XDS Modula-2
- 'm2iso+xds': (
- iso_lexemes_to_reject,
- ),
- }
-
- # Reserved Words Database
- reserved_words_db = {
- # Reserved words for unknown dialect
- 'unknown': (
- common_reserved_words,
- pim_additional_reserved_words,
- iso_additional_reserved_words,
- m2r10_additional_reserved_words,
- ),
-
- # Reserved words for PIM Modula-2
- 'm2pim': (
- common_reserved_words,
- pim_additional_reserved_words,
- ),
-
- # Reserved words for ISO Modula-2
- 'm2iso': (
- common_reserved_words,
- iso_additional_reserved_words,
- ),
-
- # Reserved words for Modula-2 R10
- 'm2r10': (
- common_reserved_words,
- m2r10_additional_reserved_words,
- ),
-
- # Reserved words for Objective Modula-2
- 'objm2': (
- common_reserved_words,
- m2r10_additional_reserved_words,
- objm2_additional_reserved_words,
- ),
-
- # Reserved words for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_reserved_words,
- iso_additional_reserved_words,
- aglet_additional_reserved_words,
- ),
-
- # Reserved words for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_reserved_words,
- pim_additional_reserved_words,
- gm2_additional_reserved_words,
- ),
-
- # Reserved words for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_reserved_words,
- iso_additional_reserved_words,
- p1_additional_reserved_words,
- ),
-
- # Reserved words for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_reserved_words,
- iso_additional_reserved_words,
- xds_additional_reserved_words,
- ),
- }
-
- # Builtins Database
- builtins_db = {
- # Builtins for unknown dialect
- 'unknown': (
- common_builtins,
- pim_additional_builtins,
- iso_additional_builtins,
- m2r10_additional_builtins,
- ),
-
- # Builtins for PIM Modula-2
- 'm2pim': (
- common_builtins,
- pim_additional_builtins,
- ),
-
- # Builtins for ISO Modula-2
- 'm2iso': (
- common_builtins,
- iso_additional_builtins,
- ),
-
- # Builtins for Modula-2 R10
- 'm2r10': (
- common_builtins,
- m2r10_additional_builtins,
- ),
-
- # Builtins for Objective Modula-2
- 'objm2': (
- common_builtins,
- m2r10_additional_builtins,
- objm2_additional_builtins,
- ),
-
- # Builtins for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_builtins,
- iso_additional_builtins,
- aglet_additional_builtins,
- ),
-
- # Builtins for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_builtins,
- pim_additional_builtins,
- gm2_additional_builtins,
- ),
-
- # Builtins for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_builtins,
- iso_additional_builtins,
- p1_additional_builtins,
- ),
-
- # Builtins for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_builtins,
- iso_additional_builtins,
- xds_additional_builtins,
- ),
- }
-
- # Pseudo-Module Builtins Database
- pseudo_builtins_db = {
- # Builtins for unknown dialect
- 'unknown': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- iso_additional_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- ),
-
- # Builtins for PIM Modula-2
- 'm2pim': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- ),
-
- # Builtins for ISO Modula-2
- 'm2iso': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- ),
-
- # Builtins for Modula-2 R10
- 'm2r10': (
- common_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- ),
-
- # Builtins for Objective Modula-2
- 'objm2': (
- common_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- objm2_additional_pseudo_builtins,
- ),
-
- # Builtins for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- aglet_additional_pseudo_builtins,
- ),
-
- # Builtins for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- gm2_additional_pseudo_builtins,
- ),
-
- # Builtins for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- p1_additional_pseudo_builtins,
- ),
-
- # Builtins for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- xds_additional_pseudo_builtins,
- ),
- }
-
- # Standard Library ADTs Database
- stdlib_adts_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library ADTs for PIM Modula-2
- 'm2pim': (
- # No first class library types
- ),
-
- # Standard Library ADTs for ISO Modula-2
- 'm2iso': (
- # No first class library types
- ),
-
- # Standard Library ADTs for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library ADTs for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library ADTs for Aglet Modula-2
- 'm2iso+aglet': (
- # No first class library types
- ),
-
- # Standard Library ADTs for GNU Modula-2
- 'm2pim+gm2': (
- # No first class library types
- ),
-
- # Standard Library ADTs for p1 Modula-2
- 'm2iso+p1': (
- # No first class library types
- ),
-
- # Standard Library ADTs for XDS Modula-2
- 'm2iso+xds': (
- # No first class library types
- ),
- }
-
- # Standard Library Modules Database
- stdlib_modules_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Modules for PIM Modula-2
- 'm2pim': (
- pim_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for ISO Modula-2
- 'm2iso': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_blueprint_identifiers,
- m2r10_stdlib_module_identifiers,
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library Modules for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_blueprint_identifiers,
- m2r10_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_module_identifiers,
- ),
- }
-
- # Standard Library Types Database
- stdlib_types_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Types for PIM Modula-2
- 'm2pim': (
- pim_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for ISO Modula-2
- 'm2iso': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_type_identifiers,
- ),
- }
-
- # Standard Library Procedures Database
- stdlib_procedures_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Procedures for PIM Modula-2
- 'm2pim': (
- pim_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for ISO Modula-2
- 'm2iso': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_proc_identifiers,
- ),
- }
-
- # Standard Library Variables Database
- stdlib_variables_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Variables for PIM Modula-2
- 'm2pim': (
- pim_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for ISO Modula-2
- 'm2iso': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_var_identifiers,
- ),
- }
-
- # Standard Library Constants Database
- stdlib_constants_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Constants for PIM Modula-2
- 'm2pim': (
- pim_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for ISO Modula-2
- 'm2iso': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_const_identifiers,
- ),
- }
-
-# M e t h o d s
-
- # initialise a lexer instance
- def __init__(self, **options):
- #
- # check dialect options
- #
- dialects = get_list_opt(options, 'dialect', [])
- #
- for dialect_option in dialects:
- if dialect_option in self.dialects[1:-1]:
- # valid dialect option found
- self.set_dialect(dialect_option)
- break
- #
- # Fallback Mode (DEFAULT)
- else:
- # no valid dialect option
- self.set_dialect('unknown')
- #
- self.dialect_set_by_tag = False
- #
- # check style options
- #
- styles = get_list_opt(options, 'style', [])
- #
- # use lowercase mode for Algol style
- if 'algol' in styles or 'algol_nu' in styles:
- self.algol_publication_mode = True
- else:
- self.algol_publication_mode = False
- #
- # Check option flags
- #
- self.treat_stdlib_adts_as_builtins = get_bool_opt(
- options, 'treat_stdlib_adts_as_builtins', True)
- #
- # call superclass initialiser
- RegexLexer.__init__(self, **options)
-
- # Set lexer to a specified dialect
- def set_dialect(self, dialect_id):
- #
- # if __debug__:
- # print 'entered set_dialect with arg: ', dialect_id
- #
- # check dialect name against known dialects
- if dialect_id not in self.dialects:
- dialect = 'unknown' # default
- else:
- dialect = dialect_id
- #
- # compose lexemes to reject set
- lexemes_to_reject_set = set()
- # add each list of reject lexemes for this dialect
- for list in self.lexemes_to_reject_db[dialect]:
- lexemes_to_reject_set.update(set(list))
- #
- # compose reserved words set
- reswords_set = set()
- # add each list of reserved words for this dialect
- for list in self.reserved_words_db[dialect]:
- reswords_set.update(set(list))
- #
- # compose builtins set
- builtins_set = set()
- # add each list of builtins for this dialect excluding reserved words
- for list in self.builtins_db[dialect]:
- builtins_set.update(set(list).difference(reswords_set))
- #
- # compose pseudo-builtins set
- pseudo_builtins_set = set()
- # add each list of pseudo-builtins for this dialect excluding reserved words
- for list in self.pseudo_builtins_db[dialect]:
- pseudo_builtins_set.update(set(list).difference(reswords_set))
- #
- # compose ADTs set
- adts_set = set()
- # add each list of ADTs for this dialect excluding reserved words
- for list in self.stdlib_adts_db[dialect]:
- adts_set.update(set(list).difference(reswords_set))
- #
- # compose modules set
- modules_set = set()
- # add each list of modules for this dialect excluding builtins
- for list in self.stdlib_modules_db[dialect]:
- modules_set.update(set(list).difference(builtins_set))
- #
- # compose types set
- types_set = set()
- # add each list of types for this dialect excluding builtins
- for list in self.stdlib_types_db[dialect]:
- types_set.update(set(list).difference(builtins_set))
- #
- # compose procedures set
- procedures_set = set()
- # add each list of procedures for this dialect excluding builtins
- for list in self.stdlib_procedures_db[dialect]:
- procedures_set.update(set(list).difference(builtins_set))
- #
- # compose variables set
- variables_set = set()
- # add each list of variables for this dialect excluding builtins
- for list in self.stdlib_variables_db[dialect]:
- variables_set.update(set(list).difference(builtins_set))
- #
- # compose constants set
- constants_set = set()
- # add each list of constants for this dialect excluding builtins
- for list in self.stdlib_constants_db[dialect]:
- constants_set.update(set(list).difference(builtins_set))
- #
- # update lexer state
- self.dialect = dialect
- self.lexemes_to_reject = lexemes_to_reject_set
- self.reserved_words = reswords_set
- self.builtins = builtins_set
- self.pseudo_builtins = pseudo_builtins_set
- self.adts = adts_set
- self.modules = modules_set
- self.types = types_set
- self.procedures = procedures_set
- self.variables = variables_set
- self.constants = constants_set
- #
- # if __debug__:
- # print 'exiting set_dialect'
- # print ' self.dialect: ', self.dialect
- # print ' self.lexemes_to_reject: ', self.lexemes_to_reject
- # print ' self.reserved_words: ', self.reserved_words
- # print ' self.builtins: ', self.builtins
- # print ' self.pseudo_builtins: ', self.pseudo_builtins
- # print ' self.adts: ', self.adts
- # print ' self.modules: ', self.modules
- # print ' self.types: ', self.types
- # print ' self.procedures: ', self.procedures
- # print ' self.variables: ', self.variables
- # print ' self.types: ', self.types
- # print ' self.constants: ', self.constants
-
- # Extracts a dialect name from a dialect tag comment string and checks
- # the extracted name against known dialects. If a match is found, the
- # matching name is returned; otherwise dialect id 'unknown' is returned.
- def get_dialect_from_dialect_tag(self, dialect_tag):
- #
- # if __debug__:
- # print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
- #
- # constants
- left_tag_delim = '(*!'
- right_tag_delim = '*)'
- left_tag_delim_len = len(left_tag_delim)
- right_tag_delim_len = len(right_tag_delim)
- indicator_start = left_tag_delim_len
- indicator_end = -(right_tag_delim_len)
- #
- # check comment string for dialect indicator
- if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
- and dialect_tag.startswith(left_tag_delim) \
- and dialect_tag.endswith(right_tag_delim):
- #
- # if __debug__:
- # print 'dialect tag found'
- #
- # extract dialect indicator
- indicator = dialect_tag[indicator_start:indicator_end]
- #
- # if __debug__:
- # print 'extracted: ', indicator
- #
- # check against known dialects
- for index in range(1, len(self.dialects)):
- #
- # if __debug__:
- # print 'dialects[', index, ']: ', self.dialects[index]
- #
- if indicator == self.dialects[index]:
- #
- # if __debug__:
- # print 'matching dialect found'
- #
- # indicator matches known dialect
- return indicator
- else:
- # indicator does not match any dialect
- return 'unknown' # default
- else:
- # invalid indicator string
- return 'unknown' # default
-
- # intercept the token stream, modify token attributes and return them
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- #
- # check for dialect tag if dialect has not been set by tag
- if not self.dialect_set_by_tag and token == Comment.Special:
- indicated_dialect = self.get_dialect_from_dialect_tag(value)
- if indicated_dialect != 'unknown':
- # token is a dialect indicator
- # reset reserved words and builtins
- self.set_dialect(indicated_dialect)
- self.dialect_set_by_tag = True
- #
- # check for reserved words, predefined and stdlib identifiers
- if token is Name:
- if value in self.reserved_words:
- token = Keyword.Reserved
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.builtins:
- token = Name.Builtin
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.pseudo_builtins:
- token = Name.Builtin.Pseudo
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.adts:
- if not self.treat_stdlib_adts_as_builtins:
- token = Name.Namespace
- else:
- token = Name.Builtin.Pseudo
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.modules:
- token = Name.Namespace
- #
- elif value in self.types:
- token = Name.Class
- #
- elif value in self.procedures:
- token = Name.Function
- #
- elif value in self.variables:
- token = Name.Variable
- #
- elif value in self.constants:
- token = Name.Constant
- #
- elif token in Number:
- #
- # mark prefix number literals as error for PIM and ISO dialects
- if self.dialect not in ('unknown', 'm2r10', 'objm2'):
- if "'" in value or value[0:2] in ('0b', '0x', '0u'):
- token = Error
- #
- elif self.dialect in ('m2r10', 'objm2'):
- # mark base-8 number literals as errors for M2 R10 and ObjM2
- if token is Number.Oct:
- token = Error
- # mark suffix base-16 literals as errors for M2 R10 and ObjM2
- elif token is Number.Hex and 'H' in value:
- token = Error
- # mark real numbers with E as errors for M2 R10 and ObjM2
- elif token is Number.Float and 'E' in value:
- token = Error
- #
- elif token in Comment:
- #
- # mark single line comment as error for PIM and ISO dialects
- if token is Comment.Single:
- if self.dialect not in ('unknown', 'm2r10', 'objm2'):
- token = Error
- #
- if token is Comment.Preproc:
- # mark ISO pragma as error for PIM dialects
- if value.startswith('<*') and \
- self.dialect.startswith('m2pim'):
- token = Error
- # mark PIM pragma as comment for other dialects
- elif value.startswith('(*$') and \
- self.dialect != 'unknown' and \
- not self.dialect.startswith('m2pim'):
- token = Comment.Multiline
- #
- else: # token is neither Name nor Comment
- #
- # mark lexemes matching the dialect's error token set as errors
- if value in self.lexemes_to_reject:
- token = Error
- #
- # substitute lexemes when in Algol mode
- if self.algol_publication_mode:
- if value == '#':
- value = '≠'
- elif value == '<=':
- value = '≤'
- elif value == '>=':
- value = '≥'
- elif value == '==':
- value = '≡'
- elif value == '*.':
- value = '•'
-
- # return result
- yield index, token, value
-
- def analyse_text(text):
- """It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE
- instead."""
-
- # Check if this looks like Pascal; if not, bail out early
- if not ('(*' in text and '*)' in text and ':=' in text):
- return
-
- result = 0
- # PROCEDURE is used in Modula-2
- if re.search(r'\bPROCEDURE\b', text):
- result += 0.6
-
- # FUNCTION is valid in Pascal but not in Modula-2
- if re.search(r'\bFUNCTION\b', text):
- result = 0.0
-
- return result
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/monte.py b/venv/lib/python3.11/site-packages/pygments/lexers/monte.py
deleted file mode 100644
index 18f5a03..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/monte.py
+++ /dev/null
@@ -1,204 +0,0 @@
-"""
- pygments.lexers.monte
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Monte programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
- Punctuation, String, Whitespace
-from pygments.lexer import RegexLexer, include, words
-
-__all__ = ['MonteLexer']
-
-
-# `var` handled separately
-# `interface` handled separately
-_declarations = ['bind', 'def', 'fn', 'object']
-_methods = ['method', 'to']
-_keywords = [
- 'as', 'break', 'catch', 'continue', 'else', 'escape', 'exit', 'exports',
- 'extends', 'finally', 'for', 'guards', 'if', 'implements', 'import',
- 'in', 'match', 'meta', 'pass', 'return', 'switch', 'try', 'via', 'when',
- 'while',
-]
-_operators = [
- # Unary
- '~', '!',
- # Binary
- '+', '-', '*', '/', '%', '**', '&', '|', '^', '<<', '>>',
- # Binary augmented
- '+=', '-=', '*=', '/=', '%=', '**=', '&=', '|=', '^=', '<<=', '>>=',
- # Comparison
- '==', '!=', '<', '<=', '>', '>=', '<=>',
- # Patterns and assignment
- ':=', '?', '=~', '!~', '=>',
- # Calls and sends
- '.', '<-', '->',
-]
-_escape_pattern = (
- r'(?:\\x[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
- r'\\["\'\\bftnr])')
-# _char = _escape_chars + [('.', String.Char)]
-_identifier = r'[_a-zA-Z]\w*'
-
-_constants = [
- # Void constants
- 'null',
- # Bool constants
- 'false', 'true',
- # Double constants
- 'Infinity', 'NaN',
- # Special objects
- 'M', 'Ref', 'throw', 'traceln',
-]
-
-_guards = [
- 'Any', 'Binding', 'Bool', 'Bytes', 'Char', 'DeepFrozen', 'Double',
- 'Empty', 'Int', 'List', 'Map', 'Near', 'NullOk', 'Same', 'Selfless',
- 'Set', 'Str', 'SubrangeGuard', 'Transparent', 'Void',
-]
-
-_safeScope = [
- '_accumulateList', '_accumulateMap', '_auditedBy', '_bind',
- '_booleanFlow', '_comparer', '_equalizer', '_iterForever', '_loop',
- '_makeBytes', '_makeDouble', '_makeFinalSlot', '_makeInt', '_makeList',
- '_makeMap', '_makeMessageDesc', '_makeOrderedSpace', '_makeParamDesc',
- '_makeProtocolDesc', '_makeSourceSpan', '_makeString', '_makeVarSlot',
- '_makeVerbFacet', '_mapExtract', '_matchSame', '_quasiMatcher',
- '_slotToBinding', '_splitList', '_suchThat', '_switchFailed',
- '_validateFor', 'b__quasiParser', 'eval', 'import', 'm__quasiParser',
- 'makeBrandPair', 'makeLazySlot', 'safeScope', 'simple__quasiParser',
-]
-
-
-class MonteLexer(RegexLexer):
- """
- Lexer for the Monte programming language.
-
- .. versionadded:: 2.2
- """
- name = 'Monte'
- url = 'https://monte.readthedocs.io/'
- aliases = ['monte']
- filenames = ['*.mt']
-
- tokens = {
- 'root': [
- # Comments
- (r'#[^\n]*\n', Comment),
-
- # Docstrings
- # Apologies for the non-greedy matcher here.
- (r'/\*\*.*?\*/', String.Doc),
-
- # `var` declarations
- (r'\bvar\b', Keyword.Declaration, 'var'),
-
- # `interface` declarations
- (r'\binterface\b', Keyword.Declaration, 'interface'),
-
- # method declarations
- (words(_methods, prefix='\\b', suffix='\\b'),
- Keyword, 'method'),
-
- # All other declarations
- (words(_declarations, prefix='\\b', suffix='\\b'),
- Keyword.Declaration),
-
- # Keywords
- (words(_keywords, prefix='\\b', suffix='\\b'), Keyword),
-
- # Literals
- ('[+-]?0x[_0-9a-fA-F]+', Number.Hex),
- (r'[+-]?[_0-9]+\.[_0-9]*([eE][+-]?[_0-9]+)?', Number.Float),
- ('[+-]?[_0-9]+', Number.Integer),
- ("'", String.Double, 'char'),
- ('"', String.Double, 'string'),
-
- # Quasiliterals
- ('`', String.Backtick, 'ql'),
-
- # Operators
- (words(_operators), Operator),
-
- # Verb operators
- (_identifier + '=', Operator.Word),
-
- # Safe scope constants
- (words(_constants, prefix='\\b', suffix='\\b'),
- Keyword.Pseudo),
-
- # Safe scope guards
- (words(_guards, prefix='\\b', suffix='\\b'), Keyword.Type),
-
- # All other safe scope names
- (words(_safeScope, prefix='\\b', suffix='\\b'),
- Name.Builtin),
-
- # Identifiers
- (_identifier, Name),
-
- # Punctuation
- (r'\(|\)|\{|\}|\[|\]|:|,', Punctuation),
-
- # Whitespace
- (' +', Whitespace),
-
- # Definite lexer errors
- ('=', Error),
- ],
- 'char': [
- # It is definitely an error to have a char of width == 0.
- ("'", Error, 'root'),
- (_escape_pattern, String.Escape, 'charEnd'),
- ('.', String.Char, 'charEnd'),
- ],
- 'charEnd': [
- ("'", String.Char, '#pop:2'),
- # It is definitely an error to have a char of width > 1.
- ('.', Error),
- ],
- # The state of things coming into an interface.
- 'interface': [
- (' +', Whitespace),
- (_identifier, Name.Class, '#pop'),
- include('root'),
- ],
- # The state of things coming into a method.
- 'method': [
- (' +', Whitespace),
- (_identifier, Name.Function, '#pop'),
- include('root'),
- ],
- 'string': [
- ('"', String.Double, 'root'),
- (_escape_pattern, String.Escape),
- (r'\n', String.Double),
- ('.', String.Double),
- ],
- 'ql': [
- ('`', String.Backtick, 'root'),
- (r'\$' + _escape_pattern, String.Escape),
- (r'\$\$', String.Escape),
- (r'@@', String.Escape),
- (r'\$\{', String.Interpol, 'qlNest'),
- (r'@\{', String.Interpol, 'qlNest'),
- (r'\$' + _identifier, Name),
- ('@' + _identifier, Name),
- ('.', String.Backtick),
- ],
- 'qlNest': [
- (r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- # The state of things immediately following `var`.
- 'var': [
- (' +', Whitespace),
- (_identifier, Name.Variable, '#pop'),
- include('root'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/mosel.py b/venv/lib/python3.11/site-packages/pygments/lexers/mosel.py
deleted file mode 100644
index f3c86cc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/mosel.py
+++ /dev/null
@@ -1,447 +0,0 @@
-"""
- pygments.lexers.mosel
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Mosel language.
- http://www.fico.com/en/products/fico-xpress-optimization
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['MoselLexer']
-
-FUNCTIONS = (
- # core functions
- '_',
- 'abs',
- 'arctan',
- 'asproc',
- 'assert',
- 'bitflip',
- 'bitneg',
- 'bitset',
- 'bitshift',
- 'bittest',
- 'bitval',
- 'ceil',
- 'cos',
- 'create',
- 'currentdate',
- 'currenttime',
- 'cutelt',
- 'cutfirst',
- 'cuthead',
- 'cutlast',
- 'cuttail',
- 'datablock',
- 'delcell',
- 'exists',
- 'exit',
- 'exp',
- 'exportprob',
- 'fclose',
- 'fflush',
- 'finalize',
- 'findfirst',
- 'findlast',
- 'floor',
- 'fopen',
- 'fselect',
- 'fskipline',
- 'fwrite',
- 'fwrite_',
- 'fwriteln',
- 'fwriteln_',
- 'getact',
- 'getcoeff',
- 'getcoeffs',
- 'getdual',
- 'getelt',
- 'getfid',
- 'getfirst',
- 'getfname',
- 'gethead',
- 'getlast',
- 'getobjval',
- 'getparam',
- 'getrcost',
- 'getreadcnt',
- 'getreverse',
- 'getsize',
- 'getslack',
- 'getsol',
- 'gettail',
- 'gettype',
- 'getvars',
- 'isdynamic',
- 'iseof',
- 'isfinite',
- 'ishidden',
- 'isinf',
- 'isnan',
- 'isodd',
- 'ln',
- 'localsetparam',
- 'log',
- 'makesos1',
- 'makesos2',
- 'maxlist',
- 'memoryuse',
- 'minlist',
- 'newmuid',
- 'publish',
- 'random',
- 'read',
- 'readln',
- 'reset',
- 'restoreparam',
- 'reverse',
- 'round',
- 'setcoeff',
- 'sethidden',
- 'setioerr',
- 'setmatherr',
- 'setname',
- 'setparam',
- 'setrandseed',
- 'setrange',
- 'settype',
- 'sin',
- 'splithead',
- 'splittail',
- 'sqrt',
- 'strfmt',
- 'substr',
- 'timestamp',
- 'unpublish',
- 'versionnum',
- 'versionstr',
- 'write',
- 'write_',
- 'writeln',
- 'writeln_',
-
- # mosel exam mmxprs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
- 'addcut',
- 'addcuts',
- 'addmipsol',
- 'basisstability',
- 'calcsolinfo',
- 'clearmipdir',
- 'clearmodcut',
- 'command',
- 'copysoltoinit',
- 'crossoverlpsol',
- 'defdelayedrows',
- 'defsecurevecs',
- 'delcuts',
- 'dropcuts',
- 'estimatemarginals',
- 'fixglobal',
- 'flushmsgq',
- 'getbstat',
- 'getcnlist',
- 'getcplist',
- 'getdualray',
- 'getiis',
- 'getiissense',
- 'getiistype',
- 'getinfcause',
- 'getinfeas',
- 'getlb',
- 'getlct',
- 'getleft',
- 'getloadedlinctrs',
- 'getloadedmpvars',
- 'getname',
- 'getprimalray',
- 'getprobstat',
- 'getrange',
- 'getright',
- 'getsensrng',
- 'getsize',
- 'getsol',
- 'gettype',
- 'getub',
- 'getvars',
- 'gety',
- 'hasfeature',
- 'implies',
- 'indicator',
- 'initglobal',
- 'ishidden',
- 'isiisvalid',
- 'isintegral',
- 'loadbasis',
- 'loadcuts',
- 'loadlpsol',
- 'loadmipsol',
- 'loadprob',
- 'maximise',
- 'maximize',
- 'minimise',
- 'minimize',
- 'postsolve',
- 'readbasis',
- 'readdirs',
- 'readsol',
- 'refinemipsol',
- 'rejectintsol',
- 'repairinfeas',
- 'repairinfeas_deprec',
- 'resetbasis',
- 'resetiis',
- 'resetsol',
- 'savebasis',
- 'savemipsol',
- 'savesol',
- 'savestate',
- 'selectsol',
- 'setarchconsistency',
- 'setbstat',
- 'setcallback',
- 'setcbcutoff',
- 'setgndata',
- 'sethidden',
- 'setlb',
- 'setmipdir',
- 'setmodcut',
- 'setsol',
- 'setub',
- 'setucbdata',
- 'stopoptimise',
- 'stopoptimize',
- 'storecut',
- 'storecuts',
- 'unloadprob',
- 'uselastbarsol',
- 'writebasis',
- 'writedirs',
- 'writeprob',
- 'writesol',
- 'xor',
- 'xprs_addctr',
- 'xprs_addindic',
-
- # mosel exam mmsystem | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
- 'addmonths',
- 'copytext',
- 'cuttext',
- 'deltext',
- 'endswith',
- 'erase',
- 'expandpath',
- 'fcopy',
- 'fdelete',
- 'findfiles',
- 'findtext',
- 'fmove',
- 'formattext',
- 'getasnumber',
- 'getchar',
- 'getcwd',
- 'getdate',
- 'getday',
- 'getdaynum',
- 'getdays',
- 'getdirsep',
- 'getdsoparam',
- 'getendparse',
- 'getenv',
- 'getfsize',
- 'getfstat',
- 'getftime',
- 'gethour',
- 'getminute',
- 'getmonth',
- 'getmsec',
- 'getoserrmsg',
- 'getoserror',
- 'getpathsep',
- 'getqtype',
- 'getsecond',
- 'getsepchar',
- 'getsize',
- 'getstart',
- 'getsucc',
- 'getsysinfo',
- 'getsysstat',
- 'gettime',
- 'gettmpdir',
- 'gettrim',
- 'getweekday',
- 'getyear',
- 'inserttext',
- 'isvalid',
- 'jointext',
- 'makedir',
- 'makepath',
- 'newtar',
- 'newzip',
- 'nextfield',
- 'openpipe',
- 'parseextn',
- 'parseint',
- 'parsereal',
- 'parsetext',
- 'pastetext',
- 'pathmatch',
- 'pathsplit',
- 'qsort',
- 'quote',
- 'readtextline',
- 'regmatch',
- 'regreplace',
- 'removedir',
- 'removefiles',
- 'setchar',
- 'setdate',
- 'setday',
- 'setdsoparam',
- 'setendparse',
- 'setenv',
- 'sethour',
- 'setminute',
- 'setmonth',
- 'setmsec',
- 'setoserror',
- 'setqtype',
- 'setsecond',
- 'setsepchar',
- 'setstart',
- 'setsucc',
- 'settime',
- 'settrim',
- 'setyear',
- 'sleep',
- 'splittext',
- 'startswith',
- 'system',
- 'tarlist',
- 'textfmt',
- 'tolower',
- 'toupper',
- 'trim',
- 'untar',
- 'unzip',
- 'ziplist',
-
- # mosel exam mmjobs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
- 'canceltimer',
- 'clearaliases',
- 'compile',
- 'connect',
- 'detach',
- 'disconnect',
- 'dropnextevent',
- 'findxsrvs',
- 'getaliases',
- 'getannidents',
- 'getannotations',
- 'getbanner',
- 'getclass',
- 'getdsoprop',
- 'getdsopropnum',
- 'getexitcode',
- 'getfromgid',
- 'getfromid',
- 'getfromuid',
- 'getgid',
- 'gethostalias',
- 'getid',
- 'getmodprop',
- 'getmodpropnum',
- 'getnextevent',
- 'getnode',
- 'getrmtid',
- 'getstatus',
- 'getsysinfo',
- 'gettimer',
- 'getuid',
- 'getvalue',
- 'isqueueempty',
- 'load',
- 'nullevent',
- 'peeknextevent',
- 'resetmodpar',
- 'run',
- 'send',
- 'setcontrol',
- 'setdefstream',
- 'setgid',
- 'sethostalias',
- 'setmodpar',
- 'settimer',
- 'setuid',
- 'setworkdir',
- 'stop',
- 'unload',
- 'wait',
- 'waitexpired',
- 'waitfor',
- 'waitforend',
-)
-
-
-class MoselLexer(RegexLexer):
- """
- For the Mosel optimization language.
-
- .. versionadded:: 2.6
- """
- name = 'Mosel'
- aliases = ['mosel']
- filenames = ['*.mos']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text.Whitespace),
- (r'!.*?\n', Comment.Single),
- (r'\(!(.|\n)*?!\)', Comment.Multiline),
- (words((
- 'and', 'as', 'break', 'case', 'count', 'declarations', 'do',
- 'dynamic', 'elif', 'else', 'end-', 'end', 'evaluation', 'false',
- 'forall', 'forward', 'from', 'function', 'hashmap', 'if',
- 'imports', 'include', 'initialisations', 'initializations', 'inter',
- 'max', 'min', 'model', 'namespace', 'next', 'not', 'nsgroup',
- 'nssearch', 'of', 'options', 'or', 'package', 'parameters',
- 'procedure', 'public', 'prod', 'record', 'repeat', 'requirements',
- 'return', 'sum', 'then', 'to', 'true', 'union', 'until', 'uses',
- 'version', 'while', 'with'), prefix=r'\b', suffix=r'\b'),
- Keyword.Builtin),
- (words((
- 'range', 'array', 'set', 'list', 'mpvar', 'mpproblem', 'linctr',
- 'nlctr', 'integer', 'string', 'real', 'boolean', 'text', 'time',
- 'date', 'datetime', 'returned', 'Model', 'Mosel', 'counter',
- 'xmldoc', 'is_sos1', 'is_sos2', 'is_integer', 'is_binary',
- 'is_continuous', 'is_free', 'is_semcont', 'is_semint',
- 'is_partint'), prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
- (r'(\+|\-|\*|/|=|<=|>=|\||\^|<|>|<>|\.\.|\.|:=|::|:|in|mod|div)',
- Operator),
- (r'[()\[\]{},;]+', Punctuation),
- (words(FUNCTIONS, prefix=r'\b', suffix=r'\b'), Name.Function),
- (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
- (r'\d+([eE][+-]?\d+)?', Number.Integer),
- (r'[+-]?Infinity', Number.Integer),
- (r'0[xX][0-9a-fA-F]+', Number),
- (r'"', String.Double, 'double_quote'),
- (r'\'', String.Single, 'single_quote'),
- (r'(\w+|(\.(?!\.)))', Text),
- ],
- 'single_quote': [
- (r'\'', String.Single, '#pop'),
- (r'[^\']+', String.Single),
- ],
- 'double_quote': [
- (r'(\\"|\\[0-7]{1,3}\D|\\[abfnrtv]|\\\\)', String.Escape),
- (r'\"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ncl.py b/venv/lib/python3.11/site-packages/pygments/lexers/ncl.py
deleted file mode 100644
index b1ec145..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ncl.py
+++ /dev/null
@@ -1,893 +0,0 @@
-"""
- pygments.lexers.ncl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for NCAR Command Language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['NCLLexer']
-
-
-class NCLLexer(RegexLexer):
- """
- Lexer for NCL code.
-
- .. versionadded:: 2.2
- """
- name = 'NCL'
- aliases = ['ncl']
- filenames = ['*.ncl']
- mimetypes = ['text/ncl']
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r';.*\n', Comment),
- include('strings'),
- include('core'),
- (r'[a-zA-Z_]\w*', Name),
- include('nums'),
- (r'[\s]+', Text),
- ],
- 'core': [
- # Statements
- (words((
- 'begin', 'break', 'continue', 'create', 'defaultapp', 'do',
- 'else', 'end', 'external', 'exit', 'True', 'False', 'file', 'function',
- 'getvalues', 'graphic', 'group', 'if', 'list', 'load', 'local',
- 'new', '_Missing', 'Missing', 'noparent', 'procedure',
- 'quit', 'QUIT', 'Quit', 'record', 'return', 'setvalues', 'stop',
- 'then', 'while'), prefix=r'\b', suffix=r'\s*\b'),
- Keyword),
-
- # Data Types
- (words((
- 'ubyte', 'uint', 'uint64', 'ulong', 'string', 'byte',
- 'character', 'double', 'float', 'integer', 'int64', 'logical',
- 'long', 'short', 'ushort', 'enumeric', 'numeric', 'snumeric'),
- prefix=r'\b', suffix=r'\s*\b'),
- Keyword.Type),
-
- # Operators
- (r'[\%^*+\-/<>]', Operator),
-
- # punctuation:
- (r'[\[\]():@$!&|.,\\{}]', Punctuation),
- (r'[=:]', Punctuation),
-
- # Intrinsics
- (words((
- 'abs', 'acos', 'addfile', 'addfiles', 'all', 'angmom_atm', 'any',
- 'area_conserve_remap', 'area_hi2lores', 'area_poly_sphere',
- 'asciiread', 'asciiwrite', 'asin', 'atan', 'atan2', 'attsetvalues',
- 'avg', 'betainc', 'bin_avg', 'bin_sum', 'bw_bandpass_filter',
- 'cancor', 'cbinread', 'cbinwrite', 'cd_calendar', 'cd_inv_calendar',
- 'cdfbin_p', 'cdfbin_pr', 'cdfbin_s', 'cdfbin_xn', 'cdfchi_p',
- 'cdfchi_x', 'cdfgam_p', 'cdfgam_x', 'cdfnor_p', 'cdfnor_x',
- 'cdft_p', 'cdft_t', 'ceil', 'center_finite_diff',
- 'center_finite_diff_n', 'cfftb', 'cfftf', 'cfftf_frq_reorder',
- 'charactertodouble', 'charactertofloat', 'charactertointeger',
- 'charactertolong', 'charactertoshort', 'charactertostring',
- 'chartodouble', 'chartofloat', 'chartoint', 'chartointeger',
- 'chartolong', 'chartoshort', 'chartostring', 'chiinv', 'clear',
- 'color_index_to_rgba', 'conform', 'conform_dims', 'cos', 'cosh',
- 'count_unique_values', 'covcorm', 'covcorm_xy', 'craybinnumrec',
- 'craybinrecread', 'create_graphic', 'csa1', 'csa1d', 'csa1s',
- 'csa1x', 'csa1xd', 'csa1xs', 'csa2', 'csa2d', 'csa2l', 'csa2ld',
- 'csa2ls', 'csa2lx', 'csa2lxd', 'csa2lxs', 'csa2s', 'csa2x',
- 'csa2xd', 'csa2xs', 'csa3', 'csa3d', 'csa3l', 'csa3ld', 'csa3ls',
- 'csa3lx', 'csa3lxd', 'csa3lxs', 'csa3s', 'csa3x', 'csa3xd',
- 'csa3xs', 'csc2s', 'csgetp', 'css2c', 'cssetp', 'cssgrid', 'csstri',
- 'csvoro', 'cumsum', 'cz2ccm', 'datatondc', 'day_of_week',
- 'day_of_year', 'days_in_month', 'default_fillvalue', 'delete',
- 'depth_to_pres', 'destroy', 'determinant', 'dewtemp_trh',
- 'dgeevx_lapack', 'dim_acumrun_n', 'dim_avg', 'dim_avg_n',
- 'dim_avg_wgt', 'dim_avg_wgt_n', 'dim_cumsum', 'dim_cumsum_n',
- 'dim_gamfit_n', 'dim_gbits', 'dim_max', 'dim_max_n', 'dim_median',
- 'dim_median_n', 'dim_min', 'dim_min_n', 'dim_num', 'dim_num_n',
- 'dim_numrun_n', 'dim_pqsort', 'dim_pqsort_n', 'dim_product',
- 'dim_product_n', 'dim_rmsd', 'dim_rmsd_n', 'dim_rmvmean',
- 'dim_rmvmean_n', 'dim_rmvmed', 'dim_rmvmed_n', 'dim_spi_n',
- 'dim_standardize', 'dim_standardize_n', 'dim_stat4', 'dim_stat4_n',
- 'dim_stddev', 'dim_stddev_n', 'dim_sum', 'dim_sum_n', 'dim_sum_wgt',
- 'dim_sum_wgt_n', 'dim_variance', 'dim_variance_n', 'dimsizes',
- 'doubletobyte', 'doubletochar', 'doubletocharacter',
- 'doubletofloat', 'doubletoint', 'doubletointeger', 'doubletolong',
- 'doubletoshort', 'dpres_hybrid_ccm', 'dpres_plevel', 'draw',
- 'draw_color_palette', 'dsgetp', 'dsgrid2', 'dsgrid2d', 'dsgrid2s',
- 'dsgrid3', 'dsgrid3d', 'dsgrid3s', 'dspnt2', 'dspnt2d', 'dspnt2s',
- 'dspnt3', 'dspnt3d', 'dspnt3s', 'dssetp', 'dtrend', 'dtrend_msg',
- 'dtrend_msg_n', 'dtrend_n', 'dtrend_quadratic',
- 'dtrend_quadratic_msg_n', 'dv2uvf', 'dv2uvg', 'dz_height',
- 'echo_off', 'echo_on', 'eof2data', 'eof_varimax', 'eofcor',
- 'eofcor_pcmsg', 'eofcor_ts', 'eofcov', 'eofcov_pcmsg', 'eofcov_ts',
- 'eofunc', 'eofunc_ts', 'eofunc_varimax', 'equiv_sample_size', 'erf',
- 'erfc', 'esacr', 'esacv', 'esccr', 'esccv', 'escorc', 'escorc_n',
- 'escovc', 'exit', 'exp', 'exp_tapersh', 'exp_tapersh_wgts',
- 'exp_tapershC', 'ezfftb', 'ezfftb_n', 'ezfftf', 'ezfftf_n',
- 'f2fosh', 'f2foshv', 'f2fsh', 'f2fshv', 'f2gsh', 'f2gshv', 'fabs',
- 'fbindirread', 'fbindirwrite', 'fbinnumrec', 'fbinread',
- 'fbinrecread', 'fbinrecwrite', 'fbinwrite', 'fft2db', 'fft2df',
- 'fftshift', 'fileattdef', 'filechunkdimdef', 'filedimdef',
- 'fileexists', 'filegrpdef', 'filevarattdef', 'filevarchunkdef',
- 'filevarcompressleveldef', 'filevardef', 'filevardimsizes',
- 'filwgts_lancos', 'filwgts_lanczos', 'filwgts_normal',
- 'floattobyte', 'floattochar', 'floattocharacter', 'floattoint',
- 'floattointeger', 'floattolong', 'floattoshort', 'floor',
- 'fluxEddy', 'fo2fsh', 'fo2fshv', 'fourier_info', 'frame', 'fspan',
- 'ftcurv', 'ftcurvd', 'ftcurvi', 'ftcurvp', 'ftcurvpi', 'ftcurvps',
- 'ftcurvs', 'ftest', 'ftgetp', 'ftkurv', 'ftkurvd', 'ftkurvp',
- 'ftkurvpd', 'ftsetp', 'ftsurf', 'g2fsh', 'g2fshv', 'g2gsh',
- 'g2gshv', 'gamma', 'gammainc', 'gaus', 'gaus_lobat',
- 'gaus_lobat_wgt', 'gc_aangle', 'gc_clkwise', 'gc_dangle',
- 'gc_inout', 'gc_latlon', 'gc_onarc', 'gc_pnt2gc', 'gc_qarea',
- 'gc_tarea', 'generate_2d_array', 'get_color_index',
- 'get_color_rgba', 'get_cpu_time', 'get_isolines', 'get_ncl_version',
- 'get_script_name', 'get_script_prefix_name', 'get_sphere_radius',
- 'get_unique_values', 'getbitsone', 'getenv', 'getfiledimsizes',
- 'getfilegrpnames', 'getfilepath', 'getfilevaratts',
- 'getfilevarchunkdimsizes', 'getfilevardims', 'getfilevardimsizes',
- 'getfilevarnames', 'getfilevartypes', 'getvaratts', 'getvardims',
- 'gradsf', 'gradsg', 'greg2jul', 'grid2triple', 'hlsrgb', 'hsvrgb',
- 'hydro', 'hyi2hyo', 'idsfft', 'igradsf', 'igradsg', 'ilapsf',
- 'ilapsg', 'ilapvf', 'ilapvg', 'ind', 'ind_resolve', 'int2p',
- 'int2p_n', 'integertobyte', 'integertochar', 'integertocharacter',
- 'integertoshort', 'inttobyte', 'inttochar', 'inttoshort',
- 'inverse_matrix', 'isatt', 'isbigendian', 'isbyte', 'ischar',
- 'iscoord', 'isdefined', 'isdim', 'isdimnamed', 'isdouble',
- 'isenumeric', 'isfile', 'isfilepresent', 'isfilevar',
- 'isfilevaratt', 'isfilevarcoord', 'isfilevardim', 'isfloat',
- 'isfunc', 'isgraphic', 'isint', 'isint64', 'isinteger',
- 'isleapyear', 'islogical', 'islong', 'ismissing', 'isnan_ieee',
- 'isnumeric', 'ispan', 'isproc', 'isshort', 'issnumeric', 'isstring',
- 'isubyte', 'isuint', 'isuint64', 'isulong', 'isunlimited',
- 'isunsigned', 'isushort', 'isvar', 'jul2greg', 'kmeans_as136',
- 'kolsm2_n', 'kron_product', 'lapsf', 'lapsg', 'lapvf', 'lapvg',
- 'latlon2utm', 'lclvl', 'lderuvf', 'lderuvg', 'linint1', 'linint1_n',
- 'linint2', 'linint2_points', 'linmsg', 'linmsg_n', 'linrood_latwgt',
- 'linrood_wgt', 'list_files', 'list_filevars', 'list_hlus',
- 'list_procfuncs', 'list_vars', 'ListAppend', 'ListCount',
- 'ListGetType', 'ListIndex', 'ListIndexFromName', 'ListPop',
- 'ListPush', 'ListSetType', 'loadscript', 'local_max', 'local_min',
- 'log', 'log10', 'longtobyte', 'longtochar', 'longtocharacter',
- 'longtoint', 'longtointeger', 'longtoshort', 'lspoly', 'lspoly_n',
- 'mask', 'max', 'maxind', 'min', 'minind', 'mixed_layer_depth',
- 'mixhum_ptd', 'mixhum_ptrh', 'mjo_cross_coh2pha',
- 'mjo_cross_segment', 'moc_globe_atl', 'monthday', 'natgrid',
- 'natgridd', 'natgrids', 'ncargpath', 'ncargversion', 'ndctodata',
- 'ndtooned', 'new', 'NewList', 'ngezlogo', 'nggcog', 'nggetp',
- 'nglogo', 'ngsetp', 'NhlAddAnnotation', 'NhlAddData',
- 'NhlAddOverlay', 'NhlAddPrimitive', 'NhlAppGetDefaultParentId',
- 'NhlChangeWorkstation', 'NhlClassName', 'NhlClearWorkstation',
- 'NhlDataPolygon', 'NhlDataPolyline', 'NhlDataPolymarker',
- 'NhlDataToNDC', 'NhlDestroy', 'NhlDraw', 'NhlFrame', 'NhlFreeColor',
- 'NhlGetBB', 'NhlGetClassResources', 'NhlGetErrorObjectId',
- 'NhlGetNamedColorIndex', 'NhlGetParentId',
- 'NhlGetParentWorkstation', 'NhlGetWorkspaceObjectId',
- 'NhlIsAllocatedColor', 'NhlIsApp', 'NhlIsDataComm', 'NhlIsDataItem',
- 'NhlIsDataSpec', 'NhlIsTransform', 'NhlIsView', 'NhlIsWorkstation',
- 'NhlName', 'NhlNDCPolygon', 'NhlNDCPolyline', 'NhlNDCPolymarker',
- 'NhlNDCToData', 'NhlNewColor', 'NhlNewDashPattern', 'NhlNewMarker',
- 'NhlPalGetDefined', 'NhlRemoveAnnotation', 'NhlRemoveData',
- 'NhlRemoveOverlay', 'NhlRemovePrimitive', 'NhlSetColor',
- 'NhlSetDashPattern', 'NhlSetMarker', 'NhlUpdateData',
- 'NhlUpdateWorkstation', 'nice_mnmxintvl', 'nngetaspectd',
- 'nngetaspects', 'nngetp', 'nngetsloped', 'nngetslopes', 'nngetwts',
- 'nngetwtsd', 'nnpnt', 'nnpntd', 'nnpntend', 'nnpntendd',
- 'nnpntinit', 'nnpntinitd', 'nnpntinits', 'nnpnts', 'nnsetp', 'num',
- 'obj_anal_ic', 'omega_ccm', 'onedtond', 'overlay', 'paleo_outline',
- 'pdfxy_bin', 'poisson_grid_fill', 'pop_remap', 'potmp_insitu_ocn',
- 'prcwater_dp', 'pres2hybrid', 'pres_hybrid_ccm', 'pres_sigma',
- 'print', 'print_table', 'printFileVarSummary', 'printVarSummary',
- 'product', 'pslec', 'pslhor', 'pslhyp', 'qsort', 'rand',
- 'random_chi', 'random_gamma', 'random_normal', 'random_setallseed',
- 'random_uniform', 'rcm2points', 'rcm2rgrid', 'rdsstoi',
- 'read_colormap_file', 'reg_multlin', 'regcoef', 'regCoef_n',
- 'regline', 'relhum', 'replace_ieeenan', 'reshape', 'reshape_ind',
- 'rgba_to_color_index', 'rgbhls', 'rgbhsv', 'rgbyiq', 'rgrid2rcm',
- 'rhomb_trunc', 'rip_cape_2d', 'rip_cape_3d', 'round', 'rtest',
- 'runave', 'runave_n', 'set_default_fillvalue', 'set_sphere_radius',
- 'setfileoption', 'sfvp2uvf', 'sfvp2uvg', 'shaec', 'shagc',
- 'shgetnp', 'shgetp', 'shgrid', 'shorttobyte', 'shorttochar',
- 'shorttocharacter', 'show_ascii', 'shsec', 'shsetp', 'shsgc',
- 'shsgc_R42', 'sigma2hybrid', 'simpeq', 'simpne', 'sin',
- 'sindex_yrmo', 'sinh', 'sizeof', 'sleep', 'smth9', 'snindex_yrmo',
- 'solve_linsys', 'span_color_indexes', 'span_color_rgba',
- 'sparse_matrix_mult', 'spcorr', 'spcorr_n', 'specx_anal',
- 'specxy_anal', 'spei', 'sprintf', 'sprinti', 'sqrt', 'sqsort',
- 'srand', 'stat2', 'stat4', 'stat_medrng', 'stat_trim',
- 'status_exit', 'stdatmus_p2tdz', 'stdatmus_z2tdp', 'stddev',
- 'str_capital', 'str_concat', 'str_fields_count', 'str_get_cols',
- 'str_get_dq', 'str_get_field', 'str_get_nl', 'str_get_sq',
- 'str_get_tab', 'str_index_of_substr', 'str_insert', 'str_is_blank',
- 'str_join', 'str_left_strip', 'str_lower', 'str_match',
- 'str_match_ic', 'str_match_ic_regex', 'str_match_ind',
- 'str_match_ind_ic', 'str_match_ind_ic_regex', 'str_match_ind_regex',
- 'str_match_regex', 'str_right_strip', 'str_split',
- 'str_split_by_length', 'str_split_csv', 'str_squeeze', 'str_strip',
- 'str_sub_str', 'str_switch', 'str_upper', 'stringtochar',
- 'stringtocharacter', 'stringtodouble', 'stringtofloat',
- 'stringtoint', 'stringtointeger', 'stringtolong', 'stringtoshort',
- 'strlen', 'student_t', 'sum', 'svd_lapack', 'svdcov', 'svdcov_sv',
- 'svdstd', 'svdstd_sv', 'system', 'systemfunc', 'tan', 'tanh',
- 'taper', 'taper_n', 'tdclrs', 'tdctri', 'tdcudp', 'tdcurv',
- 'tddtri', 'tdez2d', 'tdez3d', 'tdgetp', 'tdgrds', 'tdgrid',
- 'tdgtrs', 'tdinit', 'tditri', 'tdlbla', 'tdlblp', 'tdlbls',
- 'tdline', 'tdlndp', 'tdlnpa', 'tdlpdp', 'tdmtri', 'tdotri',
- 'tdpara', 'tdplch', 'tdprpa', 'tdprpi', 'tdprpt', 'tdsetp',
- 'tdsort', 'tdstri', 'tdstrs', 'tdttri', 'thornthwaite', 'tobyte',
- 'tochar', 'todouble', 'tofloat', 'toint', 'toint64', 'tointeger',
- 'tolong', 'toshort', 'tosigned', 'tostring', 'tostring_with_format',
- 'totype', 'toubyte', 'touint', 'touint64', 'toulong', 'tounsigned',
- 'toushort', 'trend_manken', 'tri_trunc', 'triple2grid',
- 'triple2grid2d', 'trop_wmo', 'ttest', 'typeof', 'undef',
- 'unique_string', 'update', 'ushorttoint', 'ut_calendar',
- 'ut_inv_calendar', 'utm2latlon', 'uv2dv_cfd', 'uv2dvf', 'uv2dvg',
- 'uv2sfvpf', 'uv2sfvpg', 'uv2vr_cfd', 'uv2vrdvf', 'uv2vrdvg',
- 'uv2vrf', 'uv2vrg', 'v5d_close', 'v5d_create', 'v5d_setLowLev',
- 'v5d_setUnits', 'v5d_write', 'v5d_write_var', 'variance', 'vhaec',
- 'vhagc', 'vhsec', 'vhsgc', 'vibeta', 'vinth2p', 'vinth2p_ecmwf',
- 'vinth2p_ecmwf_nodes', 'vinth2p_nodes', 'vintp2p_ecmwf', 'vr2uvf',
- 'vr2uvg', 'vrdv2uvf', 'vrdv2uvg', 'wavelet', 'wavelet_default',
- 'weibull', 'wgt_area_smooth', 'wgt_areaave', 'wgt_areaave2',
- 'wgt_arearmse', 'wgt_arearmse2', 'wgt_areasum2', 'wgt_runave',
- 'wgt_runave_n', 'wgt_vert_avg_beta', 'wgt_volave', 'wgt_volave_ccm',
- 'wgt_volrmse', 'wgt_volrmse_ccm', 'where', 'wk_smooth121', 'wmbarb',
- 'wmbarbmap', 'wmdrft', 'wmgetp', 'wmlabs', 'wmsetp', 'wmstnm',
- 'wmvect', 'wmvectmap', 'wmvlbl', 'wrf_avo', 'wrf_cape_2d',
- 'wrf_cape_3d', 'wrf_dbz', 'wrf_eth', 'wrf_helicity', 'wrf_ij_to_ll',
- 'wrf_interp_1d', 'wrf_interp_2d_xy', 'wrf_interp_3d_z',
- 'wrf_latlon_to_ij', 'wrf_ll_to_ij', 'wrf_omega', 'wrf_pvo',
- 'wrf_rh', 'wrf_slp', 'wrf_smooth_2d', 'wrf_td', 'wrf_tk',
- 'wrf_updraft_helicity', 'wrf_uvmet', 'wrf_virtual_temp',
- 'wrf_wetbulb', 'wrf_wps_close_int', 'wrf_wps_open_int',
- 'wrf_wps_rddata_int', 'wrf_wps_rdhead_int', 'wrf_wps_read_int',
- 'wrf_wps_write_int', 'write_matrix', 'write_table', 'yiqrgb',
- 'z2geouv', 'zonal_mpsi', 'addfiles_GetVar', 'advect_variable',
- 'area_conserve_remap_Wrap', 'area_hi2lores_Wrap',
- 'array_append_record', 'assignFillValue', 'byte2flt',
- 'byte2flt_hdf', 'calcDayAnomTLL', 'calcMonAnomLLLT',
- 'calcMonAnomLLT', 'calcMonAnomTLL', 'calcMonAnomTLLL',
- 'calculate_monthly_values', 'cd_convert', 'changeCase',
- 'changeCaseChar', 'clmDayTLL', 'clmDayTLLL', 'clmMon2clmDay',
- 'clmMonLLLT', 'clmMonLLT', 'clmMonTLL', 'clmMonTLLL', 'closest_val',
- 'copy_VarAtts', 'copy_VarCoords', 'copy_VarCoords_1',
- 'copy_VarCoords_2', 'copy_VarMeta', 'copyatt', 'crossp3',
- 'cshstringtolist', 'cssgrid_Wrap', 'dble2flt', 'decimalPlaces',
- 'delete_VarAtts', 'dim_avg_n_Wrap', 'dim_avg_wgt_n_Wrap',
- 'dim_avg_wgt_Wrap', 'dim_avg_Wrap', 'dim_cumsum_n_Wrap',
- 'dim_cumsum_Wrap', 'dim_max_n_Wrap', 'dim_min_n_Wrap',
- 'dim_rmsd_n_Wrap', 'dim_rmsd_Wrap', 'dim_rmvmean_n_Wrap',
- 'dim_rmvmean_Wrap', 'dim_rmvmed_n_Wrap', 'dim_rmvmed_Wrap',
- 'dim_standardize_n_Wrap', 'dim_standardize_Wrap',
- 'dim_stddev_n_Wrap', 'dim_stddev_Wrap', 'dim_sum_n_Wrap',
- 'dim_sum_wgt_n_Wrap', 'dim_sum_wgt_Wrap', 'dim_sum_Wrap',
- 'dim_variance_n_Wrap', 'dim_variance_Wrap', 'dpres_plevel_Wrap',
- 'dtrend_leftdim', 'dv2uvF_Wrap', 'dv2uvG_Wrap', 'eof_north',
- 'eofcor_Wrap', 'eofcov_Wrap', 'eofunc_north', 'eofunc_ts_Wrap',
- 'eofunc_varimax_reorder', 'eofunc_varimax_Wrap', 'eofunc_Wrap',
- 'epsZero', 'f2fosh_Wrap', 'f2foshv_Wrap', 'f2fsh_Wrap',
- 'f2fshv_Wrap', 'f2gsh_Wrap', 'f2gshv_Wrap', 'fbindirSwap',
- 'fbinseqSwap1', 'fbinseqSwap2', 'flt2dble', 'flt2string',
- 'fo2fsh_Wrap', 'fo2fshv_Wrap', 'g2fsh_Wrap', 'g2fshv_Wrap',
- 'g2gsh_Wrap', 'g2gshv_Wrap', 'generate_resample_indices',
- 'generate_sample_indices', 'generate_unique_indices',
- 'genNormalDist', 'get1Dindex', 'get1Dindex_Collapse',
- 'get1Dindex_Exclude', 'get_file_suffix', 'GetFillColor',
- 'GetFillColorIndex', 'getFillValue', 'getind_latlon2d',
- 'getVarDimNames', 'getVarFillValue', 'grib_stime2itime',
- 'hyi2hyo_Wrap', 'ilapsF_Wrap', 'ilapsG_Wrap', 'ind_nearest_coord',
- 'indStrSubset', 'int2dble', 'int2flt', 'int2p_n_Wrap', 'int2p_Wrap',
- 'isMonotonic', 'isStrSubset', 'latGau', 'latGauWgt', 'latGlobeF',
- 'latGlobeFo', 'latRegWgt', 'linint1_n_Wrap', 'linint1_Wrap',
- 'linint2_points_Wrap', 'linint2_Wrap', 'local_max_1d',
- 'local_min_1d', 'lonFlip', 'lonGlobeF', 'lonGlobeFo', 'lonPivot',
- 'merge_levels_sfc', 'mod', 'month_to_annual',
- 'month_to_annual_weighted', 'month_to_season', 'month_to_season12',
- 'month_to_seasonN', 'monthly_total_to_daily_mean', 'nameDim',
- 'natgrid_Wrap', 'NewCosWeight', 'niceLatLon2D', 'NormCosWgtGlobe',
- 'numAsciiCol', 'numAsciiRow', 'numeric2int',
- 'obj_anal_ic_deprecated', 'obj_anal_ic_Wrap', 'omega_ccm_driver',
- 'omega_to_w', 'oneDtostring', 'pack_values', 'pattern_cor', 'pdfx',
- 'pdfxy', 'pdfxy_conform', 'pot_temp', 'pot_vort_hybrid',
- 'pot_vort_isobaric', 'pres2hybrid_Wrap', 'print_clock',
- 'printMinMax', 'quadroots', 'rcm2points_Wrap', 'rcm2rgrid_Wrap',
- 'readAsciiHead', 'readAsciiTable', 'reg_multlin_stats',
- 'region_ind', 'regline_stats', 'relhum_ttd', 'replaceSingleChar',
- 'RGBtoCmap', 'rgrid2rcm_Wrap', 'rho_mwjf', 'rm_single_dims',
- 'rmAnnCycle1D', 'rmInsufData', 'rmMonAnnCycLLLT', 'rmMonAnnCycLLT',
- 'rmMonAnnCycTLL', 'runave_n_Wrap', 'runave_Wrap', 'short2flt',
- 'short2flt_hdf', 'shsgc_R42_Wrap', 'sign_f90', 'sign_matlab',
- 'smth9_Wrap', 'smthClmDayTLL', 'smthClmDayTLLL', 'SqrtCosWeight',
- 'stat_dispersion', 'static_stability', 'stdMonLLLT', 'stdMonLLT',
- 'stdMonTLL', 'stdMonTLLL', 'symMinMaxPlt', 'table_attach_columns',
- 'table_attach_rows', 'time_to_newtime', 'transpose',
- 'triple2grid_Wrap', 'ut_convert', 'uv2dvF_Wrap', 'uv2dvG_Wrap',
- 'uv2vrF_Wrap', 'uv2vrG_Wrap', 'vr2uvF_Wrap', 'vr2uvG_Wrap',
- 'w_to_omega', 'wallClockElapseTime', 'wave_number_spc',
- 'wgt_areaave_Wrap', 'wgt_runave_leftdim', 'wgt_runave_n_Wrap',
- 'wgt_runave_Wrap', 'wgt_vertical_n', 'wind_component',
- 'wind_direction', 'yyyyddd_to_yyyymmdd', 'yyyymm_time',
- 'yyyymm_to_yyyyfrac', 'yyyymmdd_time', 'yyyymmdd_to_yyyyddd',
- 'yyyymmdd_to_yyyyfrac', 'yyyymmddhh_time', 'yyyymmddhh_to_yyyyfrac',
- 'zonal_mpsi_Wrap', 'zonalAve', 'calendar_decode2', 'cd_string',
- 'kf_filter', 'run_cor', 'time_axis_labels', 'ut_string',
- 'wrf_contour', 'wrf_map', 'wrf_map_overlay', 'wrf_map_overlays',
- 'wrf_map_resources', 'wrf_map_zoom', 'wrf_overlay', 'wrf_overlays',
- 'wrf_user_getvar', 'wrf_user_ij_to_ll', 'wrf_user_intrp2d',
- 'wrf_user_intrp3d', 'wrf_user_latlon_to_ij', 'wrf_user_list_times',
- 'wrf_user_ll_to_ij', 'wrf_user_unstagger', 'wrf_user_vert_interp',
- 'wrf_vector', 'gsn_add_annotation', 'gsn_add_polygon',
- 'gsn_add_polyline', 'gsn_add_polymarker',
- 'gsn_add_shapefile_polygons', 'gsn_add_shapefile_polylines',
- 'gsn_add_shapefile_polymarkers', 'gsn_add_text', 'gsn_attach_plots',
- 'gsn_blank_plot', 'gsn_contour', 'gsn_contour_map',
- 'gsn_contour_shade', 'gsn_coordinates', 'gsn_create_labelbar',
- 'gsn_create_legend', 'gsn_create_text',
- 'gsn_csm_attach_zonal_means', 'gsn_csm_blank_plot',
- 'gsn_csm_contour', 'gsn_csm_contour_map', 'gsn_csm_contour_map_ce',
- 'gsn_csm_contour_map_overlay', 'gsn_csm_contour_map_polar',
- 'gsn_csm_hov', 'gsn_csm_lat_time', 'gsn_csm_map', 'gsn_csm_map_ce',
- 'gsn_csm_map_polar', 'gsn_csm_pres_hgt',
- 'gsn_csm_pres_hgt_streamline', 'gsn_csm_pres_hgt_vector',
- 'gsn_csm_streamline', 'gsn_csm_streamline_contour_map',
- 'gsn_csm_streamline_contour_map_ce',
- 'gsn_csm_streamline_contour_map_polar', 'gsn_csm_streamline_map',
- 'gsn_csm_streamline_map_ce', 'gsn_csm_streamline_map_polar',
- 'gsn_csm_streamline_scalar', 'gsn_csm_streamline_scalar_map',
- 'gsn_csm_streamline_scalar_map_ce',
- 'gsn_csm_streamline_scalar_map_polar', 'gsn_csm_time_lat',
- 'gsn_csm_vector', 'gsn_csm_vector_map', 'gsn_csm_vector_map_ce',
- 'gsn_csm_vector_map_polar', 'gsn_csm_vector_scalar',
- 'gsn_csm_vector_scalar_map', 'gsn_csm_vector_scalar_map_ce',
- 'gsn_csm_vector_scalar_map_polar', 'gsn_csm_x2y', 'gsn_csm_x2y2',
- 'gsn_csm_xy', 'gsn_csm_xy2', 'gsn_csm_xy3', 'gsn_csm_y',
- 'gsn_define_colormap', 'gsn_draw_colormap', 'gsn_draw_named_colors',
- 'gsn_histogram', 'gsn_labelbar_ndc', 'gsn_legend_ndc', 'gsn_map',
- 'gsn_merge_colormaps', 'gsn_open_wks', 'gsn_panel', 'gsn_polygon',
- 'gsn_polygon_ndc', 'gsn_polyline', 'gsn_polyline_ndc',
- 'gsn_polymarker', 'gsn_polymarker_ndc', 'gsn_retrieve_colormap',
- 'gsn_reverse_colormap', 'gsn_streamline', 'gsn_streamline_map',
- 'gsn_streamline_scalar', 'gsn_streamline_scalar_map', 'gsn_table',
- 'gsn_text', 'gsn_text_ndc', 'gsn_vector', 'gsn_vector_map',
- 'gsn_vector_scalar', 'gsn_vector_scalar_map', 'gsn_xy', 'gsn_y',
- 'hsv2rgb', 'maximize_output', 'namedcolor2rgb', 'namedcolor2rgba',
- 'reset_device_coordinates', 'span_named_colors'), prefix=r'\b'),
- Name.Builtin),
-
- # Resources
- (words((
- 'amDataXF', 'amDataYF', 'amJust', 'amOn', 'amOrthogonalPosF',
- 'amParallelPosF', 'amResizeNotify', 'amSide', 'amTrackData',
- 'amViewId', 'amZone', 'appDefaultParent', 'appFileSuffix',
- 'appResources', 'appSysDir', 'appUsrDir', 'caCopyArrays',
- 'caXArray', 'caXCast', 'caXMaxV', 'caXMinV', 'caXMissingV',
- 'caYArray', 'caYCast', 'caYMaxV', 'caYMinV', 'caYMissingV',
- 'cnCellFillEdgeColor', 'cnCellFillMissingValEdgeColor',
- 'cnConpackParams', 'cnConstFEnableFill', 'cnConstFLabelAngleF',
- 'cnConstFLabelBackgroundColor', 'cnConstFLabelConstantSpacingF',
- 'cnConstFLabelFont', 'cnConstFLabelFontAspectF',
- 'cnConstFLabelFontColor', 'cnConstFLabelFontHeightF',
- 'cnConstFLabelFontQuality', 'cnConstFLabelFontThicknessF',
- 'cnConstFLabelFormat', 'cnConstFLabelFuncCode', 'cnConstFLabelJust',
- 'cnConstFLabelOn', 'cnConstFLabelOrthogonalPosF',
- 'cnConstFLabelParallelPosF', 'cnConstFLabelPerimColor',
- 'cnConstFLabelPerimOn', 'cnConstFLabelPerimSpaceF',
- 'cnConstFLabelPerimThicknessF', 'cnConstFLabelSide',
- 'cnConstFLabelString', 'cnConstFLabelTextDirection',
- 'cnConstFLabelZone', 'cnConstFUseInfoLabelRes',
- 'cnExplicitLabelBarLabelsOn', 'cnExplicitLegendLabelsOn',
- 'cnExplicitLineLabelsOn', 'cnFillBackgroundColor', 'cnFillColor',
- 'cnFillColors', 'cnFillDotSizeF', 'cnFillDrawOrder', 'cnFillMode',
- 'cnFillOn', 'cnFillOpacityF', 'cnFillPalette', 'cnFillPattern',
- 'cnFillPatterns', 'cnFillScaleF', 'cnFillScales', 'cnFixFillBleed',
- 'cnGridBoundFillColor', 'cnGridBoundFillPattern',
- 'cnGridBoundFillScaleF', 'cnGridBoundPerimColor',
- 'cnGridBoundPerimDashPattern', 'cnGridBoundPerimOn',
- 'cnGridBoundPerimThicknessF', 'cnHighLabelAngleF',
- 'cnHighLabelBackgroundColor', 'cnHighLabelConstantSpacingF',
- 'cnHighLabelCount', 'cnHighLabelFont', 'cnHighLabelFontAspectF',
- 'cnHighLabelFontColor', 'cnHighLabelFontHeightF',
- 'cnHighLabelFontQuality', 'cnHighLabelFontThicknessF',
- 'cnHighLabelFormat', 'cnHighLabelFuncCode', 'cnHighLabelPerimColor',
- 'cnHighLabelPerimOn', 'cnHighLabelPerimSpaceF',
- 'cnHighLabelPerimThicknessF', 'cnHighLabelString', 'cnHighLabelsOn',
- 'cnHighLowLabelOverlapMode', 'cnHighUseLineLabelRes',
- 'cnInfoLabelAngleF', 'cnInfoLabelBackgroundColor',
- 'cnInfoLabelConstantSpacingF', 'cnInfoLabelFont',
- 'cnInfoLabelFontAspectF', 'cnInfoLabelFontColor',
- 'cnInfoLabelFontHeightF', 'cnInfoLabelFontQuality',
- 'cnInfoLabelFontThicknessF', 'cnInfoLabelFormat',
- 'cnInfoLabelFuncCode', 'cnInfoLabelJust', 'cnInfoLabelOn',
- 'cnInfoLabelOrthogonalPosF', 'cnInfoLabelParallelPosF',
- 'cnInfoLabelPerimColor', 'cnInfoLabelPerimOn',
- 'cnInfoLabelPerimSpaceF', 'cnInfoLabelPerimThicknessF',
- 'cnInfoLabelSide', 'cnInfoLabelString', 'cnInfoLabelTextDirection',
- 'cnInfoLabelZone', 'cnLabelBarEndLabelsOn', 'cnLabelBarEndStyle',
- 'cnLabelDrawOrder', 'cnLabelMasking', 'cnLabelScaleFactorF',
- 'cnLabelScaleValueF', 'cnLabelScalingMode', 'cnLegendLevelFlags',
- 'cnLevelCount', 'cnLevelFlag', 'cnLevelFlags', 'cnLevelSelectionMode',
- 'cnLevelSpacingF', 'cnLevels', 'cnLineColor', 'cnLineColors',
- 'cnLineDashPattern', 'cnLineDashPatterns', 'cnLineDashSegLenF',
- 'cnLineDrawOrder', 'cnLineLabelAngleF', 'cnLineLabelBackgroundColor',
- 'cnLineLabelConstantSpacingF', 'cnLineLabelCount',
- 'cnLineLabelDensityF', 'cnLineLabelFont', 'cnLineLabelFontAspectF',
- 'cnLineLabelFontColor', 'cnLineLabelFontColors',
- 'cnLineLabelFontHeightF', 'cnLineLabelFontQuality',
- 'cnLineLabelFontThicknessF', 'cnLineLabelFormat',
- 'cnLineLabelFuncCode', 'cnLineLabelInterval', 'cnLineLabelPerimColor',
- 'cnLineLabelPerimOn', 'cnLineLabelPerimSpaceF',
- 'cnLineLabelPerimThicknessF', 'cnLineLabelPlacementMode',
- 'cnLineLabelStrings', 'cnLineLabelsOn', 'cnLinePalette',
- 'cnLineThicknessF', 'cnLineThicknesses', 'cnLinesOn',
- 'cnLowLabelAngleF', 'cnLowLabelBackgroundColor',
- 'cnLowLabelConstantSpacingF', 'cnLowLabelCount', 'cnLowLabelFont',
- 'cnLowLabelFontAspectF', 'cnLowLabelFontColor',
- 'cnLowLabelFontHeightF', 'cnLowLabelFontQuality',
- 'cnLowLabelFontThicknessF', 'cnLowLabelFormat', 'cnLowLabelFuncCode',
- 'cnLowLabelPerimColor', 'cnLowLabelPerimOn', 'cnLowLabelPerimSpaceF',
- 'cnLowLabelPerimThicknessF', 'cnLowLabelString', 'cnLowLabelsOn',
- 'cnLowUseHighLabelRes', 'cnMaxDataValueFormat', 'cnMaxLevelCount',
- 'cnMaxLevelValF', 'cnMaxPointDistanceF', 'cnMinLevelValF',
- 'cnMissingValFillColor', 'cnMissingValFillPattern',
- 'cnMissingValFillScaleF', 'cnMissingValPerimColor',
- 'cnMissingValPerimDashPattern', 'cnMissingValPerimGridBoundOn',
- 'cnMissingValPerimOn', 'cnMissingValPerimThicknessF',
- 'cnMonoFillColor', 'cnMonoFillPattern', 'cnMonoFillScale',
- 'cnMonoLevelFlag', 'cnMonoLineColor', 'cnMonoLineDashPattern',
- 'cnMonoLineLabelFontColor', 'cnMonoLineThickness', 'cnNoDataLabelOn',
- 'cnNoDataLabelString', 'cnOutOfRangeFillColor',
- 'cnOutOfRangeFillPattern', 'cnOutOfRangeFillScaleF',
- 'cnOutOfRangePerimColor', 'cnOutOfRangePerimDashPattern',
- 'cnOutOfRangePerimOn', 'cnOutOfRangePerimThicknessF',
- 'cnRasterCellSizeF', 'cnRasterMinCellSizeF', 'cnRasterModeOn',
- 'cnRasterSampleFactorF', 'cnRasterSmoothingOn', 'cnScalarFieldData',
- 'cnSmoothingDistanceF', 'cnSmoothingOn', 'cnSmoothingTensionF',
- 'cnSpanFillPalette', 'cnSpanLinePalette', 'ctCopyTables',
- 'ctXElementSize', 'ctXMaxV', 'ctXMinV', 'ctXMissingV', 'ctXTable',
- 'ctXTableLengths', 'ctXTableType', 'ctYElementSize', 'ctYMaxV',
- 'ctYMinV', 'ctYMissingV', 'ctYTable', 'ctYTableLengths',
- 'ctYTableType', 'dcDelayCompute', 'errBuffer',
- 'errFileName', 'errFilePtr', 'errLevel', 'errPrint', 'errUnitNumber',
- 'gsClipOn', 'gsColors', 'gsEdgeColor', 'gsEdgeDashPattern',
- 'gsEdgeDashSegLenF', 'gsEdgeThicknessF', 'gsEdgesOn',
- 'gsFillBackgroundColor', 'gsFillColor', 'gsFillDotSizeF',
- 'gsFillIndex', 'gsFillLineThicknessF', 'gsFillOpacityF',
- 'gsFillScaleF', 'gsFont', 'gsFontAspectF', 'gsFontColor',
- 'gsFontHeightF', 'gsFontOpacityF', 'gsFontQuality',
- 'gsFontThicknessF', 'gsLineColor', 'gsLineDashPattern',
- 'gsLineDashSegLenF', 'gsLineLabelConstantSpacingF', 'gsLineLabelFont',
- 'gsLineLabelFontAspectF', 'gsLineLabelFontColor',
- 'gsLineLabelFontHeightF', 'gsLineLabelFontQuality',
- 'gsLineLabelFontThicknessF', 'gsLineLabelFuncCode',
- 'gsLineLabelString', 'gsLineOpacityF', 'gsLineThicknessF',
- 'gsMarkerColor', 'gsMarkerIndex', 'gsMarkerOpacityF', 'gsMarkerSizeF',
- 'gsMarkerThicknessF', 'gsSegments', 'gsTextAngleF',
- 'gsTextConstantSpacingF', 'gsTextDirection', 'gsTextFuncCode',
- 'gsTextJustification', 'gsnAboveYRefLineBarColors',
- 'gsnAboveYRefLineBarFillScales', 'gsnAboveYRefLineBarPatterns',
- 'gsnAboveYRefLineColor', 'gsnAddCyclic', 'gsnAttachBorderOn',
- 'gsnAttachPlotsXAxis', 'gsnBelowYRefLineBarColors',
- 'gsnBelowYRefLineBarFillScales', 'gsnBelowYRefLineBarPatterns',
- 'gsnBelowYRefLineColor', 'gsnBoxMargin', 'gsnCenterString',
- 'gsnCenterStringFontColor', 'gsnCenterStringFontHeightF',
- 'gsnCenterStringFuncCode', 'gsnCenterStringOrthogonalPosF',
- 'gsnCenterStringParallelPosF', 'gsnContourLineThicknessesScale',
- 'gsnContourNegLineDashPattern', 'gsnContourPosLineDashPattern',
- 'gsnContourZeroLineThicknessF', 'gsnDebugWriteFileName', 'gsnDraw',
- 'gsnFrame', 'gsnHistogramBarWidthPercent', 'gsnHistogramBinIntervals',
- 'gsnHistogramBinMissing', 'gsnHistogramBinWidth',
- 'gsnHistogramClassIntervals', 'gsnHistogramCompare',
- 'gsnHistogramComputePercentages',
- 'gsnHistogramComputePercentagesNoMissing',
- 'gsnHistogramDiscreteBinValues', 'gsnHistogramDiscreteClassValues',
- 'gsnHistogramHorizontal', 'gsnHistogramMinMaxBinsOn',
- 'gsnHistogramNumberOfBins', 'gsnHistogramPercentSign',
- 'gsnHistogramSelectNiceIntervals', 'gsnLeftString',
- 'gsnLeftStringFontColor', 'gsnLeftStringFontHeightF',
- 'gsnLeftStringFuncCode', 'gsnLeftStringOrthogonalPosF',
- 'gsnLeftStringParallelPosF', 'gsnMajorLatSpacing',
- 'gsnMajorLonSpacing', 'gsnMaskLambertConformal',
- 'gsnMaskLambertConformalOutlineOn', 'gsnMaximize',
- 'gsnMinorLatSpacing', 'gsnMinorLonSpacing', 'gsnPanelBottom',
- 'gsnPanelCenter', 'gsnPanelDebug', 'gsnPanelFigureStrings',
- 'gsnPanelFigureStringsBackgroundFillColor',
- 'gsnPanelFigureStringsFontHeightF', 'gsnPanelFigureStringsJust',
- 'gsnPanelFigureStringsPerimOn', 'gsnPanelLabelBar', 'gsnPanelLeft',
- 'gsnPanelMainFont', 'gsnPanelMainFontColor',
- 'gsnPanelMainFontHeightF', 'gsnPanelMainString', 'gsnPanelRight',
- 'gsnPanelRowSpec', 'gsnPanelScalePlotIndex', 'gsnPanelTop',
- 'gsnPanelXF', 'gsnPanelXWhiteSpacePercent', 'gsnPanelYF',
- 'gsnPanelYWhiteSpacePercent', 'gsnPaperHeight', 'gsnPaperMargin',
- 'gsnPaperOrientation', 'gsnPaperWidth', 'gsnPolar',
- 'gsnPolarLabelDistance', 'gsnPolarLabelFont',
- 'gsnPolarLabelFontHeightF', 'gsnPolarLabelSpacing', 'gsnPolarTime',
- 'gsnPolarUT', 'gsnRightString', 'gsnRightStringFontColor',
- 'gsnRightStringFontHeightF', 'gsnRightStringFuncCode',
- 'gsnRightStringOrthogonalPosF', 'gsnRightStringParallelPosF',
- 'gsnScalarContour', 'gsnScale', 'gsnShape', 'gsnSpreadColorEnd',
- 'gsnSpreadColorStart', 'gsnSpreadColors', 'gsnStringFont',
- 'gsnStringFontColor', 'gsnStringFontHeightF', 'gsnStringFuncCode',
- 'gsnTickMarksOn', 'gsnXAxisIrregular2Linear', 'gsnXAxisIrregular2Log',
- 'gsnXRefLine', 'gsnXRefLineColor', 'gsnXRefLineDashPattern',
- 'gsnXRefLineThicknessF', 'gsnXYAboveFillColors', 'gsnXYBarChart',
- 'gsnXYBarChartBarWidth', 'gsnXYBarChartColors',
- 'gsnXYBarChartColors2', 'gsnXYBarChartFillDotSizeF',
- 'gsnXYBarChartFillLineThicknessF', 'gsnXYBarChartFillOpacityF',
- 'gsnXYBarChartFillScaleF', 'gsnXYBarChartOutlineOnly',
- 'gsnXYBarChartOutlineThicknessF', 'gsnXYBarChartPatterns',
- 'gsnXYBarChartPatterns2', 'gsnXYBelowFillColors', 'gsnXYFillColors',
- 'gsnXYFillOpacities', 'gsnXYLeftFillColors', 'gsnXYRightFillColors',
- 'gsnYAxisIrregular2Linear', 'gsnYAxisIrregular2Log', 'gsnYRefLine',
- 'gsnYRefLineColor', 'gsnYRefLineColors', 'gsnYRefLineDashPattern',
- 'gsnYRefLineDashPatterns', 'gsnYRefLineThicknessF',
- 'gsnYRefLineThicknesses', 'gsnZonalMean', 'gsnZonalMeanXMaxF',
- 'gsnZonalMeanXMinF', 'gsnZonalMeanYRefLine', 'lbAutoManage',
- 'lbBottomMarginF', 'lbBoxCount', 'lbBoxEndCapStyle', 'lbBoxFractions',
- 'lbBoxLineColor', 'lbBoxLineDashPattern', 'lbBoxLineDashSegLenF',
- 'lbBoxLineThicknessF', 'lbBoxLinesOn', 'lbBoxMajorExtentF',
- 'lbBoxMinorExtentF', 'lbBoxSeparatorLinesOn', 'lbBoxSizing',
- 'lbFillBackground', 'lbFillColor', 'lbFillColors', 'lbFillDotSizeF',
- 'lbFillLineThicknessF', 'lbFillPattern', 'lbFillPatterns',
- 'lbFillScaleF', 'lbFillScales', 'lbJustification', 'lbLabelAlignment',
- 'lbLabelAngleF', 'lbLabelAutoStride', 'lbLabelBarOn',
- 'lbLabelConstantSpacingF', 'lbLabelDirection', 'lbLabelFont',
- 'lbLabelFontAspectF', 'lbLabelFontColor', 'lbLabelFontHeightF',
- 'lbLabelFontQuality', 'lbLabelFontThicknessF', 'lbLabelFuncCode',
- 'lbLabelJust', 'lbLabelOffsetF', 'lbLabelPosition', 'lbLabelStride',
- 'lbLabelStrings', 'lbLabelsOn', 'lbLeftMarginF', 'lbMaxLabelLenF',
- 'lbMinLabelSpacingF', 'lbMonoFillColor', 'lbMonoFillPattern',
- 'lbMonoFillScale', 'lbOrientation', 'lbPerimColor',
- 'lbPerimDashPattern', 'lbPerimDashSegLenF', 'lbPerimFill',
- 'lbPerimFillColor', 'lbPerimOn', 'lbPerimThicknessF',
- 'lbRasterFillOn', 'lbRightMarginF', 'lbTitleAngleF',
- 'lbTitleConstantSpacingF', 'lbTitleDirection', 'lbTitleExtentF',
- 'lbTitleFont', 'lbTitleFontAspectF', 'lbTitleFontColor',
- 'lbTitleFontHeightF', 'lbTitleFontQuality', 'lbTitleFontThicknessF',
- 'lbTitleFuncCode', 'lbTitleJust', 'lbTitleOffsetF', 'lbTitleOn',
- 'lbTitlePosition', 'lbTitleString', 'lbTopMarginF', 'lgAutoManage',
- 'lgBottomMarginF', 'lgBoxBackground', 'lgBoxLineColor',
- 'lgBoxLineDashPattern', 'lgBoxLineDashSegLenF', 'lgBoxLineThicknessF',
- 'lgBoxLinesOn', 'lgBoxMajorExtentF', 'lgBoxMinorExtentF',
- 'lgDashIndex', 'lgDashIndexes', 'lgItemCount', 'lgItemOrder',
- 'lgItemPlacement', 'lgItemPositions', 'lgItemType', 'lgItemTypes',
- 'lgJustification', 'lgLabelAlignment', 'lgLabelAngleF',
- 'lgLabelAutoStride', 'lgLabelConstantSpacingF', 'lgLabelDirection',
- 'lgLabelFont', 'lgLabelFontAspectF', 'lgLabelFontColor',
- 'lgLabelFontHeightF', 'lgLabelFontQuality', 'lgLabelFontThicknessF',
- 'lgLabelFuncCode', 'lgLabelJust', 'lgLabelOffsetF', 'lgLabelPosition',
- 'lgLabelStride', 'lgLabelStrings', 'lgLabelsOn', 'lgLeftMarginF',
- 'lgLegendOn', 'lgLineColor', 'lgLineColors', 'lgLineDashSegLenF',
- 'lgLineDashSegLens', 'lgLineLabelConstantSpacingF', 'lgLineLabelFont',
- 'lgLineLabelFontAspectF', 'lgLineLabelFontColor',
- 'lgLineLabelFontColors', 'lgLineLabelFontHeightF',
- 'lgLineLabelFontHeights', 'lgLineLabelFontQuality',
- 'lgLineLabelFontThicknessF', 'lgLineLabelFuncCode',
- 'lgLineLabelStrings', 'lgLineLabelsOn', 'lgLineThicknessF',
- 'lgLineThicknesses', 'lgMarkerColor', 'lgMarkerColors',
- 'lgMarkerIndex', 'lgMarkerIndexes', 'lgMarkerSizeF', 'lgMarkerSizes',
- 'lgMarkerThicknessF', 'lgMarkerThicknesses', 'lgMonoDashIndex',
- 'lgMonoItemType', 'lgMonoLineColor', 'lgMonoLineDashSegLen',
- 'lgMonoLineLabelFontColor', 'lgMonoLineLabelFontHeight',
- 'lgMonoLineThickness', 'lgMonoMarkerColor', 'lgMonoMarkerIndex',
- 'lgMonoMarkerSize', 'lgMonoMarkerThickness', 'lgOrientation',
- 'lgPerimColor', 'lgPerimDashPattern', 'lgPerimDashSegLenF',
- 'lgPerimFill', 'lgPerimFillColor', 'lgPerimOn', 'lgPerimThicknessF',
- 'lgRightMarginF', 'lgTitleAngleF', 'lgTitleConstantSpacingF',
- 'lgTitleDirection', 'lgTitleExtentF', 'lgTitleFont',
- 'lgTitleFontAspectF', 'lgTitleFontColor', 'lgTitleFontHeightF',
- 'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode',
- 'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition',
- 'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount',
- 'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF',
- 'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF',
- 'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF',
- 'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor',
- 'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF',
- 'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution',
- 'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern',
- 'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary',
- 'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor',
- 'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF',
- 'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground',
- 'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default',
- 'mpFillScaleF', 'mpFillScales', 'mpFillScales-default',
- 'mpFixedAreaGroups', 'mpGeophysicalLineColor',
- 'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF',
- 'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn',
- 'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF',
- 'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF',
- 'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode',
- 'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF',
- 'mpInlandWaterFillColor', 'mpInlandWaterFillPattern',
- 'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor',
- 'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF',
- 'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor',
- 'mpLandFillPattern', 'mpLandFillScaleF', 'mpLeftAngleF',
- 'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF',
- 'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF',
- 'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor',
- 'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF',
- 'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers',
- 'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF',
- 'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern',
- 'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern',
- 'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern',
- 'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder',
- 'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers',
- 'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern',
- 'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn',
- 'mpPolyMode', 'mpProjection', 'mpProvincialLineColor',
- 'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF',
- 'mpProvincialLineThicknessF', 'mpRelativeCenterLat',
- 'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF',
- 'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF',
- 'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF',
- 'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F',
- 'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors',
- 'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns',
- 'mpSpecifiedFillPriority', 'mpSpecifiedFillScales',
- 'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF',
- 'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF',
- 'mpUSStateLineColor', 'mpUSStateLineDashPattern',
- 'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF',
- 'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode',
- 'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
- 'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF',
- 'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF',
- 'pmLegendKeepAspect', 'pmLegendOrthogonalPosF',
- 'pmLegendParallelPosF', 'pmLegendSide', 'pmLegendWidthF',
- 'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode',
- 'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone',
- 'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray',
- 'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV',
- 'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
- 'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF',
- 'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex',
- 'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds',
- 'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex',
- 'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
- 'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF',
- 'stArrowStride', 'stCrossoverCheckCount',
- 'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn',
- 'stLabelFormat', 'stLengthCheckCount', 'stLevelColors',
- 'stLevelCount', 'stLevelPalette', 'stLevelSelectionMode',
- 'stLevelSpacingF', 'stLevels', 'stLineColor', 'stLineOpacityF',
- 'stLineStartStride', 'stLineThicknessF', 'stMapDirection',
- 'stMaxLevelCount', 'stMaxLevelValF', 'stMinArrowSpacingF',
- 'stMinDistanceF', 'stMinLevelValF', 'stMinLineSpacingF',
- 'stMinStepFactorF', 'stMonoLineColor', 'stNoDataLabelOn',
- 'stNoDataLabelString', 'stScalarFieldData', 'stScalarMissingValColor',
- 'stSpanLevelPalette', 'stStepSizeF', 'stStreamlineDrawOrder',
- 'stUseScalarArray', 'stVectorFieldData', 'stZeroFLabelAngleF',
- 'stZeroFLabelBackgroundColor', 'stZeroFLabelConstantSpacingF',
- 'stZeroFLabelFont', 'stZeroFLabelFontAspectF',
- 'stZeroFLabelFontColor', 'stZeroFLabelFontHeightF',
- 'stZeroFLabelFontQuality', 'stZeroFLabelFontThicknessF',
- 'stZeroFLabelFuncCode', 'stZeroFLabelJust', 'stZeroFLabelOn',
- 'stZeroFLabelOrthogonalPosF', 'stZeroFLabelParallelPosF',
- 'stZeroFLabelPerimColor', 'stZeroFLabelPerimOn',
- 'stZeroFLabelPerimSpaceF', 'stZeroFLabelPerimThicknessF',
- 'stZeroFLabelSide', 'stZeroFLabelString', 'stZeroFLabelTextDirection',
- 'stZeroFLabelZone', 'tfDoNDCOverlay', 'tfPlotManagerOn',
- 'tfPolyDrawList', 'tfPolyDrawOrder', 'tiDeltaF', 'tiMainAngleF',
- 'tiMainConstantSpacingF', 'tiMainDirection', 'tiMainFont',
- 'tiMainFontAspectF', 'tiMainFontColor', 'tiMainFontHeightF',
- 'tiMainFontQuality', 'tiMainFontThicknessF', 'tiMainFuncCode',
- 'tiMainJust', 'tiMainOffsetXF', 'tiMainOffsetYF', 'tiMainOn',
- 'tiMainPosition', 'tiMainSide', 'tiMainString', 'tiUseMainAttributes',
- 'tiXAxisAngleF', 'tiXAxisConstantSpacingF', 'tiXAxisDirection',
- 'tiXAxisFont', 'tiXAxisFontAspectF', 'tiXAxisFontColor',
- 'tiXAxisFontHeightF', 'tiXAxisFontQuality', 'tiXAxisFontThicknessF',
- 'tiXAxisFuncCode', 'tiXAxisJust', 'tiXAxisOffsetXF',
- 'tiXAxisOffsetYF', 'tiXAxisOn', 'tiXAxisPosition', 'tiXAxisSide',
- 'tiXAxisString', 'tiYAxisAngleF', 'tiYAxisConstantSpacingF',
- 'tiYAxisDirection', 'tiYAxisFont', 'tiYAxisFontAspectF',
- 'tiYAxisFontColor', 'tiYAxisFontHeightF', 'tiYAxisFontQuality',
- 'tiYAxisFontThicknessF', 'tiYAxisFuncCode', 'tiYAxisJust',
- 'tiYAxisOffsetXF', 'tiYAxisOffsetYF', 'tiYAxisOn', 'tiYAxisPosition',
- 'tiYAxisSide', 'tiYAxisString', 'tmBorderLineColor',
- 'tmBorderThicknessF', 'tmEqualizeXYSizes', 'tmLabelAutoStride',
- 'tmSciNoteCutoff', 'tmXBAutoPrecision', 'tmXBBorderOn',
- 'tmXBDataLeftF', 'tmXBDataRightF', 'tmXBFormat', 'tmXBIrrTensionF',
- 'tmXBIrregularPoints', 'tmXBLabelAngleF', 'tmXBLabelConstantSpacingF',
- 'tmXBLabelDeltaF', 'tmXBLabelDirection', 'tmXBLabelFont',
- 'tmXBLabelFontAspectF', 'tmXBLabelFontColor', 'tmXBLabelFontHeightF',
- 'tmXBLabelFontQuality', 'tmXBLabelFontThicknessF',
- 'tmXBLabelFuncCode', 'tmXBLabelJust', 'tmXBLabelStride', 'tmXBLabels',
- 'tmXBLabelsOn', 'tmXBMajorLengthF', 'tmXBMajorLineColor',
- 'tmXBMajorOutwardLengthF', 'tmXBMajorThicknessF', 'tmXBMaxLabelLenF',
- 'tmXBMaxTicks', 'tmXBMinLabelSpacingF', 'tmXBMinorLengthF',
- 'tmXBMinorLineColor', 'tmXBMinorOn', 'tmXBMinorOutwardLengthF',
- 'tmXBMinorPerMajor', 'tmXBMinorThicknessF', 'tmXBMinorValues',
- 'tmXBMode', 'tmXBOn', 'tmXBPrecision', 'tmXBStyle', 'tmXBTickEndF',
- 'tmXBTickSpacingF', 'tmXBTickStartF', 'tmXBValues', 'tmXMajorGrid',
- 'tmXMajorGridLineColor', 'tmXMajorGridLineDashPattern',
- 'tmXMajorGridThicknessF', 'tmXMinorGrid', 'tmXMinorGridLineColor',
- 'tmXMinorGridLineDashPattern', 'tmXMinorGridThicknessF',
- 'tmXTAutoPrecision', 'tmXTBorderOn', 'tmXTDataLeftF',
- 'tmXTDataRightF', 'tmXTFormat', 'tmXTIrrTensionF',
- 'tmXTIrregularPoints', 'tmXTLabelAngleF', 'tmXTLabelConstantSpacingF',
- 'tmXTLabelDeltaF', 'tmXTLabelDirection', 'tmXTLabelFont',
- 'tmXTLabelFontAspectF', 'tmXTLabelFontColor', 'tmXTLabelFontHeightF',
- 'tmXTLabelFontQuality', 'tmXTLabelFontThicknessF',
- 'tmXTLabelFuncCode', 'tmXTLabelJust', 'tmXTLabelStride', 'tmXTLabels',
- 'tmXTLabelsOn', 'tmXTMajorLengthF', 'tmXTMajorLineColor',
- 'tmXTMajorOutwardLengthF', 'tmXTMajorThicknessF', 'tmXTMaxLabelLenF',
- 'tmXTMaxTicks', 'tmXTMinLabelSpacingF', 'tmXTMinorLengthF',
- 'tmXTMinorLineColor', 'tmXTMinorOn', 'tmXTMinorOutwardLengthF',
- 'tmXTMinorPerMajor', 'tmXTMinorThicknessF', 'tmXTMinorValues',
- 'tmXTMode', 'tmXTOn', 'tmXTPrecision', 'tmXTStyle', 'tmXTTickEndF',
- 'tmXTTickSpacingF', 'tmXTTickStartF', 'tmXTValues', 'tmXUseBottom',
- 'tmYLAutoPrecision', 'tmYLBorderOn', 'tmYLDataBottomF',
- 'tmYLDataTopF', 'tmYLFormat', 'tmYLIrrTensionF',
- 'tmYLIrregularPoints', 'tmYLLabelAngleF', 'tmYLLabelConstantSpacingF',
- 'tmYLLabelDeltaF', 'tmYLLabelDirection', 'tmYLLabelFont',
- 'tmYLLabelFontAspectF', 'tmYLLabelFontColor', 'tmYLLabelFontHeightF',
- 'tmYLLabelFontQuality', 'tmYLLabelFontThicknessF',
- 'tmYLLabelFuncCode', 'tmYLLabelJust', 'tmYLLabelStride', 'tmYLLabels',
- 'tmYLLabelsOn', 'tmYLMajorLengthF', 'tmYLMajorLineColor',
- 'tmYLMajorOutwardLengthF', 'tmYLMajorThicknessF', 'tmYLMaxLabelLenF',
- 'tmYLMaxTicks', 'tmYLMinLabelSpacingF', 'tmYLMinorLengthF',
- 'tmYLMinorLineColor', 'tmYLMinorOn', 'tmYLMinorOutwardLengthF',
- 'tmYLMinorPerMajor', 'tmYLMinorThicknessF', 'tmYLMinorValues',
- 'tmYLMode', 'tmYLOn', 'tmYLPrecision', 'tmYLStyle', 'tmYLTickEndF',
- 'tmYLTickSpacingF', 'tmYLTickStartF', 'tmYLValues', 'tmYMajorGrid',
- 'tmYMajorGridLineColor', 'tmYMajorGridLineDashPattern',
- 'tmYMajorGridThicknessF', 'tmYMinorGrid', 'tmYMinorGridLineColor',
- 'tmYMinorGridLineDashPattern', 'tmYMinorGridThicknessF',
- 'tmYRAutoPrecision', 'tmYRBorderOn', 'tmYRDataBottomF',
- 'tmYRDataTopF', 'tmYRFormat', 'tmYRIrrTensionF',
- 'tmYRIrregularPoints', 'tmYRLabelAngleF', 'tmYRLabelConstantSpacingF',
- 'tmYRLabelDeltaF', 'tmYRLabelDirection', 'tmYRLabelFont',
- 'tmYRLabelFontAspectF', 'tmYRLabelFontColor', 'tmYRLabelFontHeightF',
- 'tmYRLabelFontQuality', 'tmYRLabelFontThicknessF',
- 'tmYRLabelFuncCode', 'tmYRLabelJust', 'tmYRLabelStride', 'tmYRLabels',
- 'tmYRLabelsOn', 'tmYRMajorLengthF', 'tmYRMajorLineColor',
- 'tmYRMajorOutwardLengthF', 'tmYRMajorThicknessF', 'tmYRMaxLabelLenF',
- 'tmYRMaxTicks', 'tmYRMinLabelSpacingF', 'tmYRMinorLengthF',
- 'tmYRMinorLineColor', 'tmYRMinorOn', 'tmYRMinorOutwardLengthF',
- 'tmYRMinorPerMajor', 'tmYRMinorThicknessF', 'tmYRMinorValues',
- 'tmYRMode', 'tmYROn', 'tmYRPrecision', 'tmYRStyle', 'tmYRTickEndF',
- 'tmYRTickSpacingF', 'tmYRTickStartF', 'tmYRValues', 'tmYUseLeft',
- 'trGridType', 'trLineInterpolationOn',
- 'trXAxisType', 'trXCoordPoints', 'trXInterPoints', 'trXLog',
- 'trXMaxF', 'trXMinF', 'trXReverse', 'trXSamples', 'trXTensionF',
- 'trYAxisType', 'trYCoordPoints', 'trYInterPoints', 'trYLog',
- 'trYMaxF', 'trYMinF', 'trYReverse', 'trYSamples', 'trYTensionF',
- 'txAngleF', 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
- 'txFont', 'HLU-Fonts', 'txFontAspectF', 'txFontColor',
- 'txFontHeightF', 'txFontOpacityF', 'txFontQuality',
- 'txFontThicknessF', 'txFuncCode', 'txJust', 'txPerimColor',
- 'txPerimDashLengthF', 'txPerimDashPattern', 'txPerimOn',
- 'txPerimSpaceF', 'txPerimThicknessF', 'txPosXF', 'txPosYF',
- 'txString', 'vcExplicitLabelBarLabelsOn', 'vcFillArrowEdgeColor',
- 'vcFillArrowEdgeThicknessF', 'vcFillArrowFillColor',
- 'vcFillArrowHeadInteriorXF', 'vcFillArrowHeadMinFracXF',
- 'vcFillArrowHeadMinFracYF', 'vcFillArrowHeadXF', 'vcFillArrowHeadYF',
- 'vcFillArrowMinFracWidthF', 'vcFillArrowWidthF', 'vcFillArrowsOn',
- 'vcFillOverEdge', 'vcGlyphOpacityF', 'vcGlyphStyle',
- 'vcLabelBarEndLabelsOn', 'vcLabelFontColor', 'vcLabelFontHeightF',
- 'vcLabelsOn', 'vcLabelsUseVectorColor', 'vcLevelColors',
- 'vcLevelCount', 'vcLevelPalette', 'vcLevelSelectionMode',
- 'vcLevelSpacingF', 'vcLevels', 'vcLineArrowColor',
- 'vcLineArrowHeadMaxSizeF', 'vcLineArrowHeadMinSizeF',
- 'vcLineArrowThicknessF', 'vcMagnitudeFormat',
- 'vcMagnitudeScaleFactorF', 'vcMagnitudeScaleValueF',
- 'vcMagnitudeScalingMode', 'vcMapDirection', 'vcMaxLevelCount',
- 'vcMaxLevelValF', 'vcMaxMagnitudeF', 'vcMinAnnoAngleF',
- 'vcMinAnnoArrowAngleF', 'vcMinAnnoArrowEdgeColor',
- 'vcMinAnnoArrowFillColor', 'vcMinAnnoArrowLineColor',
- 'vcMinAnnoArrowMinOffsetF', 'vcMinAnnoArrowSpaceF',
- 'vcMinAnnoArrowUseVecColor', 'vcMinAnnoBackgroundColor',
- 'vcMinAnnoConstantSpacingF', 'vcMinAnnoExplicitMagnitudeF',
- 'vcMinAnnoFont', 'vcMinAnnoFontAspectF', 'vcMinAnnoFontColor',
- 'vcMinAnnoFontHeightF', 'vcMinAnnoFontQuality',
- 'vcMinAnnoFontThicknessF', 'vcMinAnnoFuncCode', 'vcMinAnnoJust',
- 'vcMinAnnoOn', 'vcMinAnnoOrientation', 'vcMinAnnoOrthogonalPosF',
- 'vcMinAnnoParallelPosF', 'vcMinAnnoPerimColor', 'vcMinAnnoPerimOn',
- 'vcMinAnnoPerimSpaceF', 'vcMinAnnoPerimThicknessF', 'vcMinAnnoSide',
- 'vcMinAnnoString1', 'vcMinAnnoString1On', 'vcMinAnnoString2',
- 'vcMinAnnoString2On', 'vcMinAnnoTextDirection', 'vcMinAnnoZone',
- 'vcMinDistanceF', 'vcMinFracLengthF', 'vcMinLevelValF',
- 'vcMinMagnitudeF', 'vcMonoFillArrowEdgeColor',
- 'vcMonoFillArrowFillColor', 'vcMonoLineArrowColor',
- 'vcMonoWindBarbColor', 'vcNoDataLabelOn', 'vcNoDataLabelString',
- 'vcPositionMode', 'vcRefAnnoAngleF', 'vcRefAnnoArrowAngleF',
- 'vcRefAnnoArrowEdgeColor', 'vcRefAnnoArrowFillColor',
- 'vcRefAnnoArrowLineColor', 'vcRefAnnoArrowMinOffsetF',
- 'vcRefAnnoArrowSpaceF', 'vcRefAnnoArrowUseVecColor',
- 'vcRefAnnoBackgroundColor', 'vcRefAnnoConstantSpacingF',
- 'vcRefAnnoExplicitMagnitudeF', 'vcRefAnnoFont',
- 'vcRefAnnoFontAspectF', 'vcRefAnnoFontColor', 'vcRefAnnoFontHeightF',
- 'vcRefAnnoFontQuality', 'vcRefAnnoFontThicknessF',
- 'vcRefAnnoFuncCode', 'vcRefAnnoJust', 'vcRefAnnoOn',
- 'vcRefAnnoOrientation', 'vcRefAnnoOrthogonalPosF',
- 'vcRefAnnoParallelPosF', 'vcRefAnnoPerimColor', 'vcRefAnnoPerimOn',
- 'vcRefAnnoPerimSpaceF', 'vcRefAnnoPerimThicknessF', 'vcRefAnnoSide',
- 'vcRefAnnoString1', 'vcRefAnnoString1On', 'vcRefAnnoString2',
- 'vcRefAnnoString2On', 'vcRefAnnoTextDirection', 'vcRefAnnoZone',
- 'vcRefLengthF', 'vcRefMagnitudeF', 'vcScalarFieldData',
- 'vcScalarMissingValColor', 'vcScalarValueFormat',
- 'vcScalarValueScaleFactorF', 'vcScalarValueScaleValueF',
- 'vcScalarValueScalingMode', 'vcSpanLevelPalette', 'vcUseRefAnnoRes',
- 'vcUseScalarArray', 'vcVectorDrawOrder', 'vcVectorFieldData',
- 'vcWindBarbCalmCircleSizeF', 'vcWindBarbColor',
- 'vcWindBarbLineThicknessF', 'vcWindBarbScaleFactorF',
- 'vcWindBarbTickAngleF', 'vcWindBarbTickLengthF',
- 'vcWindBarbTickSpacingF', 'vcZeroFLabelAngleF',
- 'vcZeroFLabelBackgroundColor', 'vcZeroFLabelConstantSpacingF',
- 'vcZeroFLabelFont', 'vcZeroFLabelFontAspectF',
- 'vcZeroFLabelFontColor', 'vcZeroFLabelFontHeightF',
- 'vcZeroFLabelFontQuality', 'vcZeroFLabelFontThicknessF',
- 'vcZeroFLabelFuncCode', 'vcZeroFLabelJust', 'vcZeroFLabelOn',
- 'vcZeroFLabelOrthogonalPosF', 'vcZeroFLabelParallelPosF',
- 'vcZeroFLabelPerimColor', 'vcZeroFLabelPerimOn',
- 'vcZeroFLabelPerimSpaceF', 'vcZeroFLabelPerimThicknessF',
- 'vcZeroFLabelSide', 'vcZeroFLabelString', 'vcZeroFLabelTextDirection',
- 'vcZeroFLabelZone', 'vfCopyData', 'vfDataArray',
- 'vfExchangeDimensions', 'vfExchangeUVData', 'vfMagMaxV', 'vfMagMinV',
- 'vfMissingUValueV', 'vfMissingVValueV', 'vfPolarData',
- 'vfSingleMissingValue', 'vfUDataArray', 'vfUMaxV', 'vfUMinV',
- 'vfVDataArray', 'vfVMaxV', 'vfVMinV', 'vfXArray', 'vfXCActualEndF',
- 'vfXCActualStartF', 'vfXCEndIndex', 'vfXCEndSubsetV', 'vfXCEndV',
- 'vfXCStartIndex', 'vfXCStartSubsetV', 'vfXCStartV', 'vfXCStride',
- 'vfYArray', 'vfYCActualEndF', 'vfYCActualStartF', 'vfYCEndIndex',
- 'vfYCEndSubsetV', 'vfYCEndV', 'vfYCStartIndex', 'vfYCStartSubsetV',
- 'vfYCStartV', 'vfYCStride', 'vpAnnoManagerId', 'vpClipOn',
- 'vpHeightF', 'vpKeepAspect', 'vpOn', 'vpUseSegments', 'vpWidthF',
- 'vpXF', 'vpYF', 'wkAntiAlias', 'wkBackgroundColor', 'wkBackgroundOpacityF',
- 'wkColorMapLen', 'wkColorMap', 'wkColorModel', 'wkDashTableLength',
- 'wkDefGraphicStyleId', 'wkDeviceLowerX', 'wkDeviceLowerY',
- 'wkDeviceUpperX', 'wkDeviceUpperY', 'wkFileName', 'wkFillTableLength',
- 'wkForegroundColor', 'wkFormat', 'wkFullBackground', 'wkGksWorkId',
- 'wkHeight', 'wkMarkerTableLength', 'wkMetaName', 'wkOrientation',
- 'wkPDFFileName', 'wkPDFFormat', 'wkPDFResolution', 'wkPSFileName',
- 'wkPSFormat', 'wkPSResolution', 'wkPaperHeightF', 'wkPaperSize',
- 'wkPaperWidthF', 'wkPause', 'wkTopLevelViews', 'wkViews',
- 'wkVisualType', 'wkWidth', 'wkWindowId', 'wkXColorMode', 'wsCurrentSize',
- 'wsMaximumSize', 'wsThresholdSize', 'xyComputeXMax',
- 'xyComputeXMin', 'xyComputeYMax', 'xyComputeYMin', 'xyCoordData',
- 'xyCoordDataSpec', 'xyCurveDrawOrder', 'xyDashPattern',
- 'xyDashPatterns', 'xyExplicitLabels', 'xyExplicitLegendLabels',
- 'xyLabelMode', 'xyLineColor', 'xyLineColors', 'xyLineDashSegLenF',
- 'xyLineLabelConstantSpacingF', 'xyLineLabelFont',
- 'xyLineLabelFontAspectF', 'xyLineLabelFontColor',
- 'xyLineLabelFontColors', 'xyLineLabelFontHeightF',
- 'xyLineLabelFontQuality', 'xyLineLabelFontThicknessF',
- 'xyLineLabelFuncCode', 'xyLineThicknessF', 'xyLineThicknesses',
- 'xyMarkLineMode', 'xyMarkLineModes', 'xyMarker', 'xyMarkerColor',
- 'xyMarkerColors', 'xyMarkerSizeF', 'xyMarkerSizes',
- 'xyMarkerThicknessF', 'xyMarkerThicknesses', 'xyMarkers',
- 'xyMonoDashPattern', 'xyMonoLineColor', 'xyMonoLineLabelFontColor',
- 'xyMonoLineThickness', 'xyMonoMarkLineMode', 'xyMonoMarker',
- 'xyMonoMarkerColor', 'xyMonoMarkerSize', 'xyMonoMarkerThickness',
- 'xyXIrrTensionF', 'xyXIrregularPoints', 'xyXStyle', 'xyYIrrTensionF',
- 'xyYIrregularPoints', 'xyYStyle'), prefix=r'\b'),
- Name.Builtin),
-
- # Booleans
- (r'\.(True|False)\.', Name.Builtin),
- # Comparing Operators
- (r'\.(eq|ne|lt|le|gt|ge|not|and|or|xor)\.', Operator.Word),
- ],
-
- 'strings': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- ],
-
- 'nums': [
- (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
- (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
- (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
- ],
- }
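
The resource-name table above (NCL graphics resources) is handed to pygments' words() helper with prefix=r'\b'; under the hood that is regex_opt() from pygments.regexopt, which folds the whole list into one optimized alternation instead of thousands of separate patterns. A minimal sketch of the same mechanism, using a tiny hypothetical stand-in list rather than the full table:

    # Sketch only: three stand-in names instead of the full resource table.
    import re
    from pygments.regexopt import regex_opt

    resources = ['cnLineColor', 'cnLineColors', 'cnLinesOn']
    pattern = regex_opt(resources, prefix=r'\b')   # one trie-like regex for the whole list
    print(re.findall(pattern, 'res@cnLineColors = True'))   # e.g. ['cnLineColors']
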
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py b/venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py
deleted file mode 100644
index 11f5b93..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py
+++ /dev/null
@@ -1,200 +0,0 @@
-"""
- pygments.lexers.nimrod
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Nim language (formerly known as Nimrod).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, default, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['NimrodLexer']
-
-
-class NimrodLexer(RegexLexer):
- """
- For Nim source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Nimrod'
- url = 'http://nim-lang.org/'
- aliases = ['nimrod', 'nim']
- filenames = ['*.nim', '*.nimrod']
- mimetypes = ['text/x-nim']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- def underscorize(words):
- newWords = []
- new = []
- for word in words:
- for ch in word:
- new.append(ch)
- new.append("_?")
- newWords.append(''.join(new))
- new = []
- return "|".join(newWords)
-
- keywords = [
- 'addr', 'and', 'as', 'asm', 'bind', 'block', 'break', 'case',
- 'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
- 'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
- 'export', 'finally', 'for', 'if', 'in', 'yield', 'interface',
- 'is', 'isnot', 'iterator', 'let', 'mixin', 'mod',
- 'not', 'notin', 'object', 'of', 'or', 'out', 'ptr', 'raise',
- 'ref', 'return', 'shl', 'shr', 'static', 'try',
- 'tuple', 'type', 'using', 'when', 'while', 'xor'
- ]
-
- keywordsPseudo = [
- 'nil', 'true', 'false'
- ]
-
- opWords = [
- 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
- 'notin', 'is', 'isnot'
- ]
-
- types = [
- 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
- 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
- ]
-
- tokens = {
- 'root': [
- # Comments
- (r'##\[', String.Doc, 'doccomment'),
- (r'##.*$', String.Doc),
- (r'#\[', Comment.Multiline, 'comment'),
- (r'#.*$', Comment),
-
- # Pragmas
- (r'\{\.', String.Other, 'pragma'),
-
- # Operators
- (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
- (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
- Punctuation),
-
- # Case statement branch
- (r'(\n\s*)(of)(\s)', bygroups(Text.Whitespace, Keyword,
- Text.Whitespace), 'casebranch'),
-
- # Strings
- (r'(?:[\w]+)"', String, 'rdqs'),
- (r'"""', String.Double, 'tdqs'),
- ('"', String, 'dqs'),
-
- # Char
- ("'", String.Char, 'chars'),
-
- # Keywords
- (r'(%s)\b' % underscorize(opWords), Operator.Word),
- (r'(proc|func|method|macro|template)(\s)(?![(\[\]])',
- bygroups(Keyword, Text.Whitespace), 'funcname'),
- (r'(%s)\b' % underscorize(keywords), Keyword),
- (r'(%s)\b' % underscorize(['from', 'import', 'include', 'export']),
- Keyword.Namespace),
- (r'(v_?a_?r)\b', Keyword.Declaration),
- (r'(%s)\b' % underscorize(types), Name.Builtin),
- (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
-
- # Identifiers
- (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
-
- # Numbers
- (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
- Number.Float, ('float-suffix', 'float-number')),
- (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
- (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
- (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
- (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
-
- # Whitespace
- (r'\s+', Text.Whitespace),
- (r'.+$', Error),
- ],
- 'chars': [
- (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
- (r"'", String.Char, '#pop'),
- (r".", String.Char)
- ],
- 'strings': [
- (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
- (r'[^\\\'"$\n]+', String),
- # quotes, dollars and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'\$', String)
- # newlines are an error (use "nl" state)
- ],
- 'doccomment': [
- (r'[^\]#]+', String.Doc),
- (r'##\[', String.Doc, '#push'),
- (r'\]##', String.Doc, '#pop'),
- (r'[\]#]', String.Doc),
- ],
- 'comment': [
- (r'[^\]#]+', Comment.Multiline),
- (r'#\[', Comment.Multiline, '#push'),
- (r'\]#', Comment.Multiline, '#pop'),
- (r'[\]#]', Comment.Multiline),
- ],
- 'dqs': [
- (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
- String.Escape),
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'rdqs': [
- (r'"(?!")', String, '#pop'),
- (r'""', String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings'),
- (r'\n', String.Double)
- ],
- 'funcname': [
- (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
- (r'`.+`', Name.Function, '#pop')
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'float-number': [
- (r'\.(?!\.)[0-9_]*[f]*', Number.Float),
- (r'e[+-]?[0-9][0-9_]*', Number.Float),
- default('#pop')
- ],
- 'float-suffix': [
- (r'\'f(32|64)', Number.Float),
- default('#pop')
- ],
- 'int-suffix': [
- (r'\'i(32|64)', Number.Integer.Long),
- (r'\'i(8|16)', Number.Integer),
- default('#pop')
- ],
- 'casebranch': [
- (r',', Punctuation),
- (r'[\n ]+', Text.Whitespace),
- (r':', Operator, '#pop'),
- (r'\w+|[^:]', Name.Label),
- ],
- 'pragma': [
- (r'[:,]', Text),
- (r'[\n ]+', Text.Whitespace),
- (r'\.\}', String.Other, '#pop'),
- (r'\w+|\W+|[^.}]', String.Other),
- ],
- }
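
The underscorize() helper above exists because Nim treats identifiers style-insensitively: underscores and, except for the first letter, letter case are ignored, so every keyword has to be matched with an optional "_" between characters. A compact, equivalent sketch of what it builds:

    # Sketch: equivalent of NimrodLexer.underscorize(), kept outside the class.
    import re

    def underscorize(words):
        # "proc" -> "p_?r_?o_?c_?", joined with "|" across all words
        return '|'.join('_?'.join(word) + '_?' for word in words)

    keyword_re = re.compile(r'(%s)\b' % underscorize(['proc', 'case']), re.IGNORECASE)
    print(bool(keyword_re.match('p_roc')))   # True: embedded underscores are ignored
    print(bool(keyword_re.match('PROC')))    # True: flags mirror the lexer's IGNORECASE
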
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/nit.py b/venv/lib/python3.11/site-packages/pygments/lexers/nit.py
deleted file mode 100644
index b4e85f3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/nit.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""
- pygments.lexers.nit
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Nit language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['NitLexer']
-
-
-class NitLexer(RegexLexer):
- """
- For nit source.
-
- .. versionadded:: 2.0
- """
-
- name = 'Nit'
- url = 'http://nitlanguage.org'
- aliases = ['nit']
- filenames = ['*.nit']
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- (words((
- 'package', 'module', 'import', 'class', 'abstract', 'interface',
- 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
- 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
- 'public', 'protected', 'private', 'intrude', 'if', 'then',
- 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
- 'implies', 'return', 'continue', 'break', 'abort', 'assert',
- 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
- 'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t( ])'),
- Keyword),
- (r'[A-Z]\w*', Name.Class),
- (r'"""(([^\'\\]|\\.)|\\r|\\n)*((\{\{?)?(""?\{\{?)*""""*)', String), # Simple long string
- (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
- r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
- (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?(\{\{?""?)*\{\{\{\{*)', String), # Start long string
- (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(""?)?(\{\{?""?)*\{\{\{\{*', String), # Mid long string
- (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(\{\{?)?(""?\{\{?)*""""*', String), # End long string
- (r'"(\\.|([^"}{\\]))*"', String), # Simple String
- (r'"(\\.|([^"}{\\]))*\{', String), # Start string
- (r'\}(\\.|([^"}{\\]))*\{', String), # Mid String
- (r'\}(\\.|([^"}{\\]))*"', String), # End String
- (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
- (r'[0-9]+', Number.Integer),
-            (r'[0-9]*\.[0-9]+', Number.Float),
- (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
- (r'[a-z]\w*', Name),
- (r'_\w+', Name.Variable.Instance),
- (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
- (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
- (r'`\{[^`]*`\}', Text), # Extern blocks won't be Lexed by Nit
- (r'[\r\n\t ]+', Text),
- ],
- }
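
Removing these modules from the vendored copy does not change how they are normally reached: callers go through the registry in pygments.lexers, which resolves aliases and filename globs such as ['*.nit']. A minimal usage sketch, assuming a pygments installation is still importable from somewhere else:

    # Sketch: resolving the Nit lexer through the public registry API.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_for_filename

    lexer = get_lexer_for_filename('hello.nit')       # matched via filenames = ['*.nit']
    print(highlight('print "hello world"\n', lexer, TerminalFormatter()))
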
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/nix.py b/venv/lib/python3.11/site-packages/pygments/lexers/nix.py
deleted file mode 100644
index 7ab59bb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/nix.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
- pygments.lexers.nix
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the NixOS Nix language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-
-__all__ = ['NixLexer']
-
-
-class NixLexer(RegexLexer):
- """
- For the Nix language.
-
- .. versionadded:: 2.0
- """
-
- name = 'Nix'
- url = 'http://nixos.org/nix/'
- aliases = ['nixos', 'nix']
- filenames = ['*.nix']
- mimetypes = ['text/x-nix']
-
- keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
- 'else', 'then', '...']
- builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
- 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
- operators = ['++', '+', '?', '.', '!', '//', '==', '/',
- '!=', '&&', '||', '->', '=', '<', '>', '*', '-']
-
- punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
-
- tokens = {
- 'root': [
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # multiline comments
- (r'/\*', Comment.Multiline, 'comment'),
-
- # whitespace
- (r'\s+', Text),
-
- # keywords
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
-
- # highlight the builtins
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
- Name.Builtin),
-
- (r'\b(true|false|null)\b', Name.Constant),
-
- # floats
- (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?', Number.Float),
-
- # integers
- (r'-?[0-9]+', Number.Integer),
-
- # paths
- (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
- (r'~(\/[\w.+-]+)+', Literal),
- (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
-
- # operators
- ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
- Operator),
-
- # word operators
- (r'\b(or|and)\b', Operator.Word),
-
- (r'\{', Punctuation, 'block'),
-
- # punctuations
- ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
-
- # strings
- (r'"', String.Double, 'doublequote'),
- (r"''", String.Multiline, 'multiline'),
-
- # urls
- (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
-
- # names of variables
- (r'[\w-]+(?=\s*=)', String.Symbol),
- (r'[a-zA-Z_][\w\'-]*', Text),
-
- (r"\$\{", String.Interpol, 'antiquote'),
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'multiline': [
- (r"''(\$|'|\\n|\\r|\\t|\\)", String.Escape),
- (r"''", String.Multiline, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r"[^'\$]+", String.Multiline),
- (r"\$[^\{']", String.Multiline),
- (r"'[^']", String.Multiline),
- (r"\$(?=')", String.Multiline),
- ],
- 'doublequote': [
- (r'\\(\\|"|\$|n)', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r'[^"\\\$]+', String.Double),
- (r'\$[^\{"]', String.Double),
- (r'\$(?=")', String.Double),
- (r'\\', String.Double),
- ],
- 'antiquote': [
- (r"\}", String.Interpol, '#pop'),
-            # TODO: we should probably also escape ''${ and \${ here
- (r"\$\{", String.Interpol, '#push'),
- include('root'),
- ],
- 'block': [
- (r"\}", Punctuation, '#pop'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- rv = 0.0
- # TODO: let/in
- if re.search(r'import.+?<[^>]+>', text):
- rv += 0.4
- if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
- rv += 0.4
- if re.search(r'=\s+mkIf\s+', text):
- rv += 0.4
- if re.search(r'\{[a-zA-Z,\s]+\}:', text):
- rv += 0.1
- return rv
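
The analyse_text() hook above is what pygments' guess_lexer() consults when no filename is available: each candidate lexer returns a score between 0.0 and 1.0 and the highest score wins. A small sketch of that flow, assuming only the public API and a made-up snippet:

    # Sketch: the mkDerivation heuristic above should push this toward Nix.
    from pygments.lexers import guess_lexer

    snippet = '''
    { stdenv, fetchurl }:
    stdenv.mkDerivation rec {
      pname = "hello";
      version = "2.12";
    }
    '''
    print(guess_lexer(snippet).name)   # most likely "Nix"
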
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/oberon.py b/venv/lib/python3.11/site-packages/pygments/lexers/oberon.py
deleted file mode 100644
index 3aaa763..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/oberon.py
+++ /dev/null
@@ -1,120 +0,0 @@
-"""
- pygments.lexers.oberon
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Oberon family languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['ComponentPascalLexer']
-
-
-class ComponentPascalLexer(RegexLexer):
- """
- For Component Pascal source code.
-
- .. versionadded:: 2.1
- """
- name = 'Component Pascal'
- aliases = ['componentpascal', 'cp']
- filenames = ['*.cp', '*.cps']
- mimetypes = ['text/x-component-pascal']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- include('punctuation'),
- include('numliterals'),
- include('strings'),
- include('operators'),
- include('builtins'),
- include('identifiers'),
- ],
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'comments': [
- (r'\(\*([^$].*?)\*\)', Comment.Multiline),
- # TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
- ],
- 'punctuation': [
- (r'[()\[\]{},.:;|]', Punctuation),
- ],
- 'numliterals': [
- (r'[0-9A-F]+X\b', Number.Hex), # char code
- (r'[0-9A-F]+[HL]\b', Number.Hex), # hexadecimal number
- (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
- (r'[0-9]+\.[0-9]+', Number.Float), # real number
- (r'[0-9]+', Number.Integer), # decimal whole number
- ],
- 'strings': [
- (r"'[^\n']*'", String), # single quoted string
- (r'"[^\n"]*"', String), # double quoted string
- ],
- 'operators': [
- # Arithmetic Operators
- (r'[+-]', Operator),
- (r'[*/]', Operator),
- # Relational Operators
- (r'[=#<>]', Operator),
- # Dereferencing Operator
- (r'\^', Operator),
- # Logical AND Operator
- (r'&', Operator),
- # Logical NOT Operator
- (r'~', Operator),
- # Assignment Symbol
- (r':=', Operator),
- # Range Constructor
- (r'\.\.', Operator),
- (r'\$', Operator),
- ],
- 'identifiers': [
- (r'([a-zA-Z_$][\w$]*)', Name),
- ],
- 'builtins': [
- (words((
- 'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
- 'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
- ), suffix=r'\b'), Keyword.Type),
- (words((
- 'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
- 'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
- 'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
- 'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
- 'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
- 'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
- 'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
- 'VAR', 'WHILE', 'WITH'
- ), suffix=r'\b'), Keyword.Reserved),
- (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
- ]
- }
-
- def analyse_text(text):
-        """The only other lexer using .cp is the C++ one, so we check for
-        a few common Pascal keywords here. Those are unfortunately quite
- common across various business languages as well."""
- result = 0
- if 'BEGIN' in text:
- result += 0.01
- if 'END' in text:
- result += 0.01
- if 'PROCEDURE' in text:
- result += 0.01
- if 'END' in text:
- result += 0.01
-
- return result
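
Because *.cp is also claimed by the C++ lexer, filename matching alone is ambiguous; guess_lexer_for_filename() breaks the tie using the small scores returned by analyse_text() above. A sketch of that disambiguation, with a hypothetical one-line module as input:

    # Sketch: content-based tie-breaking for the shared *.cp extension.
    from pygments.lexers import guess_lexer_for_filename

    code = 'MODULE Hello; BEGIN END Hello.'
    print(guess_lexer_for_filename('Hello.cp', code).name)   # likely "Component Pascal"
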
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/objective.py b/venv/lib/python3.11/site-packages/pygments/lexers/objective.py
deleted file mode 100644
index 2e4332a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/objective.py
+++ /dev/null
@@ -1,505 +0,0 @@
-"""
- pygments.lexers.objective
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Objective-C family languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
- inherit, default
-from pygments.token import Text, Keyword, Name, String, Operator, \
- Number, Punctuation, Literal, Comment
-
-from pygments.lexers.c_cpp import CLexer, CppLexer
-
-__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
-
-
-def objective(baselexer):
- """
- Generate a subclass of baselexer that accepts the Objective-C syntax
- extensions.
- """
-
- # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
- # since that's quite common in ordinary C/C++ files. It's OK to match
- # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
- #
- # The upshot of this is that we CANNOT match @class or @interface
- _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
-
- # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
- # (note the identifier is *optional* when there is a ':'!)
- _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
- r'(?:[a-zA-Z_]\w*\s*\]|'
- r'(?:[a-zA-Z_]\w*)?:)')
-
- class GeneratedObjectiveCVariant(baselexer):
- """
- Implements Objective-C syntax on top of an existing C family lexer.
- """
-
- tokens = {
- 'statements': [
- (r'@"', String, 'string'),
- (r'@(YES|NO)', Number),
- (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'@0[0-7]+[Ll]?', Number.Oct),
- (r'@\d+[Ll]?', Number.Integer),
- (r'@\(', Literal, 'literal_number'),
- (r'@\[', Literal, 'literal_array'),
- (r'@\{', Literal, 'literal_dictionary'),
- (words((
- '@selector', '@private', '@protected', '@public', '@encode',
- '@synchronized', '@try', '@throw', '@catch', '@finally',
- '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
- '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
- 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
- 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
- 'out', 'inout', 'release', 'class', '@dynamic', '@optional',
- '@required', '@autoreleasepool', '@import'), suffix=r'\b'),
- Keyword),
- (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
- 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
- Keyword.Type),
- (r'@(true|false|YES|NO)\n', Name.Builtin),
- (r'(YES|NO|nil|self|super)\b', Name.Builtin),
- # Carbon types
- (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
- # Carbon built-ins
- (r'(TRUE|FALSE)\b', Name.Builtin),
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_classname')),
- (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_forward_classname')),
- # @ can also prefix other expressions like @{...} or @(...)
- (r'@', Punctuation),
- inherit,
- ],
- 'oc_classname': [
- # interface definition that inherits
- (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
- bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
- ('#pop', 'oc_ivars')),
- (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- # interface definition for a category
- (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
- bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
- ('#pop', 'oc_ivars')),
- (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
- bygroups(Name.Class, Text, Name.Label), '#pop'),
- # simple interface / implementation
- (r'([a-zA-Z$_][\w$]*)(\s*)(\{)',
- bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
- (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'oc_forward_classname': [
- (r'([a-zA-Z$_][\w$]*)(\s*,\s*)',
- bygroups(Name.Class, Text), 'oc_forward_classname'),
- (r'([a-zA-Z$_][\w$]*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop')
- ],
- 'oc_ivars': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'root': [
- # methods
- (r'^([-+])(\s*)' # method marker
- r'(\(.*?\))?(\s*)' # return type
- r'([a-zA-Z$_][\w$]*:?)', # begin of method name
- bygroups(Punctuation, Text, using(this),
- Text, Name.Function),
- 'method'),
- inherit,
- ],
- 'method': [
- include('whitespace'),
- # TODO unsure if ellipses are allowed elsewhere, see
- # discussion in Issue 789
- (r',', Punctuation),
- (r'\.\.\.', Punctuation),
- (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
- bygroups(using(this), Text, Name.Variable)),
- (r'[a-zA-Z$_][\w$]*:', Name.Function),
- (';', Punctuation, '#pop'),
- (r'\{', Punctuation, 'function'),
- default('#pop'),
- ],
- 'literal_number': [
- (r'\(', Punctuation, 'literal_number_inner'),
- (r'\)', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_number_inner': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_array': [
- (r'\[', Punctuation, 'literal_array_inner'),
- (r'\]', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_array_inner': [
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_dictionary': [
- (r'\}', Literal, '#pop'),
- include('statement'),
- ],
- }
-
- def analyse_text(text):
- if _oc_keywords.search(text):
- return 1.0
- elif '@"' in text: # strings
- return 0.8
- elif re.search('@[0-9]+', text):
- return 0.7
- elif _oc_message.search(text):
- return 0.8
- return 0
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
- COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
- for index, token, value in \
- baselexer.get_tokens_unprocessed(self, text, stack):
- if token is Name or token is Name.Class:
- if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
- or value in COCOA_PRIMITIVES:
- token = Name.Builtin.Pseudo
-
- yield index, token, value
-
- return GeneratedObjectiveCVariant
-
-
-class ObjectiveCLexer(objective(CLexer)):
- """
- For Objective-C source code with preprocessor directives.
- """
-
- name = 'Objective-C'
- url = 'https://developer.apple.com/library/archive/documentation/Cocoa/Conceptual/ProgrammingWithObjectiveC/Introduction/Introduction.html'
- aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
- filenames = ['*.m', '*.h']
- mimetypes = ['text/x-objective-c']
- priority = 0.05 # Lower than C
-
-
-class ObjectiveCppLexer(objective(CppLexer)):
- """
- For Objective-C++ source code with preprocessor directives.
- """
-
- name = 'Objective-C++'
- aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
- filenames = ['*.mm', '*.hh']
- mimetypes = ['text/x-objective-c++']
- priority = 0.05 # Lower than C++
-
-
-class LogosLexer(ObjectiveCppLexer):
- """
- For Logos + Objective-C source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
-
- name = 'Logos'
- aliases = ['logos']
- filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
- mimetypes = ['text/x-logos']
- priority = 0.25
-
- tokens = {
- 'statements': [
- (r'(%orig|%log)\b', Keyword),
- (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
- bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
- (r'(%init)\b(\()',
- bygroups(Keyword, Punctuation), 'logos_init_directive'),
- (r'(%init)(?=\s*;)', bygroups(Keyword)),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class), '#pop'),
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'logos_classname')),
- inherit,
- ],
- 'logos_init_directive': [
- (r'\s+', Text),
- (',', Punctuation, ('logos_init_directive', '#pop')),
- (r'([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
- bygroups(Name.Class, Text, Punctuation, Text, Text)),
- (r'([a-zA-Z$_][\w$]*)', Name.Class),
- (r'\)', Punctuation, '#pop'),
- ],
- 'logos_classname': [
- (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'root': [
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- 'logos_classname'),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class)),
- (r'(%config)(\s*\(\s*)(\w+)(\s*=)(.*?)(\)\s*)',
- bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
- (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
- 'function'),
- (r'(%new)(\s*)(\()(.*?)(\))',
- bygroups(Keyword, Text, Keyword, String, Keyword)),
- (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
- inherit,
- ],
- }
-
- _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
-
- def analyse_text(text):
- if LogosLexer._logos_keywords.search(text):
- return 1.0
- return 0
-
-
-class SwiftLexer(RegexLexer):
- """
- For Swift source.
-
- .. versionadded:: 2.0
- """
- name = 'Swift'
- url = 'https://www.swift.org/'
- filenames = ['*.swift']
- aliases = ['swift']
- mimetypes = ['text/x-swift']
-
- tokens = {
- 'root': [
- # Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'//', Comment.Single, 'comment-single'),
- (r'/\*', Comment.Multiline, 'comment-multi'),
- (r'#(if|elseif|else|endif|available)\b', Comment.Preproc, 'preproc'),
-
- # Keywords
- include('keywords'),
-
- # Global Types
- (words((
- 'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
- 'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
- 'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
- 'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
- 'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
- 'EnumerateSequence', 'FilterCollectionView',
- 'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
- 'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
- 'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
- 'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
- 'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
- 'LazyForwardCollection', 'LazyRandomAccessCollection',
- 'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
- 'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
- 'Optional', 'PermutationGenerator', 'QuickLookObject',
- 'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
- 'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
- 'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
- 'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
- 'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
- 'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
- 'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
- 'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
- # Protocols
- 'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
- 'BidirectionalIndexType', 'BitwiseOperationsType',
- 'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
- 'CollectionType', 'Comparable', 'DebugPrintable',
- 'DictionaryLiteralConvertible', 'Equatable',
- 'ExtendedGraphemeClusterLiteralConvertible',
- 'ExtensibleCollectionType', 'FloatLiteralConvertible',
- 'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
- 'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
- 'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
- 'NilLiteralConvertible', 'OutputStreamType', 'Printable',
- 'RandomAccessIndexType', 'RangeReplaceableCollectionType',
- 'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
- 'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
- 'Streamable', 'Strideable', 'StringInterpolationConvertible',
- 'StringLiteralConvertible', 'UnicodeCodecType',
- 'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
- '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
- '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
- '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
- '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
- '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
- '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
- '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
- '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
- '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
- '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
- '_SwiftNSStringType', '_UnsignedIntegerType',
- # Variables
- 'C_ARGC', 'C_ARGV', 'Process',
- # Typealiases
- 'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
- 'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
- 'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
- 'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
- 'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
- 'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
- # Foundation/Cocoa
- 'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
- Name.Builtin),
- # Functions
- (words((
- 'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
- 'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
- 'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
- 'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
- 'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
- 'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
- 'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
- 'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
- 'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
- 'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
- 'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
- 'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
- 'withExtendedLifetime', 'withUnsafeMutablePointer',
- 'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
- 'withVaList'), suffix=r'\b'),
- Name.Builtin.Pseudo),
-
- # Implicit Block Variables
- (r'\$\d+', Name.Variable),
-
- # Binary Literal
- (r'0b[01_]+', Number.Bin),
- # Octal Literal
- (r'0o[0-7_]+', Number.Oct),
- # Hexadecimal Literal
- (r'0x[0-9a-fA-F_]+', Number.Hex),
- # Decimal Literal
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
- (r'[0-9][0-9_]*', Number.Integer),
- # String Literal
- (r'"', String, 'string'),
-
- # Operators and Punctuation
- (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
- (r'[/=\-+!*%<>&|^?~]+', Operator),
-
- # Identifier
- (r'[a-zA-Z_]\w*', Name)
- ],
- 'keywords': [
- (words((
- 'as', 'async', 'await', 'break', 'case', 'catch', 'continue', 'default', 'defer',
- 'do', 'else', 'fallthrough', 'for', 'guard', 'if', 'in', 'is',
- 'repeat', 'return', '#selector', 'switch', 'throw', 'try',
- 'where', 'while'), suffix=r'\b'),
- Keyword),
- (r'@availability\([^)]+\)', Keyword.Reserved),
- (words((
- 'associativity', 'convenience', 'dynamic', 'didSet', 'final',
- 'get', 'indirect', 'infix', 'inout', 'lazy', 'left', 'mutating',
- 'none', 'nonmutating', 'optional', 'override', 'postfix',
- 'precedence', 'prefix', 'Protocol', 'required', 'rethrows',
- 'right', 'set', 'throws', 'Type', 'unowned', 'weak', 'willSet',
- '@availability', '@autoclosure', '@noreturn',
- '@NSApplicationMain', '@NSCopying', '@NSManaged', '@objc',
- '@UIApplicationMain', '@IBAction', '@IBDesignable',
- '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
- Keyword.Reserved),
- (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
- r'|__FILE__|__FUNCTION__|__LINE__|_'
- r'|#(?:file|line|column|function))\b', Keyword.Constant),
- (r'import\b', Keyword.Declaration, 'module'),
- (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(func)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Function)),
- (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
- Text, Name.Variable)),
- (words((
- 'actor', 'associatedtype', 'class', 'deinit', 'enum', 'extension', 'func', 'import',
- 'init', 'internal', 'let', 'operator', 'private', 'protocol', 'public',
- 'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
- Keyword.Declaration)
- ],
- 'comment': [
- (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
- Comment.Special)
- ],
-
- # Nested
- 'comment-single': [
- (r'\n', Text, '#pop'),
- include('comment'),
- (r'[^\n]', Comment.Single)
- ],
- 'comment-multi': [
- include('comment'),
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'module': [
- (r'\n', Text, '#pop'),
- (r'[a-zA-Z_]\w*', Name.Class),
- include('root')
- ],
- 'preproc': [
- (r'\n', Text, '#pop'),
- include('keywords'),
- (r'[A-Za-z]\w*', Comment.Preproc),
- include('root')
- ],
- 'string': [
- (r'\\\(', String.Interpol, 'string-intp'),
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String)
- ],
- 'string-intp': [
- (r'\(', String.Interpol, '#push'),
- (r'\)', String.Interpol, '#pop'),
- include('root')
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
- COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name or token is Name.Class:
- if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
- or value in COCOA_PRIMITIVES:
- token = Name.Builtin.Pseudo
-
- yield index, token, value
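A note on the override above: it is the usual Pygments pattern for post-processing a RegexLexer's token stream after matching. A minimal sketch of the same retagging idea, with a made-up BUILTINS set standing in for the Cocoa tables (the names are illustrative, not from Pygments):

    from pygments.lexers.python import PythonLexer
    from pygments.token import Name

    BUILTINS = {'spam', 'eggs'}   # illustrative names only

    class RetaggingLexer(PythonLexer):
        def get_tokens_unprocessed(self, text):
            for index, token, value in super().get_tokens_unprocessed(text):
                # promote plain identifiers that appear in the builtin table,
                # in the same way SwiftLexer handles the Cocoa names above
                if token is Name and value in BUILTINS:
                    token = Name.Builtin.Pseudo
                yield index, token, value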
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ooc.py b/venv/lib/python3.11/site-packages/pygments/lexers/ooc.py
deleted file mode 100644
index c4600ea..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ooc.py
+++ /dev/null
@@ -1,85 +0,0 @@
-"""
- pygments.lexers.ooc
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Ooc language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['OocLexer']
-
-
-class OocLexer(RegexLexer):
- """
-    For Ooc source code.
-
- .. versionadded:: 1.2
- """
- name = 'Ooc'
- url = 'http://ooc-lang.org/'
- aliases = ['ooc']
- filenames = ['*.ooc']
- mimetypes = ['text/x-ooc']
-
- tokens = {
- 'root': [
- (words((
- 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
- 'this', 'super', 'new', 'const', 'final', 'static', 'import',
- 'use', 'extern', 'inline', 'proto', 'break', 'continue',
- 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
- 'switch', 'case', 'as', 'in', 'version', 'return', 'true',
- 'false', 'null'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'include\b', Keyword, 'include'),
- (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
- bygroups(Keyword, Text, Keyword, Text, Name.Class)),
- (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
- bygroups(Keyword, Text, Name.Function)),
- (r'\bfunc\b', Keyword),
- # Note: %= and ^= not listed on http://ooc-lang.org/syntax
- (r'//.*', Comment),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
- r'&&?|\|\|?|\^=?)', Operator),
- (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
- Name.Function)),
- (r'[A-Z][A-Z0-9_]+', Name.Constant),
- (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
-
- (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
- bygroups(Name.Function, Text)),
- (r'[a-z]\w*', Name.Variable),
-
- # : introduces types
- (r'[:(){}\[\];,]', Punctuation),
-
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'0c[0-9]+', Number.Oct),
- (r'0b[01]+', Number.Bin),
- (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
- (r'[0-9_]+', Number.Decimal),
-
- (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
- String.Double),
- (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'@', Punctuation), # pointer dereference
- (r'\.', Punctuation), # imports or chain operator
-
- (r'\\[ \t\n]', Text),
- (r'[ \t]+', Text),
- ],
- 'include': [
- (r'[\w/]+', Name),
- (r',', Punctuation),
- (r'[ \t]', Text),
- (r'[;\n]', Text, '#pop'),
- ],
- }
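For reference, OocLexer is consumed through the standard Pygments API like any other lexer; a small sketch (the code fragment and formatter choice are illustrative):

    from pygments import highlight
    from pygments.lexers import OocLexer
    from pygments.formatters import TerminalFormatter

    code = '"Hello world" println()'   # toy Ooc fragment, for illustration
    print(highlight(code, OocLexer(), TerminalFormatter()))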
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/openscad.py b/venv/lib/python3.11/site-packages/pygments/lexers/openscad.py
deleted file mode 100644
index de8fdaf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/openscad.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
- pygments.lexers.openscad
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the OpenSCAD language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words, include
-from pygments.token import Text, Comment, Punctuation, Operator, Keyword, Name, Number, Whitespace, Literal, String
-
-__all__ = ['OpenScadLexer']
-
-
-class OpenScadLexer(RegexLexer):
-    """For OpenSCAD code.
-
- .. versionadded:: 2.16
- """
- name = "OpenSCAD"
- url = "https://openscad.org/"
- aliases = ["openscad"]
- filenames = ["*.scad"]
- mimetypes = ["application/x-openscad"]
-
- tokens = {
- "root": [
- (r"[^\S\n]+", Whitespace),
- (r'//', Comment.Single, 'comment-single'),
- (r'/\*', Comment.Multiline, 'comment-multi'),
- (r"[{}\[\]\(\),;:]", Punctuation),
- (r"[*!#%\-+=?/]", Operator),
- (r"<=|<|==|!=|>=|>|&&|\|\|", Operator),
- (r"\$(f[asn]|t|vp[rtd]|children)", Operator),
- (r"(undef|PI)\b", Keyword.Constant),
- (
- r"(use|include)((?:\s|\\\\s)+)",
- bygroups(Keyword.Namespace, Text),
- "includes",
- ),
- (r"(module)(\s*)([^\s\(]+)",
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- (r"(function)(\s*)([^\s\(]+)",
- bygroups(Keyword.Declaration, Whitespace, Name.Function)),
- (words(("true", "false"), prefix=r"\b", suffix=r"\b"), Literal),
- (words((
- "function", "module", "include", "use", "for",
- "intersection_for", "if", "else", "return"
- ), prefix=r"\b", suffix=r"\b"), Keyword
- ),
- (words((
- "circle", "square", "polygon", "text", "sphere", "cube",
- "cylinder", "polyhedron", "translate", "rotate", "scale",
- "resize", "mirror", "multmatrix", "color", "offset", "hull",
- "minkowski", "union", "difference", "intersection", "abs",
- "sign", "sin", "cos", "tan", "acos", "asin", "atan", "atan2",
- "floor", "round", "ceil", "ln", "log", "pow", "sqrt", "exp",
- "rands", "min", "max", "concat", "lookup", "str", "chr",
- "search", "version", "version_num", "norm", "cross",
- "parent_module", "echo", "import", "import_dxf",
- "dxf_linear_extrude", "linear_extrude", "rotate_extrude",
- "surface", "projection", "render", "dxf_cross",
- "dxf_dim", "let", "assign", "len"
- ), prefix=r"\b", suffix=r"\b"),
- Name.Builtin
- ),
- (r"\bchildren\b", Name.Builtin.Pseudo),
- (r'""".*?"""', String.Double),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"-?\d+(\.\d+)?(e[+-]?\d+)?", Number),
- (r"\w+", Name),
- ],
- "includes": [
- (
- r"(<)([^>]*)(>)",
- bygroups(Punctuation, Comment.PreprocFile, Punctuation),
- ),
- ],
- 'comment': [
- (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
- Comment.Special)
- ],
- 'comment-single': [
- (r'\n', Text, '#pop'),
- include('comment'),
- (r'[^\n]+', Comment.Single)
- ],
- 'comment-multi': [
- include('comment'),
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
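The aliases, filenames and mimetypes attributes above are what drive lexer lookup; a quick sketch of the helpers that read them (the file name is illustrative):

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    lexer = get_lexer_by_name('openscad')         # matched via aliases
    same = get_lexer_for_filename('model.scad')   # matched via filenames
    assert type(lexer) is type(same)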
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/other.py b/venv/lib/python3.11/site-packages/pygments/lexers/other.py
deleted file mode 100644
index f2c07d7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/other.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
- pygments.lexers.other
- ~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
-from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
- TcshLexer
-from pygments.lexers.robotframework import RobotFrameworkLexer
-from pygments.lexers.testing import GherkinLexer
-from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
-from pygments.lexers.prolog import LogtalkLexer
-from pygments.lexers.snobol import SnobolLexer
-from pygments.lexers.rebol import RebolLexer
-from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
-from pygments.lexers.modeling import ModelicaLexer
-from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
- HybrisLexer
-from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
- AsymptoteLexer, PovrayLexer
-from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
- GoodDataCLLexer, MaqlLexer
-from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
-from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
- MscgenLexer, VGLLexer
-from pygments.lexers.basic import CbmBasicV2Lexer
-from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
-from pygments.lexers.ecl import ECLLexer
-from pygments.lexers.urbi import UrbiscriptLexer
-from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
-from pygments.lexers.installers import NSISLexer, RPMSpecLexer
-from pygments.lexers.textedit import AwkLexer
-from pygments.lexers.smv import NuSMVLexer
-
-__all__ = []
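Although __all__ is empty, the explicit imports above still bind the lexer classes in this module, so the old import paths keep working; the empty __all__ only restricts star-imports. A short sketch:

    from pygments.lexers.other import BashLexer as CompatBashLexer  # via this shim
    from pygments.lexers.shell import BashLexer                     # canonical home

    assert CompatBashLexer is BashLexer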
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/parasail.py b/venv/lib/python3.11/site-packages/pygments/lexers/parasail.py
deleted file mode 100644
index 5a7238e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/parasail.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
- pygments.lexers.parasail
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for ParaSail.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-
-__all__ = ['ParaSailLexer']
-
-
-class ParaSailLexer(RegexLexer):
- """
- For ParaSail source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'ParaSail'
- url = 'http://www.parasail-lang.org'
- aliases = ['parasail']
- filenames = ['*.psi', '*.psl']
- mimetypes = ['text/x-parasail']
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'\b(and|or|xor)=', Operator.Word),
- (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|'
- r'(is|not)\s+null)\b',
- Operator.Word),
- # Keywords
- (r'\b(abs|abstract|all|block|class|concurrent|const|continue|'
- r'each|end|exit|extends|exports|forward|func|global|implements|'
- r'import|in|interface|is|lambda|locked|new|not|null|of|op|'
- r'optional|private|queued|ref|return|reverse|separate|some|'
- r'type|until|var|with|'
- # Control flow
- r'if|then|else|elsif|case|for|while|loop)\b',
- Keyword.Reserved),
- (r'(abstract\s+)?(interface|class|op|func|type)',
- Keyword.Declaration),
- # Literals
- (r'"[^"]*"', String),
- (r'\\[\'ntrf"0]', String.Escape),
- (r'#[a-zA-Z]\w*', Literal), # Enumeration
- include('numbers'),
- (r"'[^']'", String.Char),
- (r'[a-zA-Z]\w*', Name),
- # Operators and Punctuation
- (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|'
- r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\|=|\||/=|\+|-|\*|/|'
- r'\.\.|<\.\.|\.\.<|<\.\.<)',
- Operator),
- (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)',
- Punctuation),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex), # any base
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), # C-like hex
- (r'0[bB][01][01_]*', Number.Bin), # C-like bin
- (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*', # float exp
- Number.Float),
- (r'\d[0-9_]*\.\d[0-9_]*', Number.Float), # float
- (r'\d[0-9_]*', Number.Integer), # integer
- ],
- }
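The include('numbers') rule above splices the 'numbers' state's patterns into 'root' at that position, which is how RegexLexer states are shared. A tiny self-contained sketch of the mechanism (toy language, names are illustrative):

    from pygments.lexer import RegexLexer, include
    from pygments.token import Name, Number, Text

    class TinyLexer(RegexLexer):           # illustrative only
        name = 'Tiny'
        tokens = {
            'root': [
                (r'\s+', Text),
                include('numbers'),        # reuse the shared number rules here
                (r'\w+', Name),
            ],
            'numbers': [
                (r'\d+\.\d+', Number.Float),
                (r'\d+', Number.Integer),
            ],
        }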
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/parsers.py b/venv/lib/python3.11/site-packages/pygments/lexers/parsers.py
deleted file mode 100644
index 0415ac6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/parsers.py
+++ /dev/null
@@ -1,801 +0,0 @@
-"""
- pygments.lexers.parsers
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for parser generators.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, \
- include, bygroups, using
-from pygments.token import Punctuation, Other, Text, Comment, Operator, \
- Keyword, Name, String, Number, Whitespace
-from pygments.lexers.jvm import JavaLexer
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers.objective import ObjectiveCLexer
-from pygments.lexers.d import DLexer
-from pygments.lexers.dotnet import CSharpLexer
-from pygments.lexers.ruby import RubyLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.perl import PerlLexer
-
-__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
- 'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
- 'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
- 'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
- 'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
- 'AntlrJavaLexer', 'AntlrActionScriptLexer',
- 'TreetopLexer', 'EbnfLexer']
-
-
-class RagelLexer(RegexLexer):
- """A pure `Ragel <www.colm.net/open-source/ragel>`_ lexer. Use this
- for fragments of Ragel. For ``.rl`` files, use
- :class:`RagelEmbeddedLexer` instead (or one of the
- language-specific subclasses).
-
- .. versionadded:: 1.1
-
- """
-
- name = 'Ragel'
- url = 'http://www.colm.net/open-source/ragel/'
- aliases = ['ragel']
- filenames = []
-
- tokens = {
- 'whitespace': [
- (r'\s+', Whitespace)
- ],
- 'comments': [
- (r'\#.*$', Comment),
- ],
- 'keywords': [
- (r'(access|action|alphtype)\b', Keyword),
- (r'(getkey|write|machine|include)\b', Keyword),
- (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
- (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
- ],
- 'numbers': [
- (r'0x[0-9A-Fa-f]+', Number.Hex),
- (r'[+-]?[0-9]+', Number.Integer),
- ],
- 'literals': [
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String), # square bracket literals
- (r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex), # regular expressions
- ],
- 'identifiers': [
- (r'[a-zA-Z_]\w*', Name.Variable),
- ],
- 'operators': [
- (r',', Operator), # Join
- (r'\||&|--?', Operator), # Union, Intersection and Subtraction
-            (r'\.|<:|:>>?', Operator),                  # Concatenation
- (r':', Operator), # Label
- (r'->', Operator), # Epsilon Transition
- (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
- (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
- (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
- (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
- (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
- (r'>|@|\$|%', Operator), # Transition Actions and Priorities
- (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator), # Repetition
- (r'!|\^', Operator), # Negation
- (r'\(|\)', Operator), # Grouping
- ],
- 'root': [
- include('literals'),
- include('whitespace'),
- include('comments'),
- include('keywords'),
- include('numbers'),
- include('identifiers'),
- include('operators'),
- (r'\{', Punctuation, 'host'),
- (r'=', Operator),
- (r';', Punctuation),
- ],
- 'host': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^{}\'"/#]+', # exclude unsafe characters
- r'[^\\]\\[{}]', # allow escaped { or }
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'\#.*$\n?', # ruby comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
- # / is safe now that we've handled regex and javadoc comments
- r'/',
- )) + r')+', Other),
-
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- }
-
-
-class RagelEmbeddedLexer(RegexLexer):
- """
- A lexer for Ragel embedded in a host language file.
-
- This will only highlight Ragel statements. If you want host language
- highlighting then call the language-specific Ragel lexer.
-
- .. versionadded:: 1.1
- """
-
- name = 'Embedded Ragel'
- aliases = ['ragel-em']
- filenames = ['*.rl']
-
- tokens = {
- 'root': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^%\'"/#]+', # exclude unsafe characters
- r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # regular expression
-
- # / is safe now that we've handled regex and javadoc comments
- r'/',
- )) + r')+', Other),
-
- # Single Line FSM.
- # Please don't put a quoted newline in a single line FSM.
- # That's just mean. It will break this.
- (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
- using(RagelLexer),
- Punctuation, Text)),
-
- # Multi Line FSM.
- (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
- ],
- 'multi-line-fsm': [
- (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
- r'(' + r'|'.join((
- r'[^}\'"\[/#]', # exclude unsafe characters
- r'\}(?=[^%]|$)', # } is okay as long as it's not followed by %
- r'\}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
- r'[^\\]\\[{}]', # ...and } is okay if it's escaped
-
- # allow / if it's preceded with one of these symbols
- # (ragel EOF actions)
- r'(>|\$|%|<|@|<>)/',
-
- # specifically allow regex followed immediately by *
- # so it doesn't get mistaken for a comment
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*',
-
- # allow / as long as it's not followed by another / or by a *
- r'/(?=[^/*]|$)',
-
- # We want to match as many of these as we can in one block.
- # Not sure if we need the + sign here,
- # does it help performance?
- )) + r')+',
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
- r"\[(\\\\|\\[^\\]|[^\]\\])*\]", # square bracket literal
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
- )) + r')+', using(RagelLexer)),
-
- (r'\}%%', Punctuation, '#pop'),
- ]
- }
-
- def analyse_text(text):
- return '@LANG: indep' in text
-
-
-class RagelRubyLexer(DelegatingLexer):
- """
- A lexer for Ragel in a Ruby host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Ruby Host'
- aliases = ['ragel-ruby', 'ragel-rb']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: ruby' in text
-
-
-class RagelCLexer(DelegatingLexer):
- """
- A lexer for Ragel in a C host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in C Host'
- aliases = ['ragel-c']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super().__init__(CLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: c' in text
-
-
-class RagelDLexer(DelegatingLexer):
- """
- A lexer for Ragel in a D host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in D Host'
- aliases = ['ragel-d']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super().__init__(DLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: d' in text
-
-
-class RagelCppLexer(DelegatingLexer):
- """
- A lexer for Ragel in a C++ host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in CPP Host'
- aliases = ['ragel-cpp']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super().__init__(CppLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: c++' in text
-
-
-class RagelObjectiveCLexer(DelegatingLexer):
- """
- A lexer for Ragel in an Objective C host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Objective C Host'
- aliases = ['ragel-objc']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: objc' in text
-
-
-class RagelJavaLexer(DelegatingLexer):
- """
- A lexer for Ragel in a Java host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Java Host'
- aliases = ['ragel-java']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: java' in text
-
-
-class AntlrLexer(RegexLexer):
- """
- Generic `ANTLR`_ Lexer.
-    Should not be called directly; instead,
-    use a DelegatingLexer for your target language.
-
- .. versionadded:: 1.1
-
- .. _ANTLR: http://www.antlr.org/
- """
-
- name = 'ANTLR'
- aliases = ['antlr']
- filenames = []
-
- _id = r'[A-Za-z]\w*'
- _TOKEN_REF = r'[A-Z]\w*'
- _RULE_REF = r'[a-z]\w*'
- _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
- _INT = r'[0-9]+'
-
- tokens = {
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'//.*$', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- ],
- 'root': [
- include('whitespace'),
- include('comments'),
-
- (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
- Punctuation)),
- # optionsSpec
- (r'options\b', Keyword, 'options'),
- # tokensSpec
- (r'tokens\b', Keyword, 'tokens'),
- # attrScope
- (r'(scope)(\s*)(' + _id + r')(\s*)(\{)',
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
- Punctuation), 'action'),
- # exception
- (r'(catch|finally)\b', Keyword, 'exception'),
- # action
- (r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)',
- bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
- Name.Label, Whitespace, Punctuation), 'action'),
- # rule
- (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
- bygroups(Keyword, Whitespace, Name.Label, Punctuation),
- ('rule-alts', 'rule-prelims')),
- ],
- 'exception': [
- (r'\n', Whitespace, '#pop'),
- (r'\s', Whitespace),
- include('comments'),
-
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- ],
- 'rule-prelims': [
- include('whitespace'),
- include('comments'),
-
- (r'returns\b', Keyword),
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- # throwsSpec
- (r'(throws)(\s+)(' + _id + ')',
- bygroups(Keyword, Whitespace, Name.Label)),
- (r'(,)(\s*)(' + _id + ')',
- bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
- # optionsSpec
- (r'options\b', Keyword, 'options'),
- # ruleScopeSpec - scope followed by target language code or name of action
- # TODO finish implementing other possibilities for scope
- # L173 ANTLRv3.g from ANTLR book
- (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
- 'action'),
- (r'(scope)(\s+)(' + _id + r')(\s*)(;)',
- bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
- # ruleAction
- (r'(@' + _id + r')(\s*)(\{)',
- bygroups(Name.Label, Whitespace, Punctuation), 'action'),
- # finished prelims, go to rule alts!
- (r':', Punctuation, '#pop')
- ],
- 'rule-alts': [
- include('whitespace'),
- include('comments'),
-
- # These might need to go in a separate 'block' state triggered by (
- (r'options\b', Keyword, 'options'),
- (r':', Punctuation),
-
- # literals
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'<<([^>]|>[^>])>>', String),
- # identifiers
- # Tokens start with capital letter.
- (r'\$?[A-Z_]\w*', Name.Constant),
- # Rules start with small letter.
- (r'\$?[a-z_]\w*', Name.Variable),
- # operators
- (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
- (r',', Punctuation),
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- (r';', Punctuation, '#pop')
- ],
- 'tokens': [
- include('whitespace'),
- include('comments'),
- (r'\{', Punctuation),
- (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
- + r')?(\s*)(;)',
- bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
- String, Whitespace, Punctuation)),
- (r'\}', Punctuation, '#pop'),
- ],
- 'options': [
- include('whitespace'),
- include('comments'),
- (r'\{', Punctuation),
- (r'(' + _id + r')(\s*)(=)(\s*)(' +
- '|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
- bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
- Text, Whitespace, Punctuation)),
- (r'\}', Punctuation, '#pop'),
- ],
- 'action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^${}\'"/\\]+', # exclude unsafe characters
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
- # backslashes are okay, as long as we are not backslashing a %
- r'\\(?!%)',
-
- # Now that we've handled regex and javadoc comments
- # it's safe to let / through.
- r'/',
- )) + r')+', Other),
- (r'(\\)(%)', bygroups(Punctuation, Other)),
- (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
- bygroups(Name.Variable, Punctuation, Name.Property)),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'nested-arg-action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks.
- r'[^$\[\]\'"/]+', # exclude unsafe characters
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
- # Now that we've handled regex and javadoc comments
- # it's safe to let / through.
- r'/',
- )) + r')+', Other),
-
-
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
- bygroups(Name.Variable, Punctuation, Name.Property)),
- (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
- ]
- }
-
- def analyse_text(text):
- return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
-
-
-# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
-
-class AntlrCppLexer(DelegatingLexer):
- """
- ANTLR with C++ Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With CPP Target'
- aliases = ['antlr-cpp']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super().__init__(CppLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
-
-
-class AntlrObjectiveCLexer(DelegatingLexer):
- """
- ANTLR with Objective-C Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With ObjectiveC Target'
- aliases = ['antlr-objc']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super().__init__(ObjectiveCLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
-
-
-class AntlrCSharpLexer(DelegatingLexer):
- """
- ANTLR with C# Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With C# Target'
- aliases = ['antlr-csharp', 'antlr-c#']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super().__init__(CSharpLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
-
-
-class AntlrPythonLexer(DelegatingLexer):
- """
- ANTLR with Python Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Python Target'
- aliases = ['antlr-python']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super().__init__(PythonLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
-
-
-class AntlrJavaLexer(DelegatingLexer):
- """
- ANTLR with Java Target
-
-    .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Java Target'
- aliases = ['antlr-java']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super().__init__(JavaLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- # Antlr language is Java by default
- return AntlrLexer.analyse_text(text) and 0.9
-
-
-class AntlrRubyLexer(DelegatingLexer):
- """
- ANTLR with Ruby Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Ruby Target'
- aliases = ['antlr-ruby', 'antlr-rb']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super().__init__(RubyLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
-
-
-class AntlrPerlLexer(DelegatingLexer):
- """
- ANTLR with Perl Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Perl Target'
- aliases = ['antlr-perl']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super().__init__(PerlLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
-
-
-class AntlrActionScriptLexer(DelegatingLexer):
- """
- ANTLR with ActionScript Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With ActionScript Target'
- aliases = ['antlr-actionscript', 'antlr-as']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- from pygments.lexers.actionscript import ActionScriptLexer
- super().__init__(ActionScriptLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
-
-
-class TreetopBaseLexer(RegexLexer):
- """
- A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
- Not for direct use; use :class:`TreetopLexer` instead.
-
- .. versionadded:: 1.6
- """
-
- tokens = {
- 'root': [
- include('space'),
- (r'require[ \t]+[^\n\r]+[\n\r]', Other),
- (r'module\b', Keyword.Namespace, 'module'),
- (r'grammar\b', Keyword, 'grammar'),
- ],
- 'module': [
- include('space'),
- include('end'),
- (r'module\b', Keyword, '#push'),
- (r'grammar\b', Keyword, 'grammar'),
- (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
- ],
- 'grammar': [
- include('space'),
- include('end'),
- (r'rule\b', Keyword, 'rule'),
- (r'include\b', Keyword, 'include'),
- (r'[A-Z]\w*', Name),
- ],
- 'include': [
- include('space'),
- (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
- ],
- 'rule': [
- include('space'),
- include('end'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'[A-Za-z_]\w*', Name),
- (r'[()]', Punctuation),
- (r'[?+*/&!~]', Operator),
- (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
- (r'([0-9]*)(\.\.)([0-9]*)',
- bygroups(Number.Integer, Operator, Number.Integer)),
- (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
- (r'\{', Punctuation, 'inline_module'),
- (r'\.', String.Regex),
- ],
- 'inline_module': [
- (r'\{', Other, 'ruby'),
- (r'\}', Punctuation, '#pop'),
- (r'[^{}]+', Other),
- ],
- 'ruby': [
- (r'\{', Other, '#push'),
- (r'\}', Other, '#pop'),
- (r'[^{}]+', Other),
- ],
- 'space': [
- (r'[ \t\n\r]+', Whitespace),
- (r'#[^\n]*', Comment.Single),
- ],
- 'end': [
- (r'end\b', Keyword, '#pop'),
- ],
- }
-
-
-class TreetopLexer(DelegatingLexer):
- """
- A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
-
- .. versionadded:: 1.6
- """
-
- name = 'Treetop'
- aliases = ['treetop']
- filenames = ['*.treetop', '*.tt']
-
- def __init__(self, **options):
- super().__init__(RubyLexer, TreetopBaseLexer, **options)
-
-
-class EbnfLexer(RegexLexer):
- """
- Lexer for `ISO/IEC 14977 EBNF
- <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
- grammars.
-
- .. versionadded:: 2.0
- """
-
- name = 'EBNF'
- aliases = ['ebnf']
- filenames = ['*.ebnf']
- mimetypes = ['text/x-ebnf']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'=', Operator, 'production'),
- ],
- 'production': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'"[^"]*"', String.Double),
- (r"'[^']*'", String.Single),
- (r'(\?[^?]*\?)', Name.Entity),
- (r'[\[\]{}(),|]', Punctuation),
- (r'-', Operator),
- (r';', Punctuation, '#pop'),
- (r'\.', Punctuation, '#pop'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'comment_start': [
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^*)]', Comment.Multiline),
- include('comment_start'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[*)]', Comment.Multiline),
- ],
- 'identifier': [
- (r'([a-zA-Z][\w \-]*)', Keyword),
- ],
- }
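All of the Ragel*/Antlr* host lexers above follow the same two-line DelegatingLexer recipe: the first lexer handles whatever the embedded lexer yields as Other (the host code), the second handles the embedded grammar, and analyse_text sniffs the @LANG/language marker. A hedged sketch for a hypothetical Go host (not an actual Pygments lexer; the '@LANG: go' marker is an assumption):

    from pygments.lexer import DelegatingLexer
    from pygments.lexers.go import GoLexer
    from pygments.lexers.parsers import RagelEmbeddedLexer

    class RagelGoLexer(DelegatingLexer):      # hypothetical, for illustration
        name = 'Ragel in Go Host'
        aliases = ['ragel-go']
        filenames = ['*.rl']

        def __init__(self, **options):
            # GoLexer lexes the chunks RagelEmbeddedLexer emits as Other;
            # RagelEmbeddedLexer lexes the Ragel sections themselves.
            super().__init__(GoLexer, RagelEmbeddedLexer, **options)

        def analyse_text(text):
            return '@LANG: go' in text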
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pascal.py b/venv/lib/python3.11/site-packages/pygments/lexers/pascal.py
deleted file mode 100644
index 34df192..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pascal.py
+++ /dev/null
@@ -1,641 +0,0 @@
-"""
- pygments.lexers.pascal
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Pascal family languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Whitespace
-from pygments.scanner import Scanner
-
-# compatibility import
-from pygments.lexers.modula2 import Modula2Lexer
-
-__all__ = ['DelphiLexer', 'PortugolLexer']
-
-
-class PortugolLexer(Lexer):
- """For Portugol, a Pascal dialect with keywords in Portuguese."""
- name = 'Portugol'
- aliases = ['portugol']
- filenames = ['*.alg', '*.portugol']
- mimetypes = []
- url = "https://www.apoioinformatica.inf.br/produtos/visualg/linguagem"
-
- def __init__(self, **options):
- Lexer.__init__(self, **options)
- self.lexer = DelphiLexer(**options, portugol=True)
-
- def get_tokens_unprocessed(self, text):
- return self.lexer.get_tokens_unprocessed(text)
-
-
-class DelphiLexer(Lexer):
- """
- For Delphi (Borland Object Pascal),
- Turbo Pascal and Free Pascal source code.
-
- Additional options accepted:
-
- `turbopascal`
- Highlight Turbo Pascal specific keywords (default: ``True``).
- `delphi`
- Highlight Borland Delphi specific keywords (default: ``True``).
- `freepascal`
- Highlight Free Pascal specific keywords (default: ``True``).
- `units`
- A list of units that should be considered builtin, supported are
- ``System``, ``SysUtils``, ``Classes`` and ``Math``.
- Default is to consider all of them builtin.
- """
- name = 'Delphi'
- aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
- filenames = ['*.pas', '*.dpr']
- mimetypes = ['text/x-pascal']
-
- TURBO_PASCAL_KEYWORDS = (
- 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
- 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
- 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
- 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
- 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
- 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
- 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
- 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
- )
-
- DELPHI_KEYWORDS = (
- 'as', 'class', 'except', 'exports', 'finalization', 'finally',
- 'initialization', 'is', 'library', 'on', 'property', 'raise',
- 'threadvar', 'try'
- )
-
- FREE_PASCAL_KEYWORDS = (
- 'dispose', 'exit', 'false', 'new', 'true'
- )
-
- BLOCK_KEYWORDS = {
- 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
- 'finalization', 'function', 'implementation', 'initialization',
- 'label', 'library', 'operator', 'procedure', 'program', 'property',
- 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
- }
-
- FUNCTION_MODIFIERS = {
- 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
- 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
- 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
- 'override', 'assembler'
- }
-
-    # XXX: those aren't global, but currently we know of no way to define
- # them just for the type context.
- DIRECTIVES = {
- 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
- 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
- 'published', 'public'
- }
-
- BUILTIN_TYPES = {
- 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
- 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
- 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
- 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
- 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
- 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
- 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
- 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
- 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
- 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
- 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
- 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
- 'widechar', 'widestring', 'word', 'wordbool'
- }
-
- BUILTIN_UNITS = {
- 'System': (
- 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
- 'append', 'arctan', 'assert', 'assigned', 'assignfile',
- 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
- 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
- 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
- 'dispose', 'doubletocomp', 'endthread', 'enummodules',
- 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
- 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
- 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
- 'findresourcehinstance', 'flush', 'frac', 'freemem',
- 'get8087cw', 'getdir', 'getlasterror', 'getmem',
- 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
- 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
- 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
- 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
- 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
- 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
- 'randomize', 'read', 'readln', 'reallocmem',
- 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
- 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
- 'set8087cw', 'setlength', 'setlinebreakstyle',
- 'setmemorymanager', 'setstring', 'settextbuf',
- 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
- 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
- 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
- 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
- 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
- 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
- 'widecharlentostring', 'widecharlentostrvar',
- 'widechartostring', 'widechartostrvar',
- 'widestringtoucs4string', 'write', 'writeln'
- ),
- 'SysUtils': (
- 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
- 'allocmem', 'ansicomparefilename', 'ansicomparestr',
- 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
- 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
- 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
- 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
- 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
- 'ansistrscan', 'ansistrupper', 'ansiuppercase',
- 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
- 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
- 'callterminateprocs', 'changefileext', 'charlength',
- 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
- 'comparetext', 'createdir', 'createguid', 'currentyear',
- 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
- 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
- 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
- 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
- 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
- 'exceptionerrormessage', 'excludetrailingbackslash',
- 'excludetrailingpathdelimiter', 'expandfilename',
- 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
- 'extractfiledrive', 'extractfileext', 'extractfilename',
- 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
- 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
- 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
- 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
- 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
- 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
- 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
- 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
- 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
- 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
- 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
- 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
- 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
- 'includetrailingbackslash', 'includetrailingpathdelimiter',
- 'incmonth', 'initializepackage', 'interlockeddecrement',
- 'interlockedexchange', 'interlockedexchangeadd',
- 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
- 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
- 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
- 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
- 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
- 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
- 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
- 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
- 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
- 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
- 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
- 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
- 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
- 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
- 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
- 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
- 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
- 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
- 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
- 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
- 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
- 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
- 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
- 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
- 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
- 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
- 'wraptext'
- ),
- 'Classes': (
- 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
- 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
- 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
- 'groupdescendantswith', 'hextobin', 'identtoint',
- 'initinheritedcomponent', 'inttoident', 'invalidpoint',
- 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
- 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
- 'pointsequal', 'readcomponentres', 'readcomponentresex',
- 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
- 'registerclasses', 'registercomponents', 'registerintegerconsts',
- 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
- 'teststreamformat', 'unregisterclass', 'unregisterclasses',
- 'unregisterintegerconsts', 'unregistermoduleclasses',
- 'writecomponentresfile'
- ),
- 'Math': (
- 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
- 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
- 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
- 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
- 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
- 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
- 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
- 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
- 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
- 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
- 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
- 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
- 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
- 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
- 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
- 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
- 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
- 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
- 'tan', 'tanh', 'totalvariance', 'variance'
- )
- }
-
- ASM_REGISTERS = {
- 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
- 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
- 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
- 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
- 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
- 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
- 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
- 'xmm6', 'xmm7'
- }
-
- ASM_INSTRUCTIONS = {
- 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
- 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
- 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
- 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
- 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
- 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
- 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
- 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
- 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
- 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
- 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
- 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
- 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
-        'jc', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
- 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
- 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
- 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
- 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
- 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
- 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
- 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
- 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
- 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
- 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
- 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
- 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
- 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
- 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
- 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
- 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
- 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
- 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
- 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
- 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
- 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
- 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
- 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
- 'xlatb', 'xor'
- }
-
- PORTUGOL_KEYWORDS = (
- 'aleatorio',
- 'algoritmo',
- 'arquivo',
- 'ate',
- 'caso',
- 'cronometro',
- 'debug',
- 'e',
- 'eco',
- 'enquanto',
- 'entao',
- 'escolha',
- 'escreva',
- 'escreval',
- 'faca',
- 'falso',
- 'fimalgoritmo',
- 'fimenquanto',
- 'fimescolha',
- 'fimfuncao',
- 'fimpara',
- 'fimprocedimento',
- 'fimrepita',
- 'fimse',
- 'funcao',
- 'inicio',
- 'int',
- 'interrompa',
- 'leia',
- 'limpatela',
- 'mod',
- 'nao',
- 'ou',
- 'outrocaso',
- 'para',
- 'passo',
- 'pausa',
- 'procedimento',
- 'repita',
- 'retorne',
- 'se',
- 'senao',
- 'timer',
- 'var',
- 'vetor',
- 'verdadeiro',
- 'xou',
- 'div',
- 'mod',
- 'abs',
- 'arccos',
- 'arcsen',
- 'arctan',
- 'cos',
- 'cotan',
- 'Exp',
- 'grauprad',
- 'int',
- 'log',
- 'logn',
- 'pi',
- 'quad',
- 'radpgrau',
- 'raizq',
- 'rand',
- 'randi',
- 'sen',
- 'Tan',
- 'asc',
- 'carac',
- 'caracpnum',
- 'compr',
- 'copia',
- 'maiusc',
- 'minusc',
- 'numpcarac',
- 'pos',
- )
-
- PORTUGOL_BUILTIN_TYPES = {
- 'inteiro', 'real', 'caractere', 'logico'
- }
-
- def __init__(self, **options):
- Lexer.__init__(self, **options)
- self.keywords = set()
- self.builtins = set()
- if get_bool_opt(options, 'portugol', False):
- self.keywords.update(self.PORTUGOL_KEYWORDS)
- self.builtins.update(self.PORTUGOL_BUILTIN_TYPES)
- self.is_portugol = True
- else:
- self.is_portugol = False
-
- if get_bool_opt(options, 'turbopascal', True):
- self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
- if get_bool_opt(options, 'delphi', True):
- self.keywords.update(self.DELPHI_KEYWORDS)
- if get_bool_opt(options, 'freepascal', True):
- self.keywords.update(self.FREE_PASCAL_KEYWORDS)
- for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
- self.builtins.update(self.BUILTIN_UNITS[unit])
-
- def get_tokens_unprocessed(self, text):
- scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
- stack = ['initial']
- in_function_block = False
- in_property_block = False
- was_dot = False
- next_token_is_function = False
- next_token_is_property = False
- collect_labels = False
- block_labels = set()
- brace_balance = [0, 0]
-
- while not scanner.eos:
- token = Error
-
- if stack[-1] == 'initial':
- if scanner.scan(r'\s+'):
- token = Whitespace
- elif not self.is_portugol and scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif self.is_portugol and scanner.scan(r'(<\-)|(>=)|(<=)|%|<|>|-|\+|\*|\=|(<>)|\/|\.|:|,'):
- token = Operator
- elif not self.is_portugol and scanner.scan(r'[-+*\/=<>:;,.@\^]'):
- token = Operator
- # stop label highlighting on next ";"
- if collect_labels and scanner.match == ';':
- collect_labels = False
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- # abort function naming ``foo = Function(...)``
- next_token_is_function = False
- # if we are in a function block we count the open
-                    # braces because otherwise it's impossible to
- # determine the end of the modifier context
- if in_function_block or in_property_block:
- if scanner.match == '(':
- brace_balance[0] += 1
- elif scanner.match == ')':
- brace_balance[0] -= 1
- elif scanner.match == '[':
- brace_balance[1] += 1
- elif scanner.match == ']':
- brace_balance[1] -= 1
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name == 'result':
- token = Name.Builtin.Pseudo
- elif lowercase_name in self.keywords:
- token = Keyword
- # if we are in a special block and a
-                        # block-ending keyword occurs (and the parentheses
-                        # are balanced) we end the current block context
- if self.is_portugol:
- if lowercase_name in ('funcao', 'procedimento'):
- in_function_block = True
- next_token_is_function = True
- else:
- if (in_function_block or in_property_block) and \
- lowercase_name in self.BLOCK_KEYWORDS and \
- brace_balance[0] <= 0 and \
- brace_balance[1] <= 0:
- in_function_block = False
- in_property_block = False
- brace_balance = [0, 0]
- block_labels = set()
- if lowercase_name in ('label', 'goto'):
- collect_labels = True
- elif lowercase_name == 'asm':
- stack.append('asm')
- elif lowercase_name == 'property':
- in_property_block = True
- next_token_is_property = True
- elif lowercase_name in ('procedure', 'operator',
- 'function', 'constructor',
- 'destructor'):
- in_function_block = True
- next_token_is_function = True
- # we are in a function block and the current name
- # is in the set of registered modifiers. highlight
- # it as pseudo keyword
- elif not self.is_portugol and in_function_block and \
- lowercase_name in self.FUNCTION_MODIFIERS:
- token = Keyword.Pseudo
- # if we are in a property highlight some more
- # modifiers
- elif not self.is_portugol and in_property_block and \
- lowercase_name in ('read', 'write'):
- token = Keyword.Pseudo
- next_token_is_function = True
- # if the last iteration set next_token_is_function
- # to true we now want this name highlighted as
- # function. so do that and reset the state
- elif next_token_is_function:
-                        # Check whether the next token is a dot. If so, it's
-                        # not a function but a class name, and the
-                        # part after the dot is the function name
- if not self.is_portugol and scanner.test(r'\s*\.\s*'):
- token = Name.Class
- # it's not a dot, our job is done
- else:
- token = Name.Function
- next_token_is_function = False
-
- if self.is_portugol:
- block_labels.add(scanner.match.lower())
-
- # same for properties
- elif not self.is_portugol and next_token_is_property:
- token = Name.Property
- next_token_is_property = False
- # Highlight this token as label and add it
- # to the list of known labels
- elif not self.is_portugol and collect_labels:
- token = Name.Label
- block_labels.add(scanner.match.lower())
- # name is in list of known labels
- elif lowercase_name in block_labels:
- token = Name.Label
- elif self.is_portugol and lowercase_name in self.PORTUGOL_BUILTIN_TYPES:
- token = Keyword.Type
- elif not self.is_portugol and lowercase_name in self.BUILTIN_TYPES:
- token = Keyword.Type
- elif not self.is_portugol and lowercase_name in self.DIRECTIVES:
- token = Keyword.Pseudo
- # builtins are just builtins if the token
- # before isn't a dot
- elif not self.is_portugol and not was_dot and lowercase_name in self.builtins:
- token = Name.Builtin
- else:
- token = Name
- elif self.is_portugol and scanner.scan(r"\""):
- token = String
- stack.append('string')
- elif not self.is_portugol and scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif not self.is_portugol and scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
- token = String.Char
- elif not self.is_portugol and scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- # if the stack is more than one level deep, pop
- if len(stack) > 1:
- stack.pop()
- scanner.get_char()
-
- elif stack[-1] == 'string':
- if self.is_portugol:
- if scanner.scan(r"''"):
- token = String.Escape
- elif scanner.scan(r"\""):
- token = String
- stack.pop()
- elif scanner.scan(r"[^\"]*"):
- token = String
- else:
- scanner.get_char()
- stack.pop()
- else:
- if scanner.scan(r"''"):
- token = String.Escape
- elif scanner.scan(r"'"):
- token = String
- stack.pop()
- elif scanner.scan(r"[^']*"):
- token = String
- else:
- scanner.get_char()
- stack.pop()
- elif not self.is_portugol and stack[-1] == 'asm':
- if scanner.scan(r'\s+'):
- token = Whitespace
- elif scanner.scan(r'end'):
- token = Keyword
- stack.pop()
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
- token = Name.Label
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name in self.ASM_INSTRUCTIONS:
- token = Keyword
- elif lowercase_name in self.ASM_REGISTERS:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
- token = Operator
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- scanner.get_char()
- stack.pop()
-
- # remember whether this token was a dot
- if not self.is_portugol and scanner.match.strip():
- was_dot = scanner.match == '.'
-
- yield scanner.start_pos, token, scanner.match or ''
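For reference, the hand-written scanner loop above (the Delphi/Portugol lexer in pygments/lexers/pascal.py) is driven through get_tokens_unprocessed(), which yields (index, tokentype, value) triples. A minimal sketch of consuming it directly, assuming only the public Pygments API (the Pascal snippet is ours):

# Minimal sketch: iterate the raw token stream of the scanner-based lexer above.
from pygments.lexers.pascal import DelphiLexer

code = "procedure Greet; begin WriteLn('hi'); end;"
for index, tokentype, value in DelphiLexer().get_tokens_unprocessed(code):
    # index is the character offset, tokentype a pygments.token type,
    # value the matched text (possibly empty at end of input).
    print(index, tokentype, repr(value))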
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pawn.py b/venv/lib/python3.11/site-packages/pygments/lexers/pawn.py
deleted file mode 100644
index 36b48fc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pawn.py
+++ /dev/null
@@ -1,202 +0,0 @@
-"""
- pygments.lexers.pawn
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Pawn languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.util import get_bool_opt
-
-__all__ = ['SourcePawnLexer', 'PawnLexer']
-
-
-class SourcePawnLexer(RegexLexer):
- """
- For SourcePawn source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
- name = 'SourcePawn'
- aliases = ['sp']
- filenames = ['*.sp']
- mimetypes = ['text/x-sourcepawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
- (r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(case|const|continue|native|'
- r'default|else|enum|for|if|new|operator|'
- r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
- 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
- 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
- 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
- 'ConVarBounds', 'QueryCookie', 'ReplySource',
- 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
- 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
- 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
- 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
- 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
- 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
- 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
- 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
- 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
- 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
- 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
- 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
- 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
- 'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
-
- def __init__(self, **options):
- self.smhighlighting = get_bool_opt(options,
- 'sourcemod', True)
-
- self._functions = set()
- if self.smhighlighting:
- from pygments.lexers._sourcemod_builtins import FUNCTIONS
- self._functions.update(FUNCTIONS)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.smhighlighting:
- if value in self.SM_TYPES:
- token = Keyword.Type
- elif value in self._functions:
- token = Name.Builtin
- yield index, token, value
-
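The get_tokens_unprocessed() override above shows a common Pygments pattern: run the regex rules, then retag plain Name tokens against known type and builtin sets. A hedged sketch of toggling that behaviour through the 'sourcemod' option read in __init__ (the SourcePawn snippet is ours; exactly which names get promoted depends on the bundled builtin lists):

# Hedged sketch: compare token streams with SourceMod highlighting on and off.
from pygments.lexers.pawn import SourcePawnLexer

code = 'public OnPluginStart() { PrintToServer("hi"); }'
with_sm = list(SourcePawnLexer().get_tokens(code))                  # sourcemod=True by default
without_sm = list(SourcePawnLexer(sourcemod=False).get_tokens(code))
# With the option off, identifiers that would otherwise be looked up in
# SM_TYPES / FUNCTIONS are left as plain Name tokens.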
-
-class PawnLexer(RegexLexer):
- """
- For Pawn source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pawn'
- aliases = ['pawn']
- filenames = ['*.p', '*.pwn', '*.inc']
- mimetypes = ['text/x-pawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
- (r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(switch|case|default|const|new|static|char|continue|break|'
- r'if|else|for|while|do|operator|enum|'
- r'public|return|sizeof|tagof|state|goto)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- def analyse_text(text):
- """This is basically C. There is a keyword which doesn't exist in C
- though and is nearly unique to this language."""
- if 'tagof' in text:
- return 0.01
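analyse_text() above deliberately returns a very weak score (0.01), so content-based guessing only falls back to Pawn when no other lexer claims the text. A hedged sketch using the public guess_lexer() helper (the snippet is ours; a lexer with a stronger heuristic would win instead):

# Hedged sketch: weak analyse_text scores only matter as a last resort.
from pygments.lexers import guess_lexer

snippet = "new t = tagof(Float:x);"
print(guess_lexer(snippet).name)  # expected to fall back to 'Pawn' here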
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/perl.py b/venv/lib/python3.11/site-packages/pygments/lexers/perl.py
deleted file mode 100644
index 88c6486..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/perl.py
+++ /dev/null
@@ -1,733 +0,0 @@
-"""
- pygments.lexers.perl
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Perl, Raku and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- using, this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-from pygments.util import shebang_matches
-
-__all__ = ['PerlLexer', 'Perl6Lexer']
-
-
-class PerlLexer(RegexLexer):
- """
- For Perl source code.
- """
-
- name = 'Perl'
- url = 'https://www.perl.org'
- aliases = ['perl', 'pl']
- filenames = ['*.pl', '*.pm', '*.t', '*.perl']
- mimetypes = ['text/x-perl', 'application/x-perl']
-
- flags = re.DOTALL | re.MULTILINE
- # TODO: these rules would benefit from review by someone well versed in Perl's grammar.
- tokens = {
- 'balanced-regex': [
- (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\A\#!.+?$', Comment.Hashbang),
- (r'\#.*?$', Comment.Single),
- (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
- (words((
- 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
- 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
- 'unless', 'until', 'while', 'print', 'new', 'BEGIN',
- 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
- Keyword),
- (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
- bygroups(Keyword, Whitespace, Name, Whitespace, Punctuation, Whitespace), 'format'),
- (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
- # common delimiters
- (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
- String.Regex),
- (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
- (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
- (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
- String.Regex),
- (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
- String.Regex),
- # balanced delimiters
- (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
- 'balanced-regex'),
- (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
- 'balanced-regex'),
-
- (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
- (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
- String.Regex),
- (r'\s+', Whitespace),
- (words((
- 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
- 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
- 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
- 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
- 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
- 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
- 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
- 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
- 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
- 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
- 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
- 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
- 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
- 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'oct', 'open',
- 'opendir', 'ord', 'our', 'pack', 'pipe', 'pop', 'pos', 'printf',
- 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
- 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename',
- 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
- 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
- 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
- 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
- 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
- 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
- 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
- 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
- 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
- Name.Builtin),
- (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
- (r'(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)',
- bygroups(String, String, String.Delimiter, String, String.Delimiter, Whitespace)),
- (r'__END__', Comment.Preproc, 'end-part'),
- (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
- (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
- (r'[$@%#]+', Name.Variable, 'varname'),
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
- (r'<([^\s>]+)>', String.Regex),
- (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
- (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
- (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
- (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
- (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
- (r'(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
- bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
- bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'(sub)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
- (words((
- 'no', 'package', 'require', 'use'), suffix=r'\b'),
- Keyword),
- (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&^|!\\~]=?', Operator),
- (r'[()\[\]:;,<>/?{}]', Punctuation), # yes, there's no shortage
- # of punctuation in Perl!
- (r'(?=\w)', Name, 'name'),
- ],
- 'format': [
- (r'\.\n', String.Interpol, '#pop'),
- (r'[^\n]*\n', String.Interpol),
- ],
- 'varname': [
- (r'\s+', Whitespace),
- (r'\{', Punctuation, '#pop'), # hash syntax?
- (r'\)|,', Punctuation, '#pop'), # argument specifier
- (r'\w+::', Name.Namespace),
- (r'[\w:]+', Name.Variable, '#pop'),
- ],
- 'name': [
- (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)', Name.Namespace, '#pop'),
- (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::', Name.Namespace, '#pop'),
- (r'[\w:]+', Name, '#pop'),
- (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
- (r'(?=\W)', Text, '#pop'),
- ],
- 'funcname': [
- (r'[a-zA-Z_]\w*[!?]?', Name.Function),
- (r'\s+', Whitespace),
- # argument declaration
- (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Whitespace)),
- (r';', Punctuation, '#pop'),
- (r'.*?\{', Punctuation, '#pop'),
- ],
- 'cb-string': [
- (r'\\[{}\\]', String.Other),
- (r'\\', String.Other),
- (r'\{', String.Other, 'cb-string'),
- (r'\}', String.Other, '#pop'),
- (r'[^{}\\]+', String.Other)
- ],
- 'rb-string': [
- (r'\\[()\\]', String.Other),
- (r'\\', String.Other),
- (r'\(', String.Other, 'rb-string'),
- (r'\)', String.Other, '#pop'),
- (r'[^()]+', String.Other)
- ],
- 'sb-string': [
- (r'\\[\[\]\\]', String.Other),
- (r'\\', String.Other),
- (r'\[', String.Other, 'sb-string'),
- (r'\]', String.Other, '#pop'),
- (r'[^\[\]]+', String.Other)
- ],
- 'lt-string': [
- (r'\\[<>\\]', String.Other),
- (r'\\', String.Other),
- (r'\<', String.Other, 'lt-string'),
- (r'\>', String.Other, '#pop'),
- (r'[^<>]+', String.Other)
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'perl'):
- return True
-
- result = 0
-
- if re.search(r'(?:my|our)\s+[$@%(]', text):
- result += 0.9
-
- if ':=' in text:
- # := is not valid Perl, but it does appear in Unicon, so we
- # become less confident if we think we found Perl containing :=
- result /= 2
-
- return result
-
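analyse_text() above combines a strong positive signal (a my/our declaration) with a penalty for ':=' (valid in Unicon, not in Perl). A hedged sketch of the resulting scores; Pygments wraps analyse_text so the calls below return clamped floats (the snippets are ours):

# Hedged sketch: how the ':=' check halves the Perl confidence score.
from pygments.lexers.perl import PerlLexer

plain = "my $x = 42;\n"
icon_like = "my $x = 42;\nlocal y := 1\n"
print(PerlLexer.analyse_text(plain))      # expected: 0.9 (my/our declaration)
print(PerlLexer.analyse_text(icon_like))  # expected: 0.45 (halved by ':=')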
-
-class Perl6Lexer(ExtendedRegexLexer):
- """
- For Raku (a.k.a. Perl 6) source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Perl6'
- url = 'https://www.raku.org'
- aliases = ['perl6', 'pl6', 'raku']
- filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
- '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod',
- '*.rakutest', '*.rakudoc']
- mimetypes = ['text/x-perl6', 'application/x-perl6']
- flags = re.MULTILINE | re.DOTALL
-
- PERL6_IDENTIFIER_RANGE = r"['\w:-]"
-
- PERL6_KEYWORDS = (
- #Phasers
- 'BEGIN','CATCH','CHECK','CLOSE','CONTROL','DOC','END','ENTER','FIRST',
- 'INIT','KEEP','LAST','LEAVE','NEXT','POST','PRE','QUIT','UNDO',
- #Keywords
- 'anon','augment','but','class','constant','default','does','else',
- 'elsif','enum','for','gather','given','grammar','has','if','import',
- 'is','let','loop','made','make','method','module','multi','my','need',
- 'orwith','our','proceed','proto','repeat','require','return',
- 'return-rw','returns','role','rule','state','sub','submethod','subset',
- 'succeed','supersede','token','try','unit','unless','until','use',
- 'when','while','with','without',
- #Traits
- 'export','native','repr','required','rw','symbol',
- )
-
- PERL6_BUILTINS = (
- 'ACCEPTS','abs','abs2rel','absolute','accept','accessed','acos',
- 'acosec','acosech','acosh','acotan','acotanh','acquire','act','action',
- 'actions','add','add_attribute','add_enum_value','add_fallback',
- 'add_method','add_parent','add_private_method','add_role','add_trustee',
- 'adverb','after','all','allocate','allof','allowed','alternative-names',
- 'annotations','antipair','antipairs','any','anyof','app_lifetime',
- 'append','arch','archname','args','arity','Array','asec','asech','asin',
- 'asinh','ASSIGN-KEY','ASSIGN-POS','assuming','ast','at','atan','atan2',
- 'atanh','AT-KEY','atomic-assign','atomic-dec-fetch','atomic-fetch',
- 'atomic-fetch-add','atomic-fetch-dec','atomic-fetch-inc',
- 'atomic-fetch-sub','atomic-inc-fetch','AT-POS','attributes','auth',
- 'await','backtrace','Bag','BagHash','bail-out','base','basename',
- 'base-repeating','batch','BIND-KEY','BIND-POS','bind-stderr',
- 'bind-stdin','bind-stdout','bind-udp','bits','bless','block','Bool',
- 'bool-only','bounds','break','Bridge','broken','BUILD','build-date',
- 'bytes','cache','callframe','calling-package','CALL-ME','callsame',
- 'callwith','can','cancel','candidates','cando','can-ok','canonpath',
- 'caps','caption','Capture','cas','catdir','categorize','categorize-list',
- 'catfile','catpath','cause','ceiling','cglobal','changed','Channel',
- 'chars','chdir','child','child-name','child-typename','chmod','chomp',
- 'chop','chr','chrs','chunks','cis','classify','classify-list','cleanup',
- 'clone','close','closed','close-stdin','cmp-ok','code','codes','collate',
- 'column','comb','combinations','command','comment','compiler','Complex',
- 'compose','compose_type','composer','condition','config',
- 'configure_destroy','configure_type_checking','conj','connect',
- 'constraints','construct','contains','contents','copy','cos','cosec',
- 'cosech','cosh','cotan','cotanh','count','count-only','cpu-cores',
- 'cpu-usage','CREATE','create_type','cross','cue','curdir','curupdir','d',
- 'Date','DateTime','day','daycount','day-of-month','day-of-week',
- 'day-of-year','days-in-month','declaration','decode','decoder','deepmap',
- 'default','defined','DEFINITE','delayed','DELETE-KEY','DELETE-POS',
- 'denominator','desc','DESTROY','destroyers','devnull','diag',
- 'did-you-mean','die','dies-ok','dir','dirname','dir-sep','DISTROnames',
- 'do','does','does-ok','done','done-testing','duckmap','dynamic','e',
- 'eager','earlier','elems','emit','enclosing','encode','encoder',
- 'encoding','end','ends-with','enum_from_value','enum_value_list',
- 'enum_values','enums','eof','EVAL','eval-dies-ok','EVALFILE',
- 'eval-lives-ok','exception','excludes-max','excludes-min','EXISTS-KEY',
- 'EXISTS-POS','exit','exitcode','exp','expected','explicitly-manage',
- 'expmod','extension','f','fail','fails-like','fc','feature','file',
- 'filename','find_method','find_method_qualified','finish','first','flat',
- 'flatmap','flip','floor','flunk','flush','fmt','format','formatter',
- 'freeze','from','from-list','from-loop','from-posix','full',
- 'full-barrier','get','get_value','getc','gist','got','grab','grabpairs',
- 'grep','handle','handled','handles','hardware','has_accessor','Hash',
- 'head','headers','hh-mm-ss','hidden','hides','hour','how','hyper','id',
- 'illegal','im','in','indent','index','indices','indir','infinite',
- 'infix','infix:<+>','infix:<->','install_method_cache','Instant',
- 'instead','Int','int-bounds','interval','in-timezone','invalid-str',
- 'invert','invocant','IO','IO::Notification.watch-path','is_trusted',
- 'is_type','isa','is-absolute','isa-ok','is-approx','is-deeply',
- 'is-hidden','is-initial-thread','is-int','is-lazy','is-leap-year',
- 'isNaN','isnt','is-prime','is-relative','is-routine','is-setting',
- 'is-win','item','iterator','join','keep','kept','KERNELnames','key',
- 'keyof','keys','kill','kv','kxxv','l','lang','last','lastcall','later',
- 'lazy','lc','leading','level','like','line','lines','link','List',
- 'listen','live','lives-ok','local','lock','log','log10','lookup','lsb',
- 'made','MAIN','make','Map','match','max','maxpairs','merge','message',
- 'method','method_table','methods','migrate','min','minmax','minpairs',
- 'minute','misplaced','Mix','MixHash','mkdir','mode','modified','month',
- 'move','mro','msb','multi','multiness','my','name','named','named_names',
- 'narrow','nativecast','native-descriptor','nativesizeof','new','new_type',
- 'new-from-daycount','new-from-pairs','next','nextcallee','next-handle',
- 'nextsame','nextwith','NFC','NFD','NFKC','NFKD','nl-in','nl-out',
- 'nodemap','nok','none','norm','not','note','now','nude','Num',
- 'numerator','Numeric','of','offset','offset-in-hours','offset-in-minutes',
- 'ok','old','on-close','one','on-switch','open','opened','operation',
- 'optional','ord','ords','orig','os-error','osname','out-buffer','pack',
- 'package','package-kind','package-name','packages','pair','pairs',
- 'pairup','parameter','params','parent','parent-name','parents','parse',
- 'parse-base','parsefile','parse-names','parts','pass','path','path-sep',
- 'payload','peer-host','peer-port','periods','perl','permutations','phaser',
- 'pick','pickpairs','pid','placeholder','plan','plus','polar','poll',
- 'polymod','pop','pos','positional','posix','postfix','postmatch',
- 'precomp-ext','precomp-target','pred','prefix','prematch','prepend',
- 'print','printf','print-nl','print-to','private','private_method_table',
- 'proc','produce','Promise','prompt','protect','pull-one','push',
- 'push-all','push-at-least','push-exactly','push-until-lazy','put',
- 'qualifier-type','quit','r','race','radix','rand','range','Rat','raw',
- 're','read','readchars','readonly','ready','Real','reallocate','reals',
- 'reason','rebless','receive','recv','redispatcher','redo','reduce',
- 'rel2abs','relative','release','rename','repeated','replacement',
- 'report','reserved','resolve','restore','result','resume','rethrow',
- 'reverse','right','rindex','rmdir','role','roles_to_compose','rolish',
- 'roll','rootdir','roots','rotate','rotor','round','roundrobin',
- 'routine-type','run','rwx','s','samecase','samemark','samewith','say',
- 'schedule-on','scheduler','scope','sec','sech','second','seek','self',
- 'send','Set','set_hidden','set_name','set_package','set_rw','set_value',
- 'SetHash','set-instruments','setup_finalization','shape','share','shell',
- 'shift','sibling','sigil','sign','signal','signals','signature','sin',
- 'sinh','sink','sink-all','skip','skip-at-least','skip-at-least-pull-one',
- 'skip-one','skip-rest','sleep','sleep-timer','sleep-until','Slip','slurp',
- 'slurp-rest','slurpy','snap','snapper','so','socket-host','socket-port',
- 'sort','source','source-package','spawn','SPEC','splice','split',
- 'splitdir','splitpath','sprintf','spurt','sqrt','squish','srand','stable',
- 'start','started','starts-with','status','stderr','stdout','Str',
- 'sub_signature','subbuf','subbuf-rw','subname','subparse','subst',
- 'subst-mutate','substr','substr-eq','substr-rw','subtest','succ','sum',
- 'Supply','symlink','t','tail','take','take-rw','tan','tanh','tap',
- 'target','target-name','tc','tclc','tell','then','throttle','throw',
- 'throws-like','timezone','tmpdir','to','today','todo','toggle','to-posix',
- 'total','trailing','trans','tree','trim','trim-leading','trim-trailing',
- 'truncate','truncated-to','trusts','try_acquire','trying','twigil','type',
- 'type_captures','typename','uc','udp','uncaught_handler','unimatch',
- 'uniname','uninames','uniparse','uniprop','uniprops','unique','unival',
- 'univals','unlike','unlink','unlock','unpack','unpolar','unshift',
- 'unwrap','updir','USAGE','use-ok','utc','val','value','values','VAR',
- 'variable','verbose-config','version','VMnames','volume','vow','w','wait',
- 'warn','watch','watch-path','week','weekday-of-month','week-number',
- 'week-year','WHAT','when','WHERE','WHEREFORE','WHICH','WHO',
- 'whole-second','WHY','wordcase','words','workaround','wrap','write',
- 'write-to','x','yada','year','yield','yyyy-mm-dd','z','zip','zip-latest',
-
- )
-
- PERL6_BUILTIN_CLASSES = (
- #Booleans
- 'False','True',
- #Classes
- 'Any','Array','Associative','AST','atomicint','Attribute','Backtrace',
- 'Backtrace::Frame','Bag','Baggy','BagHash','Blob','Block','Bool','Buf',
- 'Callable','CallFrame','Cancellation','Capture','CArray','Channel','Code',
- 'compiler','Complex','ComplexStr','Cool','CurrentThreadScheduler',
- 'Cursor','Date','Dateish','DateTime','Distro','Duration','Encoding',
- 'Exception','Failure','FatRat','Grammar','Hash','HyperWhatever','Instant',
- 'Int','int16','int32','int64','int8','IntStr','IO','IO::ArgFiles',
- 'IO::CatHandle','IO::Handle','IO::Notification','IO::Path',
- 'IO::Path::Cygwin','IO::Path::QNX','IO::Path::Unix','IO::Path::Win32',
- 'IO::Pipe','IO::Socket','IO::Socket::Async','IO::Socket::INET','IO::Spec',
- 'IO::Spec::Cygwin','IO::Spec::QNX','IO::Spec::Unix','IO::Spec::Win32',
- 'IO::Special','Iterable','Iterator','Junction','Kernel','Label','List',
- 'Lock','Lock::Async','long','longlong','Macro','Map','Match',
- 'Metamodel::AttributeContainer','Metamodel::C3MRO','Metamodel::ClassHOW',
- 'Metamodel::EnumHOW','Metamodel::Finalization','Metamodel::MethodContainer',
- 'Metamodel::MROBasedMethodDispatch','Metamodel::MultipleInheritance',
- 'Metamodel::Naming','Metamodel::Primitives','Metamodel::PrivateMethodContainer',
- 'Metamodel::RoleContainer','Metamodel::Trusting','Method','Mix','MixHash',
- 'Mixy','Mu','NFC','NFD','NFKC','NFKD','Nil','Num','num32','num64',
- 'Numeric','NumStr','ObjAt','Order','Pair','Parameter','Perl','Pod::Block',
- 'Pod::Block::Code','Pod::Block::Comment','Pod::Block::Declarator',
- 'Pod::Block::Named','Pod::Block::Para','Pod::Block::Table','Pod::Heading',
- 'Pod::Item','Pointer','Positional','PositionalBindFailover','Proc',
- 'Proc::Async','Promise','Proxy','PseudoStash','QuantHash','Range','Rat',
- 'Rational','RatStr','Real','Regex','Routine','Scalar','Scheduler',
- 'Semaphore','Seq','Set','SetHash','Setty','Signature','size_t','Slip',
- 'Stash','Str','StrDistance','Stringy','Sub','Submethod','Supplier',
- 'Supplier::Preserving','Supply','Systemic','Tap','Telemetry',
- 'Telemetry::Instrument::Thread','Telemetry::Instrument::Usage',
- 'Telemetry::Period','Telemetry::Sampler','Thread','ThreadPoolScheduler',
- 'UInt','uint16','uint32','uint64','uint8','Uni','utf8','Variable',
- 'Version','VM','Whatever','WhateverCode','WrapHandle'
- )
-
- PERL6_OPERATORS = (
- 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
- 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
- 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
- '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
- '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
- 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
- '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
- '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
- '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
- 'not', '<==', '==>', '<<==', '==>>','unicmp',
- )
-
- # Perl 6 has a *lot* of possible bracketing characters;
- # this list was lifted from STD.pm6 (https://github.com/perl6/std)
- PERL6_BRACKETS = {
- '\u0028': '\u0029', '\u003c': '\u003e', '\u005b': '\u005d',
- '\u007b': '\u007d', '\u00ab': '\u00bb', '\u0f3a': '\u0f3b',
- '\u0f3c': '\u0f3d', '\u169b': '\u169c', '\u2018': '\u2019',
- '\u201a': '\u2019', '\u201b': '\u2019', '\u201c': '\u201d',
- '\u201e': '\u201d', '\u201f': '\u201d', '\u2039': '\u203a',
- '\u2045': '\u2046', '\u207d': '\u207e', '\u208d': '\u208e',
- '\u2208': '\u220b', '\u2209': '\u220c', '\u220a': '\u220d',
- '\u2215': '\u29f5', '\u223c': '\u223d', '\u2243': '\u22cd',
- '\u2252': '\u2253', '\u2254': '\u2255', '\u2264': '\u2265',
- '\u2266': '\u2267', '\u2268': '\u2269', '\u226a': '\u226b',
- '\u226e': '\u226f', '\u2270': '\u2271', '\u2272': '\u2273',
- '\u2274': '\u2275', '\u2276': '\u2277', '\u2278': '\u2279',
- '\u227a': '\u227b', '\u227c': '\u227d', '\u227e': '\u227f',
- '\u2280': '\u2281', '\u2282': '\u2283', '\u2284': '\u2285',
- '\u2286': '\u2287', '\u2288': '\u2289', '\u228a': '\u228b',
- '\u228f': '\u2290', '\u2291': '\u2292', '\u2298': '\u29b8',
- '\u22a2': '\u22a3', '\u22a6': '\u2ade', '\u22a8': '\u2ae4',
- '\u22a9': '\u2ae3', '\u22ab': '\u2ae5', '\u22b0': '\u22b1',
- '\u22b2': '\u22b3', '\u22b4': '\u22b5', '\u22b6': '\u22b7',
- '\u22c9': '\u22ca', '\u22cb': '\u22cc', '\u22d0': '\u22d1',
- '\u22d6': '\u22d7', '\u22d8': '\u22d9', '\u22da': '\u22db',
- '\u22dc': '\u22dd', '\u22de': '\u22df', '\u22e0': '\u22e1',
- '\u22e2': '\u22e3', '\u22e4': '\u22e5', '\u22e6': '\u22e7',
- '\u22e8': '\u22e9', '\u22ea': '\u22eb', '\u22ec': '\u22ed',
- '\u22f0': '\u22f1', '\u22f2': '\u22fa', '\u22f3': '\u22fb',
- '\u22f4': '\u22fc', '\u22f6': '\u22fd', '\u22f7': '\u22fe',
- '\u2308': '\u2309', '\u230a': '\u230b', '\u2329': '\u232a',
- '\u23b4': '\u23b5', '\u2768': '\u2769', '\u276a': '\u276b',
- '\u276c': '\u276d', '\u276e': '\u276f', '\u2770': '\u2771',
- '\u2772': '\u2773', '\u2774': '\u2775', '\u27c3': '\u27c4',
- '\u27c5': '\u27c6', '\u27d5': '\u27d6', '\u27dd': '\u27de',
- '\u27e2': '\u27e3', '\u27e4': '\u27e5', '\u27e6': '\u27e7',
- '\u27e8': '\u27e9', '\u27ea': '\u27eb', '\u2983': '\u2984',
- '\u2985': '\u2986', '\u2987': '\u2988', '\u2989': '\u298a',
- '\u298b': '\u298c', '\u298d': '\u298e', '\u298f': '\u2990',
- '\u2991': '\u2992', '\u2993': '\u2994', '\u2995': '\u2996',
- '\u2997': '\u2998', '\u29c0': '\u29c1', '\u29c4': '\u29c5',
- '\u29cf': '\u29d0', '\u29d1': '\u29d2', '\u29d4': '\u29d5',
- '\u29d8': '\u29d9', '\u29da': '\u29db', '\u29f8': '\u29f9',
- '\u29fc': '\u29fd', '\u2a2b': '\u2a2c', '\u2a2d': '\u2a2e',
- '\u2a34': '\u2a35', '\u2a3c': '\u2a3d', '\u2a64': '\u2a65',
- '\u2a79': '\u2a7a', '\u2a7d': '\u2a7e', '\u2a7f': '\u2a80',
- '\u2a81': '\u2a82', '\u2a83': '\u2a84', '\u2a8b': '\u2a8c',
- '\u2a91': '\u2a92', '\u2a93': '\u2a94', '\u2a95': '\u2a96',
- '\u2a97': '\u2a98', '\u2a99': '\u2a9a', '\u2a9b': '\u2a9c',
- '\u2aa1': '\u2aa2', '\u2aa6': '\u2aa7', '\u2aa8': '\u2aa9',
- '\u2aaa': '\u2aab', '\u2aac': '\u2aad', '\u2aaf': '\u2ab0',
- '\u2ab3': '\u2ab4', '\u2abb': '\u2abc', '\u2abd': '\u2abe',
- '\u2abf': '\u2ac0', '\u2ac1': '\u2ac2', '\u2ac3': '\u2ac4',
- '\u2ac5': '\u2ac6', '\u2acd': '\u2ace', '\u2acf': '\u2ad0',
- '\u2ad1': '\u2ad2', '\u2ad3': '\u2ad4', '\u2ad5': '\u2ad6',
- '\u2aec': '\u2aed', '\u2af7': '\u2af8', '\u2af9': '\u2afa',
- '\u2e02': '\u2e03', '\u2e04': '\u2e05', '\u2e09': '\u2e0a',
- '\u2e0c': '\u2e0d', '\u2e1c': '\u2e1d', '\u2e20': '\u2e21',
- '\u3008': '\u3009', '\u300a': '\u300b', '\u300c': '\u300d',
- '\u300e': '\u300f', '\u3010': '\u3011', '\u3014': '\u3015',
- '\u3016': '\u3017', '\u3018': '\u3019', '\u301a': '\u301b',
- '\u301d': '\u301e', '\ufd3e': '\ufd3f', '\ufe17': '\ufe18',
- '\ufe35': '\ufe36', '\ufe37': '\ufe38', '\ufe39': '\ufe3a',
- '\ufe3b': '\ufe3c', '\ufe3d': '\ufe3e', '\ufe3f': '\ufe40',
- '\ufe41': '\ufe42', '\ufe43': '\ufe44', '\ufe47': '\ufe48',
- '\ufe59': '\ufe5a', '\ufe5b': '\ufe5c', '\ufe5d': '\ufe5e',
- '\uff08': '\uff09', '\uff1c': '\uff1e', '\uff3b': '\uff3d',
- '\uff5b': '\uff5d', '\uff5f': '\uff60', '\uff62': '\uff63',
- }
-
- def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
- if boundary_regex_fragment is None:
- return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
- suffix + r')\b'
- else:
- return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
- r'|'.join(re.escape(x) for x in words) + r')' + suffix + r'(?!' + \
- boundary_regex_fragment + r')'
-
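_build_word_match() above turns a word tuple into a single alternation, either with plain \b word boundaries or guarded by negative look-around on the identifier range. A simplified sketch of the two shapes it produces (prefix/suffix handling omitted; the word list is ours):

# Simplified sketch of the two regex shapes built by _build_word_match above.
import re

words = ('if', 'elsif')
plain = r'\b(' + '|'.join(re.escape(w) for w in words) + r')\b'
# -> r'\b(if|elsif)\b'

boundary = r"['\w:-]"   # PERL6_IDENTIFIER_RANGE above
guarded = (r'(?<!' + boundary + r')(' +
           '|'.join(re.escape(w) for w in words) +
           r')(?!' + boundary + r')')
# The guarded form refuses to match keywords embedded in larger identifiers:
assert re.search(plain, "my-if") and not re.search(guarded, "my-if")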
- def brackets_callback(token_class):
- def callback(lexer, match, context):
- groups = match.groupdict()
- opening_chars = groups['delimiter']
- n_chars = len(opening_chars)
- adverbs = groups.get('adverbs')
-
- closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
- text = context.text
-
- if closer is None: # it's not a mirrored character, which means we
- # just need to look for the next occurrence
-
- end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
- else: # we need to look for the corresponding closing character,
- # keep nesting in mind
- closing_chars = closer * n_chars
- nesting_level = 1
-
- search_pos = match.start('delimiter')
-
- while nesting_level > 0:
- next_open_pos = text.find(opening_chars, search_pos + n_chars)
- next_close_pos = text.find(closing_chars, search_pos + n_chars)
-
- if next_close_pos == -1:
- next_close_pos = len(text)
- nesting_level = 0
- elif next_open_pos != -1 and next_open_pos < next_close_pos:
- nesting_level += 1
- search_pos = next_open_pos
- else: # next_close_pos < next_open_pos
- nesting_level -= 1
- search_pos = next_close_pos
-
- end_pos = next_close_pos
-
- if end_pos < 0: # if we didn't find a closer, just highlight the
- # rest of the text in this class
- end_pos = len(text)
-
- if adverbs is not None and re.search(r':to\b', adverbs):
- heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
- end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
- r'\s*$', text[end_pos:], re.MULTILINE)
-
- if end_heredoc:
- end_pos += end_heredoc.end()
- else:
- end_pos = len(text)
-
- yield match.start(), token_class, text[match.start():end_pos + n_chars]
- context.pos = end_pos + n_chars
-
- return callback
-
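brackets_callback() above scans forward for the matching closing delimiter while tracking nesting depth. The same loop, isolated into a small helper for illustration (find_closing is our name, not part of Pygments):

# Hedged sketch: the nesting-aware delimiter search used by brackets_callback.
def find_closing(text, start, opener, closer):
    """Return the index of the closer matching the opener at ``start``."""
    nesting = 1
    pos = start
    while nesting > 0:
        next_open = text.find(opener, pos + len(opener))
        next_close = text.find(closer, pos + len(opener))
        if next_close == -1:          # unbalanced: treat end of text as the closer
            return len(text)
        if next_open != -1 and next_open < next_close:
            nesting += 1              # another opener before the next closer
            pos = next_open
        else:
            nesting -= 1              # this closer matches one open level
            pos = next_close
    return pos

assert find_closing("q{a{b}c}", 1, "{", "}") == 7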
- def opening_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
-
- # if we encounter an opening brace and we're one level
- # below a token state, it means we need to increment
- # the nesting level for braces so we know later when
- # we should return to the token rules.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level += 1
-
- def closing_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
-
- # if we encounter a free closing brace and we're one level
- # below a token state, it means we need to check the nesting
- # level to see if we need to return to the token state.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level -= 1
- if context.perl6_token_nesting_level == 0:
- stack.pop()
-
- def embedded_perl6_callback(lexer, match, context):
- context.perl6_token_nesting_level = 1
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
- context.stack.append('root')
-
- # If you're modifying these rules, be careful if you need to process '{' or '}'
- # characters. We have special logic for processing these characters (due to the fact
- # that you can nest Perl 6 code in regex blocks), so if you need to process one of
- # them, make sure you also process the corresponding one!
- tokens = {
- 'common': [
- (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
- brackets_callback(Comment.Multiline)),
- (r'#[^\n]*$', Comment.Single),
- (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
- (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
- (r'^=.*?\n\s*?\n', Comment.Multiline),
- (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
- bygroups(Keyword, Name), 'token-sym-brackets'),
- (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + r')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
- bygroups(Keyword, Name), 'pre-token'),
- # deal with a special case in the Perl 6 grammar (role q { ... })
- (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Whitespace, Name, Whitespace)),
- (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
- (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
- Name.Builtin),
- (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
- # copied from PerlLexer
- (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable),
- (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
- (r'::\?\w+', Name.Variable.Global),
- (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable.Global),
- (r'\$(?:<.*?>)+', Name.Variable),
- (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
- r'(?P=first_char)*)', brackets_callback(String)),
- # copied from PerlLexer
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'm\w+(?=\()', Name),
- (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^\w:\s])'
- r'(?P=first_char)*)', brackets_callback(String.Regex)),
- (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
- String.Regex),
- (r'<[^\s=].*?\S>', String),
- (_build_word_match(PERL6_OPERATORS), Operator),
- (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- ],
- 'root': [
- include('common'),
- (r'\{', opening_brace_callback),
- (r'\}', closing_brace_callback),
- (r'.+?', Text),
- ],
- 'pre-token': [
- include('common'),
- (r'\{', Text, ('#pop', 'token')),
- (r'.+?', Text),
- ],
- 'token-sym-brackets': [
- (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
- brackets_callback(Name), ('#pop', 'pre-token')),
- default(('#pop', 'pre-token')),
- ],
- 'token': [
- (r'\}', Text, '#pop'),
- (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
- # make sure that quotes in character classes aren't treated as strings
- (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
- # make sure that '#' characters in quotes aren't treated as comments
- (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
- (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
- (r'#.*?$', Comment.Single),
- (r'\{', embedded_perl6_callback),
- ('.+?', String.Regex),
- ],
- }
-
- def analyse_text(text):
- def strip_pod(lines):
- in_pod = False
- stripped_lines = []
-
- for line in lines:
- if re.match(r'^=(?:end|cut)', line):
- in_pod = False
- elif re.match(r'^=\w+', line):
- in_pod = True
- elif not in_pod:
- stripped_lines.append(line)
-
- return stripped_lines
-
- # XXX handle block comments
- lines = text.splitlines()
- lines = strip_pod(lines)
- text = '\n'.join(lines)
-
- if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
- return True
-
- saw_perl_decl = False
- rating = 0.0
-
- # check for my/our/has declarations
- if re.search(r"(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE +
- r"+\s+)?[$@%&(]", text):
- rating = 0.8
- saw_perl_decl = True
-
- for line in lines:
- line = re.sub('#.*', '', line)
- if re.match(r'^\s*$', line):
- continue
-
- # match v6; use v6; use v6.0; use v6.0.0;
- if re.match(r'^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
- return True
- # match class, module, role, enum, grammar declarations
- class_decl = re.match(r'^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
- if class_decl:
- if saw_perl_decl or class_decl.group('scope') is not None:
- return True
- rating = 0.05
- continue
- break
-
- if ':=' in text:
- # Same logic as above for PerlLexer
- rating /= 2
-
- return rating
-
- def __init__(self, **options):
- super().__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
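analyse_text() above returns True as soon as an explicit 'use v6;' is seen (or a class/module declaration following a my/our/has hint); Pygments clamps that into a 1.0 rating. A hedged sketch (the snippet is ours):

# Hedged sketch: the 'use v6;' fast path in the heuristics above.
from pygments.lexers.perl import Perl6Lexer

raku = "use v6;\nsay 'hello';\n"
print(Perl6Lexer.analyse_text(raku))  # expected: 1.0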
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/phix.py b/venv/lib/python3.11/site-packages/pygments/lexers/phix.py
deleted file mode 100644
index fb08b1d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/phix.py
+++ /dev/null
@@ -1,364 +0,0 @@
-"""
- pygments.lexers.phix
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Phix.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Whitespace
-
-__all__ = ['PhixLexer']
-
-
-class PhixLexer(RegexLexer):
- """
- Pygments Lexer for Phix files (.exw).
- See http://phix.x10.mx
-
- .. versionadded:: 2.14.0
- """
-
- name = 'Phix'
- url = 'http://phix.x10.mx'
- aliases = ['phix']
- filenames = ['*.exw']
- mimetypes = ['text/x-phix']
-
- flags = re.MULTILINE  # nb: **NOT** re.DOTALL! (that would completely break comment handling)
-
- preproc = (
- 'ifdef', 'elsifdef', 'elsedef'
- )
- # Note: these lists are auto-generated by pwa/p2js.exw whenever pwa\src\p2js_keywords.e (etc)
- # changes, though the subsequent copy/commit/pull-request steps are of course still manual.
- types = (
- 'string', 'nullable_string', 'atom_string', 'atom', 'bool', 'boolean',
- 'cdCanvan', 'cdCanvas', 'complex', 'CURLcode', 'dictionary', 'int',
- 'integer', 'Ihandle', 'Ihandles', 'Ihandln', 'mpfr', 'mpq', 'mpz',
- 'mpz_or_string', 'number', 'rid_string', 'seq', 'sequence', 'timedate',
- 'object'
- )
- keywords = (
- 'abstract', 'class', 'continue', 'export', 'extends', 'nullable',
- 'private', 'public', 'static', 'struct', 'trace',
- 'and', 'break', 'by', 'case', 'catch', 'const', 'constant', 'debug',
- 'default', 'do', 'else', 'elsif', 'end', 'enum', 'exit', 'fallthru',
- 'fallthrough', 'for', 'forward', 'function', 'global', 'if', 'in',
- 'include', 'js', 'javascript', 'javascript_semantics', 'let', 'not',
- 'or', 'procedure', 'profile', 'profile_time', 'return', 'safe_mode',
- 'switch', 'then', 'to', 'try', 'type', 'type_check', 'until', 'warning',
- 'while', 'with', 'without', 'xor'
- )
- routines = (
- 'abort', 'abs', 'adjust_timedate', 'and_bits', 'and_bitsu', 'apply',
- 'append', 'arccos', 'arcsin', 'arctan', 'assert', 'atan2',
- 'atom_to_float32', 'atom_to_float64', 'bankers_rounding', 'beep',
- 'begins', 'binary_search', 'bits_to_int', 'bk_color', 'bytes_to_int',
- 'call_func', 'call_proc', 'cdCanvasActivate', 'cdCanvasArc',
- 'cdCanvasBegin', 'cdCanvasBox', 'cdCanvasChord', 'cdCanvasCircle',
- 'cdCanvasClear', 'cdCanvasEnd', 'cdCanvasFlush', 'cdCanvasFont',
- 'cdCanvasGetImageRGB', 'cdCanvasGetSize', 'cdCanvasGetTextAlignment',
- 'cdCanvasGetTextSize', 'cdCanvasLine', 'cdCanvasMark',
- 'cdCanvasMarkSize', 'cdCanvasMultiLineVectorText', 'cdCanvasPixel',
- 'cdCanvasRect', 'cdCanvasRoundedBox', 'cdCanvasRoundedRect',
- 'cdCanvasSector', 'cdCanvasSetAttribute', 'cdCanvasSetBackground',
- 'cdCanvasSetFillMode', 'cdCanvasSetForeground',
- 'cdCanvasSetInteriorStyle', 'cdCanvasSetLineStyle',
- 'cdCanvasSetLineWidth', 'cdCanvasSetTextAlignment', 'cdCanvasText',
- 'cdCanvasSetTextOrientation', 'cdCanvasGetTextOrientation',
- 'cdCanvasVectorText', 'cdCanvasVectorTextDirection',
- 'cdCanvasVectorTextSize', 'cdCanvasVertex', 'cdCreateCanvas',
- 'cdDecodeAlpha', 'cdDecodeColor', 'cdDecodeColorAlpha', 'cdEncodeAlpha',
- 'cdEncodeColor', 'cdEncodeColorAlpha', 'cdKillCanvas', 'cdVersion',
- 'cdVersionDate', 'ceil', 'change_timezone', 'choose', 'clear_screen',
- 'columnize', 'command_line', 'compare', 'complex_abs', 'complex_add',
- 'complex_arg', 'complex_conjugate', 'complex_cos', 'complex_cosh',
- 'complex_div', 'complex_exp', 'complex_imag', 'complex_inv',
- 'complex_log', 'complex_mul', 'complex_neg', 'complex_new',
- 'complex_norm', 'complex_power', 'complex_rho', 'complex_real',
- 'complex_round', 'complex_sin', 'complex_sinh', 'complex_sprint',
- 'complex_sqrt', 'complex_sub', 'complex_theta', 'concat', 'cos',
- 'crash', 'custom_sort', 'date', 'day_of_week', 'day_of_year',
- 'days_in_month', 'decode_base64', 'decode_flags', 'deep_copy', 'deld',
- 'deserialize', 'destroy_dict', 'destroy_queue', 'destroy_stack',
- 'dict_name', 'dict_size', 'elapsed', 'elapsed_short', 'encode_base64',
- 'equal', 'even', 'exp', 'extract', 'factorial', 'factors',
- 'file_size_k', 'find', 'find_all', 'find_any', 'find_replace', 'filter',
- 'flatten', 'float32_to_atom', 'float64_to_atom', 'floor',
- 'format_timedate', 'free_console', 'from_polar', 'gcd', 'get_file_base',
- 'get_file_extension', 'get_file_name', 'get_file_name_and_path',
- 'get_file_path', 'get_file_path_and_name', 'get_maxprime', 'get_prime',
- 'get_primes', 'get_primes_le', 'get_proper_dir', 'get_proper_path',
- 'get_rand', 'get_routine_info', 'get_test_abort', 'get_test_logfile',
- 'get_test_pause', 'get_test_verbosity', 'get_tzid', 'getd', 'getdd',
- 'getd_all_keys', 'getd_by_index', 'getd_index', 'getd_partial_key',
- 'glAttachShader', 'glBindBuffer', 'glBindTexture', 'glBufferData',
- 'glCanvasSpecialText', 'glClear', 'glClearColor', 'glColor',
- 'glCompileShader', 'glCreateBuffer', 'glCreateProgram',
- 'glCreateShader', 'glCreateTexture', 'glDeleteProgram',
- 'glDeleteShader', 'glDrawArrays', 'glEnable',
- 'glEnableVertexAttribArray', 'glFloat32Array', 'glInt32Array',
- 'glFlush', 'glGetAttribLocation', 'glGetError', 'glGetProgramInfoLog',
- 'glGetProgramParameter', 'glGetShaderInfoLog', 'glGetShaderParameter',
- 'glGetUniformLocation', 'glLinkProgram', 'glLoadIdentity',
- 'glMatrixMode', 'glOrtho', 'glRotatef', 'glShadeModel',
- 'glShaderSource', 'glSimpleA7texcoords', 'glTexImage2Dc',
- 'glTexParameteri', 'glTranslate', 'glUniform1f', 'glUniform1i',
- 'glUniformMatrix4fv', 'glUseProgram', 'glVertex',
- 'glVertexAttribPointer', 'glViewport', 'head', 'hsv_to_rgb', 'iff',
- 'iif', 'include_file', 'incl0de_file', 'insert', 'instance',
- 'int_to_bits', 'int_to_bytes', 'is_dict', 'is_integer', 's_leap_year',
- 'is_prime', 'is_prime2', 'islower', 'isupper', 'Icallback',
- 'iup_isdouble', 'iup_isprint', 'iup_XkeyBase', 'IupAppend', 'IupAlarm',
- 'IupBackgroundBox', 'IupButton', 'IupCalendar', 'IupCanvas',
- 'IupClipboard', 'IupClose', 'IupCloseOnEscape', 'IupControlsOpen',
- 'IupDatePick', 'IupDestroy', 'IupDialog', 'IupDrawArc', 'IupDrawBegin',
- 'IupDrawEnd', 'IupDrawGetSize', 'IupDrawGetTextSize', 'IupDrawLine',
- 'IupDrawRectangle', 'IupDrawText', 'IupExpander', 'IupFill',
- 'IupFlatLabel', 'IupFlatList', 'IupFlatTree', 'IupFlush', 'IupFrame',
- 'IupGetAttribute', 'IupGetAttributeId', 'IupGetAttributePtr',
- 'IupGetBrother', 'IupGetChild', 'IupGetChildCount', 'IupGetClassName',
- 'IupGetDialog', 'IupGetDialogChild', 'IupGetDouble', 'IupGetFocus',
- 'IupGetGlobal', 'IupGetGlobalInt', 'IupGetGlobalIntInt', 'IupGetInt',
- 'IupGetInt2', 'IupGetIntId', 'IupGetIntInt', 'IupGetParent',
- 'IupGLCanvas', 'IupGLCanvasOpen', 'IupGLMakeCurrent', 'IupGraph',
- 'IupHbox', 'IupHide', 'IupImage', 'IupImageRGBA', 'IupItem',
- 'iupKeyCodeToName', 'IupLabel', 'IupLink', 'IupList', 'IupMap',
- 'IupMenu', 'IupMenuItem', 'IupMessage', 'IupMessageDlg', 'IupMultiBox',
- 'IupMultiLine', 'IupNextField', 'IupNormaliser', 'IupOpen',
- 'IupPlayInput', 'IupPopup', 'IupPreviousField', 'IupProgressBar',
- 'IupRadio', 'IupRecordInput', 'IupRedraw', 'IupRefresh',
- 'IupRefreshChildren', 'IupSeparator', 'IupSetAttribute',
- 'IupSetAttributes', 'IupSetAttributeHandle', 'IupSetAttributeId',
- 'IupSetAttributePtr', 'IupSetCallback', 'IupSetCallbacks',
- 'IupSetDouble', 'IupSetFocus', 'IupSetGlobal', 'IupSetGlobalInt',
- 'IupSetGlobalFunction', 'IupSetHandle', 'IupSetInt',
- 'IupSetStrAttribute', 'IupSetStrGlobal', 'IupShow', 'IupShowXY',
- 'IupSplit', 'IupStoreAttribute', 'IupSubmenu', 'IupTable',
- 'IupTableClearSelected', 'IupTableClick_cb', 'IupTableGetSelected',
- 'IupTableResize_cb', 'IupTableSetData', 'IupTabs', 'IupText',
- 'IupTimer', 'IupToggle', 'IupTreeAddNodes', 'IupTreeView', 'IupUpdate',
- 'IupValuator', 'IupVbox', 'join', 'join_by', 'join_path', 'k_perm',
- 'largest', 'lcm', 'length', 'log', 'log10', 'log2', 'lower',
- 'm4_crossProduct', 'm4_inverse', 'm4_lookAt', 'm4_multiply',
- 'm4_normalize', 'm4_perspective', 'm4_subtractVectors', 'm4_xRotate',
- 'm4_yRotate', 'machine_bits', 'machine_word', 'match', 'match_all',
- 'match_replace', 'max', 'maxsq', 'min', 'minsq', 'mod', 'mpfr_add',
- 'mpfr_ceil', 'mpfr_cmp', 'mpfr_cmp_si', 'mpfr_const_pi', 'mpfr_div',
- 'mpfr_div_si', 'mpfr_div_z', 'mpfr_floor', 'mpfr_free', 'mpfr_get_d',
- 'mpfr_get_default_precision', 'mpfr_get_default_rounding_mode',
- 'mpfr_get_fixed', 'mpfr_get_precision', 'mpfr_get_si', 'mpfr_init',
- 'mpfr_inits', 'mpfr_init_set', 'mpfr_init_set_q', 'mpfr_init_set_z',
- 'mpfr_mul', 'mpfr_mul_si', 'mpfr_pow_si', 'mpfr_set', 'mpfr_set_d',
- 'mpfr_set_default_precision', 'mpfr_set_default_rounding_mode',
- 'mpfr_set_precision', 'mpfr_set_q', 'mpfr_set_si', 'mpfr_set_str',
- 'mpfr_set_z', 'mpfr_si_div', 'mpfr_si_sub', 'mpfr_sqrt', 'mpfr_sub',
- 'mpfr_sub_si', 'mpq_abs', 'mpq_add', 'mpq_add_si', 'mpq_canonicalize',
- 'mpq_cmp', 'mpq_cmp_si', 'mpq_div', 'mpq_div_2exp', 'mpq_free',
- 'mpq_get_den', 'mpq_get_num', 'mpq_get_str', 'mpq_init', 'mpq_init_set',
- 'mpq_init_set_si', 'mpq_init_set_str', 'mpq_init_set_z', 'mpq_inits',
- 'mpq_inv', 'mpq_mul', 'mpq_neg', 'mpq_set', 'mpq_set_si', 'mpq_set_str',
- 'mpq_set_z', 'mpq_sub', 'mpz_abs', 'mpz_add', 'mpz_addmul',
- 'mpz_addmul_ui', 'mpz_addmul_si', 'mpz_add_si', 'mpz_add_ui', 'mpz_and',
- 'mpz_bin_uiui', 'mpz_cdiv_q', 'mpz_cmp', 'mpz_cmp_si', 'mpz_divexact',
- 'mpz_divexact_ui', 'mpz_divisible_p', 'mpz_divisible_ui_p', 'mpz_even',
- 'mpz_fac_ui', 'mpz_factorstring', 'mpz_fdiv_q', 'mpz_fdiv_q_2exp',
- 'mpz_fdiv_q_ui', 'mpz_fdiv_qr', 'mpz_fdiv_r', 'mpz_fdiv_ui',
- 'mpz_fib_ui', 'mpz_fib2_ui', 'mpz_fits_atom', 'mpz_fits_integer',
- 'mpz_free', 'mpz_gcd', 'mpz_gcd_ui', 'mpz_get_atom', 'mpz_get_integer',
- 'mpz_get_short_str', 'mpz_get_str', 'mpz_init', 'mpz_init_set',
- 'mpz_inits', 'mpz_invert', 'mpz_lcm', 'mpz_lcm_ui', 'mpz_max',
- 'mpz_min', 'mpz_mod', 'mpz_mod_ui', 'mpz_mul', 'mpz_mul_2exp',
- 'mpz_mul_d', 'mpz_mul_si', 'mpz_neg', 'mpz_nthroot', 'mpz_odd',
- 'mpz_pollard_rho', 'mpz_pow_ui', 'mpz_powm', 'mpz_powm_ui', 'mpz_prime',
- 'mpz_prime_factors', 'mpz_prime_mr', 'mpz_rand', 'mpz_rand_ui',
- 'mpz_re_compose', 'mpz_remove', 'mpz_scan0', 'mpz_scan1', 'mpz_set',
- 'mpz_set_d', 'mpz_set_si', 'mpz_set_str', 'mpz_set_v', 'mpz_sign',
- 'mpz_sizeinbase', 'mpz_sqrt', 'mpz_sub', 'mpz_sub_si', 'mpz_sub_ui',
- 'mpz_si_sub', 'mpz_tdiv_q_2exp', 'mpz_tdiv_r_2exp', 'mpz_tstbit',
- 'mpz_ui_pow_ui', 'mpz_xor', 'named_dict', 'new_dict', 'new_queue',
- 'new_stack', 'not_bits', 'not_bitsu', 'odd', 'or_all', 'or_allu',
- 'or_bits', 'or_bitsu', 'ord', 'ordinal', 'ordinant',
- 'override_timezone', 'pad', 'pad_head', 'pad_tail', 'parse_date_string',
- 'papply', 'peep', 'peepn', 'peep_dict', 'permute', 'permutes',
- 'platform', 'pop', 'popn', 'pop_dict', 'power', 'pp', 'ppEx', 'ppExf',
- 'ppf', 'ppOpt', 'pq_add', 'pq_destroy', 'pq_empty', 'pq_new', 'pq_peek',
- 'pq_pop', 'pq_pop_data', 'pq_size', 'prepend', 'prime_factors',
- 'printf', 'product', 'proper', 'push', 'pushn', 'putd', 'puts',
- 'queue_empty', 'queue_size', 'rand', 'rand_range', 'reinstate',
- 'remainder', 'remove', 'remove_all', 'repeat', 'repeatch', 'replace',
- 'requires', 'reverse', 'rfind', 'rgb', 'rmatch', 'rmdr', 'rnd', 'round',
- 'routine_id', 'scanf', 'serialize', 'series', 'set_rand',
- 'set_test_abort', 'set_test_logfile', 'set_test_module',
- 'set_test_pause', 'set_test_verbosity', 'set_timedate_formats',
- 'set_timezone', 'setd', 'setd_default', 'shorten', 'sha256',
- 'shift_bits', 'shuffle', 'sign', 'sin', 'smallest', 'sort',
- 'sort_columns', 'speak', 'splice', 'split', 'split_any', 'split_by',
- 'sprint', 'sprintf', 'sq_abs', 'sq_add', 'sq_and', 'sq_and_bits',
- 'sq_arccos', 'sq_arcsin', 'sq_arctan', 'sq_atom', 'sq_ceil', 'sq_cmp',
- 'sq_cos', 'sq_div', 'sq_even', 'sq_eq', 'sq_floor', 'sq_floor_div',
- 'sq_ge', 'sq_gt', 'sq_int', 'sq_le', 'sq_log', 'sq_log10', 'sq_log2',
- 'sq_lt', 'sq_max', 'sq_min', 'sq_mod', 'sq_mul', 'sq_ne', 'sq_not',
- 'sq_not_bits', 'sq_odd', 'sq_or', 'sq_or_bits', 'sq_power', 'sq_rand',
- 'sq_remainder', 'sq_rmdr', 'sq_rnd', 'sq_round', 'sq_seq', 'sq_sign',
- 'sq_sin', 'sq_sqrt', 'sq_str', 'sq_sub', 'sq_tan', 'sq_trunc',
- 'sq_uminus', 'sq_xor', 'sq_xor_bits', 'sqrt', 'square_free',
- 'stack_empty', 'stack_size', 'substitute', 'substitute_all', 'sum',
- 'tail', 'tan', 'test_equal', 'test_fail', 'test_false',
- 'test_not_equal', 'test_pass', 'test_summary', 'test_true',
- 'text_color', 'throw', 'time', 'timedate_diff', 'timedelta',
- 'to_integer', 'to_number', 'to_rgb', 'to_string', 'traverse_dict',
- 'traverse_dict_partial_key', 'trim', 'trim_head', 'trim_tail', 'trunc',
- 'tagset', 'tagstart', 'typeof', 'unique', 'unix_dict', 'upper',
- 'utf8_to_utf32', 'utf32_to_utf8', 'version', 'vlookup', 'vslice',
- 'wglGetProcAddress', 'wildcard_file', 'wildcard_match', 'with_rho',
- 'with_theta', 'xml_new_doc', 'xml_new_element', 'xml_set_attribute',
- 'xml_sprint', 'xor_bits', 'xor_bitsu',
- 'accept', 'allocate', 'allocate_string', 'allow_break', 'ARM',
- 'atom_to_float80', 'c_func', 'c_proc', 'call_back', 'chdir',
- 'check_break', 'clearDib', 'close', 'closesocket', 'console',
- 'copy_file', 'create', 'create_directory', 'create_thread',
- 'curl_easy_cleanup', 'curl_easy_get_file', 'curl_easy_init',
- 'curl_easy_perform', 'curl_easy_perform_ex', 'curl_easy_setopt',
- 'curl_easy_strerror', 'curl_global_cleanup', 'curl_global_init',
- 'curl_slist_append', 'curl_slist_free_all', 'current_dir', 'cursor',
- 'define_c_func', 'define_c_proc', 'delete', 'delete_cs', 'delete_file',
- 'dir', 'DLL', 'drawDib', 'drawShadedPolygonToDib', 'ELF32', 'ELF64',
- 'enter_cs', 'eval', 'exit_thread', 'free', 'file_exists', 'final',
- 'float80_to_atom', 'format', 'get_bytes', 'get_file_date',
- 'get_file_size', 'get_file_type', 'get_interpreter', 'get_key',
- 'get_socket_error', 'get_text', 'get_thread_exitcode', 'get_thread_id',
- 'getc', 'getenv', 'gets', 'getsockaddr', 'glBegin', 'glCallList',
- 'glFrustum', 'glGenLists', 'glGetString', 'glLight', 'glMaterial',
- 'glNewList', 'glNormal', 'glPopMatrix', 'glPushMatrix', 'glRotate',
- 'glEnd', 'glEndList', 'glTexImage2D', 'goto', 'GUI', 'icons', 'ilASM',
- 'include_files', 'include_paths', 'init_cs', 'ip_to_string',
- 'IupConfig', 'IupConfigDialogClosed', 'IupConfigDialogShow',
- 'IupConfigGetVariableInt', 'IupConfigLoad', 'IupConfigSave',
- 'IupConfigSetVariableInt', 'IupExitLoop', 'IupFileDlg', 'IupFileList',
- 'IupGLSwapBuffers', 'IupHelp', 'IupLoopStep', 'IupMainLoop',
- 'IupNormalizer', 'IupPlot', 'IupPlotAdd', 'IupPlotBegin', 'IupPlotEnd',
- 'IupPlotInsert', 'IupSaveImage', 'IupTreeGetUserId', 'IupUser',
- 'IupVersion', 'IupVersionDate', 'IupVersionNumber', 'IupVersionShow',
- 'killDib', 'leave_cs', 'listen', 'manifest', 'mem_copy', 'mem_set',
- 'mpfr_gamma', 'mpfr_printf', 'mpfr_sprintf', 'mpz_export', 'mpz_import',
- 'namespace', 'new', 'newDib', 'open', 'open_dll', 'PE32', 'PE64',
- 'peek', 'peek_string', 'peek1s', 'peek1u', 'peek2s', 'peek2u', 'peek4s',
- 'peek4u', 'peek8s', 'peek8u', 'peekNS', 'peekns', 'peeknu', 'poke',
- 'poke2', 'poke4', 'poke8', 'pokeN', 'poke_string', 'poke_wstring',
- 'position', 'progress', 'prompt_number', 'prompt_string', 'read_file',
- 'read_lines', 'recv', 'resume_thread', 'seek', 'select', 'send',
- 'setHandler', 'shutdown', 'sleep', 'SO', 'sockaddr_in', 'socket',
- 'split_path', 'suspend_thread', 'system', 'system_exec', 'system_open',
- 'system_wait', 'task_clock_start', 'task_clock_stop', 'task_create',
- 'task_delay', 'task_list', 'task_schedule', 'task_self', 'task_status',
- 'task_suspend', 'task_yield', 'thread_safe_string', 'try_cs',
- 'utf8_to_utf16', 'utf16_to_utf8', 'utf16_to_utf32', 'utf32_to_utf16',
- 'video_config', 'WSACleanup', 'wait_thread', 'walk_dir', 'where',
- 'write_lines', 'wait_key'
- )
- constants = (
- 'ANY_QUEUE', 'ASCENDING', 'BLACK', 'BLOCK_CURSOR', 'BLUE',
- 'BRIGHT_CYAN', 'BRIGHT_BLUE', 'BRIGHT_GREEN', 'BRIGHT_MAGENTA',
- 'BRIGHT_RED', 'BRIGHT_WHITE', 'BROWN', 'C_DWORD', 'C_INT', 'C_POINTER',
- 'C_USHORT', 'C_WORD', 'CD_AMBER', 'CD_BLACK', 'CD_BLUE', 'CD_BOLD',
- 'CD_BOLD_ITALIC', 'CD_BOX', 'CD_CENTER', 'CD_CIRCLE', 'CD_CLOSED_LINES',
- 'CD_CONTINUOUS', 'CD_CUSTOM', 'CD_CYAN', 'CD_DARK_BLUE', 'CD_DARK_CYAN',
- 'CD_DARK_GRAY', 'CD_DARK_GREY', 'CD_DARK_GREEN', 'CD_DARK_MAGENTA',
- 'CD_DARK_RED', 'CD_DARK_YELLOW', 'CD_DASH_DOT', 'CD_DASH_DOT_DOT',
- 'CD_DASHED', 'CD_DBUFFER', 'CD_DEG2RAD', 'CD_DIAMOND', 'CD_DOTTED',
- 'CD_EAST', 'CD_EVENODD', 'CD_FILL', 'CD_GL', 'CD_GRAY', 'CD_GREY',
- 'CD_GREEN', 'CD_HATCH', 'CD_HOLLOW', 'CD_HOLLOW_BOX',
- 'CD_HOLLOW_CIRCLE', 'CD_HOLLOW_DIAMOND', 'CD_INDIGO', 'CD_ITALIC',
- 'CD_IUP', 'CD_IUPDBUFFER', 'CD_LIGHT_BLUE', 'CD_LIGHT_GRAY',
- 'CD_LIGHT_GREY', 'CD_LIGHT_GREEN', 'CD_LIGHT_PARCHMENT', 'CD_MAGENTA',
- 'CD_NAVY', 'CD_NORTH', 'CD_NORTH_EAST', 'CD_NORTH_WEST', 'CD_OLIVE',
- 'CD_OPEN_LINES', 'CD_ORANGE', 'CD_PARCHMENT', 'CD_PATTERN',
- 'CD_PRINTER', 'CD_PURPLE', 'CD_PLAIN', 'CD_PLUS', 'CD_QUERY',
- 'CD_RAD2DEG', 'CD_RED', 'CD_SILVER', 'CD_SOLID', 'CD_SOUTH_EAST',
- 'CD_SOUTH_WEST', 'CD_STAR', 'CD_STIPPLE', 'CD_STRIKEOUT',
- 'CD_UNDERLINE', 'CD_WEST', 'CD_WHITE', 'CD_WINDING', 'CD_VIOLET',
- 'CD_X', 'CD_YELLOW', 'CURLE_OK', 'CURLOPT_MAIL_FROM',
- 'CURLOPT_MAIL_RCPT', 'CURLOPT_PASSWORD', 'CURLOPT_READDATA',
- 'CURLOPT_READFUNCTION', 'CURLOPT_SSL_VERIFYPEER',
- 'CURLOPT_SSL_VERIFYHOST', 'CURLOPT_UPLOAD', 'CURLOPT_URL',
- 'CURLOPT_USE_SSL', 'CURLOPT_USERNAME', 'CURLOPT_VERBOSE',
- 'CURLOPT_WRITEFUNCTION', 'CURLUSESSL_ALL', 'CYAN', 'D_NAME',
- 'D_ATTRIBUTES', 'D_SIZE', 'D_YEAR', 'D_MONTH', 'D_DAY', 'D_HOUR',
- 'D_MINUTE', 'D_SECOND', 'D_CREATION', 'D_LASTACCESS', 'D_MODIFICATION',
- 'DT_YEAR', 'DT_MONTH', 'DT_DAY', 'DT_HOUR', 'DT_MINUTE', 'DT_SECOND',
- 'DT_DOW', 'DT_MSEC', 'DT_DOY', 'DT_GMT', 'EULER', 'E_CODE', 'E_ADDR',
- 'E_LINE', 'E_RTN', 'E_NAME', 'E_FILE', 'E_PATH', 'E_USER', 'false',
- 'False', 'FALSE', 'FIFO_QUEUE', 'FILETYPE_DIRECTORY', 'FILETYPE_FILE',
- 'GET_EOF', 'GET_FAIL', 'GET_IGNORE', 'GET_SUCCESS',
- 'GL_AMBIENT_AND_DIFFUSE', 'GL_ARRAY_BUFFER', 'GL_CLAMP',
- 'GL_CLAMP_TO_BORDER', 'GL_CLAMP_TO_EDGE', 'GL_COLOR_BUFFER_BIT',
- 'GL_COMPILE', 'GL_COMPILE_STATUS', 'GL_CULL_FACE',
- 'GL_DEPTH_BUFFER_BIT', 'GL_DEPTH_TEST', 'GL_EXTENSIONS', 'GL_FLAT',
- 'GL_FLOAT', 'GL_FRAGMENT_SHADER', 'GL_FRONT', 'GL_LIGHT0',
- 'GL_LIGHTING', 'GL_LINEAR', 'GL_LINK_STATUS', 'GL_MODELVIEW',
- 'GL_NEAREST', 'GL_NO_ERROR', 'GL_NORMALIZE', 'GL_POSITION',
- 'GL_PROJECTION', 'GL_QUAD_STRIP', 'GL_QUADS', 'GL_RENDERER',
- 'GL_REPEAT', 'GL_RGB', 'GL_RGBA', 'GL_SMOOTH', 'GL_STATIC_DRAW',
- 'GL_TEXTURE_2D', 'GL_TEXTURE_MAG_FILTER', 'GL_TEXTURE_MIN_FILTER',
- 'GL_TEXTURE_WRAP_S', 'GL_TEXTURE_WRAP_T', 'GL_TRIANGLES',
- 'GL_UNSIGNED_BYTE', 'GL_VENDOR', 'GL_VERSION', 'GL_VERTEX_SHADER',
- 'GRAY', 'GREEN', 'GT_LF_STRIPPED', 'GT_WHOLE_FILE', 'INVLN10',
- 'IUP_CLOSE', 'IUP_CONTINUE', 'IUP_DEFAULT', 'IUP_BLACK', 'IUP_BLUE',
- 'IUP_BUTTON1', 'IUP_BUTTON3', 'IUP_CENTER', 'IUP_CYAN', 'IUP_DARK_BLUE',
- 'IUP_DARK_CYAN', 'IUP_DARK_GRAY', 'IUP_DARK_GREY', 'IUP_DARK_GREEN',
- 'IUP_DARK_MAGENTA', 'IUP_DARK_RED', 'IUP_GRAY', 'IUP_GREY', 'IUP_GREEN',
- 'IUP_IGNORE', 'IUP_INDIGO', 'IUP_MAGENTA', 'IUP_MASK_INT',
- 'IUP_MASK_UINT', 'IUP_MOUSEPOS', 'IUP_NAVY', 'IUP_OLIVE', 'IUP_RECTEXT',
- 'IUP_RED', 'IUP_LIGHT_BLUE', 'IUP_LIGHT_GRAY', 'IUP_LIGHT_GREY',
- 'IUP_LIGHT_GREEN', 'IUP_ORANGE', 'IUP_PARCHMENT', 'IUP_PURPLE',
- 'IUP_SILVER', 'IUP_TEAL', 'IUP_VIOLET', 'IUP_WHITE', 'IUP_YELLOW',
- 'K_BS', 'K_cA', 'K_cC', 'K_cD', 'K_cF5', 'K_cK', 'K_cM', 'K_cN', 'K_cO',
- 'K_cP', 'K_cR', 'K_cS', 'K_cT', 'K_cW', 'K_CR', 'K_DEL', 'K_DOWN',
- 'K_END', 'K_ESC', 'K_F1', 'K_F2', 'K_F3', 'K_F4', 'K_F5', 'K_F6',
- 'K_F7', 'K_F8', 'K_F9', 'K_F10', 'K_F11', 'K_F12', 'K_HOME', 'K_INS',
- 'K_LEFT', 'K_MIDDLE', 'K_PGDN', 'K_PGUP', 'K_RIGHT', 'K_SP', 'K_TAB',
- 'K_UP', 'K_h', 'K_i', 'K_j', 'K_p', 'K_r', 'K_s', 'JS', 'LIFO_QUEUE',
- 'LINUX', 'MAX_HEAP', 'MAGENTA', 'MIN_HEAP', 'Nan', 'NO_CURSOR', 'null',
- 'NULL', 'PI', 'pp_Ascii', 'pp_Brkt', 'pp_Date', 'pp_File', 'pp_FltFmt',
- 'pp_Indent', 'pp_IntCh', 'pp_IntFmt', 'pp_Maxlen', 'pp_Nest',
- 'pp_Pause', 'pp_Q22', 'pp_StrFmt', 'RED', 'SEEK_OK', 'SLASH',
- 'TEST_ABORT', 'TEST_CRASH', 'TEST_PAUSE', 'TEST_PAUSE_FAIL',
- 'TEST_QUIET', 'TEST_SHOW_ALL', 'TEST_SHOW_FAILED', 'TEST_SUMMARY',
- 'true', 'True', 'TRUE', 'VC_SCRNLINES', 'WHITE', 'WINDOWS', 'YELLOW'
- )
-
- tokens = {
- 'root': [
- (r"\s+", Whitespace),
- (r'/\*|--/\*|#\[', Comment.Multiline, 'comment'),
- (r'(?://|--|#!).*$', Comment.Single),
-#Alt:
-# (r'//.*$|--.*$|#!.*$', Comment.Single),
- (r'"([^"\\]|\\.)*"', String.Other),
- (r'\'[^\']*\'', String.Other),
- (r'`[^`]*`', String.Other),
-
- (words(types, prefix=r'\b', suffix=r'\b'), Name.Function),
- (words(routines, prefix=r'\b', suffix=r'\b'), Name.Function),
- (words(preproc, prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
- (words(constants, prefix=r'\b', suffix=r'\b'), Name.Constant),
- # Aside: Phix only supports/uses the ascii/non-unicode tilde
- (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|\.(){},?:\[\]$\\;#]', Operator),
- (r'[\w-]+', Text)
- ],
- 'comment': [
- (r'[^*/#]+', Comment.Multiline),
- (r'/\*|#\[', Comment.Multiline, '#push'),
- (r'\*/|#\]', Comment.Multiline, '#pop'),
- (r'[*/#]', Comment.Multiline)
- ]
- }
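The 'comment' state above leans on Pygments' '#push'/'#pop' stack directives so that nested block comments stay inside Comment.Multiline until every opener has been closed. As an illustrative aside (not part of the deleted file; the class name and sample input are made up), a minimal self-contained lexer using the same idiom:

from pygments.lexer import RegexLexer
from pygments.token import Comment, Text

class NestedCommentDemoLexer(RegexLexer):
    # Hypothetical demo lexer; mirrors the 'comment' state of the lexer above.
    name = 'NestedCommentDemo'
    tokens = {
        'root': [
            (r'/\*', Comment.Multiline, 'comment'),   # opener: enter 'comment'
            (r'[^/]+', Text),
            (r'/', Text),
        ],
        'comment': [
            (r'[^*/]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),     # nested opener: push again
            (r'\*/', Comment.Multiline, '#pop'),      # closer: pop one level
            (r'[*/]', Comment.Multiline),
        ],
    }

for tok, val in NestedCommentDemoLexer().get_tokens('/* a /* b */ c */ done'):
    print(tok, repr(val))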
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/php.py b/venv/lib/python3.11/site-packages/pygments/lexers/php.py
deleted file mode 100644
index a0a0021..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/php.py
+++ /dev/null
@@ -1,335 +0,0 @@
-"""
- pygments.lexers.php
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for PHP and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \
- using, this, words, do_insertions, line_re
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other, Generic
-from pygments.util import get_bool_opt, get_list_opt, shebang_matches
-
-__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer']
-
-
-class ZephirLexer(RegexLexer):
- """
- For Zephir language source code.
-
-    Zephir is a compiled high-level language aimed at creating
-    C extensions for PHP.
-
- .. versionadded:: 2.0
- """
-
- name = 'Zephir'
- url = 'http://zephir-lang.com/'
- aliases = ['zephir']
- filenames = ['*.zep']
-
- zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
- zephir_type = ['bit', 'bits', 'string']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'/', Operator, '#pop'),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
- r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
- r'empty)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
- r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
- r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
- r'transient|volatile|readonly)\b', Keyword.Reserved),
- (r'(true|false|null|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
- r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_][\w\\]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
-
-class PsyshConsoleLexer(Lexer):
- """
- For PsySH console output, such as:
-
- .. sourcecode:: psysh
-
- >>> $greeting = function($name): string {
- ... return "Hello, {$name}";
- ... };
- => Closure($name): string {#2371 …3}
- >>> $greeting('World')
- => "Hello, World"
-
- .. versionadded:: 2.7
- """
- name = 'PsySH console session for PHP'
- url = 'https://psysh.org/'
- aliases = ['psysh']
-
- def __init__(self, **options):
- options['startinline'] = True
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- phplexer = PhpLexer(**self.options)
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('>>> ') or line.startswith('... '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:4])]))
- curcode += line[4:]
- elif line.rstrip() == '...':
- insertions.append((len(curcode),
- [(0, Generic.Prompt, '...')]))
- curcode += line[3:]
- else:
- if curcode:
- yield from do_insertions(
- insertions, phplexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- yield from do_insertions(insertions,
- phplexer.get_tokens_unprocessed(curcode))
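As a hedged illustration of the prompt handling implemented above (not part of the deleted file; the session text is adapted from the docstring), the lexer can be fed a PsySH transcript directly:

from pygments.lexers.php import PsyshConsoleLexer

session = ('>>> $greeting = function($name): string {\n'
           '...     return "Hello, {$name}";\n'
           '... };\n'
           '=> Closure\n'
           ">>> $greeting('World')\n"
           '=> "Hello, World"\n')

# '>>> ' and '... ' come back as Generic.Prompt, the PHP fragments are handed
# to PhpLexer(startinline=True), and the '=>' lines are Generic.Output.
for token, value in PsyshConsoleLexer().get_tokens(session):
    print(token, repr(value))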
-
-
-class PhpLexer(RegexLexer):
- """
- For PHP source code.
- For PHP embedded in HTML, use the `HtmlPhpLexer`.
-
- Additional options accepted:
-
- `startinline`
-        If given and ``True``, the lexer starts highlighting with
-        PHP code (i.e. no starting ``<?php`` is required). The default
-        is ``False``.
- `funcnamehighlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabledmodules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted
-        except the special ``'unknown'`` module, which contains functions
-        that are known to PHP but are undocumented.
-
- To get a list of allowed modules have a look into the
- `_php_builtins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._php_builtins import MODULES
- >>> MODULES.keys()
- ['PHP Options/Info', 'Zip', 'dba', ...]
-
-        The names of those modules match the module names used in the
-        PHP documentation.
- """
-
- name = 'PHP'
- url = 'https://www.php.net/'
- aliases = ['php', 'php3', 'php4', 'php5']
- filenames = ['*.php', '*.php[345]', '*.inc']
- mimetypes = ['text/x-php']
-
- # Note that a backslash is included, PHP uses a backslash as a namespace
- # separator.
- _ident_inner = r'(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*'
- # But not inside strings.
- _ident_nons = r'(?:[_a-z]|[^\x00-\x7f])(?:\w|[^\x00-\x7f])*'
-
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'<\?(php)?', Comment.Preproc, 'php'),
- (r'[^<]+', Other),
- (r'<', Other)
- ],
- 'php': [
- (r'\?>', Comment.Preproc, '#pop'),
- (r'(<<<)([\'"]?)(' + _ident_nons + r')(\2\n.*?\n\s*)(\3)(;?)(\n)',
- bygroups(String, String, String.Delimiter, String, String.Delimiter,
- Punctuation, Text)),
- (r'\s+', Text),
- (r'#\[', Punctuation, 'attribute'),
- (r'#.*?\n', Comment.Single),
- (r'//.*?\n', Comment.Single),
- # put the empty comment here, it is otherwise seen as
- # the start of a docstring
- (r'/\*\*/', Comment.Multiline),
- (r'/\*\*.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(->|::)(\s*)(' + _ident_nons + ')',
- bygroups(Operator, Text, Name.Attribute)),
- (r'[~!%^&*+=|:.<>/@-]+', Operator),
- (r'\?', Operator), # don't add to the charclass above!
- (r'[\[\]{}();,]+', Punctuation),
- (r'(new)(\s+)(class)\b', bygroups(Keyword, Text, Keyword)),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
- (r'(function)(\s+)(&?)(\s*)',
- bygroups(Keyword, Text, Operator, Text), 'functionname'),
- (r'(const)(\s+)(' + _ident_inner + ')',
- bygroups(Keyword, Text, Name.Constant)),
- (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
- r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
- r'FALSE|print|for|require|continue|foreach|require_once|'
- r'declare|return|default|static|do|switch|die|stdClass|'
- r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
- r'virtual|endfor|include_once|while|endforeach|global|'
- r'endif|list|endswitch|new|endwhile|not|'
- r'array|E_ALL|NULL|final|php_user_filter|interface|'
- r'implements|public|private|protected|abstract|clone|try|'
- r'catch|throw|this|use|namespace|trait|yield|'
- r'finally|match)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
- include('magicconstants'),
- (r'\$\{', Name.Variable, 'variablevariable'),
- (r'\$+' + _ident_inner, Name.Variable),
- (_ident_inner, Name.Other),
- (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
- (r'\d+e[+-]?[0-9]+', Number.Float),
- (r'0[0-7]+', Number.Oct),
- (r'0x[a-f0-9]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'0b[01]+', Number.Bin),
- (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
- (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
- (r'"', String.Double, 'string'),
- ],
- 'variablevariable': [
- (r'\}', Name.Variable, '#pop'),
- include('php')
- ],
- 'magicfuncs': [
- # source: http://php.net/manual/en/language.oop5.magic.php
- (words((
- '__construct', '__destruct', '__call', '__callStatic', '__get', '__set',
- '__isset', '__unset', '__sleep', '__wakeup', '__toString', '__invoke',
- '__set_state', '__clone', '__debugInfo',), suffix=r'\b'),
- Name.Function.Magic),
- ],
- 'magicconstants': [
- # source: http://php.net/manual/en/language.constants.predefined.php
- (words((
- '__LINE__', '__FILE__', '__DIR__', '__FUNCTION__', '__CLASS__',
- '__TRAIT__', '__METHOD__', '__NAMESPACE__',),
- suffix=r'\b'),
- Name.Constant),
- ],
- 'classname': [
- (_ident_inner, Name.Class, '#pop')
- ],
- 'functionname': [
- include('magicfuncs'),
- (_ident_inner, Name.Function, '#pop'),
- default('#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'[^{$"\\]+', String.Double),
- (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
- (r'\$' + _ident_nons + r'(\[\S+?\]|->' + _ident_nons + ')?',
- String.Interpol),
- (r'(\{\$\{)(.*?)(\}\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\{)(\$.*?)(\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\$\{)(\S+)(\})',
- bygroups(String.Interpol, Name.Variable, String.Interpol)),
- (r'[${\\]', String.Double)
- ],
- 'attribute': [
- (r'\]', Punctuation, '#pop'),
- (r'\(', Punctuation, 'attributeparams'),
- (_ident_inner, Name.Decorator),
- include('php')
- ],
- 'attributeparams': [
- (r'\)', Punctuation, '#pop'),
- include('php')
- ],
- }
-
- def __init__(self, **options):
- self.funcnamehighlighting = get_bool_opt(
- options, 'funcnamehighlighting', True)
- self.disabledmodules = get_list_opt(
- options, 'disabledmodules', ['unknown'])
- self.startinline = get_bool_opt(options, 'startinline', False)
-
- # private option argument for the lexer itself
- if '_startinline' in options:
- self.startinline = options.pop('_startinline')
-
- # collect activated functions in a set
- self._functions = set()
- if self.funcnamehighlighting:
- from pygments.lexers._php_builtins import MODULES
- for key, value in MODULES.items():
- if key not in self.disabledmodules:
- self._functions.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.startinline:
- stack.append('php')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Other:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- if shebang_matches(text, r'php'):
- return True
- rv = 0.0
- if re.search(r'<\?(?!xml)', text):
- rv += 0.3
- return rv
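A hedged usage sketch for the options documented in the PhpLexer docstring above (assumes only that Pygments is installed; the PHP snippet is made up):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.php import PhpLexer

code = '$greeting = "Hello"; echo strtoupper($greeting);'

# startinline=True removes the need for a leading '<?php', and disabledmodules
# controls which groups of builtin function names are shown as Name.Builtin.
lexer = PhpLexer(startinline=True,
                 funcnamehighlighting=True,
                 disabledmodules=['unknown'])

print(highlight(code, lexer, TerminalFormatter()))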
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pointless.py b/venv/lib/python3.11/site-packages/pygments/lexers/pointless.py
deleted file mode 100644
index eb73b2a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pointless.py
+++ /dev/null
@@ -1,71 +0,0 @@
-"""
- pygments.lexers.pointless
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Pointless.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
- Punctuation, String, Text
-
-__all__ = ['PointlessLexer']
-
-
-class PointlessLexer(RegexLexer):
- """
- For Pointless source code.
-
- .. versionadded:: 2.7
- """
-
- name = 'Pointless'
- url = 'https://ptls.dev'
- aliases = ['pointless']
- filenames = ['*.ptls']
-
- ops = words([
- "+", "-", "*", "/", "**", "%", "+=", "-=", "*=",
- "/=", "**=", "%=", "|>", "=", "==", "!=", "<", ">",
- "<=", ">=", "=>", "$", "++",
- ])
-
- keywords = words([
- "if", "then", "else", "where", "with", "cond",
- "case", "and", "or", "not", "in", "as", "for",
- "requires", "throw", "try", "catch", "when",
- "yield", "upval",
- ], suffix=r'\b')
-
- tokens = {
- 'root': [
- (r'[ \n\r]+', Text),
- (r'--.*$', Comment.Single),
- (r'"""', String, 'multiString'),
- (r'"', String, 'string'),
- (r'[\[\](){}:;,.]', Punctuation),
- (ops, Operator),
- (keywords, Keyword),
- (r'\d+|\d*\.\d+', Number),
- (r'(true|false)\b', Name.Builtin),
- (r'[A-Z][a-zA-Z0-9]*\b', String.Symbol),
- (r'output\b', Name.Variable.Magic),
- (r'(export|import)\b', Keyword.Namespace),
- (r'[a-z][a-zA-Z0-9]*\b', Name.Variable)
- ],
- 'multiString': [
- (r'\\.', String.Escape),
- (r'"""', String, '#pop'),
- (r'"', String),
- (r'[^\\"]+', String),
- ],
- 'string': [
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'\n', Error),
- (r'[^\\"]+', String),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pony.py b/venv/lib/python3.11/site-packages/pygments/lexers/pony.py
deleted file mode 100644
index 03adc5f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pony.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""
- pygments.lexers.pony
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Pony and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['PonyLexer']
-
-
-class PonyLexer(RegexLexer):
- """
- For Pony source code.
-
- .. versionadded:: 2.4
- """
-
- name = 'Pony'
- aliases = ['pony']
- filenames = ['*.pony']
-
- _caps = r'(iso|trn|ref|val|box|tag)'
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'//.*\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'nested_comment'),
- (r'"""(?:.|\n)*?"""', String.Doc),
- (r'"', String, 'string'),
- (r'\'.*\'', String.Char),
- (r'=>|[]{}:().~;,|&!^?[]', Punctuation),
- (words((
- 'addressof', 'and', 'as', 'consume', 'digestof', 'is', 'isnt',
- 'not', 'or'),
- suffix=r'\b'),
- Operator.Word),
- (r'!=|==|<<|>>|[-+/*%=<>]', Operator),
- (words((
- 'box', 'break', 'compile_error', 'compile_intrinsic',
- 'continue', 'do', 'else', 'elseif', 'embed', 'end', 'error',
- 'for', 'if', 'ifdef', 'in', 'iso', 'lambda', 'let', 'match',
- 'object', 'recover', 'ref', 'repeat', 'return', 'tag', 'then',
- 'this', 'trn', 'try', 'until', 'use', 'var', 'val', 'where',
- 'while', 'with', '#any', '#read', '#send', '#share'),
- suffix=r'\b'),
- Keyword),
- (r'(actor|class|struct|primitive|interface|trait|type)((?:\s)+)',
- bygroups(Keyword, Text), 'typename'),
- (r'(new|fun|be)((?:\s)+)', bygroups(Keyword, Text), 'methodname'),
- (words((
- 'I8', 'U8', 'I16', 'U16', 'I32', 'U32', 'I64', 'U64', 'I128',
- 'U128', 'ILong', 'ULong', 'ISize', 'USize', 'F32', 'F64',
- 'Bool', 'Pointer', 'None', 'Any', 'Array', 'String',
- 'Iterator'),
- suffix=r'\b'),
- Name.Builtin.Type),
- (r'_?[A-Z]\w*', Name.Type),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'(true|false)\b', Name.Builtin),
- (r'_\d*', Name),
- (r'_?[a-z][\w\']*', Name)
- ],
- 'typename': [
- (_caps + r'?((?:\s)*)(_?[A-Z]\w*)',
- bygroups(Keyword, Text, Name.Class), '#pop')
- ],
- 'methodname': [
- (_caps + r'?((?:\s)*)(_?[a-z]\w*)',
- bygroups(Keyword, Text, Name.Function), '#pop')
- ],
- 'nested_comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\"', String),
- (r'[^\\"]+', String)
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/praat.py b/venv/lib/python3.11/site-packages/pygments/lexers/praat.py
deleted file mode 100644
index c8533a6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/praat.py
+++ /dev/null
@@ -1,304 +0,0 @@
-"""
- pygments.lexers.praat
- ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Praat.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups, include
-from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, \
- Number, Operator, Whitespace
-
-__all__ = ['PraatLexer']
-
-
-class PraatLexer(RegexLexer):
- """
- For Praat scripts.
-
- .. versionadded:: 2.1
- """
-
- name = 'Praat'
- url = 'http://www.praat.org'
- aliases = ['praat']
- filenames = ['*.praat', '*.proc', '*.psc']
-
- keywords = (
- 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
- 'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
- 'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
- 'editor', 'endeditor', 'clearinfo',
- )
-
- functions_string = (
- 'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
- 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
- 'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
- 'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
- )
-
- functions_numeric = (
- 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
- 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
- 'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
- 'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
- 'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
- 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
- 'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
- 'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
- 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
- 'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
- 'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
- 'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
- 'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
- 'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
- 'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
- 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
- 'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
- 'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
- 'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
- 'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
- 'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
- 'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
- 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
- 'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
- 'writeInfo', 'writeInfoLine',
- )
-
- functions_array = (
- 'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
- )
-
- objects = (
- 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
- 'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
- 'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
- 'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
- 'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
- 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
- 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
- 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
- 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
- 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
- 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
- 'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
- 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
- 'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
- 'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
- 'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
- 'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
- 'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
- 'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
- 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
- 'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
- 'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
- 'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
- 'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
- 'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
- 'Weight', 'WordList',
- )
-
- variables_numeric = (
- 'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
- )
-
- variables_string = (
- 'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
- 'preferencesDirectory', 'newline', 'temporaryDirectory',
- 'defaultDirectory',
- )
-
- object_attributes = (
- 'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
- )
-
- tokens = {
- 'root': [
- (r'(\s+)(#.*?$)', bygroups(Whitespace, Comment.Single)),
- (r'^#.*?$', Comment.Single),
- (r';[^\n]*', Comment.Single),
- (r'\s+', Whitespace),
-
- (r'\bprocedure\b', Keyword, 'procedure_definition'),
- (r'\bcall\b', Keyword, 'procedure_call'),
- (r'@', Name.Function, 'procedure_call'),
-
- include('function_call'),
-
- (words(keywords, suffix=r'\b'), Keyword),
-
- (r'(\bform\b)(\s+)([^\n]+)',
- bygroups(Keyword, Whitespace, String), 'old_form'),
-
- (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
- r'include|execute|system(?:_nocheck)?)(\s+)',
- bygroups(Keyword, Whitespace), 'string_unquoted'),
-
- (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Whitespace, Name.Label)),
-
- include('variable_name'),
- include('number'),
-
- (r'"', String, 'string'),
-
- (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
-
- (r'\b[A-Z]', Keyword, 'command'),
- (r'(\.{3}|[)(,])', Punctuation),
- ],
- 'command': [
- (r'( ?[\w()-]+ ?)', Keyword),
-
- include('string_interpolated'),
-
- (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
- (r':', Keyword, ('#pop', 'comma_list')),
- (r'\s', Whitespace, '#pop'),
- ],
- 'procedure_call': [
- (r'\s+', Whitespace),
- (r'([\w.]+)(?:(:)|(?:(\s*)(\()))',
- bygroups(Name.Function, Punctuation,
- Text.Whitespace, Punctuation), '#pop'),
- (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
- ],
- 'procedure_definition': [
- (r'\s', Whitespace),
- (r'([\w.]+)(\s*?[(:])',
- bygroups(Name.Function, Whitespace), '#pop'),
- (r'([\w.]+)([^\n]*)',
- bygroups(Name.Function, Text), '#pop'),
- ],
- 'function_call': [
- (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
- (words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
- (words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
- ],
- 'function': [
- (r'\s+', Whitespace),
- (r':', Punctuation, ('#pop', 'comma_list')),
- (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
- ],
- 'comma_list': [
- (r'(\s*\n\s*)(\.{3})', bygroups(Whitespace, Punctuation)),
-
- (r'(\s*)(?:([)\]])|(\n))', bygroups(
- Whitespace, Punctuation, Whitespace), '#pop'),
-
- (r'\s+', Whitespace),
- (r'"', String, 'string'),
- (r'\b(if|then|else|fi|endif)\b', Keyword),
-
- include('function_call'),
- include('variable_name'),
- include('operator'),
- include('number'),
-
- (r'[()]', Text),
- (r',', Punctuation),
- ],
- 'old_arguments': [
- (r'\n', Whitespace, '#pop'),
-
- include('variable_name'),
- include('operator'),
- include('number'),
-
- (r'"', String, 'string'),
- (r'[^\n]', Text),
- ],
- 'number': [
- (r'\n', Whitespace, '#pop'),
- (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
- ],
- 'object_reference': [
- include('string_interpolated'),
- (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
-
- (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
-
- (r'\$', Name.Builtin),
- (r'\[', Text, '#pop'),
- ],
- 'variable_name': [
- include('operator'),
- include('number'),
-
- (words(variables_string, suffix=r'\$'), Name.Variable.Global),
- (words(variables_numeric,
- suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
- Name.Variable.Global),
-
- (words(objects, prefix=r'\b', suffix=r"(_)"),
- bygroups(Name.Builtin, Name.Builtin),
- 'object_reference'),
-
- (r'\.?_?[a-z][\w.]*(\$|#)?', Text),
- (r'[\[\]]', Punctuation, 'comma_list'),
-
- include('string_interpolated'),
- ],
- 'operator': [
- (r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
- (r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
- ],
- 'string_interpolated': [
- (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w,]+")\])?(:[0-9]+)?\'',
- String.Interpol),
- ],
- 'string_unquoted': [
- (r'(\n\s*)(\.{3})', bygroups(Whitespace, Punctuation)),
-
- (r'\n', Whitespace, '#pop'),
- (r'\s', Whitespace),
-
- include('string_interpolated'),
-
- (r"'", String),
- (r"[^'\n]+", String),
- ],
- 'string': [
- (r'(\n\s*)(\.{3})', bygroups(Whitespace, Punctuation)),
-
- (r'"', String, '#pop'),
-
- include('string_interpolated'),
-
- (r"'", String),
- (r'[^\'"\n]+', String),
- ],
- 'old_form': [
- (r'(\s+)(#.*?$)', bygroups(Whitespace, Comment.Single)),
- (r'\s+', Whitespace),
-
- (r'(optionmenu|choice)([ \t]+)(\S+)(:)([ \t]+)',
- bygroups(Keyword, Whitespace, Text, Punctuation, Whitespace), 'number'),
-
- (r'(option|button)([ \t]+)',
- bygroups(Keyword, Whitespace), 'string_unquoted'),
-
- (r'(sentence|text)([ \t]+)(\S+)',
- bygroups(Keyword, Whitespace, String), 'string_unquoted'),
-
- (r'(word)([ \t]+)(\S+)([ \t]*)(\S+)?(?:([ \t]+)(.*))?',
- bygroups(Keyword, Whitespace, Text, Whitespace, Text, Whitespace, Text)),
-
- (r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)',
- bygroups(Keyword, Whitespace, Name.Variable)),
-
-            # Ideally the number would be processed in the 'number' state,
-            # but that doesn't seem to work.
- (r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?'
- r'(?:[eE][-+]?\d+)?%?)',
- bygroups(Keyword, Whitespace, Operator, Number)),
-
- (r'(comment)(\s+)',
- bygroups(Keyword, Whitespace), 'string_unquoted'),
-
- (r'\bendform\b', Keyword, '#pop'),
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/procfile.py b/venv/lib/python3.11/site-packages/pygments/lexers/procfile.py
deleted file mode 100644
index 4f9b59c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/procfile.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
- pygments.lexers.procfile
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Procfile file format.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Name, Number, String, Text, Punctuation
-
-__all__ = ["ProcfileLexer"]
-
-
-class ProcfileLexer(RegexLexer):
- """
- Lexer for Procfile file format.
-
-    The format is used to declare process types on Heroku and is also used
-    by tools such as Foreman and Honcho.
-
- .. versionadded:: 2.10
- """
- name = 'Procfile'
- url = 'https://devcenter.heroku.com/articles/procfile#procfile-format'
- aliases = ['procfile']
- filenames = ['Procfile']
-
- tokens = {
- 'root': [
- (r'^([a-z]+)(:)', bygroups(Name.Label, Punctuation)),
- (r'\s+', Text.Whitespace),
- (r'"[^"]*"', String),
- (r"'[^']*'", String),
- (r'[0-9]+', Number.Integer),
- (r'\$[a-zA-Z_][\w]*', Name.Variable),
- (r'(\w+)(=)(\w+)', bygroups(Name.Variable, Punctuation, String)),
- (r'([\w\-\./]+)', Text),
- ],
- }
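An illustrative tokenization run for the ProcfileLexer above (the Procfile content is a made-up sample):

from pygments.lexers.procfile import ProcfileLexer

sample = ('web: gunicorn app --workers 2\n'
          'worker: python worker.py --queue $QUEUE\n')

# Process names before ':' become Name.Label, '$QUEUE' is Name.Variable and
# bare integers are Number.Integer, per the rules above.
for token, value in ProcfileLexer().get_tokens(sample):
    if value.strip():
        print(token, repr(value))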
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/prolog.py b/venv/lib/python3.11/site-packages/pygments/lexers/prolog.py
deleted file mode 100644
index 37c1e9c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/prolog.py
+++ /dev/null
@@ -1,309 +0,0 @@
-"""
- pygments.lexers.prolog
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Prolog and Prolog-like languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['PrologLexer', 'LogtalkLexer']
-
-
-class PrologLexer(RegexLexer):
- """
- Lexer for Prolog files.
- """
- name = 'Prolog'
- aliases = ['prolog']
- filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
- mimetypes = ['text/x-prolog']
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'nested-comment'),
- (r'%.*', Comment.Single),
- # character literal
- (r'0\'.', String.Char),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # literal with prepended base
- (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer),
- (r'[\[\](){}|.,;!]', Punctuation),
- (r':-|-->', Punctuation),
- (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
- r'\\[0-7]+\\|\\["\\abcefnrstv]|[^\\"])*"', String.Double),
- (r"'(?:''|[^'])*'", String.Atom), # quoted atom
- # Needs to not be followed by an atom.
- # (r'=(?=\s|[a-zA-Z\[])', Operator),
- (r'is\b', Operator),
- (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
- Operator),
- (r'(mod|div|not)\b', Operator),
- (r'_', Keyword), # The don't-care variable
- (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
- (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- r'(\s*)(:-|-->)',
- bygroups(Name.Function, Text, Operator)), # function defn
- (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- r'(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
- String.Atom), # atom, characters
- # This one includes !
- (r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+',
- String.Atom), # atom, graphics
- (r'[A-Z_]\w*', Name.Variable),
- (r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
- ],
- 'nested-comment': [
- (r'\*/', Comment.Multiline, '#pop'),
- (r'/\*', Comment.Multiline, '#push'),
- (r'[^*/]+', Comment.Multiline),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
- def analyse_text(text):
- """Competes with IDL and Visual Prolog on *.pro"""
- if ':-' in text:
- # Visual Prolog also uses :-
- return 0.5
- else:
- return 0
-
-
-class LogtalkLexer(RegexLexer):
- """
- For Logtalk source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'Logtalk'
- url = 'http://logtalk.org/'
- aliases = ['logtalk']
- filenames = ['*.lgt', '*.logtalk']
- mimetypes = ['text/x-logtalk']
-
- tokens = {
- 'root': [
- # Directives
- (r'^\s*:-\s', Punctuation, 'directive'),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- # Numbers
- (r"0'[\\]?.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Event handlers
- (r'(after|before)(?=[(])', Keyword),
- # Message forwarding handler
- (r'forward(?=[(])', Keyword),
- # Execution-context methods
- (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
- # Reflection
- (r'(current_predicate|predicate_property)(?=[(])', Keyword),
- # DCGs and term expansion
- (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
- # Entity
- (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
- (r'(object|protocol|category)_property(?=[(])', Keyword),
- # Entity relations
- (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
- (r'extends_(object|protocol|category)(?=[(])', Keyword),
- (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
- (r'(instantiat|specializ)es_class(?=[(])', Keyword),
- # Events
- (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
- # Flags
- (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
- # Compiling, loading, and library paths
- (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
- (r'\blogtalk_make\b', Keyword),
- # Database
- (r'(clause|retract(all)?)(?=[(])', Keyword),
- (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
- # Control constructs
- (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
- (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
- (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
- # All solutions
- (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
- # Multi-threading predicates
- (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
- # Engine predicates
- (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
- # Term unification
- (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
- # Term creation and decomposition
- (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
- # Evaluable functors
- (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
- (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
- (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
- # Other arithmetic functors
- (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
- # Term testing
- (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
- # Term comparison
- (r'compare(?=[(])', Keyword),
- # Stream selection and control
- (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
- (r'(open|close)(?=[(])', Keyword),
- (r'flush_output(?=[(])', Keyword),
- (r'(at_end_of_stream|flush_output)\b', Keyword),
- (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
- # Character and byte input/output
- (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
- (r'\bnl\b', Keyword),
- # Term input/output
- (r'read(_term)?(?=[(])', Keyword),
- (r'write(q|_(canonical|term))?(?=[(])', Keyword),
- (r'(current_)?op(?=[(])', Keyword),
- (r'(current_)?char_conversion(?=[(])', Keyword),
- # Atomic term processing
- (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
- (r'(char_code|sub_atom)(?=[(])', Keyword),
- (r'number_c(har|ode)s(?=[(])', Keyword),
- # Implementation defined hooks functions
- (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
- (r'\bhalt\b', Keyword),
- (r'halt(?=[(])', Keyword),
- # Message sending operators
- (r'(::|:|\^\^)', Operator),
- # External call
- (r'[{}]', Keyword),
- # Logic and control
- (r'(ignore|once)(?=[(])', Keyword),
- (r'\brepeat\b', Keyword),
- # Sorting
- (r'(key)?sort(?=[(])', Keyword),
- # Bitwise functors
- (r'(>>|<<|/\\|\\\\|\\)', Operator),
- # Predicate aliases
- (r'\bas\b', Operator),
- # Arithmetic evaluation
- (r'\bis\b', Keyword),
- # Arithmetic comparison
- (r'(=:=|=\\=|<|=<|>=|>)', Operator),
- # Term creation and decomposition
- (r'=\.\.', Operator),
- # Term unification
- (r'(=|\\=)', Operator),
- # Term comparison
- (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
- # Evaluable functors
- (r'(//|[-+*/])', Operator),
- (r'\b(e|pi|div|mod|rem)\b', Operator),
- # Other arithmetic functors
- (r'\b\*\*\b', Operator),
- # DCG rules
- (r'-->', Operator),
- # Control constructs
- (r'([!;]|->)', Operator),
- # Logic and control
- (r'\\+', Operator),
- # Mode operators
- (r'[?@]', Operator),
- # Existential quantifier
- (r'\^', Operator),
- # Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Punctuation
- (r'[()\[\],.|]', Text),
- # Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- ],
-
- 'quoted_atom': [
- (r"''", String),
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
- (r"[^\\'\n]+", String),
- (r'\\', String),
- ],
-
- 'directive': [
- # Conditional compilation directives
- (r'(el)?if(?=[(])', Keyword, 'root'),
- (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
- # Entity directives
- (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
- (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
- # Predicate scope directives
- (r'(public|protected|private)(?=[(])', Keyword, 'root'),
- # Other directives
- (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
- (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
- (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
- (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
- (r'op(?=[(])', Keyword, 'root'),
- (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
- (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
- (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
- ],
-
- 'entityrelations': [
- (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
- # Numbers
- (r"0'[\\]?.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- # Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # End of entity-opening directive
- (r'([)]\.)', Text, 'root'),
- # Scope operator
- (r'(::)', Operator),
- # Punctuation
- (r'[()\[\],.|]', Text),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- ]
- }
-
- def analyse_text(text):
- if ':- object(' in text:
- return 1.0
- elif ':- protocol(' in text:
- return 1.0
- elif ':- category(' in text:
- return 1.0
- elif re.search(r'^:-\s[a-z]', text, re.M):
- return 0.9
- else:
- return 0.0
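A quick, hedged demonstration of the analyse_text() heuristics defined above; guess_lexer() in pygments.lexers compares scores like these when only raw source text is available:

from pygments.lexers.prolog import LogtalkLexer, PrologLexer

# ':-' on its own only scores 0.5 because Visual Prolog uses it too, while
# ':- object(' is treated as a near-certain Logtalk signal and scores 1.0.
print(PrologLexer.analyse_text('greet(X) :- write(X), nl.'))
print(LogtalkLexer.analyse_text(':- object(hello).\n:- end_object.'))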
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/promql.py b/venv/lib/python3.11/site-packages/pygments/lexers/promql.py
deleted file mode 100644
index b6d2d66..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/promql.py
+++ /dev/null
@@ -1,175 +0,0 @@
-"""
- pygments.lexers.promql
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Prometheus Query Language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, default, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Whitespace
-
-__all__ = ["PromQLLexer"]
-
-
-class PromQLLexer(RegexLexer):
- """
- For PromQL queries.
-
- For details about the grammar see:
- https://github.com/prometheus/prometheus/tree/master/promql/parser
-
-    .. versionadded:: 2.7
- """
-
- name = "PromQL"
- url = 'https://prometheus.io/docs/prometheus/latest/querying/basics/'
- aliases = ["promql"]
- filenames = ["*.promql"]
-
- base_keywords = (
- words(
- (
- "bool",
- "by",
- "group_left",
- "group_right",
- "ignoring",
- "offset",
- "on",
- "without",
- ),
- suffix=r"\b",
- ),
- Keyword,
- )
-
- aggregator_keywords = (
- words(
- (
- "sum",
- "min",
- "max",
- "avg",
- "group",
- "stddev",
- "stdvar",
- "count",
- "count_values",
- "bottomk",
- "topk",
- "quantile",
- ),
- suffix=r"\b",
- ),
- Keyword,
- )
-
- function_keywords = (
- words(
- (
- "abs",
- "absent",
- "absent_over_time",
- "avg_over_time",
- "ceil",
- "changes",
- "clamp_max",
- "clamp_min",
- "count_over_time",
- "day_of_month",
- "day_of_week",
- "days_in_month",
- "delta",
- "deriv",
- "exp",
- "floor",
- "histogram_quantile",
- "holt_winters",
- "hour",
- "idelta",
- "increase",
- "irate",
- "label_join",
- "label_replace",
- "ln",
- "log10",
- "log2",
- "max_over_time",
- "min_over_time",
- "minute",
- "month",
- "predict_linear",
- "quantile_over_time",
- "rate",
- "resets",
- "round",
- "scalar",
- "sort",
- "sort_desc",
- "sqrt",
- "stddev_over_time",
- "stdvar_over_time",
- "sum_over_time",
- "time",
- "timestamp",
- "vector",
- "year",
- ),
- suffix=r"\b",
- ),
- Keyword.Reserved,
- )
-
- tokens = {
- "root": [
- (r"\n", Whitespace),
- (r"\s+", Whitespace),
- (r",", Punctuation),
- # Keywords
- base_keywords,
- aggregator_keywords,
- function_keywords,
- # Offsets
- (r"[1-9][0-9]*[smhdwy]", String),
- # Numbers
- (r"-?[0-9]+\.[0-9]+", Number.Float),
- (r"-?[0-9]+", Number.Integer),
- # Comments
- (r"#.*?$", Comment.Single),
- # Operators
- (r"(\+|\-|\*|\/|\%|\^)", Operator),
- (r"==|!=|>=|<=|<|>", Operator),
- (r"and|or|unless", Operator.Word),
- # Metrics
- (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable),
- # Params
- (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
- # Other states
- (r"\(", Operator, "function"),
- (r"\)", Operator),
- (r"\{", Punctuation, "labels"),
- (r"\[", Punctuation, "range"),
- ],
- "labels": [
- (r"\}", Punctuation, "#pop"),
- (r"\n", Whitespace),
- (r"\s+", Whitespace),
- (r",", Punctuation),
- (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|\')(.*?)("|\')',
- bygroups(Name.Label, Whitespace, Operator, Whitespace,
- Punctuation, String, Punctuation)),
- ],
- "range": [
- (r"\]", Punctuation, "#pop"),
- (r"[1-9][0-9]*[smhdwy]", String),
- ],
- "function": [
- (r"\)", Operator, "#pop"),
- (r"\(", Operator, "#push"),
- default("#pop"),
- ],
- }
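An illustrative run of the PromQLLexer above on a small query (the metric and label names are invented):

from pygments.lexers.promql import PromQLLexer

query = 'sum by (job) (rate(http_requests_total{status!="500"}[5m]))'

# '{' switches the lexer into the "labels" state and '[' into the "range"
# state, so label matchers and the 5m duration come back as distinct tokens.
for token, value in PromQLLexer().get_tokens(query):
    if value.strip():
        print(token, repr(value))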
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/prql.py b/venv/lib/python3.11/site-packages/pygments/lexers/prql.py
deleted file mode 100644
index 4c2f12e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/prql.py
+++ /dev/null
@@ -1,252 +0,0 @@
-"""
- pygments.lexers.prql
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the PRQL query language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, combined, words, include, bygroups
-from pygments.token import Comment, Literal, Keyword, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
-
-__all__ = ['PrqlLexer']
-
-
-class PrqlLexer(RegexLexer):
- """
- For PRQL source code.
-
- .. versionadded:: 2.17
-
- grammar: https://github.com/PRQL/prql/tree/main/grammars
- """
-
- name = 'PRQL'
- url = 'https://prql-lang.org/'
- aliases = ['prql']
- filenames = ['*.prql']
- mimetypes = ['application/prql', 'application/x-prql']
-
- builtinTypes = words((
- "bool",
- "int",
- "int8", "int16", "int32", "int64", "int128",
- "float",
- "text",
- "set"), suffix=r'\b')
-
- def innerstring_rules(ttype):
- return [
- # the new style '{}'.format(...) string formatting
- (r'\{'
- r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
- r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
- r'\}', String.Interpol),
-
- (r'[^\\\'"%{\n]+', ttype),
- (r'[\'"\\]', ttype),
- (r'%|(\{{1,2})', ttype)
- ]
-
- def fstring_rules(ttype):
- return [
- (r'\}', String.Interpol),
- (r'\{', String.Interpol, 'expr-inside-fstring'),
- (r'[^\\\'"{}\n]+', ttype),
- (r'[\'"\\]', ttype),
- ]
-
- tokens = {
- 'root': [
-
- # Comments
- (r'#!.*', String.Doc),
- (r'#.*', Comment.Single),
-
- # Whitespace
- (r'\s+', Whitespace),
-
- # Modules
- (r'^(\s*)(module)(\s*)',
- bygroups(Whitespace, Keyword.Namespace, Whitespace),
- 'imports'),
-
- (builtinTypes, Keyword.Type),
-
- # Main
- (r'^prql ', Keyword.Reserved),
-
- ('let', Keyword.Declaration),
-
- include('keywords'),
- include('expr'),
-
- # Transforms
- (r'^[A-Za-z_][a-zA-Z0-9_]*', Keyword),
- ],
- 'expr': [
- # non-raw f-strings
- ('(f)(""")', bygroups(String.Affix, String.Double),
- combined('fstringescape', 'tdqf')),
- ("(f)(''')", bygroups(String.Affix, String.Single),
- combined('fstringescape', 'tsqf')),
- ('(f)(")', bygroups(String.Affix, String.Double),
- combined('fstringescape', 'dqf')),
- ("(f)(')", bygroups(String.Affix, String.Single),
- combined('fstringescape', 'sqf')),
-
- # non-raw s-strings
- ('(s)(""")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'tdqf')),
- ("(s)(''')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'tsqf')),
- ('(s)(")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'dqf')),
- ("(s)(')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'sqf')),
-
- # raw strings
- ('(?i)(r)(""")',
- bygroups(String.Affix, String.Double), 'tdqs'),
- ("(?i)(r)(''')",
- bygroups(String.Affix, String.Single), 'tsqs'),
- ('(?i)(r)(")',
- bygroups(String.Affix, String.Double), 'dqs'),
- ("(?i)(r)(')",
- bygroups(String.Affix, String.Single), 'sqs'),
-
- # non-raw strings
- ('"""', String.Double, combined('stringescape', 'tdqs')),
- ("'''", String.Single, combined('stringescape', 'tsqs')),
- ('"', String.Double, combined('stringescape', 'dqs')),
- ("'", String.Single, combined('stringescape', 'sqs')),
-
- # Time and dates
- (r'@\d{4}-\d{2}-\d{2}T\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
- (r'@\d{4}-\d{2}-\d{2}', Literal.Date),
- (r'@\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
-
- (r'[^\S\n]+', Text),
- include('numbers'),
- (r'->|=>|==|!=|>=|<=|~=|&&|\|\||\?\?|\/\/', Operator),
- (r'[-~+/*%=<>&^|.@]', Operator),
- (r'[]{}:(),;[]', Punctuation),
- include('functions'),
-
- # Variable Names
- (r'[A-Za-z_][a-zA-Z0-9_]*', Name.Variable),
- ],
- 'numbers': [
- (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
- r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
- (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
- (r'0[oO](?:_?[0-7])+', Number.Oct),
- (r'0[bB](?:_?[01])+', Number.Bin),
- (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
- (r'\d(?:_?\d)*', Number.Integer),
- ],
- 'fstringescape': [
- include('stringescape'),
- ],
- 'bytesescape': [
- (r'\\([\\bfnrt"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'stringescape': [
- (r'\\(N\{.*?\}|u\{[a-fA-F0-9]{1,6}\})', String.Escape),
- include('bytesescape')
- ],
- 'fstrings-single': fstring_rules(String.Single),
- 'fstrings-double': fstring_rules(String.Double),
- 'strings-single': innerstring_rules(String.Single),
- 'strings-double': innerstring_rules(String.Double),
- 'dqf': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('fstrings-double')
- ],
- 'sqf': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('fstrings-single')
- ],
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings-double')
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings-single')
- ],
- 'tdqf': [
- (r'"""', String.Double, '#pop'),
- include('fstrings-double'),
- (r'\n', String.Double)
- ],
- 'tsqf': [
- (r"'''", String.Single, '#pop'),
- include('fstrings-single'),
- (r'\n', String.Single)
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings-double'),
- (r'\n', String.Double)
- ],
- 'tsqs': [
- (r"'''", String.Single, '#pop'),
- include('strings-single'),
- (r'\n', String.Single)
- ],
-
- 'expr-inside-fstring': [
- (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
- # without format specifier
- (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
- r'\}', String.Interpol, '#pop'),
- # with format specifier
- # we'll catch the remaining '}' in the outer scope
- (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
- r':', String.Interpol, '#pop'),
- (r'\s+', Whitespace), # allow new lines
- include('expr'),
- ],
- 'expr-inside-fstring-inner': [
- (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
- (r'[])}]', Punctuation, '#pop'),
- (r'\s+', Whitespace), # allow new lines
- include('expr'),
- ],
- 'keywords': [
- (words((
- 'into', 'case', 'type', 'module', 'internal',
- ), suffix=r'\b'),
- Keyword),
- (words(('true', 'false', 'null'), suffix=r'\b'), Keyword.Constant),
- ],
- 'functions': [
- (words((
- "min", "max", "sum", "average", "stddev", "every", "any",
- "concat_array", "count", "lag", "lead", "first", "last",
- "rank", "rank_dense", "row_number", "round", "as", "in",
- "tuple_every", "tuple_map", "tuple_zip", "_eq", "_is_null",
- "from_text", "lower", "upper", "read_parquet", "read_csv"),
- suffix=r'\b'),
- Name.Function),
- ],
-
- 'comment': [
- (r'-(?!\})', Comment.Multiline),
- (r'\{-', Comment.Multiline, 'comment'),
- (r'[^-}]', Comment.Multiline),
- (r'-\}', Comment.Multiline, '#pop'),
- ],
-
- 'imports': [
- (r'\w+(\.\w+)*', Name.Class, '#pop'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ptx.py b/venv/lib/python3.11/site-packages/pygments/lexers/ptx.py
deleted file mode 100644
index 218d694..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ptx.py
+++ /dev/null
@@ -1,120 +0,0 @@
-"""
- pygments.lexers.ptx
- ~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the NVIDIA PTX language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Comment, Keyword, Name, String, Number, \
- Punctuation, Whitespace, Operator
-
-__all__ = ["PtxLexer"]
-
-
-class PtxLexer(RegexLexer):
- """
- For NVIDIA `PTX <https://docs.nvidia.com/cuda/parallel-thread-execution/>`_
- source.
-
- .. versionadded:: 2.16
- """
- name = 'PTX'
- url = "https://docs.nvidia.com/cuda/parallel-thread-execution/"
- filenames = ['*.ptx']
- aliases = ['ptx']
- mimetypes = ['text/x-ptx']
-
-    #: a double-quoted string, also usable as an identifier
- string = r'"[^"]*?"'
- followsym = r'[a-zA-Z0-9_$]'
- identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
- block_label = r'(' + identifier + r'|(\d+))'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- (block_label + r'\s*:', Name.Label),
-
- include('keyword'),
-
- (r'%' + identifier, Name.Variable),
- (r'%\d+', Name.Variable.Anonymous),
- (r'c?' + string, String),
- (identifier, Name.Variable),
- (r';', Punctuation),
-            (r'[*+\-/]', Operator),  # '-' escaped so the class is not a range
-
- (r'0[xX][a-fA-F0-9]+', Number),
- (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
-
- (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
-
- ],
- 'whitespace': [
- (r'(\n|\s+)+', Whitespace),
- (r'//.*?\n', Comment)
- ],
-
- 'keyword': [
- # Instruction keywords
- (words((
- 'abs', 'discard', 'min', 'shf', 'vadd',
- 'activemask', 'div', 'mma', 'shfl', 'vadd2',
- 'add', 'dp2a', 'mov', 'shl', 'vadd4',
- 'addc', 'dp4a', 'movmatrix', 'shr', 'vavrg2',
- 'alloca', 'elect', 'mul', 'sin', 'vavrg4',
- 'and', 'ex2', 'mul24', 'slct', 'vmad',
- 'applypriority', 'exit', 'multimem', 'sqrt', 'vmax',
- 'atom', 'fence', 'nanosleep', 'st', 'vmax2',
- 'bar', 'fma', 'neg', 'stackrestore', 'vmax4',
- 'barrier', 'fns', 'not', 'stacksave', 'vmin',
- 'bfe', 'getctarank', 'or', 'stmatrix', 'vmin2',
- 'bfi', 'griddepcontrol', 'pmevent', 'sub', 'vmin4',
- 'bfind', 'isspacep', 'popc', 'subc', 'vote',
- 'bmsk', 'istypep', 'prefetch', 'suld', 'vset',
- 'bra', 'ld', 'prefetchu', 'suq', 'vset2',
- 'brev', 'ldmatrix', 'prmt', 'sured', 'vset4',
- 'brkpt', 'ldu', 'rcp', 'sust', 'vshl',
- 'brx', 'lg2', 'red', 'szext', 'vshr',
- 'call', 'lop3', 'redux', 'tanh', 'vsub',
- 'clz', 'mad', 'rem', 'testp', 'vsub2',
- 'cnot', 'mad24', 'ret', 'tex', 'vsub4',
- 'copysign', 'madc', 'rsqrt', 'tld4', 'wgmma',
- 'cos', 'mapa', 'sad', 'trap', 'wmma',
- 'cp', 'match', 'selp', 'txq', 'xor',
- 'createpolicy', 'max', 'set', 'vabsdiff', 'cvt',
- 'mbarrier', 'setmaxnreg', 'vabsdiff2', 'cvta',
- 'membar', 'setp', 'vabsdiff4')), Keyword),
- # State Spaces and Suffixes
- (words((
-                '.reg', '.sreg', '.const', '.global',
- '.local', '.param', '.shared', '.tex',
- '.wide', '.loc'
- )), Keyword.Pseudo),
- # PTX Directives
- (words((
- '.address_size', '.explicitcluster', '.maxnreg', '.section',
- '.alias', '.extern', '.maxntid', '.shared',
- '.align', '.file', '.minnctapersm', '.sreg',
- '.branchtargets', '.func', '.noreturn', '.target',
- '.callprototype', '.global', '.param', '.tex',
- '.calltargets', '.loc', '.pragma', '.version',
- '.common', '.local', '.reg', '.visible',
- '.const', '.maxclusterrank', '.reqnctapercluster', '.weak',
- '.entry', '.maxnctapersm', '.reqntid')), Keyword.Reserved),
- # Fundamental Types
- (words((
- '.s8', '.s16', '.s32', '.s64',
- '.u8', '.u16', '.u32', '.u64',
- '.f16', '.f16x2', '.f32', '.f64',
- '.b8', '.b16', '.b32', '.b64',
- '.pred'
- )), Keyword.Type)
- ],
-
- }
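
PtxLexer above registers the 'ptx' alias and the '*.ptx' filename pattern, so it is reachable through the ordinary Pygments lookup helpers. A short usage sketch, assuming an installed Pygments release that still ships this lexer; the PTX snippet is illustrative only and not taken from the file.

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

ptx_src = """\
.visible .entry add_one(.param .u64 ptr)
{
    .reg .s32 %r<3>;
    add.s32 %r1, %r1, 1;
    ret;
}
"""

lexer = get_lexer_by_name('ptx')   # resolved via the alias declared above
print(highlight(ptx_src, lexer, TerminalFormatter()))
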
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/python.py b/venv/lib/python3.11/site-packages/pygments/lexers/python.py
deleted file mode 100644
index cdb88ab..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/python.py
+++ /dev/null
@@ -1,1198 +0,0 @@
-"""
- pygments.lexers.python
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Python and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import keyword
-
-from pygments.lexer import DelegatingLexer, Lexer, RegexLexer, include, \
- bygroups, using, default, words, combined, do_insertions, this, line_re
-from pygments.util import get_bool_opt, shebang_matches
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other, Error, Whitespace
-from pygments import unistring as uni
-
-__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
- 'Python2Lexer', 'Python2TracebackLexer',
- 'CythonLexer', 'DgLexer', 'NumPyLexer']
-
-
-class PythonLexer(RegexLexer):
- """
- For Python source code (version 3.x).
-
- .. versionadded:: 0.10
-
- .. versionchanged:: 2.5
- This is now the default ``PythonLexer``. It is still available as the
- alias ``Python3Lexer``.
- """
-
- name = 'Python'
- url = 'https://www.python.org'
- aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark']
- filenames = [
- '*.py',
- '*.pyw',
- # Type stubs
- '*.pyi',
- # Jython
- '*.jy',
- # Sage
- '*.sage',
- # SCons
- '*.sc',
- 'SConstruct',
- 'SConscript',
- # Skylark/Starlark (used by Bazel, Buck, and Pants)
- '*.bzl',
- 'BUCK',
- 'BUILD',
- 'BUILD.bazel',
- 'WORKSPACE',
- # Twisted Application infrastructure
- '*.tac',
- ]
- mimetypes = ['text/x-python', 'application/x-python',
- 'text/x-python3', 'application/x-python3']
-
- uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
- def innerstring_rules(ttype):
- return [
- # the old style '%s' % (...) string formatting (still valid in Py3)
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
- # the new style '{}'.format(...) string formatting
- (r'\{'
- r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
- r'(\![sra])?' # conversion
- r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
- r'\}', String.Interpol),
-
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%{\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r'%|(\{{1,2})', ttype)
- # newlines are an error (use "nl" state)
- ]
-
- def fstring_rules(ttype):
- return [
- # Assuming that a '}' is the closing brace after format specifier.
- # Sadly, this means that we won't detect syntax error. But it's
- # more important to parse correct syntax correctly, than to
- # highlight invalid syntax.
- (r'\}', String.Interpol),
- (r'\{', String.Interpol, 'expr-inside-fstring'),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"{}\n]+', ttype),
- (r'[\'"\\]', ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
- bygroups(Whitespace, String.Affix, String.Doc)),
- (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
- bygroups(Whitespace, String.Affix, String.Doc)),
- (r'\A#!.+$', Comment.Hashbang),
- (r'#.*$', Comment.Single),
- (r'\\\n', Text),
- (r'\\', Text),
- include('keywords'),
- include('soft-keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('expr'),
- ],
- 'expr': [
- # raw f-strings
- ('(?i)(rf|fr)(""")',
- bygroups(String.Affix, String.Double),
- combined('rfstringescape', 'tdqf')),
- ("(?i)(rf|fr)(''')",
- bygroups(String.Affix, String.Single),
- combined('rfstringescape', 'tsqf')),
- ('(?i)(rf|fr)(")',
- bygroups(String.Affix, String.Double),
- combined('rfstringescape', 'dqf')),
- ("(?i)(rf|fr)(')",
- bygroups(String.Affix, String.Single),
- combined('rfstringescape', 'sqf')),
- # non-raw f-strings
- ('([fF])(""")', bygroups(String.Affix, String.Double),
- combined('fstringescape', 'tdqf')),
- ("([fF])(''')", bygroups(String.Affix, String.Single),
- combined('fstringescape', 'tsqf')),
- ('([fF])(")', bygroups(String.Affix, String.Double),
- combined('fstringescape', 'dqf')),
- ("([fF])(')", bygroups(String.Affix, String.Single),
- combined('fstringescape', 'sqf')),
- # raw bytes and strings
- ('(?i)(rb|br|r)(""")',
- bygroups(String.Affix, String.Double), 'tdqs'),
- ("(?i)(rb|br|r)(''')",
- bygroups(String.Affix, String.Single), 'tsqs'),
- ('(?i)(rb|br|r)(")',
- bygroups(String.Affix, String.Double), 'dqs'),
- ("(?i)(rb|br|r)(')",
- bygroups(String.Affix, String.Single), 'sqs'),
- # non-raw strings
- ('([uU]?)(""")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'tdqs')),
- ("([uU]?)(''')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'tsqs')),
- ('([uU]?)(")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'dqs')),
- ("([uU]?)(')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'sqs')),
- # non-raw bytes
- ('([bB])(""")', bygroups(String.Affix, String.Double),
- combined('bytesescape', 'tdqs')),
- ("([bB])(''')", bygroups(String.Affix, String.Single),
- combined('bytesescape', 'tsqs')),
- ('([bB])(")', bygroups(String.Affix, String.Double),
- combined('bytesescape', 'dqs')),
- ("([bB])(')", bygroups(String.Affix, String.Single),
- combined('bytesescape', 'sqs')),
-
- (r'[^\S\n]+', Text),
- include('numbers'),
- (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
- (r'[]{}:(),;[]', Punctuation),
- (r'(in|is|and|or|not)\b', Operator.Word),
- include('expr-keywords'),
- include('builtins'),
- include('magicfuncs'),
- include('magicvars'),
- include('name'),
- ],
- 'expr-inside-fstring': [
- (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
- # without format specifier
- (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
- r'(\![sraf])?' # conversion
- r'\}', String.Interpol, '#pop'),
- # with format specifier
- # we'll catch the remaining '}' in the outer scope
- (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
- r'(\![sraf])?' # conversion
- r':', String.Interpol, '#pop'),
- (r'\s+', Whitespace), # allow new lines
- include('expr'),
- ],
- 'expr-inside-fstring-inner': [
- (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
- (r'[])}]', Punctuation, '#pop'),
- (r'\s+', Whitespace), # allow new lines
- include('expr'),
- ],
- 'expr-keywords': [
- # Based on https://docs.python.org/3/reference/expressions.html
- (words((
- 'async for', 'await', 'else', 'for', 'if', 'lambda',
- 'yield', 'yield from'), suffix=r'\b'),
- Keyword),
- (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
- ],
- 'keywords': [
- (words((
- 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
- 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
- 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
- 'yield from', 'as', 'with'), suffix=r'\b'),
- Keyword),
- (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
- ],
- 'soft-keywords': [
- # `match`, `case` and `_` soft keywords
- (r'(^[ \t]*)' # at beginning of line + possible indentation
- r'(match|case)\b' # a possible keyword
- r'(?![ \t]*(?:' # not followed by...
- r'[:,;=^&|@~)\]}]|(?:' + # characters and keywords that mean this isn't
- r'|'.join(keyword.kwlist) + r')\b))', # pattern matching
- bygroups(Text, Keyword), 'soft-keywords-inner'),
- ],
- 'soft-keywords-inner': [
- # optional `_` keyword
- (r'(\s+)([^\n_]*)(_\b)', bygroups(Whitespace, using(this), Keyword)),
- default('#pop')
- ],
- 'builtins': [
- (words((
- '__import__', 'abs', 'aiter', 'all', 'any', 'bin', 'bool', 'bytearray',
- 'breakpoint', 'bytes', 'callable', 'chr', 'classmethod', 'compile',
- 'complex', 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval',
- 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals',
- 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'isinstance',
- 'issubclass', 'iter', 'len', 'list', 'locals', 'map', 'max',
- 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow',
- 'print', 'property', 'range', 'repr', 'reversed', 'round', 'set',
- 'setattr', 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super',
- 'tuple', 'type', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
- 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
- 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
- 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
- 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
- 'NotImplementedError', 'OSError', 'OverflowError',
- 'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
- 'RuntimeError', 'RuntimeWarning', 'StopIteration',
- 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
- 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError',
- 'Warning', 'WindowsError', 'ZeroDivisionError',
- # new builtin exceptions from PEP 3151
- 'BlockingIOError', 'ChildProcessError', 'ConnectionError',
- 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
- 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
- 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
- 'PermissionError', 'ProcessLookupError', 'TimeoutError',
- # others new in Python 3
- 'StopAsyncIteration', 'ModuleNotFoundError', 'RecursionError',
- 'EncodingWarning'),
- prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
- 'magicfuncs': [
- (words((
- '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__',
- '__and__', '__anext__', '__await__', '__bool__', '__bytes__',
- '__call__', '__complex__', '__contains__', '__del__', '__delattr__',
- '__delete__', '__delitem__', '__dir__', '__divmod__', '__enter__',
- '__eq__', '__exit__', '__float__', '__floordiv__', '__format__',
- '__ge__', '__get__', '__getattr__', '__getattribute__',
- '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__',
- '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__',
- '__imul__', '__index__', '__init__', '__instancecheck__',
- '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
- '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__',
- '__len__', '__length_hint__', '__lshift__', '__lt__', '__matmul__',
- '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
- '__new__', '__next__', '__or__', '__pos__', '__pow__',
- '__prepare__', '__radd__', '__rand__', '__rdivmod__', '__repr__',
- '__reversed__', '__rfloordiv__', '__rlshift__', '__rmatmul__',
- '__rmod__', '__rmul__', '__ror__', '__round__', '__rpow__',
- '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__',
- '__rxor__', '__set__', '__setattr__', '__setitem__', '__str__',
- '__sub__', '__subclasscheck__', '__truediv__',
- '__xor__'), suffix=r'\b'),
- Name.Function.Magic),
- ],
- 'magicvars': [
- (words((
- '__annotations__', '__bases__', '__class__', '__closure__',
- '__code__', '__defaults__', '__dict__', '__doc__', '__file__',
- '__func__', '__globals__', '__kwdefaults__', '__module__',
- '__mro__', '__name__', '__objclass__', '__qualname__',
- '__self__', '__slots__', '__weakref__'), suffix=r'\b'),
- Name.Variable.Magic),
- ],
- 'numbers': [
- (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
- r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
- (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
- (r'0[oO](?:_?[0-7])+', Number.Oct),
- (r'0[bB](?:_?[01])+', Number.Bin),
- (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
- (r'\d(?:_?\d)*', Number.Integer),
- ],
- 'name': [
- (r'@' + uni_name, Name.Decorator),
- (r'@', Operator), # new matrix multiplication operator
- (uni_name, Name),
- ],
- 'funcname': [
- include('magicfuncs'),
- (uni_name, Name.Function, '#pop'),
- default('#pop'),
- ],
- 'classname': [
- (uni_name, Name.Class, '#pop'),
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\.', Name.Namespace),
- (uni_name, Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
- (r'\.', Name.Namespace),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Keyword.Constant, '#pop'),
- (uni_name, Name.Namespace),
- default('#pop'),
- ],
- 'rfstringescape': [
- (r'\{\{', String.Escape),
- (r'\}\}', String.Escape),
- ],
- 'fstringescape': [
- include('rfstringescape'),
- include('stringescape'),
- ],
- 'bytesescape': [
- (r'\\([\\abfnrtv"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'stringescape': [
- (r'\\(N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8})', String.Escape),
- include('bytesescape')
- ],
- 'fstrings-single': fstring_rules(String.Single),
- 'fstrings-double': fstring_rules(String.Double),
- 'strings-single': innerstring_rules(String.Single),
- 'strings-double': innerstring_rules(String.Double),
- 'dqf': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('fstrings-double')
- ],
- 'sqf': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('fstrings-single')
- ],
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings-double')
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings-single')
- ],
- 'tdqf': [
- (r'"""', String.Double, '#pop'),
- include('fstrings-double'),
- (r'\n', String.Double)
- ],
- 'tsqf': [
- (r"'''", String.Single, '#pop'),
- include('fstrings-single'),
- (r'\n', String.Single)
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings-double'),
- (r'\n', String.Double)
- ],
- 'tsqs': [
- (r"'''", String.Single, '#pop'),
- include('strings-single'),
- (r'\n', String.Single)
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?(3(\.\d)?)?') or \
- 'import ' in text[:1000]
-
-
-Python3Lexer = PythonLexer
-
-
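
The analyse_text() hook at the end of PythonLexer is what lets pygments.lexers.guess_lexer() pick this lexer for unlabelled input: a python shebang, or an 'import ' within the first 1000 characters, both count as evidence. A small sketch of that round trip; PythonLexer is the expected answer, though guess_lexer() is heuristic and not guaranteed for arbitrary input.

from pygments.lexers import guess_lexer

source = "#!/usr/bin/env python3\nimport os\nprint(os.getcwd())\n"

# guess_lexer() scores every registered lexer's analyse_text(); the shebang
# (plus the early 'import ') should make PythonLexer the best match here.
print(guess_lexer(source).__class__.__name__)
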
-class Python2Lexer(RegexLexer):
- """
- For Python 2.x source code.
-
- .. versionchanged:: 2.5
- This class has been renamed from ``PythonLexer``. ``PythonLexer`` now
- refers to the Python 3 variant. File name patterns like ``*.py`` have
- been moved to Python 3 as well.
- """
-
- name = 'Python 2.x'
- url = 'https://www.python.org'
- aliases = ['python2', 'py2']
- filenames = [] # now taken over by PythonLexer (3.x)
- mimetypes = ['text/x-python2', 'application/x-python2']
-
- def innerstring_rules(ttype):
- return [
- # the old style '%s' % (...) string formatting
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsux%]', String.Interpol),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r'%', ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
- bygroups(Whitespace, String.Affix, String.Doc)),
- (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
- bygroups(Whitespace, String.Affix, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'\A#!.+$', Comment.Hashbang),
- (r'#.*$', Comment.Single),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
- include('keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('builtins'),
- include('magicfuncs'),
- include('magicvars'),
- include('backtick'),
- ('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
- bygroups(String.Affix, String.Double), 'tdqs'),
- ("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
- bygroups(String.Affix, String.Single), 'tsqs'),
- ('([rR]|[uUbB][rR]|[rR][uUbB])(")',
- bygroups(String.Affix, String.Double), 'dqs'),
- ("([rR]|[uUbB][rR]|[rR][uUbB])(')",
- bygroups(String.Affix, String.Single), 'sqs'),
- ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'tdqs')),
- ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'tsqs')),
- ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'dqs')),
- ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (words((
- 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
- 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
- 'print', 'raise', 'return', 'try', 'while', 'yield',
- 'yield from', 'as', 'with'), suffix=r'\b'),
- Keyword),
- ],
- 'builtins': [
- (words((
- '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
- 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
- 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
- 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
- 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
- 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
- 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
- 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
- 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
- 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
- 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
- prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls'
- r')\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
- 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
- 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
- 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
- 'MemoryError', 'NameError',
- 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
- 'PendingDeprecationWarning', 'ReferenceError',
- 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
- 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
- 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
- 'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
- 'magicfuncs': [
- (words((
- '__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__',
- '__complex__', '__contains__', '__del__', '__delattr__', '__delete__',
- '__delitem__', '__delslice__', '__div__', '__divmod__', '__enter__',
- '__eq__', '__exit__', '__float__', '__floordiv__', '__ge__', '__get__',
- '__getattr__', '__getattribute__', '__getitem__', '__getslice__', '__gt__',
- '__hash__', '__hex__', '__iadd__', '__iand__', '__idiv__', '__ifloordiv__',
- '__ilshift__', '__imod__', '__imul__', '__index__', '__init__',
- '__instancecheck__', '__int__', '__invert__', '__iop__', '__ior__',
- '__ipow__', '__irshift__', '__isub__', '__iter__', '__itruediv__',
- '__ixor__', '__le__', '__len__', '__long__', '__lshift__', '__lt__',
- '__missing__', '__mod__', '__mul__', '__ne__', '__neg__', '__new__',
- '__nonzero__', '__oct__', '__op__', '__or__', '__pos__', '__pow__',
- '__radd__', '__rand__', '__rcmp__', '__rdiv__', '__rdivmod__', '__repr__',
- '__reversed__', '__rfloordiv__', '__rlshift__', '__rmod__', '__rmul__',
- '__rop__', '__ror__', '__rpow__', '__rrshift__', '__rshift__', '__rsub__',
- '__rtruediv__', '__rxor__', '__set__', '__setattr__', '__setitem__',
- '__setslice__', '__str__', '__sub__', '__subclasscheck__', '__truediv__',
- '__unicode__', '__xor__'), suffix=r'\b'),
- Name.Function.Magic),
- ],
- 'magicvars': [
- (words((
- '__bases__', '__class__', '__closure__', '__code__', '__defaults__',
- '__dict__', '__doc__', '__file__', '__func__', '__globals__',
- '__metaclass__', '__module__', '__mro__', '__name__', '__self__',
- '__slots__', '__weakref__'),
- suffix=r'\b'),
- Name.Variable.Magic),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@[\w.]+', Name.Decorator),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
- include('magicfuncs'),
- (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
- default('#pop'),
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'as\b', Keyword.Namespace),
- (r',', Operator),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'import\b', Keyword.Namespace, '#pop'),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Name.Builtin.Pseudo, '#pop'),
- # sadly, in "raise x from y" y will be highlighted as namespace too
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # anything else here also means "raise x from y" and is therefore
- # not an error
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings-single': innerstring_rules(String.Single),
- 'strings-double': innerstring_rules(String.Double),
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings-double')
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings-single')
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings-double'),
- (r'\n', String.Double)
- ],
- 'tsqs': [
- (r"'''", String.Single, '#pop'),
- include('strings-single'),
- (r'\n', String.Single)
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?2(\.\d)?')
-
-class _PythonConsoleLexerBase(RegexLexer):
- name = 'Python console session'
- aliases = ['pycon']
- mimetypes = ['text/x-python-doctest']
-
- """Auxiliary lexer for `PythonConsoleLexer`.
-
- Code tokens are output as ``Token.Other.Code``, traceback tokens as
- ``Token.Other.Traceback``.
- """
- tokens = {
- 'root': [
- (r'(>>> )(.*\n)', bygroups(Generic.Prompt, Other.Code), 'continuations'),
- # This happens, e.g., when tracebacks are embedded in documentation;
- # trailing whitespaces are often stripped in such contexts.
- (r'(>>>)(\n)', bygroups(Generic.Prompt, Whitespace)),
- (r'(\^C)?Traceback \(most recent call last\):\n', Other.Traceback, 'traceback'),
- # SyntaxError starts with this
- (r' File "[^"]+", line \d+', Other.Traceback, 'traceback'),
- (r'.*\n', Generic.Output),
- ],
- 'continuations': [
- (r'(\.\.\. )(.*\n)', bygroups(Generic.Prompt, Other.Code)),
- # See above.
- (r'(\.\.\.)(\n)', bygroups(Generic.Prompt, Whitespace)),
- default('#pop'),
- ],
- 'traceback': [
- # As soon as we see a traceback, consume everything until the next
- # >>> prompt.
- (r'(?=>>>( |$))', Text, '#pop'),
- (r'(KeyboardInterrupt)(\n)', bygroups(Name.Class, Whitespace)),
- (r'.*\n', Other.Traceback),
- ],
- }
-
-class PythonConsoleLexer(DelegatingLexer):
- """
- For Python console output or doctests, such as:
-
- .. sourcecode:: pycon
-
- >>> a = 'foo'
- >>> print(a)
- foo
- >>> 1 / 0
- Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- ZeroDivisionError: integer division or modulo by zero
-
- Additional options:
-
- `python3`
- Use Python 3 lexer for code. Default is ``True``.
-
- .. versionadded:: 1.0
- .. versionchanged:: 2.5
- Now defaults to ``True``.
- """
-
- name = 'Python console session'
- aliases = ['pycon']
- mimetypes = ['text/x-python-doctest']
-
- def __init__(self, **options):
- python3 = get_bool_opt(options, 'python3', True)
- if python3:
- pylexer = PythonLexer
- tblexer = PythonTracebackLexer
- else:
- pylexer = Python2Lexer
- tblexer = Python2TracebackLexer
- # We have two auxiliary lexers. Use DelegatingLexer twice with
- # different tokens. TODO: DelegatingLexer should support this
-        # directly, by accepting a tuple of auxiliary lexers and a tuple of
- # distinguishing tokens. Then we wouldn't need this intermediary
- # class.
- class _ReplaceInnerCode(DelegatingLexer):
- def __init__(self, **options):
- super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options)
- super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options)
-
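
The PythonConsoleLexer docstring above shows the kind of pycon transcript it targets: prompt lines are delegated to PythonLexer (or Python2Lexer when python3=False) and traceback blocks to the matching traceback lexer, via the two nested DelegatingLexer wrappers built in __init__. A minimal usage sketch, assuming the standard Pygments API.

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import PythonConsoleLexer

session = (
    ">>> a = 'foo'\n"
    ">>> print(a)\n"
    "foo\n"
)

# python3=True is already the default; it is passed explicitly here for clarity.
print(highlight(session, PythonConsoleLexer(python3=True), HtmlFormatter()))
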
-class PythonTracebackLexer(RegexLexer):
- """
- For Python 3.x tracebacks, with support for chained exceptions.
-
- .. versionadded:: 1.0
-
- .. versionchanged:: 2.5
- This is now the default ``PythonTracebackLexer``. It is still available
- as the alias ``Python3TracebackLexer``.
- """
-
- name = 'Python Traceback'
- aliases = ['pytb', 'py3tb']
- filenames = ['*.pytb', '*.py3tb']
- mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'^(\^C)?Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
- (r'^During handling of the above exception, another '
- r'exception occurred:\n\n', Generic.Traceback),
- (r'^The above exception was the direct cause of the '
- r'following exception:\n\n', Generic.Traceback),
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
- (r'^( )(.+)(\n)',
- bygroups(Whitespace, using(PythonLexer), Whitespace), 'markers'),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Whitespace, Comment, Whitespace)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
- (r'^([a-zA-Z_][\w.]*)(:?\n)',
- bygroups(Generic.Error, Whitespace), '#pop'),
- default('#pop'),
- ],
- 'markers': [
- # Either `PEP 657 <https://www.python.org/dev/peps/pep-0657/>`
- # error locations in Python 3.11+, or single-caret markers
- # for syntax errors before that.
- (r'^( {4,})([~^]+)(\n)',
- bygroups(Whitespace, Punctuation.Marker, Whitespace),
- '#pop'),
- default('#pop'),
- ],
- }
-
-
-Python3TracebackLexer = PythonTracebackLexer
-
-
-class Python2TracebackLexer(RegexLexer):
- """
- For Python tracebacks.
-
- .. versionadded:: 0.7
-
- .. versionchanged:: 2.5
- This class has been renamed from ``PythonTracebackLexer``.
- ``PythonTracebackLexer`` now refers to the Python 3 variant.
- """
-
- name = 'Python 2.x Traceback'
- aliases = ['py2tb']
- filenames = ['*.py2tb']
- mimetypes = ['text/x-python2-traceback']
-
- tokens = {
- 'root': [
- # Cover both (most recent call last) and (innermost last)
- # The optional ^C allows us to catch keyboard interrupt signals.
- (r'^(\^C)?(Traceback.*\n)',
- bygroups(Text, Generic.Traceback), 'intb'),
- # SyntaxError starts with this.
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
- (r'^( )(.+)(\n)',
- bygroups(Text, using(Python2Lexer), Whitespace), 'marker'),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Whitespace)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
- (r'^([a-zA-Z_]\w*)(:?\n)',
- bygroups(Generic.Error, Whitespace), '#pop')
- ],
- 'marker': [
- # For syntax errors.
- (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
- default('#pop'),
- ],
- }
-
-
-class CythonLexer(RegexLexer):
- """
- For Pyrex and Cython source code.
-
- .. versionadded:: 1.1
- """
-
- name = 'Cython'
- url = 'https://cython.org'
- aliases = ['cython', 'pyx', 'pyrex']
- filenames = ['*.pyx', '*.pxd', '*.pxi']
- mimetypes = ['text/x-cython', 'application/x-cython']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Whitespace, String.Doc)),
- (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Whitespace, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Whitespace),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'(<)([a-zA-Z0-9.?]+)(>)',
- bygroups(Punctuation, Keyword.Type, Punctuation)),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
- (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
- bygroups(Keyword, Number.Integer, Operator, Name, Operator,
- Name, Punctuation)),
- include('keywords'),
- (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
- # (should actually start a block with only cdefs)
- (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
- (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
- include('builtins'),
- include('backtick'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
- ('[uU]?"""', String, combined('stringescape', 'tdqs')),
- ("[uU]?'''", String, combined('stringescape', 'tsqs')),
- ('[uU]?"', String, combined('stringescape', 'dqs')),
- ("[uU]?'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (words((
- 'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
- 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
- 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
- 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
- Keyword),
- (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
- ],
- 'builtins': [
- (words((
- '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
- 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
- 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
- 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
- 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
- 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
- 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
- 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
- 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
- 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
- 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
- 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
- r')\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
- 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
- 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
- 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
- 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
- 'OSError', 'OverflowError', 'OverflowWarning',
- 'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError',
- 'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError',
- 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
- 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
- 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@\w+', Name.Decorator),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
- (r'[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'cdef': [
- (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
- (r'(struct|enum|union|class)\b', Keyword),
- (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
- bygroups(Name.Function, Text), '#pop'),
- (r'([a-zA-Z_]\w*)(\s*)(,)',
- bygroups(Name.Function, Text, Punctuation)),
- (r'from\b', Keyword, '#pop'),
- (r'as\b', Keyword),
- (r':', Punctuation, '#pop'),
- (r'(?=["\'])', Text, '#pop'),
- (r'[a-zA-Z_]\w*', Keyword.Type),
- (r'.', Text),
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
-
-class DgLexer(RegexLexer):
- """
- Lexer for dg,
- a functional and object-oriented programming language
- running on the CPython 3 VM.
-
- .. versionadded:: 1.6
- """
- name = 'dg'
- aliases = ['dg']
- filenames = ['*.dg']
- mimetypes = ['text/x-dg']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment.Single),
-
- (r'(?i)0b[01]+', Number.Bin),
- (r'(?i)0o[0-7]+', Number.Oct),
- (r'(?i)0x[0-9a-f]+', Number.Hex),
- (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
- (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
- (r'(?i)[+-]?[0-9]+j?', Number.Integer),
-
- (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
- (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
- (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
- (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
-
- (r"`\w+'*`", Operator),
- (r'\b(and|in|is|or|where)\b', Operator.Word),
- (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
-
- (words((
- 'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
- 'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
- 'property', 'range', 'set', 'set\'', 'slice', 'staticmethod', 'str',
- 'super', 'tuple', 'tuple\'', 'type'),
- prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
- Name.Builtin),
- (words((
- '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
- 'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
- 'eval', 'exhaust', 'filter', 'flip', 'foldl1?', 'format', 'fst',
- 'getattr', 'globals', 'hasattr', 'hash', 'head', 'hex', 'id', 'init',
- 'input', 'isinstance', 'issubclass', 'iter', 'iterate', 'last', 'len',
- 'locals', 'map', 'max', 'min', 'next', 'oct', 'open', 'ord', 'pow',
- 'print', 'repr', 'reversed', 'round', 'setattr', 'scanl1?', 'snd',
- 'sorted', 'sum', 'tail', 'take', 'takewhile', 'vars', 'zip'),
- prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
- Name.Builtin),
- (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
- Name.Builtin.Pseudo),
-
- (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
- Name.Exception),
- (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
- r"SystemExit)(?!['\w])", Name.Exception),
-
- (r"(?<![\w.])(except|finally|for|if|import|not|otherwise|raise|"
- r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
-
- (r"[A-Z_]+'*(?!['\w])", Name),
- (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
- (r"\w+'*", Name),
-
- (r'[()]', Punctuation),
- (r'.', Error),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'string': [
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String),
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop')
- ],
- 'sqs': [
- (r"'", String, '#pop')
- ],
- 'tdqs': [
- (r'"""', String, '#pop')
- ],
- 'tsqs': [
- (r"'''", String, '#pop')
- ],
- }
-
-
-class NumPyLexer(PythonLexer):
- """
- A Python lexer recognizing Numerical Python builtins.
-
- .. versionadded:: 0.10
- """
-
- name = 'NumPy'
- url = 'https://numpy.org/'
- aliases = ['numpy']
-
- # override the mimetypes to not inherit them from python
- mimetypes = []
- filenames = []
-
- EXTRA_KEYWORDS = {
- 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
- 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
- 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
- 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
- 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
- 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
- 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
- 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
- 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
- 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
- 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
- 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
- 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
- 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
- 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
- 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
- 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
- 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
- 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
- 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
- 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
- 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
- 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
- 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
- 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
- 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
- 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
- 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
- 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
- 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
- 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
- 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
- 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
- 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
- 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
- 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
- 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
- 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
- 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
- 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
- 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
- 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
- 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
- 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
- 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
- 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
- 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
- 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
- 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
- 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
- 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
- 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
- 'set_numeric_ops', 'set_printoptions', 'set_string_function',
- 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
- 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
- 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
- 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
- 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
- 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
- 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
- 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
- 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
- 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
- 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
- }
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- PythonLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Keyword.Pseudo, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- ltext = text[:1000]
- return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
- 'import ' in ltext) \
- and ('import numpy' in ltext or 'from numpy import' in ltext)
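
NumPyLexer above is a compact example of a generally useful pattern: subclass an existing lexer and post-process its token stream in get_tokens_unprocessed() instead of redefining any regex rules. A minimal sketch of the same idea for a hypothetical in-house API; MyApiLexer and the names in EXTRA_KEYWORDS are invented and not part of Pygments.

from pygments.lexers.python import PythonLexer
from pygments.token import Keyword, Name

class MyApiLexer(PythonLexer):
    """Hypothetical subclass: re-tag a project's helper names, NumPyLexer-style."""
    name = 'Python+MyAPI'
    aliases = ['my-api']

    EXTRA_KEYWORDS = {'fetch_rows', 'bulk_insert', 'connection_pool'}

    def get_tokens_unprocessed(self, text):
        # Post-process the parent stream: plain Name tokens that match the
        # extra set come out as Keyword.Pseudo, everything else is unchanged.
        for index, token, value in super().get_tokens_unprocessed(text):
            if token is Name and value in self.EXTRA_KEYWORDS:
                yield index, Keyword.Pseudo, value
            else:
                yield index, token, value
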
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/q.py b/venv/lib/python3.11/site-packages/pygments/lexers/q.py
deleted file mode 100644
index e5c1591..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/q.py
+++ /dev/null
@@ -1,188 +0,0 @@
-"""
- pygments.lexers.q
- ~~~~~~~~~~~~~~~~~
-
- Lexer for the Q programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups, inherit
-from pygments.token import Comment, Name, Number, Operator, Punctuation, \
- String, Whitespace, Literal, Generic
-
-__all__ = ["KLexer", "QLexer"]
-
-
-class KLexer(RegexLexer):
- """
- For `K <https://code.kx.com/>`_ source code.
-
- .. versionadded:: 2.12
- """
-
- name = "K"
- aliases = ["k"]
- filenames = ["*.k"]
-
- tokens = {
- "whitespace": [
- # hashbang script
- (r"^#!.*", Comment.Hashbang),
- # Comments
- (r"^/\s*\n", Comment.Multiline, "comments"),
- (r"(?<!\S)/.*", Comment.Single),
- # Whitespace
- (r"\s+", Whitespace),
- # Strings
- (r"\"", String.Double, "strings"),
- ],
- "root": [
- include("whitespace"),
- include("keywords"),
- include("declarations"),
- ],
- "keywords": [
- (words(("abs", "acos", "asin", "atan", "avg", "bin",
- "binr", "by", "cor", "cos", "cov", "dev",
- "delete", "div", "do", "enlist", "exec", "exit",
- "exp", "from", "getenv", "hopen", "if", "in",
- "insert", "last", "like", "log", "max", "min",
- "prd", "select", "setenv", "sin", "sqrt", "ss",
- "sum", "tan", "update", "var", "wavg", "while",
- "within", "wsum", "xexp"),
- suffix=r"\b"), Operator.Word),
- ],
- "declarations": [
- # Timing
- (r"^\\ts?", Comment.Preproc),
- (r"^(\\\w\s+[^/\n]*?)(/.*)",
- bygroups(Comment.Preproc, Comment.Single)),
- # Generic System Commands
- (r"^\\\w.*", Comment.Preproc),
- # Prompt
- (r"^[a-zA-Z]\)", Generic.Prompt),
- # Function Names
- (r"([.]?[a-zA-Z][\w.]*)(\s*)([-.~=!@#$%^&*_+|,<>?/\\:']?:)(\s*)(\{)",
- bygroups(Name.Function, Whitespace, Operator, Whitespace, Punctuation),
- "functions"),
- # Variable Names
- (r"([.]?[a-zA-Z][\w.]*)(\s*)([-.~=!@#$%^&*_+|,<>?/\\:']?:)",
- bygroups(Name.Variable, Whitespace, Operator)),
- # Functions
- (r"\{", Punctuation, "functions"),
- # Parentheses
- (r"\(", Punctuation, "parentheses"),
- # Brackets
- (r"\[", Punctuation, "brackets"),
- # Errors
- (r"'`([a-zA-Z][\w.]*)?", Name.Exception),
- # File Symbols
- (r"`:([a-zA-Z/][\w./]*)?", String.Symbol),
- # Symbols
- (r"`([a-zA-Z][\w.]*)?", String.Symbol),
- # Numbers
- include("numbers"),
- # Variable Names
- (r"[a-zA-Z][\w.]*", Name),
- # Operators
- (r"[-=+*#$%@!~^&:.,<>'\\|/?_]", Operator),
- # Punctuation
- (r";", Punctuation),
- ],
- "functions": [
- include("root"),
- (r"\}", Punctuation, "#pop"),
- ],
- "parentheses": [
- include("root"),
- (r"\)", Punctuation, "#pop"),
- ],
- "brackets": [
- include("root"),
- (r"\]", Punctuation, "#pop"),
- ],
- "numbers": [
- # Binary Values
- (r"[01]+b", Number.Bin),
- # Nulls/Infinities
- (r"0[nNwW][cefghijmndzuvtp]?", Number),
- # Timestamps
- ((r"(?:[0-9]{4}[.][0-9]{2}[.][0-9]{2}|[0-9]+)"
- "D(?:[0-9](?:[0-9](?::[0-9]{2}"
- "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)?"), Literal.Date),
- # Datetimes
- ((r"[0-9]{4}[.][0-9]{2}"
- "(?:m|[.][0-9]{2}(?:T(?:[0-9]{2}:[0-9]{2}"
- "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)"), Literal.Date),
- # Times
- (r"[0-9]{2}:[0-9]{2}(?::[0-9]{2}(?:[.][0-9]{1,3})?)?",
- Literal.Date),
- # GUIDs
- (r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
- Number.Hex),
- # Byte Vectors
- (r"0x[0-9a-fA-F]+", Number.Hex),
- # Floats
- (r"([0-9]*[.]?[0-9]+|[0-9]+[.]?[0-9]*)[eE][+-]?[0-9]+[ef]?",
- Number.Float),
- (r"([0-9]*[.][0-9]+|[0-9]+[.][0-9]*)[ef]?", Number.Float),
- (r"[0-9]+[ef]", Number.Float),
- # Characters
- (r"[0-9]+c", Number),
- # Integers
- (r"[0-9]+[ihtuv]", Number.Integer),
- # Long Integers
- (r"[0-9]+[jnp]?", Number.Integer.Long),
- ],
- "comments": [
- (r"[^\\]+", Comment.Multiline),
- (r"^\\", Comment.Multiline, "#pop"),
- (r"\\", Comment.Multiline),
- ],
- "strings": [
- (r'[^"\\]+', String.Double),
- (r"\\.", String.Escape),
- (r'"', String.Double, "#pop"),
- ],
- }
-
-
-class QLexer(KLexer):
- """
- For `Q <https://code.kx.com/>`_ source code.
-
- .. versionadded:: 2.12
- """
-
- name = "Q"
- aliases = ["q"]
- filenames = ["*.q"]
-
- tokens = {
- "root": [
- (words(("aj", "aj0", "ajf", "ajf0", "all", "and", "any", "asc",
- "asof", "attr", "avgs", "ceiling", "cols", "count", "cross",
- "csv", "cut", "deltas", "desc", "differ", "distinct", "dsave",
- "each", "ej", "ema", "eval", "except", "fby", "fills", "first",
- "fkeys", "flip", "floor", "get", "group", "gtime", "hclose",
- "hcount", "hdel", "hsym", "iasc", "idesc", "ij", "ijf",
- "inter", "inv", "key", "keys", "lj", "ljf", "load", "lower",
- "lsq", "ltime", "ltrim", "mavg", "maxs", "mcount", "md5",
- "mdev", "med", "meta", "mins", "mmax", "mmin", "mmu", "mod",
- "msum", "neg", "next", "not", "null", "or", "over", "parse",
- "peach", "pj", "prds", "prior", "prev", "rand", "rank", "ratios",
- "raze", "read0", "read1", "reciprocal", "reval", "reverse",
- "rload", "rotate", "rsave", "rtrim", "save", "scan", "scov",
- "sdev", "set", "show", "signum", "ssr", "string", "sublist",
- "sums", "sv", "svar", "system", "tables", "til", "trim", "txf",
- "type", "uj", "ujf", "ungroup", "union", "upper", "upsert",
- "value", "view", "views", "vs", "where", "wj", "wj1", "ww",
- "xasc", "xbar", "xcol", "xcols", "xdesc", "xgroup", "xkey",
- "xlog", "xprev", "xrank"),
- suffix=r"\b"), Name.Builtin,
- ),
- inherit,
- ],
- }
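
QLexer above adds a single rule for Q's builtin vocabulary and then uses `inherit` to splice every remaining 'root' rule from KLexer into place. The same idiom works for any dialect built on an existing lexer; a compact sketch with invented BaseLexer/DialectLexer classes follows. Rules listed before `inherit` win because Pygments tries rules in order.

from pygments.lexer import RegexLexer, inherit, words
from pygments.token import Keyword, Name, Whitespace

class BaseLexer(RegexLexer):
    name = 'base'
    tokens = {
        'root': [
            (r'\s+', Whitespace),
            (r'\w+', Name),
        ],
    }

class DialectLexer(BaseLexer):
    name = 'dialect'
    tokens = {
        'root': [
            # dialect keywords are tried first because rules run in order...
            (words(('select', 'update'), suffix=r'\b'), Keyword),
            # ...then everything BaseLexer defines for 'root' follows
            inherit,
        ],
    }
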
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/qlik.py b/venv/lib/python3.11/site-packages/pygments/lexers/qlik.py
deleted file mode 100644
index b265b60..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/qlik.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""
- pygments.lexers.qlik
- ~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the Qlik scripting language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text
-from pygments.lexers._qlik_builtins import OPERATORS_LIST, STATEMENT_LIST, \
- SCRIPT_FUNCTIONS, CONSTANT_LIST
-
-__all__ = ["QlikLexer"]
-
-
-class QlikLexer(RegexLexer):
- """
- Lexer for qlik code, including .qvs files
-
- .. versionadded:: 2.12
- """
-
- name = "Qlik"
- aliases = ["qlik", "qlikview", "qliksense", "qlikscript"]
- filenames = ["*.qvs", "*.qvw"]
-
- flags = re.IGNORECASE
-
- tokens = {
- # Handle multi-line comments
- "comment": [
- (r"\*/", Comment.Multiline, "#pop"),
- (r"[^*]+", Comment.Multiline),
- ],
- # Handle numbers
- "numerics": [
- (r"\b\d+\.\d+(e\d+)?[fd]?\b", Number.Float),
- (r"\b\d+\b", Number.Integer),
- ],
- # Handle variable names in things
- "interp": [
- (
- r"(\$\()(\w+)(\))",
- bygroups(String.Interpol, Name.Variable, String.Interpol),
- ),
- ],
- # Handle strings
- "string": [
- (r"'", String, "#pop"),
- include("interp"),
- (r"[^'$]+", String),
- (r"\$", String),
- ],
-        # Handle the right-hand side of let/set assignments (up to ';')
- "assignment": [
- (r";", Punctuation, "#pop"),
- include("root"),
- ],
- "field_name_quote": [
- (r'"', String.Symbol, "#pop"),
- include("interp"),
- (r"[^\"$]+", String.Symbol),
- (r"\$", String.Symbol),
- ],
- "field_name_bracket": [
- (r"\]", String.Symbol, "#pop"),
- include("interp"),
- (r"[^\]$]+", String.Symbol),
- (r"\$", String.Symbol),
- ],
- "function": [(r"\)", Punctuation, "#pop"), include("root")],
- "root": [
- # Whitespace and comments
- (r"\s+", Text.Whitespace),
- (r"/\*", Comment.Multiline, "comment"),
- (r"//.*\n", Comment.Single),
- # variable assignment
- (r"(let|set)(\s+)", bygroups(Keyword.Declaration, Text.Whitespace),
- "assignment"),
- # Word operators
- (words(OPERATORS_LIST["words"], prefix=r"\b", suffix=r"\b"),
- Operator.Word),
- # Statements
- (words(STATEMENT_LIST, suffix=r"\b"), Keyword),
- # Table names
- (r"[a-z]\w*:", Keyword.Declaration),
- # Constants
- (words(CONSTANT_LIST, suffix=r"\b"), Keyword.Constant),
- # Functions
- (words(SCRIPT_FUNCTIONS, suffix=r"(?=\s*\()"), Name.Builtin,
- "function"),
- # interpolation - e.g. $(variableName)
- include("interp"),
- # Quotes denote a field/file name
- (r'"', String.Symbol, "field_name_quote"),
- # Square brackets denote a field/file name
- (r"\[", String.Symbol, "field_name_bracket"),
- # Strings
- (r"'", String, "string"),
- # Numbers
- include("numerics"),
- # Operator symbols
- (words(OPERATORS_LIST["symbols"]), Operator),
- # Strings denoted by single quotes
- (r"'.+?'", String),
- # Words as text
- (r"\b\w+\b", Text),
- # Basic punctuation
- (r"[,;.()\\/]", Punctuation),
- ],
- }
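
Nearly every rule in QlikLexer is assembled with the words() helper, which turns a literal list into one optimized alternation wrapped in optional prefix/suffix regexes; the function rule above uses a lookahead suffix so a name only matches when a '(' follows. words() builds its pattern with the regex_opt helper, which can also be used directly; the word list below is invented for illustration.

import re

from pygments.regexopt import regex_opt

# One optimized alternation for the whole list, anchored on word boundaries.
pattern = regex_opt(['let', 'set', 'select'], prefix=r'\b', suffix=r'\b')
rx = re.compile(pattern, re.IGNORECASE)   # QlikLexer sets flags = re.IGNORECASE

print(pattern)                 # an optimized (?:...) group
print(bool(rx.match('SET')))   # True: case-insensitive, like the lexer above
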
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/qvt.py b/venv/lib/python3.11/site-packages/pygments/lexers/qvt.py
deleted file mode 100644
index dc329f7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/qvt.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""
- pygments.lexers.qvt
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for QVT Operational language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, combined, default, \
- words
-from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
- Name, String, Number
-
-__all__ = ['QVToLexer']
-
-
-class QVToLexer(RegexLexer):
- """
- For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
-
- Reference for implementing this: «Meta Object Facility (MOF) 2.0
- Query/View/Transformation Specification», Version 1.1 - January 2011
- (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
- particular.
-
-    Notable token assignments:
-
- - Name.Class is assigned to the identifier following any of the following
- keywords: metamodel, class, exception, primitive, enum, transformation
- or library
-
- - Name.Function is assigned to the names of mappings and queries
-
- - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
- 'self' and 'result'.
- """
- # With obvious borrowings & inspiration from the Java, Python and C lexers
-
- name = 'QVTO'
- aliases = ['qvto', 'qvt']
- filenames = ['*.qvto']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'(--|//)(\s*)(directive:)?(.*)$',
- bygroups(Comment, Comment, Comment.Preproc, Comment)),
- # Uncomment the following if you want to distinguish between
- # '/*' and '/**', à la javadoc
- # (r'/[*]{2}(.|\n)*?[*]/', Comment.Multiline),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'\\\n', Text),
- (r'(and|not|or|xor|##?)\b', Operator.Word),
- (r'(:{1,2}=|[-+]=)\b', Operator.Word),
- (r'(@|<<|>>)\b', Keyword), # stereotypes
- (r'!=|<>|==|=|!->|->|>=|<=|[.]{3}|[+/*%=<>&|.~]', Operator),
- (r'[]{}:(),;[]', Punctuation),
- (r'(true|false|unlimited|null)\b', Keyword.Constant),
- (r'(this|self|result)\b', Name.Builtin.Pseudo),
- (r'(var)\b', Keyword.Declaration),
- (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
- (r'(metamodel|class|exception|primitive|enum|transformation|'
- r'library)(\s+)(\w+)',
- bygroups(Keyword.Word, Text, Name.Class)),
- (r'(exception)(\s+)(\w+)',
- bygroups(Keyword.Word, Text, Name.Exception)),
- (r'(main)\b', Name.Function),
- (r'(mapping|helper|query)(\s+)',
- bygroups(Keyword.Declaration, Text), 'operation'),
- (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
- (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
- Keyword.Type),
- include('keywords'),
- ('"', String, combined('stringescape', 'dqs')),
- ("'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- # (r'([a-zA-Z_]\w*)(::)([a-zA-Z_]\w*)',
- # bygroups(Text, Text, Text)),
- ],
-
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- default('#pop'),
- ],
-
- 'operation': [
- (r'::', Text),
- (r'(.*::)([a-zA-Z_]\w*)([ \t]*)(\()',
- bygroups(Text, Name.Function, Text, Punctuation), '#pop')
- ],
-
- 'assert': [
- (r'(warning|error|fatal)\b', Keyword, '#pop'),
- default('#pop'), # all else: go back
- ],
-
- 'keywords': [
- (words((
- 'abstract', 'access', 'any', 'assert', 'blackbox', 'break',
- 'case', 'collect', 'collectNested', 'collectOne', 'collectselect',
- 'collectselectOne', 'composes', 'compute', 'configuration',
- 'constructor', 'continue', 'datatype', 'default', 'derived',
- 'disjuncts', 'do', 'elif', 'else', 'end', 'endif', 'except',
- 'exists', 'extends', 'forAll', 'forEach', 'forOne', 'from', 'if',
- 'implies', 'in', 'inherits', 'init', 'inout', 'intermediate',
- 'invresolve', 'invresolveIn', 'invresolveone', 'invresolveoneIn',
- 'isUnique', 'iterate', 'late', 'let', 'literal', 'log', 'map',
- 'merges', 'modeltype', 'new', 'object', 'one', 'ordered', 'out',
- 'package', 'population', 'property', 'raise', 'readonly',
- 'references', 'refines', 'reject', 'resolve', 'resolveIn',
- 'resolveone', 'resolveoneIn', 'return', 'select', 'selectOne',
- 'sortedBy', 'static', 'switch', 'tag', 'then', 'try', 'typedef',
- 'unlimited', 'uses', 'when', 'where', 'while', 'with', 'xcollect',
- 'xmap', 'xselect'), suffix=r'\b'), Keyword),
- ],
-
- # There is no need to distinguish between String.Single and
- # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
- 'strings': [
- (r'[^\\\'"\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- ],
- 'stringescape': [
- (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
- ],
- 'dqs': [ # double-quoted string
- (r'"', String, '#pop'),
- (r'\\\\|\\"', String.Escape),
- include('strings')
- ],
- 'sqs': [ # single-quoted string
- (r"'", String, '#pop'),
- (r"\\\\|\\'", String.Escape),
- include('strings')
- ],
- 'name': [
- (r'[a-zA-Z_]\w*', Name),
- ],
- # numbers: excerpt taken from the python lexer
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer)
- ],
- }
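
As the QVToLexer docstring notes, the identifier after keywords such as transformation gets Name.Class, while mapping and query names get Name.Function; dumping the token stream makes that visible. A minimal sketch (the QVT Operational fragment is illustrative only):

from pygments.lexers import get_lexer_by_name

code = (
    "transformation Uml2Rdb(in uml : UML, out rdb : RDB);\n"
    "mapping UML::Class::class2table() : RDB::Table {\n"
    "    name := self.name;\n"
    "}\n"
)

# Print every non-whitespace token with its assigned token type.
for token_type, value in get_lexer_by_name("qvto").get_tokens(code):
    if value.strip():
        print(token_type, repr(value))
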
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/r.py b/venv/lib/python3.11/site-packages/pygments/lexers/r.py
deleted file mode 100644
index ed62fa2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/r.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
- pygments.lexers.r
- ~~~~~~~~~~~~~~~~~
-
- Lexers for the R/S languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class RConsoleLexer(Lexer):
- """
- For R console transcripts or R CMD BATCH output files.
- """
-
- name = 'RConsole'
- aliases = ['rconsole', 'rout']
- filenames = ['*.Rout']
-
- def get_tokens_unprocessed(self, text):
- slexer = SLexer(**self.options)
-
- current_code_block = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('>') or line.startswith('+'):
- # Colorize the prompt as such,
- # then put rest of line into current_code_block
- insertions.append((len(current_code_block),
- [(0, Generic.Prompt, line[:2])]))
- current_code_block += line[2:]
- else:
- # We have reached a non-prompt line!
- # If we have stored prompt lines, need to process them first.
- if current_code_block:
- # Weave together the prompts and highlight code.
- yield from do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block))
- # Reset vars for next code block.
- current_code_block = ''
- insertions = []
- # Now process the actual line itself, this is output from R.
- yield match.start(), Generic.Output, line
-
- # If we happen to end on a code block with nothing after it, need to
- # process the last code block. This is neither elegant nor DRY so
- # should be changed.
- if current_code_block:
- yield from do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block))
-
-
-class SLexer(RegexLexer):
- """
- For S, S-plus, and R source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'S'
- aliases = ['splus', 's', 'r']
- filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
- mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
- 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
-
- valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
- tokens = {
- 'comments': [
- (r'#.*$', Comment.Single),
- ],
- 'valid_name': [
- (valid_name, Name),
- ],
- 'punctuation': [
- (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
- ],
- 'keywords': [
- (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
- r'(?![\w.])',
- Keyword.Reserved),
- ],
- 'operators': [
- (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
- (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
- ],
- 'builtin_symbols': [
- (r'(NULL|NA(_(integer|real|complex|character)_)?|'
- r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
- r'(?![\w.])',
- Keyword.Constant),
- (r'(T|F)\b', Name.Builtin.Pseudo),
- ],
- 'numbers': [
- # hex number
- (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
- # decimal number
- (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
- Number),
- ],
- 'statements': [
- include('comments'),
- # whitespaces
- (r'\s+', Whitespace),
- (r'\'', String, 'string_squote'),
- (r'\"', String, 'string_dquote'),
- include('builtin_symbols'),
- include('valid_name'),
- include('numbers'),
- include('keywords'),
- include('punctuation'),
- include('operators'),
- ],
- 'root': [
- # calls:
- (r'(%s)\s*(?=\()' % valid_name, Name.Function),
- include('statements'),
- # blocks:
- (r'\{|\}', Punctuation),
- # (r'\{', Punctuation, 'block'),
- (r'.', Text),
- ],
- # 'block': [
- # include('statements'),
- # ('\{', Punctuation, '#push'),
- # ('\}', Punctuation, '#pop')
- # ],
- 'string_squote': [
- (r'([^\'\\]|\\.)*\'', String, '#pop'),
- ],
- 'string_dquote': [
- (r'([^"\\]|\\.)*"', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
- return 0.11
-
-
-class RdLexer(RegexLexer):
- """
- Pygments Lexer for R documentation (Rd) files
-
- This is a very minimal implementation, highlighting little more
- than the macros. A description of Rd syntax is found in `Writing R
- Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
- and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
-
- .. versionadded:: 1.6
- """
- name = 'Rd'
- aliases = ['rd']
- filenames = ['*.Rd']
- mimetypes = ['text/x-r-doc']
-
- # To account for verbatim / LaTeX-like / and R-like areas
- # would require parsing.
- tokens = {
- 'root': [
- # catch escaped brackets and percent sign
- (r'\\[\\{}%]', String.Escape),
- # comments
- (r'%.*$', Comment),
- # special macros with no arguments
- (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
- # macros
- (r'\\[a-zA-Z]+\b', Keyword),
- # special preprocessor macros
- (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
- # non-escaped brackets
- (r'[{}]', Name.Builtin),
- # everything else
- (r'[^\\%\n{}]+', Text),
- (r'.', Text),
- ]
- }
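
RConsoleLexer colours the ">"/"+" prompts itself, buffers the code that follows them, and hands the buffer to SLexer through do_insertions; every other line is emitted as Generic.Output. A minimal sketch with an illustrative transcript:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter

transcript = (
    "> x <- c(1, 2, 3)\n"   # prompt + code, woven through SLexer
    "> mean(x)\n"
    "[1] 2\n"               # plain output line -> Generic.Output
)

print(highlight(transcript, get_lexer_by_name("rconsole"), HtmlFormatter()))
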
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rdf.py b/venv/lib/python3.11/site-packages/pygments/lexers/rdf.py
deleted file mode 100644
index c4fb998..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rdf.py
+++ /dev/null
@@ -1,466 +0,0 @@
-"""
- pygments.lexers.rdf
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for semantic web and RDF query languages and markup.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default
-from pygments.token import Keyword, Punctuation, String, Number, Operator, \
- Generic, Whitespace, Name, Literal, Comment, Text
-
-__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
-
-
-class SparqlLexer(RegexLexer):
- """
- Lexer for `SPARQL <https://www.w3.org/TR/sparql11-query/>`_ query language.
-
- .. versionadded:: 2.0
- """
- name = 'SPARQL'
- aliases = ['sparql']
- filenames = ['*.rq', '*.sparql']
- mimetypes = ['application/sparql-query']
-
- # character group definitions ::
-
- PN_CHARS_BASE_GRP = ('a-zA-Z'
- '\u00c0-\u00d6'
- '\u00d8-\u00f6'
- '\u00f8-\u02ff'
- '\u0370-\u037d'
- '\u037f-\u1fff'
- '\u200c-\u200d'
- '\u2070-\u218f'
- '\u2c00-\u2fef'
- '\u3001-\ud7ff'
- '\uf900-\ufdcf'
- '\ufdf0-\ufffd')
-
- PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
- PN_CHARS_GRP = (PN_CHARS_U_GRP +
- r'\-' +
- r'0-9' +
- '\u00b7' +
- '\u0300-\u036f' +
- '\u203f-\u2040')
-
- HEX_GRP = '0-9A-Fa-f'
-
- PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
-
- # terminal productions ::
-
- PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
- PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
- PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
- HEX = '[' + HEX_GRP + ']'
-
- PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
-
- BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
- '.]*' + PN_CHARS + ')?'
-
- PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
- VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
- '0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
- PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
- '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
- PN_CHARS_GRP + ':]|' + PLX + '))?')
-
- EXPONENT = r'[eE][+-]?\d+'
-
- # Lexer token definitions ::
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # keywords ::
- (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
- r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
- r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
- r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
- r'using\s+named|using|graph|default|named|all|optional|service|'
- r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
- (r'(a)\b', Keyword),
- # IRIs ::
- ('(' + IRIREF + ')', Name.Label),
- # blank nodes ::
- ('(' + BLANK_NODE_LABEL + ')', Name.Label),
-            # variables ::
- ('[?$]' + VARNAME, Name.Variable),
- # prefixed names ::
- (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
- # function names ::
- (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
- r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
- r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
- r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
- r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
- r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
- r'count|sum|min|max|avg|sample|group_concat|separator)\b',
- Name.Function),
- # boolean literals ::
- (r'(true|false)', Keyword.Constant),
- # double literals ::
- (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
- # decimal literals ::
- (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
- # integer literals ::
- (r'[+\-]?\d+', Number.Integer),
- # operators ::
- (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
- # punctuation characters ::
- (r'[(){}.;,:^\[\]]', Punctuation),
- # line comments ::
- (r'#[^\n]*', Comment),
- # strings ::
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String.Escape, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (r'u' + HEX + '{4}', String.Escape, '#pop'),
- (r'U' + HEX + '{8}', String.Escape, '#pop'),
- (r'.', String.Escape, '#pop'),
- ],
- 'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Name.Function), '#pop:2'),
- (r'\^\^', Operator, '#pop:2'),
- default('#pop:2'),
- ],
- }
-
-
-class TurtleLexer(RegexLexer):
- """
- Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
-
- .. versionadded:: 2.1
- """
- name = 'Turtle'
- aliases = ['turtle']
- filenames = ['*.ttl']
- mimetypes = ['text/turtle', 'application/x-turtle']
-
- # character group definitions ::
- PN_CHARS_BASE_GRP = ('a-zA-Z'
- '\u00c0-\u00d6'
- '\u00d8-\u00f6'
- '\u00f8-\u02ff'
- '\u0370-\u037d'
- '\u037f-\u1fff'
- '\u200c-\u200d'
- '\u2070-\u218f'
- '\u2c00-\u2fef'
- '\u3001-\ud7ff'
- '\uf900-\ufdcf'
- '\ufdf0-\ufffd')
-
- PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
- PN_CHARS_GRP = (PN_CHARS_U_GRP +
- r'\-' +
- r'0-9' +
- '\u00b7' +
- '\u0300-\u036f' +
- '\u203f-\u2040')
-
- PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
- PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
- PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
- HEX_GRP = '0-9A-Fa-f'
-
- HEX = '[' + HEX_GRP + ']'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
-
- PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
- PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
- '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
- PN_CHARS_GRP + ':]|' + PLX + '))?')
-
- patterns = {
- 'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)', # Simplified character range
- 'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
- }
-
- tokens = {
- 'root': [
- (r'\s+', Text),
-
- # Base / prefix
- (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
- Punctuation)),
- (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
- bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
- Name.Variable, Whitespace, Punctuation)),
-
- # The shorthand predicate 'a'
- (r'(?<=\s)a(?=\s)', Keyword.Type),
-
- # IRIREF
- (r'%(IRIREF)s' % patterns, Name.Variable),
-
- # PrefixedName
- (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
-
- # BlankNodeLabel
- (r'(_)(:)([' + PN_CHARS_U_GRP + r'0-9]([' + PN_CHARS_GRP + r'.]*' + PN_CHARS + ')?)',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
-
- # Comment
- (r'#[^\n]+', Comment),
-
- (r'\b(true|false)\b', Literal),
- (r'[+\-]?\d*\.\d+', Number.Float),
- (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
- (r'[+\-]?\d+', Number.Integer),
- (r'[\[\](){}.;,:^]', Punctuation),
-
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (r'.', String, '#pop'),
- ],
- 'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Generic.Emph), '#pop:2'),
-
- (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
-
- default('#pop:2'),
-
- ],
- }
-
- # Turtle and Tera Term macro files share the same file extension
- # but each has a recognizable and distinct syntax.
- def analyse_text(text):
- for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
- if re.search(r'^\s*%s' % t, text):
- return 0.80
-
-
-class ShExCLexer(RegexLexer):
- """
- Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_ shape expressions language syntax.
- """
- name = 'ShExC'
- aliases = ['shexc', 'shex']
- filenames = ['*.shex']
- mimetypes = ['text/shex']
-
- # character group definitions ::
-
- PN_CHARS_BASE_GRP = ('a-zA-Z'
- '\u00c0-\u00d6'
- '\u00d8-\u00f6'
- '\u00f8-\u02ff'
- '\u0370-\u037d'
- '\u037f-\u1fff'
- '\u200c-\u200d'
- '\u2070-\u218f'
- '\u2c00-\u2fef'
- '\u3001-\ud7ff'
- '\uf900-\ufdcf'
- '\ufdf0-\ufffd')
-
- PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
- PN_CHARS_GRP = (PN_CHARS_U_GRP +
- r'\-' +
- r'0-9' +
- '\u00b7' +
- '\u0300-\u036f' +
- '\u203f-\u2040')
-
- HEX_GRP = '0-9A-Fa-f'
-
- PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
-
- # terminal productions ::
-
- PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
- PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
- PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
- HEX = '[' + HEX_GRP + ']'
-
- PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
-
- UCHAR = r'\\' + UCHAR_NO_BACKSLASH
-
- IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
-
- BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
- '.]*' + PN_CHARS + ')?'
-
- PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
- PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
- '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
- PN_CHARS_GRP + ':]|' + PLX + '))?')
-
- EXPONENT = r'[eE][+-]?\d+'
-
- # Lexer token definitions ::
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # keywords ::
- (r'(?i)(base|prefix|start|external|'
- r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
- r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
- r'totaldigits|fractiondigits|'
- r'closed|extra)\b', Keyword),
- (r'(a)\b', Keyword),
- # IRIs ::
- ('(' + IRIREF + ')', Name.Label),
- # blank nodes ::
- ('(' + BLANK_NODE_LABEL + ')', Name.Label),
- # prefixed names ::
- (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
- # boolean literals ::
- (r'(true|false)', Keyword.Constant),
- # double literals ::
- (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
- # decimal literals ::
- (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
- # integer literals ::
- (r'[+\-]?\d+', Number.Integer),
- # operators ::
- (r'[@|$&=*+?^\-~]', Operator),
- # operator keywords ::
- (r'(?i)(and|or|not)\b', Operator.Word),
- # punctuation characters ::
- (r'[(){}.;,:^\[\]]', Punctuation),
- # line comments ::
- (r'#[^\n]*', Comment),
- # strings ::
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String.Escape, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
- (r'.', String.Escape, '#pop'),
- ],
- 'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Name.Function), '#pop:2'),
- (r'\^\^', Operator, '#pop:2'),
- default('#pop:2'),
- ],
- }
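
Turtle and Tera Term macro files can both end in .ttl, which is why TurtleLexer.analyse_text bids 0.80 as soon as it sees an @prefix/@base (or PREFIX/BASE) line; that is usually enough for guess_lexer to pick it. A minimal sketch (the data and query strings are illustrative):

from pygments.lexers import guess_lexer, get_lexer_by_name

turtle = "@prefix ex: <http://example.org/> .\nex:alice a ex:Person .\n"
print(guess_lexer(turtle).name)  # typically resolves to "Turtle"

sparql = "SELECT ?s WHERE { ?s a ex:Person } LIMIT 10"
for token_type, value in get_lexer_by_name("sparql").get_tokens(sparql):
    if value.strip():
        print(token_type, repr(value))
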
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rebol.py b/venv/lib/python3.11/site-packages/pygments/lexers/rebol.py
deleted file mode 100644
index 6170f0c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rebol.py
+++ /dev/null
@@ -1,419 +0,0 @@
-"""
- pygments.lexers.rebol
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the REBOL and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Generic, Whitespace
-
-__all__ = ['RebolLexer', 'RedLexer']
-
-
-class RebolLexer(RegexLexer):
- """
- A `REBOL <http://www.rebol.com/>`_ lexer.
-
- .. versionadded:: 1.1
- """
- name = 'REBOL'
- aliases = ['rebol']
- filenames = ['*.r', '*.r3', '*.reb']
- mimetypes = ['text/x-rebol']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(
- r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
- r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
- r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
- r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
- r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
- r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
- r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
- r'while|compress|decompress|secure|open|close|read|read-io|'
- r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
- r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
- r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
- r'browse|launch|stats|get-modes|set-modes|to-local-file|'
- r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
- r'hide|draw|show|size-text|textinfo|offset-to-caret|'
- r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
- r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
- r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
- r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
- r'rsa-encrypt)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(
- r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
- r'minimum|maximum|negate|complement|absolute|random|head|tail|'
- r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
- r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
- r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
- r'copy)$', word):
- yield match.start(), Name.Function, word
- elif re.match(
- r'(error|source|input|license|help|install|echo|Usage|with|func|'
- r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
- r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
- r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
- r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
- r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
- r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
- r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
- r'write-user|save-user|set-user-name|protect-system|parse-xml|'
- r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
- r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
- r'request-dir|center-face|do-events|net-error|decode-url|'
- r'parse-header|parse-header-date|parse-email-addrs|import-email|'
- r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
- r'find-key-face|do-face|viewtop|confine|find-window|'
- r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
- r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
- r'read-thru|load-thru|do-thru|launch-thru|load-image|'
- r'request-download|do-face-alt|set-font|set-para|get-style|'
- r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
- r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
- r'resize-face|load-stock|load-stock-block|notify|request|flash|'
- r'request-color|request-pass|request-text|request-list|'
- r'request-date|request-file|dbug|editor|link-relative-path|'
- r'emailer|parse-error)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(
- r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
- r'return|exit|break)$', word):
- yield match.start(), Name.Exception, word
- elif re.match('REBOL$', word):
- yield match.start(), Generic.Heading, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
- elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
- word):
- yield match.start(), Operator, word
- elif re.match(r".*\?$", word):
- yield match.start(), Keyword, word
- elif re.match(r".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#\{[0-9a-f]*\}', Number.Hex),
- (r'2#\{', Number.Hex, 'bin2'),
- (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
- (r'"', String, 'string'),
- (r'\{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(^{")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
- r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+X\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]()]', Generic.Strong),
- (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
- (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s\{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{"\[]+', Comment),
- (r'/[^(^{")\s/[\]]*', Name.Attribute),
- (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(^{")\s]+)', Text),
- ],
- 'string': [
- (r'[^(^")]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(^{})]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[(|)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([01]\s*){8}', Number.Hex),
- (r'\}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(^")]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(^{})]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'\{', Comment, '#push'),
- (r'\}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'\{', Comment, "commentString2"),
- (r'[^(\[\]"{)]+', Comment),
- ],
- }
-
- def analyse_text(text):
- """
-        Check whether the code contains a REBOL header, in which case it is probably not R code.
- """
- if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
- # The code starts with REBOL header
- return 1.0
- elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
- # The code contains REBOL header but also some text before it
- return 0.5
-
-
-class RedLexer(RegexLexer):
- """
- A `Red-language <http://www.red-lang.org/>`_ lexer.
-
- .. versionadded:: 2.0
- """
- name = 'Red'
- aliases = ['red', 'red/system']
- filenames = ['*.red', '*.reds']
- mimetypes = ['text/x-red', 'text/x-red-system']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
- r'foreach|forall|func|function|does|has|switch|'
- r'case|reduce|compose|get|set|print|prin|equal\?|'
- r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
- r'greater-or-equal\?|same\?|not|type\?|stats|'
- r'bind|union|replace|charset|routine)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
- r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
- r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
- r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
- r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
- r'update|write)$', word):
- yield match.start(), Name.Function, word
- elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
- r'none|crlf|dot|null-byte)$', word):
- yield match.start(), Name.Builtin.Pseudo, word
- elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
- r'#switch|#default|#get-definition)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
- r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
- r'quote|forever)$', word):
- yield match.start(), Name.Exception, word
- elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
- r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
- r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
- r'any-struct\?|none\?|word\?|any-series\?)$', word):
- yield match.start(), Keyword, word
- elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
- elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
- r'<<<|>>>|<<|>>|<|>%)$', word):
- yield match.start(), Operator, word
- elif re.match(r".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- elif re.match(":.*", word):
- yield match.start(), Generic.Subheading, word # get-word
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#\{[0-9a-f\s]*\}', Number.Hex),
- (r'2#\{', Number.Hex, 'bin2'),
- (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
- (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
- bygroups(Number.Hex, Name.Variable, Whitespace)),
- (r'"', String, 'string'),
- (r'\{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(^{")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
- r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+X\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]()]', Generic.Strong),
- (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
- (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s\{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{"\[]+', Comment),
- (r'/[^(^{^")\s/[\]]*', Name.Attribute),
- (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(^{")\s]+)', Text),
- ],
- 'string': [
- (r'[^(^")]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(^{})]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[(|)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([01]\s*){8}', Number.Hex),
- (r'\}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(^")]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(^{})]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'\{', Comment, '#push'),
- (r'\}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'\{', Comment, "commentString2"),
- (r'[^(\[\]"{)]+', Comment),
- ],
- }
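
REBOL and R both claim the .r extension, so RebolLexer.analyse_text returns 1.0 when the text starts with a REBOL [...] header (0.5 if the header appears later), while SLexer only bids 0.11 on an assignment arrow. A minimal sketch of that disambiguation:

from pygments.lexers import guess_lexer

rebol_src = 'REBOL [Title: "demo"]\nprint "Hello"\n'
r_src = "x <- c(1, 2, 3)\nmean(x)\n"

print(guess_lexer(rebol_src).name)  # the header scores 1.0 -> "REBOL"
print(guess_lexer(r_src).name)      # the arrow gives S/R only a weak score,
                                    # so other lexers may still compete here
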
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/resource.py b/venv/lib/python3.11/site-packages/pygments/lexers/resource.py
deleted file mode 100644
index 2583ba8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/resource.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
- pygments.lexers.resource
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for resource definition files.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Comment, String, Number, Operator, Text, \
- Keyword, Name
-
-__all__ = ['ResourceLexer']
-
-
-class ResourceLexer(RegexLexer):
- """Lexer for `ICU Resource bundles
- <http://userguide.icu-project.org/locale/resources>`_.
-
- .. versionadded:: 2.0
- """
- name = 'ResourceBundle'
- aliases = ['resourcebundle', 'resource']
- filenames = []
-
- _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
- ':int', ':alias')
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- (r'//.*?$', Comment),
- (r'"', String, 'string'),
- (r'-?\d+', Number.Integer),
- (r'[,{}]', Operator),
- (r'([^\s{:]+)(\s*)(%s?)' % '|'.join(_types),
- bygroups(Name, Text, Keyword)),
- (r'\s+', Text),
- (words(_types), Keyword),
- ],
- 'string': [
- (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
- r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
- (r'\{', String.Escape, 'msgname'),
- (r'"', String, '#pop')
- ],
- 'msgname': [
- (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
- ],
- 'message': [
- (r'\{', String.Escape, 'msgname'),
- (r'\}', String.Escape, '#pop'),
- (r'(,)(\s*)([a-z]+)(\s*\})',
- bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
- (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
- bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
- String.Escape, Operator.Word, String.Escape, Operator,
- String.Escape, Number.Integer, String.Escape), 'choice'),
- (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
- bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
- String.Escape), 'choice'),
- (r'\s+', String.Escape)
- ],
- 'choice': [
- (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
- bygroups(Operator, Number.Integer, String.Escape), 'message'),
- (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
- (r'\}', String.Escape, ('#pop', '#pop')),
- (r'\s+', String.Escape)
- ],
- 'str': [
- (r'\}', String.Escape, '#pop'),
- (r'\{', String.Escape, 'msgname'),
- (r'[^{}]+', String)
- ]
- }
-
- def analyse_text(text):
- if text.startswith('root:table'):
- return 1.0
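
ResourceLexer registers no filename patterns, so it normally has to be requested by alias (analyse_text only fires when the text literally starts with "root:table"). A minimal sketch with an illustrative bundle:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

bundle = 'root:table {\n    greeting:string { "Hello, world!" }\n}\n'
print(highlight(bundle, get_lexer_by_name("resourcebundle"), TerminalFormatter()))
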
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ride.py b/venv/lib/python3.11/site-packages/pygments/lexers/ride.py
deleted file mode 100644
index 077fcc7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ride.py
+++ /dev/null
@@ -1,139 +0,0 @@
-"""
- pygments.lexers.ride
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Ride programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
- String, Text
-
-__all__ = ['RideLexer']
-
-
-class RideLexer(RegexLexer):
- """
- For `Ride <https://docs.wavesplatform.com/en/ride/about-ride.html>`_
- source code.
-
- .. versionadded:: 2.6
- """
-
- name = 'Ride'
- aliases = ['ride']
- filenames = ['*.ride']
- mimetypes = ['text/x-ride']
-
- validName = r'[a-zA-Z_][a-zA-Z0-9_\']*'
-
- builtinOps = (
- '||', '|', '>=', '>', '==', '!',
- '=', '<=', '<', '::', ':+', ':', '!=', '/',
- '.', '=>', '-', '+', '*', '&&', '%', '++',
- )
-
- globalVariablesName = (
- 'NOALG', 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512',
- 'SHA3224', 'SHA3256', 'SHA3384', 'SHA3512', 'nil', 'this', 'unit',
- 'height', 'lastBlock', 'Buy', 'Sell', 'CEILING', 'FLOOR', 'DOWN',
- 'HALFDOWN', 'HALFEVEN', 'HALFUP', 'UP',
- )
-
- typesName = (
- 'Unit', 'Int', 'Boolean', 'ByteVector', 'String', 'Address', 'Alias',
- 'Transfer', 'AssetPair', 'DataEntry', 'Order', 'Transaction',
- 'GenesisTransaction', 'PaymentTransaction', 'ReissueTransaction',
- 'BurnTransaction', 'MassTransferTransaction', 'ExchangeTransaction',
- 'TransferTransaction', 'SetAssetScriptTransaction',
- 'InvokeScriptTransaction', 'IssueTransaction', 'LeaseTransaction',
- 'LeaseCancelTransaction', 'CreateAliasTransaction',
- 'SetScriptTransaction', 'SponsorFeeTransaction', 'DataTransaction',
- 'WriteSet', 'AttachedPayment', 'ScriptTransfer', 'TransferSet',
- 'ScriptResult', 'Invocation', 'Asset', 'BlockInfo', 'Issue', 'Reissue',
- 'Burn', 'NoAlg', 'Md5', 'Sha1', 'Sha224', 'Sha256', 'Sha384', 'Sha512',
- 'Sha3224', 'Sha3256', 'Sha3384', 'Sha3512', 'BinaryEntry',
- 'BooleanEntry', 'IntegerEntry', 'StringEntry', 'List', 'Ceiling',
- 'Down', 'Floor', 'HalfDown', 'HalfEven', 'HalfUp', 'Up',
- )
-
- functionsName = (
- 'fraction', 'size', 'toBytes', 'take', 'drop', 'takeRight', 'dropRight',
- 'toString', 'isDefined', 'extract', 'throw', 'getElement', 'value',
- 'cons', 'toUtf8String', 'toInt', 'indexOf', 'lastIndexOf', 'split',
- 'parseInt', 'parseIntValue', 'keccak256', 'blake2b256', 'sha256',
- 'sigVerify', 'toBase58String', 'fromBase58String', 'toBase64String',
- 'fromBase64String', 'transactionById', 'transactionHeightById',
- 'getInteger', 'getBoolean', 'getBinary', 'getString',
- 'addressFromPublicKey', 'addressFromString', 'addressFromRecipient',
- 'assetBalance', 'wavesBalance', 'getIntegerValue', 'getBooleanValue',
- 'getBinaryValue', 'getStringValue', 'addressFromStringValue',
- 'assetInfo', 'rsaVerify', 'checkMerkleProof', 'median',
- 'valueOrElse', 'valueOrErrorMessage', 'contains', 'log', 'pow',
- 'toBase16String', 'fromBase16String', 'blockInfoByHeight',
- 'transferTransactionById',
- )
-
- reservedWords = words((
- 'match', 'case', 'else', 'func', 'if',
- 'let', 'then', '@Callable', '@Verifier',
- ), suffix=r'\b')
-
- tokens = {
- 'root': [
- # Comments
- (r'#.*', Comment.Single),
- # Whitespace
- (r'\s+', Text),
- # Strings
- (r'"', String, 'doublequote'),
- (r'utf8\'', String, 'utf8quote'),
- (r'base(58|64|16)\'', String, 'singlequote'),
- # Keywords
- (reservedWords, Keyword.Reserved),
- (r'\{-#.*?#-\}', Keyword.Reserved),
- (r'FOLD<\d+>', Keyword.Reserved),
- # Types
- (words(typesName), Keyword.Type),
- # Main
- # (specialName, Keyword.Reserved),
- # Prefix Operators
- (words(builtinOps, prefix=r'\(', suffix=r'\)'), Name.Function),
- # Infix Operators
- (words(builtinOps), Name.Function),
- (words(globalVariablesName), Name.Function),
- (words(functionsName), Name.Function),
- # Numbers
- include('numbers'),
- # Variable Names
- (validName, Name.Variable),
- # Parens
- (r'[,()\[\]{}]', Punctuation),
- ],
-
- 'doublequote': [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\[nrfvb\\"]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- 'utf8quote': [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\[nrfvb\\\']', String.Escape),
- (r'[^\']', String),
- (r'\'', String, '#pop'),
- ],
-
- 'singlequote': [
- (r'[^\']', String),
- (r'\'', String, '#pop'),
- ],
-
- 'numbers': [
- (r'_?\d+', Number.Integer),
- ],
- }
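
Besides the keyword and builtin lists, the Ride lexer treats script directives such as {-# STDLIB_VERSION 3 #-} and FOLD<n> as reserved words. A minimal sketch that runs the lexer over an illustrative fragment via pygments.lex:

from pygments import lex
from pygments.lexers import get_lexer_by_name

code = (
    "{-# STDLIB_VERSION 3 #-}\n"
    "let answer = 42\n"
    'func greet() = "hello " + toString(answer)\n'
)

for token_type, value in lex(code, get_lexer_by_name("ride")):
    if value.strip():
        print(token_type, repr(value))
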
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rita.py b/venv/lib/python3.11/site-packages/pygments/lexers/rita.py
deleted file mode 100644
index 9aa8569..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rita.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
- pygments.lexers.rita
- ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the RITA language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Comment, Operator, Keyword, Name, Literal, \
- Punctuation, Whitespace
-
-__all__ = ['RitaLexer']
-
-
-class RitaLexer(RegexLexer):
- """
- Lexer for RITA.
-
- .. versionadded:: 2.11
- """
- name = 'Rita'
- url = 'https://github.com/zaibacu/rita-dsl'
- filenames = ['*.rita']
- aliases = ['rita']
- mimetypes = ['text/rita']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'#(.*?)\n', Comment.Single),
- (r'@(.*?)\n', Operator), # Yes, whole line as an operator
- (r'"(\w|\d|\s|(\\")|[\'_\-./,\?\!])+?"', Literal),
- (r'\'(\w|\d|\s|(\\\')|["_\-./,\?\!])+?\'', Literal),
- (r'([A-Z_]+)', Keyword),
- (r'([a-z0-9_]+)', Name),
- (r'((->)|[!?+*|=])', Operator),
- (r'[\(\),\{\}]', Punctuation)
- ]
- }
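
The RITA lexer is deliberately loose: uppercase identifiers become keywords, lowercase ones become names, quoted text becomes a literal, and -> is an operator, regardless of whether the rule is semantically valid. A minimal sketch (the rule below is illustrative input only):

from pygments.lexers import get_lexer_by_name

rule = (
    "# tag cheap pizza mentions\n"
    'cheap = {"cheap", "affordable"}\n'
    '{IN_LIST(cheap), WORD("pizza")} -> MARK("CHEAP_PIZZA")\n'
)

for token_type, value in get_lexer_by_name("rita").get_tokens(rule):
    if value.strip():
        print(token_type, repr(value))
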
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rnc.py b/venv/lib/python3.11/site-packages/pygments/lexers/rnc.py
deleted file mode 100644
index d717175..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rnc.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
- pygments.lexers.rnc
- ~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the Relax-NG Compact syntax.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation
-
-__all__ = ['RNCCompactLexer']
-
-
-class RNCCompactLexer(RegexLexer):
- """
- For RelaxNG-compact syntax.
-
- .. versionadded:: 2.2
- """
-
- name = 'Relax-NG Compact'
- url = 'http://relaxng.org'
- aliases = ['rng-compact', 'rnc']
- filenames = ['*.rnc']
-
- tokens = {
- 'root': [
- (r'namespace\b', Keyword.Namespace),
- (r'(?:default|datatypes)\b', Keyword.Declaration),
- (r'##.*$', Comment.Preproc),
- (r'#.*$', Comment.Single),
- (r'"[^"]*"', String.Double),
- # TODO single quoted strings and escape sequences outside of
- # double-quoted strings
- (r'(?:element|attribute|mixed)\b', Keyword.Declaration, 'variable'),
- (r'(text\b|xsd:[^ ]+)', Keyword.Type, 'maybe_xsdattributes'),
- (r'[,?&*=|~]|>>', Operator),
- (r'[(){}]', Punctuation),
- (r'.', Text),
- ],
-
- # a variable has been declared using `element` or `attribute`
- 'variable': [
- (r'[^{]+', Name.Variable),
- (r'\{', Punctuation, '#pop'),
- ],
-
- # after an xsd:<datatype> declaration there may be attributes
- 'maybe_xsdattributes': [
- (r'\{', Punctuation, 'xsdattributes'),
- (r'\}', Punctuation, '#pop'),
- (r'.', Text),
- ],
-
- # attributes take the form { key1 = value1 key2 = value2 ... }
- 'xsdattributes': [
- (r'[^ =}]', Name.Attribute),
- (r'=', Operator),
- (r'"[^"]*"', String.Double),
- (r'\}', Punctuation, '#pop'),
- (r'.', Text),
- ],
- }
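
Since the lexer is registered for *.rnc, get_lexer_for_filename can select it from the file name alone. A minimal sketch with a small RELAX NG compact schema as illustrative input:

from pygments import highlight
from pygments.lexers import get_lexer_for_filename
from pygments.formatters import HtmlFormatter

schema = (
    "# address book schema\n"
    "element addressBook {\n"
    "    element card {\n"
    "        attribute name { text },\n"
    "        attribute email { text }\n"
    "    }*\n"
    "}\n"
)

print(highlight(schema, get_lexer_for_filename("addressbook.rnc"), HtmlFormatter()))
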
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py b/venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py
deleted file mode 100644
index 5d7d76e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""
- pygments.lexers.roboconf
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the Roboconf DSL.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, re
-from pygments.token import Text, Operator, Keyword, Name, Comment
-
-__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
-
-
-class RoboconfGraphLexer(RegexLexer):
- """
- Lexer for Roboconf graph files.
-
- .. versionadded:: 2.1
- """
- name = 'Roboconf Graph'
- aliases = ['roboconf-graph']
- filenames = ['*.graph']
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
- # Skip white spaces
- (r'\s+', Text),
-
- # There is one operator
- (r'=', Operator),
-
- # Keywords
- (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
- (words((
- 'installer', 'extends', 'exports', 'imports', 'facets',
- 'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
-
- # Comments
- (r'#.*\n', Comment),
-
- # Default
- (r'[^#]', Text),
- (r'.*\n', Text)
- ]
- }
-
-
-class RoboconfInstancesLexer(RegexLexer):
- """
- Lexer for Roboconf instances files.
-
- .. versionadded:: 2.1
- """
- name = 'Roboconf Instances'
- aliases = ['roboconf-instances']
- filenames = ['*.instances']
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
-
- # Skip white spaces
- (r'\s+', Text),
-
- # Keywords
- (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
- (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
- (r'\s*[\w.-]+\s*:', Name),
-
- # Comments
- (r'#.*\n', Comment),
-
- # Default
- (r'[^#]', Text),
- (r'.*\n', Text)
- ]
- }
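
A minimal sketch of highlighting a Roboconf graph snippet: facet/import are the keywords and installer/children among the names the graph lexer singles out (the snippet itself is illustrative):

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

graph = (
    "# illustrative Roboconf graph\n"
    "facet deployable {\n"
    "}\n"
    "vm {\n"
    "    installer: target;\n"
    "    children: mysql;\n"
    "}\n"
)

print(highlight(graph, get_lexer_by_name("roboconf-graph"), TerminalFormatter()))
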
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py b/venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py
deleted file mode 100644
index 3b676cc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py
+++ /dev/null
@@ -1,552 +0,0 @@
-"""
- pygments.lexers.robotframework
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Robot Framework.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Copyright 2012 Nokia Siemens Networks Oyj
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Token
-
-__all__ = ['RobotFrameworkLexer']
-
-
-HEADING = Token.Generic.Heading
-SETTING = Token.Keyword.Namespace
-IMPORT = Token.Name.Namespace
-TC_KW_NAME = Token.Generic.Subheading
-KEYWORD = Token.Name.Function
-ARGUMENT = Token.String
-VARIABLE = Token.Name.Variable
-COMMENT = Token.Comment
-SEPARATOR = Token.Punctuation
-SYNTAX = Token.Punctuation
-GHERKIN = Token.Generic.Emph
-ERROR = Token.Error
-
-
-def normalize(string, remove=''):
- string = string.lower()
- for char in remove + ' ':
- if char in string:
- string = string.replace(char, '')
- return string
-
-
-class RobotFrameworkLexer(Lexer):
- """
- For Robot Framework test data.
-
- Supports both space and pipe separated plain text formats.
-
- .. versionadded:: 1.6
- """
- name = 'RobotFramework'
- url = 'http://robotframework.org'
- aliases = ['robotframework']
- filenames = ['*.robot', '*.resource']
- mimetypes = ['text/x-robotframework']
-
- def __init__(self, **options):
- options['tabsize'] = 2
- options['encoding'] = 'UTF-8'
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- row_tokenizer = RowTokenizer()
- var_tokenizer = VariableTokenizer()
- index = 0
- for row in text.splitlines():
- for value, token in row_tokenizer.tokenize(row):
- for value, token in var_tokenizer.tokenize(value, token):
- if value:
- yield index, token, str(value)
- index += len(value)
-
-
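
RobotFrameworkLexer tokenizes the test data row by row, so the space-separated and pipe-separated formats mentioned in its docstring both flow through the RowTokenizer/VariableTokenizer machinery defined below. A minimal sketch feeding it a tiny, illustrative suite:

from pygments.lexers import get_lexer_by_name

suite = (
    "*** Test Cases ***\n"
    "Greet The World\n"
    "    Log    Hello, ${NAME}!\n"
)

for token_type, value in get_lexer_by_name("robotframework").get_tokens(suite):
    if value.strip():
        print(token_type, repr(value))
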
-class VariableTokenizer:
-
- def tokenize(self, string, token):
- var = VariableSplitter(string, identifiers='$@%&')
- if var.start < 0 or token in (COMMENT, ERROR):
- yield string, token
- return
- for value, token in self._tokenize(var, string, token):
- if value:
- yield value, token
-
- def _tokenize(self, var, string, orig_token):
- before = string[:var.start]
- yield before, orig_token
- yield var.identifier + '{', SYNTAX
- yield from self.tokenize(var.base, VARIABLE)
- yield '}', SYNTAX
- if var.index is not None:
- yield '[', SYNTAX
- yield from self.tokenize(var.index, VARIABLE)
- yield ']', SYNTAX
- yield from self.tokenize(string[var.end:], orig_token)
-
-
-class RowTokenizer:
-
- def __init__(self):
- self._table = UnknownTable()
- self._splitter = RowSplitter()
- testcases = TestCaseTable()
- settings = SettingTable(testcases.set_default_template)
- variables = VariableTable()
- keywords = KeywordTable()
- self._tables = {'settings': settings, 'setting': settings,
- 'metadata': settings,
- 'variables': variables, 'variable': variables,
- 'testcases': testcases, 'testcase': testcases,
- 'tasks': testcases, 'task': testcases,
- 'keywords': keywords, 'keyword': keywords,
- 'userkeywords': keywords, 'userkeyword': keywords}
-
- def tokenize(self, row):
- commented = False
- heading = False
- for index, value in enumerate(self._splitter.split(row)):
- # First value, and every second after that, is a separator.
- index, separator = divmod(index-1, 2)
- if value.startswith('#'):
- commented = True
- elif index == 0 and value.startswith('*'):
- self._table = self._start_table(value)
- heading = True
- yield from self._tokenize(value, index, commented,
- separator, heading)
- self._table.end_row()
-
- def _start_table(self, header):
- name = normalize(header, remove='*')
- return self._tables.get(name, UnknownTable())
-
- def _tokenize(self, value, index, commented, separator, heading):
- if commented:
- yield value, COMMENT
- elif separator:
- yield value, SEPARATOR
- elif heading:
- yield value, HEADING
- else:
- yield from self._table.tokenize(value, index)
-
-
-class RowSplitter:
- _space_splitter = re.compile('( {2,})')
- _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
-
- def split(self, row):
- splitter = (row.startswith('| ') and self._split_from_pipes
- or self._split_from_spaces)
- yield from splitter(row)
- yield '\n'
-
- def _split_from_spaces(self, row):
- yield '' # Start with (pseudo)separator similarly as with pipes
- yield from self._space_splitter.split(row)
-
- def _split_from_pipes(self, row):
- _, separator, rest = self._pipe_splitter.split(row, 1)
- yield separator
- while self._pipe_splitter.search(rest):
- cell, separator, rest = self._pipe_splitter.split(rest, 1)
- yield cell
- yield separator
- yield rest
-
-
-class Tokenizer:
- _tokens = None
-
- def __init__(self):
- self._index = 0
-
- def tokenize(self, value):
- values_and_tokens = self._tokenize(value, self._index)
- self._index += 1
- if isinstance(values_and_tokens, type(Token)):
- values_and_tokens = [(value, values_and_tokens)]
- return values_and_tokens
-
- def _tokenize(self, value, index):
- index = min(index, len(self._tokens) - 1)
- return self._tokens[index]
-
- def _is_assign(self, value):
- if value.endswith('='):
- value = value[:-1].strip()
- var = VariableSplitter(value, identifiers='$@&')
- return var.start == 0 and var.end == len(value)
-
-
-class Comment(Tokenizer):
- _tokens = (COMMENT,)
-
-
-class Setting(Tokenizer):
- _tokens = (SETTING, ARGUMENT)
- _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
- 'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
- 'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
- _import_settings = ('library', 'resource', 'variables')
- _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
-        'testtimeout', 'tasktimeout')
- _custom_tokenizer = None
-
- def __init__(self, template_setter=None):
- Tokenizer.__init__(self)
- self._template_setter = template_setter
-
- def _tokenize(self, value, index):
- if index == 1 and self._template_setter:
- self._template_setter(value)
- if index == 0:
- normalized = normalize(value)
- if normalized in self._keyword_settings:
- self._custom_tokenizer = KeywordCall(support_assign=False)
- elif normalized in self._import_settings:
- self._custom_tokenizer = ImportSetting()
- elif normalized not in self._other_settings:
- return ERROR
- elif self._custom_tokenizer:
- return self._custom_tokenizer.tokenize(value)
- return Tokenizer._tokenize(self, value, index)
-
-
-class ImportSetting(Tokenizer):
- _tokens = (IMPORT, ARGUMENT)
-
-
-class TestCaseSetting(Setting):
- _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
- 'template')
- _import_settings = ()
- _other_settings = ('documentation', 'tags', 'timeout')
-
- def _tokenize(self, value, index):
- if index == 0:
- type = Setting._tokenize(self, value[1:-1], index)
- return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
- return Setting._tokenize(self, value, index)
-
-
-class KeywordSetting(TestCaseSetting):
- _keyword_settings = ('teardown',)
- _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
-
-
-class Variable(Tokenizer):
- _tokens = (SYNTAX, ARGUMENT)
-
- def _tokenize(self, value, index):
- if index == 0 and not self._is_assign(value):
- return ERROR
- return Tokenizer._tokenize(self, value, index)
-
-
-class KeywordCall(Tokenizer):
- _tokens = (KEYWORD, ARGUMENT)
-
- def __init__(self, support_assign=True):
- Tokenizer.__init__(self)
- self._keyword_found = not support_assign
- self._assigns = 0
-
- def _tokenize(self, value, index):
- if not self._keyword_found and self._is_assign(value):
- self._assigns += 1
- return SYNTAX # VariableTokenizer tokenizes this later.
- if self._keyword_found:
- return Tokenizer._tokenize(self, value, index - self._assigns)
- self._keyword_found = True
- return GherkinTokenizer().tokenize(value, KEYWORD)
-
-
-class GherkinTokenizer:
- _gherkin_prefix = re.compile('^(Given|When|Then|And|But) ', re.IGNORECASE)
-
- def tokenize(self, value, token):
- match = self._gherkin_prefix.match(value)
- if not match:
- return [(value, token)]
- end = match.end()
- return [(value[:end], GHERKIN), (value[end:], token)]
-
-
-class TemplatedKeywordCall(Tokenizer):
- _tokens = (ARGUMENT,)
-
-
-class ForLoop(Tokenizer):
-
- def __init__(self):
- Tokenizer.__init__(self)
- self._in_arguments = False
-
- def _tokenize(self, value, index):
- token = self._in_arguments and ARGUMENT or SYNTAX
- if value.upper() in ('IN', 'IN RANGE'):
- self._in_arguments = True
- return token
-
-
-class _Table:
- _tokenizer_class = None
-
- def __init__(self, prev_tokenizer=None):
- self._tokenizer = self._tokenizer_class()
- self._prev_tokenizer = prev_tokenizer
- self._prev_values_on_row = []
-
- def tokenize(self, value, index):
- if self._continues(value, index):
- self._tokenizer = self._prev_tokenizer
- yield value, SYNTAX
- else:
- yield from self._tokenize(value, index)
- self._prev_values_on_row.append(value)
-
- def _continues(self, value, index):
- return value == '...' and all(self._is_empty(t)
- for t in self._prev_values_on_row)
-
- def _is_empty(self, value):
- return value in ('', '\\')
-
- def _tokenize(self, value, index):
- return self._tokenizer.tokenize(value)
-
- def end_row(self):
- self.__init__(prev_tokenizer=self._tokenizer)
-
-
-class UnknownTable(_Table):
- _tokenizer_class = Comment
-
- def _continues(self, value, index):
- return False
-
-
-class VariableTable(_Table):
- _tokenizer_class = Variable
-
-
-class SettingTable(_Table):
- _tokenizer_class = Setting
-
- def __init__(self, template_setter, prev_tokenizer=None):
- _Table.__init__(self, prev_tokenizer)
- self._template_setter = template_setter
-
- def _tokenize(self, value, index):
- if index == 0 and normalize(value) == 'testtemplate':
- self._tokenizer = Setting(self._template_setter)
- return _Table._tokenize(self, value, index)
-
- def end_row(self):
- self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
-
-
-class TestCaseTable(_Table):
- _setting_class = TestCaseSetting
- _test_template = None
- _default_template = None
-
- @property
- def _tokenizer_class(self):
- if self._test_template or (self._default_template and
- self._test_template is not False):
- return TemplatedKeywordCall
- return KeywordCall
-
- def _continues(self, value, index):
- return index > 0 and _Table._continues(self, value, index)
-
- def _tokenize(self, value, index):
- if index == 0:
- if value:
- self._test_template = None
- return GherkinTokenizer().tokenize(value, TC_KW_NAME)
- if index == 1 and self._is_setting(value):
- if self._is_template(value):
- self._test_template = False
- self._tokenizer = self._setting_class(self.set_test_template)
- else:
- self._tokenizer = self._setting_class()
- if index == 1 and self._is_for_loop(value):
- self._tokenizer = ForLoop()
- if index == 1 and self._is_empty(value):
- return [(value, SYNTAX)]
- return _Table._tokenize(self, value, index)
-
- def _is_setting(self, value):
- return value.startswith('[') and value.endswith(']')
-
- def _is_template(self, value):
- return normalize(value) == '[template]'
-
- def _is_for_loop(self, value):
- return value.startswith(':') and normalize(value, remove=':') == 'for'
-
- def set_test_template(self, template):
- self._test_template = self._is_template_set(template)
-
- def set_default_template(self, template):
- self._default_template = self._is_template_set(template)
-
- def _is_template_set(self, template):
- return normalize(template) not in ('', '\\', 'none', '${empty}')
-
-
-class KeywordTable(TestCaseTable):
- _tokenizer_class = KeywordCall
- _setting_class = KeywordSetting
-
- def _is_template(self, value):
- return False
-
-
-# Following code copied directly from Robot Framework 2.7.5.
-
-class VariableSplitter:
-
- def __init__(self, string, identifiers):
- self.identifier = None
- self.base = None
- self.index = None
- self.start = -1
- self.end = -1
- self._identifiers = identifiers
- self._may_have_internal_variables = False
- try:
- self._split(string)
- except ValueError:
- pass
- else:
- self._finalize()
-
- def get_replaced_base(self, variables):
- if self._may_have_internal_variables:
- return variables.replace_string(self.base)
- return self.base
-
- def _finalize(self):
- self.identifier = self._variable_chars[0]
- self.base = ''.join(self._variable_chars[2:-1])
- self.end = self.start + len(self._variable_chars)
- if self._has_list_or_dict_variable_index():
- self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
- self.end += len(self._list_and_dict_variable_index_chars)
-
- def _has_list_or_dict_variable_index(self):
- return self._list_and_dict_variable_index_chars\
- and self._list_and_dict_variable_index_chars[-1] == ']'
-
- def _split(self, string):
- start_index, max_index = self._find_variable(string)
- self.start = start_index
- self._open_curly = 1
- self._state = self._variable_state
- self._variable_chars = [string[start_index], '{']
- self._list_and_dict_variable_index_chars = []
- self._string = string
- start_index += 2
- for index, char in enumerate(string[start_index:]):
- index += start_index # Giving start to enumerate only in Py 2.6+
- try:
- self._state(char, index)
- except StopIteration:
- return
- if index == max_index and not self._scanning_list_variable_index():
- return
-
- def _scanning_list_variable_index(self):
- return self._state in [self._waiting_list_variable_index_state,
- self._list_variable_index_state]
-
- def _find_variable(self, string):
- max_end_index = string.rfind('}')
- if max_end_index == -1:
- raise ValueError('No variable end found')
- if self._is_escaped(string, max_end_index):
- return self._find_variable(string[:max_end_index])
- start_index = self._find_start_index(string, 1, max_end_index)
- if start_index == -1:
- raise ValueError('No variable start found')
- return start_index, max_end_index
-
- def _find_start_index(self, string, start, end):
- index = string.find('{', start, end) - 1
- if index < 0:
- return -1
- if self._start_index_is_ok(string, index):
- return index
- return self._find_start_index(string, index+2, end)
-
- def _start_index_is_ok(self, string, index):
- return string[index] in self._identifiers\
- and not self._is_escaped(string, index)
-
- def _is_escaped(self, string, index):
- escaped = False
- while index > 0 and string[index-1] == '\\':
- index -= 1
- escaped = not escaped
- return escaped
-
- def _variable_state(self, char, index):
- self._variable_chars.append(char)
- if char == '}' and not self._is_escaped(self._string, index):
- self._open_curly -= 1
- if self._open_curly == 0:
- if not self._is_list_or_dict_variable():
- raise StopIteration
- self._state = self._waiting_list_variable_index_state
- elif char in self._identifiers:
- self._state = self._internal_variable_start_state
-
- def _is_list_or_dict_variable(self):
- return self._variable_chars[0] in ('@','&')
-
- def _internal_variable_start_state(self, char, index):
- self._state = self._variable_state
- if char == '{':
- self._variable_chars.append(char)
- self._open_curly += 1
- self._may_have_internal_variables = True
- else:
- self._variable_state(char, index)
-
- def _waiting_list_variable_index_state(self, char, index):
- if char != '[':
- raise StopIteration
- self._list_and_dict_variable_index_chars.append(char)
- self._state = self._list_variable_index_state
-
- def _list_variable_index_state(self, char, index):
- self._list_and_dict_variable_index_chars.append(char)
- if char == ']':
- raise StopIteration
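
A minimal usage sketch for the Robot Framework machinery deleted above (not taken from the module itself), assuming a Pygments installation that still ships this lexer under the 'robotframework' alias. Heading cells, separators, the keyword call and the ${NAME} variable reference each surface as distinct token types:

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('robotframework')
source = "*** Test Cases ***\nExample\n    Log    Hello, ${NAME}!\n"
for tokentype, value in lexer.get_tokens(source):
    # RowTokenizer picks the table from the *** heading, RowSplitter cuts the
    # two-space separated cells, and the variable machinery splits ${NAME}
    # into syntax and variable parts.
    print(tokentype, repr(value))
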
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ruby.py b/venv/lib/python3.11/site-packages/pygments/lexers/ruby.py
deleted file mode 100644
index 466d6e7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ruby.py
+++ /dev/null
@@ -1,516 +0,0 @@
-"""
- pygments.lexers.ruby
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Ruby and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
- bygroups, default, LexerContext, do_insertions, words, line_re
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Generic, Whitespace
-from pygments.util import shebang_matches
-
-__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
-
-
-RUBY_OPERATORS = (
- '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
- '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
-)
-
-
-class RubyLexer(ExtendedRegexLexer):
- """
- For Ruby source code.
- """
-
- name = 'Ruby'
- url = 'http://www.ruby-lang.org'
- aliases = ['ruby', 'rb', 'duby']
- filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
- '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
- mimetypes = ['text/x-ruby', 'application/x-ruby']
-
- flags = re.DOTALL | re.MULTILINE
-
- def heredoc_callback(self, match, ctx):
- # okay, this is the hardest part of parsing Ruby...
- # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
-
- start = match.start(1)
- yield start, Operator, match.group(1) # <<[-~]?
- yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
- yield match.start(3), String.Delimiter, match.group(3) # heredoc name
- yield match.start(4), String.Heredoc, match.group(4) # quote again
-
- heredocstack = ctx.__dict__.setdefault('heredocstack', [])
- outermost = not bool(heredocstack)
- heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
-
- ctx.pos = match.start(5)
- ctx.end = match.end(5)
- # this may find other heredocs, so limit the recursion depth
- if len(heredocstack) < 100:
- yield from self.get_tokens_unprocessed(context=ctx)
- else:
- yield ctx.pos, String.Heredoc, match.group(5)
- ctx.pos = match.end()
-
- if outermost:
- # this is the outer heredoc again, now we can process them all
- for tolerant, hdname in heredocstack:
- lines = []
- for match in line_re.finditer(ctx.text, ctx.pos):
- if tolerant:
- check = match.group().strip()
- else:
- check = match.group().rstrip()
- if check == hdname:
- for amatch in lines:
- yield amatch.start(), String.Heredoc, amatch.group()
- yield match.start(), String.Delimiter, match.group()
- ctx.pos = match.end()
- break
- else:
- lines.append(match)
- else:
- # end of heredoc not found -- error!
- for amatch in lines:
- yield amatch.start(), Error, amatch.group()
- ctx.end = len(ctx.text)
- del heredocstack[:]
-
- def gen_rubystrings_rules():
- def intp_regex_callback(self, match, ctx):
- yield match.start(1), String.Regex, match.group(1) # begin
- nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
- ctx.pos = match.end()
-
- def intp_string_callback(self, match, ctx):
- yield match.start(1), String.Other, match.group(1)
- nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Other, match.group(4) # end
- ctx.pos = match.end()
-
- states = {}
- states['strings'] = [
- # easy ones
- (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
- (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
- (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
- (r':"', String.Symbol, 'simple-sym'),
- (r'([a-zA-Z_]\w*)(:)(?!:)',
- bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
- (r'"', String.Double, 'simple-string-double'),
- (r"'", String.Single, 'simple-string-single'),
- (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
- ]
-
- # quoted string and symbol
- for name, ttype, end in ('string-double', String.Double, '"'), \
- ('string-single', String.Single, "'"),\
- ('sym', String.Symbol, '"'), \
- ('backtick', String.Backtick, '`'):
- states['simple-'+name] = [
- include('string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
- (r'[\\#]', ttype),
- (end, ttype, '#pop'),
- ]
-
- # braced quoted strings
- for lbrace, rbrace, bracecc, name in \
- ('\\{', '\\}', '{}', 'cb'), \
- ('\\[', '\\]', '\\[\\]', 'sb'), \
- ('\\(', '\\)', '()', 'pa'), \
- ('<', '>', '<>', 'ab'):
- states[name+'-intp-string'] = [
- (r'\\[\\' + bracecc + ']', String.Other),
- (lbrace, String.Other, '#push'),
- (rbrace, String.Other, '#pop'),
- include('string-intp-escaped'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
- name+'-intp-string'))
- states[name+'-string'] = [
- (r'\\[\\' + bracecc + ']', String.Other),
- (lbrace, String.Other, '#push'),
- (rbrace, String.Other, '#pop'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- states['strings'].append((r'%[qsw]' + lbrace, String.Other,
- name+'-string'))
- states[name+'-regex'] = [
- (r'\\[\\' + bracecc + ']', String.Regex),
- (lbrace, String.Regex, '#push'),
- (rbrace + '[mixounse]*', String.Regex, '#pop'),
- include('string-intp'),
- (r'[\\#' + bracecc + ']', String.Regex),
- (r'[^\\#' + bracecc + ']+', String.Regex),
- ]
- states['strings'].append((r'%r' + lbrace, String.Regex,
- name+'-regex'))
-
- # these must come after %<brace>!
- states['strings'] += [
- # %r regex
- (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
- intp_regex_callback),
- # regular fancy strings with qsw
- (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
- (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- # special forms of fancy strings after operators or
- # in method calls with braces
- (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Whitespace, String.Other, None)),
-            # and, because lookbehinds must be fixed-width, the same rule
-            # is repeated a second time for line beginnings...
- (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Whitespace, String.Other, None)),
- # all regular fancy strings without qsw
- (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- ]
-
- return states
-
- tokens = {
- 'root': [
- (r'\A#!.+?$', Comment.Hashbang),
- (r'#.*?$', Comment.Single),
- (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
- # keywords
- (words((
- 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
- 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
- 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
- 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
- Keyword),
- # start of function, class and module names
- (r'(module)(\s+)([a-zA-Z_]\w*'
- r'(?:::[a-zA-Z_]\w*)*)',
- bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'(def)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
- (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
- (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- # special methods
- (words((
- 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
- 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
- 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
- suffix=r'\b'),
- Keyword.Pseudo),
- (r'(not|and|or)\b', Operator.Word),
- (words((
- 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
- 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
- 'private_method_defined', 'protected_method_defined',
- 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
- Name.Builtin),
- (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
- (words((
- 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
- 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
- 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
- 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
- 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
- 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
- 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
- 'instance_method', 'instance_methods',
- 'instance_variable_get', 'instance_variable_set', 'instance_variables',
- 'lambda', 'load', 'local_variables', 'loop',
- 'method', 'method_missing', 'methods', 'module_eval', 'name',
- 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
- 'private_instance_methods',
- 'private_methods', 'proc', 'protected_instance_methods',
- 'protected_methods', 'public_class_method',
- 'public_instance_methods', 'public_methods',
- 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
- 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
- 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
- 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
- 'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
- # normal heredocs
- (r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
- heredoc_callback),
- # empty string heredocs
- (r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
- (r'__END__', Comment.Preproc, 'end-part'),
- # multiline regex (after keywords or assignments)
- (r'(?:^|(?<=[=<>~!:])|'
- r'(?<=(?:\s|;)when\s)|'
- r'(?<=(?:\s|;)or\s)|'
- r'(?<=(?:\s|;)and\s)|'
- r'(?<=\.index\s)|'
- r'(?<=\.scan\s)|'
- r'(?<=\.sub\s)|'
- r'(?<=\.sub!\s)|'
- r'(?<=\.gsub\s)|'
- r'(?<=\.gsub!\s)|'
- r'(?<=\.match\s)|'
- r'(?<=(?:\s|;)if\s)|'
- r'(?<=(?:\s|;)elsif\s)|'
- r'(?<=^when\s)|'
- r'(?<=^index\s)|'
- r'(?<=^scan\s)|'
- r'(?<=^sub\s)|'
- r'(?<=^gsub\s)|'
- r'(?<=^sub!\s)|'
- r'(?<=^gsub!\s)|'
- r'(?<=^match\s)|'
- r'(?<=^if\s)|'
- r'(?<=^elsif\s)'
- r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
- # multiline regex (in method calls or subscripts)
- (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
- # multiline regex (this time the funny no whitespace rule)
- (r'(\s+)(/)(?![\s=])', bygroups(Whitespace, String.Regex),
- 'multiline-regex'),
- # lex numbers and ignore following regular expressions which
- # are division operators in fact (grrrr. i hate that. any
- # better ideas?)
- # since pygments 0.7 we also eat a "?" operator after numbers
- # so that the char operator does not work. Chars are not allowed
- # there so that you can use the ternary operator.
- # stupid example:
- # x>=0?n[x]:""
- (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Whitespace, Operator)),
- (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Whitespace, Operator)),
- (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Whitespace, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Whitespace, Operator)),
- # Names
- (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
- (r'\$\w+', Name.Variable.Global),
- (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
- (r'\$-[0adFiIlpvw]', Name.Variable.Global),
- (r'::', Operator),
- include('strings'),
- # chars
- (r'\?(\\[MC]-)*' # modifiers
- r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
- r'(?!\w)',
- String.Char),
- (r'[A-Z]\w+', Name.Constant),
- # this is needed because ruby attributes can look
- # like keywords (class) or like this: ` ?!?
- (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
- bygroups(Operator, Name.Operator)),
- (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
- bygroups(Operator, Name)),
- (r'[a-zA-Z_]\w*[!?]?', Name),
- (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&!^|~]=?', Operator),
- (r'[(){};,/?:\\]', Punctuation),
- (r'\s+', Whitespace)
- ],
- 'funcname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'(?:([a-zA-Z_]\w*)(\.))?' # optional scope name, like "self."
- r'('
- r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?' # method name
- r'|!=|!~|=~|\*\*?|[-+!~]@?|[/%&|^]|<=>|<[<=]?|>[>=]?|===?' # or operator override
- r'|\[\]=?' # or element reference/assignment override
- r'|`' # or the undocumented backtick override
- r')',
- bygroups(Name.Class, Operator, Name.Function), '#pop'),
- default('#pop')
- ],
- 'classname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'<<', Operator, '#pop'),
- (r'[A-Z_]\w*', Name.Class, '#pop'),
- default('#pop')
- ],
- 'defexpr': [
- (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
- (r'\(', Operator, '#push'),
- include('root')
- ],
- 'in-intp': [
- (r'\{', String.Interpol, '#push'),
- (r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- 'string-intp': [
- (r'#\{', String.Interpol, 'in-intp'),
- (r'#@@?[a-zA-Z_]\w*', String.Interpol),
- (r'#\$[a-zA-Z_]\w*', String.Interpol)
- ],
- 'string-intp-escaped': [
- include('string-intp'),
- (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
- String.Escape)
- ],
- 'interpolated-regex': [
- include('string-intp'),
- (r'[\\#]', String.Regex),
- (r'[^\\#]+', String.Regex),
- ],
- 'interpolated-string': [
- include('string-intp'),
- (r'[\\#]', String.Other),
- (r'[^\\#]+', String.Other),
- ],
- 'multiline-regex': [
- include('string-intp'),
- (r'\\\\', String.Regex),
- (r'\\/', String.Regex),
- (r'[\\#]', String.Regex),
- (r'[^\\/#]+', String.Regex),
- (r'/[mixounse]*', String.Regex, '#pop'),
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
- tokens.update(gen_rubystrings_rules())
-
- def analyse_text(text):
- return shebang_matches(text, r'ruby(1\.\d)?')
-
-
-class RubyConsoleLexer(Lexer):
- """
- For Ruby interactive console (**irb**) output.
- """
- name = 'Ruby irb session'
- aliases = ['rbcon', 'irb']
- mimetypes = ['text/x-ruby-shellsession']
- _example = 'rbcon/console'
-
- _prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
- r'|>> |\?> ')
-
- def get_tokens_unprocessed(self, text):
- rblexer = RubyLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- yield from do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- yield from do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode))
-
-
-class FancyLexer(RegexLexer):
- """
- Pygments Lexer For Fancy.
-
- Fancy is a self-hosted, pure object-oriented, dynamic,
- class-based, concurrent general-purpose programming language
- running on Rubinius, the Ruby VM.
-
- .. versionadded:: 1.5
- """
- name = 'Fancy'
- url = 'https://github.com/bakkdoor/fancy'
- filenames = ['*.fy', '*.fancypack']
- aliases = ['fancy', 'fy']
- mimetypes = ['text/x-fancysrc']
-
- tokens = {
- # copied from PerlLexer:
- 'balanced-regex': [
- (r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\s+', Whitespace),
-
- # balanced delimiters (copied from PerlLexer):
- (r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
- (r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
- (r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
-
- # Comments
- (r'#(.*?)\n', Comment.Single),
- # Symbols
- (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
- # Multi-line DoubleQuotedString
- (r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
- # DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # keywords
- (r'(def|class|try|catch|finally|retry|return|return_local|match|'
- r'case|->|=>)\b', Keyword),
- # constants
- (r'(self|super|nil|false|true)\b', Name.Constant),
- (r'[(){};,/?|:\\]', Punctuation),
- # names
- (words((
- 'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
- 'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
- 'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
- 'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
- Name.Builtin),
- # functions
- (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
- # operators, must be below functions
- (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
- (r'[A-Z]\w*', Name.Constant),
- (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
- (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- ('@@?', Operator),
- (r'[a-zA-Z_]\w*', Name),
- # numbers - / checks are necessary to avoid mismarking regexes,
- # see comment in RubyLexer
- (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Whitespace, Operator)),
- (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Whitespace, Operator)),
- (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Whitespace, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Whitespace, Operator)),
- (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ]
- }
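
A minimal sketch of the heredoc handling implemented by heredoc_callback above (assuming RubyLexer is still importable from pygments.lexers): the delimiter and the deferred body lines should come back as String.Delimiter and String.Heredoc tokens.

from pygments.lexers import RubyLexer
from pygments.token import String

code = 'greeting = <<~TEXT\n  hello\nTEXT\n'
heredoc_parts = [(toktype, value)
                 for toktype, value in RubyLexer().get_tokens(code)
                 if toktype in (String.Heredoc, String.Delimiter)]
print(heredoc_parts)  # expect the TEXT delimiters plus the indented body line
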
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rust.py b/venv/lib/python3.11/site-packages/pygments/lexers/rust.py
deleted file mode 100644
index db68bb3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rust.py
+++ /dev/null
@@ -1,223 +0,0 @@
-"""
- pygments.lexers.rust
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Rust language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['RustLexer']
-
-
-class RustLexer(RegexLexer):
- """
- Lexer for the Rust programming language (version 1.47).
-
- .. versionadded:: 1.6
- """
- name = 'Rust'
- url = 'https://www.rust-lang.org/'
- filenames = ['*.rs', '*.rs.in']
- aliases = ['rust', 'rs']
- mimetypes = ['text/rust', 'text/x-rust']
-
- keyword_types = (words((
- 'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
- 'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool',
- ), suffix=r'\b'), Keyword.Type)
-
- builtin_funcs_types = (words((
- 'Copy', 'Send', 'Sized', 'Sync', 'Unpin',
- 'Drop', 'Fn', 'FnMut', 'FnOnce', 'drop',
- 'Box', 'ToOwned', 'Clone',
- 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
- 'AsRef', 'AsMut', 'Into', 'From', 'Default',
- 'Iterator', 'Extend', 'IntoIterator', 'DoubleEndedIterator',
- 'ExactSizeIterator',
- 'Option', 'Some', 'None',
- 'Result', 'Ok', 'Err',
- 'String', 'ToString', 'Vec',
- ), suffix=r'\b'), Name.Builtin)
-
- builtin_macros = (words((
- 'asm', 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column',
- 'compile_error', 'concat', 'concat_idents', 'dbg', 'debug_assert',
- 'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln',
- 'file', 'format', 'format_args', 'format_args_nl', 'global_asm',
- 'include', 'include_bytes', 'include_str',
- 'is_aarch64_feature_detected',
- 'is_arm_feature_detected',
- 'is_mips64_feature_detected',
- 'is_mips_feature_detected',
- 'is_powerpc64_feature_detected',
- 'is_powerpc_feature_detected',
- 'is_x86_feature_detected',
- 'line', 'llvm_asm', 'log_syntax', 'macro_rules', 'matches',
- 'module_path', 'option_env', 'panic', 'print', 'println', 'stringify',
- 'thread_local', 'todo', 'trace_macros', 'unimplemented', 'unreachable',
- 'vec', 'write', 'writeln',
- ), suffix=r'!'), Name.Function.Magic)
-
- tokens = {
- 'root': [
- # rust allows a file to start with a shebang, but if the first line
- # starts with #![ then it's not a shebang but a crate attribute.
- (r'#![^[\r\n].*$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- # Whitespace and Comments
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'//!.*?\n', String.Doc),
- (r'///(\n|[^/].*?\n)', String.Doc),
- (r'//(.*?)\n', Comment.Single),
- (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
- (r'/\*!', String.Doc, 'doccomment'),
- (r'/\*', Comment.Multiline, 'comment'),
-
- # Macro parameters
- (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
- # Keywords
- (words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn',
- 'else', 'extern', 'for', 'if', 'impl', 'in', 'loop',
- 'match', 'move', 'mut', 'pub', 'ref', 'return', 'static',
- 'super', 'trait', 'unsafe', 'use', 'where', 'while'),
- suffix=r'\b'), Keyword),
- (words(('abstract', 'become', 'do', 'final', 'macro', 'override',
- 'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'),
- suffix=r'\b'), Keyword.Reserved),
- (r'(true|false)\b', Keyword.Constant),
- (r'self\b', Name.Builtin.Pseudo),
- (r'mod\b', Keyword, 'modname'),
- (r'let\b', Keyword.Declaration),
- (r'fn\b', Keyword, 'funcname'),
- (r'(struct|enum|type|union)\b', Keyword, 'typename'),
- (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
- keyword_types,
- (r'[sS]elf\b', Name.Builtin.Pseudo),
- # Prelude (taken from Rust's src/libstd/prelude.rs)
- builtin_funcs_types,
- builtin_macros,
- # Path separators, so types don't catch them.
- (r'::\b', Text),
- # Types in positions.
- (r'(?::|->)', Text, 'typename'),
- # Labels
- (r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
- bygroups(Keyword, Text.Whitespace, Name.Label)),
-
- # Character literals
- (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
- String.Char),
- (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
- String.Char),
-
- # Binary literals
- (r'0b[01_]+', Number.Bin, 'number_lit'),
- # Octal literals
- (r'0o[0-7_]+', Number.Oct, 'number_lit'),
- # Hexadecimal literals
- (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
- # Decimal literals
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
- 'number_lit'),
- (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
-
- # String literals
- (r'b"', String, 'bytestring'),
- (r'"', String, 'string'),
- (r'(?s)b?r(#*)".*?"\1', String),
-
- # Lifetime names
- (r"'", Operator, 'lifetime'),
-
- # Operators and Punctuation
- (r'\.\.=?', Operator),
- (r'[{}()\[\],.;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?]', Operator),
-
- # Identifiers
- (r'[a-zA-Z_]\w*', Name),
- # Raw identifiers
- (r'r#[a-zA-Z_]\w*', Name),
-
- # Attributes
- (r'#!?\[', Comment.Preproc, 'attribute['),
-
- # Misc
- # Lone hashes: not used in Rust syntax, but allowed in macro
- # arguments, most famously for quote::quote!()
- (r'#', Text),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'doccomment': [
- (r'[^*/]+', String.Doc),
- (r'/\*', String.Doc, '#push'),
- (r'\*/', String.Doc, '#pop'),
- (r'[*/]', String.Doc),
- ],
- 'modname': [
- (r'\s+', Text),
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'funcname': [
- (r'\s+', Text),
- (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
- default('#pop'),
- ],
- 'typename': [
- (r'\s+', Text),
- (r'&', Keyword.Pseudo),
- (r"'", Operator, 'lifetime'),
- builtin_funcs_types,
- keyword_types,
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'lifetime': [
- (r"(static|_)", Name.Builtin),
- (r"[a-zA-Z_]+\w*", Name.Attribute),
- default('#pop'),
- ],
- 'number_lit': [
- (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
- (r'f(32|64)', Keyword, '#pop'),
- default('#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String),
- ],
- 'bytestring': [
- (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
- include('string'),
- ],
- 'attribute_common': [
- (r'"', String, 'string'),
- (r'\[', Comment.Preproc, 'attribute['),
- ],
- 'attribute[': [
- include('attribute_common'),
- (r'\]', Comment.Preproc, '#pop'),
- (r'[^"\]\[]+', Comment.Preproc),
- ],
- }
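
A minimal sketch of two details from the rules above, assuming RustLexer is still importable: a leading #![...] is treated as a crate attribute rather than a shebang, and the number_lit state lexes the u64 suffix of an integer literal as a separate Keyword token.

from pygments.lexers import RustLexer

code = '#![allow(dead_code)]\nfn main() { let x = 1_000u64; }\n'
for tokentype, value in RustLexer().get_tokens(code):
    print(tokentype, repr(value))
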
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sas.py b/venv/lib/python3.11/site-packages/pygments/lexers/sas.py
deleted file mode 100644
index c34066b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sas.py
+++ /dev/null
@@ -1,227 +0,0 @@
-"""
- pygments.lexers.sas
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for SAS.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Comment, Keyword, Name, Number, String, Text, \
- Other, Generic
-
-__all__ = ['SASLexer']
-
-
-class SASLexer(RegexLexer):
- """
- For SAS files.
-
- .. versionadded:: 2.2
- """
- # Syntax from syntax/sas.vim by James Kidd <james.kidd@covance.com>
-
- name = 'SAS'
- aliases = ['sas']
- filenames = ['*.SAS', '*.sas']
- mimetypes = ['text/x-sas', 'text/sas', 'application/x-sas']
- flags = re.IGNORECASE | re.MULTILINE
-
- builtins_macros = (
- "bquote", "nrbquote", "cmpres", "qcmpres", "compstor", "datatyp",
- "display", "do", "else", "end", "eval", "global", "goto", "if",
- "index", "input", "keydef", "label", "left", "length", "let",
- "local", "lowcase", "macro", "mend", "nrquote",
- "nrstr", "put", "qleft", "qlowcase", "qscan",
- "qsubstr", "qsysfunc", "qtrim", "quote", "qupcase", "scan",
- "str", "substr", "superq", "syscall", "sysevalf", "sysexec",
- "sysfunc", "sysget", "syslput", "sysprod", "sysrc", "sysrput",
- "then", "to", "trim", "unquote", "until", "upcase", "verify",
- "while", "window"
- )
-
- builtins_conditionals = (
- "do", "if", "then", "else", "end", "until", "while"
- )
-
- builtins_statements = (
- "abort", "array", "attrib", "by", "call", "cards", "cards4",
- "catname", "continue", "datalines", "datalines4", "delete", "delim",
- "delimiter", "display", "dm", "drop", "endsas", "error", "file",
- "filename", "footnote", "format", "goto", "in", "infile", "informat",
- "input", "keep", "label", "leave", "length", "libname", "link",
- "list", "lostcard", "merge", "missing", "modify", "options", "output",
- "out", "page", "put", "redirect", "remove", "rename", "replace",
- "retain", "return", "select", "set", "skip", "startsas", "stop",
- "title", "update", "waitsas", "where", "window", "x", "systask"
- )
-
- builtins_sql = (
- "add", "and", "alter", "as", "cascade", "check", "create",
- "delete", "describe", "distinct", "drop", "foreign", "from",
- "group", "having", "index", "insert", "into", "in", "key", "like",
- "message", "modify", "msgtype", "not", "null", "on", "or",
- "order", "primary", "references", "reset", "restrict", "select",
- "set", "table", "unique", "update", "validate", "view", "where"
- )
-
- builtins_functions = (
- "abs", "addr", "airy", "arcos", "arsin", "atan", "attrc",
- "attrn", "band", "betainv", "blshift", "bnot", "bor",
- "brshift", "bxor", "byte", "cdf", "ceil", "cexist", "cinv",
- "close", "cnonct", "collate", "compbl", "compound",
- "compress", "cos", "cosh", "css", "curobs", "cv", "daccdb",
- "daccdbsl", "daccsl", "daccsyd", "dacctab", "dairy", "date",
- "datejul", "datepart", "datetime", "day", "dclose", "depdb",
- "depdbsl", "depsl", "depsyd",
- "deptab", "dequote", "dhms", "dif", "digamma",
- "dim", "dinfo", "dnum", "dopen", "doptname", "doptnum",
- "dread", "dropnote", "dsname", "erf", "erfc", "exist", "exp",
- "fappend", "fclose", "fcol", "fdelete", "fetch", "fetchobs",
- "fexist", "fget", "fileexist", "filename", "fileref",
- "finfo", "finv", "fipname", "fipnamel", "fipstate", "floor",
- "fnonct", "fnote", "fopen", "foptname", "foptnum", "fpoint",
- "fpos", "fput", "fread", "frewind", "frlen", "fsep", "fuzz",
- "fwrite", "gaminv", "gamma", "getoption", "getvarc", "getvarn",
- "hbound", "hms", "hosthelp", "hour", "ibessel", "index",
- "indexc", "indexw", "input", "inputc", "inputn", "int",
- "intck", "intnx", "intrr", "irr", "jbessel", "juldate",
- "kurtosis", "lag", "lbound", "left", "length", "lgamma",
- "libname", "libref", "log", "log10", "log2", "logpdf", "logpmf",
- "logsdf", "lowcase", "max", "mdy", "mean", "min", "minute",
- "mod", "month", "mopen", "mort", "n", "netpv", "nmiss",
- "normal", "note", "npv", "open", "ordinal", "pathname",
- "pdf", "peek", "peekc", "pmf", "point", "poisson", "poke",
- "probbeta", "probbnml", "probchi", "probf", "probgam",
- "probhypr", "probit", "probnegb", "probnorm", "probt",
- "put", "putc", "putn", "qtr", "quote", "ranbin", "rancau",
- "ranexp", "rangam", "range", "rank", "rannor", "ranpoi",
- "rantbl", "rantri", "ranuni", "repeat", "resolve", "reverse",
- "rewind", "right", "round", "saving", "scan", "sdf", "second",
- "sign", "sin", "sinh", "skewness", "soundex", "spedis",
- "sqrt", "std", "stderr", "stfips", "stname", "stnamel",
- "substr", "sum", "symget", "sysget", "sysmsg", "sysprod",
- "sysrc", "system", "tan", "tanh", "time", "timepart", "tinv",
- "tnonct", "today", "translate", "tranwrd", "trigamma",
- "trim", "trimn", "trunc", "uniform", "upcase", "uss", "var",
- "varfmt", "varinfmt", "varlabel", "varlen", "varname",
- "varnum", "varray", "varrayx", "vartype", "verify", "vformat",
- "vformatd", "vformatdx", "vformatn", "vformatnx", "vformatw",
- "vformatwx", "vformatx", "vinarray", "vinarrayx", "vinformat",
- "vinformatd", "vinformatdx", "vinformatn", "vinformatnx",
- "vinformatw", "vinformatwx", "vinformatx", "vlabel",
- "vlabelx", "vlength", "vlengthx", "vname", "vnamex", "vtype",
- "vtypex", "weekday", "year", "yyq", "zipfips", "zipname",
- "zipnamel", "zipstate"
- )
-
- tokens = {
- 'root': [
- include('comments'),
- include('proc-data'),
- include('cards-datalines'),
- include('logs'),
- include('general'),
- (r'.', Text),
- ],
-    # SAS comments can span lines in either form, but a * comment is ended by ;
- 'comments': [
- (r'^\s*\*.*?;', Comment),
- (r'/\*.*?\*/', Comment),
- (r'^\s*\*(.|\n)*?;', Comment.Multiline),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- ],
- # Special highlight for proc, data, quit, run
- 'proc-data': [
- (r'(^|;)\s*(proc \w+|data|run|quit)[\s;]',
- Keyword.Reserved),
- ],
- # Special highlight cards and datalines
- 'cards-datalines': [
- (r'^\s*(datalines|cards)\s*;\s*$', Keyword, 'data'),
- ],
- 'data': [
- (r'(.|\n)*^\s*;\s*$', Other, '#pop'),
- ],
- # Special highlight for put NOTE|ERROR|WARNING (order matters)
- 'logs': [
- (r'\n?^\s*%?put ', Keyword, 'log-messages'),
- ],
- 'log-messages': [
- (r'NOTE(:|-).*', Generic, '#pop'),
- (r'WARNING(:|-).*', Generic.Emph, '#pop'),
- (r'ERROR(:|-).*', Generic.Error, '#pop'),
- include('general'),
- ],
- 'general': [
- include('keywords'),
- include('vars-strings'),
- include('special'),
- include('numbers'),
- ],
- # Keywords, statements, functions, macros
- 'keywords': [
- (words(builtins_statements,
- prefix = r'\b',
- suffix = r'\b'),
- Keyword),
- (words(builtins_sql,
- prefix = r'\b',
- suffix = r'\b'),
- Keyword),
- (words(builtins_conditionals,
- prefix = r'\b',
- suffix = r'\b'),
- Keyword),
- (words(builtins_macros,
- prefix = r'%',
- suffix = r'\b'),
- Name.Builtin),
- (words(builtins_functions,
- prefix = r'\b',
- suffix = r'\('),
- Name.Builtin),
- ],
- # Strings and user-defined variables and macros (order matters)
- 'vars-strings': [
- (r'&[a-z_]\w{0,31}\.?', Name.Variable),
- (r'%[a-z_]\w{0,31}', Name.Function),
- (r'\'', String, 'string_squote'),
- (r'"', String, 'string_dquote'),
- ],
- 'string_squote': [
- ('\'', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape),
- # AFAIK, macro variables are not evaluated in single quotes
- # (r'&', Name.Variable, 'validvar'),
- (r'[^$\'\\]+', String),
- (r'[$\'\\]', String),
- ],
- 'string_dquote': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape),
- (r'&', Name.Variable, 'validvar'),
- (r'[^$&"\\]+', String),
- (r'[$"\\]', String),
- ],
- 'validvar': [
- (r'[a-z_]\w{0,31}\.?', Name.Variable, '#pop'),
- ],
- # SAS numbers and special variables
- 'numbers': [
- (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b',
- Number),
- ],
- 'special': [
- (r'(null|missing|_all_|_automatic_|_character_|_n_|'
- r'_infile_|_name_|_null_|_numeric_|_user_|_webout_)',
- Keyword.Constant),
- ],
- # 'operators': [
- # (r'(-|=|<=|>=|<|>|<>|&|!=|'
- # r'\||\*|\+|\^|/|!|~|~=)', Operator)
- # ],
- }
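
A minimal sketch of the string states above, assuming SASLexer is still importable: an &mvar. macro reference is picked out as Name.Variable inside double quotes but is left as plain string content inside single quotes.

from pygments.lexers import SASLexer
from pygments.token import Name

code = 'title "value is &mvar."; footnote \'value is &mvar.\';\n'
macro_refs = [value for toktype, value in SASLexer().get_tokens(code)
              if toktype in Name.Variable]
print(macro_refs)  # expect only the pieces of the double-quoted reference
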
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/savi.py b/venv/lib/python3.11/site-packages/pygments/lexers/savi.py
deleted file mode 100644
index 48927f3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/savi.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""
- pygments.lexers.savi
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Savi.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
-from pygments.token import Whitespace, Keyword, Name, String, Number, \
- Operator, Punctuation, Comment, Generic, Error
-
-__all__ = ['SaviLexer']
-
-
-# The canonical version of this file can be found in the following repository,
-# where it is kept in sync with any language changes, as well as the other
-# pygments-like lexers that are maintained for use with other tools:
-# - https://github.com/savi-lang/savi/blob/main/tooling/pygments/lexers/savi.py
-#
-# If you're changing this file in the pygments repository, please ensure that
-# any changes you make are also propagated to the official Savi repository,
-# in order to avoid accidental clobbering of your changes later when an update
-# from the Savi repository flows forward into the pygments repository.
-#
-# If you're changing this file in the Savi repository, please ensure that
-# any changes you make are also reflected in the other pygments-like lexers
-# (rouge, vscode, etc) so that all of the lexers can be kept cleanly in sync.
-
-class SaviLexer(RegexLexer):
- """
- For Savi source code.
-
- .. versionadded: 2.10
- """
-
- name = 'Savi'
- url = 'https://github.com/savi-lang/savi'
- aliases = ['savi']
- filenames = ['*.savi']
-
- tokens = {
- "root": [
- # Line Comment
- (r'//.*?$', Comment.Single),
-
- # Doc Comment
- (r'::.*?$', Comment.Single),
-
- # Capability Operator
- (r'(\')(\w+)(?=[^\'])', bygroups(Operator, Name)),
-
- # Double-Quote String
- (r'\w?"', String.Double, "string.double"),
-
- # Single-Char String
- (r"'", String.Char, "string.char"),
-
- # Type Name
- (r'(_?[A-Z]\w*)', Name.Class),
-
- # Nested Type Name
- (r'(\.)(\s*)(_?[A-Z]\w*)', bygroups(Punctuation, Whitespace, Name.Class)),
-
- # Declare
- (r'^([ \t]*)(:\w+)',
- bygroups(Whitespace, Name.Tag),
- "decl"),
-
- # Error-Raising Calls/Names
- (r'((\w+|\+|\-|\*)\!)', Generic.Deleted),
-
- # Numeric Values
- (r'\b\d([\d_]*(\.[\d_]+)?)\b', Number),
-
- # Hex Numeric Values
- (r'\b0x([0-9a-fA-F_]+)\b', Number.Hex),
-
- # Binary Numeric Values
- (r'\b0b([01_]+)\b', Number.Bin),
-
- # Function Call (with braces)
- (r'\w+(?=\()', Name.Function),
-
- # Function Call (with receiver)
- (r'(\.)(\s*)(\w+)', bygroups(Punctuation, Whitespace, Name.Function)),
-
- # Function Call (with self receiver)
- (r'(@)(\w+)', bygroups(Punctuation, Name.Function)),
-
- # Parenthesis
- (r'\(', Punctuation, "root"),
- (r'\)', Punctuation, "#pop"),
-
- # Brace
- (r'\{', Punctuation, "root"),
- (r'\}', Punctuation, "#pop"),
-
- # Bracket
- (r'\[', Punctuation, "root"),
- (r'(\])(\!)', bygroups(Punctuation, Generic.Deleted), "#pop"),
- (r'\]', Punctuation, "#pop"),
-
- # Punctuation
- (r'[,;:\.@]', Punctuation),
-
- # Piping Operators
- (r'(\|\>)', Operator),
-
- # Branching Operators
- (r'(\&\&|\|\||\?\?|\&\?|\|\?|\.\?)', Operator),
-
- # Comparison Operators
- (r'(\<\=\>|\=\~|\=\=|\<\=|\>\=|\<|\>)', Operator),
-
- # Arithmetic Operators
- (r'(\+|\-|\/|\*|\%)', Operator),
-
- # Assignment Operators
- (r'(\=)', Operator),
-
- # Other Operators
- (r'(\!|\<\<|\<|\&|\|)', Operator),
-
- # Identifiers
- (r'\b\w+\b', Name),
-
- # Whitespace
- (r'[ \t\r]+\n*|\n+', Whitespace),
- ],
-
- # Declare (nested rules)
- "decl": [
- (r'\b[a-z_]\w*\b(?!\!)', Keyword.Declaration),
- (r':', Punctuation, "#pop"),
- (r'\n', Whitespace, "#pop"),
- include("root"),
- ],
-
- # Double-Quote String (nested rules)
- "string.double": [
- (r'\\\(', String.Interpol, "string.interpolation"),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\[bfnrt\\\']', String.Escape),
- (r'\\"', String.Escape),
- (r'"', String.Double, "#pop"),
- (r'[^\\"]+', String.Double),
- (r'.', Error),
- ],
-
- # Single-Char String (nested rules)
- "string.char": [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\[bfnrt\\\']', String.Escape),
- (r"\\'", String.Escape),
- (r"'", String.Char, "#pop"),
- (r"[^\\']+", String.Char),
- (r'.', Error),
- ],
-
- # Interpolation inside String (nested rules)
- "string.interpolation": [
- (r"\)", String.Interpol, "#pop"),
- include("root"),
- ]
- }
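
A minimal sketch of the nested string rules above, assuming SaviLexer is still importable; the \(...) section inside the double-quoted string is re-lexed with the root rules via the string.interpolation state (the Savi snippet itself is only illustrative).

from pygments.lexers import SaviLexer

code = ':actor Main\n  :new (env)\n    env.out.print("total: \\(1 + 2)")\n'
for tokentype, value in SaviLexer().get_tokens(code):
    print(tokentype, repr(value))
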
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py b/venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py
deleted file mode 100644
index 90478ac..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""
- pygments.lexers.scdoc
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for scdoc, a simple man page generator.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this
-from pygments.token import Text, Comment, Keyword, String, Generic
-
-__all__ = ['ScdocLexer']
-
-
-class ScdocLexer(RegexLexer):
- """
- `scdoc` is a simple man page generator for POSIX systems written in C99.
-
- .. versionadded:: 2.5
- """
- name = 'scdoc'
- url = 'https://git.sr.ht/~sircmpwn/scdoc'
- aliases = ['scdoc', 'scd']
- filenames = ['*.scd', '*.scdoc']
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- # comment
- (r'^(;.+\n)', bygroups(Comment)),
-
- # heading with pound prefix
- (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
- (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
- # bulleted lists
- (r'^(\s*)([*-])(\s)(.+\n)',
- bygroups(Text, Keyword, Text, using(this, state='inline'))),
- # numbered lists
- (r'^(\s*)(\.+\.)( .+\n)',
- bygroups(Text, Keyword, using(this, state='inline'))),
- # quote
- (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
- # text block
- (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
-
- include('inline'),
- ],
- 'inline': [
- # escape
- (r'\\.', Text),
- # underlines
- (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
- # bold
- (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
- # inline code
- (r'`[^`]+`', String.Backtick),
-
- # general text, must come last!
- (r'[^\\\s]+', Text),
- (r'.', Text),
- ],
- }
-
- def analyse_text(text):
- """We checks for bold and underline text with * and _. Also
- every scdoc file must start with a strictly defined first line."""
- result = 0
-
- if '*' in text:
- result += 0.01
-
- if '_' in text:
- result += 0.01
-
- # name(section) ["left_footer" ["center_header"]]
- first_line = text.partition('\n')[0]
- scdoc_preamble_pattern = r'^.*\([1-7]\)( "[^"]+"){0,2}$'
-
- if re.search(scdoc_preamble_pattern, first_line):
- result += 0.5
-
- return result
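
A minimal sketch of the analyse_text heuristic above, assuming ScdocLexer is still importable; a first line matching the name(section) preamble pattern contributes 0.5 to the score, with small bonuses when * or _ appear in the text.

from pygments.lexers import ScdocLexer

text = 'scdoc(5) "scdoc" "File Formats Manual"\n\n# NAME\n\nscdoc - document format\n'
print(ScdocLexer.analyse_text(text))  # expect 0.5 for the preamble match alone
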
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/scripting.py b/venv/lib/python3.11/site-packages/pygments/lexers/scripting.py
deleted file mode 100644
index eab7ec9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/scripting.py
+++ /dev/null
@@ -1,1286 +0,0 @@
-"""
- pygments.lexers.scripting
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for scripting and embedded languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
- words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Whitespace, Other
-from pygments.util import get_bool_opt, get_list_opt
-
-__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
- 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
- 'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
-
-
-class LuaLexer(RegexLexer):
- """
- For Lua source code.
-
- Additional options accepted:
-
- `func_name_highlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabled_modules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted.
-
- To get a list of allowed modules have a look into the
- `_lua_builtins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._lua_builtins import MODULES
- >>> MODULES.keys()
- ['string', 'coroutine', 'modules', 'io', 'basic', ...]
- """
-
- name = 'Lua'
- url = 'https://www.lua.org/'
- aliases = ['lua']
- filenames = ['*.lua', '*.wlua']
- mimetypes = ['text/x-lua', 'application/x-lua']
-
- _comment_multiline = r'(?:--\[(?P<level>=*)\[[\w\W]*?\](?P=level)\])'
- _comment_single = r'(?:--.*$)'
- _space = r'(?:\s+)'
- _s = r'(?:%s|%s|%s)' % (_comment_multiline, _comment_single, _space)
- _name = r'(?:[^\W\d]\w*)'
-
- tokens = {
- 'root': [
- # Lua allows a file to start with a shebang.
- (r'#!.*', Comment.Preproc),
- default('base'),
- ],
- 'ws': [
- (_comment_multiline, Comment.Multiline),
- (_comment_single, Comment.Single),
- (_space, Text),
- ],
- 'base': [
- include('ws'),
-
- (r'(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?', Number.Hex),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'\d+', Number.Integer),
-
- # multiline strings
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
-
- (r'::', Punctuation, 'label'),
- (r'\.{3}', Punctuation),
- (r'[=<>|~&+\-*/%#^]+|\.\.', Operator),
- (r'[\[\]{}().,:;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
-
- ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
- r'while)\b', Keyword.Reserved),
- (r'goto\b', Keyword.Reserved, 'goto'),
- (r'(local)\b', Keyword.Declaration),
- (r'(true|false|nil)\b', Keyword.Constant),
-
- (r'(function)\b', Keyword.Reserved, 'funcname'),
-
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
-
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
-
- 'funcname': [
- include('ws'),
- (r'[.:]', Punctuation),
- (r'%s(?=%s*[.:])' % (_name, _s), Name.Class),
- (_name, Name.Function, '#pop'),
- # inline function
- (r'\(', Punctuation, '#pop'),
- ],
-
- 'goto': [
- include('ws'),
- (_name, Name.Label, '#pop'),
- ],
-
- 'label': [
- include('ws'),
- (r'::', Punctuation, '#pop'),
- (_name, Name.Label),
- ],
-
- 'stringescape': [
- (r'\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|'
- r'u\{[0-9a-fA-F]+\})', String.Escape),
- ],
-
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"[^\\']+", String.Single),
- ],
-
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'[^\\"]+', String.Double),
- ]
- }
-
- def __init__(self, **options):
- self.func_name_highlighting = get_bool_opt(
- options, 'func_name_highlighting', True)
- self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
-
- self._functions = set()
- if self.func_name_highlighting:
- from pygments.lexers._lua_builtins import MODULES
- for mod, func in MODULES.items():
- if mod not in self.disabled_modules:
- self._functions.update(func)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- elif '.' in value:
- a, b = value.split('.')
- yield index, Name, a
- yield index + len(a), Punctuation, '.'
- yield index + len(a) + 1, Name, b
- continue
- yield index, token, value
-
-class MoonScriptLexer(LuaLexer):
- """
- For MoonScript source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'MoonScript'
- url = 'http://moonscript.org'
- aliases = ['moonscript', 'moon']
- filenames = ['*.moon']
- mimetypes = ['text/x-moonscript', 'application/x-moonscript']
-
- tokens = {
- 'root': [
- (r'#!(.*?)$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- ('--.*$', Comment.Single),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'(?i)0x[0-9a-f]*', Number.Hex),
- (r'\d+', Number.Integer),
- (r'\n', Whitespace),
- (r'[^\S\n]+', Text),
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
- (r'(->|=>)', Name.Function),
- (r':[a-zA-Z_]\w*', Name.Variable),
- (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
- (r'[;,]', Punctuation),
- (r'[\[\]{}()]', Keyword.Type),
- (r'[a-zA-Z_]\w*:', Name.Variable),
- (words((
- 'class', 'extends', 'if', 'then', 'super', 'do', 'with',
- 'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
- 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
- 'break'), suffix=r'\b'),
- Keyword),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(and|or|not)\b', Operator.Word),
- (r'(self)\b', Name.Builtin.Pseudo),
- (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
- (r'[A-Z]\w*', Name.Class), # proper name
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
- 'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
- ],
- 'sqs': [
- ("'", String.Single, '#pop'),
- ("[^']+", String)
- ],
- 'dqs': [
- ('"', String.Double, '#pop'),
- ('[^"]+', String)
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # set . as Operator instead of Punctuation
- for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
- if token == Punctuation and value == ".":
- token = Operator
- yield index, token, value
-
-
-class ChaiscriptLexer(RegexLexer):
- """
- For ChaiScript source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'ChaiScript'
- url = 'http://chaiscript.com/'
- aliases = ['chaiscript', 'chai']
- filenames = ['*.chai']
- mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'^\#.*?\n', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'[=+\-*/]', Operator),
- (r'(for|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch'
- r')\b', Keyword, 'slashstartsregex'),
- (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(attr|def|fun)\b', Keyword.Reserved),
- (r'(true|false)\b', Keyword.Constant),
- (r'(eval|throw)\b', Name.Builtin),
- (r'`\S+`', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"', String.Double, 'dqstring'),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ],
- 'dqstring': [
- (r'\$\{[^"}]+?\}', String.Interpol),
- (r'\$', String.Double),
- (r'\\\\', String.Double),
- (r'\\"', String.Double),
- (r'[^\\"$]+', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- }
-
-
-class LSLLexer(RegexLexer):
- """
- For Second Life's Linden Scripting Language source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'LSL'
- aliases = ['lsl']
- filenames = ['*.lsl']
- mimetypes = ['text/x-lsl']
-
- flags = re.MULTILINE
-
- lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
- lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
- lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
- lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
-    lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
- lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
-    lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
- lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
- lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
- lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
- lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
- lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
- lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
- lsl_invalid_illegal = r'\b(?:event)\b'
- lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
- lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
- lsl_reserved_log = r'\b(?:print)\b'
- lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
-
- tokens = {
- 'root':
- [
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String.Double, 'string'),
- (lsl_keywords, Keyword),
- (lsl_types, Keyword.Type),
- (lsl_states, Name.Class),
- (lsl_events, Name.Builtin),
- (lsl_functions_builtin, Name.Function),
- (lsl_constants_float, Keyword.Constant),
- (lsl_constants_integer, Keyword.Constant),
- (lsl_constants_integer_boolean, Keyword.Constant),
- (lsl_constants_rotation, Keyword.Constant),
- (lsl_constants_string, Keyword.Constant),
- (lsl_constants_vector, Keyword.Constant),
- (lsl_invalid_broken, Error),
- (lsl_invalid_deprecated, Error),
- (lsl_invalid_illegal, Error),
- (lsl_invalid_unimplemented, Error),
- (lsl_reserved_godmode, Keyword.Reserved),
- (lsl_reserved_log, Keyword.Reserved),
- (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (lsl_operators, Operator),
- (r':=?', Error),
- (r'[,;{}()\[\]]', Punctuation),
- (r'\n+', Whitespace),
- (r'\s+', Whitespace)
- ],
- 'comment':
- [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'string':
- [
- (r'\\([nt"\\])', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\\.', Error),
- (r'[^"\\]+', String.Double),
- ]
- }
-
-
-class AppleScriptLexer(RegexLexer):
- """
- For AppleScript source code,
- including `AppleScript Studio
- <http://developer.apple.com/documentation/AppleScript/
- Reference/StudioReference>`_.
- Contributed by Andreas Amann <aamann@mac.com>.
-
- .. versionadded:: 1.0
- """
-
- name = 'AppleScript'
- url = 'https://developer.apple.com/library/archive/documentation/AppleScript/Conceptual/AppleScriptLangGuide/introduction/ASLR_intro.html'
- aliases = ['applescript']
- filenames = ['*.applescript']
-
- flags = re.MULTILINE | re.DOTALL
-
- Identifiers = r'[a-zA-Z]\w*'
-
- # XXX: use words() for all of these
- Literals = ('AppleScript', 'current application', 'false', 'linefeed',
- 'missing value', 'pi', 'quote', 'result', 'return', 'space',
- 'tab', 'text item delimiters', 'true', 'version')
- Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
- 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
- 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
- 'text ', 'unit types', '(?:Unicode )?text', 'string')
- BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
- 'paragraph', 'word', 'year')
- HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
- 'aside from', 'at', 'below', 'beneath', 'beside',
- 'between', 'for', 'given', 'instead of', 'on', 'onto',
- 'out of', 'over', 'since')
- Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
- 'choose application', 'choose color', 'choose file( name)?',
- 'choose folder', 'choose from list',
- 'choose remote application', 'clipboard info',
- 'close( access)?', 'copy', 'count', 'current date', 'delay',
- 'delete', 'display (alert|dialog)', 'do shell script',
- 'duplicate', 'exists', 'get eof', 'get volume settings',
- 'info for', 'launch', 'list (disks|folder)', 'load script',
- 'log', 'make', 'mount volume', 'new', 'offset',
- 'open( (for access|location))?', 'path to', 'print', 'quit',
- 'random number', 'read', 'round', 'run( script)?',
- 'say', 'scripting components',
- 'set (eof|the clipboard to|volume)', 'store script',
- 'summarize', 'system attribute', 'system info',
- 'the clipboard', 'time to GMT', 'write', 'quoted form')
- References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
- 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
- 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
- 'before', 'behind', 'every', 'front', 'index', 'last',
- 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
- Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
- "isn't", "isn't equal( to)?", "is not equal( to)?",
- "doesn't equal", "does not equal", "(is )?greater than",
- "comes after", "is not less than or equal( to)?",
- "isn't less than or equal( to)?", "(is )?less than",
- "comes before", "is not greater than or equal( to)?",
- "isn't greater than or equal( to)?",
- "(is )?greater than or equal( to)?", "is not less than",
- "isn't less than", "does not come before",
- "doesn't come before", "(is )?less than or equal( to)?",
- "is not greater than", "isn't greater than",
- "does not come after", "doesn't come after", "starts? with",
- "begins? with", "ends? with", "contains?", "does not contain",
- "doesn't contain", "is in", "is contained by", "is not in",
- "is not contained by", "isn't contained by", "div", "mod",
- "not", "(a )?(ref( to)?|reference to)", "is", "does")
- Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
- 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
- 'try', 'until', 'using terms from', 'while', 'whith',
- 'with timeout( of)?', 'with transaction', 'by', 'continue',
- 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
- Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
- Reserved = ('but', 'put', 'returning', 'the')
- StudioClasses = ('action cell', 'alert reply', 'application', 'box',
- 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
- 'clip view', 'color well', 'color-panel',
- 'combo box( item)?', 'control',
- 'data( (cell|column|item|row|source))?', 'default entry',
- 'dialog reply', 'document', 'drag info', 'drawer',
- 'event', 'font(-panel)?', 'formatter',
- 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
- 'movie( view)?', 'open-panel', 'outline view', 'panel',
- 'pasteboard', 'plugin', 'popup button',
- 'progress indicator', 'responder', 'save-panel',
- 'scroll view', 'secure text field( cell)?', 'slider',
- 'sound', 'split view', 'stepper', 'tab view( item)?',
- 'table( (column|header cell|header view|view))',
- 'text( (field( cell)?|view))?', 'toolbar( item)?',
- 'user-defaults', 'view', 'window')
- StudioEvents = ('accept outline drop', 'accept table drop', 'action',
- 'activated', 'alert ended', 'awake from nib', 'became key',
- 'became main', 'begin editing', 'bounds changed',
- 'cell value', 'cell value changed', 'change cell value',
- 'change item value', 'changed', 'child of item',
- 'choose menu item', 'clicked', 'clicked toolbar item',
- 'closed', 'column clicked', 'column moved',
- 'column resized', 'conclude drop', 'data representation',
- 'deminiaturized', 'dialog ended', 'document nib name',
- 'double clicked', 'drag( (entered|exited|updated))?',
- 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
- 'item value', 'item value changed', 'items changed',
- 'keyboard down', 'keyboard up', 'launched',
- 'load data representation', 'miniaturized', 'mouse down',
- 'mouse dragged', 'mouse entered', 'mouse exited',
- 'mouse moved', 'mouse up', 'moved',
- 'number of browser rows', 'number of items',
- 'number of rows', 'open untitled', 'opened', 'panel ended',
- 'parameters updated', 'plugin loaded', 'prepare drop',
- 'prepare outline drag', 'prepare outline drop',
- 'prepare table drag', 'prepare table drop',
- 'read from file', 'resigned active', 'resigned key',
- 'resigned main', 'resized( sub views)?',
- 'right mouse down', 'right mouse dragged',
- 'right mouse up', 'rows changed', 'scroll wheel',
- 'selected tab view item', 'selection changed',
- 'selection changing', 'should begin editing',
- 'should close', 'should collapse item',
- 'should end editing', 'should expand item',
- 'should open( untitled)?',
- 'should quit( after last window closed)?',
- 'should select column', 'should select item',
- 'should select row', 'should select tab view item',
- 'should selection change', 'should zoom', 'shown',
- 'update menu item', 'update parameters',
- 'update toolbar item', 'was hidden', 'was miniaturized',
- 'will become active', 'will close', 'will dismiss',
- 'will display browser cell', 'will display cell',
- 'will display item cell', 'will display outline cell',
- 'will finish launching', 'will hide', 'will miniaturize',
- 'will move', 'will open', 'will pop up', 'will quit',
- 'will resign active', 'will resize( sub views)?',
- 'will select tab view item', 'will show', 'will zoom',
- 'write to file', 'zoomed')
- StudioCommands = ('animate', 'append', 'call method', 'center',
- 'close drawer', 'close panel', 'display',
- 'display alert', 'display dialog', 'display panel', 'go',
- 'hide', 'highlight', 'increment', 'item for',
- 'load image', 'load movie', 'load nib', 'load panel',
- 'load sound', 'localized string', 'lock focus', 'log',
- 'open drawer', 'path for', 'pause', 'perform action',
- 'play', 'register', 'resume', 'scroll', 'select( all)?',
- 'show', 'size to fit', 'start', 'step back',
- 'step forward', 'stop', 'synchronize', 'unlock focus',
- 'update')
- StudioProperties = ('accepts arrow key', 'action method', 'active',
- 'alignment', 'allowed identifiers',
- 'allows branch selection', 'allows column reordering',
- 'allows column resizing', 'allows column selection',
- 'allows customization',
- 'allows editing text attributes',
- 'allows empty selection', 'allows mixed state',
- 'allows multiple selection', 'allows reordering',
- 'allows undo', 'alpha( value)?', 'alternate image',
- 'alternate increment value', 'alternate title',
- 'animation delay', 'associated file name',
- 'associated object', 'auto completes', 'auto display',
- 'auto enables items', 'auto repeat',
- 'auto resizes( outline column)?',
- 'auto save expanded items', 'auto save name',
- 'auto save table columns', 'auto saves configuration',
- 'auto scroll', 'auto sizes all columns to fit',
- 'auto sizes cells', 'background color', 'bezel state',
- 'bezel style', 'bezeled', 'border rect', 'border type',
- 'bordered', 'bounds( rotation)?', 'box type',
- 'button returned', 'button type',
- 'can choose directories', 'can choose files',
- 'can draw', 'can hide',
- 'cell( (background color|size|type))?', 'characters',
- 'class', 'click count', 'clicked( data)? column',
- 'clicked data item', 'clicked( data)? row',
- 'closeable', 'collating', 'color( (mode|panel))',
- 'command key down', 'configuration',
- 'content(s| (size|view( margins)?))?', 'context',
- 'continuous', 'control key down', 'control size',
- 'control tint', 'control view',
- 'controller visible', 'coordinate system',
- 'copies( on scroll)?', 'corner view', 'current cell',
- 'current column', 'current( field)? editor',
- 'current( menu)? item', 'current row',
- 'current tab view item', 'data source',
- 'default identifiers', 'delta (x|y|z)',
- 'destination window', 'directory', 'display mode',
- 'displayed cell', 'document( (edited|rect|view))?',
- 'double value', 'dragged column', 'dragged distance',
- 'dragged items', 'draws( cell)? background',
- 'draws grid', 'dynamically scrolls', 'echos bullets',
- 'edge', 'editable', 'edited( data)? column',
- 'edited data item', 'edited( data)? row', 'enabled',
- 'enclosing scroll view', 'ending page',
- 'error handling', 'event number', 'event type',
- 'excluded from windows menu', 'executable path',
- 'expanded', 'fax number', 'field editor', 'file kind',
- 'file name', 'file type', 'first responder',
- 'first visible column', 'flipped', 'floating',
- 'font( panel)?', 'formatter', 'frameworks path',
- 'frontmost', 'gave up', 'grid color', 'has data items',
- 'has horizontal ruler', 'has horizontal scroller',
- 'has parent data item', 'has resize indicator',
- 'has shadow', 'has sub menu', 'has vertical ruler',
- 'has vertical scroller', 'header cell', 'header view',
- 'hidden', 'hides when deactivated', 'highlights by',
- 'horizontal line scroll', 'horizontal page scroll',
- 'horizontal ruler view', 'horizontally resizable',
- 'icon image', 'id', 'identifier',
- 'ignores multiple clicks',
- 'image( (alignment|dims when disabled|frame style|scaling))?',
- 'imports graphics', 'increment value',
- 'indentation per level', 'indeterminate', 'index',
- 'integer value', 'intercell spacing', 'item height',
- 'key( (code|equivalent( modifier)?|window))?',
- 'knob thickness', 'label', 'last( visible)? column',
- 'leading offset', 'leaf', 'level', 'line scroll',
- 'loaded', 'localized sort', 'location', 'loop mode',
- 'main( (bunde|menu|window))?', 'marker follows cell',
- 'matrix mode', 'maximum( content)? size',
- 'maximum visible columns',
- 'menu( form representation)?', 'miniaturizable',
- 'miniaturized', 'minimized image', 'minimized title',
- 'minimum column width', 'minimum( content)? size',
- 'modal', 'modified', 'mouse down state',
- 'movie( (controller|file|rect))?', 'muted', 'name',
- 'needs display', 'next state', 'next text',
- 'number of tick marks', 'only tick mark values',
- 'opaque', 'open panel', 'option key down',
- 'outline table column', 'page scroll', 'pages across',
- 'pages down', 'palette label', 'pane splitter',
- 'parent data item', 'parent window', 'pasteboard',
- 'path( (names|separator))?', 'playing',
- 'plays every frame', 'plays selection only', 'position',
- 'preferred edge', 'preferred type', 'pressure',
- 'previous text', 'prompt', 'properties',
- 'prototype cell', 'pulls down', 'rate',
- 'released when closed', 'repeated',
- 'requested print time', 'required file type',
- 'resizable', 'resized column', 'resource path',
- 'returns records', 'reuses columns', 'rich text',
- 'roll over', 'row height', 'rulers visible',
- 'save panel', 'scripts path', 'scrollable',
- 'selectable( identifiers)?', 'selected cell',
- 'selected( data)? columns?', 'selected data items?',
- 'selected( data)? rows?', 'selected item identifier',
- 'selection by rect', 'send action on arrow key',
- 'sends action when done editing', 'separates columns',
- 'separator item', 'sequence number', 'services menu',
- 'shared frameworks path', 'shared support path',
- 'sheet', 'shift key down', 'shows alpha',
- 'shows state by', 'size( mode)?',
- 'smart insert delete enabled', 'sort case sensitivity',
- 'sort column', 'sort order', 'sort type',
- 'sorted( data rows)?', 'sound', 'source( mask)?',
- 'spell checking enabled', 'starting page', 'state',
- 'string value', 'sub menu', 'super menu', 'super view',
- 'tab key traverses cells', 'tab state', 'tab type',
- 'tab view', 'table view', 'tag', 'target( printer)?',
- 'text color', 'text container insert',
- 'text container origin', 'text returned',
- 'tick mark position', 'time stamp',
- 'title(d| (cell|font|height|position|rect))?',
- 'tool tip', 'toolbar', 'trailing offset', 'transparent',
- 'treat packages as directories', 'truncated labels',
- 'types', 'unmodified characters', 'update views',
- 'use sort indicator', 'user defaults',
- 'uses data source', 'uses ruler',
- 'uses threaded animation',
- 'uses title from previous column', 'value wraps',
- 'version',
- 'vertical( (line scroll|page scroll|ruler view))?',
- 'vertically resizable', 'view',
- 'visible( document rect)?', 'volume', 'width', 'window',
- 'windows menu', 'wraps', 'zoomable', 'zoomed')
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'¬\n', String.Escape),
- (r"'s\s+", Text), # This is a possessive, consider moving
- (r'(--|#).*?$', Comment),
- (r'\(\*', Comment.Multiline, 'comment'),
- (r'[(){}!,.:]', Punctuation),
- (r'(«)([^»]+)(»)',
- bygroups(Text, Name.Builtin, Text)),
- (r'\b((?:considering|ignoring)\s*)'
- r'(application responses|case|diacriticals|hyphens|'
- r'numeric strings|punctuation|white space)',
- bygroups(Keyword, Name.Builtin)),
- (r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
- (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
- (r'^(\s*(?:on|end)\s+)'
- r'(%s)' % '|'.join(StudioEvents[::-1]),
- bygroups(Keyword, Name.Function)),
- (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\b(as )(%s)\b' % '|'.join(Classes),
- bygroups(Keyword, Name.Class)),
- (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
- (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(Control), Keyword),
- (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
- (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
- (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
- (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r'\b(%s)\b' % Identifiers, Name.Variable),
- (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
- (r'[-+]?\d+', Number.Integer),
- ],
- 'comment': [
- (r'\(\*', Comment.Multiline, '#push'),
- (r'\*\)', Comment.Multiline, '#pop'),
- ('[^*(]+', Comment.Multiline),
- ('[*(]', Comment.Multiline),
- ],
- }
-
-
-class RexxLexer(RegexLexer):
- """
-    Rexx is a scripting language available for a wide range of different
-    platforms, with its roots on mainframe systems. It is popular for I/O-
-    and data-based tasks and can act as a glue language to bind different
-    applications together.
-
- .. versionadded:: 2.0
- """
- name = 'Rexx'
- url = 'http://www.rexxinfo.org/'
- aliases = ['rexx', 'arexx']
- filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
- mimetypes = ['text/x-rexx']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String, 'string_double'),
- (r"'", String, 'string_single'),
- (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
- (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration)),
- (r'([a-z_]\w*)(\s*)(:)',
- bygroups(Name.Label, Whitespace, Operator)),
- include('function'),
- include('keyword'),
- include('operator'),
- (r'[a-z_]\w*', Text),
- ],
- 'function': [
- (words((
- 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
- 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
- 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
- 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
- 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
- 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
- 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
- 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
- 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
- 'xrange'), suffix=r'(\s*)(\()'),
- bygroups(Name.Builtin, Whitespace, Operator)),
- ],
- 'keyword': [
- (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
- r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
- r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
- r'while)\b', Keyword.Reserved),
- ],
- 'operator': [
- (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
- r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
- r'¬>>|¬>|¬|\.|,)', Operator),
- ],
- 'string_double': [
- (r'[^"\n]+', String),
- (r'""', String),
- (r'"', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'string_single': [
- (r'[^\'\n]+', String),
- (r'\'\'', String),
- (r'\'', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'comment': [
- (r'[^*]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ]
- }
-
- _c = lambda s: re.compile(s, re.MULTILINE)
- _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
- _ADDRESS_PATTERN = _c(r'^\s*address\s+')
- _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
- _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
- _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
- _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
- _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
- PATTERNS_AND_WEIGHTS = (
- (_ADDRESS_COMMAND_PATTERN, 0.2),
- (_ADDRESS_PATTERN, 0.05),
- (_DO_WHILE_PATTERN, 0.1),
- (_ELSE_DO_PATTERN, 0.1),
- (_IF_THEN_DO_PATTERN, 0.1),
- (_PROCEDURE_PATTERN, 0.5),
- (_PARSE_ARG_PATTERN, 0.2),
- )
-
- def analyse_text(text):
- """
- Check for initial comment and patterns that distinguish Rexx from other
- C-like languages.
- """
- if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
- # Header matches MVS Rexx requirements, this is certainly a Rexx
- # script.
- return 1.0
- elif text.startswith('/*'):
- # Header matches general Rexx requirements; the source code might
- # still be any language using C comments such as C++, C# or Java.
- lowerText = text.lower()
- result = sum(weight
- for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
- if pattern.search(lowerText)) + 0.01
- return min(result, 1.0)
-
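# Editor's note: an illustrative sketch, not part of the deleted file. It
# exercises the weighted heuristic above; analyse_text takes no self here, so
# it can be called on the class directly, which is how guess_lexer consults it.
from pygments.lexers import RexxLexer

# A '/* ... rexx' header is treated as conclusive and should score 1.0.
print(RexxLexer.analyse_text("/* rexx */\nsay 'hello'\n"))

# Without that header only the weighted patterns count: a 'parse arg' line
# adds 0.2 on top of the 0.01 base granted for starting with a C-style comment.
print(RexxLexer.analyse_text("/* comment */\nparse arg name\n"))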
-
-class MOOCodeLexer(RegexLexer):
- """
- For MOOCode (the MOO scripting language).
-
- .. versionadded:: 0.9
- """
- name = 'MOOCode'
- url = 'http://www.moo.mud.org/'
- filenames = ['*.moo']
- aliases = ['moocode', 'moo']
- mimetypes = ['text/x-moocode']
-
- tokens = {
- 'root': [
- # Numbers
- (r'(0|[1-9][0-9_]*)', Number.Integer),
- # Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # exceptions
- (r'(E_PERM|E_DIV)', Name.Exception),
- # db-refs
- (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
- # Keywords
- (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
- r'|endwhile|break|continue|return|try'
- r'|except|endtry|finally|in)\b', Keyword),
- # builtins
- (r'(random|length)', Name.Builtin),
- # special variables
- (r'(player|caller|this|args)', Name.Variable.Instance),
- # skip whitespace
- (r'\s+', Text),
- (r'\n', Text),
- # other operators
- (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
- # function call
- (r'(\w+)(\()', bygroups(Name.Function, Operator)),
- # variables
- (r'(\w+)', Text),
- ]
- }
-
-
-class HybrisLexer(RegexLexer):
- """
- For Hybris source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'Hybris'
- aliases = ['hybris', 'hy']
- filenames = ['*.hy', '*.hyb']
- mimetypes = ['text/x-hybris', 'application/x-hybris']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:function|method|operator\s+)+?)'
- r'([a-zA-Z_]\w*)'
- r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
- r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
- (r'(extends|private|protected|public|static|throws|function|method|'
- r'operator)\b', Keyword.Declaration),
- (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
- r'__INC_PATH__)\b', Keyword.Constant),
- (r'(class|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import|include)(\s+)',
- bygroups(Keyword.Namespace, Text), 'import'),
- (words((
- 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
- 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
- 'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
- 'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
- 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
- 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
- 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
- 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
- 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
- 'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
- 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
- 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
- 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
- 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
- 'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
- 'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
- 'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
- 'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
- 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
- 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
- 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
- 'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
- 'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
- 'contains', 'join'), suffix=r'\b'),
- Name.Builtin),
- (words((
- 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
- 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
- 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
- Keyword.Type),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
- def analyse_text(text):
- """public method and private method don't seem to be quite common
- elsewhere."""
- result = 0
- if re.search(r'\b(?:public|private)\s+method\b', text):
- result += 0.01
- return result
-
-
-
-class EasytrieveLexer(RegexLexer):
- """
- Easytrieve Plus is a programming language for extracting, filtering and
-    converting sequential data. Furthermore, it can lay out data for reports.
- It is mainly used on mainframe platforms and can access several of the
- mainframe's native file formats. It is somewhat comparable to awk.
-
- .. versionadded:: 2.1
- """
- name = 'Easytrieve'
- aliases = ['easytrieve']
- filenames = ['*.ezt', '*.mac']
- mimetypes = ['text/x-easytrieve']
- flags = 0
-
- # Note: We cannot use r'\b' at the start and end of keywords because
- # Easytrieve Plus delimiter characters are:
- #
- # * space ( )
- # * apostrophe (')
- # * period (.)
- # * comma (,)
- # * parenthesis ( and )
- # * colon (:)
- #
-    # Additionally, words end once a '*' appears, indicating a comment.
- _DELIMITERS = r' \'.,():\n'
- _DELIMITERS_OR_COMENT = _DELIMITERS + '*'
- _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
- _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
- _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']'
- _OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]'
- _KEYWORDS = [
- 'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
- 'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
- 'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
- 'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
- 'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
- 'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
- 'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
- 'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
- 'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
- 'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
- 'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
- 'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
- 'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
- 'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
- 'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
- 'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
- 'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
- 'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
- 'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
- 'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
- 'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
- 'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
- 'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
- 'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
- 'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
- 'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
- 'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
- 'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
- 'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
- 'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
- 'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
- 'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
- ]
-
- tokens = {
- 'root': [
- (r'\*.*\n', Comment.Single),
- (r'\n+', Whitespace),
- # Macro argument
- (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
- 'after_macro_argument'),
- # Macro call
- (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
- (r'(FILE|MACRO|REPORT)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
- (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
- bygroups(Keyword.Declaration, Operator)),
- (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
- bygroups(Keyword.Reserved, Operator)),
- (_OPERATORS_PATTERN, Operator),
- # Procedure declaration
- (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration, Whitespace)),
- (r'[0-9]+\.[0-9]*', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String),
- (r'\s+', Whitespace),
- # Everything else just belongs to a name
- (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
- ],
- 'after_declaration': [
- (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
- default('#pop'),
- ],
- 'after_macro_argument': [
- (r'\*.*\n', Comment.Single, '#pop'),
- (r'\s+', Whitespace, '#pop'),
- (_OPERATORS_PATTERN, Operator, '#pop'),
- (r"'(''|[^'])*'", String, '#pop'),
- # Everything else just belongs to a name
- (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
- ],
- }
- _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
- _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')
-
- def analyse_text(text):
- """
- Perform a structural analysis for basic Easytrieve constructs.
- """
- result = 0.0
- lines = text.split('\n')
- hasEndProc = False
- hasHeaderComment = False
- hasFile = False
- hasJob = False
- hasProc = False
- hasParm = False
- hasReport = False
-
-        def isCommentLine(line):
-            return EasytrieveLexer._COMMENT_LINE_REGEX.match(line) is not None
-
- def isEmptyLine(line):
- return not bool(line.strip())
-
- # Remove possible empty lines and header comments.
- while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
- if not isEmptyLine(lines[0]):
- hasHeaderComment = True
- del lines[0]
-
-        if lines and EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
- # Looks like an Easytrieve macro.
- result = 0.4
- if hasHeaderComment:
- result += 0.4
- else:
- # Scan the source for lines starting with indicators.
- for line in lines:
- words = line.split()
- if (len(words) >= 2):
- firstWord = words[0]
- if not hasReport:
- if not hasJob:
- if not hasFile:
- if not hasParm:
- if firstWord == 'PARM':
- hasParm = True
- if firstWord == 'FILE':
- hasFile = True
- if firstWord == 'JOB':
- hasJob = True
- elif firstWord == 'PROC':
- hasProc = True
- elif firstWord == 'END-PROC':
- hasEndProc = True
- elif firstWord == 'REPORT':
- hasReport = True
-
- # Weight the findings.
- if hasJob and (hasProc == hasEndProc):
- if hasHeaderComment:
- result += 0.1
- if hasParm:
- if hasProc:
- # Found PARM, JOB and PROC/END-PROC:
- # pretty sure this is Easytrieve.
- result += 0.8
- else:
-                        # Found PARM and JOB: probably this is Easytrieve
- result += 0.5
- else:
- # Found JOB and possibly other keywords: might be Easytrieve
- result += 0.11
- if hasParm:
-                    # Note: PARM is not a proper English word, so it is
-                    # regarded as a much better indicator for Easytrieve than
-                    # the other words.
- result += 0.2
- if hasFile:
- result += 0.01
- if hasReport:
- result += 0.01
- assert 0.0 <= result <= 1.0
- return result
-
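# Editor's note: an illustrative sketch, not part of the deleted file. It runs
# the structural analysis above on a minimal, made-up Easytrieve job so the
# individual indicators can be seen registering.
from pygments.lexers import EasytrieveLexer

job = (
    "* a header comment\n"
    "PARM DEBUG(FLOW)\n"
    "FILE PERSNL FB(150 1800)\n"
    "JOB INPUT PERSNL NAME FIND-EMPLOYEES\n"
    "PRINT PAY-RPT\n"
)
# The header comment, the PARM/JOB combination and the FILE statement each
# add to the weighted score computed above.
print(EasytrieveLexer.analyse_text(job))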
-
-class JclLexer(RegexLexer):
- """
- Job Control Language (JCL)
- is a scripting language used on mainframe platforms to instruct the system
- on how to run a batch job or start a subsystem. It is somewhat
- comparable to MS DOS batch and Unix shell scripts.
-
- .. versionadded:: 2.1
- """
- name = 'JCL'
- aliases = ['jcl']
- filenames = ['*.jcl']
- mimetypes = ['text/x-jcl']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'//\*.*\n', Comment.Single),
- (r'//', Keyword.Pseudo, 'statement'),
- (r'/\*', Keyword.Pseudo, 'jes2_statement'),
- # TODO: JES3 statement
- (r'.*\n', Other) # Input text or inline code in any language.
- ],
- 'statement': [
- (r'\s*\n', Whitespace, '#pop'),
- (r'([a-z]\w*)(\s+)(exec|job)(\s*)',
- bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
- 'option'),
- (r'[a-z]\w*', Name.Variable, 'statement_command'),
- (r'\s+', Whitespace, 'statement_command'),
- ],
- 'statement_command': [
- (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
- r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
- include('option')
- ],
- 'jes2_statement': [
- (r'\s*\n', Whitespace, '#pop'),
- (r'\$', Keyword, 'option'),
- (r'\b(jobparam|message|netacct|notify|output|priority|route|'
- r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
- ],
- 'option': [
- # (r'\n', Text, 'root'),
- (r'\*', Name.Builtin),
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=&%]', Operator),
- (r'[a-z_]\w*', Name),
- (r'\d+\.\d*', Number.Float),
- (r'\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r"'", String, 'option_string'),
- (r'[ \t]+', Whitespace, 'option_comment'),
- (r'\.', Punctuation),
- ],
- 'option_string': [
- (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
- (r"''", String),
- (r"[^']", String),
- (r"'", String, '#pop'),
- ],
- 'option_comment': [
- # (r'\n', Text, 'root'),
- (r'.+', Comment.Single),
- ]
- }
-
- _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
- re.IGNORECASE)
-
- def analyse_text(text):
- """
- Recognize JCL job by header.
- """
- result = 0.0
- lines = text.split('\n')
- if len(lines) > 0:
- if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
- result = 1.0
- assert 0.0 <= result <= 1.0
- return result
-
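# Editor's note: an illustrative sketch, not part of the deleted file. The
# recognizer above only inspects the first line for a job card of the form
# '//NAME JOB ...'; the job and step names below are made up.
from pygments.lexers import JclLexer

jcl = "//MYJOB   JOB (ACCT),'RUN IT',CLASS=A\n//STEP1   EXEC PGM=IEFBR14\n"
print(JclLexer.analyse_text(jcl))             # 1.0: first line is a JOB card
print(JclLexer.analyse_text("echo hello\n"))  # 0.0: no JOB card, so not JCL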
-
-class MiniScriptLexer(RegexLexer):
- """
- For MiniScript source code.
-
- .. versionadded:: 2.6
- """
-
- name = 'MiniScript'
- url = 'https://miniscript.org'
- aliases = ['miniscript', 'ms']
- filenames = ['*.ms']
- mimetypes = ['text/x-minicript', 'application/x-miniscript']
-
- tokens = {
- 'root': [
- (r'#!(.*?)$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- ('//.*$', Comment.Single),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number),
- (r'(?i)\d+e[+-]?\d+', Number),
- (r'\d+', Number),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'"', String, 'string_double'),
- (r'(==|!=|<=|>=|[=+\-*/%^<>.:])', Operator),
- (r'[;,\[\]{}()]', Punctuation),
- (words((
- 'break', 'continue', 'else', 'end', 'for', 'function', 'if',
- 'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'),
- Keyword),
- (words((
- 'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
- 'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
- 'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
- 'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
- 'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
- 'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
- 'yield'), suffix=r'\b'),
- Name.Builtin),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(and|or|not|new)\b', Operator.Word),
- (r'(self|super|__isa)\b', Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name.Variable)
- ],
- 'string_double': [
- (r'[^"\n]+', String),
- (r'""', String),
- (r'"', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sgf.py b/venv/lib/python3.11/site-packages/pygments/lexers/sgf.py
deleted file mode 100644
index 0fad263..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sgf.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""
- pygments.lexers.sgf
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for Smart Game Format (sgf) file format.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Name, Literal, String, Punctuation, Whitespace
-
-__all__ = ["SmartGameFormatLexer"]
-
-
-class SmartGameFormatLexer(RegexLexer):
- """
- Lexer for Smart Game Format (sgf) file format.
-
- The format is used to store game records of board games for two players
- (mainly Go game).
-
- .. versionadded:: 2.4
- """
- name = 'SmartGameFormat'
- url = 'https://www.red-bean.com/sgf/'
- aliases = ['sgf']
- filenames = ['*.sgf']
-
- tokens = {
- 'root': [
- (r'[():;]+', Punctuation),
- # tokens:
- (r'(A[BW]|AE|AN|AP|AR|AS|[BW]L|BM|[BW]R|[BW]S|[BW]T|CA|CH|CP|CR|'
- r'DD|DM|DO|DT|EL|EV|EX|FF|FG|G[BW]|GC|GM|GN|HA|HO|ID|IP|IT|IY|KM|'
- r'KO|LB|LN|LT|L|MA|MN|M|N|OB|OM|ON|OP|OT|OV|P[BW]|PC|PL|PM|RE|RG|'
- r'RO|RU|SO|SC|SE|SI|SL|SO|SQ|ST|SU|SZ|T[BW]|TC|TE|TM|TR|UC|US|VW|'
- r'V|[BW]|C)',
- Name.Builtin),
- # number:
- (r'(\[)([0-9.]+)(\])',
- bygroups(Punctuation, Literal.Number, Punctuation)),
- # date:
- (r'(\[)([0-9]{4}-[0-9]{2}-[0-9]{2})(\])',
- bygroups(Punctuation, Literal.Date, Punctuation)),
- # point:
- (r'(\[)([a-z]{2})(\])',
- bygroups(Punctuation, String, Punctuation)),
- # double points:
- (r'(\[)([a-z]{2})(:)([a-z]{2})(\])',
- bygroups(Punctuation, String, Punctuation, String, Punctuation)),
-
- (r'(\[)([\w\s#()+,\-.:?]+)(\])',
- bygroups(Punctuation, String, Punctuation)),
- (r'(\[)(\s.*)(\])',
- bygroups(Punctuation, Whitespace, Punctuation)),
- (r'\s+', Whitespace)
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/shell.py b/venv/lib/python3.11/site-packages/pygments/lexers/shell.py
deleted file mode 100644
index eabf4ec..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/shell.py
+++ /dev/null
@@ -1,920 +0,0 @@
-"""
- pygments.lexers.shell
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various shells.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
- include, default, this, using, words, line_re
-from pygments.token import Punctuation, Whitespace, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import shebang_matches
-
-__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
- 'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
- 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
- 'ExeclineLexer']
-
-
-class BashLexer(RegexLexer):
- """
- Lexer for (ba|k|z|)sh shell scripts.
-
- .. versionadded:: 0.6
- """
-
- name = 'Bash'
- aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
- filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
- '*.exheres-0', '*.exlib', '*.zsh',
- '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
- '.kshrc', 'kshrc',
- 'PKGBUILD']
- mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- include('interp'),
- ],
- 'interp': [
- (r'\$\(\(', Keyword, 'math'),
- (r'\$\(', Keyword, 'paren'),
- (r'\$\{#?', String.Interpol, 'curly'),
- (r'\$[a-zA-Z_]\w*', Name.Variable), # user variable
- (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
- (r'\$', Text),
- ],
- 'basic': [
- (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
- r'select|break|continue|until|esac|elif)(\s*)\b',
- bygroups(Keyword, Whitespace)),
- (r'\b(alias|bg|bind|builtin|caller|cd|command|compgen|'
- r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
- r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
- r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
- r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
- r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
- Name.Builtin),
- (r'\A#!.+\n', Comment.Hashbang),
- (r'#.*\n', Comment.Single),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Whitespace, Operator)),
- (r'[\[\]{}()=]', Operator),
- (r'<<<', Operator), # here-string
- (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r'&&|\|\|', Operator),
- ],
- 'data': [
- (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r"(?s)'.*?'", String.Single),
- (r';', Punctuation),
- (r'&', Punctuation),
- (r'\|', Punctuation),
- (r'\s+', Whitespace),
- (r'\d+\b', Number),
- (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
- (r'<', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$\\]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'math': [
- (r'\)\)', Keyword, '#pop'),
- (r'\*\*|\|\||<<|>>|[-+*/%^|&<>]', Operator),
- (r'\d+#[\da-zA-Z]+', Number),
- (r'\d+#(?! )', Number),
- (r'0[xX][\da-fA-F]+', Number),
- (r'\d+', Number),
- (r'[a-zA-Z_]\w*', Name.Variable), # user variable
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'(ba|z|)sh'):
- return 1
- if text.startswith('$ '):
- return 0.2
-
-
-class SlurmBashLexer(BashLexer):
- """
- Lexer for (ba|k|z|)sh Slurm scripts.
-
- .. versionadded:: 2.4
- """
-
- name = 'Slurm'
- aliases = ['slurm', 'sbatch']
- filenames = ['*.sl']
- mimetypes = []
- EXTRA_KEYWORDS = {'srun'}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in BashLexer.get_tokens_unprocessed(self, text):
- if token is Text and value in self.EXTRA_KEYWORDS:
- yield index, Name.Builtin, value
- elif token is Comment.Single and 'SBATCH' in value:
- yield index, Keyword.Pseudo, value
- else:
- yield index, token, value
-
-
-class ShellSessionBaseLexer(Lexer):
- """
- Base lexer for shell sessions.
-
- .. versionadded:: 2.1
- """
-
- _bare_continuation = False
- _venv = re.compile(r'^(\([^)]*\))(\s*)')
-
- def get_tokens_unprocessed(self, text):
- innerlexer = self._innerLexerCls(**self.options)
-
- pos = 0
- curcode = ''
- insertions = []
- backslash_continuation = False
-
- for match in line_re.finditer(text):
- line = match.group()
-
- venv_match = self._venv.match(line)
- if venv_match:
- venv = venv_match.group(1)
- venv_whitespace = venv_match.group(2)
- insertions.append((len(curcode),
- [(0, Generic.Prompt.VirtualEnv, venv)]))
- if venv_whitespace:
- insertions.append((len(curcode),
- [(0, Text, venv_whitespace)]))
- line = line[venv_match.end():]
-
- m = self._ps1rgx.match(line)
- if m:
- # To support output lexers (say diff output), the output
- # needs to be broken by prompts whenever the output lexer
- # changes.
- if not insertions:
- pos = match.start()
-
- insertions.append((len(curcode),
- [(0, Generic.Prompt, m.group(1))]))
- curcode += m.group(2)
- backslash_continuation = curcode.endswith('\\\n')
- elif backslash_continuation:
- if line.startswith(self._ps2):
- insertions.append((len(curcode),
- [(0, Generic.Prompt,
- line[:len(self._ps2)])]))
- curcode += line[len(self._ps2):]
- else:
- curcode += line
- backslash_continuation = curcode.endswith('\\\n')
- elif self._bare_continuation and line.startswith(self._ps2):
- insertions.append((len(curcode),
- [(0, Generic.Prompt,
- line[:len(self._ps2)])]))
- curcode += line[len(self._ps2):]
- else:
- if insertions:
- toks = innerlexer.get_tokens_unprocessed(curcode)
- for i, t, v in do_insertions(insertions, toks):
- yield pos+i, t, v
- yield match.start(), Generic.Output, line
- insertions = []
- curcode = ''
- if insertions:
- for i, t, v in do_insertions(insertions,
- innerlexer.get_tokens_unprocessed(curcode)):
- yield pos+i, t, v
-
-
-class BashSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for Bash shell sessions, i.e. command lines, including a
- prompt, interspersed with output.
-
- .. versionadded:: 1.1
- """
-
- name = 'Bash Session'
- aliases = ['console', 'shell-session']
- filenames = ['*.sh-session', '*.shell-session']
- mimetypes = ['application/x-shell-session', 'application/x-sh-session']
-
- _innerLexerCls = BashLexer
- _ps1rgx = re.compile(
- r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
- r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
- _ps2 = '> '
-
-
-class BatchLexer(RegexLexer):
- """
- Lexer for the DOS/Windows Batch file format.
-
- .. versionadded:: 0.7
- """
- name = 'Batchfile'
- aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
- filenames = ['*.bat', '*.cmd']
- mimetypes = ['application/x-dos-batch']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- _nl = r'\n\x1a'
- _punct = r'&<>|'
- _ws = r'\t\v\f\r ,;=\xa0'
- _nlws = r'\s\x1a\xa0,;='
- _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
- _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
- (_nl, _ws, _nl, _punct))
- _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
- _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
- _label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl)
- _label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl)
- _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
- _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
- _string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
- _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
- r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
- r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
- r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
- r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
- (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
- _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct)
- _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct)
- _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
- _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
- _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token))
-
- def _make_begin_state(compound, _core_token=_core_token,
- _core_token_compound=_core_token_compound,
- _keyword_terminator=_keyword_terminator,
- _nl=_nl, _punct=_punct, _string=_string,
- _space=_space, _start_label=_start_label,
- _stoken=_stoken, _token_terminator=_token_terminator,
- _variable=_variable, _ws=_ws):
- rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
- ')' if compound else '')
- rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
- rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
- set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
- suffix = ''
- if compound:
- _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
- _token_terminator = r'(?:(?=\))|%s)' % _token_terminator
- suffix = '/compound'
- return [
- ((r'\)', Punctuation, '#pop') if compound else
- (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
- Comment.Single)),
- (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
- (_space, using(this, state='text')),
- include('redirect%s' % suffix),
- (r'[%s]+' % _nl, Text),
- (r'\(', Punctuation, 'root/compound'),
- (r'@+', Punctuation),
- (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
- r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
- (_nl, _token_terminator, _space,
- _core_token_compound if compound else _core_token, _nl, _nl),
- bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
- (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
- (_keyword_terminator, rest, _nl, _nl, rest),
- bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
- (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
- 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
- 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
- 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
- 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
- 'title', 'type', 'ver', 'verify', 'vol'),
- suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
- (r'(call)(%s?)(:)' % _space,
- bygroups(Keyword, using(this, state='text'), Punctuation),
- 'call%s' % suffix),
- (r'call%s' % _keyword_terminator, Keyword),
- (r'(for%s(?!\^))(%s)(/f%s)' %
- (_token_terminator, _space, _token_terminator),
- bygroups(Keyword, using(this, state='text'), Keyword),
- ('for/f', 'for')),
- (r'(for%s(?!\^))(%s)(/l%s)' %
- (_token_terminator, _space, _token_terminator),
- bygroups(Keyword, using(this, state='text'), Keyword),
- ('for/l', 'for')),
- (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
- (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
- bygroups(Keyword, using(this, state='text'), Punctuation),
- 'label%s' % suffix),
- (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
- (_token_terminator, _space, _token_terminator, _space,
- _token_terminator, _space),
- bygroups(Keyword, using(this, state='text'), Keyword,
- using(this, state='text'), Keyword,
- using(this, state='text')), ('(?', 'if')),
- (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
- (_token_terminator, _space, _stoken, _keyword_terminator,
- rest_of_line_compound if compound else rest_of_line),
- Comment.Single, 'follow%s' % suffix),
- (r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
- bygroups(Keyword, using(this, state='text'), Keyword),
- 'arithmetic%s' % suffix),
- (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
- r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
- (_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
- ')' if compound else '', _nl, _nl),
- bygroups(Keyword, using(this, state='text'), Keyword,
- using(this, state='text'), using(this, state='variable'),
- Punctuation),
- 'follow%s' % suffix),
- default('follow%s' % suffix)
- ]
-
- def _make_follow_state(compound, _label=_label,
- _label_compound=_label_compound, _nl=_nl,
- _space=_space, _start_label=_start_label,
- _token=_token, _token_compound=_token_compound,
- _ws=_ws):
- suffix = '/compound' if compound else ''
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state += [
- (r'%s([%s]*)(%s)(.*)' %
- (_start_label, _ws, _label_compound if compound else _label),
- bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
- include('redirect%s' % suffix),
- (r'(?=[%s])' % _nl, Text, '#pop'),
- (r'\|\|?|&&?', Punctuation, '#pop'),
- include('text')
- ]
- return state
-
- def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
- _string=_string, _variable=_variable,
- _ws=_ws, _nlws=_nlws):
- op = r'=+\-*/!~'
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state += [
- (r'0[0-7]+', Number.Oct),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'[(),]+', Punctuation),
- (r'([%s]|%%|\^\^)+' % op, Operator),
- (r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' %
- (_string, _variable, _nl, op, _nlws, _punct, _nlws,
- r'[^)]' if compound else r'[\w\W]'),
- using(this, state='variable')),
- (r'(?=[\x00|&])', Text, '#pop'),
- include('follow')
- ]
- return state
-
- def _make_call_state(compound, _label=_label,
- _label_compound=_label_compound):
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
- bygroups(Punctuation, Name.Label), '#pop'))
- return state
-
- def _make_label_state(compound, _label=_label,
- _label_compound=_label_compound, _nl=_nl,
- _punct=_punct, _string=_string, _variable=_variable):
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
- (_label_compound if compound else _label, _string,
- _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
- _punct, r')' if compound else ''),
- bygroups(Name.Label, Comment.Single), '#pop'))
- return state
-
- def _make_redirect_state(compound,
- _core_token_compound=_core_token_compound,
- _nl=_nl, _punct=_punct, _stoken=_stoken,
- _string=_string, _space=_space,
- _variable=_variable, _nlws=_nlws):
- stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token_compound))
- return [
- (r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' %
- (_nlws, _nlws),
- bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
- (r'((?:(?<=[%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
- (_nlws, _nl, _space, stoken_compound if compound else _stoken),
- bygroups(Number.Integer, Punctuation, using(this, state='text')))
- ]
-
- tokens = {
- 'root': _make_begin_state(False),
- 'follow': _make_follow_state(False),
- 'arithmetic': _make_arithmetic_state(False),
- 'call': _make_call_state(False),
- 'label': _make_label_state(False),
- 'redirect': _make_redirect_state(False),
- 'root/compound': _make_begin_state(True),
- 'follow/compound': _make_follow_state(True),
- 'arithmetic/compound': _make_arithmetic_state(True),
- 'call/compound': _make_call_state(True),
- 'label/compound': _make_label_state(True),
- 'redirect/compound': _make_redirect_state(True),
- 'variable-or-escape': [
- (_variable, Name.Variable),
- (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (_variable, Name.Variable),
- (r'\^!|%%', String.Escape),
- (r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
- default('#pop')
- ],
- 'sqstring': [
- include('variable-or-escape'),
- (r'[^%]+|%', String.Single)
- ],
- 'bqstring': [
- include('variable-or-escape'),
- (r'[^%]+|%', String.Backtick)
- ],
- 'text': [
- (r'"', String.Double, 'string'),
- include('variable-or-escape'),
- (r'[^"%%^%s%s\d)]+|.' % (_nlws, _punct), Text)
- ],
- 'variable': [
- (r'"', String.Double, 'string'),
- include('variable-or-escape'),
- (r'[^"%%^%s]+|.' % _nl, Name.Variable)
- ],
- 'for': [
- (r'(%s)(in)(%s)(\()' % (_space, _space),
- bygroups(using(this, state='text'), Keyword,
- using(this, state='text'), Punctuation), '#pop'),
- include('follow')
- ],
- 'for2': [
- (r'\)', Punctuation),
- (r'(%s)(do%s)' % (_space, _token_terminator),
- bygroups(using(this, state='text'), Keyword), '#pop'),
- (r'[%s]+' % _nl, Text),
- include('follow')
- ],
- 'for/f': [
- (r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws),
- bygroups(String.Double, using(this, state='string'), Text,
- Punctuation)),
- (r'"', String.Double, ('#pop', 'for2', 'string')),
- (r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws),
- bygroups(using(this, state='sqstring'), Text, Punctuation)),
- (r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws),
- bygroups(using(this, state='bqstring'), Text, Punctuation)),
- include('for2')
- ],
- 'for/l': [
- (r'-?\d+', Number.Integer),
- include('for2')
- ],
- 'if': [
- (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
- (_token_terminator, _space),
- bygroups(Keyword, using(this, state='text'),
- Number.Integer), '#pop'),
- (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
- bygroups(Keyword, using(this, state='text'),
- using(this, state='variable')), '#pop'),
- (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
- bygroups(Keyword, using(this, state='text')), '#pop'),
- (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
- bygroups(using(this, state='arithmetic'), Operator.Word,
- using(this, state='arithmetic')), '#pop'),
- (_stoken, using(this, state='text'), ('#pop', 'if2')),
- ],
- 'if2': [
- (r'(%s?)(==)(%s?%s)' % (_space, _space, _stoken),
- bygroups(using(this, state='text'), Operator,
- using(this, state='text')), '#pop'),
- (r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
- bygroups(using(this, state='text'), Operator.Word,
- using(this, state='text')), '#pop')
- ],
- '(?': [
- (_space, using(this, state='text')),
- (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
- default('#pop')
- ],
- 'else?': [
- (_space, using(this, state='text')),
- (r'else%s' % _token_terminator, Keyword, '#pop'),
- default('#pop')
- ]
- }
-
-
-class MSDOSSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for MS DOS shell sessions, i.e. command lines, including a
- prompt, interspersed with output.
-
- .. versionadded:: 2.1
- """
-
- name = 'MSDOS Session'
- aliases = ['doscon']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = BatchLexer
- _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
- _ps2 = 'More? '
-
-
-class TcshLexer(RegexLexer):
- """
- Lexer for tcsh scripts.
-
- .. versionadded:: 0.10
- """
-
- name = 'Tcsh'
- aliases = ['tcsh', 'csh']
- filenames = ['*.tcsh', '*.csh']
- mimetypes = ['application/x-csh']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\$\(', Keyword, 'paren'),
- (r'\$\{#?', Keyword, 'curly'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- ],
- 'basic': [
- (r'\b(if|endif|else|while|then|foreach|case|default|'
- r'break|continue|goto|breaksw|end|switch|endsw)\s*\b',
- Keyword),
- (r'\b(alias|alloc|bg|bindkey|builtins|bye|caller|cd|chdir|'
- r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
- r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
- r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
- r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
- r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
- r'source|stop|suspend|source|suspend|telltc|time|'
- r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
- r'ver|wait|warp|watchlog|where|which)\s*\b',
- Name.Builtin),
- (r'#.*', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r';', Punctuation),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'\$#?(\w+|.)', Name.Variable),
- ],
- 'curly': [
- (r'\}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
-
-class TcshSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for Tcsh sessions, i.e. command lines, including a
- prompt, interspersed with output.
-
- .. versionadded:: 2.1
- """
-
- name = 'Tcsh Session'
- aliases = ['tcshcon']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = TcshLexer
- _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
- _ps2 = '? '
-
-
-class PowerShellLexer(RegexLexer):
- """
- For Windows PowerShell code.
-
- .. versionadded:: 1.5
- """
- name = 'PowerShell'
- aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
- filenames = ['*.ps1', '*.psm1']
- mimetypes = ['text/x-powershell']
-
- flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
-
- keywords = (
- 'while validateset validaterange validatepattern validatelength '
- 'validatecount until trap switch return ref process param parameter in '
- 'if global: local: function foreach for finally filter end elseif else '
- 'dynamicparam do default continue cmdletbinding break begin alias \\? '
- '% #script #private #local #global mandatory parametersetname position '
- 'valuefrompipeline valuefrompipelinebypropertyname '
- 'valuefromremainingarguments helpmessage try catch throw').split()
-
- operators = (
- 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
- 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
- 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
- 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
- 'lt match ne not notcontains notlike notmatch or regex replace '
- 'wildcard').split()
-
- verbs = (
- 'write where watch wait use update unregister unpublish unprotect '
- 'unlock uninstall undo unblock trace test tee take sync switch '
- 'suspend submit stop step start split sort skip show set send select '
- 'search scroll save revoke resume restore restart resolve resize '
- 'reset request repair rename remove register redo receive read push '
- 'publish protect pop ping out optimize open new move mount merge '
- 'measure lock limit join invoke install initialize import hide group '
- 'grant get format foreach find export expand exit enter enable edit '
- 'dismount disconnect disable deny debug cxnew copy convertto '
- 'convertfrom convert connect confirm compress complete compare close '
- 'clear checkpoint block backup assert approve aggregate add').split()
-
- aliases_ = (
- 'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
- 'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
- 'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
- 'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
- 'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
- 'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
- 'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
- 'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
- 'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
- 'trcm type wget where wjb write').split()
-
- commenthelp = (
- 'component description example externalhelp forwardhelpcategory '
- 'forwardhelptargetname functionality inputs link '
- 'notes outputs parameter remotehelprunspace role synopsis').split()
-
- tokens = {
- 'root': [
- # we need to count pairs of parentheses for correct highlighting
- # of '$(...)' blocks in strings
- (r'\(', Punctuation, 'child'),
- (r'\s+', Text),
- (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
- bygroups(Comment, String.Doc, Comment)),
- (r'#[^\n]*?$', Comment),
- (r'(&lt;|<)#', Comment.Multiline, 'multline'),
- (r'@"\n', String.Heredoc, 'heredoc-double'),
- (r"@'\n.*?\n'@", String.Heredoc),
- # escaped syntax
- (r'`[\'"$@-]', Punctuation),
- (r'"', String.Double, 'string'),
- (r"'([^']|'')*'", String.Single),
- (r'(\$|@@|@)((global|script|private|env):)?\w+',
- Name.Variable),
- (r'(%s)\b' % '|'.join(keywords), Keyword),
- (r'-(%s)\b' % '|'.join(operators), Operator),
- (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
- (r'(%s)\s' % '|'.join(aliases_), Name.Builtin),
- (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
- (r'-[a-z_]\w*', Name),
- (r'\w+', Name),
- (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
- ],
- 'child': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'multline': [
- (r'[^#&.]+', Comment.Multiline),
- (r'#(>|&gt;)', Comment.Multiline, '#pop'),
- (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
- (r'[#&.]', Comment.Multiline),
- ],
- 'string': [
- (r"`[0abfnrtv'\"$`]", String.Escape),
- (r'[^$`"]+', String.Double),
- (r'\$\(', Punctuation, 'child'),
- (r'""', String.Double),
- (r'[`$]', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'heredoc-double': [
- (r'\n"@', String.Heredoc, '#pop'),
- (r'\$\(', Punctuation, 'child'),
- (r'[^@\n]+"]', String.Heredoc),
- (r".", String.Heredoc),
- ]
- }
-
-
-class PowerShellSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for PowerShell sessions, i.e. command lines, including a
- prompt, interspersed with output.
-
- .. versionadded:: 2.1
- """
-
- name = 'PowerShell Session'
- aliases = ['pwsh-session', 'ps1con']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = PowerShellLexer
- _bare_continuation = True
- _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
- _ps2 = '> '
-
-
-class FishShellLexer(RegexLexer):
- """
- Lexer for Fish shell scripts.
-
- .. versionadded:: 2.1
- """
-
- name = 'Fish'
- aliases = ['fish', 'fishshell']
- filenames = ['*.fish', '*.load']
- mimetypes = ['application/x-fish']
-
- tokens = {
- 'root': [
- include('basic'),
- include('data'),
- include('interp'),
- ],
- 'interp': [
- (r'\$\(\(', Keyword, 'math'),
- (r'\(', Keyword, 'paren'),
- (r'\$#?(\w+|.)', Name.Variable),
- ],
- 'basic': [
- (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
- r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
- r'cd|count|test)(\s*)\b',
- bygroups(Keyword, Text)),
- (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
- r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
- r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
- r'fish_update_completions|fishd|funced|funcsave|functions|help|'
- r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
- r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
- r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
- Name.Builtin),
- (r'#.*\n', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
- (r'[\[\]()=]', Operator),
- (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- ],
- 'data': [
- (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r"(?s)'.*?'", String.Single),
- (r';', Punctuation),
- (r'&|\||\^|<|>', Operator),
- (r'\s+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'math': [
- (r'\)\)', Keyword, '#pop'),
- (r'[-+*/%^|&]|\*\*|\|\|', Operator),
- (r'\d+#\d+', Number),
- (r'\d+#(?! )', Number),
- (r'\d+', Number),
- include('root'),
- ],
- }
-
-class ExeclineLexer(RegexLexer):
- """
- Lexer for Laurent Bercot's execline language
- (https://skarnet.org/software/execline).
-
- .. versionadded:: 2.7
- """
-
- name = 'execline'
- aliases = ['execline']
- filenames = ['*.exec']
-
- tokens = {
- 'root': [
- include('basic'),
- include('data'),
- include('interp')
- ],
- 'interp': [
- (r'\$\{', String.Interpol, 'curly'),
- (r'\$[\w@#]+', Name.Variable), # user variable
- (r'\$', Text),
- ],
- 'basic': [
- (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
- r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
- r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
- r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
- r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
- r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
- r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
- r'withstdinas)\b', Name.Builtin),
- (r'\A#!.+\n', Comment.Hashbang),
- (r'#.*\n', Comment.Single),
- (r'[{}]', Operator)
- ],
- 'data': [
- (r'(?s)"(\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r'\s+', Text),
- (r'[^\s{}$"\\]+', Text)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r'[\w#@]+', Name.Variable),
- include('root')
- ]
-
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'execlineb'):
- return 1
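
A minimal usage sketch for the session lexers above (assuming a Pygments
installation that still ships pygments.lexers.shell): prompts come back as
Generic.Prompt, the command text is re-lexed by the inner BashLexer, and
unmatched lines fall through as Generic.Output.

    from pygments.lexers.shell import BashSessionLexer

    transcript = "$ echo hello\nhello\n"
    for ttype, value in BashSessionLexer().get_tokens(transcript):
        # e.g. Generic.Prompt '$ ', Name.Builtin 'echo', Generic.Output 'hello\n'
        print(ttype, repr(value))
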
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sieve.py b/venv/lib/python3.11/site-packages/pygments/lexers/sieve.py
deleted file mode 100644
index 8287b07..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sieve.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
- pygments.lexers.sieve
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Sieve file format.
-
- https://tools.ietf.org/html/rfc5228
- https://tools.ietf.org/html/rfc5173
- https://tools.ietf.org/html/rfc5229
- https://tools.ietf.org/html/rfc5230
- https://tools.ietf.org/html/rfc5232
- https://tools.ietf.org/html/rfc5235
- https://tools.ietf.org/html/rfc5429
- https://tools.ietf.org/html/rfc8580
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Name, Literal, String, Text, Punctuation, \
- Keyword
-
-__all__ = ["SieveLexer"]
-
-
-class SieveLexer(RegexLexer):
- """
- Lexer for the Sieve format.
-
- .. versionadded:: 2.6
- """
- name = 'Sieve'
- filenames = ['*.siv', '*.sieve']
- aliases = ['sieve']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'[();,{}\[\]]', Punctuation),
- # import:
- (r'(?i)require',
- Keyword.Namespace),
- # tags:
- (r'(?i)(:)(addresses|all|contains|content|create|copy|comparator|'
- r'count|days|detail|domain|fcc|flags|from|handle|importance|is|'
- r'localpart|length|lowerfirst|lower|matches|message|mime|options|'
- r'over|percent|quotewildcard|raw|regex|specialuse|subject|text|'
- r'under|upperfirst|upper|value)',
- bygroups(Name.Tag, Name.Tag)),
- # tokens:
- (r'(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|'
- r'ereject|exists|false|fileinto|if|hasflag|header|keep|'
- r'notify_method_capability|notify|not|redirect|reject|removeflag|'
- r'setflag|size|spamtest|stop|string|true|vacation|virustest)',
- Name.Builtin),
- (r'(?i)set',
- Keyword.Declaration),
- # number:
- (r'([0-9.]+)([kmgKMG])?',
- bygroups(Literal.Number, Literal.Number)),
- # comment:
- (r'#.*$',
- Comment.Single),
- (r'/\*.*\*/',
- Comment.Multiline),
- # string:
- (r'"[^"]*?"',
- String),
- # text block:
- (r'text:',
- Name.Tag, 'text'),
- ],
- 'text': [
- (r'[^.].*?\n', String),
- (r'^\.', Punctuation, "#pop"),
- ]
- }
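
As a quick illustration (the script below and the formatter choice are
arbitrary, not taken from the Pygments documentation), the lexer can be used
through the regular highlight() API; TerminalFormatter returns ANSI-colored
text as a string.

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    script = 'require "fileinto";\nif header :contains "subject" "spam" { fileinto "Junk"; }\n'
    print(highlight(script, get_lexer_by_name("sieve"), TerminalFormatter()))
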
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/slash.py b/venv/lib/python3.11/site-packages/pygments/lexers/slash.py
deleted file mode 100644
index cce47ce..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/slash.py
+++ /dev/null
@@ -1,184 +0,0 @@
-"""
- pygments.lexers.slash
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the `Slash <https://github.com/arturadib/Slash-A>`_ programming
- language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import ExtendedRegexLexer, bygroups, DelegatingLexer
-from pygments.token import Name, Number, String, Comment, Punctuation, \
- Other, Keyword, Operator, Whitespace
-
-__all__ = ['SlashLexer']
-
-
-class SlashLanguageLexer(ExtendedRegexLexer):
- _nkw = r'(?=[^a-zA-Z_0-9])'
-
- def move_state(new_state):
- return ("#pop", new_state)
-
- def right_angle_bracket(lexer, match, ctx):
- if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
- ctx.stack.pop()
- yield match.start(), String.Interpol, '}'
- ctx.pos = match.end()
- pass
-
- tokens = {
- "root": [
- (r"<%=", Comment.Preproc, move_state("slash")),
- (r"<%!!", Comment.Preproc, move_state("slash")),
- (r"<%#.*?%>", Comment.Multiline),
- (r"<%", Comment.Preproc, move_state("slash")),
- (r".|\n", Other),
- ],
- "string": [
- (r"\\", String.Escape, move_state("string_e")),
- (r"\"", String, move_state("slash")),
- (r"#\{", String.Interpol, "slash"),
- (r'.|\n', String),
- ],
- "string_e": [
- (r'n', String.Escape, move_state("string")),
- (r't', String.Escape, move_state("string")),
- (r'r', String.Escape, move_state("string")),
- (r'e', String.Escape, move_state("string")),
- (r'x[a-fA-F0-9]{2}', String.Escape, move_state("string")),
- (r'.', String.Escape, move_state("string")),
- ],
- "regexp": [
- (r'}[a-z]*', String.Regex, move_state("slash")),
- (r'\\(.|\n)', String.Regex),
- (r'{', String.Regex, "regexp_r"),
- (r'.|\n', String.Regex),
- ],
- "regexp_r": [
- (r'}[a-z]*', String.Regex, "#pop"),
- (r'\\(.|\n)', String.Regex),
- (r'{', String.Regex, "regexp_r"),
- ],
- "slash": [
- (r"%>", Comment.Preproc, move_state("root")),
- (r"\"", String, move_state("string")),
- (r"'[a-zA-Z0-9_]+", String),
- (r'%r{', String.Regex, move_state("regexp")),
- (r'/\*.*?\*/', Comment.Multiline),
- (r"(#|//).*?\n", Comment.Single),
- (r'-?[0-9]+e[+-]?[0-9]+', Number.Float),
- (r'-?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
- (r'-?[0-9]+', Number.Integer),
- (r'nil'+_nkw, Name.Builtin),
- (r'true'+_nkw, Name.Builtin),
- (r'false'+_nkw, Name.Builtin),
- (r'self'+_nkw, Name.Builtin),
- (r'(class)(\s+)([A-Z][a-zA-Z0-9_\']*)',
- bygroups(Keyword, Whitespace, Name.Class)),
- (r'class'+_nkw, Keyword),
- (r'extends'+_nkw, Keyword),
- (r'(def)(\s+)(self)(\s*)(\.)(\s*)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
- bygroups(Keyword, Whitespace, Name.Builtin, Whitespace, Punctuation, Whitespace, Name.Function)),
- (r'(def)(\s+)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
- bygroups(Keyword, Whitespace, Name.Function)),
- (r'def'+_nkw, Keyword),
- (r'if'+_nkw, Keyword),
- (r'elsif'+_nkw, Keyword),
- (r'else'+_nkw, Keyword),
- (r'unless'+_nkw, Keyword),
- (r'for'+_nkw, Keyword),
- (r'in'+_nkw, Keyword),
- (r'while'+_nkw, Keyword),
- (r'until'+_nkw, Keyword),
- (r'and'+_nkw, Keyword),
- (r'or'+_nkw, Keyword),
- (r'not'+_nkw, Keyword),
- (r'lambda'+_nkw, Keyword),
- (r'try'+_nkw, Keyword),
- (r'catch'+_nkw, Keyword),
- (r'return'+_nkw, Keyword),
- (r'next'+_nkw, Keyword),
- (r'last'+_nkw, Keyword),
- (r'throw'+_nkw, Keyword),
- (r'use'+_nkw, Keyword),
- (r'switch'+_nkw, Keyword),
- (r'\\', Keyword),
- (r'λ', Keyword),
- (r'__FILE__'+_nkw, Name.Builtin.Pseudo),
- (r'__LINE__'+_nkw, Name.Builtin.Pseudo),
- (r'[A-Z][a-zA-Z0-9_\']*'+_nkw, Name.Constant),
- (r'[a-z_][a-zA-Z0-9_\']*'+_nkw, Name),
- (r'@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Instance),
- (r'@@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Class),
- (r'\(', Punctuation),
- (r'\)', Punctuation),
- (r'\[', Punctuation),
- (r'\]', Punctuation),
- (r'\{', Punctuation),
- (r'\}', right_angle_bracket),
- (r';', Punctuation),
- (r',', Punctuation),
- (r'<<=', Operator),
- (r'>>=', Operator),
- (r'<<', Operator),
- (r'>>', Operator),
- (r'==', Operator),
- (r'!=', Operator),
- (r'=>', Operator),
- (r'=', Operator),
- (r'<=>', Operator),
- (r'<=', Operator),
- (r'>=', Operator),
- (r'<', Operator),
- (r'>', Operator),
- (r'\+\+', Operator),
- (r'\+=', Operator),
- (r'-=', Operator),
- (r'\*\*=', Operator),
- (r'\*=', Operator),
- (r'\*\*', Operator),
- (r'\*', Operator),
- (r'/=', Operator),
- (r'\+', Operator),
- (r'-', Operator),
- (r'/', Operator),
- (r'%=', Operator),
- (r'%', Operator),
- (r'^=', Operator),
- (r'&&=', Operator),
- (r'&=', Operator),
- (r'&&', Operator),
- (r'&', Operator),
- (r'\|\|=', Operator),
- (r'\|=', Operator),
- (r'\|\|', Operator),
- (r'\|', Operator),
- (r'!', Operator),
- (r'\.\.\.', Operator),
- (r'\.\.', Operator),
- (r'\.', Operator),
- (r'::', Operator),
- (r':', Operator),
- (r'(\s|\n)+', Whitespace),
- (r'[a-z_][a-zA-Z0-9_\']*', Name.Variable),
- ],
- }
-
-
-class SlashLexer(DelegatingLexer):
- """
- Lexer for the Slash programming language.
-
- .. versionadded:: 2.4
- """
-
- name = 'Slash'
- aliases = ['slash']
- filenames = ['*.sla']
-
- def __init__(self, **options):
- from pygments.lexers.web import HtmlLexer
- super().__init__(HtmlLexer, SlashLanguageLexer, **options)
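
Because SlashLexer is a DelegatingLexer, the markup around the <% ... %>
blocks is tokenized by HtmlLexer while only the embedded expressions go
through SlashLanguageLexer. A small sketch with an invented template:

    from pygments.lexers.slash import SlashLexer

    template = "<p>Total: <%= 1 + 2 %></p>\n"
    for ttype, value in SlashLexer().get_tokens(template):
        print(ttype, repr(value))
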
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py b/venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py
deleted file mode 100644
index 58d870e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py
+++ /dev/null
@@ -1,196 +0,0 @@
-"""
- pygments.lexers.smalltalk
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Smalltalk and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SmalltalkLexer', 'NewspeakLexer']
-
-
-class SmalltalkLexer(RegexLexer):
- """
- For Smalltalk syntax.
- Contributed by Stefan Matthias Aust.
- Rewritten by Nils Winter.
-
- .. versionadded:: 0.10
- """
- name = 'Smalltalk'
- url = 'http://www.smalltalk.org/'
- filenames = ['*.st']
- aliases = ['smalltalk', 'squeak', 'st']
- mimetypes = ['text/x-smalltalk']
-
- tokens = {
- 'root': [
- (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
- include('squeak fileout'),
- include('whitespaces'),
- include('method definition'),
- (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
- include('objects'),
- (r'\^|\:=|\_', Operator),
- # temporaries
- (r'[\]({}.;!]', Text),
- ],
- 'method definition': [
- # Not perfect: these patterns can't allow leading whitespace
- # without breaking everything
- (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
- (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
- bygroups(Name.Function, Text, Name.Variable, Text)),
- ],
- 'blockvariables': [
- include('whitespaces'),
- (r'(:)(\s*)(\w+)',
- bygroups(Operator, Text, Name.Variable)),
- (r'\|', Operator, '#pop'),
- default('#pop'), # else pop
- ],
- 'literals': [
- (r"'(''|[^'])*'", String, 'afterobject'),
- (r'\$.', String.Char, 'afterobject'),
- (r'#\(', String.Symbol, 'parenth'),
- (r'\)', Text, 'afterobject'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
- ],
- '_parenth_helper': [
- include('whitespaces'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
- (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
- # literals
- (r"'(''|[^'])*'", String),
- (r'\$.', String.Char),
- (r'#*\(', String.Symbol, 'inner_parenth'),
- ],
- 'parenth': [
- # This state is a bit tricky since
- # we can't just pop this state
- (r'\)', String.Symbol, ('root', 'afterobject')),
- include('_parenth_helper'),
- ],
- 'inner_parenth': [
- (r'\)', String.Symbol, '#pop'),
- include('_parenth_helper'),
- ],
- 'whitespaces': [
- # skip whitespace and comments
- (r'\s+', Text),
- (r'"(""|[^"])*"', Comment),
- ],
- 'objects': [
- (r'\[', Text, 'blockvariables'),
- (r'\]', Text, 'afterobject'),
- (r'\b(self|super|true|false|nil|thisContext)\b',
- Name.Builtin.Pseudo, 'afterobject'),
- (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
- (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
- (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
- String.Symbol, 'afterobject'),
- include('literals'),
- ],
- 'afterobject': [
- (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
- include('whitespaces'),
- (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
- Name.Builtin, '#pop'),
- (r'\b(new\b(?!:))', Name.Builtin),
- (r'\:=|\_', Operator, '#pop'),
- (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
- (r'\b[a-zA-Z]+\w*', Name.Function),
- (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
- (r'\.', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'[\])}]', Text),
- (r'[\[({]', Text, '#pop'),
- ],
- 'squeak fileout': [
- # Squeak fileout format (optional)
- (r'^"(""|[^"])*"!', Keyword),
- (r"^'(''|[^'])*'!", Keyword),
- (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
- bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
- (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
- bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
- (r'^(\w+)( subclass: )(#\w+)'
- r'(\s+instanceVariableNames: )(.*?)'
- r'(\s+classVariableNames: )(.*?)'
- r'(\s+poolDictionaries: )(.*?)'
- r'(\s+category: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
- String, Keyword, String, Keyword, String, Keyword)),
- (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String, Keyword)),
- (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
- (r'! !$', Keyword),
- ],
- }
-
-
-class NewspeakLexer(RegexLexer):
- """
- For Newspeak syntax.
-
- .. versionadded:: 1.1
- """
- name = 'Newspeak'
- url = 'http://newspeaklanguage.org/'
- filenames = ['*.ns2']
- aliases = ['newspeak', ]
- mimetypes = ['text/x-newspeak']
-
- tokens = {
- 'root': [
- (r'\b(Newsqueak2)\b', Keyword.Declaration),
- (r"'[^']*'", String),
- (r'\b(class)(\s+)(\w+)(\s*)',
- bygroups(Keyword.Declaration, Text, Name.Class, Text)),
- (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
- Keyword),
- (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'(\w+)(\s*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- (r'<\w+>', Comment.Special),
- include('expressionstat'),
- include('whitespace')
- ],
-
- 'expressionstat': [
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'\d+', Number.Integer),
- (r':\w+', Name.Variable),
- (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
- (r'\w+:', Name.Function),
- (r'\w+', Name.Variable),
- (r'\(|\)', Punctuation),
- (r'\[|\]', Punctuation),
- (r'\{|\}', Punctuation),
-
- (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
- (r'\.|;', Punctuation),
- include('whitespace'),
- include('literals'),
- ],
- 'literals': [
- (r'\$.', String),
- (r"'[^']*'", String),
- (r"#'[^']*'", String.Symbol),
- (r"#\w+:?", String.Symbol),
- (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
- ],
- 'whitespace': [
- (r'\s+', Text),
- (r'"[^"]*"', Comment)
- ],
- }
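
Both lexers register aliases and filename patterns, so they can be resolved
without importing this module directly (a sketch; "Point.st" is an invented
filename):

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    print(get_lexer_for_filename("Point.st").name)   # Smalltalk
    print(get_lexer_by_name("newspeak").name)        # Newspeak
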
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/smithy.py b/venv/lib/python3.11/site-packages/pygments/lexers/smithy.py
deleted file mode 100644
index 3f48bfa..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/smithy.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
- pygments.lexers.smithy
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Smithy IDL.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Keyword, Name, String, \
- Number, Whitespace, Punctuation
-
-__all__ = ['SmithyLexer']
-
-
-class SmithyLexer(RegexLexer):
- """
- For the Smithy IDL.
-
- .. versionadded:: 2.10
- """
- name = 'Smithy'
- url = 'https://awslabs.github.io/smithy/'
- filenames = ['*.smithy']
- aliases = ['smithy']
-
- unquoted = r'[A-Za-z0-9_\.#$-]+'
- identifier = r"[A-Za-z0-9_\.#$-]+"
-
- simple_shapes = (
- 'use', 'byte', 'short', 'integer', 'long', 'float', 'document',
- 'double', 'bigInteger', 'bigDecimal', 'boolean', 'blob', 'string',
- 'timestamp',
- )
-
- aggregate_shapes = (
- 'apply', 'list', 'map', 'set', 'structure', 'union', 'resource',
- 'operation', 'service', 'trait'
- )
-
- tokens = {
- 'root': [
- (r'///.*$', Comment.Multiline),
- (r'//.*$', Comment),
- (r'@[0-9a-zA-Z\.#-]*', Name.Decorator),
- (r'(=)', Name.Decorator),
- (r'^(\$version)(:)(.+)',
- bygroups(Keyword.Declaration, Name.Decorator, Name.Class)),
- (r'^(namespace)(\s+' + identifier + r')\b',
- bygroups(Keyword.Declaration, Name.Class)),
- (words(simple_shapes,
- prefix=r'^', suffix=r'(\s+' + identifier + r')\b'),
- bygroups(Keyword.Declaration, Name.Class)),
- (words(aggregate_shapes,
- prefix=r'^', suffix=r'(\s+' + identifier + r')'),
- bygroups(Keyword.Declaration, Name.Class)),
- (r'^(metadata)(\s+)((?:\S+)|(?:\"[^"]+\"))(\s*)(=)',
- bygroups(Keyword.Declaration, Whitespace, Name.Class,
- Whitespace, Name.Decorator)),
- (r"(true|false|null)", Keyword.Constant),
- (r"(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)", Number),
- (identifier + ":", Name.Label),
- (identifier, Name.Variable.Class),
- (r'\[', Text, "#push"),
- (r'\]', Text, "#pop"),
- (r'\(', Text, "#push"),
- (r'\)', Text, "#pop"),
- (r'\{', Text, "#push"),
- (r'\}', Text, "#pop"),
- (r'"{3}(\\\\|\n|\\")*"{3}', String.Doc),
- (r'"(\\\\|\n|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\n|\\'|[^'])*'", String.Single),
- (r'[:,]+', Punctuation),
- (r'\s+', Whitespace),
- ]
- }
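
Since shape and control keywords are emitted as Keyword.Declaration, one way
to exercise the rules above is to filter those tokens out of a model (a
sketch only; the model text is invented):

    from pygments.lexers.smithy import SmithyLexer
    from pygments.token import Keyword

    model = ('$version: "2"\n'
             'namespace example.weather\n'
             'structure Forecast {\n'
             '    chanceOfRain: Float\n'
             '}\n')
    decls = [value for ttype, value in SmithyLexer().get_tokens(model)
             if ttype in Keyword.Declaration]
    print(decls)   # expected: ['$version', 'namespace', 'structure']
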
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/smv.py b/venv/lib/python3.11/site-packages/pygments/lexers/smv.py
deleted file mode 100644
index 2584086..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/smv.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
- pygments.lexers.smv
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the SMV languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, Text
-
-__all__ = ['NuSMVLexer']
-
-
-class NuSMVLexer(RegexLexer):
- """
- Lexer for the NuSMV language.
-
- .. versionadded:: 2.2
- """
-
- name = 'NuSMV'
- aliases = ['nusmv']
- filenames = ['*.smv']
- mimetypes = []
-
- tokens = {
- 'root': [
- # Comments
- (r'(?s)\/\-\-.*?\-\-/', Comment),
- (r'--.*\n', Comment),
-
- # Reserved
- (words(('MODULE', 'DEFINE', 'MDEFINE', 'CONSTANTS', 'VAR', 'IVAR',
- 'FROZENVAR', 'INIT', 'TRANS', 'INVAR', 'SPEC', 'CTLSPEC',
- 'LTLSPEC', 'PSLSPEC', 'COMPUTE', 'NAME', 'INVARSPEC',
- 'FAIRNESS', 'JUSTICE', 'COMPASSION', 'ISA', 'ASSIGN',
- 'CONSTRAINT', 'SIMPWFF', 'CTLWFF', 'LTLWFF', 'PSLWFF',
- 'COMPWFF', 'IN', 'MIN', 'MAX', 'MIRROR', 'PRED',
- 'PREDICATES'), suffix=r'(?![\w$#-])'),
- Keyword.Declaration),
- (r'process(?![\w$#-])', Keyword),
- (words(('array', 'of', 'boolean', 'integer', 'real', 'word'),
- suffix=r'(?![\w$#-])'), Keyword.Type),
- (words(('case', 'esac'), suffix=r'(?![\w$#-])'), Keyword),
- (words(('word1', 'bool', 'signed', 'unsigned', 'extend', 'resize',
- 'sizeof', 'uwconst', 'swconst', 'init', 'self', 'count',
- 'abs', 'max', 'min'), suffix=r'(?![\w$#-])'),
- Name.Builtin),
- (words(('EX', 'AX', 'EF', 'AF', 'EG', 'AG', 'E', 'F', 'O', 'G',
- 'H', 'X', 'Y', 'Z', 'A', 'U', 'S', 'V', 'T', 'BU', 'EBF',
- 'ABF', 'EBG', 'ABG', 'next', 'mod', 'union', 'in', 'xor',
- 'xnor'), suffix=r'(?![\w$#-])'),
- Operator.Word),
- (words(('TRUE', 'FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
-
- # Names
- (r'[a-zA-Z_][\w$#-]*', Name.Variable),
-
- # Operators
- (r':=', Operator),
- (r'[-&|+*/<>!=]', Operator),
-
- # Literals
- (r'\-?\d+\b', Number.Integer),
- (r'0[su][bB]\d*_[01_]+', Number.Bin),
- (r'0[su][oO]\d*_[0-7_]+', Number.Oct),
- (r'0[su][dD]\d*_[\d_]+', Number.Decimal),
- (r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
-
- # Whitespace, punctuation and the rest
- (r'\s+', Text.Whitespace),
- (r'[()\[\]{};?:.,]', Punctuation),
- ],
- }
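
For instance (purely illustrative; the model below is made up), a NuSMV
module can be rendered to a self-contained HTML page with the standard
formatter API:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    model = ("MODULE main\n"
             "VAR b : boolean;\n"
             "ASSIGN init(b) := FALSE;\n")
    html = highlight(model, get_lexer_by_name("nusmv"),
                     HtmlFormatter(full=True, title="example.smv"))
    with open("example.html", "w") as fp:
        fp.write(html)
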
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/snobol.py b/venv/lib/python3.11/site-packages/pygments/lexers/snobol.py
deleted file mode 100644
index 28087de..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/snobol.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""
- pygments.lexers.snobol
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the SNOBOL language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SnobolLexer']
-
-
-class SnobolLexer(RegexLexer):
- """
- Lexer for the SNOBOL4 programming language.
-
- Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
- Does not require spaces around binary operators.
-
- .. versionadded:: 1.5
- """
-
- name = "Snobol"
- aliases = ["snobol"]
- filenames = ['*.snobol']
- mimetypes = ['text/x-snobol']
-
- tokens = {
- # root state, start of line
- # comments, continuation lines, and directives start in column 1
- # as do labels
- 'root': [
- (r'\*.*\n', Comment),
- (r'[+.] ', Punctuation, 'statement'),
- (r'-.*\n', Comment),
- (r'END\s*\n', Name.Label, 'heredoc'),
- (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
- (r'\s+', Text, 'statement'),
- ],
- # statement state, line after continuation or label
- 'statement': [
- (r'\s*\n', Text, '#pop'),
- (r'\s+', Text),
- (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
- r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
- r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
- r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
- Name.Builtin),
- (r'[A-Za-z][\w.]*', Name),
- # ASCII equivalents of original operators
- # | for the EBCDIC equivalent, ! likewise
- # \ for EBCDIC negation
- (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
- (r'"[^"]*"', String),
- (r"'[^']*'", String),
- # Accept SPITBOL syntax for real numbers
- # as well as Macro SNOBOL4
- (r'[0-9]+(?=[^.EeDd])', Number.Integer),
- (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
- # Goto
- (r':', Punctuation, 'goto'),
- (r'[()<>,;]', Punctuation),
- ],
- # Goto block
- 'goto': [
- (r'\s*\n', Text, "#pop:2"),
- (r'\s+', Text),
- (r'F|S', Keyword),
- (r'(\()([A-Za-z][\w.]*)(\))',
- bygroups(Punctuation, Name.Label, Punctuation))
- ],
- # everything after the END statement is basically one
- # big heredoc.
- 'heredoc': [
- (r'.*\n', String.Heredoc)
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/solidity.py b/venv/lib/python3.11/site-packages/pygments/lexers/solidity.py
deleted file mode 100644
index f1654e4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/solidity.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""
- pygments.lexers.solidity
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Solidity.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['SolidityLexer']
-
-
-class SolidityLexer(RegexLexer):
- """
- For Solidity source code.
-
- .. versionadded:: 2.5
- """
-
- name = 'Solidity'
- aliases = ['solidity']
- filenames = ['*.sol']
- mimetypes = []
-
- datatype = (
- r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
- r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
- r'|216|224|232|240|248|256)?))\b'
- )
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
- (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword, Whitespace, Name.Entity)),
- (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
- r'([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
- (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, Whitespace, Name.Variable)),
- (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
- (words((
- 'block', 'break', 'constant', 'constructor', 'continue',
- 'contract', 'do', 'else', 'external', 'false', 'for',
- 'function', 'if', 'import', 'inherited', 'internal', 'is',
- 'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
- 'payable', 'private', 'public', 'require', 'return',
- 'returns', 'struct', 'suicide', 'throw', 'this', 'true',
- 'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
- (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (datatype, Keyword.Type),
- include('constants'),
- (r'[a-zA-Z_]\w*', Text),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[.;{}(),\[\]]', Punctuation)
- ],
- 'comments': [
- (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
- (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
- ],
- 'constants': [
- (r'("(\\"|.)*?")', String.Double),
- (r"('(\\'|.)*?')", String.Single),
- (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
- (r'\b\d+\b', Number.Decimal),
- ],
- 'pragma': [
- include('whitespace'),
- include('comments'),
- (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
- bygroups(Operator, Whitespace, Keyword)),
- (r';', Punctuation, '#pop')
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- (r'\n', Whitespace)
- ]
- }
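
A quick way to sanity-check these rules on a snippet is to tally the token
types they produce (a sketch; the contract source is invented):

    from collections import Counter

    from pygments import lex
    from pygments.lexers.solidity import SolidityLexer

    source = ("pragma solidity ^0.8.0;\n"
              "contract Counter {\n"
              "    uint256 public count;\n"
              "}\n")
    print(Counter(ttype for ttype, _ in lex(source, SolidityLexer())))
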
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sophia.py b/venv/lib/python3.11/site-packages/pygments/lexers/sophia.py
deleted file mode 100644
index fc4928c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sophia.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""
- pygments.lexers.sophia
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Sophia.
-
- Derived from pygments/lexers/reason.py.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, default, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text
-
-__all__ = ['SophiaLexer']
-
-class SophiaLexer(RegexLexer):
- """
- A Sophia lexer.
-
- .. versionadded:: 2.11
- """
-
- name = 'Sophia'
- aliases = ['sophia']
- filenames = ['*.aes']
- mimetypes = []
-
- keywords = (
- 'contract', 'include', 'let', 'switch', 'type', 'record', 'datatype',
- 'if', 'elif', 'else', 'function', 'stateful', 'payable', 'public',
- 'entrypoint', 'private', 'indexed', 'namespace', 'interface', 'main',
- 'using', 'as', 'for', 'hiding',
- )
-
- builtins = ('state', 'put', 'abort', 'require')
-
- word_operators = ('mod', 'band', 'bor', 'bxor', 'bnot')
-
- primitive_types = ('int', 'address', 'bool', 'bits', 'bytes', 'string',
- 'list', 'option', 'char', 'unit', 'map', 'event',
- 'hash', 'signature', 'oracle', 'oracle_query')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text.Whitespace),
- (r'(true|false)\b', Keyword.Constant),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Class, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Function),
- (r'//.*?\n', Comment.Single),
- (r'\/\*(?!/)', Comment.Multiline, 'comment'),
-
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'#[\da-fA-F][\da-fA-F_]*', Name.Label),
- (r'\d[\d_]*', Number.Integer),
-
- (words(keywords, suffix=r'\b'), Keyword),
- (words(builtins, suffix=r'\b'), Name.Builtin),
- (words(word_operators, prefix=r'\b', suffix=r'\b'), Operator.Word),
- (words(primitive_types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- (r'[=!<>+\\*/:&|?~@^-]', Operator.Word),
- (r'[.;:{}(),\[\]]', Punctuation),
-
- (r"(ak_|ok_|oq_|ct_)[\w']*", Name.Label),
- (r"[^\W\d][\w']*", Name),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'[a-z][\w]*", Name.Variable),
-
- (r'"', String.Double, 'string')
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'\/\*', Comment.Multiline, '#push'),
- (r'\*\/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Function),
- (r'[A-Z][\w\']*', Name.Function, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/special.py b/venv/lib/python3.11/site-packages/pygments/lexers/special.py
deleted file mode 100644
index 45565ac..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/special.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
- pygments.lexers.special
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Special lexers.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import ast
-
-from pygments.lexer import Lexer, line_re
-from pygments.token import Token, Error, Text, Generic
-from pygments.util import get_choice_opt
-
-
-__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
-
-
-class TextLexer(Lexer):
- """
- "Null" lexer, doesn't highlight anything.
- """
- name = 'Text only'
- aliases = ['text']
- filenames = ['*.txt']
- mimetypes = ['text/plain']
- priority = 0.01
-
- def get_tokens_unprocessed(self, text):
- yield 0, Text, text
-
- def analyse_text(text):
- return TextLexer.priority
-
-
-class OutputLexer(Lexer):
- """
- Simple lexer that highlights everything as ``Token.Generic.Output``.
-
- .. versionadded:: 2.10
- """
- name = 'Text output'
- aliases = ['output']
-
- def get_tokens_unprocessed(self, text):
- yield 0, Generic.Output, text
-
-
-_ttype_cache = {}
-
-
-class RawTokenLexer(Lexer):
- """
- Recreate a token stream formatted with the `RawTokenFormatter`.
-
- Additional options accepted:
-
- `compress`
- If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
- the given compression algorithm before lexing (default: ``""``).
- """
- name = 'Raw token data'
- aliases = []
- filenames = []
- mimetypes = ['application/x-pygments-tokens']
-
- def __init__(self, **options):
- self.compress = get_choice_opt(options, 'compress',
- ['', 'none', 'gz', 'bz2'], '')
- Lexer.__init__(self, **options)
-
- def get_tokens(self, text):
- if self.compress:
- if isinstance(text, str):
- text = text.encode('latin1')
- try:
- if self.compress == 'gz':
- import gzip
- text = gzip.decompress(text)
- elif self.compress == 'bz2':
- import bz2
- text = bz2.decompress(text)
- except OSError:
- yield Error, text.decode('latin1')
- if isinstance(text, bytes):
- text = text.decode('latin1')
-
- # do not call Lexer.get_tokens() because stripping is not optional.
- text = text.strip('\n') + '\n'
- for i, t, v in self.get_tokens_unprocessed(text):
- yield t, v
-
- def get_tokens_unprocessed(self, text):
- length = 0
- for match in line_re.finditer(text):
- try:
- ttypestr, val = match.group().rstrip().split('\t', 1)
- ttype = _ttype_cache.get(ttypestr)
- if not ttype:
- ttype = Token
- ttypes = ttypestr.split('.')[1:]
- for ttype_ in ttypes:
- if not ttype_ or not ttype_[0].isupper():
- raise ValueError('malformed token name')
- ttype = getattr(ttype, ttype_)
- _ttype_cache[ttypestr] = ttype
- val = ast.literal_eval(val)
- if not isinstance(val, str):
- raise ValueError('expected str')
- except (SyntaxError, ValueError):
- val = match.group()
- ttype = Error
- yield length, ttype, val
- length += len(val)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/spice.py b/venv/lib/python3.11/site-packages/pygments/lexers/spice.py
deleted file mode 100644
index 5c2d8f2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/spice.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
- pygments.lexers.spice
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Spice programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['SpiceLexer']
-
-
-class SpiceLexer(RegexLexer):
- """
- For Spice source.
-
- .. versionadded:: 2.11
- """
- name = 'Spice'
- url = 'https://www.spicelang.com'
- filenames = ['*.spice']
- aliases = ['spice', 'spicelang']
- mimetypes = ['text/x-spice']
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'\\\n', Text),
- # comments
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*]{2}(.|\n)*?[*](\\\n)?/', String.Doc),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- # keywords
- (r'(import|as)\b', Keyword.Namespace),
- (r'(f|p|type|struct|interface|enum|alias|operator)\b', Keyword.Declaration),
- (words(('if', 'else', 'for', 'foreach', 'do', 'while', 'break',
- 'continue', 'return', 'assert', 'unsafe', 'ext'), suffix=r'\b'), Keyword),
- (words(('const', 'signed', 'unsigned', 'inline', 'public', 'heap'),
- suffix=r'\b'), Keyword.Pseudo),
- (words(('new', 'switch', 'case', 'yield', 'stash', 'pick', 'sync',
- 'class'), suffix=r'\b'), Keyword.Reserved),
- (r'(true|false|nil)\b', Keyword.Constant),
- (words(('double', 'int', 'short', 'long', 'byte', 'char', 'string',
- 'bool', 'dyn'), suffix=r'\b'), Keyword.Type),
- (words(('printf', 'sizeof', 'alignof', 'len', 'panic'), suffix=r'\b(\()'),
- bygroups(Name.Builtin, Punctuation)),
- # numeric literals
- (r'[-]?[0-9]*[.][0-9]+([eE][+-]?[0-9]+)?', Number.Double),
- (r'0[bB][01]+[slu]?', Number.Bin),
- (r'0[oO][0-7]+[slu]?', Number.Oct),
- (r'0[xXhH][0-9a-fA-F]+[slu]?', Number.Hex),
- (r'(0[dD])?[0-9]+[slu]?', Number.Integer),
- # string literal
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # char literal
- (r'\'(\\\\|\\[^\\]|[^\'\\])\'', String.Char),
- # tokens
- (r'<<=|>>=|<<|>>|<=|>=|\+=|-=|\*=|/=|\%=|\|=|&=|\^=|&&|\|\||&|\||'
- r'\+\+|--|\%|\^|\~|==|!=|->|::|[.]{3}|#!|#|[+\-*/&]', Operator),
- (r'[|<>=!()\[\]{}.,;:\?]', Punctuation),
- # identifiers
- (r'[^\W\d]\w*', Name.Other),
- ]
- }
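A short usage sketch for the lexer above (not part of the deleted file; the Spice
snippet is illustrative and makes no claim about exact Spice syntax):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.spice import SpiceLexer

code = '''
f<int> main() {
    printf("the answer is %d", 42);
    return 0;
}
'''

# Render ANSI-coloured terminal output; any other formatter would work as well.
print(highlight(code, SpiceLexer(), TerminalFormatter()))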
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sql.py b/venv/lib/python3.11/site-packages/pygments/lexers/sql.py
deleted file mode 100644
index 2880841..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sql.py
+++ /dev/null
@@ -1,1027 +0,0 @@
-"""
- pygments.lexers.sql
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for various SQL dialects and related interactive sessions.
-
- Postgres specific lexers:
-
- `PostgresLexer`
- A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
- lexer are:
-
- - keywords and data types list parsed from the PG docs (run the
- `_postgres_builtins` module to update them);
- - Content of $-strings parsed using a specific lexer, e.g. the content
- of a PL/Python function is parsed using the Python lexer;
- - parse PG specific constructs: E-strings, $-strings, U&-strings,
- different operators and punctuation.
-
- `PlPgsqlLexer`
- A lexer for the PL/pgSQL language. Adds a few specific construct on
- top of the PG SQL lexer (such as <<label>>).
-
- `PostgresConsoleLexer`
- A lexer to highlight an interactive psql session:
-
- - identifies the prompt and does its best to detect the end of a command
- in multiline statements where not all the lines are prefixed by a
- prompt, telling them apart from the output;
- - highlights errors in the output and notification levels;
- - handles psql backslash commands.
-
- `PostgresExplainLexer`
- A lexer to highlight Postgres execution plan.
-
- The ``tests/examplefiles`` contains a few test files with data to be
- parsed by these lexers.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
-from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
- Keyword, Name, String, Number, Generic, Literal
-from pygments.lexers import get_lexer_by_name, ClassNotFound
-
-from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
- PSEUDO_TYPES, PLPGSQL_KEYWORDS, EXPLAIN_KEYWORDS
-from pygments.lexers._mysql_builtins import \
- MYSQL_CONSTANTS, \
- MYSQL_DATATYPES, \
- MYSQL_FUNCTIONS, \
- MYSQL_KEYWORDS, \
- MYSQL_OPTIMIZER_HINTS
-
-from pygments.lexers import _tsql_builtins
-
-
-__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
- 'PostgresExplainLexer', 'SqlLexer', 'TransactSqlLexer',
- 'MySqlLexer', 'SqliteConsoleLexer', 'RqlLexer']
-
-line_re = re.compile('.*?\n')
-sqlite_prompt_re = re.compile(r'^(?:sqlite| ...)>(?= )')
-
-language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
-
-do_re = re.compile(r'\bDO\b', re.IGNORECASE)
-
-# Regular expressions for analyse_text()
-name_between_bracket_re = re.compile(r'\[[a-zA-Z_]\w*\]')
-name_between_backtick_re = re.compile(r'`[a-zA-Z_]\w*`')
-tsql_go_re = re.compile(r'\bgo\b', re.IGNORECASE)
-tsql_declare_re = re.compile(r'\bdeclare\s+@', re.IGNORECASE)
-tsql_variable_re = re.compile(r'@[a-zA-Z_]\w*\b')
-
-
-def language_callback(lexer, match):
- """Parse the content of a $-string using a lexer
-
- The lexer is chosen by looking for a nearby LANGUAGE clause, or assumed
- to be plpgsql if we are inside a DO statement and no LANGUAGE has been found.
- """
- lx = None
- m = language_re.match(lexer.text[match.end():match.end()+100])
- if m is not None:
- lx = lexer._get_lexer(m.group(1))
- else:
- m = list(language_re.finditer(
- lexer.text[max(0, match.start()-100):match.start()]))
- if m:
- lx = lexer._get_lexer(m[-1].group(1))
- else:
- m = list(do_re.finditer(
- lexer.text[max(0, match.start()-25):match.start()]))
- if m:
- lx = lexer._get_lexer('plpgsql')
-
- # 1 = $, 2 = delimiter, 3 = $
- yield (match.start(1), String, match.group(1))
- yield (match.start(2), String.Delimiter, match.group(2))
- yield (match.start(3), String, match.group(3))
- # 4 = string contents
- if lx:
- yield from lx.get_tokens_unprocessed(match.group(4))
- else:
- yield (match.start(4), String, match.group(4))
- # 5 = $, 6 = delimiter, 7 = $
- yield (match.start(5), String, match.group(5))
- yield (match.start(6), String.Delimiter, match.group(6))
- yield (match.start(7), String, match.group(7))
-
-
-class PostgresBase:
- """Base class for Postgres-related lexers.
-
- This is implemented as a mixin to avoid the Lexer metaclass kicking in.
- This way the different lexers don't have a common Lexer ancestor. If they
- had, _tokens could be created on this ancestor and not updated for the
- other classes, resulting e.g. in PL/pgSQL being parsed as SQL. This shortcoming
- seems to suggest that regexp lexers are not really subclassable.
- """
- def get_tokens_unprocessed(self, text, *args):
- # Have a copy of the entire text to be used by `language_callback`.
- self.text = text
- yield from super().get_tokens_unprocessed(text, *args)
-
- def _get_lexer(self, lang):
- if lang.lower() == 'sql':
- return get_lexer_by_name('postgresql', **self.options)
-
- tries = [lang]
- if lang.startswith('pl'):
- tries.append(lang[2:])
- if lang.endswith('u'):
- tries.append(lang[:-1])
- if lang.startswith('pl') and lang.endswith('u'):
- tries.append(lang[2:-1])
-
- for lx in tries:
- try:
- return get_lexer_by_name(lx, **self.options)
- except ClassNotFound:
- pass
- else:
- # TODO: better logging
- # print >>sys.stderr, "language not found:", lang
- return None
-
-
-class PostgresLexer(PostgresBase, RegexLexer):
- """
- Lexer for the PostgreSQL dialect of SQL.
-
- .. versionadded:: 1.5
- """
-
- name = 'PostgreSQL SQL dialect'
- aliases = ['postgresql', 'postgres']
- mimetypes = ['text/x-postgresql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'--.*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(' + '|'.join(s.replace(" ", r"\s+")
- for s in DATATYPES + PSEUDO_TYPES) + r')\b',
- Name.Builtin),
- (words(KEYWORDS, suffix=r'\b'), Keyword),
- (r'[+*/<>=~!@#%^&|`?-]+', Operator),
- (r'::', Operator), # cast
- (r'\$\d+', Name.Variable),
- (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r"((?:E|U&)?)(')", bygroups(String.Affix, String.Single), 'string'),
- # quoted identifier
- (r'((?:U&)?)(")', bygroups(String.Affix, String.Name), 'quoted-ident'),
- (r'(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)', language_callback),
- (r'[a-z_]\w*', Name),
-
- # psql variable in SQL
- (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
-
- (r'[;:()\[\]{},.]', Punctuation),
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ],
- 'string': [
- (r"[^']+", String.Single),
- (r"''", String.Single),
- (r"'", String.Single, '#pop'),
- ],
- 'quoted-ident': [
- (r'[^"]+', String.Name),
- (r'""', String.Name),
- (r'"', String.Name, '#pop'),
- ],
- }
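The module docstring notes that the content of $-strings is parsed with a nested
lexer chosen by language_callback. A hypothetical demonstration of that behaviour
(not part of the deleted file; the function body is illustrative):

from pygments.lexers.sql import PostgresLexer

sql = """
CREATE FUNCTION add_one(a integer) RETURNS integer AS $$
return a + 1
$$ LANGUAGE plpython3u;
"""

# language_callback finds the nearby LANGUAGE clause, resolves it through
# PostgresBase._get_lexer ('plpython3u' eventually falls back to 'python3'),
# and delegates the body between the $$ delimiters to the Python lexer.
for _, tokentype, value in PostgresLexer().get_tokens_unprocessed(sql):
    if value.strip():
        print(tokentype, repr(value))
# The 'return' inside the $$ body comes back as Token.Keyword from the nested
# Python lexer instead of being swallowed into a single String token.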
-
-
-class PlPgsqlLexer(PostgresBase, RegexLexer):
- """
- Handle the extra syntax of the PL/pgSQL language.
-
- .. versionadded:: 1.5
- """
- name = 'PL/pgSQL'
- aliases = ['plpgsql']
- mimetypes = ['text/x-plpgsql']
-
- flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
-
- # extend the keywords list
- for i, pattern in enumerate(tokens['root']):
- if pattern[1] == Keyword:
- tokens['root'][i] = (
- words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
- Keyword)
- del i
- break
- else:
- assert 0, "SQL keywords not found"
-
- # Add specific PL/pgSQL rules (before the SQL ones)
- tokens['root'][:0] = [
- (r'\%[a-z]\w*\b', Name.Builtin), # actually, a datatype
- (r':=', Operator),
- (r'\<\<[a-z]\w*\>\>', Name.Label),
- (r'\#[a-z]\w*\b', Keyword.Pseudo), # #variable_conflict
- ]
-
-
-class PsqlRegexLexer(PostgresBase, RegexLexer):
- """
- Extend the PostgresLexer, adding support for psql-specific commands.
-
- This is not a complete psql lexer yet as it lacks prompt support
- and output rendering.
- """
-
- name = 'PostgreSQL console - regexp based lexer'
- aliases = [] # not public
-
- flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
-
- tokens['root'].append(
- (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
- tokens['psql-command'] = [
- (r'\n', Text, 'root'),
- (r'\s+', Whitespace),
- (r'\\[^\s]+', Keyword.Pseudo),
- (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Backtick),
- (r"[^\s]+", String.Symbol),
- ]
-
-
-re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
-re_psql_command = re.compile(r'\s*\\')
-re_end_command = re.compile(r';\s*(--.*?)?$')
-re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
-re_error = re.compile(r'(ERROR|FATAL):')
-re_message = re.compile(
- r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
- r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
-
-
-class lookahead:
- """Wrap an iterator and allow pushing back an item."""
- def __init__(self, x):
- self.iter = iter(x)
- self._nextitem = None
-
- def __iter__(self):
- return self
-
- def send(self, i):
- self._nextitem = i
- return i
-
- def __next__(self):
- if self._nextitem is not None:
- ni = self._nextitem
- self._nextitem = None
- return ni
- return next(self.iter)
- next = __next__
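A tiny sketch (not from the deleted file) of the push-back behaviour this helper
provides to the console lexer below: send() stores an item that the next call to
next() returns before the wrapped iterator is resumed.

from pygments.lexers.sql import lookahead

lines = lookahead(['one\n', 'two\n', 'three\n'])
first = next(lines)            # 'one\n'
lines.send(first)              # push the line back
print(next(lines) == first)    # True: the pushed-back line comes out first
print(next(lines))             # 'two\n'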
-
-
-class PostgresConsoleLexer(Lexer):
- """
- Lexer for psql sessions.
-
- .. versionadded:: 1.5
- """
-
- name = 'PostgreSQL console (psql)'
- aliases = ['psql', 'postgresql-console', 'postgres-console']
- mimetypes = ['text/x-postgresql-psql']
-
- def get_tokens_unprocessed(self, data):
- sql = PsqlRegexLexer(**self.options)
-
- lines = lookahead(line_re.findall(data))
-
- # prompt-output cycle
- while 1:
-
- # consume the lines of the command: start with an optional prompt
- # and continue until the end of command is detected
- curcode = ''
- insertions = []
- for line in lines:
- # Identify a shell prompt in case of psql commandline example
- if line.startswith('$') and not curcode:
- lexer = get_lexer_by_name('console', **self.options)
- yield from lexer.get_tokens_unprocessed(line)
- break
-
- # Identify a psql prompt
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- insertions.append((len(curcode),
- [(0, Generic.Prompt, mprompt.group())]))
- curcode += line[len(mprompt.group()):]
- else:
- curcode += line
-
- # Check if this is the end of the command
- # TODO: better handle multiline comments at the end with
- # a lexer with an external state?
- if re_psql_command.match(curcode) \
- or re_end_command.search(curcode):
- break
-
- # Emit the combined stream of command and prompt(s)
- yield from do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode))
-
- # Emit the output lines
- out_token = Generic.Output
- for line in lines:
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- # push the line back to have it processed by the prompt
- lines.send(line)
- break
-
- mmsg = re_message.match(line)
- if mmsg is not None:
- if mmsg.group(1).startswith("ERROR") \
- or mmsg.group(1).startswith("FATAL"):
- out_token = Generic.Error
- yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
- yield (mmsg.start(2), out_token, mmsg.group(2))
- else:
- yield (0, out_token, line)
- else:
- return
-
-
-class PostgresExplainLexer(RegexLexer):
- """
- Handle PostgreSQL EXPLAIN output
-
- .. versionadded:: 2.15
- """
-
- name = 'PostgreSQL EXPLAIN dialect'
- aliases = ['postgres-explain']
- filenames = ['*.explain']
- mimetypes = ['text/x-postgresql-explain']
-
- tokens = {
- 'root': [
- (r'(:|\(|\)|ms|kB|->|\.\.|\,)', Punctuation),
- (r'(\s+)', Whitespace),
-
- # This matches the estimated cost and the counters actually measured with ANALYZE.
- # Then we move to the instrumentation state.
- (r'(cost)(=?)', bygroups(Name.Class, Punctuation), 'instrumentation'),
- (r'(actual)( )(=?)', bygroups(Name.Class, Whitespace, Punctuation), 'instrumentation'),
-
- # Misc keywords
- (words(('actual', 'Memory Usage', 'Memory', 'Buckets', 'Batches',
- 'originally', 'row', 'rows', 'Hits', 'Misses',
- 'Evictions', 'Overflows'), suffix=r'\b'),
- Comment.Single),
-
- (r'(hit|read|dirtied|written|write|time|calls)(=)', bygroups(Comment.Single, Operator)),
- (r'(shared|temp|local)', Keyword.Pseudo),
-
- # We move to sort state in order to emphasize specific keywords (especially disk access)
- (r'(Sort Method)(: )', bygroups(Comment.Preproc, Punctuation), 'sort'),
-
- # These keywords can be followed by an object, like a table
- (r'(Sort Key|Group Key|Presorted Key|Hash Key)(:)( )',
- bygroups(Comment.Preproc, Punctuation, Whitespace), 'object_name'),
- (r'(Cache Key|Cache Mode)(:)( )', bygroups(Comment, Punctuation, Whitespace), 'object_name'),
-
- # These keywords can be followed by a predicate
- (words(('Join Filter', 'Subplans Removed', 'Filter', 'Merge Cond',
- 'Hash Cond', 'Index Cond', 'Recheck Cond', 'Heap Blocks',
- 'TID Cond', 'Run Condition', 'Order By', 'Function Call',
- 'Table Function Call', 'Inner Unique', 'Params Evaluated',
- 'Single Copy', 'Sampling', 'One-Time Filter', 'Output',
- 'Relations', 'Remote SQL'), suffix=r'\b'),
- Comment.Preproc, 'predicate'),
-
- # Special keyword to handle ON CONFLICT
- (r'Conflict ', Comment.Preproc, 'conflict'),
-
- # Special keyword for InitPlan or SubPlan
- (r'(InitPlan|SubPlan)( )(\d+)( )',
- bygroups(Keyword, Whitespace, Number.Integer, Whitespace),
- 'init_plan'),
-
- (words(('Sort Method', 'Join Filter', 'Planning time',
- 'Planning Time', 'Execution time', 'Execution Time',
- 'Workers Planned', 'Workers Launched', 'Buffers',
- 'Planning', 'Worker', 'Query Identifier', 'Time',
- 'Full-sort Groups', 'Pre-sorted Groups'), suffix=r'\b'), Comment.Preproc),
-
- # Emphasize these keywords
-
- (words(('Rows Removed by Join Filter', 'Rows Removed by Filter',
- 'Rows Removed by Index Recheck',
- 'Heap Fetches', 'never executed'),
- suffix=r'\b'), Name.Exception),
- (r'(I/O Timings)(:)( )', bygroups(Name.Exception, Punctuation, Whitespace)),
-
- (words(EXPLAIN_KEYWORDS, suffix=r'\b'), Keyword),
-
- # join keywords
- (r'((Right|Left|Full|Semi|Anti) Join)', Keyword.Type),
- (r'(Parallel |Async |Finalize |Partial )', Comment.Preproc),
- (r'Backward', Comment.Preproc),
- (r'(Intersect|Except|Hash)', Comment.Preproc),
-
- (r'(CTE)( )(\w*)?', bygroups(Comment, Whitespace, Name.Variable)),
-
-
- # Treat "on" and "using" as a punctuation
- (r'(on|using)', Punctuation, 'object_name'),
-
-
- # strings
- (r"'(''|[^'])*'", String.Single),
- # numbers
- (r'\d+\.\d+', Number.Float),
- (r'(\d+)', Number.Integer),
-
- # boolean
- (r'(true|false)', Name.Constant),
- # explain header
- (r'\s*QUERY PLAN\s*\n\s*-+', Comment.Single),
- # Settings
- (r'(Settings)(:)( )', bygroups(Comment.Preproc, Punctuation, Whitespace), 'setting'),
-
- # Handle JIT counters
- (r'(JIT|Functions|Options|Timing)(:)', bygroups(Comment.Preproc, Punctuation)),
- (r'(Inlining|Optimization|Expressions|Deforming|Generation|Emission|Total)', Keyword.Pseudo),
-
- # Handle Triggers counters
- (r'(Trigger)( )(\S*)(:)( )',
- bygroups(Comment.Preproc, Whitespace, Name.Variable, Punctuation, Whitespace)),
-
- ],
- 'expression': [
- # matches any kind of parenthesized expression
- # the first opening paren is matched by the 'caller'
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- (r'(never executed)', Name.Exception),
- (r'[^)(]+', Comment),
- ],
- 'object_name': [
-
- # This is a cost or analyze measure
- (r'(\(cost)(=?)', bygroups(Name.Class, Punctuation), 'instrumentation'),
- (r'(\(actual)( )(=?)', bygroups(Name.Class, Whitespace, Punctuation), 'instrumentation'),
-
- # if object_name is parenthesized, mark opening paren as
- # punctuation, call 'expression', and exit state
- (r'\(', Punctuation, 'expression'),
- (r'(on)', Punctuation),
- # matches possibly schema-qualified table and column names
- (r'\w+(\.\w+)*( USING \S+| \w+ USING \S+)', Name.Variable),
- (r'\"?\w+\"?(?:\.\"?\w+\"?)?', Name.Variable),
- (r'\'\S*\'', Name.Variable),
-
- # if we encounter a comma, another object is listed
- (r',\n', Punctuation, 'object_name'),
- (r',', Punctuation, 'object_name'),
-
- # special case: "*SELECT*"
- (r'"\*SELECT\*( \d+)?"(.\w+)?', Name.Variable),
- (r'"\*VALUES\*(_\d+)?"(.\w+)?', Name.Variable),
- (r'"ANY_subquery"', Name.Variable),
-
- # Variable $1 ...
- (r'\$\d+', Name.Variable),
- # cast
- (r'::\w+', Name.Variable),
- (r' +', Whitespace),
- (r'"', Punctuation),
- (r'\[\.\.\.\]', Punctuation),
- (r'\)', Punctuation, '#pop'),
- ],
- 'predicate': [
- # if predicate is parenthesized, mark paren as punctuation
- (r'(\()([^\n]*)(\))', bygroups(Punctuation, Name.Variable, Punctuation), '#pop'),
- # otherwise color until newline
- (r'[^\n]*', Name.Variable, '#pop'),
- ],
- 'instrumentation': [
- (r'=|\.\.', Punctuation),
- (r' +', Whitespace),
- (r'(rows|width|time|loops)', Name.Class),
- (r'\d+\.\d+', Number.Float),
- (r'(\d+)', Number.Integer),
- (r'\)', Punctuation, '#pop'),
- ],
- 'conflict': [
- (r'(Resolution: )(\w+)', bygroups(Comment.Preproc, Name.Variable)),
- (r'(Arbiter \w+:)', Comment.Preproc, 'object_name'),
- (r'(Filter: )', Comment.Preproc, 'predicate'),
- ],
- 'setting': [
- (r'([a-z_]*?)(\s*)(=)(\s*)(\'.*?\')', bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
- (r'\, ', Punctuation),
- ],
- 'init_plan': [
- (r'\(', Punctuation),
- (r'returns \$\d+(,\$\d+)?', Name.Variable),
- (r'\)', Punctuation, '#pop'),
- ],
- 'sort': [
- (r':|kB', Punctuation),
- (r'(quicksort|top-N|heapsort|Average|Memory|Peak)', Comment.Preproc),
- (r'(external|merge|Disk|sort)', Name.Exception),
- (r'(\d+)', Number.Integer),
- (r' +', Whitespace),
- ],
- }
-
-
-class SqlLexer(RegexLexer):
- """
- Lexer for Structured Query Language. Currently, this lexer does
- not recognize any special syntax except ANSI SQL.
- """
-
- name = 'SQL'
- aliases = ['sql']
- filenames = ['*.sql']
- mimetypes = ['text/x-sql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'--.*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (words((
- 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
- 'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
- 'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
- 'ASYMMETRIC', 'AT', 'ATOMIC', 'AUTHORIZATION', 'AVG', 'BACKWARD',
- 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
- 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
- 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
- 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
- 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
- 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
- 'CLUSTER', 'COALESCE', 'COBOL', 'COLLATE', 'COLLATION',
- 'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
- 'COLUMN_NAME', 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT',
- 'COMMIT', 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT',
- 'CONNECTION', 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS',
- 'CONSTRAINT_CATALOG', 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA',
- 'CONSTRUCTOR', 'CONTAINS', 'CONTINUE', 'CONVERSION', 'CONVERT',
- 'COPY', 'CORRESPONDING', 'COUNT', 'CREATE', 'CREATEDB', 'CREATEUSER',
- 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
- 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
- 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
- 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
- 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
- 'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
- 'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
- 'DETERMINISTIC', 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH',
- 'DISTINCT', 'DO', 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION',
- 'DYNAMIC_FUNCTION_CODE', 'EACH', 'ELSE', 'ELSIF', 'ENCODING',
- 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', 'EXCEPTION',
- 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
- 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL',
- 'FIRST', 'FOR', 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE',
- 'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET',
- 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', 'GROUP', 'GROUPING',
- 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF',
- 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMEDIATELY', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT',
- 'IN', 'INCLUDING', 'INCREMENT', 'INDEX', 'INDICATOR', 'INFIX',
- 'INHERITS', 'INITIALIZE', 'INITIALLY', 'INNER', 'INOUT', 'INPUT',
- 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INTERSECT', 'INTO',
- 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', 'KEY',
- 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
- 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT',
- 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION',
- 'LOCATOR', 'LOCK', 'LOWER', 'MAP', 'MATCH', 'MAX', 'MAXVALUE',
- 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', 'MESSAGE_TEXT', 'METHOD', 'MIN',
- 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MONTH',
- 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB',
- 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT',
- 'NOTHING', 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT',
- 'OCTET_LENGTH', 'OF', 'OFF', 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY',
- 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', 'OR', 'ORDER',
- 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY',
- 'OVERRIDING', 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE',
- 'PARAMETER_NAME', 'PARAMETER_ORDINAL_POSITION',
- 'PARAMETER_SPECIFIC_CATALOG', 'PARAMETER_SPECIFIC_NAME',
- 'PARAMETER_SPECIFIC_SCHEMA', 'PARTIAL', 'PASCAL', 'PENDANT', 'PERIOD', 'PLACING',
- 'PLI', 'POSITION', 'POSTFIX', 'PRECEDES', 'PRECISION', 'PREFIX', 'PREORDER',
- 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
- 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF',
- 'REFERENCES', 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME',
- 'REPEATABLE', 'REPLACE', 'RESET', 'RESTART', 'RESTRICT', 'RESULT',
- 'RETURN', 'RETURNED_LENGTH', 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE',
- 'RETURNS', 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE',
- 'ROUTINE_CATALOG', 'ROUTINE_NAME', 'ROUTINE_SCHEMA', 'ROW', 'ROWS',
- 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', 'SCHEMA_NAME',
- 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
- 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER',
- 'SET', 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE',
- 'SOME', 'SOURCE', 'SPACE', 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME',
- 'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNING',
- 'STABLE', 'START', 'STATE', 'STATEMENT', 'STATIC', 'STATISTICS', 'STDIN',
- 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', 'SUBCLASS_ORIGIN',
- 'SUBLIST', 'SUBSTRING', 'SUCCEEDS', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
- 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', 'TEMP', 'TEMPLATE', 'TEMPORARY',
- 'TERMINATE', 'THAN', 'THEN', 'TIME', 'TIMESTAMP', 'TIMEZONE_HOUR',
- 'TIMEZONE_MINUTE', 'TO', 'TOAST', 'TRAILING', 'TRANSACTION',
- 'TRANSACTIONS_COMMITTED', 'TRANSACTIONS_ROLLED_BACK', 'TRANSACTION_ACTIVE',
- 'TRANSFORM', 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER',
- 'TRIGGER_CATALOG', 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE',
- 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED',
- 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', 'UNNAMED', 'UNNEST', 'UNTIL',
- 'UPDATE', 'UPPER', 'USAGE', 'USER', 'USER_DEFINED_TYPE_CATALOG',
- 'USER_DEFINED_TYPE_NAME', 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM',
- 'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE',
- 'VERSION', 'VERSIONS', 'VERSIONING', 'VIEW',
- 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
- 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
- Keyword),
- (words((
- 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
- 'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
- 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
- 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
- Name.Builtin),
- (r'[+*/<>=~!@#%^&|`?-]', Operator),
- (r'[0-9]+', Number.Integer),
- # TODO: Backslash escapes?
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
- (r'[a-z_][\w$]*', Name), # allow $s in strings for Oracle
- (r'[;:()\[\],.]', Punctuation)
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
- }
-
- def analyse_text(self, text):
- return
-
-
-class TransactSqlLexer(RegexLexer):
- """
- Transact-SQL (T-SQL) is Microsoft's and Sybase's proprietary extension to
- SQL.
-
- The list of keywords includes ODBC keywords and keywords reserved for future use.
- """
-
- name = 'Transact-SQL'
- aliases = ['tsql', 't-sql']
- filenames = ['*.sql']
- mimetypes = ['text/x-tsql']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'--.*?$\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (words(_tsql_builtins.OPERATORS), Operator),
- (words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
- (words(_tsql_builtins.TYPES, suffix=r'\b'), Name.Class),
- (words(_tsql_builtins.FUNCTIONS, suffix=r'\b'), Name.Function),
- (r'(goto)(\s+)(\w+\b)', bygroups(Keyword, Whitespace, Name.Label)),
- (words(_tsql_builtins.KEYWORDS, suffix=r'\b'), Keyword),
- (r'(\[)([^]]+)(\])', bygroups(Operator, Name, Operator)),
- (r'0x[0-9a-f]+', Number.Hex),
- # Float variant 1, for example: 1., 1.e2, 1.2e3
- (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),
- # Float variant 2, for example: .1, .1e2
- (r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
- # Float variant 3, for example: 123e45
- (r'[0-9]+e[+-]?[0-9]+', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Symbol),
- (r'[;(),.]', Punctuation),
- # Below we use \w even for the first "real" character because
- # tokens starting with a digit have already been recognized
- # as Number above.
- (r'@@\w+', Name.Builtin),
- (r'@\w+', Name.Variable),
- (r'(\w+)(:)', bygroups(Name.Label, Punctuation)),
- (r'#?#?\w+', Name), # names for temp tables and anything else
- (r'\?', Name.Variable.Magic), # parameter for prepared statements
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
- }
-
- def analyse_text(text):
- rating = 0
- if tsql_declare_re.search(text):
- # Found T-SQL variable declaration.
- rating = 1.0
- else:
- name_between_backtick_count = len(
- name_between_backtick_re.findall(text))
- name_between_bracket_count = len(
- name_between_bracket_re.findall(text))
- # We need to check if there are any names using
- # backticks or brackets, as otherwise both are 0
- # and 0 >= 2 * 0, so we would always assume it's true
- dialect_name_count = name_between_backtick_count + name_between_bracket_count
- if dialect_name_count >= 1 and \
- name_between_bracket_count >= 2 * name_between_backtick_count:
- # Found at least twice as many [name] as `name`.
- rating += 0.5
- elif name_between_bracket_count > name_between_backtick_count:
- rating += 0.2
- elif name_between_bracket_count > 0:
- rating += 0.1
- if tsql_variable_re.search(text) is not None:
- rating += 0.1
- if tsql_go_re.search(text) is not None:
- rating += 0.1
- return rating
-
-
-class MySqlLexer(RegexLexer):
- """The Oracle MySQL lexer.
-
- This lexer does not attempt to maintain strict compatibility with
- MariaDB syntax or keywords. Although MySQL and MariaDB's common code
- history suggests there may be significant overlap between the two,
- compatibility between the two is not a target for this lexer.
- """
-
- name = 'MySQL'
- aliases = ['mysql']
- mimetypes = ['text/x-mysql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
-
- # Comments
- (r'(?:#|--\s+).*', Comment.Single),
- (r'/\*\+', Comment.Special, 'optimizer-hints'),
- (r'/\*', Comment.Multiline, 'multiline-comment'),
-
- # Hexadecimal literals
- (r"x'([0-9a-f]{2})+'", Number.Hex), # MySQL requires paired hex characters in this form.
- (r'0x[0-9a-f]+', Number.Hex),
-
- # Binary literals
- (r"b'[01]+'", Number.Bin),
- (r'0b[01]+', Number.Bin),
-
- # Numeric literals
- (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float), # Mandatory integer, optional fraction and exponent
- (r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Mandatory fraction, optional integer and exponent
- (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Exponents with integer significands are still floats
- (r'[0-9]+(?=[^0-9a-z$_\u0080-\uffff])', Number.Integer), # Integers that are not in a schema object name
-
- # Date literals
- (r"\{\s*d\s*(?P<quote>['\"])\s*\d{2}(\d{2})?.?\d{2}.?\d{2}\s*(?P=quote)\s*\}",
- Literal.Date),
-
- # Time literals
- (r"\{\s*t\s*(?P<quote>['\"])\s*(?:\d+\s+)?\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?\s*(?P=quote)\s*\}",
- Literal.Date),
-
- # Timestamp literals
- (
- r"\{\s*ts\s*(?P<quote>['\"])\s*"
- r"\d{2}(?:\d{2})?.?\d{2}.?\d{2}" # Date part
- r"\s+" # Whitespace between date and time
- r"\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?" # Time part
- r"\s*(?P=quote)\s*\}",
- Literal.Date
- ),
-
- # String literals
- (r"'", String.Single, 'single-quoted-string'),
- (r'"', String.Double, 'double-quoted-string'),
-
- # Variables
- (r'@@(?:global\.|persist\.|persist_only\.|session\.)?[a-z_]+', Name.Variable),
- (r'@[a-z0-9_$.]+', Name.Variable),
- (r"@'", Name.Variable, 'single-quoted-variable'),
- (r'@"', Name.Variable, 'double-quoted-variable'),
- (r"@`", Name.Variable, 'backtick-quoted-variable'),
- (r'\?', Name.Variable), # For demonstrating prepared statements
-
- # Operators
- (r'[!%&*+/:<=>^|~-]+', Operator),
-
- # Exceptions; these words tokenize differently in different contexts.
- (r'\b(set)(?!\s*\()', Keyword),
- (r'\b(character)(\s+)(set)\b', bygroups(Keyword, Whitespace, Keyword)),
- # In all other known cases, "SET" is tokenized by MYSQL_DATATYPES.
-
- (words(MYSQL_CONSTANTS, prefix=r'\b', suffix=r'\b'), Name.Constant),
- (words(MYSQL_DATATYPES, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words(MYSQL_KEYWORDS, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(MYSQL_FUNCTIONS, prefix=r'\b', suffix=r'\b(\s*)(\()'),
- bygroups(Name.Function, Whitespace, Punctuation)),
-
- # Schema object names
- #
- # Note: Although the first regex supports unquoted all-numeric
- # identifiers, this will not be a problem in practice because
- # numeric literals have already been handled above.
- #
- ('[0-9a-z$_\u0080-\uffff]+', Name),
- (r'`', Name.Quoted, 'schema-object-name'),
-
- # Punctuation
- (r'[(),.;]', Punctuation),
- ],
-
- # Multiline comment substates
- # ---------------------------
-
- 'optimizer-hints': [
- (r'[^*a-z]+', Comment.Special),
- (r'\*/', Comment.Special, '#pop'),
- (words(MYSQL_OPTIMIZER_HINTS, suffix=r'\b'), Comment.Preproc),
- ('[a-z]+', Comment.Special),
- (r'\*', Comment.Special),
- ],
-
- 'multiline-comment': [
- (r'[^*]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ],
-
- # String substates
- # ----------------
-
- 'single-quoted-string': [
- (r"[^'\\]+", String.Single),
- (r"''", String.Escape),
- (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
- (r"'", String.Single, '#pop'),
- ],
-
- 'double-quoted-string': [
- (r'[^"\\]+', String.Double),
- (r'""', String.Escape),
- (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
- (r'"', String.Double, '#pop'),
- ],
-
- # Variable substates
- # ------------------
-
- 'single-quoted-variable': [
- (r"[^']+", Name.Variable),
- (r"''", Name.Variable),
- (r"'", Name.Variable, '#pop'),
- ],
-
- 'double-quoted-variable': [
- (r'[^"]+', Name.Variable),
- (r'""', Name.Variable),
- (r'"', Name.Variable, '#pop'),
- ],
-
- 'backtick-quoted-variable': [
- (r'[^`]+', Name.Variable),
- (r'``', Name.Variable),
- (r'`', Name.Variable, '#pop'),
- ],
-
- # Schema object name substates
- # ----------------------------
- #
- # "Name.Quoted" and "Name.Quoted.Escape" are non-standard but
- # formatters will style them as "Name" by default but add
- # additional styles based on the token name. This gives users
- # flexibility to add custom styles as desired.
- #
- 'schema-object-name': [
- (r'[^`]+', Name.Quoted),
- (r'``', Name.Quoted.Escape),
- (r'`', Name.Quoted, '#pop'),
- ],
- }
-
- def analyse_text(text):
- rating = 0
- name_between_backtick_count = len(
- name_between_backtick_re.findall(text))
- name_between_bracket_count = len(
- name_between_bracket_re.findall(text))
- # Same logic as above in the TSQL analysis
- dialect_name_count = name_between_backtick_count + name_between_bracket_count
- if dialect_name_count >= 1 and \
- name_between_backtick_count >= 2 * name_between_bracket_count:
- # Found at least twice as many `name` as [name].
- rating += 0.5
- elif name_between_backtick_count > name_between_bracket_count:
- rating += 0.2
- elif name_between_backtick_count > 0:
- rating += 0.1
- return rating
-
-
-class SqliteConsoleLexer(Lexer):
- """
- Lexer for example sessions using sqlite3.
-
- .. versionadded:: 0.11
- """
-
- name = 'sqlite3con'
- aliases = ['sqlite3']
- filenames = ['*.sqlite3-console']
- mimetypes = ['text/x-sqlite3-console']
-
- def get_tokens_unprocessed(self, data):
- sql = SqlLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(data):
- line = match.group()
- prompt_match = sqlite_prompt_re.match(line)
- if prompt_match is not None:
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:7])]))
- insertions.append((len(curcode),
- [(7, Whitespace, ' ')]))
- curcode += line[8:]
- else:
- if curcode:
- yield from do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- if line.startswith('SQL error: '):
- yield (match.start(), Generic.Traceback, line)
- else:
- yield (match.start(), Generic.Output, line)
- if curcode:
- yield from do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode))
-
-
-class RqlLexer(RegexLexer):
- """
- Lexer for Relation Query Language.
-
- .. versionadded:: 2.0
- """
- name = 'RQL'
- url = 'http://www.logilab.org/project/rql'
- aliases = ['rql']
- filenames = ['*.rql']
- mimetypes = ['text/x-rql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(DELETE|SET|INSERT|UNION|DISTINCT|WITH|WHERE|BEING|OR'
- r'|AND|NOT|GROUPBY|HAVING|ORDERBY|ASC|DESC|LIMIT|OFFSET'
- r'|TODAY|NOW|TRUE|FALSE|NULL|EXISTS)\b', Keyword),
- (r'[+*/<>=%-]', Operator),
- (r'(Any|is|instance_of|CWEType|CWRelation)\b', Name.Builtin),
- (r'[0-9]+', Number.Integer),
- (r'[A-Z_]\w*\??', Name),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Single),
- (r'[;:()\[\],.]', Punctuation)
- ],
- }
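All of the lexers above are registered under aliases, so typical client code goes
through get_lexer_by_name rather than importing the classes directly. A minimal
sketch (not part of the deleted file; the query string is illustrative):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

query = "SELECT name, count(*) FROM users GROUP BY name HAVING count(*) > 1;"

# 'sql', 'postgresql', 'tsql' and 'mysql' map to SqlLexer, PostgresLexer,
# TransactSqlLexer and MySqlLexer respectively.
for alias in ('sql', 'postgresql', 'tsql', 'mysql'):
    lexer = get_lexer_by_name(alias)
    print(alias, '->', lexer.name)
    print(highlight(query, lexer, TerminalFormatter()))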
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/srcinfo.py b/venv/lib/python3.11/site-packages/pygments/lexers/srcinfo.py
deleted file mode 100644
index c4da88b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/srcinfo.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
- pygments.lexers.srcinfo
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for .SRCINFO files used by Arch Linux Packages.
-
- The description of the format can be found in the wiki:
- https://wiki.archlinux.org/title/.SRCINFO
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Keyword, Name, Operator, Whitespace
-
-__all__ = ['SrcinfoLexer']
-
-keywords = (
- 'pkgbase', 'pkgname',
- 'pkgver', 'pkgrel', 'epoch',
- 'pkgdesc', 'url', 'install', 'changelog',
- 'arch', 'groups', 'license', 'noextract', 'options', 'backup',
- 'validpgpkeys',
-)
-
-architecture_dependent_keywords = (
- 'source', 'depends', 'checkdepends', 'makedepends', 'optdepends',
- 'provides', 'conflicts', 'replaces',
- 'md5sums', 'sha1sums', 'sha224sums', 'sha256sums', 'sha384sums',
- 'sha512sums',
-)
-
-
-class SrcinfoLexer(RegexLexer):
- """Lexer for .SRCINFO files used by Arch Linux Packages.
-
- .. versionadded:: 2.11
- """
-
- name = 'Srcinfo'
- aliases = ['srcinfo']
- filenames = ['.SRCINFO']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#.*', Comment.Single),
- (words(keywords), Keyword, 'assignment'),
- (words(architecture_dependent_keywords, suffix=r'_\w+'),
- Keyword, 'assignment'),
- (r'\w+', Name.Variable, 'assignment'),
- ],
- 'assignment': [
- (r' +', Whitespace),
- (r'=', Operator, 'value'),
- ],
- 'value': [
- (r' +', Whitespace),
- (r'.*', Text, '#pop:2'),
- ],
- }
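A minimal usage sketch for the lexer above (not part of the deleted file; the
.SRCINFO fragment is illustrative):

from pygments.lexers.srcinfo import SrcinfoLexer

srcinfo = (
    "pkgbase = example\n"
    "\tpkgdesc = An example package\n"
    "\tpkgver = 1.0.0\n"
    "\tdepends_x86_64 = glibc\n"
)

for tokentype, value in SrcinfoLexer().get_tokens(srcinfo):
    if value.strip():
        print(tokentype, repr(value))
# 'pkgbase' and 'pkgdesc' match the keyword rule, 'depends_x86_64' matches the
# architecture-dependent rule, and the text after '=' is emitted as Token.Text.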
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/stata.py b/venv/lib/python3.11/site-packages/pygments/lexers/stata.py
deleted file mode 100644
index 917c999..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/stata.py
+++ /dev/null
@@ -1,171 +0,0 @@
-"""
- pygments.lexers.stata
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Stata
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from pygments.lexer import RegexLexer, default, include, words
-from pygments.token import Comment, Keyword, Name, Number, \
- String, Text, Operator
-
-from pygments.lexers._stata_builtins import builtins_base, builtins_functions
-
-__all__ = ['StataLexer']
-
-
-class StataLexer(RegexLexer):
- """
- For Stata do files.
-
- .. versionadded:: 2.2
- """
- # Syntax based on
- # - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
- # - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
- # - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
-
- name = 'Stata'
- url = 'http://www.stata.com/'
- aliases = ['stata', 'do']
- filenames = ['*.do', '*.ado']
- mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('comments'),
- include('strings'),
- include('macros'),
- include('numbers'),
- include('keywords'),
- include('operators'),
- include('format'),
- (r'.', Text),
- ],
- # Comments are a complicated beast in Stata because they can be
- # nested and there are a few corner cases with that. See:
- # - github.com/kylebarron/language-stata/issues/90
- # - statalist.org/forums/forum/general-stata-discussion/general/1448244
- 'comments': [
- (r'(^//|(?<=\s)//)(?!/)', Comment.Single, 'comments-double-slash'),
- (r'^\s*\*', Comment.Single, 'comments-star'),
- (r'/\*', Comment.Multiline, 'comments-block'),
- (r'(^///|(?<=\s)///)', Comment.Special, 'comments-triple-slash')
- ],
- 'comments-block': [
- (r'/\*', Comment.Multiline, '#push'),
- # this ends and restarts a comment block, but we need to catch it so
- # that it doesn't start _another_ level of comment blocks
- (r'\*/\*', Comment.Multiline),
- (r'(\*/\s+\*(?!/)[^\n]*)|(\*/)', Comment.Multiline, '#pop'),
- # Match anything else as a character inside the comment
- (r'.', Comment.Multiline),
- ],
- 'comments-star': [
- (r'///.*?\n', Comment.Single,
- ('#pop', 'comments-triple-slash')),
- (r'(^//|(?<=\s)//)(?!/)', Comment.Single,
- ('#pop', 'comments-double-slash')),
- (r'/\*', Comment.Multiline, 'comments-block'),
- (r'.(?=\n)', Comment.Single, '#pop'),
- (r'.', Comment.Single),
- ],
- 'comments-triple-slash': [
- (r'\n', Comment.Special, '#pop'),
- # A // breaks out of a comment for the rest of the line
- (r'//.*?(?=\n)', Comment.Single, '#pop'),
- (r'.', Comment.Special),
- ],
- 'comments-double-slash': [
- (r'\n', Text, '#pop'),
- (r'.', Comment.Single),
- ],
- # `"compound string"' and regular "string"; note the former are
- # nested.
- 'strings': [
- (r'`"', String, 'string-compound'),
- (r'(?<!`)"', String, 'string-regular'),
- ],
- 'string-compound': [
- (r'`"', String, '#push'),
- (r'"\'', String, '#pop'),
- (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
- include('macros'),
- (r'.', String)
- ],
- 'string-regular': [
- (r'(")(?!\')|(?=\n)', String, '#pop'),
- (r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
- include('macros'),
- (r'.', String)
- ],
- # A local is usually
- # `\w{0,31}'
- # `:extended macro'
- # `=expression'
- # `[rsen](results)'
- # `(++--)scalar(++--)'
- #
- # However, there are all sorts of weird rules wrt edge
- # cases. Instead of writing 27 exceptions, anything inside
- # `' is a local.
- #
- # A global is more restricted, so we do follow rules. Note only
- # locals explicitly enclosed ${} can be nested.
- 'macros': [
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
- (r'\$', Name.Variable.Global, 'macro-global-name'),
- (r'`', Name.Variable, 'macro-local'),
- ],
- 'macro-local': [
- (r'`', Name.Variable, '#push'),
- (r"'", Name.Variable, '#pop'),
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
- (r'\$', Name.Variable.Global, 'macro-global-name'),
- (r'.', Name.Variable), # fallback
- ],
- 'macro-global-nested': [
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
- (r'\}', Name.Variable.Global, '#pop'),
- (r'\$', Name.Variable.Global, 'macro-global-name'),
- (r'`', Name.Variable, 'macro-local'),
- (r'\w', Name.Variable.Global), # fallback
- default('#pop'),
- ],
- 'macro-global-name': [
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
- (r'\$', Name.Variable.Global, 'macro-global-name', '#pop'),
- (r'`', Name.Variable, 'macro-local', '#pop'),
- (r'\w{1,32}', Name.Variable.Global, '#pop'),
- ],
- # Built in functions and statements
- 'keywords': [
- (words(builtins_functions, prefix = r'\b', suffix = r'(?=\()'),
- Name.Function),
- (words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
- Keyword),
- ],
- # http://www.stata.com/help.cgi?operators
- 'operators': [
- (r'-|==|<=|>=|<|>|&|!=', Operator),
- (r'\*|\+|\^|/|!|~|==|~=', Operator)
- ],
- # Stata numbers
- 'numbers': [
- # decimal number
- (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
- Number),
- ],
- # Stata formats
- 'format': [
- (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Other),
- (r'%(21x|16H|16L|8H|8L)', Name.Other),
- (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg)\S{0,32}', Name.Other),
- (r'%[-~]?\d{1,4}s', Name.Other),
- ]
- }
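A minimal usage sketch for the lexer above (not part of the deleted file; the
do-file fragment is illustrative):

from pygments.lexers.stata import StataLexer

code = (
    "* summarize the auto data\n"
    "sysuse auto, clear\n"
    "summarize price mpg if foreign == 1\n"
)

for tokentype, value in StataLexer().get_tokens(code):
    if value.strip():
        print(tokentype, repr(value))
# The leading '*' line is routed through the 'comments-star' state; the other
# lines exercise the keyword, operator and number rules, with anything left
# over falling through to the final Text rule.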
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/supercollider.py b/venv/lib/python3.11/site-packages/pygments/lexers/supercollider.py
deleted file mode 100644
index ea7a176..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/supercollider.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""
- pygments.lexers.supercollider
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for SuperCollider
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SuperColliderLexer']
-
-
-class SuperColliderLexer(RegexLexer):
- """
- For SuperCollider source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'SuperCollider'
- url = 'http://supercollider.github.io/'
- aliases = ['supercollider', 'sc']
- filenames = ['*.sc', '*.scd']
- mimetypes = ['application/supercollider', 'text/supercollider']
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop'),
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (words((
- 'for', 'in', 'while', 'do', 'break', 'return', 'continue',
- 'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
- 'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
- 'void'), suffix=r'\b'),
- Keyword, 'slashstartsregex'),
- (words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
- Keyword.Declaration, 'slashstartsregex'),
- (words((
- 'abstract', 'boolean', 'byte', 'char', 'class', 'const',
- 'debugger', 'double', 'enum', 'export', 'extends', 'final',
- 'float', 'goto', 'implements', 'import', 'int', 'interface',
- 'long', 'native', 'package', 'private', 'protected', 'public',
- 'short', 'static', 'super', 'synchronized', 'throws',
- 'transient', 'volatile'), suffix=r'\b'),
- Keyword.Reserved),
- (words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
- (words((
- 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
- 'Object', 'Packages', 'RegExp', 'String',
- 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
- 'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
- 'thisThread', 'this'), suffix=r'\b'),
- Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'\\?[$a-zA-Z_]\w*', String.Symbol),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
- def analyse_text(text):
- """We're searching for a common function and a unique keyword here."""
- if 'SinOsc' in text or 'thisFunctionDef' in text:
- return 0.1
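A minimal usage sketch for the lexer above (not part of the deleted file; the
one-liner is illustrative):

from pygments.lexers import get_lexer_by_name

# 'sc' and 'supercollider' are the registered aliases for SuperColliderLexer.
lexer = get_lexer_by_name('sc')
code = "{ SinOsc.ar(440, 0, 0.2) }.play;"

for tokentype, value in lexer.get_tokens(code):
    if value.strip():
        print(tokentype, repr(value))
# Because the snippet contains 'SinOsc', the analyse_text heuristic above would
# also return 0.1 for it when Pygments has to guess a lexer.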
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/tal.py b/venv/lib/python3.11/site-packages/pygments/lexers/tal.py
deleted file mode 100644
index 170b781..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/tal.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""
- pygments.lexers.tal
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for Uxntal
-
- .. versionadded:: 2.12
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Keyword, Name, String, Number, \
- Punctuation, Whitespace, Literal
-
-__all__ = ['TalLexer']
-
-
-class TalLexer(RegexLexer):
- """
- For `Uxntal <https://wiki.xxiivv.com/site/uxntal.html>`_ source code.
-
- .. versionadded:: 2.12
- """
-
- name = 'Tal'
- aliases = ['tal', 'uxntal']
- filenames = ['*.tal']
- mimetypes = ['text/x-uxntal']
-
- instructions = [
- 'BRK', 'LIT', 'INC', 'POP', 'DUP', 'NIP', 'SWP', 'OVR', 'ROT',
- 'EQU', 'NEQ', 'GTH', 'LTH', 'JMP', 'JCN', 'JSR', 'STH',
- 'LDZ', 'STZ', 'LDR', 'STR', 'LDA', 'STA', 'DEI', 'DEO',
- 'ADD', 'SUB', 'MUL', 'DIV', 'AND', 'ORA', 'EOR', 'SFT'
- ]
-
- tokens = {
- # the comment delimiters must not be adjacent to non-space characters.
- # this means ( foo ) is a valid comment but (foo) is not. this also
- # applies to nested comments.
- 'comment': [
- (r'(?<!\S)\((?!\S)', Comment.Multiline, '#push'), # nested comments
- (r'(?<!\S)\)(?!\S)', Comment.Multiline, '#pop'), # nested comments
- (r'[^()]+', Comment.Multiline), # comments
- (r'[()]+', Comment.Multiline), # comments
- ],
- 'root': [
- (r'\s+', Whitespace), # spaces
- (r'(?<!\S)\((?!\S)', Comment.Multiline, 'comment'), # comments
- (words(instructions, prefix=r'(?<!\S)', suffix=r'2?k?r?(?!\S)'),
- Keyword.Reserved), # instructions
- (r'[][{}](?!\S)', Punctuation), # delimiters
- (r'#([0-9a-f]{2}){1,2}(?!\S)', Number.Hex), # integer
- (r'"\S+', String), # raw string
- (r'([0-9a-f]{2}){1,2}(?!\S)', Literal), # raw integer
- (r'[|$][0-9a-f]{1,4}(?!\S)', Keyword.Declaration), # abs/rel pad
- (r'%\S+', Name.Decorator), # macro
- (r'@\S+', Name.Function), # label
- (r'&\S+', Name.Label), # sublabel
- (r'/\S+', Name.Tag), # spacer
- (r'\.\S+', Name.Variable.Magic), # literal zero page addr
- (r',\S+', Name.Variable.Instance), # literal rel addr
- (r';\S+', Name.Variable.Global), # literal abs addr
- (r'-\S+', Literal), # raw zero page addr
- (r'_\S+', Literal), # raw relative addr
- (r'=\S+', Literal), # raw absolute addr
- (r'!\S+', Name.Function), # immediate jump
- (r'\?\S+', Name.Function), # conditional immediate jump
- (r'~\S+', Keyword.Namespace), # include
- (r'\S+', Name.Function), # macro invocation, immediate subroutine
- ]
- }
-
- def analyse_text(text):
- return '|0100' in text[:500]
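A minimal usage sketch for the lexer above (not part of the deleted file): a few
lines of Uxntal, including the '|0100' pad that analyse_text looks for.

from pygments.lexers.tal import TalLexer

code = (
    "( hello world )\n"
    "|0100\n"
    "#48 #18 DEO\n"
    "BRK\n"
)

for tokentype, value in TalLexer().get_tokens(code):
    if value.strip():
        print(tokentype, repr(value))
# '#48' matches the Number.Hex rule, 'DEO' matches the instruction rule as
# Keyword.Reserved, and '|0100' matches the pad rule as Keyword.Declaration.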
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/tcl.py b/venv/lib/python3.11/site-packages/pygments/lexers/tcl.py
deleted file mode 100644
index f444223..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/tcl.py
+++ /dev/null
@@ -1,149 +0,0 @@
-"""
- pygments.lexers.tcl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Tcl and related languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Whitespace
-from pygments.util import shebang_matches
-
-__all__ = ['TclLexer']
-
-
-class TclLexer(RegexLexer):
- """
- For Tcl source code.
-
- .. versionadded:: 0.10
- """
-
- keyword_cmds_re = words((
- 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif',
- 'else', 'error', 'eval', 'expr', 'for', 'foreach', 'global', 'if',
- 'namespace', 'proc', 'rename', 'return', 'set', 'switch', 'then',
- 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable', 'vwait',
- 'while'), prefix=r'\b', suffix=r'\b')
-
- builtin_cmds_re = words((
- 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close',
- 'concat', 'dde', 'dict', 'encoding', 'eof', 'exec', 'exit', 'fblocked',
- 'fconfigure', 'fcopy', 'file', 'fileevent', 'flush', 'format', 'gets',
- 'glob', 'history', 'http', 'incr', 'info', 'interp', 'join', 'lappend',
- 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
- 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort',
- 'mathfunc', 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid',
- 'pkg::create', 'pkg_mkIndex', 'platform', 'platform::shell', 'puts',
- 'pwd', 're_syntax', 'read', 'refchan', 'regexp', 'registry', 'regsub',
- 'scan', 'seek', 'socket', 'source', 'split', 'string', 'subst', 'tell',
- 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')
-
- name = 'Tcl'
- url = 'https://www.tcl.tk/about/language.html'
- aliases = ['tcl']
- filenames = ['*.tcl', '*.rvt']
- mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
-
- def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
- return [
- (keyword_cmds_re, Keyword, 'params' + context),
- (builtin_cmds_re, Name.Builtin, 'params' + context),
- (r'([\w.-]+)', Name.Variable, 'params' + context),
- (r'#', Comment, 'comment'),
- ]
-
- tokens = {
- 'root': [
- include('command'),
- include('basic'),
- include('data'),
- (r'\}', Keyword), # HACK: somehow we miscounted our braces
- ],
- 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
- 'command-in-brace': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-brace"),
- 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-bracket"),
- 'command-in-paren': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-paren"),
- 'basic': [
- (r'\(', Keyword, 'paren'),
- (r'\[', Keyword, 'bracket'),
- (r'\{', Keyword, 'brace'),
- (r'"', String.Double, 'string'),
- (r'(eq|ne|in|ni)\b', Operator.Word),
- (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
- ],
- 'data': [
- (r'\s+', Whitespace),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'0[0-7]+', Number.Oct),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\$[\w.:-]+', Name.Variable),
- (r'\$\{[\w.:-]+\}', Name.Variable),
- (r'[\w.,@:-]+', Text),
- ],
- 'params': [
- (r';', Keyword, '#pop'),
- (r'\n', Text, '#pop'),
- (r'(else|elseif|then)\b', Keyword),
- include('basic'),
- include('data'),
- ],
- 'params-in-brace': [
- (r'\}', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-paren': [
- (r'\)', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-bracket': [
- (r'\]', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'string': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
- (r'"', String.Double, '#pop')
- ],
- 'string-square': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
- (r'\]', String.Double, '#pop')
- ],
- 'brace': [
- (r'\}', Keyword, '#pop'),
- include('command-in-brace'),
- include('basic'),
- include('data'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('command-in-paren'),
- include('basic'),
- include('data'),
- ],
- 'bracket': [
- (r'\]', Keyword, '#pop'),
- include('command-in-bracket'),
- include('basic'),
- include('data'),
- ],
- 'comment': [
- (r'.*[^\\]\n', Comment, '#pop'),
- (r'.*\\\n', Comment),
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'(tcl)')
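A minimal usage sketch for the lexer above (not part of the deleted file; the Tcl
fragment is illustrative):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.tcl import TclLexer

code = (
    "proc greet {name} {\n"
    "    puts $name\n"
    "}\n"
    "greet world\n"
)

# 'proc' and 'puts' are covered by the keyword/builtin command word lists above,
# and $name is picked up by the variable rule in the 'data' state.
print(highlight(code, TclLexer(), TerminalFormatter()))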
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/teal.py b/venv/lib/python3.11/site-packages/pygments/lexers/teal.py
deleted file mode 100644
index e488e09..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/teal.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""
- pygments.lexers.teal
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for TEAL.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import Comment, Name, Number, String, Text, Keyword, \
- Whitespace
-
-__all__ = ['TealLexer']
-
-
-class TealLexer(RegexLexer):
- """
- For the Transaction Execution Approval Language (TEAL)
-
- For more information about the grammar, see:
- https://github.com/algorand/go-algorand/blob/master/data/transactions/logic/assembler.go
-
- .. versionadded:: 2.9
- """
- name = 'teal'
- url = 'https://developer.algorand.org/docs/reference/teal/specification/'
- aliases = ['teal']
- filenames = ['*.teal']
-
- keywords = words({
- 'Sender', 'Fee', 'FirstValid', 'FirstValidTime', 'LastValid', 'Note',
- 'Lease', 'Receiver', 'Amount', 'CloseRemainderTo', 'VotePK',
- 'SelectionPK', 'VoteFirst', 'VoteLast', 'VoteKeyDilution', 'Type',
- 'TypeEnum', 'XferAsset', 'AssetAmount', 'AssetSender', 'AssetReceiver',
- 'AssetCloseTo', 'GroupIndex', 'TxID', 'ApplicationID', 'OnCompletion',
- 'ApplicationArgs', 'NumAppArgs', 'Accounts', 'NumAccounts',
- 'ApprovalProgram', 'ClearStateProgram', 'RekeyTo', 'ConfigAsset',
- 'ConfigAssetTotal', 'ConfigAssetDecimals', 'ConfigAssetDefaultFrozen',
- 'ConfigAssetUnitName', 'ConfigAssetName', 'ConfigAssetURL',
- 'ConfigAssetMetadataHash', 'ConfigAssetManager', 'ConfigAssetReserve',
- 'ConfigAssetFreeze', 'ConfigAssetClawback', 'FreezeAsset',
- 'FreezeAssetAccount', 'FreezeAssetFrozen',
- 'NoOp', 'OptIn', 'CloseOut', 'ClearState', 'UpdateApplication',
- 'DeleteApplication',
- 'MinTxnFee', 'MinBalance', 'MaxTxnLife', 'ZeroAddress', 'GroupSize',
- 'LogicSigVersion', 'Round', 'LatestTimestamp', 'CurrentApplicationID',
- 'AssetBalance', 'AssetFrozen',
- 'AssetTotal', 'AssetDecimals', 'AssetDefaultFrozen', 'AssetUnitName',
- 'AssetName', 'AssetURL', 'AssetMetadataHash', 'AssetManager',
- 'AssetReserve', 'AssetFreeze', 'AssetClawback',
- }, suffix=r'\b')
-
- identifier = r'[^ \t\n]+(?=\/\/)|[^ \t\n]+'
- newline = r'\r?\n'
- tokens = {
- 'root': [
- include('whitespace'),
- # pragmas match specifically on the space character
- (r'^#pragma .*' + newline, Comment.Directive),
- # labels must be followed by a space,
- # but anything after that is ignored
- ('(' + identifier + ':' + ')' + '([ \t].*)',
- bygroups(Name.Label, Comment.Single)),
- (identifier, Name.Function, 'function-args'),
- ],
- 'function-args': [
- include('whitespace'),
- (r'"', String, 'string'),
- (r'(b(?:ase)?(?:32|64) ?)(\(?[a-zA-Z0-9+/=]+\)?)',
- bygroups(String.Affix, String.Other)),
- (r'[A-Z2-7]{58}', Number), # address
- (r'0x[\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (keywords, Keyword),
- (identifier, Name.Attributes), # branch targets
- (newline, Text, '#pop'),
- ],
- 'string': [
- (r'\\(?:["nrt\\]|x\d\d)', String.Escape),
- (r'[^\\\"\n]+', String),
- (r'"', String, '#pop'),
- ],
- 'whitespace': [
- (r'[ \t]+', Whitespace),
- (r'//[^\n]+', Comment.Single),
- ],
- }
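
Once this vendored tree is gone and Pygments is consumed as a normal dependency, the TealLexer removed above is still reachable through its registered alias. A minimal sketch, assuming a PyPI-installed Pygments; the two-line TEAL program and the terminal formatter are illustrative choices only:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # 'teal' is the alias registered by TealLexer; the program text is made up.
    teal_source = "#pragma version 2\nint 1\nreturn\n"
    print(highlight(teal_source, get_lexer_by_name("teal"), TerminalFormatter()))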
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/templates.py b/venv/lib/python3.11/site-packages/pygments/lexers/templates.py
deleted file mode 100644
index 5f46a47..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/templates.py
+++ /dev/null
@@ -1,2296 +0,0 @@
-"""
- pygments.lexers.templates
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various template engines' markup.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexers.html import HtmlLexer, XmlLexer
-from pygments.lexers.javascript import JavascriptLexer, LassoLexer
-from pygments.lexers.css import CssLexer
-from pygments.lexers.php import PhpLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.perl import PerlLexer
-from pygments.lexers.jvm import JavaLexer, TeaLangLexer
-from pygments.lexers.data import YamlLexer
-from pygments.lexers.sql import SqlLexer
-from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
- include, using, this, default, combined
-from pygments.token import Error, Punctuation, Whitespace, \
- Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
-from pygments.util import html_doctype_matches, looks_like_xml
-
-__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
- 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
- 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
- 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
- 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
- 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
- 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
- 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
- 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
- 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
- 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
- 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
- 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
- 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
- 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
- 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
- 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
- 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
- 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
- 'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer',
- 'SqlJinjaLexer']
-
-
-class ErbLexer(Lexer):
- """
- Generic ERB (Ruby Templating) lexer.
-
- Just highlights ruby code between the preprocessor directives, other data
- is left untouched by the lexer.
-
- All options are also forwarded to the `RubyLexer`.
- """
-
- name = 'ERB'
- url = 'https://github.com/ruby/erb'
- aliases = ['erb']
- mimetypes = ['application/x-ruby-templating']
-
- _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
-
- def __init__(self, **options):
- from pygments.lexers.ruby import RubyLexer
- self.ruby_lexer = RubyLexer(**options)
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- """
- Since ERB doesn't allow "<%" and other tags inside of ruby
- blocks we have to use a split approach here that fails for
- that too.
- """
- tokens = self._block_re.split(text)
- tokens.reverse()
- state = idx = 0
- try:
- while True:
- # text
- if state == 0:
- val = tokens.pop()
- yield idx, Other, val
- idx += len(val)
- state = 1
- # block starts
- elif state == 1:
- tag = tokens.pop()
- # literals
- if tag in ('<%%', '%%>'):
- yield idx, Other, tag
- idx += 3
- state = 0
- # comment
- elif tag == '<%#':
- yield idx, Comment.Preproc, tag
- val = tokens.pop()
- yield idx + 3, Comment, val
- idx += 3 + len(val)
- state = 2
- # blocks or output
- elif tag in ('<%', '<%=', '<%-'):
- yield idx, Comment.Preproc, tag
- idx += len(tag)
- data = tokens.pop()
- r_idx = 0
- for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(data):
- yield r_idx + idx, r_token, r_value
- idx += len(data)
- state = 2
- elif tag in ('%>', '-%>'):
- yield idx, Error, tag
- idx += len(tag)
- state = 0
- # % raw ruby statements
- else:
- yield idx, Comment.Preproc, tag[0]
- r_idx = 0
- for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
- yield idx + 1 + r_idx, r_token, r_value
- idx += len(tag)
- state = 0
- # block ends
- elif state == 2:
- tag = tokens.pop()
- if tag not in ('%>', '-%>'):
- yield idx, Other, tag
- else:
- yield idx, Comment.Preproc, tag
- idx += len(tag)
- state = 0
- except IndexError:
- return
-
- def analyse_text(text):
- if '<%' in text and '%>' in text:
- return 0.4
-
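As its docstring says, ErbLexer only tokenizes the Ruby between the ERB tags and yields everything else as Other, which a delegating lexer can then re-lex. A minimal sketch of inspecting that token stream, assuming an installed Pygments; the template text is invented:

    from pygments.lexers.templates import ErbLexer

    template = "<p><%= user.name %></p>\n"
    for token_type, value in ErbLexer().get_tokens(template):
        # The HTML stays Token.Other, '<%=' and '%>' come out as Comment.Preproc,
        # and the Ruby expression is tokenized by the embedded RubyLexer.
        print(token_type, repr(value))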
-
-class SmartyLexer(RegexLexer):
- """
- Generic Smarty template lexer.
-
- Just highlights smarty code between the preprocessor directives, other
- data is left untouched by the lexer.
- """
-
- name = 'Smarty'
- url = 'https://www.smarty.net/'
- aliases = ['smarty']
- filenames = ['*.tpl']
- mimetypes = ['application/x-smarty']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'(\{)(\*.*?\*)(\})',
- bygroups(Comment.Preproc, Comment, Comment.Preproc)),
- (r'(\{php\})(.*?)(\{/php\})',
- bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
- Comment.Preproc)),
- (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
- bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
- (r'\{', Comment.Preproc, 'smarty')
- ],
- 'smarty': [
- (r'\s+', Text),
- (r'\{', Comment.Preproc, '#push'),
- (r'\}', Comment.Preproc, '#pop'),
- (r'#[a-zA-Z_]\w*#', Name.Variable),
- (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
- (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
- (r'(true|false|null)\b', Keyword.Constant),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*', Name.Attribute)
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
- rv += 0.15
- if re.search(r'\{include\s+file=.*?\}', text):
- rv += 0.15
- if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
- rv += 0.15
- if re.search(r'\{\$.*?\}', text):
- rv += 0.01
- return rv
-
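Because SmartyLexer registers the '*.tpl' filename pattern, the ordinary filename-based lookup should resolve to it. A short sketch, assuming an installed Pygments; the file name and template text are made up:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_for_filename

    tpl = "{assign var=name value='world'}\n<h1>Hello {$name}!</h1>\n"
    lexer = get_lexer_for_filename("theme.tpl")  # expected to resolve to SmartyLexer
    print(highlight(tpl, lexer, HtmlFormatter()))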
-
-class VelocityLexer(RegexLexer):
- """
- Generic Velocity template lexer.
-
- Just highlights velocity directives and variable references, other
- data is left untouched by the lexer.
- """
-
- name = 'Velocity'
- url = 'https://velocity.apache.org/'
- aliases = ['velocity']
- filenames = ['*.vm', '*.fhtml']
-
- flags = re.MULTILINE | re.DOTALL
-
- identifier = r'[a-zA-Z_]\w*'
-
- tokens = {
- 'root': [
- (r'[^{#$]+', Other),
- (r'(#)(\*.*?\*)(#)',
- bygroups(Comment.Preproc, Comment, Comment.Preproc)),
- (r'(##)(.*?$)',
- bygroups(Comment.Preproc, Comment)),
- (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
- bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
- 'directiveparams'),
- (r'(#\{?)(' + identifier + r')(\}|\b)',
- bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
- (r'\$!?\{?', Punctuation, 'variable')
- ],
- 'variable': [
- (identifier, Name.Variable),
- (r'\(', Punctuation, 'funcparams'),
- (r'(\.)(' + identifier + r')',
- bygroups(Punctuation, Name.Variable), '#push'),
- (r'\}', Punctuation, '#pop'),
- default('#pop')
- ],
- 'directiveparams': [
- (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
- Operator),
- (r'\[', Operator, 'rangeoperator'),
- (r'\b' + identifier + r'\b', Name.Function),
- include('funcparams')
- ],
- 'rangeoperator': [
- (r'\.\.', Operator),
- include('funcparams'),
- (r'\]', Operator, '#pop')
- ],
- 'funcparams': [
- (r'\$!?\{?', Punctuation, 'variable'),
- (r'\s+', Text),
- (r'[,:]', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r"\b[0-9]+\b", Number),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
- rv += 0.25
- if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
- rv += 0.15
- if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
- rv += 0.15
- if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
- r'(\.\w+(\([^)]*\))?)*\}?', text):
- rv += 0.01
- return rv
-
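The analyse_text heuristics above are what pygments.lexers.guess_lexer uses to rank candidate lexers; Pygments wraps them into static scorers returning a float between 0 and 1, so they can also be probed directly. A small sketch, assuming an installed Pygments; the snippet is invented and the exact score is only indicative:

    from pygments.lexers.templates import VelocityLexer

    snippet = "#if($user.admin)\nHello $user.name\n#end\n"
    # The #if/#end pair and the $user reference each add to the score above.
    print(VelocityLexer.analyse_text(snippet))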
-
-class VelocityHtmlLexer(DelegatingLexer):
- """
- Subclass of the `VelocityLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- """
-
- name = 'HTML+Velocity'
- aliases = ['html+velocity']
- alias_filenames = ['*.html', '*.fhtml']
- mimetypes = ['text/html+velocity']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, VelocityLexer, **options)
-
-
-class VelocityXmlLexer(DelegatingLexer):
- """
- Subclass of the `VelocityLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- """
-
- name = 'XML+Velocity'
- aliases = ['xml+velocity']
- alias_filenames = ['*.xml', '*.vm']
- mimetypes = ['application/xml+velocity']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, VelocityLexer, **options)
-
- def analyse_text(text):
- rv = VelocityLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class DjangoLexer(RegexLexer):
- """
- Generic `django <http://www.djangoproject.com/documentation/templates/>`_
- and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.
-
- It just highlights django/jinja code between the preprocessor directives,
- other data is left untouched by the lexer.
- """
-
- name = 'Django/Jinja'
- aliases = ['django', 'jinja']
- mimetypes = ['application/x-django-templating', 'application/x-jinja']
-
- flags = re.M | re.S
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'\{\{', Comment.Preproc, 'var'),
- # jinja/django comments
- (r'\{#.*?#\}', Comment),
- # django comments
- (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Comment, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # raw jinja blocks
- (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Text, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # filter blocks
- (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
- 'block'),
- (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword), 'block'),
- (r'\{', Other)
- ],
- 'varnames': [
- (r'(\|)(\s*)([a-zA-Z_]\w*)',
- bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
- (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
- r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
- Keyword),
- (r'(loop|block|super|forloop)\b', Name.Builtin),
- (r'[a-zA-Z_][\w-]*', Name.Variable),
- (r'\.\w+', Name.Variable),
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ],
- 'var': [
- (r'\s+', Text),
- (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames')
- ],
- 'block': [
- (r'\s+', Text),
- (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames'),
- (r'.', Punctuation)
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'\{%\s*(block|extends)', text) is not None:
- rv += 0.4
- if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
- rv += 0.1
- if re.search(r'\{\{.*?\}\}', text) is not None:
- rv += 0.1
- return rv
-
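For the Django/Jinja lexer the interesting question is which pieces become template tokens and which fall through as Other for a delegating lexer to pick up. A minimal sketch, assuming an installed Pygments; the template text is invented:

    from pygments.lexers.templates import DjangoLexer

    src = "{% for item in items %}{{ item.name }}{% endfor %}\n"
    for token_type, value in DjangoLexer().get_tokens(src):
        print(token_type, repr(value))
    # '{%', '%}', '{{' and '}}' show up as Comment.Preproc, 'for'/'in' as Keyword,
    # 'item'/'items' as Name.Variable, and any surrounding text as Other.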
-
-class MyghtyLexer(RegexLexer):
- """
- Generic myghty templates lexer. Code that isn't Myghty
- markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.6
- """
-
- name = 'Myghty'
- url = 'http://www.myghty.org/'
- aliases = ['myghty']
- filenames = ['*.myt', 'autodelegate']
- mimetypes = ['application/x-myghty']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Text, Name.Function, Name.Tag,
- using(this), Name.Tag)),
- (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Name.Function, Name.Tag,
- using(PythonLexer), Name.Tag)),
- (r'(<&[^|])(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
- (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
- (r'</&>', Name.Tag),
- (r'(?s)(<%!?)(.*?)(%>)',
- bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
- (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
- (r'(?<=^)(%)([^\n]*)(\n|\Z)',
- bygroups(Name.Tag, using(PythonLexer), Other)),
- (r"""(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=[%#]) | # an eval or comment line
- (?=</?[%&]) | # a substitution or block or
- # call start or end
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )""", bygroups(Other, Operator)),
- ]
- }
-
-
-class MyghtyHtmlLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'HTML+Myghty'
- aliases = ['html+myghty']
- mimetypes = ['text/html+myghty']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, MyghtyLexer, **options)
-
-
-class MyghtyXmlLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'XML+Myghty'
- aliases = ['xml+myghty']
- mimetypes = ['application/xml+myghty']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, MyghtyLexer, **options)
-
-
-class MyghtyJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `JavascriptLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'JavaScript+Myghty'
- aliases = ['javascript+myghty', 'js+myghty']
- mimetypes = ['application/x-javascript+myghty',
- 'text/x-javascript+myghty',
- 'text/javascript+mygthy']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, MyghtyLexer, **options)
-
-
-class MyghtyCssLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `CssLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'CSS+Myghty'
- aliases = ['css+myghty']
- mimetypes = ['text/css+myghty']
-
- def __init__(self, **options):
- super().__init__(CssLexer, MyghtyLexer, **options)
-
-
-class MasonLexer(RegexLexer):
- """
- Generic mason templates lexer. Stolen from Myghty lexer. Code that isn't
- Mason markup is HTML.
-
- .. versionadded:: 1.4
- """
- name = 'Mason'
- url = 'http://www.masonhq.com/'
- aliases = ['mason']
- filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
- mimetypes = ['application/x-mason']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(?s)(<%doc>)(.*?)(</%doc>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
- (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Whitespace, Name.Function, Name.Tag,
- using(this), Name.Tag)),
- (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
- (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
- (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
- (r'</&>', Name.Tag),
- (r'(?s)(<%!?)(.*?)(%>)',
- bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
- (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
- (r'(?<=^)(%)([^\n]*)(\n|\Z)',
- bygroups(Name.Tag, using(PerlLexer), Other)),
- (r"""(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=[%#]) | # an eval or comment line
- (?=</?[%&]) | # a substitution or block or
- # call start or end
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )""", bygroups(using(HtmlLexer), Operator)),
- ]
- }
-
- def analyse_text(text):
- result = 0.0
- if re.search(r'</%(class|doc|init)>', text) is not None:
- result = 1.0
- elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
- result = 0.11
- return result
-
-
-class MakoLexer(RegexLexer):
- """
- Generic mako templates lexer. Code that isn't Mako
- markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.7
- """
-
- name = 'Mako'
- url = 'http://www.makotemplates.org/'
- aliases = ['mako']
- filenames = ['*.mao']
- mimetypes = ['application/x-mako']
-
- tokens = {
- 'root': [
- (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
- bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
- (r'(\s*)(%)([^\n]*)(\n|\Z)',
- bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
- (r'(\s*)(##[^\n]*)(\n|\Z)',
- bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
- (r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
- (r'(<%)([\w.:]+)',
- bygroups(Comment.Preproc, Name.Builtin), 'tag'),
- (r'(</%)([\w.:]+)(>)',
- bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
- (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
- (r'(?s)(<%(?:!?))(.*?)(%>)',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(\$\{)(.*?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'''(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=%|\#\#) | # an eval or comment line
- (?=\#\*) | # multiline comment
- (?=</?%) | # a python block
- # call start or end
- (?=\$\{) | # a substitution
- (?<=\n)(?=\s*%) |
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )
- ''', bygroups(Other, Operator)),
- (r'\s+', Text),
- ],
- 'ondeftags': [
- (r'<%', Comment.Preproc),
- (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
- include('tag'),
- ],
- 'tag': [
- (r'((?:\w+)\s*=)(\s*)(".*?")',
- bygroups(Name.Attribute, Text, String)),
- (r'/?\s*>', Comment.Preproc, '#pop'),
- (r'\s+', Text),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
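A short sketch of driving the Mako lexer by its alias, assuming an installed Pygments; the template text and the formatter are illustrative:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    mako_src = "% for name in names:\n  Hello ${name}!\n% endfor\n"
    print(highlight(mako_src, get_lexer_by_name("mako"), TerminalFormatter()))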
-
-class MakoHtmlLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'HTML+Mako'
- aliases = ['html+mako']
- mimetypes = ['text/html+mako']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, MakoLexer, **options)
-
-
-class MakoXmlLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'XML+Mako'
- aliases = ['xml+mako']
- mimetypes = ['application/xml+mako']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, MakoLexer, **options)
-
-
-class MakoJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `JavascriptLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'JavaScript+Mako'
- aliases = ['javascript+mako', 'js+mako']
- mimetypes = ['application/x-javascript+mako',
- 'text/x-javascript+mako',
- 'text/javascript+mako']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, MakoLexer, **options)
-
-
-class MakoCssLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `CssLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'CSS+Mako'
- aliases = ['css+mako']
- mimetypes = ['text/css+mako']
-
- def __init__(self, **options):
- super().__init__(CssLexer, MakoLexer, **options)
-
-
-# Genshi and Cheetah lexers courtesy of Matt Good.
-
-class CheetahPythonLexer(Lexer):
- """
- Lexer for handling Cheetah's special $ tokens in Python syntax.
- """
-
- def get_tokens_unprocessed(self, text):
- pylexer = PythonLexer(**self.options)
- for pos, type_, value in pylexer.get_tokens_unprocessed(text):
- if type_ == Token.Error and value == '$':
- type_ = Comment.Preproc
- yield pos, type_, value
-
-
-class CheetahLexer(RegexLexer):
- """
- Generic cheetah templates lexer. Code that isn't Cheetah
- markup is yielded as `Token.Other`. This also works for
- `spitfire templates`_ which use the same syntax.
-
- .. _spitfire templates: http://code.google.com/p/spitfire/
- """
-
- name = 'Cheetah'
- url = 'http://www.cheetahtemplate.org/'
- aliases = ['cheetah', 'spitfire']
- filenames = ['*.tmpl', '*.spt']
- mimetypes = ['application/x-cheetah', 'application/x-spitfire']
-
- tokens = {
- 'root': [
- (r'(##[^\n]*)$',
- (bygroups(Comment))),
- (r'#[*](.|\n)*?[*]#', Comment),
- (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
- (r'#slurp$', Comment.Preproc),
- (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
- (bygroups(Comment.Preproc, using(CheetahPythonLexer),
- Comment.Preproc))),
- # TODO support other Python syntax like $foo['bar']
- (r'(\$)([a-zA-Z_][\w.]*\w)',
- bygroups(Comment.Preproc, using(CheetahPythonLexer))),
- (r'(?s)(\$\{!?)(.*?)(\})',
- bygroups(Comment.Preproc, using(CheetahPythonLexer),
- Comment.Preproc)),
- (r'''(?sx)
- (.+?) # anything, followed by:
- (?:
- (?=\#[#a-zA-Z]*) | # an eval comment
- (?=\$[a-zA-Z_{]) | # a substitution
- \Z # end of string
- )
- ''', Other),
- (r'\s+', Text),
- ],
- }
-
-
-class CheetahHtmlLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `HtmlLexer`.
- """
-
- name = 'HTML+Cheetah'
- aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
- mimetypes = ['text/html+cheetah', 'text/html+spitfire']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, CheetahLexer, **options)
-
-
-class CheetahXmlLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `XmlLexer`.
- """
-
- name = 'XML+Cheetah'
- aliases = ['xml+cheetah', 'xml+spitfire']
- mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, CheetahLexer, **options)
-
-
-class CheetahJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `JavascriptLexer`.
- """
-
- name = 'JavaScript+Cheetah'
- aliases = ['javascript+cheetah', 'js+cheetah',
- 'javascript+spitfire', 'js+spitfire']
- mimetypes = ['application/x-javascript+cheetah',
- 'text/x-javascript+cheetah',
- 'text/javascript+cheetah',
- 'application/x-javascript+spitfire',
- 'text/x-javascript+spitfire',
- 'text/javascript+spitfire']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, CheetahLexer, **options)
-
-
-class GenshiTextLexer(RegexLexer):
- """
- A lexer that highlights genshi text templates.
- """
-
- name = 'Genshi Text'
- url = 'http://genshi.edgewall.org/'
- aliases = ['genshitext']
- mimetypes = ['application/x-genshi-text', 'text/x-genshi']
-
- tokens = {
- 'root': [
- (r'[^#$\s]+', Other),
- (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
- (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
- include('variable'),
- (r'[#$\s]', Other),
- ],
- 'directive': [
- (r'\n', Text, '#pop'),
- (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
- (r'(choose|when|with)([^\S\n]+)(.*)',
- bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
- (r'(choose|otherwise)\b', Keyword, '#pop'),
- (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
- ],
- 'variable': [
- (r'(?<!\$)(\$\{)(.+?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
- Name.Variable),
- ]
- }
-
-
-class GenshiMarkupLexer(RegexLexer):
- """
- Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
- `GenshiLexer`.
- """
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^<$]+', Other),
- (r'(<\?python)(.*?)(\?>)',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- # yield style and script blocks as Other
- (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
- (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
- (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
- include('variable'),
- (r'[<$]', Other),
- ],
- 'pytag': [
- (r'\s+', Text),
- (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'pyattr': [
- ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
- ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
- (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- ('"', String, 'attr-dstring'),
- ("'", String, 'attr-sstring'),
- (r'[^\s>]*', String, '#pop')
- ],
- 'attr-dstring': [
- ('"', String, '#pop'),
- include('strings'),
- ("'", String)
- ],
- 'attr-sstring': [
- ("'", String, '#pop'),
- include('strings'),
- ("'", String)
- ],
- 'strings': [
- ('[^"\'$]+', String),
- include('variable')
- ],
- 'variable': [
- (r'(?<!\$)(\$\{)(.+?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
- Name.Variable),
- ]
- }
-
-
-class HtmlGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
- `kid <http://kid-templating.org/>`_ kid HTML templates.
- """
-
- name = 'HTML+Genshi'
- aliases = ['html+genshi', 'html+kid']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+genshi']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'\$\{.*?\}', text) is not None:
- rv += 0.2
- if re.search(r'py:(.*?)=["\']', text) is not None:
- rv += 0.2
- return rv + HtmlLexer.analyse_text(text) - 0.01
-
-
-class GenshiLexer(DelegatingLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
- `kid <http://kid-templating.org/>`_ kid XML templates.
- """
-
- name = 'Genshi'
- aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
- filenames = ['*.kid']
- alias_filenames = ['*.xml']
- mimetypes = ['application/x-genshi', 'application/x-kid']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, GenshiMarkupLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'\$\{.*?\}', text) is not None:
- rv += 0.2
- if re.search(r'py:(.*?)=["\']', text) is not None:
- rv += 0.2
- return rv + XmlLexer.analyse_text(text) - 0.01
-
-
-class JavascriptGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights javascript code in genshi text templates.
- """
-
- name = 'JavaScript+Genshi Text'
- aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
- 'javascript+genshi']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+genshi',
- 'text/x-javascript+genshi',
- 'text/javascript+genshi']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, GenshiTextLexer, **options)
-
- def analyse_text(text):
- return GenshiLexer.analyse_text(text) - 0.05
-
-
-class CssGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights CSS definitions in genshi text templates.
- """
-
- name = 'CSS+Genshi Text'
- aliases = ['css+genshitext', 'css+genshi']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+genshi']
-
- def __init__(self, **options):
- super().__init__(CssLexer, GenshiTextLexer, **options)
-
- def analyse_text(text):
- return GenshiLexer.analyse_text(text) - 0.05
-
-
-class RhtmlLexer(DelegatingLexer):
- """
- Subclass of the ERB lexer that highlights the unlexed data with the
- html lexer.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'RHTML'
- aliases = ['rhtml', 'html+erb', 'html+ruby']
- filenames = ['*.rhtml']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+ruby']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, ErbLexer, **options)
-
- def analyse_text(text):
- rv = ErbLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- # one more than the XmlErbLexer returns
- rv += 0.5
- return rv
-
-
-class XmlErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights data outside preprocessor
- directives with the `XmlLexer`.
- """
-
- name = 'XML+Ruby'
- aliases = ['xml+ruby', 'xml+erb']
- alias_filenames = ['*.xml']
- mimetypes = ['application/xml+ruby']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, ErbLexer, **options)
-
- def analyse_text(text):
- rv = ErbLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
- """
-
- name = 'CSS+Ruby'
- aliases = ['css+ruby', 'css+erb']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+ruby']
-
- def __init__(self, **options):
- super().__init__(CssLexer, ErbLexer, **options)
-
- def analyse_text(text):
- return ErbLexer.analyse_text(text) - 0.05
-
-
-class JavascriptErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Ruby'
- aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+ruby',
- 'text/x-javascript+ruby',
- 'text/javascript+ruby']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, ErbLexer, **options)
-
- def analyse_text(text):
- return ErbLexer.analyse_text(text) - 0.05
-
-
-class HtmlPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+PHP'
- aliases = ['html+php']
- filenames = ['*.phtml']
- alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
- '*.php[345]']
- mimetypes = ['application/x-php',
- 'application/x-httpd-php', 'application/x-httpd-php3',
- 'application/x-httpd-php4', 'application/x-httpd-php5']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, PhpLexer, **options)
-
- def analyse_text(text):
- rv = PhpLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
- """
-
- name = 'XML+PHP'
- aliases = ['xml+php']
- alias_filenames = ['*.xml', '*.php', '*.php[345]']
- mimetypes = ['application/xml+php']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, PhpLexer, **options)
-
- def analyse_text(text):
- rv = PhpLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
- """
-
- name = 'CSS+PHP'
- aliases = ['css+php']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+php']
-
- def __init__(self, **options):
- super().__init__(CssLexer, PhpLexer, **options)
-
- def analyse_text(text):
- return PhpLexer.analyse_text(text) - 0.05
-
-
-class JavascriptPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` which highlights unmatched data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+PHP'
- aliases = ['javascript+php', 'js+php']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+php',
- 'text/x-javascript+php',
- 'text/javascript+php']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, PhpLexer, **options)
-
- def analyse_text(text):
- return PhpLexer.analyse_text(text)
-
-
-class HtmlSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+Smarty'
- aliases = ['html+smarty']
- alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
- mimetypes = ['text/html+smarty']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- rv = SmartyLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `XmlLexer`.
- """
-
- name = 'XML+Smarty'
- aliases = ['xml+smarty']
- alias_filenames = ['*.xml', '*.tpl']
- mimetypes = ['application/xml+smarty']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- rv = SmartyLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `CssLexer`.
- """
-
- name = 'CSS+Smarty'
- aliases = ['css+smarty']
- alias_filenames = ['*.css', '*.tpl']
- mimetypes = ['text/css+smarty']
-
- def __init__(self, **options):
- super().__init__(CssLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- return SmartyLexer.analyse_text(text) - 0.05
-
-
-class JavascriptSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Smarty'
- aliases = ['javascript+smarty', 'js+smarty']
- alias_filenames = ['*.js', '*.tpl']
- mimetypes = ['application/x-javascript+smarty',
- 'text/x-javascript+smarty',
- 'text/javascript+smarty']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- return SmartyLexer.analyse_text(text) - 0.05
-
-
-class HtmlDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+Django/Jinja'
- aliases = ['html+django', 'html+jinja', 'htmldjango']
- filenames = ['*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+django', 'text/html+jinja']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- rv = DjangoLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
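The delegating variants compose two lexers: the root lexer (DjangoLexer here) claims the template constructs, and whatever it yields as Other is re-lexed by HtmlLexer. A minimal sketch, assuming an installed Pygments; the page fragment is invented:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    page = "<ul>{% for u in users %}<li>{{ u }}</li>{% endfor %}</ul>\n"
    lexer = get_lexer_by_name("html+django")  # resolves to HtmlDjangoLexer
    print(highlight(page, lexer, HtmlFormatter(nowrap=True)))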
-
-class XmlDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `XmlLexer`.
- """
-
- name = 'XML+Django/Jinja'
- aliases = ['xml+django', 'xml+jinja']
- filenames = ['*.xml.j2', '*.xml.jinja2']
- alias_filenames = ['*.xml']
- mimetypes = ['application/xml+django', 'application/xml+jinja']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- rv = DjangoLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `CssLexer`.
- """
-
- name = 'CSS+Django/Jinja'
- aliases = ['css+django', 'css+jinja']
- filenames = ['*.css.j2', '*.css.jinja2']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+django', 'text/css+jinja']
-
- def __init__(self, **options):
- super().__init__(CssLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JavascriptDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Django/Jinja'
- aliases = ['javascript+django', 'js+django',
- 'javascript+jinja', 'js+jinja']
- filenames = ['*.js.j2', '*.js.jinja2']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+django',
- 'application/x-javascript+jinja',
- 'text/x-javascript+django',
- 'text/x-javascript+jinja',
- 'text/javascript+django',
- 'text/javascript+jinja']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JspRootLexer(RegexLexer):
- """
- Base for the `JspLexer`. Yields `Token.Other` for area outside of
- JSP tags.
-
- .. versionadded:: 0.7
- """
-
- tokens = {
- 'root': [
- (r'<%\S?', Keyword, 'sec'),
- # FIXME: I want to make these keywords but still parse attributes.
- (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
- Keyword),
- (r'[^<]+', Other),
- (r'<', Other),
- ],
- 'sec': [
- (r'%>', Keyword, '#pop'),
- # note: '\w\W' != '.' without DOTALL.
- (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
- ],
- }
-
-
-class JspLexer(DelegatingLexer):
- """
- Lexer for Java Server Pages.
-
- .. versionadded:: 0.7
- """
- name = 'Java Server Page'
- aliases = ['jsp']
- filenames = ['*.jsp']
- mimetypes = ['application/x-jsp']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, JspRootLexer, **options)
-
- def analyse_text(text):
- rv = JavaLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
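JspLexer follows the same composition pattern: JspRootLexer isolates the <% ... %> sections and hands them to JavaLexer, while XmlLexer covers the remaining markup. A small sketch, assuming an installed Pygments; the page fragment is invented:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    jsp = '<html><body><% out.println("hi"); %></body></html>\n'
    print(highlight(jsp, get_lexer_by_name("jsp"), TerminalFormatter()))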
-
-class EvoqueLexer(RegexLexer):
- """
- For files using the Evoque templating system.
-
- .. versionadded:: 1.1
- """
- name = 'Evoque'
- aliases = ['evoque']
- filenames = ['*.evoque']
- mimetypes = ['application/x-evoque']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^#$]+', Other),
- (r'#\[', Comment.Multiline, 'comment'),
- (r'\$\$', Other),
- # svn keywords
- (r'\$\w+:[^$\n]*\$', Comment.Multiline),
- # directives: begin, end
- (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- String, Punctuation)),
- # directives: evoque, overlay
- # see doc for handling first name arg: /directives/evoque/
- # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
- # should be using(PythonLexer), not passed out as String
- (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
- r'(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- String, using(PythonLexer), Punctuation)),
- # directives: if, for, prefer, test
- (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- using(PythonLexer), Punctuation)),
- # directive clauses (no {} expression)
- (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
- # expressions
- (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
- bygroups(Punctuation, None, using(PythonLexer),
- Name.Builtin, None, None, Punctuation)),
- (r'#', Other),
- ],
- 'comment': [
- (r'[^\]#]', Comment.Multiline),
- (r'#\[', Comment.Multiline, '#push'),
- (r'\]#', Comment.Multiline, '#pop'),
- (r'[\]#]', Comment.Multiline)
- ],
- }
-
- def analyse_text(text):
- """Evoque templates use $evoque, which is unique."""
- if '$evoque' in text:
- return 1
-
-class EvoqueHtmlLexer(DelegatingLexer):
- """
- Subclass of the `EvoqueLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 1.1
- """
- name = 'HTML+Evoque'
- aliases = ['html+evoque']
- filenames = ['*.html']
- mimetypes = ['text/html+evoque']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, EvoqueLexer, **options)
-
- def analyse_text(text):
- return EvoqueLexer.analyse_text(text)
-
-
-class EvoqueXmlLexer(DelegatingLexer):
- """
- Subclass of the `EvoqueLexer` that highlights unlexed data with the
- `XmlLexer`.
-
- .. versionadded:: 1.1
- """
- name = 'XML+Evoque'
- aliases = ['xml+evoque']
- filenames = ['*.xml']
- mimetypes = ['application/xml+evoque']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, EvoqueLexer, **options)
-
- def analyse_text(text):
- return EvoqueLexer.analyse_text(text)
-
-
-class ColdfusionLexer(RegexLexer):
- """
- Coldfusion statements
- """
- name = 'cfstatement'
- aliases = ['cfs']
- filenames = []
- mimetypes = []
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'//.*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
- (r'\+\+|--', Operator),
- (r'[-+*/^&=!]', Operator),
- (r'<=|>=|<|>|==', Operator),
- (r'mod\b', Operator),
- (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
- (r'\|\||&&', Operator),
- (r'\?', Operator),
- (r'"', String.Double, 'string'),
- # There is a special rule for allowing html in single quoted
- # strings, evidently.
- (r"'.*?'", String.Single),
- (r'\d+', Number),
- (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
- r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
- r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(application|session|client|cookie|super|this|variables|arguments)\b',
- Name.Constant),
- (r'([a-z_$][\w.]*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-z_$][\w.]*', Name.Variable),
- (r'[()\[\]{};:,.\\]', Punctuation),
- (r'\s+', Text),
- ],
- 'string': [
- (r'""', String.Double),
- (r'#.+?#', String.Interp),
- (r'[^"#]+', String.Double),
- (r'#', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- }
-
-
-class ColdfusionMarkupLexer(RegexLexer):
- """
- Coldfusion markup only
- """
- name = 'Coldfusion'
- aliases = ['cf']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'[^<]+', Other),
- include('tags'),
- (r'<[^<>]*', Other),
- ],
- 'tags': [
- (r'<!---', Comment.Multiline, 'cfcomment'),
- (r'(?s)<!--.*?-->', Comment),
- (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
- (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
- bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
- # negative lookbehind is for strings with embedded >
- (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
- r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
- r'mailpart|mail|header|content|zip|image|lock|argument|try|'
- r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
- bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
- ],
- 'cfoutput': [
- (r'[^#<]+', Other),
- (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
- Punctuation)),
- # (r'<cfoutput.*?>', Name.Builtin, '#push'),
- (r'</cfoutput.*?>', Name.Builtin, '#pop'),
- include('tags'),
- (r'(?s)<[^<>]*', Other),
- (r'#', Other),
- ],
- 'cfcomment': [
- (r'<!---', Comment.Multiline, '#push'),
- (r'--->', Comment.Multiline, '#pop'),
- (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
- ],
- }
-
-
-class ColdfusionHtmlLexer(DelegatingLexer):
- """
- Coldfusion markup in html
- """
- name = 'Coldfusion HTML'
- aliases = ['cfm']
- filenames = ['*.cfm', '*.cfml']
- mimetypes = ['application/x-coldfusion']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
-
-
-class ColdfusionCFCLexer(DelegatingLexer):
- """
- Coldfusion markup/script components
-
- .. versionadded:: 2.0
- """
- name = 'Coldfusion CFC'
- aliases = ['cfc']
- filenames = ['*.cfc']
- mimetypes = []
-
- def __init__(self, **options):
- super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
-
-
-class SspLexer(DelegatingLexer):
- """
- Lexer for Scalate Server Pages.
-
- .. versionadded:: 1.4
- """
- name = 'Scalate Server Page'
- aliases = ['ssp']
- filenames = ['*.ssp']
- mimetypes = ['application/x-ssp']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, JspRootLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'val \w+\s*:', text):
- rv += 0.6
- if looks_like_xml(text):
- rv += 0.2
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class TeaTemplateRootLexer(RegexLexer):
- """
- Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
- code blocks.
-
- .. versionadded:: 1.5
- """
-
- tokens = {
- 'root': [
- (r'<%\S?', Keyword, 'sec'),
- (r'[^<]+', Other),
- (r'<', Other),
- ],
- 'sec': [
- (r'%>', Keyword, '#pop'),
- # note: '\w\W' != '.' without DOTALL.
- (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
- ],
- }
-
-
-class TeaTemplateLexer(DelegatingLexer):
- """
- Lexer for `Tea Templates <http://teatrove.org/>`_.
-
- .. versionadded:: 1.5
- """
- name = 'Tea'
- aliases = ['tea']
- filenames = ['*.tea']
- mimetypes = ['text/x-tea']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
-
- def analyse_text(text):
- rv = TeaLangLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class LassoHtmlLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `HtmlLexer`.
-
- Nested JavaScript and CSS is also highlighted.
-
- .. versionadded:: 1.6
- """
-
- name = 'HTML+Lasso'
- aliases = ['html+lasso']
- alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
- '*.incl', '*.inc', '*.las']
- mimetypes = ['text/html+lasso',
- 'application/x-httpd-lasso',
- 'application/x-httpd-lasso[89]']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text): # same as HTML lexer
- rv += 0.5
- return rv
-
-
-class LassoXmlLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `XmlLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'XML+Lasso'
- aliases = ['xml+lasso']
- alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
- '*.incl', '*.inc', '*.las']
- mimetypes = ['application/xml+lasso']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class LassoCssLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `CssLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'CSS+Lasso'
- aliases = ['css+lasso']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+lasso']
-
- def __init__(self, **options):
- options['requiredelimiters'] = True
- super().__init__(CssLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.05
- if re.search(r'\w+:[^;]+;', text):
- rv += 0.1
- if 'padding:' in text:
- rv += 0.1
- return rv
-
-
-class LassoJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `JavascriptLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'JavaScript+Lasso'
- aliases = ['javascript+lasso', 'js+lasso']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+lasso',
- 'text/x-javascript+lasso',
- 'text/javascript+lasso']
-
- def __init__(self, **options):
- options['requiredelimiters'] = True
- super().__init__(JavascriptLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.05
- return rv
-
-
-class HandlebarsLexer(RegexLexer):
- """
- Generic handlebars template lexer.
-
- Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
- Everything else is left for a delegating lexer.
-
- .. versionadded:: 2.0
- """
-
- name = "Handlebars"
- url = 'https://handlebarsjs.com/'
- aliases = ['handlebars']
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
-
- # Comment start {{! }} or {{!--
- (r'\{\{!.*\}\}', Comment),
-
- # HTML Escaping open {{{expression
- (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
-
- # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
- (r'(\{\{)([#~/]+)([^\s}]*)',
- bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
- (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
- ],
-
- 'tag': [
- (r'\s+', Text),
- # HTML Escaping close }}}
- (r'\}\}\}', Comment.Special, '#pop'),
- # blockClose}}, includes optional tilde ~
- (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
-
- # {{opt=something}}
- (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
-
- # Partials {{> ...}}
- (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
- (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
- (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
- 'dynamic-partial'),
-
- include('generic'),
- ],
- 'dynamic-partial': [
- (r'\s+', Text),
- (r'\)', Punctuation, '#pop'),
-
- (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
- Name.Variable, Text)),
- (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
- using(this, state='variable'))),
- (r'[\w-]+', Name.Function),
-
- include('generic'),
- ],
- 'variable': [
- (r'[()/@a-zA-Z][\w-]*', Name.Variable),
- (r'\.[\w-]+', Name.Variable),
- (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
- ],
- 'generic': [
- include('variable'),
-
- # borrowed from DjangoLexer
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ]
- }
-
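On its own, HandlebarsLexer only tokenizes the {{ ... }} constructs, which is easy to see by dumping its token stream. A minimal sketch, assuming an installed Pygments; the template is invented:

    from pygments.lexers.templates import HandlebarsLexer

    tpl = "<b>{{#if user}}{{user.name}}{{/if}}</b>\n"
    for token_type, value in HandlebarsLexer().get_tokens(tpl):
        # The surrounding HTML stays Token.Other; for HTML-aware highlighting
        # use the 'html+handlebars' alias (HandlebarsHtmlLexer below).
        print(token_type, repr(value))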
-
-class HandlebarsHtmlLexer(DelegatingLexer):
- """
- Subclass of the `HandlebarsLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 2.0
- """
-
- name = "HTML+Handlebars"
- aliases = ["html+handlebars"]
- filenames = ['*.handlebars', '*.hbs']
- mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, HandlebarsLexer, **options)
-
-
-class YamlJinjaLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `YamlLexer`.
-
- Commonly used in Saltstack salt states.
-
- .. versionadded:: 2.0
- """
-
- name = 'YAML+Jinja'
- aliases = ['yaml+jinja', 'salt', 'sls']
- filenames = ['*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2']
- mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
-
- def __init__(self, **options):
- super().__init__(YamlLexer, DjangoLexer, **options)
-
-
-class LiquidLexer(RegexLexer):
- """
- Lexer for Liquid templates.
-
- .. versionadded:: 2.0
- """
- name = 'liquid'
- url = 'https://www.rubydoc.info/github/Shopify/liquid'
- aliases = ['liquid']
- filenames = ['*.liquid']
-
- tokens = {
- 'root': [
- (r'[^{]+', Text),
- # tags and block tags
- (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
- # output tags
- (r'(\{\{)(\s*)([^\s}]+)',
- bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
- 'output'),
- (r'\{', Text)
- ],
-
- 'tag-or-block': [
- # builtin logic blocks
- (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
- (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
- combined('end-of-block', 'whitespace', 'generic')),
- (r'(else)(\s*)(%\})',
- bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
-
- # other builtin blocks
- (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
- Whitespace, Punctuation), '#pop'),
- (r'(comment)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
- (r'(raw)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
-
- # end of block
- (r'(end(case|unless|if))(\s*)(%\})',
- bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
- (r'(end([^\s%]+))(\s*)(%\})',
- bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
-
- # builtin tags (assign and include are handled together with usual tags)
- (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
- bygroups(Name.Tag, Whitespace,
- using(this, state='generic'), Punctuation, Whitespace),
- 'variable-tag-markup'),
-
- # other tags or blocks
- (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
- ],
-
- 'output': [
- include('whitespace'),
- (r'\}\}', Punctuation, '#pop'), # end of output
-
- (r'\|', Punctuation, 'filters')
- ],
-
- 'filters': [
- include('whitespace'),
- (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
-
- (r'([^\s|:]+)(:?)(\s*)',
- bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
- ],
-
- 'filter-markup': [
- (r'\|', Punctuation, '#pop'),
- include('end-of-tag'),
- include('default-param-markup')
- ],
-
- 'condition': [
- include('end-of-block'),
- include('whitespace'),
-
- (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
- bygroups(using(this, state = 'generic'), Whitespace, Operator,
- Whitespace, using(this, state = 'generic'), Whitespace,
- Punctuation)),
- (r'\b!', Operator),
- (r'\bnot\b', Operator.Word),
- (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
- bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
- Whitespace, using(this, state = 'generic'))),
-
- include('generic'),
- include('whitespace')
- ],
-
- 'generic-value': [
- include('generic'),
- include('end-at-whitespace')
- ],
-
- 'operator': [
- (r'(\s*)((=|!|>|<)=?)(\s*)',
- bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
- (r'(\s*)(\bcontains\b)(\s*)',
- bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
- ],
-
- 'end-of-tag': [
- (r'\}\}', Punctuation, '#pop')
- ],
-
- 'end-of-block': [
- (r'%\}', Punctuation, ('#pop', '#pop'))
- ],
-
- 'end-at-whitespace': [
- (r'\s+', Whitespace, '#pop')
- ],
-
- # states for unknown markup
- 'param-markup': [
- include('whitespace'),
- # params with colons or equals
- (r'([^\s=:]+)(\s*)(=|:)',
- bygroups(Name.Attribute, Whitespace, Operator)),
- # explicit variables
- (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
- bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
- Whitespace, Punctuation)),
-
- include('string'),
- include('number'),
- include('keyword'),
- (r',', Punctuation)
- ],
-
- 'default-param-markup': [
- include('param-markup'),
- (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
- ],
-
- 'variable-param-markup': [
- include('param-markup'),
- include('variable'),
- (r'.', Text) # fallback
- ],
-
- 'tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
- include('default-param-markup')
- ],
-
- 'variable-tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
- include('variable-param-markup')
- ],
-
- # states for different values types
- 'keyword': [
- (r'\b(false|true)\b', Keyword.Constant)
- ],
-
- 'variable': [
- (r'[a-zA-Z_]\w*', Name.Variable),
- (r'(?<=\w)\.(?=\w)', Punctuation)
- ],
-
- 'string': [
- (r"'[^']*'", String.Single),
- (r'"[^"]*"', String.Double)
- ],
-
- 'number': [
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer)
- ],
-
- 'generic': [ # decides for variable, string, keyword or number
- include('keyword'),
- include('string'),
- include('number'),
- include('variable')
- ],
-
- 'whitespace': [
- (r'[ \t]+', Whitespace)
- ],
-
- # states for builtin blocks
- 'comment': [
- (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
- bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
- Punctuation), ('#pop', '#pop')),
- (r'.', Comment)
- ],
-
- 'raw': [
- (r'[^{]+', Text),
- (r'(\{%)(\s*)(endraw)(\s*)(%\})',
- bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
- Punctuation), '#pop'),
- (r'\{', Text)
- ],
- }
-
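A short sketch of exercising the Liquid lexer by its alias, assuming an installed Pygments; the snippet is illustrative:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    liquid_src = "{% if product.available %}{{ product.title | upcase }}{% endif %}\n"
    print(highlight(liquid_src, get_lexer_by_name("liquid"), TerminalFormatter()))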
-
-class TwigLexer(RegexLexer):
- """
- Twig template lexer.
-
- It just highlights Twig code between the preprocessor directives,
- other data is left untouched by the lexer.
-
- .. versionadded:: 2.0
- """
-
- name = 'Twig'
- aliases = ['twig']
- mimetypes = ['application/x-twig']
-
- flags = re.M | re.S
-
- # Note that a backslash is included in the following two patterns
- # PHP uses a backslash as a namespace separator
- _ident_char = r'[\\\w-]|[^\x00-\x7f]'
- _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
- _ident_end = r'(?:' + _ident_char + ')*'
- _ident_inner = _ident_begin + _ident_end
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'\{\{', Comment.Preproc, 'var'),
- # twig comments
- (r'\{\#.*?\#\}', Comment),
- # raw twig blocks
- (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Other, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Other, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # filter blocks
- (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
- bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
- 'tag'),
- (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword), 'tag'),
- (r'\{', Other),
- ],
- 'varnames': [
- (r'(\|)(\s*)(%s)' % _ident_inner,
- bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
- (r'(in|not|and|b-and|or|b-or|b-xor|is|'
- r'if|elseif|else|import|'
- r'constant|defined|divisibleby|empty|even|iterable|odd|sameas|'
- r'matches|starts\s+with|ends\s+with)\b',
- Keyword),
- (r'(loop|block|parent)\b', Name.Builtin),
- (_ident_inner, Name.Variable),
- (r'\.' + _ident_inner, Name.Variable),
- (r'\.[0-9]+', Number),
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
- (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ],
- 'var': [
- (r'\s+', Text),
- (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames')
- ],
- 'tag': [
- (r'\s+', Text),
- (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames'),
- (r'.', Punctuation),
- ],
- }
-
-
-class TwigHtmlLexer(DelegatingLexer):
- """
- Subclass of the `TwigLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 2.0
- """
-
- name = "HTML+Twig"
- aliases = ["html+twig"]
- filenames = ['*.twig']
- mimetypes = ['text/html+twig']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, TwigLexer, **options)
-
-
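Since TwigHtmlLexer above is a DelegatingLexer, anything the Twig rules leave as Other is re-lexed as HTML while the Twig syntax keeps its own tokens. A minimal usage sketch; the template string is invented for illustration:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    template = '<p>Hello {{ user.name|title }}</p>'
    lexer = get_lexer_by_name('html+twig')   # resolves to TwigHtmlLexer
    print(highlight(template, lexer, TerminalFormatter()))
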
-class Angular2Lexer(RegexLexer):
- """
- Generic Angular2 template lexer.
-
- Highlights only the Angular template tags (stuff between `{{` and `}}` and
- special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
- Everything else is left for a delegating lexer.
-
- .. versionadded:: 2.1
- """
-
- name = "Angular2"
- url = 'https://angular.io/guide/template-syntax'
- aliases = ['ng2']
-
- tokens = {
- 'root': [
- (r'[^{([*#]+', Other),
-
- # {{meal.name}}
- (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),
-
- # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
- (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
- bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
- 'attr'),
- (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
- bygroups(Punctuation, Name.Attribute, Punctuation, Text)),
-
- # *ngIf="..."; #f="ngForm"
- (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
- bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
- (r'([*#])([\w:.-]+)(\s*)',
- bygroups(Punctuation, Name.Attribute, Text)),
- ],
-
- 'ngExpression': [
- (r'\s+(\|\s+)?', Text),
- (r'\}\}', Comment.Preproc, '#pop'),
-
- # Literals
- (r':?(true|false)', String.Boolean),
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
-
- # Variable text
- (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
- (r'\.[\w-]+(\(.*\))?', Name.Variable),
-
- # inline If
- (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
- bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
-
-class Angular2HtmlLexer(DelegatingLexer):
- """
- Subclass of the `Angular2Lexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 2.0
- """
-
- name = "HTML + Angular2"
- aliases = ["html+ng2"]
- filenames = ['*.ng2']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, Angular2Lexer, **options)
-
-
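To see which spans the Angular2 rules claim and which are left as Other for the HTML side of the delegation, inspecting the raw token stream is enough. A small sketch; the markup line is invented for illustration:

    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('ng2')   # the standalone Angular2Lexer
    markup = '<li (click)="select(item)">{{ item.name }}</li>'
    for token_type, value in lexer.get_tokens(markup):
        print(token_type, repr(value))
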
-class SqlJinjaLexer(DelegatingLexer):
- """
- Templated SQL lexer.
-
- .. versionadded:: 2.13
- """
-
- name = 'SQL+Jinja'
- aliases = ['sql+jinja']
- filenames = ['*.sql', '*.sql.j2', '*.sql.jinja2']
-
- def __init__(self, **options):
- super().__init__(SqlLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
- # dbt's ref function
- if re.search(r'\{\{\s*ref\(.*\)\s*\}\}', text):
- rv += 0.4
- # dbt's source function
- if re.search(r'\{\{\s*source\(.*\)\s*\}\}', text):
- rv += 0.25
- # Jinja macro
- if re.search(r'\{%-?\s*macro \w+\(.*\)\s*-?%\}', text):
- rv += 0.15
- return rv
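The analyse_text hook above is what lets Pygments' lexer guessing prefer templated SQL when dbt-style ref()/source() calls appear. A rough check of the scoring, with a made-up snippet:

    from pygments.lexers.templates import SqlJinjaLexer

    snippet = "select * from {{ ref('orders') }} where amount > 0"
    print(SqlJinjaLexer.analyse_text(snippet))   # 0.4, from the ref() rule alone
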
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/teraterm.py b/venv/lib/python3.11/site-packages/pygments/lexers/teraterm.py
deleted file mode 100644
index 2c0d3b2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/teraterm.py
+++ /dev/null
@@ -1,326 +0,0 @@
-"""
- pygments.lexers.teraterm
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Tera Term macro files.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Text, Comment, Operator, Name, String, \
- Number, Keyword, Error
-
-__all__ = ['TeraTermLexer']
-
-
-class TeraTermLexer(RegexLexer):
- """
- For Tera Term macro source code.
-
- .. versionadded:: 2.4
- """
- name = 'Tera Term macro'
- url = 'https://ttssh2.osdn.jp/'
- aliases = ['teratermmacro', 'teraterm', 'ttl']
- filenames = ['*.ttl']
- mimetypes = ['text/x-teratermmacro']
-
- tokens = {
- 'root': [
- include('comments'),
- include('labels'),
- include('commands'),
- include('builtin-variables'),
- include('user-variables'),
- include('operators'),
- include('numeric-literals'),
- include('string-literals'),
- include('all-whitespace'),
- (r'\S', Text),
- ],
- 'comments': [
- (r';[^\r\n]*', Comment.Single),
- (r'/\*', Comment.Multiline, 'in-comment'),
- ],
- 'in-comment': [
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^*/]+', Comment.Multiline),
- (r'[*/]', Comment.Multiline)
- ],
- 'labels': [
- (r'(?i)^(\s*)(:[a-z0-9_]+)', bygroups(Text.Whitespace, Name.Label)),
- ],
- 'commands': [
- (
- r'(?i)\b('
- r'basename|'
- r'beep|'
- r'bplusrecv|'
- r'bplussend|'
- r'break|'
- r'bringupbox|'
- # 'call' is handled separately.
- r'callmenu|'
- r'changedir|'
- r'checksum16|'
- r'checksum16file|'
- r'checksum32|'
- r'checksum32file|'
- r'checksum8|'
- r'checksum8file|'
- r'clearscreen|'
- r'clipb2var|'
- r'closesbox|'
- r'closett|'
- r'code2str|'
- r'connect|'
- r'continue|'
- r'crc16|'
- r'crc16file|'
- r'crc32|'
- r'crc32file|'
- r'cygconnect|'
- r'delpassword|'
- r'dirname|'
- r'dirnamebox|'
- r'disconnect|'
- r'dispstr|'
- r'do|'
- r'else|'
- r'elseif|'
- r'enablekeyb|'
- r'end|'
- r'endif|'
- r'enduntil|'
- r'endwhile|'
- r'exec|'
- r'execcmnd|'
- r'exit|'
- r'expandenv|'
- r'fileclose|'
- r'fileconcat|'
- r'filecopy|'
- r'filecreate|'
- r'filedelete|'
- r'filelock|'
- r'filemarkptr|'
- r'filenamebox|'
- r'fileopen|'
- r'fileread|'
- r'filereadln|'
- r'filerename|'
- r'filesearch|'
- r'fileseek|'
- r'fileseekback|'
- r'filestat|'
- r'filestrseek|'
- r'filestrseek2|'
- r'filetruncate|'
- r'fileunlock|'
- r'filewrite|'
- r'filewriteln|'
- r'findclose|'
- r'findfirst|'
- r'findnext|'
- r'flushrecv|'
- r'foldercreate|'
- r'folderdelete|'
- r'foldersearch|'
- r'for|'
- r'getdate|'
- r'getdir|'
- r'getenv|'
- r'getfileattr|'
- r'gethostname|'
- r'getipv4addr|'
- r'getipv6addr|'
- r'getmodemstatus|'
- r'getpassword|'
- r'getspecialfolder|'
- r'gettime|'
- r'gettitle|'
- r'getttdir|'
- r'getver|'
- # 'goto' is handled separately.
- r'if|'
- r'ifdefined|'
- r'include|'
- r'inputbox|'
- r'int2str|'
- r'intdim|'
- r'ispassword|'
- r'kmtfinish|'
- r'kmtget|'
- r'kmtrecv|'
- r'kmtsend|'
- r'listbox|'
- r'loadkeymap|'
- r'logautoclosemode|'
- r'logclose|'
- r'loginfo|'
- r'logopen|'
- r'logpause|'
- r'logrotate|'
- r'logstart|'
- r'logwrite|'
- r'loop|'
- r'makepath|'
- r'messagebox|'
- r'mpause|'
- r'next|'
- r'passwordbox|'
- r'pause|'
- r'quickvanrecv|'
- r'quickvansend|'
- r'random|'
- r'recvln|'
- r'regexoption|'
- r'restoresetup|'
- r'return|'
- r'rotateleft|'
- r'rotateright|'
- r'scprecv|'
- r'scpsend|'
- r'send|'
- r'sendbreak|'
- r'sendbroadcast|'
- r'sendfile|'
- r'sendkcode|'
- r'sendln|'
- r'sendlnbroadcast|'
- r'sendlnmulticast|'
- r'sendmulticast|'
- r'setbaud|'
- r'setdate|'
- r'setdebug|'
- r'setdir|'
- r'setdlgpos|'
- r'setdtr|'
- r'setecho|'
- r'setenv|'
- r'setexitcode|'
- r'setfileattr|'
- r'setflowctrl|'
- r'setmulticastname|'
- r'setpassword|'
- r'setrts|'
- r'setspeed|'
- r'setsync|'
- r'settime|'
- r'settitle|'
- r'show|'
- r'showtt|'
- r'sprintf|'
- r'sprintf2|'
- r'statusbox|'
- r'str2code|'
- r'str2int|'
- r'strcompare|'
- r'strconcat|'
- r'strcopy|'
- r'strdim|'
- r'strinsert|'
- r'strjoin|'
- r'strlen|'
- r'strmatch|'
- r'strremove|'
- r'strreplace|'
- r'strscan|'
- r'strspecial|'
- r'strsplit|'
- r'strtrim|'
- r'testlink|'
- r'then|'
- r'tolower|'
- r'toupper|'
- r'unlink|'
- r'until|'
- r'uptime|'
- r'var2clipb|'
- r'wait|'
- r'wait4all|'
- r'waitevent|'
- r'waitln|'
- r'waitn|'
- r'waitrecv|'
- r'waitregex|'
- r'while|'
- r'xmodemrecv|'
- r'xmodemsend|'
- r'yesnobox|'
- r'ymodemrecv|'
- r'ymodemsend|'
- r'zmodemrecv|'
- r'zmodemsend'
- r')\b',
- Keyword,
- ),
- (r'(?i)(call|goto)([ \t]+)([a-z0-9_]+)',
- bygroups(Keyword, Text.Whitespace, Name.Label)),
- ],
- 'builtin-variables': [
- (
- r'(?i)('
- r'groupmatchstr1|'
- r'groupmatchstr2|'
- r'groupmatchstr3|'
- r'groupmatchstr4|'
- r'groupmatchstr5|'
- r'groupmatchstr6|'
- r'groupmatchstr7|'
- r'groupmatchstr8|'
- r'groupmatchstr9|'
- r'inputstr|'
- r'matchstr|'
- r'mtimeout|'
- r'param1|'
- r'param2|'
- r'param3|'
- r'param4|'
- r'param5|'
- r'param6|'
- r'param7|'
- r'param8|'
- r'param9|'
- r'paramcnt|'
- r'params|'
- r'result|'
- r'timeout'
- r')\b',
- Name.Builtin
- ),
- ],
- 'user-variables': [
- (r'(?i)[a-z_][a-z0-9_]*', Name.Variable),
- ],
- 'numeric-literals': [
- (r'(-?)([0-9]+)', bygroups(Operator, Number.Integer)),
- (r'(?i)\$[0-9a-f]+', Number.Hex),
- ],
- 'string-literals': [
- (r'(?i)#(?:[0-9]+|\$[0-9a-f]+)', String.Char),
- (r"'[^'\n]*'", String.Single),
- (r'"[^"\n]*"', String.Double),
- # Opening quotes without a closing quote on the same line are errors.
- (r"('[^']*)(\n)", bygroups(Error, Text.Whitespace)),
- (r'("[^"]*)(\n)', bygroups(Error, Text.Whitespace)),
- ],
- 'operators': [
- (r'and|not|or|xor', Operator.Word),
- (r'[!%&*+<=>^~\|\/-]+', Operator),
- (r'[()]', String.Symbol),
- ],
- 'all-whitespace': [
- (r'\s+', Text.Whitespace),
- ],
- }
-
- # Turtle and Tera Term macro files share the same file extension
- # but each has a recognizable and distinct syntax.
- def analyse_text(text):
- if re.search(TeraTermLexer.tokens['commands'][0][0], text):
- return 0.01
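Because *.ttl is shared with the Turtle (RDF) lexer, passing the file contents to get_lexer_for_filename lets analyse_text break the tie, which is the point of the small 0.01 score above. A sketch with an invented macro:

    from pygments.lexers import get_lexer_for_filename

    macro = 'connect example.org /ssh /2\nwait "login:"\nsendln username\n'
    lexer = get_lexer_for_filename('session.ttl', macro)
    print(lexer.name)   # expected to be 'Tera Term macro' rather than Turtle
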
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/testing.py b/venv/lib/python3.11/site-packages/pygments/lexers/testing.py
deleted file mode 100644
index dec3a15..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/testing.py
+++ /dev/null
@@ -1,210 +0,0 @@
-"""
- pygments.lexers.testing
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for testing languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Comment, Keyword, Name, String, Number, Generic, Text
-
-__all__ = ['GherkinLexer', 'TAPLexer']
-
-
-class GherkinLexer(RegexLexer):
- """
- For Gherkin syntax.
-
- .. versionadded:: 1.2
- """
- name = 'Gherkin'
- aliases = ['gherkin', 'cucumber']
- filenames = ['*.feature']
- mimetypes = ['text/x-gherkin']
-
- feature_keywords = '^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
- feature_element_keywords = '^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
- examples_keywords = '^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
- step_keywords = '^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )'
-
- tokens = {
- 'comments': [
- (r'^\s*#.*$', Comment),
- ],
- 'feature_elements': [
- (step_keywords, Keyword, "step_content_stack"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'feature_elements_on_stack': [
- (step_keywords, Keyword, "#pop:2"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table': [
- (r"\s+\|", Keyword, 'examples_table_header'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table_header': [
- (r"\s+\|\s*$", Keyword, "#pop:2"),
- include('comments'),
- (r"\\\|", Name.Variable),
- (r"\s*\|", Keyword),
- (r"[^|]", Name.Variable),
- ],
- 'scenario_sections_on_stack': [
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- "feature_elements_on_stack"),
- ],
- 'narrative': [
- include('scenario_sections_on_stack'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'table_vars': [
- (r'(<[^>]+>)', Name.Variable),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
- ],
- 'string': [
- include('table_vars'),
- (r'(\s|.)', String),
- ],
- 'py_string': [
- (r'"""', Keyword, "#pop"),
- include('string'),
- ],
- 'step_content_root': [
- (r"$", Keyword, "#pop"),
- include('step_content'),
- ],
- 'step_content_stack': [
- (r"$", Keyword, "#pop:2"),
- include('step_content'),
- ],
- 'step_content': [
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- include('comments'),
- (r'(\s|.)', Name.Function),
- ],
- 'table_content': [
- (r"\s+\|\s*$", Keyword, "#pop"),
- include('comments'),
- (r"\\\|", String),
- (r"\s*\|", Keyword),
- include('string'),
- ],
- 'double_string': [
- (r'"', Name.Function, "#pop"),
- include('string'),
- ],
- 'root': [
- (r'\n', Name.Function),
- include('comments'),
- (r'"""', Keyword, "py_string"),
- (r'\s+\|', Keyword, 'table_content'),
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
- (step_keywords, bygroups(Name.Function, Keyword),
- 'step_content_root'),
- (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
- 'narrative'),
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'feature_elements'),
- (examples_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'examples_table'),
- (r'(\s|.)', Name.Function),
- ]
- }
-
- def analyse_text(self, text):
- return
-
-
-class TAPLexer(RegexLexer):
- """
- For Test Anything Protocol (TAP) output.
-
- .. versionadded:: 2.1
- """
- name = 'TAP'
- url = 'https://testanything.org/'
- aliases = ['tap']
- filenames = ['*.tap']
-
- tokens = {
- 'root': [
- # A TAP version may be specified.
- (r'^TAP version \d+\n', Name.Namespace),
-
- # Specify a plan with a plan line.
- (r'^1\.\.\d+', Keyword.Declaration, 'plan'),
-
- # A test failure
- (r'^(not ok)([^\S\n]*)(\d*)',
- bygroups(Generic.Error, Text, Number.Integer), 'test'),
-
- # A test success
- (r'^(ok)([^\S\n]*)(\d*)',
- bygroups(Keyword.Reserved, Text, Number.Integer), 'test'),
-
- # Diagnostics start with a hash.
- (r'^#.*\n', Comment),
-
- # TAP's version of an abort statement.
- (r'^Bail out!.*\n', Generic.Error),
-
- # TAP ignores any unrecognized lines.
- (r'^.*\n', Text),
- ],
- 'plan': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Text),
-
- # A plan may have a directive with it.
- (r'#', Comment, 'directive'),
-
- # Or it could just end.
- (r'\n', Comment, '#pop'),
-
- # Anything else is wrong.
- (r'.*\n', Generic.Error, '#pop'),
- ],
- 'test': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Text),
-
- # A test may have a directive with it.
- (r'#', Comment, 'directive'),
-
- (r'\S+', Text),
-
- (r'\n', Text, '#pop'),
- ],
- 'directive': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Comment),
-
- # Extract todo items.
- (r'(?i)\bTODO\b', Comment.Preproc),
-
- # Extract skip items.
- (r'(?i)\bSKIP\S*', Comment.Preproc),
-
- (r'\S+', Comment),
-
- (r'\n', Comment, '#pop:2'),
- ],
- }
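The states above follow the TAP grammar: an optional version line, a plan, ok/not ok test lines with optional # TODO / # SKIP directives, and Bail out!. A short invented transcript, filtered for failures by token type:

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Generic

    tap = (
        "TAP version 13\n"
        "1..3\n"
        "ok 1 - parses config\n"
        "not ok 2 - writes output\n"
        "ok 3 - cleans up # SKIP windows only\n"
    )
    lexer = get_lexer_by_name('tap')
    failures = [v for t, v in lexer.get_tokens(tap) if t is Generic.Error]
    print(failures)   # the 'not ok' marker comes out as Generic.Error
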
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/text.py b/venv/lib/python3.11/site-packages/pygments/lexers/text.py
deleted file mode 100644
index e35b0b5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/text.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
- pygments.lexers.text
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for non-source code file types.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
- SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer, \
- UnixConfigLexer
-from pygments.lexers.console import PyPyLogLexer
-from pygments.lexers.textedit import VimLexer
-from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
- TexLexer, GroffLexer
-from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
-from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
-from pygments.lexers.haxe import HxmlLexer
-from pygments.lexers.sgf import SmartGameFormatLexer
-from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
-from pygments.lexers.data import YamlLexer
-from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
-
-__all__ = []
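With an empty __all__, this module exists only so historical import paths keep resolving; the re-exported classes live in the modules named above. A one-line check under that assumption:

    # Both names refer to the same class; the first path is the compatibility shim.
    from pygments.lexers.text import DiffLexer as CompatDiffLexer
    from pygments.lexers.diff import DiffLexer

    assert CompatDiffLexer is DiffLexer
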
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/textedit.py b/venv/lib/python3.11/site-packages/pygments/lexers/textedit.py
deleted file mode 100644
index 09defdb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/textedit.py
+++ /dev/null
@@ -1,202 +0,0 @@
-"""
- pygments.lexers.textedit
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for languages related to text processing.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from bisect import bisect
-
-from pygments.lexer import RegexLexer, bygroups, default, include, this, using
-from pygments.lexers.python import PythonLexer
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
-
-__all__ = ['AwkLexer', 'SedLexer', 'VimLexer']
-
-
-class AwkLexer(RegexLexer):
- """
- For Awk scripts.
-
- .. versionadded:: 1.5
- """
-
- name = 'Awk'
- aliases = ['awk', 'gawk', 'mawk', 'nawk']
- filenames = ['*.awk']
- mimetypes = ['application/x-awk']
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'\B', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
- r'(\*\*|[-<>+*%\^/!=|])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(break|continue|do|while|exit|for|if|else|'
- r'return)\b', Keyword, 'slashstartsregex'),
- (r'function\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
- r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
- r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
- r'delete|system)\b', Keyword.Reserved),
- (r'(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|'
- r'FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|'
- r'RSTART|RT|SUBSEP)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
-
-class SedLexer(RegexLexer):
- """
- Lexer for Sed script files.
- """
- name = 'Sed'
- aliases = ['sed', 'gsed', 'ssed']
- filenames = ['*.sed', '*.[gs]sed']
- mimetypes = ['text/x-sed']
- flags = re.MULTILINE
-
- # Match the contents within delimiters such as /<contents>/
- _inside_delims = r'((?:(?:\\[^\n]|[^\\])*?\\\n)*?(?:\\.|[^\\])*?)'
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#.*$', Comment.Single),
- (r'[0-9]+', Number.Integer),
- (r'\$', Operator),
- (r'[{};,!]', Punctuation),
- (r'[dDFgGhHlnNpPqQxz=]', Keyword),
- (r'([berRtTvwW:])([^;\n]*)', bygroups(Keyword, String.Single)),
- (r'([aci])((?:.*?\\\n)*(?:.*?[^\\]$))', bygroups(Keyword, String.Double)),
- (r'([qQ])([0-9]*)', bygroups(Keyword, Number.Integer)),
- (r'(/)' + _inside_delims + r'(/)', bygroups(Punctuation, String.Regex, Punctuation)),
- (r'(\\(.))' + _inside_delims + r'(\2)',
- bygroups(Punctuation, None, String.Regex, Punctuation)),
- (r'(y)(.)' + _inside_delims + r'(\2)' + _inside_delims + r'(\2)',
- bygroups(Keyword, Punctuation, String.Single, Punctuation, String.Single, Punctuation)),
- (r'(s)(.)' + _inside_delims + r'(\2)' + _inside_delims + r'(\2)((?:[gpeIiMm]|[0-9])*)',
- bygroups(Keyword, Punctuation, String.Regex, Punctuation, String.Single, Punctuation,
- Keyword))
- ]
- }
-
-class VimLexer(RegexLexer):
- """
- Lexer for VimL script files.
-
- .. versionadded:: 0.8
- """
- name = 'VimL'
- aliases = ['vim']
- filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
- '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
- mimetypes = ['text/x-vim']
- flags = re.MULTILINE
-
- _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
-
- tokens = {
- 'root': [
- (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
- bygroups(using(this), Keyword, Text, Operator, Text, Text,
- using(PythonLexer), Text)),
- (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
- bygroups(using(this), Keyword, Text, using(PythonLexer))),
-
- (r'^\s*".*', Comment),
-
- (r'[ \t]+', Text),
- # TODO: regexes can have other delims
- (r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex),
- (r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double),
- (r"'[^\n']*(?:''[^\n']*)*'", String.Single),
-
- # Who decided that doublequote was a good comment character??
- (r'(?<=\s)"[^\-:.%#=*].*', Comment),
- (r'-?\d+', Number),
- (r'#[0-9a-f]{6}', Number.Hex),
- (r'^:', Punctuation),
- (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
- (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
- Keyword),
- (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
- (r'\b\w+\b', Name.Other), # These are postprocessed below
- (r'.', Text),
- ],
- }
-
- def __init__(self, **options):
- from pygments.lexers._vim_builtins import auto, command, option
- self._cmd = command
- self._opt = option
- self._aut = auto
-
- RegexLexer.__init__(self, **options)
-
- def is_in(self, w, mapping):
- r"""
- It's kind of difficult to decide if something might be a keyword
- in VimL because it allows you to abbreviate them. In fact,
- 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
- valid ways to call it so rather than making really awful regexps
- like::
-
- \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
-
- we match `\b\w+\b` and then call is_in() on those tokens. See
- `scripts/get_vimkw.py` for how the lists are extracted.
- """
- p = bisect(mapping, (w,))
- if p > 0:
- if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
- mapping[p-1][1][:len(w)] == w:
- return True
- if p < len(mapping):
- return mapping[p][0] == w[:len(mapping[p][0])] and \
- mapping[p][1][:len(w)] == w
- return False
-
- def get_tokens_unprocessed(self, text):
- # TODO: builtins are only subsequent tokens on lines
- # and 'keywords' only happen at the beginning except
- # for :au ones
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name.Other:
- if self.is_in(value, self._cmd):
- yield index, Keyword, value
- elif self.is_in(value, self._opt) or \
- self.is_in(value, self._aut):
- yield index, Name.Builtin, value
- else:
- yield index, Text, value
- else:
- yield index, token, value
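The is_in docstring above describes the abbreviation lookup; a standalone sketch with a hypothetical slice of the (minimal_abbreviation, full_name) table (the real pairs come from pygments.lexers._vim_builtins) makes the bisect step concrete:

    from bisect import bisect

    # Hypothetical, sorted excerpt of the command table.
    mapping = [('ab', 'abbreviate'), ('abc', 'abclear'), ('ac', 'argadd')]

    def is_in(w, mapping):
        p = bisect(mapping, (w,))
        if p > 0 and mapping[p-1][0] == w[:len(mapping[p-1][0])] \
                 and mapping[p-1][1][:len(w)] == w:
            return True
        if p < len(mapping):
            return mapping[p][0] == w[:len(mapping[p][0])] \
                and mapping[p][1][:len(w)] == w
        return False

    print(is_in('abbrev', mapping))   # True: ':abbrev' abbreviates ':abbreviate'
    print(is_in('abz', mapping))      # False: not a valid abbreviation in the table
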
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/textfmts.py b/venv/lib/python3.11/site-packages/pygments/lexers/textfmts.py
deleted file mode 100644
index c7cfb6d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/textfmts.py
+++ /dev/null
@@ -1,436 +0,0 @@
-"""
- pygments.lexers.textfmts
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various text formats.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexers import guess_lexer, get_lexer_by_name
-from pygments.lexer import RegexLexer, bygroups, default, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Generic, Literal, Punctuation
-from pygments.util import ClassNotFound
-
-__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
- 'NotmuchLexer', 'KernelLogLexer']
-
-
-class IrcLogsLexer(RegexLexer):
- """
- Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
- """
-
- name = 'IRC logs'
- aliases = ['irc']
- filenames = ['*.weechatlog']
- mimetypes = ['text/x-irclog']
-
- flags = re.VERBOSE | re.MULTILINE
- timestamp = r"""
- (
- # irssi / xchat and others
- (?: \[|\()? # Opening bracket or paren for the timestamp
- (?: # Timestamp
- (?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
- (?:\d{1,4})
- [T ])? # Date/time separator: T or space
- (?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
- (?: \d?\d)
- )
- (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
- |
- # weechat
- \d{4}\s\w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- |
- # xchat
- \w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- )?
- """
- tokens = {
- 'root': [
- # log start/end
- (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
- # hack
- ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
- # normal msgs
- ("^" + timestamp + r"""
- (\s*<.*?>\s*) # Nick """,
- bygroups(Comment.Preproc, Name.Tag), 'msg'),
- # /me msgs
- ("^" + timestamp + r"""
- (\s*[*]\s+) # Star
- (\S+\s+.*?\n) # Nick + rest of message """,
- bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
- # join/part msgs
- ("^" + timestamp + r"""
- (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
- (\S+\s+) # Nick + Space
- (.*?\n) # Rest of message """,
- bygroups(Comment.Preproc, Keyword, String, Comment)),
- (r"^.*?\n", Text),
- ],
- 'msg': [
- (r"\S+:(?!//)", Name.Attribute), # Prefix
- (r".*\n", Text, '#pop'),
- ],
- }
-
-
-class GettextLexer(RegexLexer):
- """
- Lexer for Gettext catalog files.
-
- .. versionadded:: 0.9
- """
- name = 'Gettext Catalog'
- aliases = ['pot', 'po']
- filenames = ['*.pot', '*.po']
- mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
-
- tokens = {
- 'root': [
- (r'^#,\s.*?$', Keyword.Type),
- (r'^#:\s.*?$', Keyword.Declaration),
- # (r'^#$', Comment),
- (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
- (r'^(")([A-Za-z-]+:)(.*")$',
- bygroups(String, Name.Property, String)),
- (r'^".*"$', String),
- (r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
- bygroups(Name.Variable, Text, String)),
- (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
- bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
- ]
- }
-
-
-class HttpLexer(RegexLexer):
- """
- Lexer for HTTP sessions.
-
- .. versionadded:: 1.5
- """
-
- name = 'HTTP'
- aliases = ['http']
-
- flags = re.DOTALL
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- """Reset the content-type state."""
- self.content_type = None
- return RegexLexer.get_tokens_unprocessed(self, text, stack)
-
- def header_callback(self, match):
- if match.group(1).lower() == 'content-type':
- content_type = match.group(5).strip()
- if ';' in content_type:
- content_type = content_type[:content_type.find(';')].strip()
- self.content_type = content_type
- yield match.start(1), Name.Attribute, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator, match.group(3)
- yield match.start(4), Text, match.group(4)
- yield match.start(5), Literal, match.group(5)
- yield match.start(6), Text, match.group(6)
-
- def continuous_header_callback(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Literal, match.group(2)
- yield match.start(3), Text, match.group(3)
-
- def content_callback(self, match):
- content_type = getattr(self, 'content_type', None)
- content = match.group()
- offset = match.start()
- if content_type:
- from pygments.lexers import get_lexer_for_mimetype
- possible_lexer_mimetypes = [content_type]
- if '+' in content_type:
- # application/calendar+xml can be treated as application/xml
- # if there's not a better match.
- general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
- content_type)
- possible_lexer_mimetypes.append(general_type)
-
- for i in possible_lexer_mimetypes:
- try:
- lexer = get_lexer_for_mimetype(i)
- except ClassNotFound:
- pass
- else:
- for idx, token, value in lexer.get_tokens_unprocessed(content):
- yield offset + idx, token, value
- return
- yield offset, Text, content
-
- tokens = {
- 'root': [
- (r'([a-zA-Z][-_a-zA-Z]+)( +)([^ ]+)( +)'
- r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)(\r?\n|\Z)',
- bygroups(Name.Function, Text, Name.Namespace, Text,
- Keyword.Reserved, Operator, Number, Text),
- 'headers'),
- (r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)',
- bygroups(Keyword.Reserved, Operator, Number, Text, Number, Text,
- Name.Exception, Text),
- 'headers'),
- ],
- 'headers': [
- (r'([^\s:]+)( *)(:)( *)([^\r\n]*)(\r?\n|\Z)', header_callback),
- (r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
- (r'\r?\n', Text, 'content')
- ],
- 'content': [
- (r'.+', content_callback)
- ]
- }
-
- def analyse_text(text):
- return any(
- re.search(pattern, text) is not None
- for pattern in (
- r'^([a-zA-Z][-_a-zA-Z]+)( +)([^ ]+)( +)(HTTP)(/)(1\.[01]|2(?:\.0)?|3)(\r?\n|\Z)',
- r'^(HTTP)(/)(1\.[01]|2(?:\.0)?|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)',
- )
- )
-
-
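content_callback above re-lexes the body with whatever lexer is registered for the captured Content-Type (with the +xml style fallback), while the headers keep their own tokens. A quick sketch with an invented JSON response:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    response = (
        "HTTP/1.1 200 OK\r\n"
        "Content-Type: application/json\r\n"
        "\r\n"
        '{"status": "ok", "items": [1, 2, 3]}'
    )
    # Headers are lexed here; the body is delegated to the JSON lexer.
    print(highlight(response, get_lexer_by_name('http'), TerminalFormatter()))
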
-class TodotxtLexer(RegexLexer):
- """
- Lexer for Todo.txt todo list format.
-
- .. versionadded:: 2.0
- """
-
- name = 'Todotxt'
- url = 'http://todotxt.com/'
- aliases = ['todotxt']
- # *.todotxt is not a standard extension for Todo.txt files; including it
- # makes testing easier, and also makes autodetecting file type easier.
- filenames = ['todo.txt', '*.todotxt']
- mimetypes = ['text/x-todo']
-
- # Aliases mapping Todo.txt format concepts to standard token types
- CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
- IncompleteTaskText = Text # Incomplete tasks should look like plain text
-
- # Priority should have most emphasis to indicate importance of tasks
- Priority = Generic.Heading
- # Dates should have next most emphasis because time is important
- Date = Generic.Subheading
-
- # Project and context should have equal weight, and be in different colors
- Project = Generic.Error
- Context = String
-
- # If tag functionality is added, it should have the same weight as Project
- # and Context, and a different color. Generic.Traceback would work well.
-
- # Regex patterns for building up rules; dates, priorities, projects, and
- # contexts are all atomic
- # TODO: Make date regex more ISO 8601 compliant
- date_regex = r'\d{4,}-\d{2}-\d{2}'
- priority_regex = r'\([A-Z]\)'
- project_regex = r'\+\S+'
- context_regex = r'@\S+'
-
- # Compound regex expressions
- complete_one_date_regex = r'(x )(' + date_regex + r')'
- complete_two_date_regex = (complete_one_date_regex + r'( )(' +
- date_regex + r')')
- priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
-
- tokens = {
- # Should parse starting at beginning of line; each line is a task
- 'root': [
- # Complete task entry points: two total:
- # 1. Complete task with two dates
- (complete_two_date_regex, bygroups(CompleteTaskText, Date,
- CompleteTaskText, Date),
- 'complete'),
- # 2. Complete task with one date
- (complete_one_date_regex, bygroups(CompleteTaskText, Date),
- 'complete'),
-
- # Incomplete task entry points: six total:
- # 1. Priority plus date
- (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
- 'incomplete'),
- # 2. Priority only
- (priority_regex, Priority, 'incomplete'),
- # 3. Leading date
- (date_regex, Date, 'incomplete'),
- # 4. Leading context
- (context_regex, Context, 'incomplete'),
- # 5. Leading project
- (project_regex, Project, 'incomplete'),
- # 6. Non-whitespace catch-all
- (r'\S+', IncompleteTaskText, 'incomplete'),
- ],
-
- # Parse a complete task
- 'complete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', CompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
- (r'\S+', CompleteTaskText),
- # Tokenize whitespace not containing a newline
- (r'\s+', CompleteTaskText),
- ],
-
- # Parse an incomplete task
- 'incomplete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', IncompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
- (r'\S+', IncompleteTaskText),
- # Tokenize whitespace not containing a newline
- (r'\s+', IncompleteTaskText),
- ],
- }
-
-
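Reading the entry points together: a leading priority and/or date routes into the incomplete state, an 'x' plus one or two dates into the complete state, and +project / @context tags are colored inside both. A one-line invented task shows the mapping:

    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('todotxt')
    line = "(A) 2023-04-01 Call the bank +finances @phone\n"
    for token_type, value in lexer.get_tokens(line):
        print(token_type, repr(value))
    # '(A)' -> Generic.Heading (Priority), the date -> Generic.Subheading (Date),
    # '+finances' -> Generic.Error (Project), '@phone' -> String (Context)
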
-class NotmuchLexer(RegexLexer):
- """
- For Notmuch email text format.
-
- .. versionadded:: 2.5
-
- Additional options accepted:
-
- `body_lexer`
- If given, highlight the contents of the message body with the specified
- lexer, else guess it according to the body content (default: ``None``).
- """
-
- name = 'Notmuch'
- url = 'https://notmuchmail.org/'
- aliases = ['notmuch']
-
- def _highlight_code(self, match):
- code = match.group(1)
-
- try:
- if self.body_lexer:
- lexer = get_lexer_by_name(self.body_lexer)
- else:
- lexer = guess_lexer(code.strip())
- except ClassNotFound:
- lexer = get_lexer_by_name('text')
-
- yield from lexer.get_tokens_unprocessed(code)
-
- tokens = {
- 'root': [
- (r'\fmessage\{\s*', Keyword, ('message', 'message-attr')),
- ],
- 'message-attr': [
- (r'(\s*id:\s*)(\S+)', bygroups(Name.Attribute, String)),
- (r'(\s*(?:depth|match|excluded):\s*)(\d+)',
- bygroups(Name.Attribute, Number.Integer)),
- (r'(\s*filename:\s*)(.+\n)',
- bygroups(Name.Attribute, String)),
- default('#pop'),
- ],
- 'message': [
- (r'\fmessage\}\n', Keyword, '#pop'),
- (r'\fheader\{\n', Keyword, 'header'),
- (r'\fbody\{\n', Keyword, 'body'),
- ],
- 'header': [
- (r'\fheader\}\n', Keyword, '#pop'),
- (r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)',
- bygroups(Name.Attribute, String)),
- (r'(.*)(\s*\(.*\))(\s*\(.*\)\n)',
- bygroups(Generic.Strong, Literal, Name.Tag)),
- ],
- 'body': [
- (r'\fpart\{\n', Keyword, 'part'),
- (r'\f(part|attachment)\{\s*', Keyword, ('part', 'part-attr')),
- (r'\fbody\}\n', Keyword, '#pop'),
- ],
- 'part-attr': [
- (r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)),
- (r'(,\s*)((?:Filename|Content-id):\s*)([^,]+)',
- bygroups(Punctuation, Name.Attribute, String)),
- (r'(,\s*)(Content-type:\s*)(.+\n)',
- bygroups(Punctuation, Name.Attribute, String)),
- default('#pop'),
- ],
- 'part': [
- (r'\f(?:part|attachment)\}\n', Keyword, '#pop'),
- (r'\f(?:part|attachment)\{\s*', Keyword, ('#push', 'part-attr')),
- (r'^Non-text part: .*\n', Comment),
- (r'(?s)(.*?(?=\f(?:part|attachment)\}\n))', _highlight_code),
- ],
- }
-
- def analyse_text(text):
- return 1.0 if text.startswith('\fmessage{') else 0.0
-
- def __init__(self, **options):
- self.body_lexer = options.get('body_lexer', None)
- RegexLexer.__init__(self, **options)
-
-
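The body_lexer option is read from the constructor keyword arguments, so it can be supplied through get_lexer_by_name like any other lexer option; without it the body content is guessed. A minimal sketch:

    from pygments.lexers import get_lexer_by_name

    # Force message bodies to be highlighted as diffs instead of being guessed.
    lexer = get_lexer_by_name('notmuch', body_lexer='diff')
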
-class KernelLogLexer(RegexLexer):
- """
- For Linux Kernel log ("dmesg") output.
-
- .. versionadded:: 2.6
- """
- name = 'Kernel log'
- aliases = ['kmsg', 'dmesg']
- filenames = ['*.kmsg', '*.dmesg']
-
- tokens = {
- 'root': [
- (r'^[^:]+:debug : (?=\[)', Text, 'debug'),
- (r'^[^:]+:info : (?=\[)', Text, 'info'),
- (r'^[^:]+:warn : (?=\[)', Text, 'warn'),
- (r'^[^:]+:notice: (?=\[)', Text, 'warn'),
- (r'^[^:]+:err : (?=\[)', Text, 'error'),
- (r'^[^:]+:crit : (?=\[)', Text, 'error'),
- (r'^(?=\[)', Text, 'unknown'),
- ],
- 'unknown': [
- (r'^(?=.+(warning|notice|audit|deprecated))', Text, 'warn'),
- (r'^(?=.+(error|critical|fail|Bug))', Text, 'error'),
- default('info'),
- ],
- 'base': [
- (r'\[[0-9. ]+\] ', Number),
- (r'(?<=\] ).+?:', Keyword),
- (r'\n', Text, '#pop'),
- ],
- 'debug': [
- include('base'),
- (r'.+\n', Comment, '#pop')
- ],
- 'info': [
- include('base'),
- (r'.+\n', Text, '#pop')
- ],
- 'warn': [
- include('base'),
- (r'.+\n', Generic.Strong, '#pop')
- ],
- 'error': [
- include('base'),
- (r'.+\n', Generic.Error, '#pop')
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/theorem.py b/venv/lib/python3.11/site-packages/pygments/lexers/theorem.py
deleted file mode 100644
index abf09ae..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/theorem.py
+++ /dev/null
@@ -1,391 +0,0 @@
-"""
- pygments.lexers.theorem
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for theorem-proving languages.
-
- See also :mod:`pygments.lexers.lean`
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, default, words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-from pygments.lexers.lean import LeanLexer
-
-__all__ = ['CoqLexer', 'IsabelleLexer']
-
-
-class CoqLexer(RegexLexer):
- """
- For the Coq theorem prover.
-
- .. versionadded:: 1.5
- """
-
- name = 'Coq'
- url = 'http://coq.inria.fr/'
- aliases = ['coq']
- filenames = ['*.v']
- mimetypes = ['text/x-coq']
-
- flags = 0 # no re.MULTILINE
-
- keywords1 = (
- # Vernacular commands
- 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
- 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Axioms', 'Hypothesis',
- 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
- 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Example', 'Let',
- 'Ltac', 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
- 'Arguments', 'Types', 'Unset', 'Contextual', 'Strict', 'Prenex',
- 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
- 'Variant', 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Fact',
- 'Remark', 'Corollary', 'Proposition', 'Property', 'Goal',
- 'Proof', 'Restart', 'Save', 'Qed', 'Defined', 'Abort', 'Admitted',
- 'Hint', 'Resolve', 'Rewrite', 'View', 'Search', 'Compute', 'Eval',
- 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
- 'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
- 'Universe', 'Polymorphic', 'Monomorphic', 'Context', 'Scheme', 'From',
- 'Undo', 'Fail', 'Function',
- )
- keywords2 = (
- # Gallina
- 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
- 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
- 'for', 'of', 'nosimpl', 'with', 'as',
- )
- keywords3 = (
- # Sorts
- 'Type', 'Prop', 'SProp', 'Set',
- )
- keywords4 = (
- # Tactics
- 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
- 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
- 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
- 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
- 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
- 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
- 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
- 'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
- 'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
- 'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
- 'native_compute', 'subst',
- )
- keywords5 = (
- # Terminators
- 'by', 'now', 'done', 'exact', 'reflexivity',
- 'tauto', 'romega', 'omega', 'lia', 'nia', 'lra', 'nra', 'psatz',
- 'assumption', 'solve', 'contradiction', 'discriminate',
- 'congruence', 'admit'
- )
- keywords6 = (
- # Control
- 'do', 'last', 'first', 'try', 'idtac', 'repeat',
- )
- # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- # 'downto', 'else', 'end', 'exception', 'external', 'false',
- # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- # 'type', 'val', 'virtual', 'when', 'while', 'with'
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
- '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
- '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
- r'/\\', r'\\/', r'\{\|', r'\|\}',
- # 'Π', 'Σ', # Not defined in the standard library
- 'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥',
- )
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\(\*', Comment, 'comment'),
- (r'\b(?:[^\W\d][\w\']*\.)+[^\W\d][\w\']*\b', Name),
- (r'\bEquations\b\??', Keyword.Namespace),
- # Very weak heuristic to distinguish the Set vernacular from the Set sort
- (r'\bSet(?=[ \t]+[A-Z][a-z][^\n]*?\.)', Keyword.Namespace),
- (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
- (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
- # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", String.Char),
-
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name),
- (r'\S', Name.Builtin.Pseudo),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^"]+', String.Double),
- (r'""', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z][a-z0-9_\']*', Name, '#pop'),
- default('#pop')
- ],
- }
-
- def analyse_text(text):
- if 'Qed' in text and 'Proof' in text:
- return 1
-
-
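The analyse_text above returns a full score as soon as a text contains both 'Proof' and 'Qed', which is usually enough for Pygments' guess_lexer to pick Coq for bare snippets. A hedged check with a made-up lemma:

    from pygments.lexers import guess_lexer

    proof = "Lemma add_0_r : forall n, n + 0 = n.\nProof. induction n; auto. Qed.\n"
    print(guess_lexer(proof).name)   # likely 'Coq'
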
-class IsabelleLexer(RegexLexer):
- """
- For the Isabelle proof assistant.
-
- .. versionadded:: 2.0
- """
-
- name = 'Isabelle'
- url = 'https://isabelle.in.tum.de/'
- aliases = ['isabelle']
- filenames = ['*.thy']
- mimetypes = ['text/x-isabelle']
-
- keyword_minor = (
- 'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
- 'class_instance', 'class_relation', 'code_module', 'congs',
- 'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
- 'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
- 'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
- 'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
- 'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
- 'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
- 'type_constructor', 'unchecked', 'unsafe', 'where',
- )
-
- keyword_diag = (
- 'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
- 'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
- 'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
- 'print_abbrevs', 'print_antiquotations', 'print_attributes',
- 'print_binds', 'print_bnfs', 'print_bundles',
- 'print_case_translations', 'print_cases', 'print_claset',
- 'print_classes', 'print_codeproc', 'print_codesetup',
- 'print_coercions', 'print_commands', 'print_context',
- 'print_defn_rules', 'print_dependencies', 'print_facts',
- 'print_induct_rules', 'print_inductives', 'print_interps',
- 'print_locale', 'print_locales', 'print_methods', 'print_options',
- 'print_orders', 'print_quot_maps', 'print_quotconsts',
- 'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
- 'print_rules', 'print_simpset', 'print_state', 'print_statement',
- 'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
- 'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
- 'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
- 'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
- 'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
- )
-
- keyword_thy = ('theory', 'begin', 'end')
-
- keyword_section = ('header', 'chapter')
-
- keyword_subsection = (
- 'section', 'subsection', 'subsubsection', 'sect', 'subsect',
- 'subsubsect',
- )
-
- keyword_theory_decl = (
- 'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
- 'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
- 'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
- 'code_abort', 'code_class', 'code_const', 'code_datatype',
- 'code_identifier', 'code_include', 'code_instance', 'code_modulename',
- 'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
- 'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
- 'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
- 'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
- 'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
- 'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
- 'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
- 'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
- 'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
- 'lifting_forget', 'lifting_update', 'local_setup', 'locale',
- 'method_setup', 'nitpick_params', 'no_adhoc_overloading',
- 'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
- 'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
- 'overloading', 'parse_ast_translation', 'parse_translation',
- 'partial_function', 'primcorec', 'primrec', 'primrec_new',
- 'print_ast_translation', 'print_translation', 'quickcheck_generator',
- 'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
- 'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
- 'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
- 'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
- 'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
- 'text_raw', 'theorems', 'translations', 'type_notation',
- 'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
- 'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
- 'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
- 'bnf_axiomatization', 'cartouche', 'datatype_compat',
- 'free_constructors', 'functor', 'nominal_function',
- 'nominal_termination', 'permanent_interpretation',
- 'binds', 'defining', 'smt2_status', 'term_cartouche',
- 'boogie_file', 'text_cartouche',
- )
-
- keyword_theory_script = ('inductive_cases', 'inductive_simps')
-
- keyword_theory_goal = (
- 'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
- 'crunch', 'crunch_ignore',
- 'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
- 'lift_definition', 'nominal_inductive', 'nominal_inductive2',
- 'nominal_primrec', 'pcpodef', 'primcorecursive',
- 'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
- 'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
- 'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
- 'theorem', 'typedef', 'wrap_free_constructors',
- )
-
- keyword_qed = ('by', 'done', 'qed')
- keyword_abandon_proof = ('sorry', 'oops')
-
- keyword_proof_goal = ('have', 'hence', 'interpret')
-
- keyword_proof_block = ('next', 'proof')
-
- keyword_proof_chain = (
- 'finally', 'from', 'then', 'ultimately', 'with',
- )
-
- keyword_proof_decl = (
- 'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
- 'txt', 'txt_raw', 'unfolding', 'using', 'write',
- )
-
- keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
-
- keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
-
- keyword_proof_script = (
- 'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
- )
-
- operators = (
- '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
- '+', '-', '!', '?',
- )
-
- proof_operators = ('{', '}', '.', '..')
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'\(\*', Comment, 'comment'),
- (r'\\<open>', String.Symbol, 'cartouche'),
- (r'\{\*|‹', String, 'cartouche'),
-
- (words(operators), Operator),
- (words(proof_operators), Operator.Word),
-
- (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
-
- (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
- (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
-
- (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
-
- (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
-
- (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
-
- (r'\\<(\w|\^)*>', Text.Symbol),
-
- (r"'[^\W\d][.\w']*", Name.Type),
-
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
-
- (r'"', String, 'string'),
- (r'`', String.Other, 'fact'),
- (r'[^\s:|\[\]\-()=,+!?{}._][^\s:|\[\]\-()=,+!?{}]*', Name),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'cartouche': [
- (r'[^{*}\\‹›]+', String),
- (r'\\<open>', String.Symbol, '#push'),
- (r'\{\*|‹', String, '#push'),
- (r'\\<close>', String.Symbol, '#pop'),
- (r'\*\}|›', String, '#pop'),
- (r'\\<(\w|\^)*>', String.Symbol),
- (r'[{*}\\]', String),
- ],
- 'string': [
- (r'[^"\\]+', String),
- (r'\\<(\w|\^)*>', String.Symbol),
- (r'\\"', String),
- (r'\\', String),
- (r'"', String, '#pop'),
- ],
- 'fact': [
- (r'[^`\\]+', String.Other),
- (r'\\<(\w|\^)*>', String.Symbol),
- (r'\\`', String.Other),
- (r'\\', String.Other),
- (r'`', String.Other, '#pop'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/thingsdb.py b/venv/lib/python3.11/site-packages/pygments/lexers/thingsdb.py
deleted file mode 100644
index 13230f8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/thingsdb.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
- pygments.lexers.thingsdb
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the ThingsDB language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Comment, Keyword, Name, Number, String, Text, \
- Operator, Punctuation, Whitespace
-
-__all__ = ['ThingsDBLexer']
-
-
-class ThingsDBLexer(RegexLexer):
- """
- Lexer for the ThingsDB programming language.
-
- .. versionadded:: 2.9
- """
- name = 'ThingsDB'
- aliases = ['ti', 'thingsdb']
- filenames = ['*.ti']
-
- tokens = {
- 'root': [
- include('expression'),
- ],
- 'expression': [
- include('comments'),
- include('whitespace'),
-
- # numbers
- (r'[-+]?0b[01]+', Number.Bin),
- (r'[-+]?0o[0-8]+', Number.Oct),
- (r'([-+]?0x[0-9a-fA-F]+)', Number.Hex),
- (r'[-+]?[0-9]+', Number.Integer),
- (r'[-+]?((inf|nan)([^0-9A-Za-z_]|$)|[0-9]*\.[0-9]+(e[+-][0-9]+)?)',
- Number.Float),
-
- # strings
- (r'(?:"(?:[^"]*)")+', String.Double),
- (r"(?:'(?:[^']*)')+", String.Single),
-
- # literals
- (r'(true|false|nil)\b', Keyword.Constant),
-
- # regular expressions
- (r'(/[^/\\]*(?:\\.[^/\\]*)*/i?)', String.Regex),
-
- # thing id's
- (r'#[0-9]+', Comment.Preproc),
-
- # name, assignments and functions
- include('names'),
-
- (r'[(){}\[\],;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?]', Operator),
- ],
- 'names': [
- (r'(\.)'
- r'(add|call|contains|del|endswith|extend|filter|find|findindex|'
- r'get|has|id|indexof|keys|len|lower|map|pop|push|remove|set|sort|'
- r'splice|startswith|test|unwrap|upper|values|wrap)'
- r'(\()',
- bygroups(Name.Function, Name.Function, Punctuation), 'arguments'),
- (r'(array|assert|assert_err|auth_err|backup_info|backups_info|'
- r'bad_data_err|bool|closure|collection_info|collections_info|'
- r'counters|deep|del_backup|del_collection|del_expired|del_node|'
- r'del_procedure|del_token|del_type|del_user|err|float|'
- r'forbidden_err|grant|int|isarray|isascii|isbool|isbytes|iserr|'
- r'isfloat|isinf|isint|islist|isnan|isnil|israw|isset|isstr|'
- r'isthing|istuple|isutf8|lookup_err|max_quota_err|mod_type|new|'
- r'new_backup|new_collection|new_node|new_procedure|new_token|'
- r'new_type|new_user|node_err|node_info|nodes_info|now|'
- r'num_arguments_err|operation_err|overflow_err|procedure_doc|'
- r'procedure_info|procedures_info|raise|refs|rename_collection|'
- r'rename_user|reset_counters|return|revoke|run|set_log_level|set|'
- r'set_quota|set_type|shutdown|str|syntax_err|thing|try|type|'
- r'type_err|type_count|type_info|types_info|user_info|users_info|'
- r'value_err|wse|zero_div_err)'
- r'(\()',
- bygroups(Name.Function, Punctuation),
- 'arguments'),
- (r'(\.[A-Za-z_][0-9A-Za-z_]*)'
- r'(\s*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- (r'\.[A-Za-z_][0-9A-Za-z_]*', Name.Attribute),
- (r'([A-Za-z_][0-9A-Za-z_]*)(\s*)(=)',
- bygroups(Name.Variable, Text, Operator)),
- (r'[A-Za-z_][0-9A-Za-z_]*', Name.Variable),
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'//(.*?)\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'arguments': [
- include('expression'),
- (',', Punctuation),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- ],
- }
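Note: a minimal usage sketch for the lexer deleted above (assuming a Pygments installation that still ships it): resolve it by its 'thingsdb'/'ti' alias and pass it to pygments.highlight like any other RegexLexer.

# Hypothetical ThingsDB snippet; the lexer is looked up by alias and rendered to ANSI.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

code = '.users.filter(|u| u.age >= 18);  // adult accounts\n'
print(highlight(code, get_lexer_by_name('thingsdb'), TerminalFormatter()))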
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/tlb.py b/venv/lib/python3.11/site-packages/pygments/lexers/tlb.py
deleted file mode 100644
index ac629dc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/tlb.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
- pygments.lexers.tlb
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for TL-b.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Operator, Name, \
- Number, Whitespace, Punctuation, Comment
-
-__all__ = ['TlbLexer']
-
-
-class TlbLexer(RegexLexer):
- """
- For TL-b source code.
- """
-
- name = 'Tl-b'
- aliases = ['tlb']
- filenames = ['*.tlb']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
-
- include('comments'),
-
- (r'[0-9]+', Number),
- (words((
- '+', '-', '*', '=', '?', '~', '.',
- '^', '==', '<', '>', '<=', '>=', '!='
- )), Operator),
- (words(('##', '#<', '#<=')), Name.Tag),
- (r'#[0-9a-f]*_?', Name.Tag),
- (r'\$[01]*_?', Name.Tag),
-
- (r'[a-zA-Z_][0-9a-zA-Z_]*', Name),
-
- (r'[;():\[\]{}]', Punctuation)
- ],
-
- 'comments': [
- (r'//.*', Comment.Singleline),
- (r'/\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
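Note: because TlbLexer registers the '*.tlb' pattern, it can also be resolved from a filename instead of an alias; a small sketch (hypothetical file name, assuming the lexer is still installed):

# Hypothetical lookup by filename; '*.tlb' is registered by the lexer above.
from pygments.lexers import get_lexer_for_filename

lexer = get_lexer_for_filename('block.tlb')
print(lexer.name)  # 'Tl-b'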
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/tls.py b/venv/lib/python3.11/site-packages/pygments/lexers/tls.py
deleted file mode 100644
index 34f8d4f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/tls.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
- pygments.lexers.tls
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the TLS presentation language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['TlsLexer']
-
-
-class TlsLexer(RegexLexer):
- """
- The TLS presentation language, described in RFC 8446.
-
- .. versionadded:: 2.16
- """
- name = 'TLS Presentation Language'
- url = 'https://www.rfc-editor.org/rfc/rfc8446#section-3'
- filenames = []
- aliases = ['tls']
- mimetypes = []
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- # comments
- (r'/[*].*?[*]/', Comment.Multiline),
- # Keywords
- (words(('struct', 'enum', 'select', 'case'), suffix=r'\b'),
- Keyword),
- (words(('uint8', 'uint16', 'uint24', 'uint32', 'uint64', 'opaque'),
- suffix=r'\b'), Keyword.Type),
- # numeric literals
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- # string literal
- (r'"(\\.|[^"\\])*"', String),
- # tokens
- (r'[.]{2}', Operator),
- (r'[+\-*/&^]', Operator),
- (r'[|<>=!()\[\]{}.,;:\?]', Punctuation),
- # identifiers
- (r'[^\W\d]\w*', Name.Other),
- ]
- }
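Note: TlsLexer registers no filenames or MIME types, so the 'tls' alias is the only lookup key; a hedged sketch rendering a small RFC 8446-style struct to HTML:

# Hypothetical struct definition highlighted via the 'tls' alias.
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

source = 'struct {\n    uint16 length;\n    opaque data[length];\n} Datum;\n'
print(highlight(source, get_lexer_by_name('tls'), HtmlFormatter()))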
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/tnt.py b/venv/lib/python3.11/site-packages/pygments/lexers/tnt.py
deleted file mode 100644
index 2251373..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/tnt.py
+++ /dev/null
@@ -1,271 +0,0 @@
-"""
- pygments.lexers.tnt
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for Typographic Number Theory.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
- Punctuation, Error
-
-__all__ = ['TNTLexer']
-
-
-class TNTLexer(Lexer):
- """
- Lexer for Typographic Number Theory, as described in the book
- Gödel, Escher, Bach, by Douglas R. Hofstadter
-
- .. versionadded:: 2.7
- """
-
- name = 'Typographic Number Theory'
- url = 'https://github.com/Kenny2github/language-tnt'
- aliases = ['tnt']
- filenames = ['*.tnt']
-
- cur = []
-
- LOGIC = set('⊃→]&∧^|∨Vv')
- OPERATORS = set('+.⋅*')
- VARIABLES = set('abcde')
- PRIMES = set("'′")
- NEGATORS = set('~!')
- QUANTIFIERS = set('AE∀∃')
- NUMBERS = set('0123456789')
- WHITESPACE = set('\t \v\n')
-
- RULES = re.compile('''(?xi)
- joining | separation | double-tilde | fantasy\\ rule
- | carry[- ]over(?:\\ of)?(?:\\ line)?\\ ([0-9]+) | detachment
- | contrapositive | De\\ Morgan | switcheroo
- | specification | generalization | interchange
- | existence | symmetry | transitivity
- | add\\ S | drop\\ S | induction
- | axiom\\ ([1-5]) | premise | push | pop
- ''')
- LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*')
- COMMENT = re.compile(r'\[[^\n\]]+\]')
-
- def __init__(self, *args, **kwargs):
- Lexer.__init__(self, *args, **kwargs)
- self.cur = []
-
- def whitespace(self, start, text, required=False):
- """Tokenize whitespace."""
- end = start
- try:
- while text[end] in self.WHITESPACE:
- end += 1
- except IndexError:
- end = len(text)
- if required and end == start:
- raise AssertionError
- if end != start:
- self.cur.append((start, Text, text[start:end]))
- return end
-
- def variable(self, start, text):
- """Tokenize a variable."""
- if text[start] not in self.VARIABLES:
- raise AssertionError
- end = start+1
- while text[end] in self.PRIMES:
- end += 1
- self.cur.append((start, Name.Variable, text[start:end]))
- return end
-
- def term(self, start, text):
- """Tokenize a term."""
- if text[start] == 'S': # S...S(...) or S...0
- end = start+1
- while text[end] == 'S':
- end += 1
- self.cur.append((start, Number.Integer, text[start:end]))
- return self.term(end, text)
- if text[start] == '0': # the singleton 0
- self.cur.append((start, Number.Integer, text[start]))
- return start+1
- if text[start] in self.VARIABLES: # a''...
- return self.variable(start, text)
- if text[start] == '(': # (...+...)
- self.cur.append((start, Punctuation, text[start]))
- start = self.term(start+1, text)
- if text[start] not in self.OPERATORS:
- raise AssertionError
- self.cur.append((start, Operator, text[start]))
- start = self.term(start+1, text)
- if text[start] != ')':
- raise AssertionError
- self.cur.append((start, Punctuation, text[start]))
- return start+1
- raise AssertionError # no matches
-
- def formula(self, start, text):
- """Tokenize a formula."""
- if text[start] in self.NEGATORS: # ~<...>
- end = start+1
- while text[end] in self.NEGATORS:
- end += 1
- self.cur.append((start, Operator, text[start:end]))
- return self.formula(end, text)
- if text[start] in self.QUANTIFIERS: # Aa:<...>
- self.cur.append((start, Keyword.Declaration, text[start]))
- start = self.variable(start+1, text)
- if text[start] != ':':
- raise AssertionError
- self.cur.append((start, Punctuation, text[start]))
- return self.formula(start+1, text)
- if text[start] == '<': # <...&...>
- self.cur.append((start, Punctuation, text[start]))
- start = self.formula(start+1, text)
- if text[start] not in self.LOGIC:
- raise AssertionError
- self.cur.append((start, Operator, text[start]))
- start = self.formula(start+1, text)
- if text[start] != '>':
- raise AssertionError
- self.cur.append((start, Punctuation, text[start]))
- return start+1
- # ...=...
- start = self.term(start, text)
- if text[start] != '=':
- raise AssertionError
- self.cur.append((start, Operator, text[start]))
- start = self.term(start+1, text)
- return start
-
- def rule(self, start, text):
- """Tokenize a rule."""
- match = self.RULES.match(text, start)
- if match is None:
- raise AssertionError
- groups = sorted(match.regs[1:]) # exclude whole match
- for group in groups:
- if group[0] >= 0: # this group matched
- self.cur.append((start, Keyword, text[start:group[0]]))
- self.cur.append((group[0], Number.Integer,
- text[group[0]:group[1]]))
- if group[1] != match.end():
- self.cur.append((group[1], Keyword,
- text[group[1]:match.end()]))
- break
- else:
- self.cur.append((start, Keyword, text[start:match.end()]))
- return match.end()
-
- def lineno(self, start, text):
- """Tokenize a line referral."""
- end = start
- while text[end] not in self.NUMBERS:
- end += 1
- self.cur.append((start, Punctuation, text[start]))
- self.cur.append((start+1, Text, text[start+1:end]))
- start = end
- match = self.LINENOS.match(text, start)
- if match is None:
- raise AssertionError
- if text[match.end()] != ')':
- raise AssertionError
- self.cur.append((match.start(), Number.Integer, match.group(0)))
- self.cur.append((match.end(), Punctuation, text[match.end()]))
- return match.end() + 1
-
- def error_till_line_end(self, start, text):
- """Mark everything from ``start`` to the end of the line as Error."""
- end = start
- try:
- while text[end] != '\n': # there's whitespace in rules
- end += 1
- except IndexError:
- end = len(text)
- if end != start:
- self.cur.append((start, Error, text[start:end]))
- end = self.whitespace(end, text)
- return end
-
- def get_tokens_unprocessed(self, text):
- """Returns a list of TNT tokens."""
- self.cur = []
- start = end = self.whitespace(0, text)
- while start <= end < len(text):
- try:
- # try line number
- while text[end] in self.NUMBERS:
- end += 1
- if end != start: # actual number present
- self.cur.append((start, Number.Integer, text[start:end]))
- # whitespace is required after a line number
- orig = len(self.cur)
- try:
- start = end = self.whitespace(end, text, True)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(end, text)
- continue
- # at this point it could be a comment
- match = self.COMMENT.match(text, start)
- if match is not None:
- self.cur.append((start, Comment, text[start:match.end()]))
- start = end = match.end()
- # anything after the closing bracket is invalid
- start = end = self.error_till_line_end(start, text)
- # do not attempt to process the rest
- continue
- del match
- if text[start] in '[]': # fantasy push or pop
- self.cur.append((start, Keyword, text[start]))
- start += 1
- end += 1
- else:
- # one formula, possibly containing subformulae
- orig = len(self.cur)
- try:
- start = end = self.formula(start, text)
- except (AssertionError, RecursionError): # not well-formed
- del self.cur[orig:]
- while text[end] not in self.WHITESPACE:
- end += 1
- self.cur.append((start, Error, text[start:end]))
- start = end
- # skip whitespace after formula
- orig = len(self.cur)
- try:
- start = end = self.whitespace(end, text, True)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(start, text)
- continue
- # rule proving this formula a theorem
- orig = len(self.cur)
- try:
- start = end = self.rule(start, text)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(start, text)
- continue
- # skip whitespace after rule
- start = end = self.whitespace(end, text)
- # line marker
- if text[start] == '(':
- orig = len(self.cur)
- try:
- start = end = self.lineno(start, text)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(start, text)
- continue
- start = end = self.whitespace(start, text)
- except IndexError:
- try:
- del self.cur[orig:]
- except NameError:
- pass # if orig was never defined, fine
- self.error_till_line_end(start, text)
- return self.cur
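Note: unlike the RegexLexer subclasses around it, TNTLexer derives from the bare Lexer class and hand-writes get_tokens_unprocessed(), emitting (index, tokentype, value) triples and downgrading malformed spans to Error tokens instead of raising. A hedged sketch of driving it through the regular get_tokens() wrapper:

# Hypothetical derivation line: line number, formula, rule, line reference.
from pygments.lexers import get_lexer_by_name

tnt = get_lexer_by_name('tnt')
for tokentype, value in tnt.get_tokens('1 Aa:~Sa=0 axiom 1 (1)\n'):
    print(tokentype, repr(value))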
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/trafficscript.py b/venv/lib/python3.11/site-packages/pygments/lexers/trafficscript.py
deleted file mode 100644
index b48124b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/trafficscript.py
+++ /dev/null
@@ -1,51 +0,0 @@
-"""
- pygments.lexers.trafficscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for RiverBed's TrafficScript (RTS) language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import String, Number, Name, Keyword, Operator, Text, Comment
-
-__all__ = ['RtsLexer']
-
-
-class RtsLexer(RegexLexer):
- """
- For Riverbed Stingray Traffic Manager
-
- .. versionadded:: 2.1
- """
- name = 'TrafficScript'
- aliases = ['trafficscript', 'rts']
- filenames = ['*.rts']
-
- tokens = {
- 'root' : [
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"', String, 'escapable-string'),
- (r'(0x[0-9a-fA-F]+|\d+)', Number),
- (r'\d+\.\d+', Number.Float),
- (r'\$[a-zA-Z](\w|_)*', Name.Variable),
- (r'(if|else|for(each)?|in|while|do|break|sub|return|import)', Keyword),
- (r'[a-zA-Z][\w.]*', Name.Function),
- (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator),
- (r'(>=|<=|==|!=|'
- r'&&|\|\||'
- r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|'
- r'>>|<<|'
- r'\+\+|--|=>)', Operator),
- (r'[ \t\r]+', Text),
- (r'#[^\n]*', Comment),
- ],
- 'escapable-string' : [
- (r'\\[tsn]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- }
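Note: a hedged sketch of feeding a TrafficScript rule through pygments.lex(), which yields the raw (tokentype, value) stream without a formatter; the rule text and the function it calls are hypothetical, used only to exercise the patterns above.

# Hypothetical one-line rule; prints each token the RtsLexer produces.
from pygments import lex
from pygments.lexers import get_lexer_by_name

rule = '$ip = request.getRemoteIP();  # client address\n'
for tokentype, value in lex(rule, get_lexer_by_name('rts')):
    print(tokentype, repr(value))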
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/typoscript.py b/venv/lib/python3.11/site-packages/pygments/lexers/typoscript.py
deleted file mode 100644
index dff6e5e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/typoscript.py
+++ /dev/null
@@ -1,217 +0,0 @@
-"""
- pygments.lexers.typoscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for TypoScript
-
- `TypoScriptLexer`
- A TypoScript lexer.
-
- `TypoScriptCssDataLexer`
- Lexer that highlights markers, constants and registers within css.
-
- `TypoScriptHtmlDataLexer`
- Lexer that highlights markers, constants and registers within html tags.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using
-from pygments.token import Text, Comment, Name, String, Number, \
- Operator, Punctuation
-
-__all__ = ['TypoScriptLexer', 'TypoScriptCssDataLexer', 'TypoScriptHtmlDataLexer']
-
-
-class TypoScriptCssDataLexer(RegexLexer):
- """
- Lexer that highlights markers, constants and registers within css blocks.
-
- .. versionadded:: 2.2
- """
-
- name = 'TypoScriptCssData'
- aliases = ['typoscriptcssdata']
-
- tokens = {
- 'root': [
- # marker: ###MARK###
- (r'(.*)(###\w+###)(.*)', bygroups(String, Name.Constant, String)),
- # constant: {$some.constant}
- (r'(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})',
- bygroups(String.Symbol, Operator, Name.Constant,
- Name.Constant, String.Symbol)), # constant
- # constant: {register:somevalue}
- (r'(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)',
- bygroups(String, String.Symbol, Name.Constant, Operator,
- Name.Constant, String.Symbol, String)), # constant
- # whitespace
- (r'\s+', Text),
- # comments
- (r'/\*(?:(?!\*/).)*\*/', Comment),
- (r'(?<!(#|\'|"))(?:#(?!(?:[a-fA-F0-9]{6}|[a-fA-F0-9]{3}))[^\n#]+|//[^\n]*)',
- Comment),
- # other
- (r'[<>,:=.*%+|]', String),
- (r'[\w"\-!/&;(){}]+', String),
- ]
- }
-
-
-class TypoScriptHtmlDataLexer(RegexLexer):
- """
- Lexer that highlights markers, constants and registers within html tags.
-
- .. versionadded:: 2.2
- """
-
- name = 'TypoScriptHtmlData'
- aliases = ['typoscripthtmldata']
-
- tokens = {
- 'root': [
- # INCLUDE_TYPOSCRIPT
- (r'(INCLUDE_TYPOSCRIPT)', Name.Class),
- # Language label or extension resource FILE:... or LLL:... or EXT:...
- (r'(EXT|FILE|LLL):[^}\n"]*', String),
- # marker: ###MARK###
- (r'(.*)(###\w+###)(.*)', bygroups(String, Name.Constant, String)),
- # constant: {$some.constant}
- (r'(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})',
- bygroups(String.Symbol, Operator, Name.Constant,
- Name.Constant, String.Symbol)), # constant
- # constant: {register:somevalue}
- (r'(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)',
- bygroups(String, String.Symbol, Name.Constant, Operator,
- Name.Constant, String.Symbol, String)), # constant
- # whitespace
- (r'\s+', Text),
- # other
- (r'[<>,:=.*%+|]', String),
- (r'[\w"\-!/&;(){}#]+', String),
- ]
- }
-
-
-class TypoScriptLexer(RegexLexer):
- """
- Lexer for TypoScript code.
-
- .. versionadded:: 2.2
- """
-
- name = 'TypoScript'
- url = 'http://docs.typo3.org/typo3cms/TyposcriptReference/'
- aliases = ['typoscript']
- filenames = ['*.typoscript']
- mimetypes = ['text/x-typoscript']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- include('comment'),
- include('constant'),
- include('html'),
- include('label'),
- include('whitespace'),
- include('keywords'),
- include('punctuation'),
- include('operator'),
- include('structure'),
- include('literal'),
- include('other'),
- ],
- 'keywords': [
- # Conditions
- (r'(?i)(\[)(browser|compatVersion|dayofmonth|dayofweek|dayofyear|'
- r'device|ELSE|END|GLOBAL|globalString|globalVar|hostname|hour|IP|'
- r'language|loginUser|loginuser|minute|month|page|PIDinRootline|'
- r'PIDupinRootline|system|treeLevel|useragent|userFunc|usergroup|'
- r'version)([^\]]*)(\])',
- bygroups(String.Symbol, Name.Constant, Text, String.Symbol)),
- # Functions
- (r'(?=[\w\-])(HTMLparser|HTMLparser_tags|addParams|cache|encapsLines|'
- r'filelink|if|imageLinkWrap|imgResource|makelinks|numRows|numberFormat|'
- r'parseFunc|replacement|round|select|split|stdWrap|strPad|tableStyle|'
- r'tags|textStyle|typolink)(?![\w\-])', Name.Function),
- # Toplevel objects and _*
- (r'(?:(=?\s*<?\s+|^\s*))(cObj|field|config|content|constants|FEData|'
- r'file|frameset|includeLibs|lib|page|plugin|register|resources|sitemap|'
- r'sitetitle|styles|temp|tt_[^:.\s]*|types|xmlnews|INCLUDE_TYPOSCRIPT|'
- r'_CSS_DEFAULT_STYLE|_DEFAULT_PI_VARS|_LOCAL_LANG)(?![\w\-])',
- bygroups(Operator, Name.Builtin)),
- # Content objects
- (r'(?=[\w\-])(CASE|CLEARGIF|COA|COA_INT|COBJ_ARRAY|COLUMNS|CONTENT|'
- r'CTABLE|EDITPANEL|FILE|FILES|FLUIDTEMPLATE|FORM|HMENU|HRULER|HTML|'
- r'IMAGE|IMGTEXT|IMG_RESOURCE|LOAD_REGISTER|MEDIA|MULTIMEDIA|OTABLE|'
- r'PAGE|QTOBJECT|RECORDS|RESTORE_REGISTER|SEARCHRESULT|SVG|SWFOBJECT|'
- r'TEMPLATE|TEXT|USER|USER_INT)(?![\w\-])', Name.Class),
- # Menu states
- (r'(?=[\w\-])(ACTIFSUBRO|ACTIFSUB|ACTRO|ACT|CURIFSUBRO|CURIFSUB|CURRO|'
- r'CUR|IFSUBRO|IFSUB|NO|SPC|USERDEF1RO|USERDEF1|USERDEF2RO|USERDEF2|'
- r'USRRO|USR)', Name.Class),
- # Menu objects
- (r'(?=[\w\-])(GMENU_FOLDOUT|GMENU_LAYERS|GMENU|IMGMENUITEM|IMGMENU|'
- r'JSMENUITEM|JSMENU|TMENUITEM|TMENU_LAYERS|TMENU)', Name.Class),
- # PHP objects
- (r'(?=[\w\-])(PHP_SCRIPT(_EXT|_INT)?)', Name.Class),
- (r'(?=[\w\-])(userFunc)(?![\w\-])', Name.Function),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'html': [
- (r'<\S[^\n>]*>', using(TypoScriptHtmlDataLexer)),
- (r'&[^;\n]*;', String),
- (r'(?s)(_CSS_DEFAULT_STYLE)(\s*)(\()(.*(?=\n\)))',
- bygroups(Name.Class, Text, String.Symbol, using(TypoScriptCssDataLexer))),
- ],
- 'literal': [
- (r'0x[0-9A-Fa-f]+t?', Number.Hex),
- # (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?\s*(?:[^=])', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'(###\w+###)', Name.Constant),
- ],
- 'label': [
- # Language label or extension resource FILE:... or LLL:... or EXT:...
- (r'(EXT|FILE|LLL):[^}\n"]*', String),
- # Path to a resource
- (r'(?![^\w\-])([\w\-]+(?:/[\w\-]+)+/?)(\S*\n)',
- bygroups(String, String)),
- ],
- 'punctuation': [
- (r'[,.]', Punctuation),
- ],
- 'operator': [
- (r'[<>,:=.*%+|]', Operator),
- ],
- 'structure': [
- # Brackets and braces
- (r'[{}()\[\]\\]', String.Symbol),
- ],
- 'constant': [
- # Constant: {$some.constant}
- (r'(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})',
- bygroups(String.Symbol, Operator, Name.Constant,
- Name.Constant, String.Symbol)), # constant
- # Constant: {register:somevalue}
- (r'(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})',
- bygroups(String.Symbol, Name.Constant, Operator,
- Name.Constant, String.Symbol)), # constant
- # Hex color: #ff0077
- (r'(#[a-fA-F0-9]{6}\b|#[a-fA-F0-9]{3}\b)', String.Char)
- ],
- 'comment': [
- (r'(?<!(#|\'|"))(?:#(?!(?:[a-fA-F0-9]{6}|[a-fA-F0-9]{3}))[^\n#]+|//[^\n]*)',
- Comment),
- (r'/\*(?:(?!\*/).)*\*/', Comment),
- (r'(\s*#\s*\n)', Comment),
- ],
- 'other': [
- (r'[\w"\-!/&;]+', Text),
- ],
- }
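Note: TypoScriptLexer registers a MIME type in addition to the '*.typoscript' pattern, so get_lexer_for_mimetype is another way to reach it; a short sketch (hypothetical TypoScript snippet, assuming the lexer is still available):

# Hypothetical lookup by the MIME type registered above.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_for_mimetype

ts = 'page = PAGE\npage.10 = TEXT\npage.10.value = Hello\n'
print(highlight(ts, get_lexer_for_mimetype('text/x-typoscript'), TerminalFormatter()))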
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ul4.py b/venv/lib/python3.11/site-packages/pygments/lexers/ul4.py
deleted file mode 100644
index a40c20f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ul4.py
+++ /dev/null
@@ -1,267 +0,0 @@
-"""
- pygments.lexers.ul4
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for the UL4 templating language.
-
- More information: https://python.livinglogic.de/UL4.html
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, words, include
-from pygments.token import Comment, Text, Keyword, String, Number, Literal, \
- Name, Other, Operator
-from pygments.lexers.web import HtmlLexer, XmlLexer, CssLexer, JavascriptLexer
-from pygments.lexers.python import PythonLexer
-
-__all__ = ['UL4Lexer', 'HTMLUL4Lexer', 'XMLUL4Lexer', 'CSSUL4Lexer',
- 'JavascriptUL4Lexer', 'PythonUL4Lexer']
-
-
-class UL4Lexer(RegexLexer):
- """
- Generic lexer for UL4.
-
- .. versionadded:: 2.12
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- name = 'UL4'
- aliases = ['ul4']
- filenames = ['*.ul4']
-
- tokens = {
- "root": [
- (
- # Template header without name:
- # ``<?ul4?>``
- r"(<\?)(\s*)(ul4)(\s*)(\?>)",
- bygroups(Comment.Preproc, Text.Whitespace, Keyword,
- Text.Whitespace, Comment.Preproc),
- ),
- (
- # Template header with name (potentially followed by the signature):
- # ``<?ul4 foo(bar=42)?>``
- r"(<\?)(\s*)(ul4)(\s*)([a-zA-Z_][a-zA-Z_0-9]*)?",
- bygroups(Comment.Preproc, Text.Whitespace, Keyword,
- Text.Whitespace, Name.Function),
- "ul4", # Switch to "expression" mode
- ),
- (
- # Comment:
- # ``<?note foobar?>``
- r"<\?\s*note\s.*?\?>",
- Comment,
- ),
- (
- # Template documentation:
- # ``<?doc foobar?>``
- r"<\?\s*doc\s.*?\?>",
- String.Doc,
- ),
- (
- # ``<?ignore?>`` tag for commenting out code:
- # ``<?ignore?>...<?end ignore?>``
- r"<\?\s*ignore\s*\?>",
- Comment,
- "ignore", # Switch to "ignore" mode
- ),
- (
- # ``<?def?>`` tag for defining local templates
- # ``<?def foo(bar=42)?>...<?end def?>``
- r"(<\?)(\s*)(def)(\s*)([a-zA-Z_][a-zA-Z_0-9]*)?",
- bygroups(Comment.Preproc, Text.Whitespace, Keyword,
- Text.Whitespace, Name.Function),
- "ul4", # Switch to "expression" mode
- ),
- (
- # The rest of the supported tags
- r"(<\?)(\s*)(printx|print|for|if|elif|else|while|code|renderblocks?|render)\b",
- bygroups(Comment.Preproc, Text.Whitespace, Keyword),
- "ul4", # Switch to "expression" mode
- ),
- (
- # ``<?end?>`` tag for ending ``<?def?>``, ``<?for?>``,
- # ``<?if?>``, ``<?while?>``, ``<?renderblock?>`` and
- # ``<?renderblocks?>`` blocks.
- r"(<\?)(\s*)(end)\b",
- bygroups(Comment.Preproc, Text.Whitespace, Keyword),
- "end", # Switch to "end tag" mode
- ),
- (
- # ``<?whitespace?>`` tag for configuring whitespace handling
- r"(<\?)(\s*)(whitespace)\b",
- bygroups(Comment.Preproc, Text.Whitespace, Keyword),
- "whitespace", # Switch to "whitespace" mode
- ),
- # Plain text
- (r"[^<]+", Other),
- (r"<", Other),
- ],
- # Ignore mode ignores everything up to the matching ``<?end ignore?>`` tag
- "ignore": [
- # Nested ``<?ignore?>`` tag
- (r"<\?\s*ignore\s*\?>", Comment, "#push"),
- # ``<?end ignore?>`` tag
- (r"<\?\s*end\s+ignore\s*\?>", Comment, "#pop"),
- # Everything else
- (r"[^<]+", Comment),
- (r".", Comment),
- ],
- # UL4 expressions
- "ul4": [
- # End the tag
- (r"\?>", Comment.Preproc, "#pop"),
- # Start triple quoted string constant
- ("'''", String, "string13"),
- ('"""', String, "string23"),
- # Start single quoted string constant
- ("'", String, "string1"),
- ('"', String, "string2"),
- # Floating point number
- (r"\d+\.\d*([eE][+-]?\d+)?", Number.Float),
- (r"\.\d+([eE][+-]?\d+)?", Number.Float),
- (r"\d+[eE][+-]?\d+", Number.Float),
- # Binary integer: ``0b101010``
- (r"0[bB][01]+", Number.Bin),
- # Octal integer: ``0o52``
- (r"0[oO][0-7]+", Number.Oct),
- # Hexadecimal integer: ``0x2a``
- (r"0[xX][0-9a-fA-F]+", Number.Hex),
- # Date or datetime: ``@(2000-02-29)``/``@(2000-02-29T12:34:56.987654)``
- (r"@\(\d\d\d\d-\d\d-\d\d(T(\d\d:\d\d(:\d\d(\.\d{6})?)?)?)?\)", Literal.Date),
- # Color: ``#fff``, ``#fff8f0`` etc.
- (r"#[0-9a-fA-F]{8}", Literal.Color),
- (r"#[0-9a-fA-F]{6}", Literal.Color),
- (r"#[0-9a-fA-F]{3,4}", Literal.Color),
- # Decimal integer: ``42``
- (r"\d+", Number.Integer),
- # Operators
- (r"//|==|!=|>=|<=|<<|>>|\+=|-=|\*=|/=|//=|<<=|>>=|&=|\|=|^=|=|[\[\]{},:*/().~%&|<>^+-]", Operator),
- # Keywords
- (words(("for", "in", "if", "else", "not", "is", "and", "or"), suffix=r"\b"), Keyword),
- # Builtin constants
- (words(("None", "False", "True"), suffix=r"\b"), Keyword.Constant),
- # Variable names
- (r"[a-zA-Z_][a-zA-Z0-9_]*", Name),
- # Whitespace
- (r"\s+", Text.Whitespace),
- ],
- # ``<?end ...?>`` tag for closing the last open block
- "end": [
- (r"\?>", Comment.Preproc, "#pop"),
- (words(("for", "if", "def", "while", "renderblock", "renderblocks"), suffix=r"\b"), Keyword),
- (r"\s+", Text),
- ],
- # Content of the ``<?whitespace ...?>`` tag:
- # ``keep``, ``strip`` or ``smart``
- "whitespace": [
- (r"\?>", Comment.Preproc, "#pop"),
- (words(("keep", "strip", "smart"), suffix=r"\b"), Comment.Preproc),
- (r"\s+", Text.Whitespace),
- ],
- # Inside a string constant
- "stringescapes": [
- (r"""\\[\\'"abtnfr]""", String.Escape),
- (r"\\x[0-9a-fA-F]{2}", String.Escape),
- (r"\\u[0-9a-fA-F]{4}", String.Escape),
- (r"\\U[0-9a-fA-F]{8}", String.Escape),
- ],
- # Inside a triple quoted string started with ``'''``
- "string13": [
- (r"'''", String, "#pop"),
- include("stringescapes"),
- (r"[^\\']+", String),
- (r'.', String),
- ],
- # Inside a triple quoted string started with ``"""``
- "string23": [
- (r'"""', String, "#pop"),
- include("stringescapes"),
- (r'[^\\"]+', String),
- (r'.', String),
- ],
- # Inside a single quoted string started with ``'``
- "string1": [
- (r"'", String, "#pop"),
- include("stringescapes"),
- (r"[^\\']+", String),
- (r'.', String),
- ],
- # Inside a single quoted string started with ``"``
- "string2": [
- (r'"', String, "#pop"),
- include("stringescapes"),
- (r'[^\\"]+', String),
- (r'.', String),
- ],
- }
-
-class HTMLUL4Lexer(DelegatingLexer):
- """
- Lexer for UL4 embedded in HTML.
- """
-
- name = 'HTML+UL4'
- aliases = ['html+ul4']
- filenames = ['*.htmlul4']
-
- def __init__(self, **options):
- super().__init__(HtmlLexer, UL4Lexer, **options)
-
-
-class XMLUL4Lexer(DelegatingLexer):
- """
- Lexer for UL4 embedded in XML.
- """
-
- name = 'XML+UL4'
- aliases = ['xml+ul4']
- filenames = ['*.xmlul4']
-
- def __init__(self, **options):
- super().__init__(XmlLexer, UL4Lexer, **options)
-
-
-class CSSUL4Lexer(DelegatingLexer):
- """
- Lexer for UL4 embedded in CSS.
- """
-
- name = 'CSS+UL4'
- aliases = ['css+ul4']
- filenames = ['*.cssul4']
-
- def __init__(self, **options):
- super().__init__(CssLexer, UL4Lexer, **options)
-
-
-class JavascriptUL4Lexer(DelegatingLexer):
- """
- Lexer for UL4 embedded in Javascript.
- """
-
- name = 'Javascript+UL4'
- aliases = ['js+ul4']
- filenames = ['*.jsul4']
-
- def __init__(self, **options):
- super().__init__(JavascriptLexer, UL4Lexer, **options)
-
-
-class PythonUL4Lexer(DelegatingLexer):
- """
- Lexer for UL4 embedded in Python.
- """
-
- name = 'Python+UL4'
- aliases = ['py+ul4']
- filenames = ['*.pyul4']
-
- def __init__(self, **options):
- super().__init__(PythonLexer, UL4Lexer, **options)
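Note: the HTML/XML/CSS/Javascript/Python variants above are DelegatingLexers: UL4Lexer claims the <?...?> tags, and the regions it marks as Other are re-lexed by the host-language lexer passed to super().__init__. A hedged usage sketch with a hypothetical template:

# Hypothetical HTML+UL4 template; 'html+ul4' resolves to HTMLUL4Lexer.
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

template = '<ul><?for item in items?><li><?printx item?></li><?end for?></ul>\n'
print(highlight(template, get_lexer_by_name('html+ul4'), HtmlFormatter()))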
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/unicon.py b/venv/lib/python3.11/site-packages/pygments/lexers/unicon.py
deleted file mode 100644
index 9cd6790..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/unicon.py
+++ /dev/null
@@ -1,411 +0,0 @@
-"""
- pygments.lexers.unicon
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Icon and Unicon languages, including ucode VM.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words, using, this
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['IconLexer', 'UcodeLexer', 'UniconLexer']
-
-
-class UniconLexer(RegexLexer):
- """
- For Unicon source code.
-
- .. versionadded:: 2.4
- """
-
- name = 'Unicon'
- aliases = ['unicon']
- filenames = ['*.icn']
- mimetypes = ['text/unicon']
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'#.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- (r'class|method|procedure', Keyword.Declaration, 'subprogram'),
- (r'(record)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
- (r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
- r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
- (r'(&null|&fail)\b', Keyword.Constant),
- (r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
- r'&cset|&current|&dateline|&date|&digits|&dump|'
- r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
- r'&eventcode|&eventvalue|&eventsource|&e|'
- r'&features|&file|&host|&input|&interval|&lcase|&letters|'
- r'&level|&line|&ldrag|&lpress|&lrelease|'
- r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
- r'&phi|&pick|&pi|&pos|&progname|'
- r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|'
- r'&shift|&source|&storage|&subject|'
- r'&time|&trace|&ucase|&version|'
- r'&window|&x|&y', Keyword.Reserved),
- (r'(by|of|not|to)\b', Keyword.Reserved),
- (r'(global|local|static|abstract)\b', Keyword.Reserved),
- (r'package|link|import', Keyword.Declaration),
- (words((
- 'break', 'case', 'create', 'critical', 'default', 'end', 'all',
- 'do', 'else', 'every', 'fail', 'if', 'import', 'initial',
- 'initially', 'invocable', 'next',
- 'repeat', 'return', 'suspend',
- 'then', 'thread', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- (words((
- 'Abort', 'abs', 'acos', 'Active', 'Alert', 'any', 'Any', 'Arb',
- 'Arbno', 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
- 'Bal', 'bal', 'Bg', 'Break', 'Breakx',
- 'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
- 'classname', 'Clip', 'Clone', 'close', 'cofail', 'collect',
- 'Color', 'ColorValue', 'condvar', 'constructor', 'copy',
- 'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
- 'dbcolumns', 'dbdriver', 'dbkeys', 'dblimits', 'dbproduct',
- 'dbtables', 'delay', 'delete', 'detab', 'display', 'DrawArc',
- 'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
- 'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
- 'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
- 'DrawTorus', 'dtor',
- 'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
- 'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
- 'Fail', 'fcntl', 'fdup', 'Fence', 'fetch', 'Fg', 'fieldnames',
- 'filepair', 'FillArc', 'FillCircle', 'FillPolygon',
- 'FillRectangle', 'find', 'flock', 'flush', 'Font', 'fork',
- 'FreeColor', 'FreeSpace', 'function',
- 'get', 'getch', 'getche', 'getegid', 'getenv', 'geteuid',
- 'getgid', 'getgr', 'gethost', 'getpgrp', 'getpid', 'getppid',
- 'getpw', 'getrusage', 'getserv', 'GetSpace', 'gettimeofday',
- 'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
- 'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
- 'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
- 'kbhit', 'key', 'keyword', 'kill',
- 'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
- 'lock', 'log', 'Lower', 'lstat',
- 'many', 'map', 'match', 'MatrixMode', 'max', 'member',
- 'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
- 'MultMatrix', 'mutex',
- 'name', 'NewColor', 'Normals', 'NotAny', 'numeric',
- 'open', 'opencl', 'oprec', 'ord', 'OutPort',
- 'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
- 'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
- 'PlayAudio', 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
- 'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
- 'PushTranslate', 'put',
- 'QueryPointer',
- 'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
- 'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
- 'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
- 'Rtab', 'rtod', 'runerr',
- 'save', 'Scale', 'seek', 'select', 'send', 'seq',
- 'serial', 'set', 'setenv', 'setgid', 'setgrent',
- 'sethostent', 'setpgrp', 'setpwent', 'setservent',
- 'setuid', 'signal', 'sin', 'sort', 'sortf', 'Span',
- 'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
- 'StopAudio', 'string', 'structure', 'Succeed', 'Swi',
- 'symlink', 'sys_errstr', 'system', 'syswrite',
- 'Tab', 'tab', 'table', 'tan',
- 'Texcoord', 'Texture', 'TextWidth', 'Translate',
- 'trap', 'trim', 'truncate', 'trylock', 'type',
- 'umask', 'Uncouple', 'unlock', 'upto', 'utime',
- 'variable', 'VAttrib',
- 'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
- 'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
- 'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
- 'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
- 'write', 'WriteImage', 'writes', 'WSection',
- 'WSync'), prefix=r'\b', suffix=r'\b'),
- Name.Function),
- include('numbers'),
- (r'<@|<<@|>@|>>@|\.>|->|===|~===|\*\*|\+\+|--|\.|~==|~=|<=|>=|==|'
- r'=|<<=|<<|>>=|>>|:=:|:=|->|<->|\+:=|\|', Operator),
- (r'"(?:[^\\"]|\\.)*"', String),
- (r"'(?:[^\\']|\\.)*'", String.Character),
- (r'[*<>+=/&!?@~\\-]', Operator),
- (r'\^', Operator),
- (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"[\[\]]", Punctuation),
- (r"<>|=>|[()|:;,.'`{}%&?]", Punctuation),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
- (r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
- (r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
- ],
- 'subprogram': [
- (r'\(', Punctuation, ('#pop', 'formal_part')),
- (r';', Punctuation, '#pop'),
- (r'"[^"]+"|\w+', Name.Function),
- include('root'),
- ],
- 'type_def': [
- (r'\(', Punctuation, 'formal_part'),
- ],
- 'formal_part': [
- (r'\)', Punctuation, '#pop'),
- (r'\w+', Name.Variable),
- (r',', Punctuation),
- (r'(:string|:integer|:real)\b', Keyword.Reserved),
- include('root'),
- ],
- }
-
-
-class IconLexer(RegexLexer):
- """
- Lexer for Icon.
-
- .. versionadded:: 1.6
- """
- name = 'Icon'
- aliases = ['icon']
- filenames = ['*.icon', '*.ICON']
- mimetypes = []
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'#.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- (r'class|method|procedure', Keyword.Declaration, 'subprogram'),
- (r'(record)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
- (r'(#line|\$C|\$Cend|\$define|\$else|\$endif|\$error|\$ifdef|'
- r'\$ifndef|\$include|\$line|\$undef)\b', Keyword.PreProc),
- (r'(&null|&fail)\b', Keyword.Constant),
- (r'&allocated|&ascii|&clock|&collections|&column|&col|&control|'
- r'&cset|&current|&dateline|&date|&digits|&dump|'
- r'&errno|&errornumber|&errortext|&errorvalue|&error|&errout|'
- r'&eventcode|&eventvalue|&eventsource|&e|'
- r'&features|&file|&host|&input|&interval|&lcase|&letters|'
- r'&level|&line|&ldrag|&lpress|&lrelease|'
- r'&main|&mdrag|&meta|&mpress|&mrelease|&now|&output|'
- r'&phi|&pick|&pi|&pos|&progname|'
- r'&random|&rdrag|&regions|&resize|&row|&rpress|&rrelease|'
- r'&shift|&source|&storage|&subject|'
- r'&time|&trace|&ucase|&version|'
- r'&window|&x|&y', Keyword.Reserved),
- (r'(by|of|not|to)\b', Keyword.Reserved),
- (r'(global|local|static)\b', Keyword.Reserved),
- (r'link', Keyword.Declaration),
- (words((
- 'break', 'case', 'create', 'default', 'end', 'all',
- 'do', 'else', 'every', 'fail', 'if', 'initial',
- 'invocable', 'next',
- 'repeat', 'return', 'suspend',
- 'then', 'until', 'while'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- (words((
- 'abs', 'acos', 'Active', 'Alert', 'any',
- 'args', 'array', 'asin', 'atan', 'atanh', 'Attrib',
- 'bal', 'Bg',
- 'callout', 'center', 'char', 'chdir', 'chmod', 'chown', 'chroot',
- 'Clip', 'Clone', 'close', 'cofail', 'collect',
- 'Color', 'ColorValue', 'condvar', 'copy',
- 'CopyArea', 'cos', 'Couple', 'crypt', 'cset', 'ctime',
- 'delay', 'delete', 'detab', 'display', 'DrawArc',
- 'DrawCircle', 'DrawCube', 'DrawCurve', 'DrawCylinder',
- 'DrawDisk', 'DrawImage', 'DrawLine', 'DrawPoint', 'DrawPolygon',
- 'DrawRectangle', 'DrawSegment', 'DrawSphere', 'DrawString',
- 'DrawTorus', 'dtor',
- 'entab', 'EraseArea', 'errorclear', 'Event', 'eventmask',
- 'EvGet', 'EvSend', 'exec', 'exit', 'exp', 'Eye',
- 'fcntl', 'fdup', 'fetch', 'Fg', 'fieldnames',
- 'FillArc', 'FillCircle', 'FillPolygon',
- 'FillRectangle', 'find', 'flock', 'flush', 'Font',
- 'FreeColor', 'FreeSpace', 'function',
- 'get', 'getch', 'getche', 'getenv',
- 'GetSpace', 'gettimeofday',
- 'getuid', 'globalnames', 'GotoRC', 'GotoXY', 'gtime', 'hardlink',
- 'iand', 'icom', 'IdentityMatrix', 'image', 'InPort', 'insert',
- 'Int86', 'integer', 'ioctl', 'ior', 'ishift', 'istate', 'ixor',
- 'kbhit', 'key', 'keyword', 'kill',
- 'left', 'Len', 'list', 'load', 'loadfunc', 'localnames',
- 'lock', 'log', 'Lower', 'lstat',
- 'many', 'map', 'match', 'MatrixMode', 'max', 'member',
- 'membernames', 'methodnames', 'methods', 'min', 'mkdir', 'move',
- 'MultMatrix', 'mutex',
- 'name', 'NewColor', 'Normals', 'numeric',
- 'open', 'opencl', 'oprec', 'ord', 'OutPort',
- 'PaletteChars', 'PaletteColor', 'PaletteKey', 'paramnames',
- 'parent', 'Pattern', 'Peek', 'Pending', 'pipe', 'Pixel',
- 'Poke', 'pop', 'PopMatrix', 'Pos', 'pos',
- 'proc', 'pull', 'push', 'PushMatrix', 'PushRotate', 'PushScale',
- 'PushTranslate', 'put',
- 'QueryPointer',
- 'Raise', 'read', 'ReadImage', 'readlink', 'reads', 'ready',
- 'real', 'receive', 'Refresh', 'Rem', 'remove', 'rename',
- 'repl', 'reverse', 'right', 'rmdir', 'Rotate', 'Rpos',
- 'rtod', 'runerr',
- 'save', 'Scale', 'seek', 'select', 'send', 'seq',
- 'serial', 'set', 'setenv',
- 'setuid', 'signal', 'sin', 'sort', 'sortf',
- 'spawn', 'sql', 'sqrt', 'stat', 'staticnames', 'stop',
- 'string', 'structure', 'Swi',
- 'symlink', 'sys_errstr', 'system', 'syswrite',
- 'tab', 'table', 'tan',
- 'Texcoord', 'Texture', 'TextWidth', 'Translate',
- 'trap', 'trim', 'truncate', 'trylock', 'type',
- 'umask', 'Uncouple', 'unlock', 'upto', 'utime',
- 'variable',
- 'wait', 'WAttrib', 'WDefault', 'WFlush', 'where',
- 'WinAssociate', 'WinButton', 'WinColorDialog', 'WindowContents',
- 'WinEditRegion', 'WinFontDialog', 'WinMenuBar', 'WinOpenDialog',
- 'WinPlayMedia', 'WinSaveDialog', 'WinScrollBar', 'WinSelectDialog',
- 'write', 'WriteImage', 'writes', 'WSection',
- 'WSync'), prefix=r'\b', suffix=r'\b'),
- Name.Function),
- include('numbers'),
- (r'===|~===|\*\*|\+\+|--|\.|==|~==|<=|>=|=|~=|<<=|<<|>>=|>>|'
- r':=:|:=|<->|<-|\+:=|\|\||\|', Operator),
- (r'"(?:[^\\"]|\\.)*"', String),
- (r"'(?:[^\\']|\\.)*'", String.Character),
- (r'[*<>+=/&!?@~\\-]', Operator),
- (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"[\[\]]", Punctuation),
- (r"<>|=>|[()|:;,.'`{}%\^&?]", Punctuation),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'\b([+-]?([2-9]|[12][0-9]|3[0-6])[rR][0-9a-zA-Z]+)\b', Number.Hex),
- (r'[+-]?[0-9]*\.([0-9]*)([Ee][+-]?[0-9]*)?', Number.Float),
- (r'\b([+-]?[0-9]+[KMGTPkmgtp]?)\b', Number.Integer),
- ],
- 'subprogram': [
- (r'\(', Punctuation, ('#pop', 'formal_part')),
- (r';', Punctuation, '#pop'),
- (r'"[^"]+"|\w+', Name.Function),
- include('root'),
- ],
- 'type_def': [
- (r'\(', Punctuation, 'formal_part'),
- ],
- 'formal_part': [
- (r'\)', Punctuation, '#pop'),
- (r'\w+', Name.Variable),
- (r',', Punctuation),
- (r'(:string|:integer|:real)\b', Keyword.Reserved),
- include('root'),
- ],
- }
-
-
-class UcodeLexer(RegexLexer):
- """
- Lexer for Icon ucode files.
-
- .. versionadded:: 2.4
- """
- name = 'ucode'
- aliases = ['ucode']
- filenames = ['*.u', '*.u1', '*.u2']
- mimetypes = []
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'(#.*\n)', Comment),
- (words((
- 'con', 'declend', 'end',
- 'global',
- 'impl', 'invocable',
- 'lab', 'link', 'local',
- 'record',
- 'uid', 'unions',
- 'version'),
- prefix=r'\b', suffix=r'\b'),
- Name.Function),
- (words((
- 'colm', 'filen', 'line', 'synt'),
- prefix=r'\b', suffix=r'\b'),
- Comment),
- (words((
- 'asgn',
- 'bang', 'bscan',
- 'cat', 'ccase', 'chfail',
- 'coact', 'cofail', 'compl',
- 'coret', 'create', 'cset',
- 'diff', 'div', 'dup',
- 'efail', 'einit', 'end', 'eqv', 'eret',
- 'error', 'escan', 'esusp',
- 'field',
- 'goto',
- 'init', 'int', 'inter',
- 'invoke',
- 'keywd',
- 'lconcat', 'lexeq', 'lexge',
- 'lexgt', 'lexle', 'lexlt', 'lexne',
- 'limit', 'llist', 'lsusp',
- 'mark', 'mark0', 'minus', 'mod', 'mult',
- 'neg', 'neqv', 'nonnull', 'noop', 'null',
- 'number', 'numeq', 'numge', 'numgt',
- 'numle', 'numlt', 'numne',
- 'pfail', 'plus', 'pnull', 'pop', 'power',
- 'pret', 'proc', 'psusp', 'push1', 'pushn1',
- 'random', 'rasgn', 'rcv', 'rcvbk', 'real',
- 'refresh', 'rswap',
- 'sdup', 'sect', 'size', 'snd', 'sndbk',
- 'str', 'subsc', 'swap',
- 'tabmat', 'tally', 'toby', 'trace',
- 'unmark',
- 'value', 'var'), prefix=r'\b', suffix=r'\b'),
- Keyword.Declaration),
- (words((
- 'any',
- 'case',
- 'endcase', 'endevery', 'endif',
- 'endifelse', 'endrepeat', 'endsuspend',
- 'enduntil', 'endwhile', 'every',
- 'if', 'ifelse',
- 'repeat',
- 'suspend',
- 'until',
- 'while'),
- prefix=r'\b', suffix=r'\b'),
- Name.Constant),
- (r'\d+(\s*|\.$|$)', Number.Integer),
- (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
- (r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation),
- (r'\s+\b', Text),
- (r'[\w-]+', Text),
- ],
- }
-
- def analyse_text(text):
- """endsuspend and endrepeat are unique to this language, and
- \\self, /self doesn't seem to get used anywhere else either."""
- result = 0
-
- if 'endsuspend' in text:
- result += 0.1
-
- if 'endrepeat' in text:
- result += 0.1
-
- if ':=' in text:
- result += 0.01
-
- if 'procedure' in text and 'end' in text:
- result += 0.01
-
- # This seems quite unique to unicon -- doesn't appear in any other
- # example source we have (A quick search reveals that \SELF appears in
- # Perl/Raku code)
- if r'\self' in text and r'/self' in text:
- result += 0.5
-
- return result
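Note: UcodeLexer.analyse_text() is a content-based heuristic ('endsuspend'/'endrepeat' and the \self plus /self pair are the strong signals); pygments.lexers.guess_lexer() compares such scores across all lexers when no alias or filename is available. A hedged sketch:

# Hypothetical ucode-ish snippet; analyse_text scores it per the rules above.
from pygments.lexers import get_lexer_by_name, guess_lexer

snippet = 'lab main\nendsuspend\nendrepeat\n'
print(get_lexer_by_name('ucode').analyse_text(snippet))  # 0.1 + 0.1 = 0.2
print(guess_lexer(snippet))  # picks whichever lexer scores highest overall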
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/urbi.py b/venv/lib/python3.11/site-packages/pygments/lexers/urbi.py
deleted file mode 100644
index 3857ff2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/urbi.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
- pygments.lexers.urbi
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for UrbiScript language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['UrbiscriptLexer']
-
-
-class UrbiscriptLexer(ExtendedRegexLexer):
- """
- For UrbiScript source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'UrbiScript'
- aliases = ['urbiscript']
- filenames = ['*.u']
- mimetypes = ['application/x-urbiscript']
-
- flags = re.DOTALL
-
- # TODO
- # - handle Experimental and deprecated tags with specific tokens
- # - handle Angles and Durations with specific tokens
-
- def blob_callback(lexer, match, ctx):
- text_before_blob = match.group(1)
- blob_start = match.group(2)
- blob_size_str = match.group(3)
- blob_size = int(blob_size_str)
- yield match.start(), String, text_before_blob
- ctx.pos += len(text_before_blob)
-
- # if blob size doesn't match blob format (example : "\B(2)(aaa)")
- # yield blob as a string
- if ctx.text[match.end() + blob_size] != ")":
- result = "\\B(" + blob_size_str + ")("
- yield match.start(), String, result
- ctx.pos += len(result)
- return
-
- # if blob is well formatted, yield as Escape
- blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
- yield match.start(), String.Escape, blob_text
- ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # comments
- (r'//.*?\n', Comment),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
- (words((
- 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
- 'continue', 'default', 'else', 'enum', 'every', 'external',
- 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
- 'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
- 'waituntil', 'whenever', 'while'), suffix=r'\b'),
- Keyword),
- (words((
- 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
- 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
- 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
- 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
- 'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
- 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
- Keyword.Reserved),
- # deprecated keywords, use a meaningful token when available
- (r'(emit|foreach|internal|loopn|static)\b', Keyword),
- # ignored keywords, use a meaningful token when available
- (r'(private|protected|public)\b', Keyword),
- (r'(var|do|const|function|class)\b', Keyword.Declaration),
- (r'(true|false|nil|void)\b', Keyword.Constant),
- (words((
- 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
- 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
- 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
- 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
- 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
- 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
- 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
- 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
- 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
- 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
- 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
- 'UValue', 'UVar'), suffix=r'\b'),
- Name.Builtin),
- (r'(?:this)\b', Name.Builtin.Pseudo),
- # don't match single | and &
- (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
- (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
- Operator.Word),
- (r'[{}\[\]()]+', Punctuation),
- (r'(?:;|\||,|&|\?|!)+', Punctuation),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # Float, Integer, Angle and Duration
- (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
- r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
- # handle binary blob in strings
- (r'"', String.Double, "string.double"),
- (r"'", String.Single, "string.single"),
- ],
- 'string.double': [
- (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
- (r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'),
- ],
- 'string.single': [
- (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
- (r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'),
- ],
- # from http://pygments.org/docs/lexerdevelopment/#changing-states
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ]
- }
-
- def analyse_text(text):
- """This is fairly similar to C and others, but freezeif and
- waituntil are unique keywords."""
- result = 0
-
- if 'freezeif' in text:
- result += 0.05
-
- if 'waituntil' in text:
- result += 0.05
-
- return result
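Note: blob_callback above is an ExtendedRegexLexer callback: it checks that the byte count declared in \B(N)( is really followed by a closing ')' and, if so, emits the whole blob as String.Escape while advancing ctx.pos past the raw bytes by hand. A hedged sketch of the observable effect:

# Hypothetical UrbiScript line holding a well-formed 3-byte blob.
from pygments.lexers import get_lexer_by_name
from pygments.token import String

code = 'var sound = "\\B(3)(abc)";\n'
for tokentype, value in get_lexer_by_name('urbiscript').get_tokens(code):
    if tokentype in String:
        print(tokentype, repr(value))  # includes a String.Escape for the blob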
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/usd.py b/venv/lib/python3.11/site-packages/pygments/lexers/usd.py
deleted file mode 100644
index 79a2ad9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/usd.py
+++ /dev/null
@@ -1,90 +0,0 @@
-"""
- pygments.lexers.usd
- ~~~~~~~~~~~~~~~~~~~
-
- The module that parses Pixar's Universal Scene Description file format.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.lexer import words as words_
-from pygments.lexers._usd_builtins import COMMON_ATTRIBUTES, KEYWORDS, \
- OPERATORS, SPECIAL_NAMES, TYPES
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
-
-__all__ = ["UsdLexer"]
-
-
-def _keywords(words, type_):
- return [(words_(words, prefix=r"\b", suffix=r"\b"), type_)]
-
-
-_TYPE = r"(\w+(?:\[\])?)"
-_BASE_ATTRIBUTE = r"(\w+(?:\:\w+)*)(?:(\.)(timeSamples))?"
-_WHITESPACE = r"([ \t]+)"
-
-
-class UsdLexer(RegexLexer):
- """
- A lexer that parses Pixar's Universal Scene Description file format.
-
- .. versionadded:: 2.6
- """
-
- name = "USD"
- url = 'https://graphics.pixar.com/usd/release/index.html'
- aliases = ["usd", "usda"]
- filenames = ["*.usd", "*.usda"]
-
- tokens = {
- "root": [
- (r"(custom){_WHITESPACE}(uniform)(\s+){}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Token, Whitespace, Keyword.Token, Whitespace,
- Keyword.Type, Whitespace, Name.Attribute, Text,
- Name.Keyword.Tokens, Whitespace, Operator)),
- (r"(custom){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
- Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
- Operator)),
- (r"(uniform){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
- Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
- Operator)),
- (r"{}{_WHITESPACE}{}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Type, Whitespace, Name.Attribute, Text,
- Name.Keyword.Tokens, Whitespace, Operator)),
- ] +
- _keywords(KEYWORDS, Keyword.Tokens) +
- _keywords(SPECIAL_NAMES, Name.Builtins) +
- _keywords(COMMON_ATTRIBUTES, Name.Attribute) +
- [(r"\b\w+:[\w:]+\b", Name.Attribute)] +
- _keywords(OPERATORS, Operator) + # more attributes
- [(type_ + r"\[\]", Keyword.Type) for type_ in TYPES] +
- _keywords(TYPES, Keyword.Type) +
- [
- (r"[(){}\[\]]", Punctuation),
- ("#.*?$", Comment.Single),
- (",", Punctuation),
- (";", Punctuation), # ";"s are allowed to combine separate metadata lines
- ("=", Operator),
- (r"[-]*([0-9]*[.])?[0-9]+(?:e[+-]*\d+)?", Number),
- (r"'''(?:.|\n)*?'''", String),
- (r'"""(?:.|\n)*?"""', String),
- (r"'.*?'", String),
- (r'".*?"', String),
- (r"<(\.\./)*([\w/]+|[\w/]+\.\w+[\w:]*)>", Name.Namespace),
- (r"@.*?@", String.Interpol),
- (r'\(.*"[.\\n]*".*\)', String.Doc),
- (r"\A#usda .+$", Comment.Hashbang),
- (r"\s+", Whitespace),
- (r"\w+", Text),
- (r"[_:.]+", Punctuation),
- ],
- }
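Note: the 'root' table above is assembled by plain list concatenation (the _keywords() helper plus literal rule lists), evaluated once when the class is created; the leading format-string rules are what classify ordinary attribute declarations. A hedged sketch of the result:

# Hypothetical attribute declaration: 'float3' should come back as
# Keyword.Type and 'xformOp:translate' as Name.Attribute per the rules above.
from pygments.lexers import get_lexer_by_name

line = 'float3 xformOp:translate = (0, 0, 0)\n'
for tokentype, value in get_lexer_by_name('usd').get_tokens(line):
    print(tokentype, repr(value))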
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/varnish.py b/venv/lib/python3.11/site-packages/pygments/lexers/varnish.py
deleted file mode 100644
index 3c22792..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/varnish.py
+++ /dev/null
@@ -1,189 +0,0 @@
-"""
- pygments.lexers.varnish
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Varnish configuration
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, \
- inherit, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Whitespace
-
-__all__ = ['VCLLexer', 'VCLSnippetLexer']
-
-
-class VCLLexer(RegexLexer):
- """
- For Varnish Configuration Language (VCL).
-
- .. versionadded:: 2.2
- """
- name = 'VCL'
- aliases = ['vcl']
- filenames = ['*.vcl']
- mimetypes = ['text/x-vclsrc']
-
- def analyse_text(text):
- # If the very first line is 'vcl 4.0;' it's pretty much guaranteed
- # that this is VCL
- if text.startswith('vcl 4.0;'):
- return 1.0
- # Skip over comments and blank lines
- # This is accurate enough that returning 0.9 is reasonable.
- # Almost no VCL files start without some comments.
- elif '\nvcl 4.0;' in text[:1000]:
- return 0.9
-
- tokens = {
- 'probe': [
- include('whitespace'),
- include('comments'),
- (r'(\.\w+)(\s*=\s*)([^;]*)(;)',
- bygroups(Name.Attribute, Operator, using(this), Punctuation)),
- (r'\}', Punctuation, '#pop'),
- ],
- 'acl': [
- include('whitespace'),
- include('comments'),
- (r'[!/]+', Operator),
- (r';', Punctuation),
- (r'\d+', Number),
- (r'\}', Punctuation, '#pop'),
- ],
- 'backend': [
- include('whitespace'),
- (r'(\.probe)(\s*=\s*)(\w+)(;)',
- bygroups(Name.Attribute, Operator, Name.Variable.Global, Punctuation)),
- (r'(\.probe)(\s*=\s*)(\{)',
- bygroups(Name.Attribute, Operator, Punctuation), 'probe'),
- (r'(\.\w+\b)(\s*=\s*)([^;\s]*)(\s*;)',
- bygroups(Name.Attribute, Operator, using(this), Punctuation)),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'statements': [
- (r'(\d\.)?\d+[sdwhmy]', Literal.Date),
- (r'(\d\.)?\d+ms', Literal.Date),
- (r'(vcl_pass|vcl_hash|vcl_hit|vcl_init|vcl_backend_fetch|vcl_pipe|'
- r'vcl_backend_response|vcl_synth|vcl_deliver|vcl_backend_error|'
- r'vcl_fini|vcl_recv|vcl_purge|vcl_miss)\b', Name.Function),
- (r'(pipe|retry|hash|synth|deliver|purge|abandon|lookup|pass|fail|ok|'
- r'miss|fetch|restart)\b', Name.Constant),
- (r'(beresp|obj|resp|req|req_top|bereq)\.http\.[a-zA-Z_-]+\b', Name.Variable),
- (words((
- 'obj.status', 'req.hash_always_miss', 'beresp.backend', 'req.esi_level',
- 'req.can_gzip', 'beresp.ttl', 'obj.uncacheable', 'req.ttl', 'obj.hits',
- 'client.identity', 'req.hash_ignore_busy', 'obj.reason', 'req.xid',
- 'req_top.proto', 'beresp.age', 'obj.proto', 'obj.age', 'local.ip',
- 'beresp.uncacheable', 'req.method', 'beresp.backend.ip', 'now',
- 'obj.grace', 'req.restarts', 'beresp.keep', 'req.proto', 'resp.proto',
- 'bereq.xid', 'bereq.between_bytes_timeout', 'req.esi',
- 'bereq.first_byte_timeout', 'bereq.method', 'bereq.connect_timeout',
- 'beresp.do_gzip', 'resp.status', 'beresp.do_gunzip',
- 'beresp.storage_hint', 'resp.is_streaming', 'beresp.do_stream',
- 'req_top.method', 'bereq.backend', 'beresp.backend.name', 'beresp.status',
- 'req.url', 'obj.keep', 'obj.ttl', 'beresp.reason', 'bereq.retries',
- 'resp.reason', 'bereq.url', 'beresp.do_esi', 'beresp.proto', 'client.ip',
- 'bereq.proto', 'server.hostname', 'remote.ip', 'req.backend_hint',
- 'server.identity', 'req_top.url', 'beresp.grace', 'beresp.was_304',
- 'server.ip', 'bereq.uncacheable'), suffix=r'\b'),
- Name.Variable),
- (r'[!%&+*\-,/<.}{>=|~]+', Operator),
- (r'[();]', Punctuation),
-
- (r'[,]+', Punctuation),
- (words(('hash_data', 'regsub', 'regsuball', 'if', 'else',
- 'elsif', 'elif', 'synth', 'synthetic', 'ban',
- 'return', 'set', 'unset', 'import', 'include', 'new',
- 'rollback', 'call'), suffix=r'\b'),
- Keyword),
- (r'storage\.\w+\.\w+\b', Name.Variable),
- (words(('true', 'false')), Name.Builtin),
- (r'\d+\b', Number),
- (r'(backend)(\s+\w+)(\s*\{)',
- bygroups(Keyword, Name.Variable.Global, Punctuation), 'backend'),
- (r'(probe\s)(\s*\w+\s)(\{)',
- bygroups(Keyword, Name.Variable.Global, Punctuation), 'probe'),
- (r'(acl\s)(\s*\w+\s)(\{)',
- bygroups(Keyword, Name.Variable.Global, Punctuation), 'acl'),
- (r'(vcl )(4.0)(;)$',
- bygroups(Keyword.Reserved, Name.Constant, Punctuation)),
- (r'(sub\s+)([a-zA-Z]\w*)(\s*\{)',
- bygroups(Keyword, Name.Function, Punctuation)),
- (r'([a-zA-Z_]\w*)'
- r'(\.)'
- r'([a-zA-Z_]\w*)'
- r'(\s*\(.*\))',
- bygroups(Name.Function, Punctuation, Name.Function, using(this))),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'comments': [
- (r'#.*$', Comment),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'//.*$', Comment),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'[^"\n]+', String), # all other characters
- ],
- 'multistring': [
- (r'[^"}]', String),
- (r'"\}', String, '#pop'),
- (r'["}]', String),
- ],
- 'whitespace': [
- (r'L?"', String, 'string'),
- (r'\{"', String, 'multistring'),
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'\\\n', Text), # line continuation
- ],
- 'root': [
- include('whitespace'),
- include('comments'),
- include('statements'),
- (r'\s+', Whitespace),
- ],
- }
-
-
-class VCLSnippetLexer(VCLLexer):
- """
- For Varnish Configuration Language snippets.
-
- .. versionadded:: 2.2
- """
- name = 'VCLSnippets'
- aliases = ['vclsnippets', 'vclsnippet']
- mimetypes = ['text/x-vclsnippet']
- filenames = []
-
- def analyse_text(text):
- # override method inherited from VCLLexer
- return 0
-
- tokens = {
- 'snippetspre': [
- (r'\.\.\.+', Comment),
- (r'(bereq|req|req_top|resp|beresp|obj|client|server|local|remote|'
- r'storage)($|\.\*)', Name.Variable),
- ],
- 'snippetspost': [
- (r'(backend)\b', Keyword.Reserved),
- ],
- 'root': [
- include('snippetspre'),
- inherit,
- include('snippetspost'),
- ],
- }
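For reference, the analyse_text heuristic above is what Pygments' lexer guessing consults. A minimal sketch, assuming a Pygments installation that still ships VCLLexer (this diff only removes the vendored copy); the VCL snippet itself is illustrative:

from pygments.lexers import guess_lexer

# A file that starts with 'vcl 4.0;' scores 1.0 in VCLLexer.analyse_text,
# so guess_lexer is expected to pick the VCL lexer for it.
vcl_source = 'vcl 4.0;\n\nbackend default {\n    .host = "127.0.0.1";\n}\n'
print(guess_lexer(vcl_source).name)  # expected: 'VCL'
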
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/verification.py b/venv/lib/python3.11/site-packages/pygments/lexers/verification.py
deleted file mode 100644
index 41d45d4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/verification.py
+++ /dev/null
@@ -1,114 +0,0 @@
-"""
- pygments.lexers.verification
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Intermediate Verification Languages (IVLs).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Comment, Operator, Keyword, Name, Number, \
- Punctuation, Text, Generic
-
-__all__ = ['BoogieLexer', 'SilverLexer']
-
-
-class BoogieLexer(RegexLexer):
- """
- For Boogie source code.
-
- .. versionadded:: 2.1
- """
- name = 'Boogie'
- url = 'https://boogie-docs.readthedocs.io/en/latest/'
- aliases = ['boogie']
- filenames = ['*.bpl']
-
- tokens = {
- 'root': [
- # Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//[/!](.*?)\n', Comment.Doc),
- (r'//(.*?)\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
-
- (words((
- 'axiom', 'break', 'call', 'ensures', 'else', 'exists', 'function',
- 'forall', 'if', 'invariant', 'modifies', 'procedure', 'requires',
- 'then', 'var', 'while'),
- suffix=r'\b'), Keyword),
- (words(('const',), suffix=r'\b'), Keyword.Reserved),
-
- (words(('bool', 'int', 'ref'), suffix=r'\b'), Keyword.Type),
- include('numbers'),
- (r"(>=|<=|:=|!=|==>|&&|\|\||[+/\-=>*<\[\]])", Operator),
- (r'\{.*?\}', Generic.Emph), #triggers
- (r"([{}():;,.])", Punctuation),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'numbers': [
- (r'[0-9]+', Number.Integer),
- ],
- }
-
-
-class SilverLexer(RegexLexer):
- """
- For Silver source code.
-
- .. versionadded:: 2.2
- """
- name = 'Silver'
- aliases = ['silver']
- filenames = ['*.sil', '*.vpr']
-
- tokens = {
- 'root': [
- # Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//[/!](.*?)\n', Comment.Doc),
- (r'//(.*?)\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
-
- (words((
- 'result', 'true', 'false', 'null', 'method', 'function',
- 'predicate', 'program', 'domain', 'axiom', 'var', 'returns',
- 'field', 'define', 'fold', 'unfold', 'inhale', 'exhale', 'new', 'assert',
- 'assume', 'goto', 'while', 'if', 'elseif', 'else', 'fresh',
- 'constraining', 'Seq', 'Set', 'Multiset', 'union', 'intersection',
- 'setminus', 'subset', 'unfolding', 'in', 'old', 'forall', 'exists',
- 'acc', 'wildcard', 'write', 'none', 'epsilon', 'perm', 'unique',
- 'apply', 'package', 'folding', 'label', 'forperm'),
- suffix=r'\b'), Keyword),
- (words(('requires', 'ensures', 'invariant'), suffix=r'\b'), Name.Decorator),
- (words(('Int', 'Perm', 'Bool', 'Ref', 'Rational'), suffix=r'\b'), Keyword.Type),
- include('numbers'),
- (r'[!%&*+=|?:<>/\-\[\]]', Operator),
- (r'\{.*?\}', Generic.Emph), #triggers
- (r'([{}():;,.])', Punctuation),
- # Identifier
- (r'[\w$]\w*', Name),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'numbers': [
- (r'[0-9]+', Number.Integer),
- ],
- }
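As a quick illustration of how the Boogie rules above tokenize input, here is a hedged sketch using the standard Pygments API (assuming a release that still includes BoogieLexer; the one-line declaration is illustrative):

from pygments.lexers import get_lexer_by_name

# Tokenize a one-line Boogie declaration; 'procedure' comes out as Keyword,
# 'int' as Keyword.Type, and identifiers such as 'F' as Name.
boogie_lexer = get_lexer_by_name('boogie')
for token_type, value in boogie_lexer.get_tokens('procedure F(x: int) returns (r: int);'):
    print(token_type, repr(value))
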
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/verifpal.py b/venv/lib/python3.11/site-packages/pygments/lexers/verifpal.py
deleted file mode 100644
index 6953dd7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/verifpal.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""
- pygments.lexers.verifpal
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Verifpal languages.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups, default
-from pygments.token import Comment, Keyword, Name, String, Punctuation, \
- Whitespace
-
-__all__ = ['VerifpalLexer']
-
-
-class VerifpalLexer(RegexLexer):
- """
- For Verifpal code.
-
- .. versionadded:: 2.16
- """
-
- name = 'Verifpal'
- aliases = ['verifpal']
- filenames = ['*.vp']
- mimetypes = ['text/x-verifpal']
- url = 'https://verifpal.com'
-
- tokens = {
- 'root': [
- (r'//.*$', Comment.Single),
- (r'(principal)( +)(\w+)( *)(\[)(.*)$', bygroups(Name.Builtin, Whitespace, String, Whitespace, Punctuation, Whitespace)),
- (r'(attacker)( *)(\[)( *)(passive|active)( *)(\])( *)$', bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace, String, Whitespace, Punctuation, Whitespace)),
- (r'(knows)( +)(private|public)( +)', bygroups(Name.Builtin, Whitespace, Keyword.Constant, Whitespace), 'shared'),
- (r'(queries)( +)(\[)', bygroups(Name.Builtin, Whitespace, Punctuation), 'queries'),
- (r'(\w+)( +)(->|→)( *)(\w+)( *)(\:)', bygroups(String, Whitespace, Punctuation, Whitespace, String, Whitespace, Punctuation), 'shared'),
- (words(('generates', 'leaks'), suffix=r'\b'), Name.Builtin, 'shared'),
- (words(( 'phase', 'precondition',), suffix=r'\b'), Name.Builtin),
- (r'[\[\(\)\]\?:=→^,]', Punctuation),
- (r'->', Punctuation),
- (words(('password',), suffix=r'\b'), Keyword.Constant),
- (words(('AEAD_DEC', 'AEAD_ENC', 'ASSERT', 'BLIND', 'CONCAT',
- 'DEC', 'ENC', 'G', 'HASH', 'HKDF', 'MAC', 'PKE_DEC',
- 'PKE_ENC', 'PW_HASH', 'RINGSIGN', 'RINGSIGNVERIF',
- 'SHAMIR_JOIN', 'SHAMIR_SPLIT', 'SIGN', 'SIGNVERIF',
- 'SPLIT', 'UNBLIND', '_', 'nil'), suffix=r'\b'),
- Name.Function),
- (r'\s+', Whitespace),
- (r'\w+', Name.Variable),
- ],
- 'shared': [
- (r'[\^\[\],]', Punctuation),
- (r' +', Whitespace),
- (r'\w+', Name.Variable),
- default('#pop')
- ],
- 'queries': [
- (r'\s+', Name.Variable),
- (words(('confidentiality?', 'authentication?', 'freshness?',
- 'unlinkability?', 'equivalence?'), suffix='( )'),
- bygroups(Keyword.Pseudo, Whitespace), 'shared'),
- default('#pop')
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/vip.py b/venv/lib/python3.11/site-packages/pygments/lexers/vip.py
deleted file mode 100644
index 1b25d5c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/vip.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""
- pygments.lexers.vip
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Visual Prolog & Grammar files.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, inherit, words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['VisualPrologLexer', 'VisualPrologGrammarLexer']
-
-
-class VisualPrologBaseLexer(RegexLexer):
- minorendkw = ('try', 'foreach', 'if')
- minorkwexp = ('and', 'catch', 'do', 'else', 'elseif', 'erroneous', 'externally', 'failure', 'finally', 'foreach', 'if', 'or', 'orelse', 'otherwise', 'then',
- 'try', 'div', 'mod', 'rem', 'quot')
- dockw = ('short', 'detail', 'end', 'withdomain')
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (words(minorendkw, prefix=r'\bend\s+', suffix=r'\b'), Keyword.Minor),
- (r'end', Keyword),
- (words(minorkwexp, suffix=r'\b'), Keyword.Minor),
- (r'0[xo][\da-fA-F_]+', Number),
- (r'((\d[\d_]*)?\.)?\d[\d_]*([eE][\-+]?\d+)?', Number),
- (r'_\w*', Name.Variable.Anonymous),
- (r'[A-Z]\w*', Name.Variable),
- (r'@\w+', Name.Variable),
- (r'[a-z]\w*', Name),
- (r'/\*', Comment, 'comment'),
- (r'\%', Comment, 'commentline'),
- (r'"', String.Symbol, 'string'),
- (r'\'', String.Symbol, 'stringsingle'),
- (r'@"', String.Symbol, 'atstring'),
- (r'[\-+*^/!?<>=~:]+', Operator),
- (r'[$,.[\]|(){}\\]+', Punctuation),
- (r'.', Text),
- ],
- 'commentdoc': [
- (words(dockw, prefix=r'@', suffix=r'\b'), Comment.Preproc),
- (r'@', Comment),
- ],
- 'commentline': [
- include('commentdoc'),
- (r'[^@\n]+', Comment),
- (r'$', Comment, '#pop'),
- ],
- 'comment': [
- include('commentdoc'),
- (r'[^@*/]+', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[*/]', Comment),
- ],
- 'stringescape': [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\[\'"ntr\\]', String.Escape),
- ],
- 'stringsingle': [
- include('stringescape'),
- (r'\'', String.Symbol, '#pop'),
- (r'[^\'\\\n]+', String),
- (r'\n', String.Escape.Error, '#pop'),
- ],
- 'string': [
- include('stringescape'),
- (r'"', String.Symbol, '#pop'),
- (r'[^"\\\n]+', String),
- (r'\n', String.Escape.Error, '#pop'),
- ],
- 'atstring': [
- (r'""', String.Escape),
- (r'"', String.Symbol, '#pop'),
- (r'[^"]+', String),
- ]
- }
-
-
-class VisualPrologLexer(VisualPrologBaseLexer):
- """Lexer for VisualProlog
-
- .. versionadded:: 2.17
- """
- name = 'Visual Prolog'
- url = 'https://www.visual-prolog.com/'
- aliases = ['visualprolog']
- filenames = ['*.pro', '*.cl', '*.i', '*.pack', '*.ph']
-
- majorkw = ('goal', 'namespace', 'interface', 'class', 'implement', 'where', 'open', 'inherits', 'supports', 'resolve',
- 'delegate', 'monitor', 'constants', 'domains', 'predicates', 'constructors', 'properties', 'clauses', 'facts')
- minorkw = ('align', 'anyflow', 'as', 'bitsize', 'determ', 'digits', 'erroneous', 'externally', 'failure', 'from',
- 'guard', 'multi', 'nondeterm', 'or', 'orelse', 'otherwise', 'procedure', 'resolve', 'single', 'suspending')
- directivekw = ('bininclude', 'else', 'elseif', 'endif', 'error', 'export', 'externally', 'from', 'grammargenerate',
- 'grammarinclude', 'if', 'include', 'message', 'options', 'orrequires', 'requires', 'stringinclude', 'then')
- tokens = {
- 'root': [
- (words(minorkw, suffix=r'\b'), Keyword.Minor),
- (words(majorkw, suffix=r'\b'), Keyword),
- (words(directivekw, prefix='#', suffix=r'\b'), Keyword.Directive),
- inherit
- ]
- }
-
- def analyse_text(text):
- """Competes with IDL and Prolog on *.pro; div. lisps on*.cl and SwigLexer on *.i"""
- # These are *really* good indicators (and not conflicting with the other languages)
- # end-scope first on line e.g. 'end implement'
- # section keyword alone on line e.g. 'clauses'
- if re.search(r'^\s*(end\s+(interface|class|implement)|(clauses|predicates|domains|facts|constants|properties)\s*$)', text):
- return 0.98
- else:
- return 0
-
-
-class VisualPrologGrammarLexer(VisualPrologBaseLexer):
- """Lexer for VisualProlog grammar
-
- .. versionadded:: 2.17
- """
-
- name = 'Visual Prolog Grammar'
- url = 'https://www.visual-prolog.com/'
- aliases = ['visualprologgrammar']
- filenames = ['*.vipgrm']
-
- majorkw = ('open', 'namespace', 'grammar', 'nonterminals',
- 'startsymbols', 'terminals', 'rules', 'precedence')
- directivekw = ('bininclude', 'stringinclude')
- tokens = {
- 'root': [
- (words(majorkw, suffix=r'\b'), Keyword),
- (words(directivekw, prefix='#', suffix=r'\b'), Keyword.Directive),
- inherit
- ]
- }
-
- def analyse_text(text):
- """No competditors (currently)"""
- # These are *really* good indicators
- # end-scope first on line e.g. 'end grammar'
- # section keyword alone on line e.g. 'rules'
- if re.search(r'^\s*(end\s+grammar|(nonterminals|startsymbols|terminals|rules|precedence)\s*$)', text):
- return 0.98
- else:
- return 0
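The 0.98 score returned above is what lets Visual Prolog win the *.pro contest described in the docstring. A small sketch, assuming a Pygments release (>= 2.17) that still ships this module; the input text is illustrative:

from pygments.lexers.vip import VisualPrologLexer

# 'end implement ...' at the start of the text matches the end-scope pattern,
# so the analyser reports its high score.
print(VisualPrologLexer.analyse_text('end implement main\n'))  # 0.98
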
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/vyper.py b/venv/lib/python3.11/site-packages/pygments/lexers/vyper.py
deleted file mode 100644
index ff9d0b0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/vyper.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""
- pygments.lexers.vyper
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Vyper Smart Contract language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import (Comment, String, Name, Keyword, Number,
- Operator, Punctuation, Text, Whitespace)
-
-__all__ = ['VyperLexer']
-
-
-class VyperLexer(RegexLexer):
- """For the Vyper smart contract language.
-
- .. versionadded:: 2.17
- """
- name = 'Vyper'
- aliases = ['vyper']
- filenames = ['*.vy']
- url = "https://vyper.readthedocs.io"
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Whitespace),
-
- # Line continuations
- (r'(\\)(\n|\r\n|\r)', bygroups(Text, Whitespace)),
-
- # Comments - inline and multiline
- (r'#.*$', Comment.Single),
- (r'\"\"\"', Comment.Multiline, 'multiline-comment'),
-
- # Strings - single and double
- (r"'", String.Single, 'single-string'),
- (r'"', String.Double, 'double-string'),
-
- # Functions (working)
- (r'(def)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
- bygroups(Keyword, Whitespace, Name.Function)),
-
- # Event and Struct
- (r'(event|struct|interface|log)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
- bygroups(Keyword, Whitespace, Name.Class)),
-
- # Imports
- (r'(from)(\s+)(vyper\.\w+)(\s+)(import)(\s+)(\w+)',
- bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
- Keyword, Whitespace, Name.Class)),
-
- # Numeric Literals
- (r'\b0x[0-9a-fA-F]+\b', Number.Hex),
- (r'\b(\d{1,3}(?:_\d{3})*|\d+)\b', Number.Integer),
- (r'\b\d+\.\d*\b', Number.Float),
-
- # Keywords
- (words(('def', 'event', 'pass', 'return', 'for', 'while', 'if', 'elif',
- 'else', 'assert', 'raise', 'import', 'in', 'struct', 'implements',
- 'interface', 'from', 'indexed', 'log'),
- prefix=r'\b', suffix=r'\b'), Keyword),
-
- # Visibility and State Mutability
- (words(('public', 'private', 'view', 'pure', 'constant',
- 'immutable', 'nonpayable'), prefix=r'\b', suffix=r'\b'),
- Keyword.Declaration),
-
- # Built-in Functions
- (words(('bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'shift',
- 'create_minimal_proxy_to', 'create_copy_of', 'create_from_blueprint',
- 'ecadd', 'ecmul', 'ecrecover', 'keccak256', 'sha256', 'concat', 'convert',
- 'uint2str', 'extract32', 'slice', 'abs', 'ceil', 'floor', 'max', 'max_value',
- 'min', 'min_value', 'pow_mod256', 'sqrt', 'isqrt', 'uint256_addmod',
- 'uint256_mulmod', 'unsafe_add', 'unsafe_sub', 'unsafe_mul', 'unsafe_div',
- 'as_wei_value', 'blockhash', 'empty', 'len', 'method_id', '_abi_encode',
- '_abi_decode', 'print', 'range'), prefix=r'\b', suffix=r'\b'),
- Name.Builtin),
-
- # Built-in Variables and Attributes
- (words(('msg.sender', 'msg.value', 'block.timestamp', 'block.number', 'msg.gas'),
- prefix=r'\b', suffix=r'\b'),
- Name.Builtin.Pseudo),
-
- (words(('uint', 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
- 'int', 'int8', 'int16', 'int32', 'int64', 'int128', 'int256', 'bool',
- 'decimal', 'bytes', 'bytes1', 'bytes2', 'bytes3', 'bytes4', 'bytes5',
- 'bytes6', 'bytes7', 'bytes8', 'bytes9', 'bytes10', 'bytes11',
- 'bytes12', 'bytes13', 'bytes14', 'bytes15', 'bytes16', 'bytes17',
- 'bytes18', 'bytes19', 'bytes20', 'bytes21', 'bytes22', 'bytes23',
- 'bytes24', 'bytes25', 'bytes26', 'bytes27', 'bytes28', 'bytes29',
- 'bytes30', 'bytes31', 'bytes32', 'string', 'String', 'address',
- 'enum', 'struct'), prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
-
- # indexed keywords
- (r'\b(indexed)\b(\s*)(\()(\s*)(\w+)(\s*)(\))',
- bygroups(Keyword, Whitespace, Punctuation, Whitespace,
- Keyword.Type, Punctuation)),
-
- # Operators and Punctuation
- (r'(\+|\-|\*|\/|<=?|>=?|==|!=|=|\||&|%)', Operator),
- (r'[.,:;()\[\]{}]', Punctuation),
-
- # Other variable names and types
- (r'@[\w.]+', Name.Decorator),
- (r'__\w+__', Name.Magic), # Matches double underscores followed by word characters
- (r'EMPTY_BYTES32', Name.Constant),
- (r'\bERC20\b', Name.Class),
- (r'\bself\b', Name.Attribute),
-
- (r'Bytes\[\d+\]', Keyword.Type),
-
- # Generic names and variables
- (r'\b[a-zA-Z_]\w*\b:', Name.Variable),
- (r'\b[a-zA-Z_]\w*\b', Name),
-
- ],
-
- 'multiline-comment': [
- (r'\"\"\"', Comment.Multiline, '#pop'),
- (r'[^"]+', Comment.Multiline),
- (r'\"', Comment.Multiline)
- ],
-
- 'single-string': [
- (r"[^\\']+", String.Single),
- (r"'", String.Single, '#pop'),
- (r'\\.', String.Escape),
- ],
-
- 'double-string': [
- (r'[^\\"]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- ]
- }
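A brief usage sketch for the Vyper lexer defined above (alias 'vyper'); it assumes Pygments >= 2.17 is installed and simply renders a small, illustrative contract fragment to HTML:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter

# '@external' is caught by the Name.Decorator rule, 'def'/'return' by the
# keyword rules, and 'uint256' by the Keyword.Type rule.
code = '@external\ndef add(x: uint256, y: uint256) -> uint256:\n    return x + y\n'
print(highlight(code, get_lexer_by_name('vyper'), HtmlFormatter()))
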
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/web.py b/venv/lib/python3.11/site-packages/pygments/lexers/web.py
deleted file mode 100644
index 9e52653..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/web.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""
- pygments.lexers.web
- ~~~~~~~~~~~~~~~~~~~
-
- Just export previously exported lexers.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
- HamlLexer, ScamlLexer, JadeLexer
-from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
-from pygments.lexers.javascript import JavascriptLexer, LiveScriptLexer, \
- DartLexer, TypeScriptLexer, LassoLexer, ObjectiveJLexer, CoffeeScriptLexer
-from pygments.lexers.actionscript import ActionScriptLexer, \
- ActionScript3Lexer, MxmlLexer
-from pygments.lexers.php import PhpLexer
-from pygments.lexers.webmisc import DuelLexer, XQueryLexer, SlimLexer, QmlLexer
-from pygments.lexers.data import JsonLexer
-JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
-
-__all__ = []
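The module above exists purely for backwards compatibility, re-exporting lexers that once lived here. A short illustration of what that shim guarantees, assuming any recent Pygments install:

from pygments.lexers.web import HtmlLexer, JSONLexer      # legacy import path
from pygments.lexers.html import HtmlLexer as CurrentHtmlLexer
from pygments.lexers.data import JsonLexer

# The re-exports are the same classes, and JSONLexer is the Pygments 1.5
# spelling kept as an alias for JsonLexer.
assert HtmlLexer is CurrentHtmlLexer
assert JSONLexer is JsonLexer
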
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/webassembly.py b/venv/lib/python3.11/site-packages/pygments/lexers/webassembly.py
deleted file mode 100644
index f674ead..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/webassembly.py
+++ /dev/null
@@ -1,120 +0,0 @@
-"""
- pygments.lexers.webassembly
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the WebAssembly text format.
-
- The grammar can be found at https://github.com/WebAssembly/spec/blob/master/interpreter/README.md
- and https://webassembly.github.io/spec/core/text/.
-
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups, default
-from pygments.token import Text, Comment, Operator, Keyword, String, Number, Punctuation, Name
-
-__all__ = ['WatLexer']
-
-keywords = (
- 'module', 'import', 'func', 'funcref', 'start', 'param', 'local', 'type',
- 'result', 'export', 'memory', 'global', 'mut', 'data', 'table', 'elem',
- 'if', 'then', 'else', 'end', 'block', 'loop'
-)
-
-builtins = (
- 'unreachable', 'nop', 'block', 'loop', 'if', 'else', 'end', 'br', 'br_if',
- 'br_table', 'return', 'call', 'call_indirect', 'drop', 'select',
- 'local.get', 'local.set', 'local.tee', 'global.get', 'global.set',
- 'i32.load', 'i64.load', 'f32.load', 'f64.load', 'i32.load8_s',
- 'i32.load8_u', 'i32.load16_s', 'i32.load16_u', 'i64.load8_s',
- 'i64.load8_u', 'i64.load16_s', 'i64.load16_u', 'i64.load32_s',
- 'i64.load32_u', 'i32.store', 'i64.store', 'f32.store', 'f64.store',
- 'i32.store8', 'i32.store16', 'i64.store8', 'i64.store16', 'i64.store32',
- 'memory.size', 'memory.grow', 'i32.const', 'i64.const', 'f32.const',
- 'f64.const', 'i32.eqz', 'i32.eq', 'i32.ne', 'i32.lt_s', 'i32.lt_u',
- 'i32.gt_s', 'i32.gt_u', 'i32.le_s', 'i32.le_u', 'i32.ge_s', 'i32.ge_u',
- 'i64.eqz', 'i64.eq', 'i64.ne', 'i64.lt_s', 'i64.lt_u', 'i64.gt_s',
- 'i64.gt_u', 'i64.le_s', 'i64.le_u', 'i64.ge_s', 'i64.ge_u', 'f32.eq',
- 'f32.ne', 'f32.lt', 'f32.gt', 'f32.le', 'f32.ge', 'f64.eq', 'f64.ne',
- 'f64.lt', 'f64.gt', 'f64.le', 'f64.ge', 'i32.clz', 'i32.ctz', 'i32.popcnt',
- 'i32.add', 'i32.sub', 'i32.mul', 'i32.div_s', 'i32.div_u', 'i32.rem_s',
- 'i32.rem_u', 'i32.and', 'i32.or', 'i32.xor', 'i32.shl', 'i32.shr_s',
- 'i32.shr_u', 'i32.rotl', 'i32.rotr', 'i64.clz', 'i64.ctz', 'i64.popcnt',
- 'i64.add', 'i64.sub', 'i64.mul', 'i64.div_s', 'i64.div_u', 'i64.rem_s',
- 'i64.rem_u', 'i64.and', 'i64.or', 'i64.xor', 'i64.shl', 'i64.shr_s',
- 'i64.shr_u', 'i64.rotl', 'i64.rotr', 'f32.abs', 'f32.neg', 'f32.ceil',
- 'f32.floor', 'f32.trunc', 'f32.nearest', 'f32.sqrt', 'f32.add', 'f32.sub',
- 'f32.mul', 'f32.div', 'f32.min', 'f32.max', 'f32.copysign', 'f64.abs',
- 'f64.neg', 'f64.ceil', 'f64.floor', 'f64.trunc', 'f64.nearest', 'f64.sqrt',
- 'f64.add', 'f64.sub', 'f64.mul', 'f64.div', 'f64.min', 'f64.max',
- 'f64.copysign', 'i32.wrap_i64', 'i32.trunc_f32_s', 'i32.trunc_f32_u',
- 'i32.trunc_f64_s', 'i32.trunc_f64_u', 'i64.extend_i32_s',
- 'i64.extend_i32_u', 'i64.trunc_f32_s', 'i64.trunc_f32_u',
- 'i64.trunc_f64_s', 'i64.trunc_f64_u', 'f32.convert_i32_s',
- 'f32.convert_i32_u', 'f32.convert_i64_s', 'f32.convert_i64_u',
- 'f32.demote_f64', 'f64.convert_i32_s', 'f64.convert_i32_u',
- 'f64.convert_i64_s', 'f64.convert_i64_u', 'f64.promote_f32',
- 'i32.reinterpret_f32', 'i64.reinterpret_f64', 'f32.reinterpret_i32',
- 'f64.reinterpret_i64',
-)
-
-
-class WatLexer(RegexLexer):
- """Lexer for the WebAssembly text format.
-
- .. versionadded:: 2.9
- """
-
- name = 'WebAssembly'
- url = 'https://webassembly.org/'
- aliases = ['wast', 'wat']
- filenames = ['*.wat', '*.wast']
-
- tokens = {
- 'root': [
- (words(keywords, suffix=r'(?=[^a-z_\.])'), Keyword),
- (words(builtins), Name.Builtin, 'arguments'),
- (words(['i32', 'i64', 'f32', 'f64']), Keyword.Type),
-            (r'\$[A-Za-z0-9!#$%&\'*+./:<=>?@\\^_`|~-]+', Name.Variable), # yes, all of these are valid in identifiers
- (r';;.*?$', Comment.Single),
- (r'\(;', Comment.Multiline, 'nesting_comment'),
- (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*(.([\dA-Fa-f](_?[\dA-Fa-f])*)?)?([pP][+-]?[\dA-Fa-f](_?[\dA-Fa-f])*)?', Number.Float),
- (r'[+-]?\d.\d(_?\d)*[eE][+-]?\d(_?\d)*', Number.Float),
- (r'[+-]?\d.\d(_?\d)*', Number.Float),
- (r'[+-]?\d.[eE][+-]?\d(_?\d)*', Number.Float),
- (r'[+-]?(inf|nan:0x[\dA-Fa-f](_?[\dA-Fa-f])*|nan)', Number.Float),
- (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*', Number.Hex),
- (r'[+-]?\d(_?\d)*', Number.Integer),
- (r'[\(\)]', Punctuation),
- (r'"', String.Double, 'string'),
- (r'\s+', Text),
- ],
- 'nesting_comment': [
- (r'\(;', Comment.Multiline, '#push'),
- (r';\)', Comment.Multiline, '#pop'),
- (r'[^;(]+', Comment.Multiline),
- (r'[;(]', Comment.Multiline),
- ],
- 'string': [
- (r'\\[\dA-Fa-f][\dA-Fa-f]', String.Escape), # must have exactly two hex digits
- (r'\\t', String.Escape),
- (r'\\n', String.Escape),
- (r'\\r', String.Escape),
- (r'\\"', String.Escape),
- (r"\\'", String.Escape),
- (r'\\u\{[\dA-Fa-f](_?[\dA-Fa-f])*\}', String.Escape),
- (r'\\\\', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- ],
- 'arguments': [
- (r'\s+', Text),
- (r'(offset)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
- (r'(offset)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
- (r'(align)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
- (r'(align)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
- default('#pop'),
- ]
- }
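A compact sketch of the token stream the rules above produce for a minimal WebAssembly text module (assumes a Pygments release >= 2.9 with WatLexer present; the module source is illustrative):

from pygments.lexers import get_lexer_by_name

wat = '(module (func $add (param i32 i32) (result i32) local.get 0 local.get 1 i32.add))'
# 'module', 'func', 'param' and 'result' hit the keyword rule, '$add' the
# identifier rule, bare 'i32' the type rule, and 'local.get'/'i32.add' the builtins.
for token_type, value in get_lexer_by_name('wat').get_tokens(wat):
    if value.strip():
        print(token_type, value)
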
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/webidl.py b/venv/lib/python3.11/site-packages/pygments/lexers/webidl.py
deleted file mode 100644
index 5fcbe69..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/webidl.py
+++ /dev/null
@@ -1,299 +0,0 @@
-"""
- pygments.lexers.webidl
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Web IDL, including some extensions.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, default, include, words
-from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
- String, Text
-
-__all__ = ['WebIDLLexer']
-
-_builtin_types = (
- # primitive types
- 'byte', 'octet', 'boolean',
- r'(?:unsigned\s+)?(?:short|long(?:\s+long)?)',
- r'(?:unrestricted\s+)?(?:float|double)',
- # string types
- 'DOMString', 'ByteString', 'USVString',
- # exception types
- 'Error', 'DOMException',
- # typed array types
- 'Uint8Array', 'Uint16Array', 'Uint32Array', 'Uint8ClampedArray',
- 'Float32Array', 'Float64Array',
- # buffer source types
- 'ArrayBuffer', 'DataView', 'Int8Array', 'Int16Array', 'Int32Array',
- # other
- 'any', 'void', 'object', 'RegExp',
-)
-_identifier = r'_?[A-Za-z][a-zA-Z0-9_-]*'
-_keyword_suffix = r'(?![\w-])'
-_string = r'"[^"]*"'
-
-
-class WebIDLLexer(RegexLexer):
- """
- For Web IDL.
-
- .. versionadded:: 2.6
- """
-
- name = 'Web IDL'
- url = 'https://www.w3.org/wiki/Web_IDL'
- aliases = ['webidl']
- filenames = ['*.webidl']
-
- tokens = {
- 'common': [
- (r'\s+', Text),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- (r'//.*', Comment.Single),
- (r'^#.*', Comment.Preproc),
- ],
- 'root': [
- include('common'),
- (r'\[', Punctuation, 'extended_attributes'),
- (r'partial' + _keyword_suffix, Keyword),
- (r'typedef' + _keyword_suffix, Keyword, ('typedef', 'type')),
- (r'interface' + _keyword_suffix, Keyword, 'interface_rest'),
- (r'enum' + _keyword_suffix, Keyword, 'enum_rest'),
- (r'callback' + _keyword_suffix, Keyword, 'callback_rest'),
- (r'dictionary' + _keyword_suffix, Keyword, 'dictionary_rest'),
- (r'namespace' + _keyword_suffix, Keyword, 'namespace_rest'),
- (_identifier, Name.Class, 'implements_rest'),
- ],
- 'extended_attributes': [
- include('common'),
- (r',', Punctuation),
- (_identifier, Name.Decorator),
- (r'=', Punctuation, 'extended_attribute_rest'),
- (r'\(', Punctuation, 'argument_list'),
- (r'\]', Punctuation, '#pop'),
- ],
- 'extended_attribute_rest': [
- include('common'),
- (_identifier, Name, 'extended_attribute_named_rest'),
- (_string, String),
- (r'\(', Punctuation, 'identifier_list'),
- default('#pop'),
- ],
- 'extended_attribute_named_rest': [
- include('common'),
- (r'\(', Punctuation, 'argument_list'),
- default('#pop'),
- ],
- 'argument_list': [
- include('common'),
- (r'\)', Punctuation, '#pop'),
- default('argument'),
- ],
- 'argument': [
- include('common'),
- (r'optional' + _keyword_suffix, Keyword),
- (r'\[', Punctuation, 'extended_attributes'),
- (r',', Punctuation, '#pop'),
- (r'\)', Punctuation, '#pop:2'),
- default(('argument_rest', 'type'))
- ],
- 'argument_rest': [
- include('common'),
- (_identifier, Name.Variable),
- (r'\.\.\.', Punctuation),
- (r'=', Punctuation, 'default_value'),
- default('#pop'),
- ],
- 'identifier_list': [
- include('common'),
- (_identifier, Name.Class),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- ],
- 'type': [
- include('common'),
- (r'(?:' + r'|'.join(_builtin_types) + r')' + _keyword_suffix,
- Keyword.Type, 'type_null'),
- (words(('sequence', 'Promise', 'FrozenArray'),
- suffix=_keyword_suffix), Keyword.Type, 'type_identifier'),
- (_identifier, Name.Class, 'type_identifier'),
- (r'\(', Punctuation, 'union_type'),
- ],
- 'union_type': [
- include('common'),
- (r'or' + _keyword_suffix, Keyword),
- (r'\)', Punctuation, ('#pop', 'type_null')),
- default('type'),
- ],
- 'type_identifier': [
- (r'<', Punctuation, 'type_list'),
- default(('#pop', 'type_null'))
- ],
- 'type_null': [
- (r'\?', Punctuation),
- default('#pop:2'),
- ],
- 'default_value': [
- include('common'),
- include('const_value'),
- (_string, String, '#pop'),
- (r'\[\s*\]', Punctuation, '#pop'),
- ],
- 'const_value': [
- include('common'),
- (words(('true', 'false', '-Infinity', 'Infinity', 'NaN', 'null'),
- suffix=_keyword_suffix), Keyword.Constant, '#pop'),
- (r'-?(?:(?:[0-9]+\.[0-9]*|[0-9]*\.[0-9]+)(?:[Ee][+-]?[0-9]+)?' +
- r'|[0-9]+[Ee][+-]?[0-9]+)', Number.Float, '#pop'),
- (r'-?[1-9][0-9]*', Number.Integer, '#pop'),
- (r'-?0[Xx][0-9A-Fa-f]+', Number.Hex, '#pop'),
- (r'-?0[0-7]*', Number.Oct, '#pop'),
- ],
- 'typedef': [
- include('common'),
- (_identifier, Name.Class),
- (r';', Punctuation, '#pop'),
- ],
- 'namespace_rest': [
- include('common'),
- (_identifier, Name.Namespace),
- (r'\{', Punctuation, 'namespace_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'namespace_body': [
- include('common'),
- (r'\[', Punctuation, 'extended_attributes'),
- (r'readonly' + _keyword_suffix, Keyword),
- (r'attribute' + _keyword_suffix,
- Keyword, ('attribute_rest', 'type')),
- (r'const' + _keyword_suffix, Keyword, ('const_rest', 'type')),
- (r'\}', Punctuation, '#pop'),
- default(('operation_rest', 'type')),
- ],
- 'interface_rest': [
- include('common'),
- (_identifier, Name.Class),
- (r':', Punctuation),
- (r'\{', Punctuation, 'interface_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'interface_body': [
- (words(('iterable', 'maplike', 'setlike'), suffix=_keyword_suffix),
- Keyword, 'iterable_maplike_setlike_rest'),
- (words(('setter', 'getter', 'creator', 'deleter', 'legacycaller',
- 'inherit', 'static', 'stringifier', 'jsonifier'),
- suffix=_keyword_suffix), Keyword),
- (r'serializer' + _keyword_suffix, Keyword, 'serializer_rest'),
- (r';', Punctuation),
- include('namespace_body'),
- ],
- 'attribute_rest': [
- include('common'),
- (_identifier, Name.Variable),
- (r';', Punctuation, '#pop'),
- ],
- 'const_rest': [
- include('common'),
- (_identifier, Name.Constant),
- (r'=', Punctuation, 'const_value'),
- (r';', Punctuation, '#pop'),
- ],
- 'operation_rest': [
- include('common'),
- (r';', Punctuation, '#pop'),
- default('operation'),
- ],
- 'operation': [
- include('common'),
- (_identifier, Name.Function),
- (r'\(', Punctuation, 'argument_list'),
- (r';', Punctuation, '#pop:2'),
- ],
- 'iterable_maplike_setlike_rest': [
- include('common'),
- (r'<', Punctuation, 'type_list'),
- (r';', Punctuation, '#pop'),
- ],
- 'type_list': [
- include('common'),
- (r',', Punctuation),
- (r'>', Punctuation, '#pop'),
- default('type'),
- ],
- 'serializer_rest': [
- include('common'),
- (r'=', Punctuation, 'serialization_pattern'),
- (r';', Punctuation, '#pop'),
- default('operation'),
- ],
- 'serialization_pattern': [
- include('common'),
- (_identifier, Name.Variable, '#pop'),
- (r'\{', Punctuation, 'serialization_pattern_map'),
- (r'\[', Punctuation, 'serialization_pattern_list'),
- ],
- 'serialization_pattern_map': [
- include('common'),
- (words(('getter', 'inherit', 'attribute'),
- suffix=_keyword_suffix), Keyword),
- (r',', Punctuation),
- (_identifier, Name.Variable),
- (r'\}', Punctuation, '#pop:2'),
- ],
- 'serialization_pattern_list': [
- include('common'),
- (words(('getter', 'attribute'), suffix=_keyword_suffix), Keyword),
- (r',', Punctuation),
- (_identifier, Name.Variable),
- (r']', Punctuation, '#pop:2'),
- ],
- 'enum_rest': [
- include('common'),
- (_identifier, Name.Class),
- (r'\{', Punctuation, 'enum_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'enum_body': [
- include('common'),
- (_string, String),
- (r',', Punctuation),
- (r'\}', Punctuation, '#pop'),
- ],
- 'callback_rest': [
- include('common'),
- (r'interface' + _keyword_suffix,
- Keyword, ('#pop', 'interface_rest')),
- (_identifier, Name.Class),
- (r'=', Punctuation, ('operation', 'type')),
- (r';', Punctuation, '#pop'),
- ],
- 'dictionary_rest': [
- include('common'),
- (_identifier, Name.Class),
- (r':', Punctuation),
- (r'\{', Punctuation, 'dictionary_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'dictionary_body': [
- include('common'),
- (r'\[', Punctuation, 'extended_attributes'),
- (r'required' + _keyword_suffix, Keyword),
- (r'\}', Punctuation, '#pop'),
- default(('dictionary_item', 'type')),
- ],
- 'dictionary_item': [
- include('common'),
- (_identifier, Name.Variable),
- (r'=', Punctuation, 'default_value'),
- (r';', Punctuation, '#pop'),
- ],
- 'implements_rest': [
- include('common'),
- (r'implements' + _keyword_suffix, Keyword),
- (_identifier, Name.Class),
- (r';', Punctuation, '#pop'),
- ],
- }
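A hedged usage sketch for the Web IDL lexer above (alias 'webidl'), assuming Pygments >= 2.6; it prints an ANSI-highlighted rendering of a small, illustrative interface:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

idl = (
    'interface Greeter {\n'
    '  readonly attribute DOMString name;\n'
    '  void greet(optional DOMString who);\n'
    '};\n'
)
print(highlight(idl, get_lexer_by_name('webidl'), TerminalFormatter()))
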
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/webmisc.py b/venv/lib/python3.11/site-packages/pygments/lexers/webmisc.py
deleted file mode 100644
index 787a8a6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/webmisc.py
+++ /dev/null
@@ -1,1010 +0,0 @@
-"""
- pygments.lexers.webmisc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for misc. web stuff.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- default, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Whitespace
-
-from pygments.lexers.css import _indentation, _starts_block
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.ruby import RubyLexer
-
-__all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
-
-
-class DuelLexer(RegexLexer):
- """
- Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
-
- .. versionadded:: 1.4
- """
-
- name = 'Duel'
- url = 'http://duelengine.org/'
- aliases = ['duel', 'jbst', 'jsonml+bst']
- filenames = ['*.duel', '*.jbst']
- mimetypes = ['text/x-duel', 'text/x-jbst']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#!:]?)(.*?)(%>)',
- bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
- (r'(<%\$)(.*?)(:)(.*?)(%>)',
- bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
- (r'(<%--)(.*?)(--%>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)',
- bygroups(using(HtmlLexer),
- using(JavascriptLexer), using(HtmlLexer))),
- (r'(.+?)(?=<)', using(HtmlLexer)),
- (r'.+', using(HtmlLexer)),
- ],
- }
-
-
-class XQueryLexer(ExtendedRegexLexer):
- """
- An XQuery lexer, parsing a stream and outputting the tokens needed to
- highlight xquery code.
-
- .. versionadded:: 1.4
- """
- name = 'XQuery'
- url = 'https://www.w3.org/XML/Query/'
- aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
- filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
- mimetypes = ['text/xquery', 'application/xquery']
-
- xquery_parse_state = []
-
- # FIX UNICODE LATER
- # ncnamestartchar = (
- # r"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
- # r"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
- # r"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
- # r"[\u10000-\uEFFFF]"
- # )
- ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
- # FIX UNICODE LATER
- # ncnamechar = ncnamestartchar + (r"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
- # r"[\u203F-\u2040]")
- ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
- ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
- pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
- pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
- pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
- prefixedname = "%s:%s" % (ncname, ncname)
- unprefixedname = ncname
- qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
-
- entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
- charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
-
- stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
- stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
-
- # FIX UNICODE LATER
- # elementcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
- # quotattrcontentchar = (r'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
- # r'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
- # aposattrcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]'
-
- # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
- # aposattrcontentchar
-    # #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
-
- flags = re.DOTALL | re.MULTILINE
-
- def punctuation_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def operator_root_callback(lexer, match, ctx):
- yield match.start(), Operator, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def popstate_tag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- if lexer.xquery_parse_state:
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- next_state = lexer.xquery_parse_state.pop()
- if next_state == 'occurrenceindicator':
- if re.match("[?*+]+", match.group(2)):
- yield match.start(), Punctuation, match.group(2)
- ctx.stack.append('operator')
- ctx.pos = match.end()
- else:
- ctx.stack.append('operator')
- ctx.pos = match.end(1)
- else:
- ctx.stack.append(next_state)
- ctx.pos = match.end(1)
-
- def popstate_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # if we have run out of our state stack, pop whatever is on the pygments
- # state stack
- if len(lexer.xquery_parse_state) == 0:
- ctx.stack.pop()
- if not ctx.stack:
- # make sure we have at least the root state on invalid inputs
- ctx.stack = ['root']
- elif len(ctx.stack) > 1:
- ctx.stack.append(lexer.xquery_parse_state.pop())
- else:
-            # I don't know if I'll need this, but just in case, default back to root
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_element_content_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('element_content')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.pos = match.end()
-
- def pushstate_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_order_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_map_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate_withmode(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Keyword, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('kindtest')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtestforpi')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('occurrenceindicator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_operator_root_construct_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Whitespace, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- cur_state = ctx.stack.pop()
- lexer.xquery_parse_state.append(cur_state)
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_operator_attribute_callback(lexer, match, ctx):
- yield match.start(), Name.Attribute, match.group(1)
- ctx.stack.append('operator')
- ctx.pos = match.end()
-
- tokens = {
- 'comment': [
- # xquery comments
- (r'[^:()]+', Comment),
- (r'\(:', Comment, '#push'),
- (r':\)', Comment, '#pop'),
- (r'[:()]', Comment),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'operator': [
- include('whitespace'),
- (r'(\})', popstate_callback),
- (r'\(:', Comment, 'comment'),
-
- (r'(\{)', pushstate_root_callback),
- (r'then|else|external|at|div|except', Keyword, 'root'),
- (r'order by', Keyword, 'root'),
- (r'group by', Keyword, 'root'),
- (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
- (r'and|or', Operator.Word, 'root'),
- (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
- Operator.Word, 'root'),
- (r'return|satisfies|to|union|where|count|preserve\s+strip',
- Keyword, 'root'),
- (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=|!)',
- operator_root_callback),
- (r'(::|:|;|\[|//|/|,)',
- punctuation_root_callback),
- (r'(castable|cast)(\s+)(as)\b',
- bygroups(Keyword, Whitespace, Keyword), 'singletype'),
- (r'(instance)(\s+)(of)\b',
- bygroups(Keyword, Whitespace, Keyword), 'itemtype'),
- (r'(treat)(\s+)(as)\b',
- bygroups(Keyword, Whitespace, Keyword), 'itemtype'),
- (r'(case)(\s+)(' + stringdouble + ')',
- bygroups(Keyword, Whitespace, String.Double), 'itemtype'),
- (r'(case)(\s+)(' + stringsingle + ')',
- bygroups(Keyword, Whitespace, String.Single), 'itemtype'),
- (r'(case|as)\b', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)',
- bygroups(Punctuation, Whitespace, Keyword), 'itemtype'),
- (r'\$', Name.Variable, 'varname'),
- (r'(for|let|previous|next)(\s+)(\$)',
- bygroups(Keyword, Whitespace, Name.Variable), 'varname'),
- (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword,
- Whitespace, Name.Variable),
- 'varname'),
- # (r'\)|\?|\]', Punctuation, '#push'),
- (r'\)|\?|\]', Punctuation),
- (r'(empty)(\s+)(greatest|least)',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'ascending|descending|default', Keyword, '#push'),
- (r'(allowing)(\s+)(empty)',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'external', Keyword),
- (r'(start|when|end)', Keyword, 'root'),
- (r'(only)(\s+)(end)', bygroups(Keyword, Whitespace, Keyword),
- 'root'),
- (r'collation', Keyword, 'uritooperator'),
-
- # eXist specific XQUF
- (r'(into|following|preceding|with)', Keyword, 'root'),
-
- # support for current context on rhs of Simple Map Operator
- (r'\.', Operator),
-
- # finally catch all string literals and stay in operator state
- (stringdouble, String.Double),
- (stringsingle, String.Single),
-
- (r'(catch)(\s*)', bygroups(Keyword, Whitespace), 'root'),
- ],
- 'uritooperator': [
- (stringdouble, String.Double, '#pop'),
- (stringsingle, String.Single, '#pop'),
- ],
- 'namespacedecl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'(at)(\s+)('+stringdouble+')',
- bygroups(Keyword, Whitespace, String.Double)),
- (r"(at)(\s+)("+stringsingle+')',
- bygroups(Keyword, Whitespace, String.Single)),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r',', Punctuation),
- (r'=', Operator),
- (r';', Punctuation, 'root'),
- (ncname, Name.Namespace),
- ],
- 'namespacekeyword': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double, 'namespacedecl'),
- (stringsingle, String.Single, 'namespacedecl'),
- (r'inherit|no-inherit', Keyword, 'root'),
- (r'namespace', Keyword, 'namespacedecl'),
- (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
- (r'preserve|no-preserve', Keyword),
- (r',', Punctuation),
- ],
- 'annotationname': [
- (r'\(:', Comment, 'comment'),
- (qname, Name.Decorator),
- (r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)),
- (r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)),
- (r'(\,)(\s+)(' + stringdouble + ')',
- bygroups(Punctuation, Text, String.Double)),
- (r'(\,)(\s+)(' + stringsingle + ')',
- bygroups(Punctuation, Text, String.Single)),
- (r'\)', Punctuation),
- (r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'),
- (r'(\s+)(variable)(\s+)(\$)',
- bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
- (r'(\s+)(function)(\s+)',
- bygroups(Text, Keyword.Declaration, Text), 'root')
- ],
- 'varname': [
- (r'\(:', Comment, 'comment'),
- (r'(' + qname + r')(\()?', bygroups(Name, Punctuation), 'operator'),
- ],
- 'singletype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (ncname + r'(:\*)', Name.Variable, 'operator'),
- (qname, Name.Variable, 'operator'),
- ],
- 'itemtype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\$', Name.Variable, 'varname'),
- (r'(void)(\s*)(\()(\s*)(\))',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
- (r'(element|attribute|schema-element|schema-attribute|comment|text|'
- r'node|binary|document-node|empty-sequence)(\s*)(\()',
- pushstate_occurrenceindicator_kindtest_callback),
- # Marklogic specific type?
- (r'(processing-instruction)(\s*)(\()',
- bygroups(Keyword, Text, Punctuation),
- ('occurrenceindicator', 'kindtestforpi')),
- (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation),
- 'occurrenceindicator'),
- (r'(\(\#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
- (r';', Punctuation, '#pop'),
- (r'then|else', Keyword, '#pop'),
- (r'(at)(\s+)(' + stringdouble + ')',
- bygroups(Keyword, Text, String.Double), 'namespacedecl'),
- (r'(at)(\s+)(' + stringsingle + ')',
- bygroups(Keyword, Text, String.Single), 'namespacedecl'),
- (r'except|intersect|in|is|return|satisfies|to|union|where|count',
- Keyword, 'root'),
- (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
- (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
- (r'external|at', Keyword, 'root'),
- (r'(stable)(\s+)(order)(\s+)(by)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
- (r'(castable|cast)(\s+)(as)',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
- (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
- (r'(case)(\s+)(' + stringdouble + ')',
- bygroups(Keyword, Text, String.Double), 'itemtype'),
- (r'(case)(\s+)(' + stringsingle + ')',
- bygroups(Keyword, Text, String.Single), 'itemtype'),
- (r'case|as', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
- (ncname + r':\*', Keyword.Type, 'operator'),
- (r'(function|map|array)(\()', bygroups(Keyword.Type, Punctuation)),
- (qname, Keyword.Type, 'occurrenceindicator'),
- ],
- 'kindtest': [
- (r'\(:', Comment, 'comment'),
- (r'\{', Punctuation, 'root'),
- (r'(\))([*+?]?)', popstate_kindtest_callback),
- (r'\*', Name, 'closekindtest'),
- (qname, Name, 'closekindtest'),
- (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
- ],
- 'kindtestforpi': [
- (r'\(:', Comment, 'comment'),
- (r'\)', Punctuation, '#pop'),
- (ncname, Name.Variable),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- ],
- 'closekindtest': [
- (r'\(:', Comment, 'comment'),
- (r'(\))', popstate_callback),
- (r',', Punctuation),
- (r'(\{)', pushstate_operator_root_callback),
- (r'\?', Punctuation),
- ],
- 'xml_comment': [
- (r'(-->)', popstate_xmlcomment_callback),
- (r'[^-]{1,2}', Literal),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
- ],
- 'processing_instruction': [
- (r'\s+', Text, 'processing_instruction_content'),
- (r'\?>', String.Doc, '#pop'),
- (pitarget, Name),
- ],
- 'processing_instruction_content': [
- (r'\?>', String.Doc, '#pop'),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
- ],
- 'cdata_section': [
- (r']]>', String.Doc, '#pop'),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
- ],
- 'start_tag': [
- include('whitespace'),
- (r'(/>)', popstate_tag_callback),
- (r'>', Name.Tag, 'element_content'),
- (r'"', Punctuation, 'quot_attribute_content'),
- (r"'", Punctuation, 'apos_attribute_content'),
- (r'=', Operator),
- (qname, Name.Tag),
- ],
- 'quot_attribute_content': [
- (r'"', Punctuation, 'start_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'""', Name.Attribute),
- (quotattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'apos_attribute_content': [
- (r"'", Punctuation, 'start_tag'),
- (r'\{', Punctuation, 'root'),
- (r"''", Name.Attribute),
- (aposattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'element_content': [
- (r'</', Name.Tag, 'end_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'(<!--)', pushstate_element_content_xmlcomment_callback),
- (r'(<\?)', pushstate_element_content_processing_instruction_callback),
- (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
- (r'(<)', pushstate_element_content_starttag_callback),
- (elementcontentchar, Literal),
- (entityref, Literal),
- (charref, Literal),
- (r'\{\{|\}\}', Literal),
- ],
- 'end_tag': [
- include('whitespace'),
- (r'(>)', popstate_tag_callback),
- (qname, Name.Tag),
- ],
- 'xmlspace_decl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'preserve|strip', Keyword, '#pop'),
- ],
- 'declareordering': [
- (r'\(:', Comment, 'comment'),
- include('whitespace'),
- (r'ordered|unordered', Keyword, '#pop'),
- ],
- 'xqueryversion': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r'encoding', Keyword),
- (r';', Punctuation, '#pop'),
- ],
- 'pragma': [
- (qname, Name.Variable, 'pragmacontents'),
- ],
- 'pragmacontents': [
- (r'#\)', Punctuation, 'operator'),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
- (r'(\s+)', Whitespace),
- ],
- 'occurrenceindicator': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\*|\?|\+', Operator, 'operator'),
- (r':=', Operator, 'root'),
- default('operator'),
- ],
- 'option': [
- include('whitespace'),
- (qname, Name.Variable, '#pop'),
- ],
- 'qname_braren': [
- include('whitespace'),
- (r'(\{)', pushstate_operator_root_callback),
- (r'(\()', Punctuation, 'root'),
- ],
- 'element_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'attribute_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'root': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
-
- # handle operator state
- # order on numbers matters - handle most complex first
- (r'\d+(\.\d*)?[eE][+-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+)[eE][+-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
- (r'(\d+)', Number.Integer, 'operator'),
- (r'(\.\.|\.|\))', Punctuation, 'operator'),
- (r'(declare)(\s+)(construction)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (r'(declare)(\s+)(default)(\s+)(order)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (r'(declare)(\s+)(context)(\s+)(item)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (ncname + r':\*', Name, 'operator'),
- (r'\*:'+ncname, Name.Tag, 'operator'),
- (r'\*', Name.Tag, 'operator'),
- (stringdouble, String.Double, 'operator'),
- (stringsingle, String.Single, 'operator'),
-
- (r'(\}|\])', popstate_callback),
-
- # NAMESPACE DECL
- (r'(declare)(\s+)(default)(\s+)(collation)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration,
- Whitespace, Keyword.Declaration)),
- (r'(module|declare)(\s+)(namespace)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration),
- 'namespacedecl'),
- (r'(declare)(\s+)(base-uri)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration),
- 'namespacedecl'),
-
- # NAMESPACE KEYWORD
- (r'(declare)(\s+)(default)(\s+)(element|function)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration,
- Whitespace, Keyword.Declaration),
- 'namespacekeyword'),
- (r'(import)(\s+)(schema|module)',
- bygroups(Keyword.Pseudo, Whitespace, Keyword.Pseudo),
- 'namespacekeyword'),
- (r'(declare)(\s+)(copy-namespaces)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration),
- 'namespacekeyword'),
-
- # VARNAMEs
- (r'(for|let|some|every)(\s+)(\$)',
- bygroups(Keyword, Whitespace, Name.Variable), 'varname'),
- (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword,
- Whitespace, Name.Variable),
- 'varname'),
- (r'\$', Name.Variable, 'varname'),
- (r'(declare)(\s+)(variable)(\s+)(\$)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration,
- Whitespace, Name.Variable),
- 'varname'),
-
- # ANNOTATED GLOBAL VARIABLES AND FUNCTIONS
- (r'(declare)(\s+)(\%)', bygroups(Keyword.Declaration, Whitespace,
- Name.Decorator),
- 'annotationname'),
-
- # ITEMTYPE
- (r'(\))(\s+)(as)', bygroups(Operator, Whitespace, Keyword),
- 'itemtype'),
-
- (r'(element|attribute|schema-element|schema-attribute|comment|'
- r'text|node|document-node|empty-sequence)(\s+)(\()',
- pushstate_operator_kindtest_callback),
-
- (r'(processing-instruction)(\s+)(\()',
- pushstate_operator_kindtestforpi_callback),
-
- (r'(<!--)', pushstate_operator_xmlcomment_callback),
-
- (r'(<\?)', pushstate_operator_processing_instruction_callback),
-
- (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
-
- # (r'</', Name.Tag, 'end_tag'),
- (r'(<)', pushstate_operator_starttag_callback),
-
- (r'(declare)(\s+)(boundary-space)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'xmlspace_decl'),
-
- (r'(validate)(\s+)(lax|strict)',
- pushstate_operator_root_validate_withmode),
- (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
- (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Whitespace,
- Punctuation)),
- (r'(switch)(\s*)(\()', bygroups(Keyword, Whitespace, Punctuation)),
- (r'(element|attribute|namespace)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
-
- (r'(document|text|processing-instruction|comment)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
- # ATTRIBUTE
- (r'(attribute)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Whitespace), 'attribute_qname'),
- # ELEMENT
- (r'(element)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Whitespace), 'element_qname'),
- # PROCESSING_INSTRUCTION
- (r'(processing-instruction|namespace)(\s+)(' + ncname + r')(\s*)(\{)',
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
- Punctuation),
- 'operator'),
-
- (r'(declare|define)(\s+)(function)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration)),
-
- (r'(\{|\[)', pushstate_operator_root_callback),
-
- (r'(unordered|ordered)(\s*)(\{)',
- pushstate_operator_order_callback),
-
- (r'(map|array)(\s*)(\{)',
- pushstate_operator_map_callback),
-
- (r'(declare)(\s+)(ordering)',
- bygroups(Keyword.Declaration, Whitespace, Keyword.Declaration),
- 'declareordering'),
-
- (r'(xquery)(\s+)(version)',
- bygroups(Keyword.Pseudo, Whitespace, Keyword.Pseudo),
- 'xqueryversion'),
-
- (r'(\(#)(\s*)', bygroups(Punctuation, Whitespace), 'pragma'),
-
- # sometimes return can occur in root state
- (r'return', Keyword),
-
- (r'(declare)(\s+)(option)', bygroups(Keyword.Declaration,
- Whitespace,
- Keyword.Declaration),
- 'option'),
-
- # URI LITERALS - single and double quoted
- (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
- (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
-
- (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
- bygroups(Keyword, Punctuation)),
- (r'(descendant|following-sibling|following|parent|preceding-sibling'
- r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
-
- (r'(if)(\s*)(\()', bygroups(Keyword, Whitespace, Punctuation)),
-
- (r'then|else', Keyword),
-
- # eXist specific XQUF
- (r'(update)(\s*)(insert|delete|replace|value|rename)',
- bygroups(Keyword, Whitespace, Keyword)),
- (r'(into|following|preceding|with)', Keyword),
-
- # Marklogic specific
- (r'(try)(\s*)', bygroups(Keyword, Whitespace), 'root'),
- (r'(catch)(\s*)(\()(\$)',
- bygroups(Keyword, Whitespace, Punctuation, Name.Variable),
- 'varname'),
-
-
- (r'(@'+qname+')', Name.Attribute, 'operator'),
- (r'(@'+ncname+')', Name.Attribute, 'operator'),
- (r'@\*:'+ncname, Name.Attribute, 'operator'),
- (r'@\*', Name.Attribute, 'operator'),
- (r'(@)', Name.Attribute, 'operator'),
-
- (r'//|/|\+|-|;|,|\(|\)', Punctuation),
-
- # STANDALONE QNAMES
- (qname + r'(?=\s*\{)', Name.Tag, 'qname_braren'),
- (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
- (r'(' + qname + ')(#)([0-9]+)', bygroups(Name.Function, Keyword.Type, Number.Integer)),
- (qname, Name.Tag, 'operator'),
- ]
- }
-
-
-class QmlLexer(RegexLexer):
- """
- For QML files.
-
- .. versionadded:: 1.6
- """
-
- # QML is based on JavaScript, so much of this is taken from the
- # JavascriptLexer above.
-
- name = 'QML'
- url = 'https://doc.qt.io/qt-6/qmlapplications.html'
- aliases = ['qml', 'qbs']
- filenames = ['*.qml', '*.qbs']
- mimetypes = ['application/x-qml', 'application/x-qt.qbs+qml']
-
- # pasted from JavascriptLexer, with some additions
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- # QML insertions
- (r'\bid\s*:\s*[A-Za-z][\w.]*', Keyword.Declaration,
- 'slashstartsregex'),
- (r'\b[A-Za-z][\w.]*\s*:', Keyword, 'slashstartsregex'),
-
- # the rest from JavascriptLexer
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
-
-class CirruLexer(RegexLexer):
- r"""
- * using ``()`` for expressions, but restricted to a single line
- * using ``""`` for strings, with ``\`` for escaping characters
- * using ``$`` as a folding operator
- * using ``,`` as an unfolding operator
- * using indentation for nested blocks
-
- .. versionadded:: 2.0
- """
-
- name = 'Cirru'
- url = 'http://cirru.org/'
- aliases = ['cirru']
- filenames = ['*.cirru']
- mimetypes = ['text/x-cirru']
- flags = re.MULTILINE
-
- tokens = {
- 'string': [
- (r'[^"\\\n]+', String),
- (r'\\', String.Escape, 'escape'),
- (r'"', String, '#pop'),
- ],
- 'escape': [
- (r'.', String.Escape, '#pop'),
- ],
- 'function': [
- (r'\,', Operator, '#pop'),
- (r'[^\s"()]+', Name.Function, '#pop'),
- (r'\)', Operator, '#pop'),
- (r'(?=\n)', Text, '#pop'),
- (r'\(', Operator, '#push'),
- (r'"', String, ('#pop', 'string')),
- (r'[ ]+', Text.Whitespace),
- ],
- 'line': [
- (r'(?<!\w)\$(?!\w)', Operator, 'function'),
- (r'\(', Operator, 'function'),
- (r'\)', Operator),
- (r'\n', Text, '#pop'),
- (r'"', String, 'string'),
- (r'[ ]+', Text.Whitespace),
- (r'[+-]?[\d.]+\b', Number),
- (r'[^\s"()]+', Name.Variable)
- ],
- 'root': [
- (r'^\n+', Text.Whitespace),
- default(('line', 'function')),
- ]
- }
-
-
-class SlimLexer(ExtendedRegexLexer):
- """
- For Slim markup.
-
- .. versionadded:: 2.0
- """
-
- name = 'Slim'
- aliases = ['slim']
- filenames = ['*.slim']
- mimetypes = ['text/x-slim']
-
- flags = re.IGNORECASE
- _dot = r'(?: \|\n(?=.* \|)|.)'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'([ \t]*==?)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- (r'[ \t]+[\w:-]+(?==)', Name.Attribute, 'html-attributes'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
- (r'(-)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- (r'[ \t]+\n', Punctuation, '#pop:2'),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(.*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'=', Punctuation),
- (r'"[^"]+"', using(RubyLexer), 'tag'),
- (r'\'[^\']+\'', using(RubyLexer), 'tag'),
- (r'\w+', Text, 'tag'),
- ],
-
- 'slim-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/wgsl.py b/venv/lib/python3.11/site-packages/pygments/lexers/wgsl.py
deleted file mode 100644
index f233421..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/wgsl.py
+++ /dev/null
@@ -1,407 +0,0 @@
-"""
- pygments.lexers.wgsl
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the WebGPU Shading Language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Comment, Operator, Keyword, Name, \
- Number, Punctuation, Whitespace
-from pygments import unistring as uni
-
-__all__ = ['WgslLexer']
-
-LF = '\\u000a'
-VT = '\\u000b'
-FF = '\\u000c'
-CR = '\\u000d'
-NextLine = '\\u0085'
-LineSep = '\\u2028'
-ParaSep = '\\u2029'
-LineEndCodePoints = [LF,VT,FF,CR,NextLine,LineSep,ParaSep]
-NotLineEndRE = '[^' + "".join(LineEndCodePoints) + ']'
-LineEndRE = '[' + "".join(LineEndCodePoints) + ']'
-
-# https://www.w3.org/TR/WGSL/#syntax-ident_pattern_token
-ident_pattern_token = '([{}][{}]+)|[{}]'.format(uni.xid_start,uni.xid_continue,uni.xid_start)
-
-
-class WgslLexer(RegexLexer):
- """
- Lexer for the WebGPU Shading Language.
-
- .. versionadded:: 2.15
- """
- name = 'WebGPU Shading Language'
- url = 'https://www.w3.org/TR/WGSL/'
- aliases = ['wgsl']
- filenames = ['*.wgsl']
- mimetypes = ['text/wgsl']
-
- # https://www.w3.org/TR/WGSL/#var-and-value
- keyword_decl = (words('var let const override'.split(),suffix=r'\b'), Keyword.Declaration)
- # https://www.w3.org/TR/WGSL/#keyword-summary
- keywords = (words("""
- alias
- break
- case
- const_assert
- continue
- continuing
- default
- diagnostic
- discard
- else
- enable
- false
- fn
- for
- if
- loop
- requires
- return
- struct
- switch
- true
- while
- """.split(), suffix=r'\b'), Keyword)
-
- # https://www.w3.org/TR/WGSL/#reserved-words
- keyword_reserved = (words("""
- NULL
- Self
- abstract
- active
- alignas
- alignof
- as
- asm
- asm_fragment
- async
- attribute
- auto
- await
- become
- binding_array
- cast
- catch
- class
- co_await
- co_return
- co_yield
- coherent
- column_major
- common
- compile
- compile_fragment
- concept
- const_cast
- consteval
- constexpr
- constinit
- crate
- debugger
- decltype
- delete
- demote
- demote_to_helper
- do
- dynamic_cast
- enum
- explicit
- export
- extends
- extern
- external
- fallthrough
- filter
- final
- finally
- friend
- from
- fxgroup
- get
- goto
- groupshared
- highp
- impl
- implements
- import
- inline
- instanceof
- interface
- layout
- lowp
- macro
- macro_rules
- match
- mediump
- meta
- mod
- module
- move
- mut
- mutable
- namespace
- new
- nil
- noexcept
- noinline
- nointerpolation
- noperspective
- null
- nullptr
- of
- operator
- package
- packoffset
- partition
- pass
- patch
- pixelfragment
- precise
- precision
- premerge
- priv
- protected
- pub
- public
- readonly
- ref
- regardless
- register
- reinterpret_cast
- require
- resource
- restrict
- self
- set
- shared
- sizeof
- smooth
- snorm
- static
- static_assert
- static_cast
- std
- subroutine
- super
- target
- template
- this
- thread_local
- throw
- trait
- try
- type
- typedef
- typeid
- typename
- typeof
- union
- unless
- unorm
- unsafe
- unsized
- use
- using
- varying
- virtual
- volatile
- wgsl
- where
- with
- writeonly
- yield
- """.split(), suffix=r'\b'), Keyword.Reserved)
-
- # https://www.w3.org/TR/WGSL/#predeclared-enumerants
- predeclared_enums = (words("""
- read write read_write
- function private workgroup uniform storage
- perspective linear flat
- center centroid sample
- vertex_index instance_index position front_facing frag_depth
- local_invocation_id local_invocation_index
- global_invocation_id workgroup_id num_workgroups
- sample_index sample_mask
- rgba8unorm
- rgba8snorm
- rgba8uint
- rgba8sint
- rgba16uint
- rgba16sint
- rgba16float
- r32uint
- r32sint
- r32float
- rg32uint
- rg32sint
- rg32float
- rgba32uint
- rgba32sint
- rgba32float
- bgra8unorm
- """.split(), suffix=r'\b'), Name.Builtin)
-
- # https://www.w3.org/TR/WGSL/#predeclared-types
- predeclared_types = (words("""
- bool
- f16
- f32
- i32
- sampler sampler_comparison
- texture_depth_2d
- texture_depth_2d_array
- texture_depth_cube
- texture_depth_cube_array
- texture_depth_multisampled_2d
- texture_external
- u32
- """.split(), suffix=r'\b'), Name.Builtin)
-
- # https://www.w3.org/TR/WGSL/#predeclared-types
- predeclared_type_generators = (words("""
- array
- atomic
- mat2x2
- mat2x3
- mat2x4
- mat3x2
- mat3x3
- mat3x4
- mat4x2
- mat4x3
- mat4x4
- ptr
- texture_1d
- texture_2d
- texture_2d_array
- texture_3d
- texture_cube
- texture_cube_array
- texture_multisampled_2d
- texture_storage_1d
- texture_storage_2d
- texture_storage_2d_array
- texture_storage_3d
- vec2
- vec3
- vec4
- """.split(), suffix=r'\b'), Name.Builtin)
-
- # Predeclared type aliases for vectors
- # https://www.w3.org/TR/WGSL/#vector-types
- predeclared_type_alias_vectors = (words("""
- vec2i vec3i vec4i
- vec2u vec3u vec4u
- vec2f vec3f vec4f
- vec2h vec3h vec4h
- """.split(), suffix=r'\b'), Name.Builtin)
-
- # Predeclared type aliases for matrices
- # https://www.w3.org/TR/WGSL/#matrix-types
- predeclared_type_alias_matrices = (words("""
- mat2x2f mat2x3f mat2x4f
- mat3x2f mat3x3f mat3x4f
- mat4x2f mat4x3f mat4x4f
- mat2x2h mat2x3h mat2x4h
- mat3x2h mat3x3h mat3x4h
- mat4x2h mat4x3h mat4x4h
- """.split(), suffix=r'\b'), Name.Builtin)
-
- tokens = {
- 'blankspace': [
- # https://www.w3.org/TR/WGSL/#blankspace
- (r'[\u0020\u0009\u000a\u000b\u000c\u000d\u0085\u200e\u200f\u2028\u2029]+', Whitespace),
- ],
- 'comments': [
- # Line ending comments
- # Match up CR/LF pair first.
- (r'//{}*{}{}'.format(NotLineEndRE,CR,LF), Comment.Single),
- (r'//{}*{}'.format(NotLineEndRE,LineEndRE), Comment.Single),
- (r'/\*', Comment.Multiline, 'block_comment'),
- ],
- 'attribute': [
- include('blankspace'),
- include('comments'),
- (ident_pattern_token, Name.Decorator,'#pop'),
- default('#pop'),
- ],
- 'root': [
- include('blankspace'),
- include('comments'),
-
- # Attributes.
- # https://www.w3.org/TR/WGSL/#attributes
- # Mark the '@' and the attribute name as a decorator.
- (r'@', Name.Decorator, 'attribute'),
-
- # Keywords
- (r'(true|false)\b', Keyword.Constant),
- keyword_decl,
- keywords,
- keyword_reserved,
-
- # Predeclared
- predeclared_enums,
- predeclared_types,
- predeclared_type_generators,
- predeclared_type_alias_vectors,
- predeclared_type_alias_matrices,
-
- # Decimal float literals
- # https://www.w3.org/TR/WGSL/#syntax-decimal_float_literal
- # 0, with type-specifying suffix.
- (r'0[fh]', Number.Float),
- # Other decimal integer, with type-specifying suffix.
- (r'[1-9][0-9]*[fh]', Number.Float),
- # Has decimal point, at least one digit after decimal.
- (r'[0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[fh]?', Number.Float),
- # Has decimal point, at least one digit before decimal.
- (r'[0-9]+\.[0-9]*([eE][+-]?[0-9]+)?[fh]?', Number.Float),
- # Has at least one digit, and has an exponent.
- (r'[0-9]+[eE][+-]?[0-9]+[fh]?', Number.Float),
-
- # Hex float literals
- # https://www.w3.org/TR/WGSL/#syntax-hex_float_literal
- (r'0[xX][0-9a-fA-F]*\.[0-9a-fA-F]+([pP][+-]?[0-9]+[fh]?)?', Number.Float),
- (r'0[xX][0-9a-fA-F]+\.[0-9a-fA-F]*([pP][+-]?[0-9]+[fh]?)?', Number.Float),
- (r'0[xX][0-9a-fA-F]+[pP][+-]?[0-9]+[fh]?', Number.Float),
-
- # Hexadecimal integer literals
- # https://www.w3.org/TR/WGSL/#syntax-hex_int_literal
- (r'0[xX][0-9a-fA-F]+[iu]?', Number.Hex),
- # Decimal integer literals
- # https://www.w3.org/TR/WGSL/#syntax-decimal_int_literal
- # We need two rules here because 01 is not valid.
- (r'[1-9][0-9]*[iu]?', Number.Integer),
- (r'0[iu]?', Number.Integer), # Must match last.
-
- # Operators and Punctuation
- (r'[{}()\[\],\.;:]', Punctuation),
- (r'->', Punctuation), # Return-type arrow
- (r'[+\-*/%&|<>^!~=]', Operator),
-
- # TODO: Treat context-dependent names specially
- # https://www.w3.org/TR/WGSL/#context-dependent-name
-
- # Identifiers
- (ident_pattern_token, Name),
-
- # TODO: template start and end tokens.
- # https://www.w3.org/TR/WGSL/#template-lists-sec
- ],
- 'block_comment': [
- # https://www.w3.org/TR/WGSL/#block-comment
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
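The WGSL rules above are a declarative token table; they only do something when driven through the usual Pygments API. A minimal sketch of that, assuming a Pygments install where this lexer is registered under its 'wgsl' alias (the shader snippet is invented purely for illustration):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    # Invented WGSL fragment; any valid source is tokenized the same way.
    source = "@fragment fn main() -> @location(0) vec4f { return vec4f(0.0, 0.5, 1.0, 1.0); }"

    lexer = get_lexer_by_name("wgsl")  # resolved through the alias declared above
    print(highlight(source, lexer, TerminalFormatter()))
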
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/whiley.py b/venv/lib/python3.11/site-packages/pygments/lexers/whiley.py
deleted file mode 100644
index bf707d2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/whiley.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
- pygments.lexers.whiley
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Whiley language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text
-
-__all__ = ['WhileyLexer']
-
-
-class WhileyLexer(RegexLexer):
- """
- Lexer for the Whiley programming language.
-
- .. versionadded:: 2.2
- """
- name = 'Whiley'
- url = 'http://whiley.org/'
- filenames = ['*.whiley']
- aliases = ['whiley']
- mimetypes = ['text/x-whiley']
-
- # See the language specification:
- # http://whiley.org/download/WhileyLanguageSpec.pdf
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Text),
-
- # Comments
- (r'//.*', Comment.Single),
- # don't parse empty comment as doc comment
- (r'/\*\*/', Comment.Multiline),
- (r'(?s)/\*\*.*?\*/', String.Doc),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
-
- # Keywords
- (words((
- 'if', 'else', 'while', 'for', 'do', 'return',
- 'switch', 'case', 'default', 'break', 'continue',
- 'requires', 'ensures', 'where', 'assert', 'assume',
- 'all', 'no', 'some', 'in', 'is', 'new',
- 'throw', 'try', 'catch', 'debug', 'skip', 'fail',
- 'finite', 'total'), suffix=r'\b'), Keyword.Reserved),
- (words((
- 'function', 'method', 'public', 'private', 'protected',
- 'export', 'native'), suffix=r'\b'), Keyword.Declaration),
- # "constant" & "type" are not keywords unless used in declarations
- (r'(constant|type)(\s+)([a-zA-Z_]\w*)(\s+)(is)\b',
- bygroups(Keyword.Declaration, Text, Name, Text, Keyword.Reserved)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(bool|byte|int|real|any|void)\b', Keyword.Type),
- # "from" is not a keyword unless used with import
- (r'(import)(\s+)(\*)([^\S\n]+)(from)\b',
- bygroups(Keyword.Namespace, Text, Punctuation, Text, Keyword.Namespace)),
- (r'(import)(\s+)([a-zA-Z_]\w*)([^\S\n]+)(from)\b',
- bygroups(Keyword.Namespace, Text, Name, Text, Keyword.Namespace)),
- (r'(package|import)\b', Keyword.Namespace),
-
- # standard library: https://github.com/Whiley/WhileyLibs/
- (words((
- # types defined in whiley.lang.Int
- 'i8', 'i16', 'i32', 'i64',
- 'u8', 'u16', 'u32', 'u64',
- 'uint', 'nat',
-
- # whiley.lang.Any
- 'toString'), suffix=r'\b'), Name.Builtin),
-
- # byte literal
- (r'[01]+b', Number.Bin),
-
- # decimal literal
- (r'[0-9]+\.[0-9]+', Number.Float),
- # match "1." but not ranges like "3..5"
- (r'[0-9]+\.(?!\.)', Number.Float),
-
- # integer literal
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # character literal
- (r"""'[^\\]'""", String.Char),
- (r"""(')(\\['"\\btnfr])(')""",
- bygroups(String.Char, String.Escape, String.Char)),
-
- # string literal
- (r'"', String, 'string'),
-
- # operators and punctuation
- (r'[{}()\[\],.;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?'
- # unicode operators
- r'\u2200\u2203\u2205\u2282\u2286\u2283\u2287'
- r'\u222A\u2229\u2264\u2265\u2208\u2227\u2228'
- r']', Operator),
-
- # identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\[btnfr]', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\.', String),
- (r'[^\\"]+', String),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/wowtoc.py b/venv/lib/python3.11/site-packages/pygments/lexers/wowtoc.py
deleted file mode 100644
index 6b96e65..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/wowtoc.py
+++ /dev/null
@@ -1,120 +0,0 @@
-"""
- pygments.lexers.wowtoc
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for World of Warcraft TOC files
-
- TOC files describe game addons.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Name, Text, Punctuation, String, Keyword
-
-__all__ = ["WoWTocLexer"]
-
-def _create_tag_line_pattern(inner_pattern, ignore_case=False):
- return ((r"(?i)" if ignore_case else r"")
- + r"^(##)( *)" # groups 1, 2
- + inner_pattern # group 3
- + r"( *)(:)( *)(.*?)( *)$") # groups 4, 5, 6, 7, 8
-
-
-def _create_tag_line_token(inner_pattern, inner_token, ignore_case=False):
- # This helper builds the tag-line rule for a specific type of tag; each tag type
- # differs only in its inner pattern and token, while everything else about a tag
- # line is the same.
- return (
- _create_tag_line_pattern(inner_pattern, ignore_case=ignore_case),
- bygroups(
- Keyword.Declaration,
- Text.Whitespace,
- inner_token,
- Text.Whitespace,
- Punctuation,
- Text.Whitespace,
- String,
- Text.Whitespace,
- ),
- )
-
-
-class WoWTocLexer(RegexLexer):
- """
- Lexer for World of Warcraft TOC files.
-
- .. versionadded:: 2.14
- """
-
- name = "World of Warcraft TOC"
- aliases = ["wowtoc"]
- filenames = ["*.toc"]
-
- tokens = {
- "root": [
- # official localized tags, Notes and Title
- # (normal part is insensitive, locale part is sensitive)
- _create_tag_line_token(
- r"((?:[nN][oO][tT][eE][sS]|[tT][iI][tT][lL][eE])-(?:ptBR|zhCN|"
- r"enCN|frFR|deDE|itIT|esMX|ptPT|koKR|ruRU|esES|zhTW|enTW|enGB|enUS))",
- Name.Builtin,
- ),
- # other official tags
- _create_tag_line_token(
- r"(Interface|Title|Notes|RequiredDeps|Dep[^: ]*|OptionalDeps|"
- r"LoadOnDemand|LoadWith|LoadManagers|SavedVariablesPerCharacter|"
- r"SavedVariables|DefaultState|Secure|Author|Version)",
- Name.Builtin,
- ignore_case=True,
- ),
- # user-defined tags
- _create_tag_line_token(
- r"(X-[^: ]*)",
- Name.Variable,
- ignore_case=True,
- ),
- # non-conforming tags, but still valid
- _create_tag_line_token(
- r"([^: ]*)",
- Name.Other,
- ),
-
- # Comments
- (r"^#.*$", Comment),
-
- # Addon Files
- (r"^.+$", Name),
- ]
- }
-
- def analyse_text(text):
- # At the time of writing, this file suffix conflicts with one of TeX's in
- # markup.py. TeX's analyse_text() appears to be definitive (binary) and does not
- # share any likeness to WoW TOCs, which means we won't have to compete with it by
- # arbitrary increments in score.
-
- result = 0
-
- # While not required, an almost certain marker of WoW TOCs is the Interface tag.
- # If this tag is omitted, players will need to opt in to loading the addon with
- # an options change ("Load out of date addons"). The value is also standardized:
- # `<major><minor><patch>`, with minor and patch being two-digit zero-padded.
- interface_pattern = _create_tag_line_pattern(r"(Interface)", ignore_case=True)
- match = re.search(interface_pattern, text)
- if match and re.match(r"(\d+)(\d{2})(\d{2})", match.group(7)):
- result += 0.8
-
- casefolded = text.casefold()
- # A Lua file listing is a good marker too, but probably conflicts with many
- # other lexers.
- if ".lua" in casefolded:
- result += 0.1
- # ditto for XML files, but they're less used in WoW TOCs
- if ".xml" in casefolded:
- result += 0.05
-
- return result
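A quick way to sanity-check the heuristic above (an illustrative sketch, assuming this module is importable as pygments.lexers.wowtoc; the TOC line is made up):

    from pygments.lexers.wowtoc import WoWTocLexer

    # A single, well-formed Interface line: the version string matches the
    # "<major><minor><patch>" shape, so the 0.8 bonus applies and nothing else fires.
    print(WoWTocLexer.analyse_text("## Interface: 100200"))   # expected: 0.8
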
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/wren.py b/venv/lib/python3.11/site-packages/pygments/lexers/wren.py
deleted file mode 100644
index ed4ddc7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/wren.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""
- pygments.lexers.wren
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Wren.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import include, RegexLexer, words
-from pygments.token import Whitespace, Punctuation, Keyword, Name, Comment, \
- Operator, Number, String, Error
-
-__all__ = ['WrenLexer']
-
-class WrenLexer(RegexLexer):
- """
- For Wren source code, version 0.4.0.
-
- .. versionadded:: 2.14.0
- """
- name = 'Wren'
- url = 'https://wren.io'
- aliases = ['wren']
- filenames = ['*.wren']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # Whitespace.
- (r'\s+', Whitespace),
- (r'[,\\\[\]{}]', Punctuation),
-
- # Really 'root', not '#push': in 'interpolation',
- # parentheses inside the interpolation expression are
- # Punctuation, not String.Interpol.
- (r'\(', Punctuation, 'root'),
- (r'\)', Punctuation, '#pop'),
-
- # Keywords.
- (words((
- 'as', 'break', 'class', 'construct', 'continue', 'else',
- 'for', 'foreign', 'if', 'import', 'return', 'static', 'super',
- 'this', 'var', 'while'), prefix = r'(?<!\.)',
- suffix = r'\b'), Keyword),
-
- (words((
- 'true', 'false', 'null'), prefix = r'(?<!\.)',
- suffix = r'\b'), Keyword.Constant),
-
- (words((
- 'in', 'is'), prefix = r'(?<!\.)',
- suffix = r'\b'), Operator.Word),
-
- # Comments.
- (r'/\*', Comment.Multiline, 'comment'), # Multiline, can nest.
- (r'//.*?$', Comment.Single), # Single line.
- (r'#.*?(\(.*?\))?$', Comment.Special), # Attribute or shebang.
-
- # Names and operators.
- (r'[!%&*+\-./:<=>?\\^|~]+', Operator),
- (r'[a-z][a-zA-Z_0-9]*', Name),
- (r'[A-Z][a-zA-Z_0-9]*', Name.Class),
- (r'__[a-zA-Z_0-9]*', Name.Variable.Class),
- (r'_[a-zA-Z_0-9]*', Name.Variable.Instance),
-
- # Numbers.
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+(\.\d+)?([eE][-+]?\d+)?', Number.Float),
-
- # Strings.
- (r'""".*?"""', String), # Raw string
- (r'"', String, 'string'), # Other string
- ],
- 'comment': [
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'([^*/]|\*(?!/)|/(?!\*))+', Comment.Multiline),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\[\\%"0abefnrtv]', String.Escape), # Escape.
- (r'\\x[a-fA-F0-9]{2}', String.Escape), # Byte escape.
- (r'\\u[a-fA-F0-9]{4}', String.Escape), # Unicode escape.
- (r'\\U[a-fA-F0-9]{8}', String.Escape), # Long Unicode escape.
-
- (r'%\(', String.Interpol, 'interpolation'),
- (r'[^\\"%]+', String), # All remaining characters.
- ],
- 'interpolation': [
- # redefine closing paren to be String.Interpol
- (r'\)', String.Interpol, '#pop'),
- include('root'),
- ],
- }
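The note above about pushing 'root' rather than '#push' is easiest to see on a string containing an interpolation: the "%(" and its matching ")" come out as String.Interpol, while parentheses inside the expression stay plain Punctuation. A small sketch, assuming the module is importable as pygments.lexers.wren:

    from pygments.lexers.wren import WrenLexer

    # Tokenize a Wren string containing an interpolated expression.
    for token_type, value in WrenLexer().get_tokens('"sum: %( (1 + 2) )"'):
        print(token_type, repr(value))

    # The '%(' and the ')' that closes the interpolation print as
    # Token.Literal.String.Interpol; the inner '(' and ')' print as Token.Punctuation.
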
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/x10.py b/venv/lib/python3.11/site-packages/pygments/lexers/x10.py
deleted file mode 100644
index c125b53..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/x10.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
- pygments.lexers.x10
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the X10 programming language.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Keyword, String
-
-__all__ = ['X10Lexer']
-
-
-class X10Lexer(RegexLexer):
- """
- For the X10 language.
-
- .. versionadded:: 2.2
- """
-
- name = 'X10'
- url = 'http://x10-lang.org/'
- aliases = ['x10', 'xten']
- filenames = ['*.x10']
- mimetypes = ['text/x-x10']
-
- keywords = (
- 'as', 'assert', 'async', 'at', 'athome', 'ateach', 'atomic',
- 'break', 'case', 'catch', 'class', 'clocked', 'continue',
- 'def', 'default', 'do', 'else', 'final', 'finally', 'finish',
- 'for', 'goto', 'haszero', 'here', 'if', 'import', 'in',
- 'instanceof', 'interface', 'isref', 'new', 'offer',
- 'operator', 'package', 'return', 'struct', 'switch', 'throw',
- 'try', 'type', 'val', 'var', 'when', 'while'
- )
-
- types = (
- 'void',  # trailing comma keeps this a tuple rather than a plain string
- )
-
- values = (
- 'false', 'null', 'self', 'super', 'this', 'true'
- )
-
- modifiers = (
- 'abstract', 'extends', 'implements', 'native', 'offers',
- 'private', 'property', 'protected', 'public', 'static',
- 'throws', 'transient'
- )
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
- (r'\b(%s)\b' % '|'.join(values), Keyword.Constant),
- (r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'.', Text)
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/xorg.py b/venv/lib/python3.11/site-packages/pygments/lexers/xorg.py
deleted file mode 100644
index 67878c3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/xorg.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
- pygments.lexers.xorg
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Xorg configs.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, String, Name, Text
-
-__all__ = ['XorgLexer']
-
-
-class XorgLexer(RegexLexer):
- """Lexer for xorg.conf files."""
- name = 'Xorg'
- url = 'https://www.x.org/wiki/'
- aliases = ['xorg.conf']
- filenames = ['xorg.conf']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*$', Comment),
-
- (r'((?:Sub)?Section)(\s+)("\w+")',
- bygroups(String.Escape, Text, String.Escape)),
- (r'(End(?:Sub)?Section)', String.Escape),
-
- (r'(\w+)(\s+)([^\n#]+)',
- bygroups(Name.Builtin, Text, Name.Constant)),
- ],
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/yang.py b/venv/lib/python3.11/site-packages/pygments/lexers/yang.py
deleted file mode 100644
index 866c01d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/yang.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
- pygments.lexers.yang
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the YANG 1.1 modeling language. See :rfc:`7950`.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Token, Name, String, Comment, Number
-
-__all__ = ['YangLexer']
-
-
-class YangLexer(RegexLexer):
- """
- Lexer for YANG, based on RFC7950.
-
- .. versionadded:: 2.7
- """
- name = 'YANG'
- url = 'https://tools.ietf.org/html/rfc7950/'
- aliases = ['yang']
- filenames = ['*.yang']
- mimetypes = ['application/yang']
-
- # Keywords from RFC 7950; oriented at BNF style
- TOP_STMTS_KEYWORDS = ("module", "submodule")
- MODULE_HEADER_STMT_KEYWORDS = ("belongs-to", "namespace", "prefix", "yang-version")
- META_STMT_KEYWORDS = ("contact", "description", "organization",
- "reference", "revision")
- LINKAGE_STMTS_KEYWORDS = ("import", "include", "revision-date")
- BODY_STMT_KEYWORDS = ("action", "argument", "augment", "deviation",
- "extension", "feature", "grouping", "identity",
- "if-feature", "input", "notification", "output",
- "rpc", "typedef")
- DATA_DEF_STMT_KEYWORDS = ("anydata", "anyxml", "case", "choice",
- "config", "container", "deviate", "leaf",
- "leaf-list", "list", "must", "presence",
- "refine", "uses", "when")
- TYPE_STMT_KEYWORDS = ("base", "bit", "default", "enum", "error-app-tag",
- "error-message", "fraction-digits", "length",
- "max-elements", "min-elements", "modifier",
- "ordered-by", "path", "pattern", "position",
- "range", "require-instance", "status", "type",
- "units", "value", "yin-element")
- LIST_STMT_KEYWORDS = ("key", "mandatory", "unique")
-
- #RFC7950 other keywords
- CONSTANTS_KEYWORDS = ("add", "current", "delete", "deprecated", "false",
- "invert-match", "max", "min", "not-supported",
- "obsolete", "replace", "true", "unbounded", "user")
-
- #RFC7950 Built-In Types
- TYPES = ("binary", "bits", "boolean", "decimal64", "empty", "enumeration",
- "identityref", "instance-identifier", "int16", "int32", "int64",
- "int8", "leafref", "string", "uint16", "uint32", "uint64",
- "uint8", "union")
-
- suffix_re_pattern = r'(?=[^\w\-:])'
-
- tokens = {
- 'comments': [
- (r'[^*/]', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[*/]', Comment),
- ],
- "root": [
- (r'\s+', Text.Whitespace),
- (r'[{};]+', Token.Punctuation),
- (r'(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])', Token.Operator),
-
- (r'"(?:\\"|[^"])*?"', String.Double),
- (r"'(?:\\'|[^'])*?'", String.Single),
-
- (r'/\*', Comment, 'comments'),
- (r'//.*?$', Comment),
-
- #match BNF stmt for `node-identifier` with [ prefix ":"]
- (r'(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])',
- bygroups(Name.Namespace, Token.Punctuation, Name.Variable)),
-
- #match BNF stmt `date-arg-str`
- (r'([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s{};])', Name.Label),
- (r'([0-9]+\.[0-9]+)(?=[\s{};])', Number.Float),
- (r'([0-9]+)(?=[\s{};])', Number.Integer),
-
- (words(TOP_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(MODULE_HEADER_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(META_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(LINKAGE_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(BODY_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(DATA_DEF_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(TYPE_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(LIST_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(TYPES, suffix=suffix_re_pattern), Name.Class),
- (words(CONSTANTS_KEYWORDS, suffix=suffix_re_pattern), Name.Class),
-
- (r'[^;{}\s\'"]+', Name.Variable),
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/yara.py b/venv/lib/python3.11/site-packages/pygments/lexers/yara.py
deleted file mode 100644
index 1a84e4a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/yara.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
- pygments.lexers.yara
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for YARA.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, String, Name, Text, Punctuation, \
- Operator, Keyword, Whitespace, Number
-
-__all__ = ['YaraLexer']
-
-
-class YaraLexer(RegexLexer):
- """
- For YARA rules
-
- .. versionadded:: 2.16
- """
-
- name = 'YARA'
- url = 'https://virustotal.github.io/yara/'
- aliases = ['yara', 'yar']
- filenames = ['*.yar']
- mimetypes = ['text/x-yara']
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'//.*?$', Comment.Single),
- (r'\#.*?$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (words(('rule', 'private', 'global', 'import', 'include'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Declaration),
- (words(('strings', 'condition', 'meta'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words(('ascii', 'at', 'base64', 'base64wide', 'condition',
- 'contains', 'endswith', 'entrypoint', 'filesize', 'for',
- 'fullword', 'icontains', 'iendswith', 'iequals', 'in',
- 'include', 'int16', 'int16be', 'int32', 'int32be', 'int8',
- 'int8be', 'istartswith', 'matches', 'meta', 'nocase',
- 'none', 'of', 'startswith', 'strings', 'them', 'uint16',
- 'uint16be', 'uint32', 'uint32be', 'uint8', 'uint8be',
- 'wide', 'xor', 'defined'),
- prefix=r'\b', suffix=r'\b'),
- Name.Builtin),
- (r'(true|false)\b', Keyword.Constant),
- (r'(and|or|not|any|all)\b', Operator.Word),
- (r'(\$\w+)', Name.Variable),
- (r'"[^"]*"', String.Double),
- (r'\'[^\']*\'', String.Single),
- (r'\{.*?\}$', Number.Hex),
- (r'(/.*?/)', String.Regex),
- (r'[a-z_]\w*', Name),
- (r'[$(){}[\].?+*|]', Punctuation),
- (r'[:=,;]', Punctuation),
- (r'.', Text)
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/zig.py b/venv/lib/python3.11/site-packages/pygments/lexers/zig.py
deleted file mode 100644
index fad3b79..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/zig.py
+++ /dev/null
@@ -1,124 +0,0 @@
-"""
- pygments.lexers.zig
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Zig.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['ZigLexer']
-
-
-class ZigLexer(RegexLexer):
- """
- Lexer for the Zig language.
-
- grammar: https://ziglang.org/documentation/master/#Grammar
- """
- name = 'Zig'
- url = 'http://www.ziglang.org'
- aliases = ['zig']
- filenames = ['*.zig']
- mimetypes = ['text/zig']
-
- type_keywords = (
- words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type',
- 'anyerror', 'promise', 'i0', 'u0', 'isize', 'usize', 'comptime_int',
- 'comptime_float', 'c_short', 'c_ushort', 'c_int', 'c_uint', 'c_long',
- 'c_ulong', 'c_longlong', 'c_ulonglong', 'c_longdouble', 'c_void',
- 'i8', 'u8', 'i16', 'u16', 'i32', 'u32', 'i64', 'u64', 'i128',
- 'u128'), suffix=r'\b'),
- Keyword.Type)
-
- storage_keywords = (
- words(('const', 'var', 'extern', 'packed', 'export', 'pub', 'noalias',
- 'inline', 'comptime', 'nakedcc', 'stdcallcc', 'volatile', 'allowzero',
- 'align', 'linksection', 'threadlocal'), suffix=r'\b'),
- Keyword.Reserved)
-
- structure_keywords = (
- words(('struct', 'enum', 'union', 'error'), suffix=r'\b'),
- Keyword)
-
- statement_keywords = (
- words(('break', 'return', 'continue', 'asm', 'defer', 'errdefer',
- 'unreachable', 'try', 'catch', 'async', 'await', 'suspend',
- 'resume', 'cancel'), suffix=r'\b'),
- Keyword)
-
- conditional_keywords = (
- words(('if', 'else', 'switch', 'and', 'or', 'orelse'), suffix=r'\b'),
- Keyword)
-
- repeat_keywords = (
- words(('while', 'for'), suffix=r'\b'),
- Keyword)
-
- other_keywords = (
- words(('fn', 'usingnamespace', 'test'), suffix=r'\b'),
- Keyword)
-
- constant_keywords = (
- words(('true', 'false', 'null', 'undefined'), suffix=r'\b'),
- Keyword.Constant)
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
-
- # Keywords
- statement_keywords,
- storage_keywords,
- structure_keywords,
- repeat_keywords,
- type_keywords,
- constant_keywords,
- conditional_keywords,
- other_keywords,
-
- # Floats
- (r'0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?', Number.Float),
- (r'0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+', Number.Float),
- (r'[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?', Number.Float),
- (r'[0-9]+\.?[eE][-+]?[0-9]+', Number.Float),
-
- # Integers
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # Identifier
- (r'@[a-zA-Z_]\w*', Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
-
- # Characters
- (r'\'\\\'\'', String.Escape),
- (r'\'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'',
- String.Escape),
- (r'\'[^\\\']\'', String),
-
- # Strings
- (r'\\\\[^\n]*', String.Heredoc),
- (r'c\\\\[^\n]*', String.Heredoc),
- (r'c?"', String, 'string'),
-
- # Operators, Punctuation
- (r'[+%=><|^!?/\-*&~:]', Operator),
- (r'[{}()\[\],.;]', Punctuation)
- ],
- 'string': [
- (r'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])',
- String.Escape),
- (r'[^\\"\n]+', String),
- (r'"', String, '#pop')
- ]
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/modeline.py b/venv/lib/python3.11/site-packages/pygments/modeline.py
deleted file mode 100644
index 7b6f6a3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/modeline.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
- pygments.modeline
- ~~~~~~~~~~~~~~~~~
-
- A simple modeline parser (based on pymodeline).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-__all__ = ['get_filetype_from_buffer']
-
-
-modeline_re = re.compile(r'''
- (?: vi | vim | ex ) (?: [<=>]? \d* )? :
- .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
-''', re.VERBOSE)
-
-
-def get_filetype_from_line(l):
- m = modeline_re.search(l)
- if m:
- return m.group(1)
-
-
-def get_filetype_from_buffer(buf, max_lines=5):
- """
- Scan the buffer for modelines and return filetype if one is found.
- """
- lines = buf.splitlines()
- for l in lines[-1:-max_lines-1:-1]:
- ret = get_filetype_from_line(l)
- if ret:
- return ret
- for i in range(max_lines, -1, -1):
- if i < len(lines):
- ret = get_filetype_from_line(lines[i])
- if ret:
- return ret
-
- return None
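In short, the scan covers only the last and first few lines of the buffer, which is where editors accept modelines. A minimal usage sketch, assuming the module is importable as pygments.modeline:

    from pygments.modeline import get_filetype_from_buffer

    buf = "#!/bin/sh\necho hello\n# vim: set ft=sh :\n"
    print(get_filetype_from_buffer(buf))   # prints: sh
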
diff --git a/venv/lib/python3.11/site-packages/pygments/plugin.py b/venv/lib/python3.11/site-packages/pygments/plugin.py
deleted file mode 100644
index 0de47ba..0000000
--- a/venv/lib/python3.11/site-packages/pygments/plugin.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""
- pygments.plugin
- ~~~~~~~~~~~~~~~
-
- Pygments plugin interface. By default, this tries to use
- ``importlib.metadata``, which has been in the Python standard
- library since Python 3.8, or its ``importlib_metadata``
- backport for earlier versions of Python. It falls back on
- ``pkg_resources`` if not found. Finally, if ``pkg_resources``
- is not found either, no plugins are loaded at all.
-
- lexer plugins::
-
- [pygments.lexers]
- yourlexer = yourmodule:YourLexer
-
- formatter plugins::
-
- [pygments.formatters]
- yourformatter = yourformatter:YourFormatter
- /.ext = yourformatter:YourFormatter
-
- As you can see, you can define extensions for the formatter
- with a leading slash.
-
- syntax plugins::
-
- [pygments.styles]
- yourstyle = yourstyle:YourStyle
-
- filter plugin::
-
- [pygments.filter]
- yourfilter = yourfilter:YourFilter
-
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-LEXER_ENTRY_POINT = 'pygments.lexers'
-FORMATTER_ENTRY_POINT = 'pygments.formatters'
-STYLE_ENTRY_POINT = 'pygments.styles'
-FILTER_ENTRY_POINT = 'pygments.filters'
-
-
-def iter_entry_points(group_name):
- try:
- from importlib.metadata import entry_points
- except ImportError:
- try:
- from importlib_metadata import entry_points
- except ImportError:
- try:
- from pkg_resources import iter_entry_points
- except (ImportError, OSError):
- return []
- else:
- return iter_entry_points(group_name)
- groups = entry_points()
- if hasattr(groups, 'select'):
- # New interface in Python 3.10 and newer versions of the
- # importlib_metadata backport.
- return groups.select(group=group_name)
- else:
- # Older interface, deprecated in Python 3.10 and recent
- # importlib_metadata, but we need it in Python 3.8 and 3.9.
- return groups.get(group_name, [])
-
-
-def find_plugin_lexers():
- for entrypoint in iter_entry_points(LEXER_ENTRY_POINT):
- yield entrypoint.load()
-
-
-def find_plugin_formatters():
- for entrypoint in iter_entry_points(FORMATTER_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_styles():
- for entrypoint in iter_entry_points(STYLE_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_filters():
- for entrypoint in iter_entry_points(FILTER_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
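To make the entry-point groups described in the module docstring concrete: a third-party package registers its classes in its packaging metadata, and the find_plugin_* generators above pick them up once the package is installed. A hedged sketch using setuptools; the package, module, and class names ("mylexer-pkg", mylexer, MyLexer, MyStyle) are invented for illustration:

    # setup.py of a hypothetical plugin package
    from setuptools import setup

    setup(
        name="mylexer-pkg",
        version="0.1",
        py_modules=["mylexer"],
        entry_points={
            # Group names correspond to the *_ENTRY_POINT constants above.
            "pygments.lexers": ["mylexer = mylexer:MyLexer"],
            "pygments.styles": ["mystyle = mylexer:MyStyle"],
        },
    )

After installation, find_plugin_lexers() yields MyLexer without any change to Pygments itself.
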
diff --git a/venv/lib/python3.11/site-packages/pygments/regexopt.py b/venv/lib/python3.11/site-packages/pygments/regexopt.py
deleted file mode 100644
index 45223ec..0000000
--- a/venv/lib/python3.11/site-packages/pygments/regexopt.py
+++ /dev/null
@@ -1,91 +0,0 @@
-"""
- pygments.regexopt
- ~~~~~~~~~~~~~~~~~
-
- An algorithm that generates optimized regexes for matching long lists of
- literal strings.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from re import escape
-from os.path import commonprefix
-from itertools import groupby
-from operator import itemgetter
-
-CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
-FIRST_ELEMENT = itemgetter(0)
-
-
-def make_charset(letters):
- return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
-
-
-def regex_opt_inner(strings, open_paren):
- """Return a regex that matches any string in the sorted list of strings."""
- close_paren = open_paren and ')' or ''
- # print strings, repr(open_paren)
- if not strings:
- # print '-> nothing left'
- return ''
- first = strings[0]
- if len(strings) == 1:
- # print '-> only 1 string'
- return open_paren + escape(first) + close_paren
- if not first:
- # print '-> first string empty'
- return open_paren + regex_opt_inner(strings[1:], '(?:') \
- + '?' + close_paren
- if len(first) == 1:
- # multiple one-char strings? make a charset
- oneletter = []
- rest = []
- for s in strings:
- if len(s) == 1:
- oneletter.append(s)
- else:
- rest.append(s)
- if len(oneletter) > 1: # do we have more than one oneletter string?
- if rest:
- # print '-> 1-character + rest'
- return open_paren + regex_opt_inner(rest, '') + '|' \
- + make_charset(oneletter) + close_paren
- # print '-> only 1-character'
- return open_paren + make_charset(oneletter) + close_paren
- prefix = commonprefix(strings)
- if prefix:
- plen = len(prefix)
- # we have a prefix for all strings
- # print '-> prefix:', prefix
- return open_paren + escape(prefix) \
- + regex_opt_inner([s[plen:] for s in strings], '(?:') \
- + close_paren
- # is there a suffix?
- strings_rev = [s[::-1] for s in strings]
- suffix = commonprefix(strings_rev)
- if suffix:
- slen = len(suffix)
- # print '-> suffix:', suffix[::-1]
- return open_paren \
- + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
- + escape(suffix[::-1]) + close_paren
- # recurse on common 1-string prefixes
- # print '-> last resort'
- return open_paren + \
- '|'.join(regex_opt_inner(list(group[1]), '')
- for group in groupby(strings, lambda s: s[0] == first[0])) \
- + close_paren
-
-
-def regex_opt(strings, prefix='', suffix=''):
- """Return a compiled regex that matches any string in the given list.
-
- The strings to match must be literal strings, not regexes. They will be
- regex-escaped.
-
- *prefix* and *suffix* are pre- and appended to the final regex.
- """
- strings = sorted(strings)
- return prefix + regex_opt_inner(strings, '(') + suffix
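A short usage sketch of the optimizer (assuming the module is importable as pygments.regexopt); instead of asserting the exact pattern string, it just checks that the result matches exactly the listed words:

    import re
    from pygments.regexopt import regex_opt

    # Collapse a word list into one factored alternation, anchored with \b.
    pattern = regex_opt(['if', 'in', 'import', 'include'], prefix=r'\b', suffix=r'\b')
    rx = re.compile(pattern)

    print(pattern)                        # one alternation sharing the common 'i' prefix
    print(bool(rx.fullmatch('include')))  # True
    print(bool(rx.fullmatch('inc')))      # False: only the listed words match
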
diff --git a/venv/lib/python3.11/site-packages/pygments/scanner.py b/venv/lib/python3.11/site-packages/pygments/scanner.py
deleted file mode 100644
index 32a2f30..0000000
--- a/venv/lib/python3.11/site-packages/pygments/scanner.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
- pygments.scanner
- ~~~~~~~~~~~~~~~~
-
- This library implements a regex based scanner. Some languages
- like Pascal are easy to parse but have some keywords that
- depend on the context. Because of this, it's impossible to lex
- them just by using a regular expression lexer like the
- `RegexLexer`.
-
- Have a look at the `DelphiLexer` to get an idea of how to use
- this scanner.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-
-class EndOfText(RuntimeError):
- """
- Raised when the end of the text is reached and the user
- tries to call a match function.
- """
-
-
-class Scanner:
- """
- Simple scanner
-
- All method patterns are regular expression strings (not
- compiled expressions!)
- """
-
- def __init__(self, text, flags=0):
- """
- :param text: The text which should be scanned
- :param flags: default regular expression flags
- """
- self.data = text
- self.data_length = len(text)
- self.start_pos = 0
- self.pos = 0
- self.flags = flags
- self.last = None
- self.match = None
- self._re_cache = {}
-
- def eos(self):
- """`True` if the scanner reached the end of text."""
- return self.pos >= self.data_length
- eos = property(eos, doc=eos.__doc__)
-
- def check(self, pattern):
- """
- Apply `pattern` on the current position and return
- the match object. (Doesn't touch pos). Use this for
- lookahead.
- """
- if self.eos:
- raise EndOfText()
- if pattern not in self._re_cache:
- self._re_cache[pattern] = re.compile(pattern, self.flags)
- return self._re_cache[pattern].match(self.data, self.pos)
-
- def test(self, pattern):
- """Apply a pattern on the current position and check
- if it matches. Doesn't touch pos.
- """
- return self.check(pattern) is not None
-
- def scan(self, pattern):
- """
- Scan the text for the given pattern and update pos/match
- and related fields. The return value is a boolean that
- indicates if the pattern matched. The matched value is
- stored on the instance as ``match``, the last value is
- stored as ``last``. ``start_pos`` is the position of the
- pointer before the pattern was matched, ``pos`` is the
- end position.
- """
- if self.eos:
- raise EndOfText()
- if pattern not in self._re_cache:
- self._re_cache[pattern] = re.compile(pattern, self.flags)
- self.last = self.match
- m = self._re_cache[pattern].match(self.data, self.pos)
- if m is None:
- return False
- self.start_pos = m.start()
- self.pos = m.end()
- self.match = m.group()
- return True
-
- def get_char(self):
- """Scan exactly one char."""
- self.scan('.')
-
- def __repr__(self):
- return '<%s %d/%d>' % (
- self.__class__.__name__,
- self.pos,
- self.data_length
- )
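A short sketch of driving the scanner by hand (assuming the module is importable as pygments.scanner): scan() consumes input and updates pos/match, while test() is pure lookahead.

    from pygments.scanner import Scanner

    s = Scanner("program Demo;")
    s.scan(r'\w+')             # consumes 'program'
    print(s.match, s.pos)      # prints: program 7
    print(s.test(r'\s+'))      # lookahead only -> True, pos stays at 7
    s.scan(r'\s+')             # now consume the whitespace
    print(s.eos)               # False: 'Demo;' is still unread
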
diff --git a/venv/lib/python3.11/site-packages/pygments/sphinxext.py b/venv/lib/python3.11/site-packages/pygments/sphinxext.py
deleted file mode 100644
index f935688..0000000
--- a/venv/lib/python3.11/site-packages/pygments/sphinxext.py
+++ /dev/null
@@ -1,239 +0,0 @@
-"""
- pygments.sphinxext
- ~~~~~~~~~~~~~~~~~~
-
- Sphinx extension to generate automatic documentation of lexers,
- formatters and filters.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-
-from docutils import nodes
-from docutils.statemachine import ViewList
-from docutils.parsers.rst import Directive
-from sphinx.util.nodes import nested_parse_with_titles
-
-
-MODULEDOC = '''
-.. module:: %s
-
-%s
-%s
-'''
-
-LEXERDOC = '''
-.. class:: %s
-
- :Short names: %s
- :Filenames: %s
- :MIME types: %s
-
- %s
-
-'''
-
-FMTERDOC = '''
-.. class:: %s
-
- :Short names: %s
- :Filenames: %s
-
- %s
-
-'''
-
-FILTERDOC = '''
-.. class:: %s
-
- :Name: %s
-
- %s
-
-'''
-
-
-class PygmentsDoc(Directive):
- """
- A directive to collect all lexers/formatters/filters and generate
- autoclass directives for them.
- """
- has_content = False
- required_arguments = 1
- optional_arguments = 0
- final_argument_whitespace = False
- option_spec = {}
-
- def run(self):
- self.filenames = set()
- if self.arguments[0] == 'lexers':
- out = self.document_lexers()
- elif self.arguments[0] == 'formatters':
- out = self.document_formatters()
- elif self.arguments[0] == 'filters':
- out = self.document_filters()
- elif self.arguments[0] == 'lexers_overview':
- out = self.document_lexers_overview()
- else:
- raise Exception('invalid argument for "pygmentsdoc" directive')
- node = nodes.compound()
- vl = ViewList(out.split('\n'), source='')
- nested_parse_with_titles(self.state, vl, node)
- for fn in self.filenames:
- self.state.document.settings.record_dependencies.add(fn)
- return node.children
-
- def document_lexers_overview(self):
- """Generate a tabular overview of all lexers.
-
- The columns are the lexer name, the extensions handled by this lexer
- (or "None"), the aliases and a link to the lexer class."""
- from pygments.lexers._mapping import LEXERS
- import pygments.lexers
- out = []
-
- table = []
-
- def format_link(name, url):
- if url:
- return f'`{name} <{url}>`_'
- return name
-
- for classname, data in sorted(LEXERS.items(), key=lambda x: x[1][1].lower()):
- lexer_cls = pygments.lexers.find_lexer_class(data[1])
- extensions = lexer_cls.filenames + lexer_cls.alias_filenames
-
- table.append({
- 'name': format_link(data[1], lexer_cls.url),
- 'extensions': ', '.join(extensions).replace('*', '\\*').replace('_', '\\_') or 'None',
- 'aliases': ', '.join(data[2]),
- 'class': f'{data[0]}.{classname}'
- })
-
- column_names = ['name', 'extensions', 'aliases', 'class']
- column_lengths = [max([len(row[column]) for row in table if row[column]])
- for column in column_names]
-
- def write_row(*columns):
- """Format a table row"""
- out = []
- for l, c in zip(column_lengths, columns):
- if c:
- out.append(c.ljust(l))
- else:
- out.append(' '*l)
-
- return ' '.join(out)
-
- def write_seperator():
- """Write a table separator row"""
- sep = ['='*c for c in column_lengths]
- return write_row(*sep)
-
- out.append(write_seperator())
- out.append(write_row('Name', 'Extension(s)', 'Short name(s)', 'Lexer class'))
- out.append(write_seperator())
- for row in table:
- out.append(write_row(
- row['name'],
- row['extensions'],
- row['aliases'],
- f':class:`~{row["class"]}`'))
- out.append(write_seperator())
-
- return '\n'.join(out)
-
- def document_lexers(self):
- from pygments.lexers._mapping import LEXERS
- import pygments
- import inspect
- import pathlib
-
- out = []
- modules = {}
- moduledocstrings = {}
- for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
- module = data[0]
- mod = __import__(module, None, None, [classname])
- self.filenames.add(mod.__file__)
- cls = getattr(mod, classname)
- if not cls.__doc__:
- print("Warning: %s does not have a docstring." % classname)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
-
- example_file = getattr(cls, '_example', None)
- if example_file:
- p = pathlib.Path(inspect.getabsfile(pygments)).parent.parent /\
- 'tests' / 'examplefiles' / example_file
- content = p.read_text(encoding='utf-8')
- if not content:
- raise Exception(
- f"Empty example file '{example_file}' for lexer "
- f"{classname}")
-
- if data[2]:
- lexer_name = data[2][0]
- docstring += '\n\n .. admonition:: Example\n'
- docstring += f'\n .. code-block:: {lexer_name}\n\n'
- for line in content.splitlines():
- docstring += f' {line}\n'
-
- modules.setdefault(module, []).append((
- classname,
- ', '.join(data[2]) or 'None',
- ', '.join(data[3]).replace('*', '\\*').replace('_', '\\_') or 'None',
- ', '.join(data[4]) or 'None',
- docstring))
- if module not in moduledocstrings:
- moddoc = mod.__doc__
- if isinstance(moddoc, bytes):
- moddoc = moddoc.decode('utf8')
- moduledocstrings[module] = moddoc
-
- for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
- if moduledocstrings[module] is None:
- raise Exception("Missing docstring for %s" % (module,))
- heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
- out.append(MODULEDOC % (module, heading, '-'*len(heading)))
- for data in lexers:
- out.append(LEXERDOC % data)
-
- return ''.join(out)
-
- def document_formatters(self):
- from pygments.formatters import FORMATTERS
-
- out = []
- for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
- module = data[0]
- mod = __import__(module, None, None, [classname])
- self.filenames.add(mod.__file__)
- cls = getattr(mod, classname)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- heading = cls.__name__
- out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
- ', '.join(data[3]).replace('*', '\\*') or 'None',
- docstring))
- return ''.join(out)
-
- def document_filters(self):
- from pygments.filters import FILTERS
-
- out = []
- for name, cls in FILTERS.items():
- self.filenames.add(sys.modules[cls.__module__].__file__)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- out.append(FILTERDOC % (cls.__name__, name, docstring))
- return ''.join(out)
-
-
-def setup(app):
- app.add_directive('pygmentsdoc', PygmentsDoc)
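For context, the directive removed above is normally wired into a Sphinx build roughly as sketched below. The module path and directive name come from the setup() call above; the 'lexers' argument is an assumption inferred from the document_* methods, not something this diff confirms.

# conf.py -- importing the module lets Sphinx call setup(app) above
extensions = ['pygments.sphinxext']

# somewhere in an .rst page (assumed argument spelling):
#
#   .. pygmentsdoc:: lexers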
diff --git a/venv/lib/python3.11/site-packages/pygments/style.py b/venv/lib/python3.11/site-packages/pygments/style.py
deleted file mode 100644
index 96eb92c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/style.py
+++ /dev/null
@@ -1,203 +0,0 @@
-"""
- pygments.style
- ~~~~~~~~~~~~~~
-
- Basic style object.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.token import Token, STANDARD_TYPES
-
-# Default mapping of ansixxx to RGB colors.
-_ansimap = {
- # dark
- 'ansiblack': '000000',
- 'ansired': '7f0000',
- 'ansigreen': '007f00',
- 'ansiyellow': '7f7fe0',
- 'ansiblue': '00007f',
- 'ansimagenta': '7f007f',
- 'ansicyan': '007f7f',
- 'ansigray': 'e5e5e5',
- # normal
- 'ansibrightblack': '555555',
- 'ansibrightred': 'ff0000',
- 'ansibrightgreen': '00ff00',
- 'ansibrightyellow': 'ffff00',
- 'ansibrightblue': '0000ff',
- 'ansibrightmagenta': 'ff00ff',
- 'ansibrightcyan': '00ffff',
- 'ansiwhite': 'ffffff',
-}
-# mapping of deprecated #ansixxx colors to new color names
-_deprecated_ansicolors = {
- # dark
- '#ansiblack': 'ansiblack',
- '#ansidarkred': 'ansired',
- '#ansidarkgreen': 'ansigreen',
- '#ansibrown': 'ansiyellow',
- '#ansidarkblue': 'ansiblue',
- '#ansipurple': 'ansimagenta',
- '#ansiteal': 'ansicyan',
- '#ansilightgray': 'ansigray',
- # normal
- '#ansidarkgray': 'ansibrightblack',
- '#ansired': 'ansibrightred',
- '#ansigreen': 'ansibrightgreen',
- '#ansiyellow': 'ansibrightyellow',
- '#ansiblue': 'ansibrightblue',
- '#ansifuchsia': 'ansibrightmagenta',
- '#ansiturquoise': 'ansibrightcyan',
- '#ansiwhite': 'ansiwhite',
-}
-ansicolors = set(_ansimap)
-
-
-class StyleMeta(type):
-
- def __new__(mcs, name, bases, dct):
- obj = type.__new__(mcs, name, bases, dct)
- for token in STANDARD_TYPES:
- if token not in obj.styles:
- obj.styles[token] = ''
-
- def colorformat(text):
- if text in ansicolors:
- return text
- if text[0:1] == '#':
- col = text[1:]
- if len(col) == 6:
- return col
- elif len(col) == 3:
- return col[0] * 2 + col[1] * 2 + col[2] * 2
- elif text == '':
- return ''
- elif text.startswith('var') or text.startswith('calc'):
- return text
- assert False, "wrong color format %r" % text
-
- _styles = obj._styles = {}
-
- for ttype in obj.styles:
- for token in ttype.split():
- if token in _styles:
- continue
- ndef = _styles.get(token.parent, None)
- styledefs = obj.styles.get(token, '').split()
- if not ndef or token is None:
- ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
- elif 'noinherit' in styledefs and token is not Token:
- ndef = _styles[Token][:]
- else:
- ndef = ndef[:]
- _styles[token] = ndef
- for styledef in obj.styles.get(token, '').split():
- if styledef == 'noinherit':
- pass
- elif styledef == 'bold':
- ndef[1] = 1
- elif styledef == 'nobold':
- ndef[1] = 0
- elif styledef == 'italic':
- ndef[2] = 1
- elif styledef == 'noitalic':
- ndef[2] = 0
- elif styledef == 'underline':
- ndef[3] = 1
- elif styledef == 'nounderline':
- ndef[3] = 0
- elif styledef[:3] == 'bg:':
- ndef[4] = colorformat(styledef[3:])
- elif styledef[:7] == 'border:':
- ndef[5] = colorformat(styledef[7:])
- elif styledef == 'roman':
- ndef[6] = 1
- elif styledef == 'sans':
- ndef[7] = 1
- elif styledef == 'mono':
- ndef[8] = 1
- else:
- ndef[0] = colorformat(styledef)
-
- return obj
-
- def style_for_token(cls, token):
- t = cls._styles[token]
- ansicolor = bgansicolor = None
- color = t[0]
- if color in _deprecated_ansicolors:
- color = _deprecated_ansicolors[color]
- if color in ansicolors:
- ansicolor = color
- color = _ansimap[color]
- bgcolor = t[4]
- if bgcolor in _deprecated_ansicolors:
- bgcolor = _deprecated_ansicolors[bgcolor]
- if bgcolor in ansicolors:
- bgansicolor = bgcolor
- bgcolor = _ansimap[bgcolor]
-
- return {
- 'color': color or None,
- 'bold': bool(t[1]),
- 'italic': bool(t[2]),
- 'underline': bool(t[3]),
- 'bgcolor': bgcolor or None,
- 'border': t[5] or None,
- 'roman': bool(t[6]) or None,
- 'sans': bool(t[7]) or None,
- 'mono': bool(t[8]) or None,
- 'ansicolor': ansicolor,
- 'bgansicolor': bgansicolor,
- }
-
- def list_styles(cls):
- return list(cls)
-
- def styles_token(cls, ttype):
- return ttype in cls._styles
-
- def __iter__(cls):
- for token in cls._styles:
- yield token, cls.style_for_token(token)
-
- def __len__(cls):
- return len(cls._styles)
-
-
-class Style(metaclass=StyleMeta):
-
- #: overall background color (``None`` means transparent)
- background_color = '#ffffff'
-
- #: highlight background color
- highlight_color = '#ffffcc'
-
- #: line number font color
- line_number_color = 'inherit'
-
- #: line number background color
- line_number_background_color = 'transparent'
-
- #: special line number font color
- line_number_special_color = '#000000'
-
- #: special line number background color
- line_number_special_background_color = '#ffffc0'
-
- #: Style definitions for individual token types.
- styles = {}
-
- #: user-friendly style name (used when selecting the style, so this
- #: should be all lowercase, with no spaces; hyphens are allowed)
- name = 'unnamed'
-
- aliases = []
-
- # Attribute for styles defined within Pygments. If set
- # to True, the style is not shown in the style gallery
- # on the website. This is intended for language-specific
- # styles.
- web_style_gallery_exclude = False
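A minimal sketch of how the deleted Style/StyleMeta machinery is consumed: subclass Style, declare token rules, and read the resolved per-token dictionaries. The class name and colors below are invented for illustration.

from pygments.style import Style
from pygments.token import Comment, Error, Keyword

class DemoStyle(Style):
    name = 'demo'                  # short lowercase selection name
    background_color = '#f8f8f8'
    styles = {
        Comment: 'italic #888',    # 3-digit colors expand to 6 digits
        Keyword: 'bold #005',
        Error:   'ansibrightred',  # resolved through _ansimap above
    }

# StyleMeta fills in every STANDARD_TYPES token and resolves inheritance.
kw = DemoStyle.style_for_token(Keyword)
print(kw['color'], kw['bold'])                          # 000055 True
print(DemoStyle.style_for_token(Error)['ansicolor'])    # ansibrightred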
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__init__.py b/venv/lib/python3.11/site-packages/pygments/styles/__init__.py
deleted file mode 100644
index 75ac30b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__init__.py
+++ /dev/null
@@ -1,61 +0,0 @@
-"""
- pygments.styles
- ~~~~~~~~~~~~~~~
-
- Contains built-in styles.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.plugin import find_plugin_styles
-from pygments.util import ClassNotFound
-from pygments.styles._mapping import STYLES
-
-#: A dictionary of built-in styles, mapping style names to
-#: ``'submodule::classname'`` strings.
-#: This mapping is deprecated. Use `pygments.styles.STYLES` instead.
-STYLE_MAP = {v[1]: v[0].split('.')[-1] + '::' + k for k, v in STYLES.items()}
-
-#: Internal reverse mapping to make `get_style_by_name` more efficient
-_STYLE_NAME_TO_MODULE_MAP = {v[1]: (v[0], k) for k, v in STYLES.items()}
-
-
-def get_style_by_name(name):
- """
- Return a style class by its short name. The names of the builtin styles
- are listed in :data:`pygments.styles.STYLE_MAP`.
-
- Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is
- found.
- """
- if name in _STYLE_NAME_TO_MODULE_MAP:
- mod, cls = _STYLE_NAME_TO_MODULE_MAP[name]
- builtin = "yes"
- else:
- for found_name, style in find_plugin_styles():
- if name == found_name:
- return style
- # perhaps it got dropped into our styles package
- builtin = ""
- mod = 'pygments.styles.' + name
- cls = name.title() + "Style"
-
- try:
- mod = __import__(mod, None, None, [cls])
- except ImportError:
- raise ClassNotFound("Could not find style module %r" % mod +
- (builtin and ", though it should be builtin")
- + ".")
- try:
- return getattr(mod, cls)
- except AttributeError:
- raise ClassNotFound("Could not find style class %r in style module." % cls)
-
-
-def get_all_styles():
- """Return a generator for all styles by name, both builtin and plugin."""
- for v in STYLES.values():
- yield v[1]
- for name, _ in find_plugin_styles():
- yield name
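A short sketch, assuming a working Pygments installation, of how the two helpers deleted above are typically used:

from pygments.styles import get_all_styles, get_style_by_name
from pygments.util import ClassNotFound

monokai = get_style_by_name('monokai')   # returns the style class itself
print(monokai.background_color)

try:
    get_style_by_name('no-such-style')   # unknown names raise ClassNotFound
except ClassNotFound as exc:
    print(exc)

print(sorted(get_all_styles())[:5])      # builtin plus plugin style names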
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index e75552e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/_mapping.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/_mapping.cpython-311.pyc
deleted file mode 100644
index 737021a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/_mapping.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/abap.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/abap.cpython-311.pyc
deleted file mode 100644
index 9b722e8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/abap.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol.cpython-311.pyc
deleted file mode 100644
index d2dd53e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol_nu.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol_nu.cpython-311.pyc
deleted file mode 100644
index 90ad902..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol_nu.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/arduino.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/arduino.cpython-311.pyc
deleted file mode 100644
index 6ed7a94..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/arduino.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/autumn.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/autumn.cpython-311.pyc
deleted file mode 100644
index 12a71a4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/autumn.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/borland.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/borland.cpython-311.pyc
deleted file mode 100644
index 12b6184..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/borland.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/bw.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/bw.cpython-311.pyc
deleted file mode 100644
index c4ec4fc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/bw.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/colorful.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/colorful.cpython-311.pyc
deleted file mode 100644
index d296dd6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/colorful.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/default.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/default.cpython-311.pyc
deleted file mode 100644
index e43cc76..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/default.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/dracula.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/dracula.cpython-311.pyc
deleted file mode 100644
index 2c72cea..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/dracula.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/emacs.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/emacs.cpython-311.pyc
deleted file mode 100644
index ec9a7da..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/emacs.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly.cpython-311.pyc
deleted file mode 100644
index 1a4d335..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly_grayscale.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly_grayscale.cpython-311.pyc
deleted file mode 100644
index 67239e0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly_grayscale.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/fruity.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/fruity.cpython-311.pyc
deleted file mode 100644
index c4a95c5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/fruity.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gh_dark.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gh_dark.cpython-311.pyc
deleted file mode 100644
index afa8ce8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gh_dark.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gruvbox.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gruvbox.cpython-311.pyc
deleted file mode 100644
index 0f32767..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gruvbox.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/igor.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/igor.cpython-311.pyc
deleted file mode 100644
index 93acf73..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/igor.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/inkpot.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/inkpot.cpython-311.pyc
deleted file mode 100644
index 216dbb1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/inkpot.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lightbulb.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lightbulb.cpython-311.pyc
deleted file mode 100644
index a59a0ec..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lightbulb.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lilypond.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lilypond.cpython-311.pyc
deleted file mode 100644
index 67237ba..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lilypond.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lovelace.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lovelace.cpython-311.pyc
deleted file mode 100644
index 4ccd0ed..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lovelace.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/manni.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/manni.cpython-311.pyc
deleted file mode 100644
index e2f0441..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/manni.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/material.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/material.cpython-311.pyc
deleted file mode 100644
index 84f600f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/material.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/monokai.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/monokai.cpython-311.pyc
deleted file mode 100644
index f917020..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/monokai.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/murphy.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/murphy.cpython-311.pyc
deleted file mode 100644
index de91853..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/murphy.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/native.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/native.cpython-311.pyc
deleted file mode 100644
index 387fc95..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/native.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/nord.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/nord.cpython-311.pyc
deleted file mode 100644
index 5c8ec6a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/nord.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/onedark.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/onedark.cpython-311.pyc
deleted file mode 100644
index 70803b5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/onedark.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_dark.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_dark.cpython-311.pyc
deleted file mode 100644
index 44eca3f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_dark.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_light.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_light.cpython-311.pyc
deleted file mode 100644
index 6381255..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_light.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/pastie.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/pastie.cpython-311.pyc
deleted file mode 100644
index 316d965..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/pastie.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/perldoc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/perldoc.cpython-311.pyc
deleted file mode 100644
index b106e98..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/perldoc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rainbow_dash.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rainbow_dash.cpython-311.pyc
deleted file mode 100644
index 6f30e3e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rainbow_dash.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rrt.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rrt.cpython-311.pyc
deleted file mode 100644
index 530bf39..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rrt.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/sas.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/sas.cpython-311.pyc
deleted file mode 100644
index b1d369b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/sas.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/solarized.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/solarized.cpython-311.pyc
deleted file mode 100644
index 548e86f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/solarized.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/staroffice.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/staroffice.cpython-311.pyc
deleted file mode 100644
index 2c350b9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/staroffice.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_dark.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_dark.cpython-311.pyc
deleted file mode 100644
index 24906ee..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_dark.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_light.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_light.cpython-311.pyc
deleted file mode 100644
index 50cfb6d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_light.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/tango.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/tango.cpython-311.pyc
deleted file mode 100644
index 43eaecf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/tango.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/trac.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/trac.cpython-311.pyc
deleted file mode 100644
index bcace7d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/trac.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vim.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vim.cpython-311.pyc
deleted file mode 100644
index 8d68547..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vim.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vs.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vs.cpython-311.pyc
deleted file mode 100644
index 8f1ed00..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vs.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/xcode.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/xcode.cpython-311.pyc
deleted file mode 100644
index a6236a2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/xcode.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/zenburn.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/zenburn.cpython-311.pyc
deleted file mode 100644
index de65448..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/__pycache__/zenburn.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/_mapping.py b/venv/lib/python3.11/site-packages/pygments/styles/_mapping.py
deleted file mode 100644
index 04c7ddf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/_mapping.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Automatically generated by scripts/gen_mapfiles.py.
-# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead.
-
-STYLES = {
- 'AbapStyle': ('pygments.styles.abap', 'abap', ()),
- 'AlgolStyle': ('pygments.styles.algol', 'algol', ()),
- 'Algol_NuStyle': ('pygments.styles.algol_nu', 'algol_nu', ()),
- 'ArduinoStyle': ('pygments.styles.arduino', 'arduino', ()),
- 'AutumnStyle': ('pygments.styles.autumn', 'autumn', ()),
- 'BlackWhiteStyle': ('pygments.styles.bw', 'bw', ()),
- 'BorlandStyle': ('pygments.styles.borland', 'borland', ()),
- 'ColorfulStyle': ('pygments.styles.colorful', 'colorful', ()),
- 'DefaultStyle': ('pygments.styles.default', 'default', ()),
- 'DraculaStyle': ('pygments.styles.dracula', 'dracula', ()),
- 'EmacsStyle': ('pygments.styles.emacs', 'emacs', ()),
- 'FriendlyGrayscaleStyle': ('pygments.styles.friendly_grayscale', 'friendly_grayscale', ()),
- 'FriendlyStyle': ('pygments.styles.friendly', 'friendly', ()),
- 'FruityStyle': ('pygments.styles.fruity', 'fruity', ()),
- 'GhDarkStyle': ('pygments.styles.gh_dark', 'github-dark', ()),
- 'GruvboxDarkStyle': ('pygments.styles.gruvbox', 'gruvbox-dark', ()),
- 'GruvboxLightStyle': ('pygments.styles.gruvbox', 'gruvbox-light', ()),
- 'IgorStyle': ('pygments.styles.igor', 'igor', ()),
- 'InkPotStyle': ('pygments.styles.inkpot', 'inkpot', ()),
- 'LightbulbStyle': ('pygments.styles.lightbulb', 'lightbulb', ()),
- 'LilyPondStyle': ('pygments.styles.lilypond', 'lilypond', ()),
- 'LovelaceStyle': ('pygments.styles.lovelace', 'lovelace', ()),
- 'ManniStyle': ('pygments.styles.manni', 'manni', ()),
- 'MaterialStyle': ('pygments.styles.material', 'material', ()),
- 'MonokaiStyle': ('pygments.styles.monokai', 'monokai', ()),
- 'MurphyStyle': ('pygments.styles.murphy', 'murphy', ()),
- 'NativeStyle': ('pygments.styles.native', 'native', ()),
- 'NordDarkerStyle': ('pygments.styles.nord', 'nord-darker', ()),
- 'NordStyle': ('pygments.styles.nord', 'nord', ()),
- 'OneDarkStyle': ('pygments.styles.onedark', 'one-dark', ()),
- 'ParaisoDarkStyle': ('pygments.styles.paraiso_dark', 'paraiso-dark', ()),
- 'ParaisoLightStyle': ('pygments.styles.paraiso_light', 'paraiso-light', ()),
- 'PastieStyle': ('pygments.styles.pastie', 'pastie', ()),
- 'PerldocStyle': ('pygments.styles.perldoc', 'perldoc', ()),
- 'RainbowDashStyle': ('pygments.styles.rainbow_dash', 'rainbow_dash', ()),
- 'RrtStyle': ('pygments.styles.rrt', 'rrt', ()),
- 'SasStyle': ('pygments.styles.sas', 'sas', ()),
- 'SolarizedDarkStyle': ('pygments.styles.solarized', 'solarized-dark', ()),
- 'SolarizedLightStyle': ('pygments.styles.solarized', 'solarized-light', ()),
- 'StarofficeStyle': ('pygments.styles.staroffice', 'staroffice', ()),
- 'StataDarkStyle': ('pygments.styles.stata_dark', 'stata-dark', ()),
- 'StataLightStyle': ('pygments.styles.stata_light', 'stata-light', ()),
- 'TangoStyle': ('pygments.styles.tango', 'tango', ()),
- 'TracStyle': ('pygments.styles.trac', 'trac', ()),
- 'VimStyle': ('pygments.styles.vim', 'vim', ()),
- 'VisualStudioStyle': ('pygments.styles.vs', 'vs', ()),
- 'XcodeStyle': ('pygments.styles.xcode', 'xcode', ()),
- 'ZenburnStyle': ('pygments.styles.zenburn', 'zenburn', ()),
-}
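Purely for illustration, the generated STYLES mapping above is consumed like this; the unpacking mirrors the STYLE_MAP construction in styles/__init__.py earlier in this diff.

from pygments.styles._mapping import STYLES

# Each value is (module path, short style name, tuple of aliases).
for classname, (module, short_name, aliases) in sorted(STYLES.items()):
    print(f'{short_name:20} {module}.{classname}')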
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/abap.py b/venv/lib/python3.11/site-packages/pygments/styles/abap.py
deleted file mode 100644
index ab322df..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/abap.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
- pygments.styles.abap
- ~~~~~~~~~~~~~~~~~~~~
-
- ABAP workbench like style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator
-
-
-__all__ = ['AbapStyle']
-
-
-class AbapStyle(Style):
- name = 'abap'
-
- styles = {
- Comment: 'italic #888',
- Comment.Special: '#888',
- Keyword: '#00f',
- Operator.Word: '#00f',
- Name: '#000',
- Number: '#3af',
- String: '#5a2',
-
- Error: '#F00',
- }
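As a reminder of how these style modules are consumed, a hedged sketch that renders a snippet with the 'abap' style through the standard highlight()/HtmlFormatter API:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

# noclasses=True inlines the colors so the style shows without external CSS.
html = highlight('x = 1  # comment', PythonLexer(),
                 HtmlFormatter(style='abap', noclasses=True))
print(html)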
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/algol.py b/venv/lib/python3.11/site-packages/pygments/styles/algol.py
deleted file mode 100644
index 83319e0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/algol.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
- pygments.styles.algol
- ~~~~~~~~~~~~~~~~~~~~~
-
- Algol publication style.
-
- This style renders source code for publication of algorithms in
- scientific papers and academic texts, where its format is frequently used.
-
- It is based on the style of the revised Algol-60 language report[1].
-
- o No colours, only black, white and shades of grey are used.
- o Keywords are rendered in lowercase underline boldface.
- o Builtins are rendered in lowercase boldface italic.
- o Docstrings and pragmas are rendered in dark grey boldface.
- o Library identifiers are rendered in dark grey boldface italic.
- o Comments are rendered in grey italic.
-
- To render keywords without underlining, refer to the `Algol_Nu` style.
-
- For lowercase conversion of keywords and builtins in languages where
- these are not or might not be lowercase, a supporting lexer is required.
- The Algol and Modula-2 lexers automatically convert to lowercase whenever
- this style is selected.
-
- [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`_
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Operator
-
-
-__all__ = ['AlgolStyle']
-
-
-class AlgolStyle(Style):
- name = 'algol'
-
- background_color = "#ffffff"
-
- styles = {
- Comment: "italic #888",
- Comment.Preproc: "bold noitalic #888",
- Comment.Special: "bold noitalic #888",
-
- Keyword: "underline bold",
- Keyword.Declaration: "italic",
-
- Name.Builtin: "bold italic",
- Name.Builtin.Pseudo: "bold italic",
- Name.Namespace: "bold italic #666",
- Name.Class: "bold italic #666",
- Name.Function: "bold italic #666",
- Name.Variable: "bold italic #666",
- Name.Constant: "bold italic #666",
-
- Operator.Word: "bold",
-
- String: "italic #666",
-
- Error: "border:#FF0000"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/algol_nu.py b/venv/lib/python3.11/site-packages/pygments/styles/algol_nu.py
deleted file mode 100644
index de1434d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/algol_nu.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
- pygments.styles.algol_nu
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Algol publication style without underlining of keywords.
-
- This style renders source code for publication of algorithms in
- scientific papers and academic texts, where its format is frequently used.
-
- It is based on the style of the revised Algol-60 language report[1].
-
- o No colours, only black, white and shades of grey are used.
- o Keywords are rendered in lowercase boldface.
- o Builtins are rendered in lowercase boldface italic.
- o Docstrings and pragmas are rendered in dark grey boldface.
- o Library identifiers are rendered in dark grey boldface italic.
- o Comments are rendered in grey italic.
-
- To render keywords with underlining, refer to the `Algol` style.
-
- For lowercase conversion of keywords and builtins in languages where
- these are not or might not be lowercase, a supporting lexer is required.
- The Algol and Modula-2 lexers automatically convert to lowercase whenever
- this style is selected.
-
- [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`_
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Operator
-
-
-__all__ = ['Algol_NuStyle']
-
-
-class Algol_NuStyle(Style):
- name = 'algol_nu'
-
- background_color = "#ffffff"
-
- styles = {
- Comment: "italic #888",
- Comment.Preproc: "bold noitalic #888",
- Comment.Special: "bold noitalic #888",
-
- Keyword: "bold",
- Keyword.Declaration: "italic",
-
- Name.Builtin: "bold italic",
- Name.Builtin.Pseudo: "bold italic",
- Name.Namespace: "bold italic #666",
- Name.Class: "bold italic #666",
- Name.Function: "bold italic #666",
- Name.Variable: "bold italic #666",
- Name.Constant: "bold italic #666",
-
- Operator.Word: "bold",
-
- String: "italic #666",
-
- Error: "border:#FF0000"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/arduino.py b/venv/lib/python3.11/site-packages/pygments/styles/arduino.py
deleted file mode 100644
index 8655b03..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/arduino.py
+++ /dev/null
@@ -1,100 +0,0 @@
-"""
- pygments.styles.arduino
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Arduino® Syntax highlighting style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['ArduinoStyle']
-
-
-class ArduinoStyle(Style):
- """
- The Arduino® language style. This style is designed for highlighting
- Arduino source code, so it gives the best results with that language.
- """
- name = 'arduino'
-
- background_color = "#ffffff"
-
- styles = {
- Whitespace: "", # class: 'w'
- Error: "#a61717", # class: 'err'
-
- Comment: "#95a5a6", # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "#728E00", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: "#728E00", # class: 'k'
- Keyword.Constant: "#00979D", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: "", # class: 'kn'
- Keyword.Pseudo: "#00979D", # class: 'kp'
- Keyword.Reserved: "#00979D", # class: 'kr'
- Keyword.Type: "#00979D", # class: 'kt'
-
- Operator: "#728E00", # class: 'o'
- Operator.Word: "", # class: 'ow'
-
- Name: "#434f54", # class: 'n'
- Name.Attribute: "", # class: 'na'
- Name.Builtin: "#728E00", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: "", # class: 'nc'
- Name.Constant: "", # class: 'no'
- Name.Decorator: "", # class: 'nd'
- Name.Entity: "", # class: 'ni'
- Name.Exception: "", # class: 'ne'
- Name.Function: "#D35400", # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: "", # class: 'nn'
- Name.Other: "#728E00", # class: 'nx'
- Name.Tag: "", # class: 'nt'
- Name.Variable: "", # class: 'nv'
- Name.Variable.Class: "", # class: 'vc'
- Name.Variable.Global: "", # class: 'vg'
- Name.Variable.Instance: "", # class: 'vi'
-
- Number: "#8A7B52", # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- String: "#7F8C8D", # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: "", # class: 'sc'
- String.Doc: "", # class: 'sd'
- String.Double: "", # class: 's2'
- String.Escape: "", # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: "", # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: "", # class: 'gd',
- Generic.Emph: "", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "", # class: 'gh'
- Generic.Inserted: "", # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "", # class: 'gp'
- Generic.Strong: "", # class: 'gs'
- Generic.Subheading: "", # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/autumn.py b/venv/lib/python3.11/site-packages/pygments/styles/autumn.py
deleted file mode 100644
index ccbb5fe..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/autumn.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
- pygments.styles.autumn
- ~~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by the terminal highlighting style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['AutumnStyle']
-
-
-class AutumnStyle(Style):
- """
- A colorful style, inspired by the terminal highlighting style.
- """
- name = 'autumn'
-
- styles = {
- Whitespace: '#bbbbbb',
-
- Comment: 'italic #aaaaaa',
- Comment.Preproc: 'noitalic #4c8317',
- Comment.Special: 'italic #0000aa',
-
- Keyword: '#0000aa',
- Keyword.Type: '#00aaaa',
-
- Operator.Word: '#0000aa',
-
- Name.Builtin: '#00aaaa',
- Name.Function: '#00aa00',
- Name.Class: 'underline #00aa00',
- Name.Namespace: 'underline #00aaaa',
- Name.Variable: '#aa0000',
- Name.Constant: '#aa0000',
- Name.Entity: 'bold #800',
- Name.Attribute: '#1e90ff',
- Name.Tag: 'bold #1e90ff',
- Name.Decorator: '#888888',
-
- String: '#aa5500',
- String.Symbol: '#0000aa',
- String.Regex: '#009999',
-
- Number: '#009999',
-
- Generic.Heading: 'bold #000080',
- Generic.Subheading: 'bold #800080',
- Generic.Deleted: '#aa0000',
- Generic.Inserted: '#00aa00',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: '#F00 bg:#FAA'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/borland.py b/venv/lib/python3.11/site-packages/pygments/styles/borland.py
deleted file mode 100644
index 82c00ae..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/borland.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
- pygments.styles.borland
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the style used in the Borland IDEs.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['BorlandStyle']
-
-
-class BorlandStyle(Style):
- """
- Style similar to the style used in the Borland IDEs.
- """
- name = 'borland'
-
- styles = {
- Whitespace: '#bbbbbb',
-
- Comment: 'italic #008800',
- Comment.Preproc: 'noitalic #008080',
- Comment.Special: 'noitalic bold',
-
- String: '#0000FF',
- String.Char: '#800080',
- Number: '#0000FF',
- Keyword: 'bold #000080',
- Operator.Word: 'bold',
- Name.Tag: 'bold #000080',
- Name.Attribute: '#FF0000',
-
- Generic.Heading: '#999999',
- Generic.Subheading: '#aaaaaa',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/bw.py b/venv/lib/python3.11/site-packages/pygments/styles/bw.py
deleted file mode 100644
index 3ba0092..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/bw.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
- pygments.styles.bw
- ~~~~~~~~~~~~~~~~~~
-
- Simple black/white only style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Operator, Generic
-
-
-__all__ = ['BlackWhiteStyle']
-
-
-class BlackWhiteStyle(Style):
- name = 'bw'
-
- background_color = "#ffffff"
-
- styles = {
- Comment: "italic",
- Comment.Preproc: "noitalic",
-
- Keyword: "bold",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold",
-
- Operator.Word: "bold",
-
- Name.Class: "bold",
- Name.Namespace: "bold",
- Name.Exception: "bold",
- Name.Entity: "bold",
- Name.Tag: "bold",
-
- String: "italic",
- String.Interpol: "bold",
- String.Escape: "bold",
-
- Generic.Heading: "bold",
- Generic.Subheading: "bold",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold",
-
- Error: "border:#FF0000"
- }
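Because StyleMeta (deleted earlier in this diff) makes style classes iterable, a style's resolved rules can be inspected directly; a small sketch using the black/white style above:

from pygments.styles.bw import BlackWhiteStyle

# Collect the token types this style renders in bold.
bold_tokens = [token for token, style in BlackWhiteStyle if style['bold']]
print(bold_tokens[:5])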
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/colorful.py b/venv/lib/python3.11/site-packages/pygments/styles/colorful.py
deleted file mode 100644
index 661a9e4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/colorful.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""
- pygments.styles.colorful
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by CodeRay.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['ColorfulStyle']
-
-
-class ColorfulStyle(Style):
- """
- A colorful style, inspired by CodeRay.
- """
- name = 'colorful'
-
- styles = {
- Whitespace: "#bbbbbb",
-
- Comment: "#888",
- Comment.Preproc: "#579",
- Comment.Special: "bold #cc0000",
-
- Keyword: "bold #080",
- Keyword.Pseudo: "#038",
- Keyword.Type: "#339",
-
- Operator: "#333",
- Operator.Word: "bold #000",
-
- Name.Builtin: "#007020",
- Name.Function: "bold #06B",
- Name.Class: "bold #B06",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "bold #F00",
- Name.Variable: "#963",
- Name.Variable.Instance: "#33B",
- Name.Variable.Class: "#369",
- Name.Variable.Global: "bold #d70",
- Name.Constant: "bold #036",
- Name.Label: "bold #970",
- Name.Entity: "bold #800",
- Name.Attribute: "#00C",
- Name.Tag: "#070",
- Name.Decorator: "bold #555",
-
- String: "bg:#fff0f0",
- String.Char: "#04D bg:",
- String.Doc: "#D42 bg:",
- String.Interpol: "bg:#eee",
- String.Escape: "bold #666",
- String.Regex: "bg:#fff0ff #000",
- String.Symbol: "#A60 bg:",
- String.Other: "#D20",
-
- Number: "bold #60E",
- Number.Integer: "bold #00D",
- Number.Float: "bold #60E",
- Number.Hex: "bold #058",
- Number.Oct: "bold #40E",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "#F00 bg:#FAA"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/default.py b/venv/lib/python3.11/site-packages/pygments/styles/default.py
deleted file mode 100644
index f4e5b7b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/default.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""
- pygments.styles.default
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- The default highlighting style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['DefaultStyle']
-
-
-class DefaultStyle(Style):
- """
- The default style (inspired by Emacs 22).
- """
- name = 'default'
-
- background_color = "#f8f8f8"
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #3D7B7B",
- Comment.Preproc: "noitalic #9C6500",
-
- #Keyword: "bold #AA22FF",
- Keyword: "bold #008000",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold #B00040",
-
- Operator: "#666666",
- Operator.Word: "bold #AA22FF",
-
- Name.Builtin: "#008000",
- Name.Function: "#0000FF",
- Name.Class: "bold #0000FF",
- Name.Namespace: "bold #0000FF",
- Name.Exception: "bold #CB3F38",
- Name.Variable: "#19177C",
- Name.Constant: "#880000",
- Name.Label: "#767600",
- Name.Entity: "bold #717171",
- Name.Attribute: "#687822",
- Name.Tag: "bold #008000",
- Name.Decorator: "#AA22FF",
-
- String: "#BA2121",
- String.Doc: "italic",
- String.Interpol: "bold #A45A77",
- String.Escape: "bold #AA5D1F",
- String.Regex: "#A45A77",
- #String.Symbol: "#B8860B",
- String.Symbol: "#19177C",
- String.Other: "#008000",
- Number: "#666666",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#008400",
- Generic.Error: "#E40000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#717171",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
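A brief sketch of turning a style definition such as the default style above into a stylesheet with the standard HtmlFormatter API:

from pygments.formatters import HtmlFormatter

css = HtmlFormatter(style='default').get_style_defs('.highlight')
print(css.splitlines()[0])   # first line of the generated CSS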
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/dracula.py b/venv/lib/python3.11/site-packages/pygments/styles/dracula.py
deleted file mode 100644
index d7043c0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/dracula.py
+++ /dev/null
@@ -1,90 +0,0 @@
-"""
- pygments.styles.dracula
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Pygments version of `Dracula` from https://github.com/dracula/dracula-theme.
-
- Based on the Dracula Theme for pygments by Chris Bracco.
- See https://github.com/dracula/pygments/tree/fee9ed5613d1086bc01b9d0a5a0e9867a009f571
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Literal, \
- Number, Operator, Other, Punctuation, Text, Generic, Whitespace
-
-
-__all__ = ['DraculaStyle']
-
-background = "#282a36"
-foreground = "#f8f8f2"
-selection = "#44475a"
-comment = "#6272a4"
-cyan = "#8be9fd"
-green = "#50fa7b"
-orange = "#ffb86c"
-pink = "#ff79c6"
-purple = "#bd93f9"
-red = "#ff5555"
-yellow = "#f1fa8c"
-
-deletion = "#8b080b"
-
-class DraculaStyle(Style):
- name = 'dracula'
-
- background_color = background
- highlight_color = selection
- line_number_color = yellow
- line_number_background_color = selection
- line_number_special_color = green
- line_number_special_background_color = comment
-
- styles = {
- Whitespace: foreground,
-
- Comment: comment,
- Comment.Preproc: pink,
-
- Generic: foreground,
- Generic.Deleted: deletion,
- Generic.Emph: "underline",
- Generic.Heading: "bold",
- Generic.Inserted: "bold",
- Generic.Output: selection,
- Generic.EmphStrong: "underline",
- Generic.Subheading: "bold",
-
- Error: foreground,
-
- Keyword: pink,
- Keyword.Constant: pink,
- Keyword.Declaration: cyan + " italic",
- Keyword.Type: cyan,
-
- Literal: foreground,
-
- Name: foreground,
- Name.Attribute: green,
- Name.Builtin: cyan + " italic",
- Name.Builtin.Pseudo: foreground,
- Name.Class: green,
- Name.Function: green,
- Name.Label: cyan + " italic",
- Name.Tag: pink,
- Name.Variable: cyan + " italic",
-
- Number: orange,
-
- Operator: pink,
-
- Other: foreground,
-
- Punctuation: foreground,
-
- String: purple,
-
- Text: foreground,
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/emacs.py b/venv/lib/python3.11/site-packages/pygments/styles/emacs.py
deleted file mode 100644
index fad91a1..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/emacs.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
- pygments.styles.emacs
- ~~~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by Emacs.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['EmacsStyle']
-
-
-class EmacsStyle(Style):
- """
- A highlighting style inspired by Emacs 22.
- """
- name = 'emacs'
-
- background_color = "#f8f8f8"
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #008800",
- Comment.Preproc: "noitalic",
- Comment.Special: "noitalic bold",
-
- Keyword: "bold #AA22FF",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "bold #00BB00",
-
- Operator: "#666666",
- Operator.Word: "bold #AA22FF",
-
- Name.Builtin: "#AA22FF",
- Name.Function: "#00A000",
- Name.Class: "#0000FF",
- Name.Namespace: "bold #0000FF",
- Name.Exception: "bold #D2413A",
- Name.Variable: "#B8860B",
- Name.Constant: "#880000",
- Name.Label: "#A0A000",
- Name.Entity: "bold #999999",
- Name.Attribute: "#BB4444",
- Name.Tag: "bold #008000",
- Name.Decorator: "#AA22FF",
-
- String: "#BB4444",
- String.Doc: "italic",
- String.Interpol: "bold #BB6688",
- String.Escape: "bold #BB6622",
- String.Regex: "#BB6688",
- String.Symbol: "#B8860B",
- String.Other: "#008000",
- Number: "#666666",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/friendly.py b/venv/lib/python3.11/site-packages/pygments/styles/friendly.py
deleted file mode 100644
index 8de4fcc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/friendly.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""
- pygments.styles.friendly
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- A modern style based on the VIM pyte theme.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['FriendlyStyle']
-
-
-class FriendlyStyle(Style):
- """
- A modern style based on the VIM pyte theme.
- """
- name = 'friendly'
-
- background_color = "#f0f0f0"
- line_number_color = "#666666"
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #60a0b0",
- Comment.Preproc: "noitalic #007020",
- Comment.Special: "noitalic bg:#fff0f0",
-
- Keyword: "bold #007020",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold #902000",
-
- Operator: "#666666",
- Operator.Word: "bold #007020",
-
- Name.Builtin: "#007020",
- Name.Function: "#06287e",
- Name.Class: "bold #0e84b5",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "#007020",
- Name.Variable: "#bb60d5",
- Name.Constant: "#60add5",
- Name.Label: "bold #002070",
- Name.Entity: "bold #d55537",
- Name.Attribute: "#4070a0",
- Name.Tag: "bold #062873",
- Name.Decorator: "bold #555555",
-
- String: "#4070a0",
- String.Doc: "italic",
- String.Interpol: "italic #70a0d0",
- String.Escape: "bold #4070a0",
- String.Regex: "#235388",
- String.Symbol: "#517918",
- String.Other: "#c65d09",
- Number: "#40a070",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/friendly_grayscale.py b/venv/lib/python3.11/site-packages/pygments/styles/friendly_grayscale.py
deleted file mode 100644
index e7d3ed4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/friendly_grayscale.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""
- pygments.styles.friendly_grayscale
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- A grayscale style based on the friendly style.
- The color values of the friendly style have been converted to grayscale
- using the luminosity values calculated by
- http://www.workwithcolor.com/color-converter-01.htm
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['FriendlyGrayscaleStyle']
-
-
-class FriendlyGrayscaleStyle(Style):
- """
- A modern grayscale style based on the friendly style.
-
- .. versionadded:: 2.11
- """
- name = 'friendly_grayscale'
-
- background_color = "#f0f0f0"
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #959595",
- Comment.Preproc: "noitalic #575757",
- Comment.Special: "noitalic bg:#F4F4F4",
-
- Keyword: "bold #575757",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold #4F4F4F",
-
- Operator: "#666666",
- Operator.Word: "bold #575757",
-
- Name.Builtin: "#575757",
- Name.Function: "#3F3F3F",
- Name.Class: "bold #7E7E7E",
- Name.Namespace: "bold #7E7E7E",
- Name.Exception: "#575757",
- Name.Variable: "#9A9A9A",
- Name.Constant: "#A5A5A5",
- Name.Label: "bold #363636",
- Name.Entity: "bold #848484",
- Name.Attribute: "#707070",
- Name.Tag: "bold #3B3B3B",
- Name.Decorator: "bold #555555",
-
- String: "#717171",
- String.Doc: "italic",
- String.Interpol: "italic #9F9F9F",
- String.Escape: "bold #717171",
- String.Regex: "#575757",
- String.Symbol: "#676767",
- String.Other: "#7E7E7E",
- Number: "#888888",
-
- Generic.Heading: "bold #373737",
- Generic.Subheading: "bold #5A5A5A",
- Generic.Deleted: "#545454",
- Generic.Inserted: "#7D7D7D",
- Generic.Error: "#898989",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #7E7E7E",
- Generic.Output: "#888888",
- Generic.Traceback: "#6D6D6D",
-
- Error: "border:#898989"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/fruity.py b/venv/lib/python3.11/site-packages/pygments/styles/fruity.py
deleted file mode 100644
index b23257d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/fruity.py
+++ /dev/null
@@ -1,47 +0,0 @@
-"""
- pygments.styles.fruity
- ~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of my "fruity" vim theme.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Comment, Name, Keyword, \
- Generic, Number, String, Whitespace
-
-
-__all__ = ['FruityStyle']
-
-
-class FruityStyle(Style):
- """
- Pygments version of the "fruity" vim theme.
- """
-
- name = 'fruity'
-
- background_color = '#111111'
- highlight_color = '#333333'
-
- styles = {
- Whitespace: '#888888',
- Token: '#ffffff',
- Generic.Output: '#444444 bg:#222222',
- Keyword: '#fb660a bold',
- Keyword.Pseudo: 'nobold',
- Number: '#0086f7 bold',
- Name.Tag: '#fb660a bold',
- Name.Variable: '#fb660a',
- Comment: '#008800 bg:#0f140f italic',
- Name.Attribute: '#ff0086 bold',
- String: '#0086d2',
- Name.Function: '#ff0086 bold',
- Generic.Heading: '#ffffff bold',
- Keyword.Type: '#cdcaa9 bold',
- Generic.Subheading: '#ffffff bold',
- Name.Constant: '#0086d2',
- Comment.Preproc: '#ff0007 bold'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/gh_dark.py b/venv/lib/python3.11/site-packages/pygments/styles/gh_dark.py
deleted file mode 100644
index 95f8e80..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/gh_dark.py
+++ /dev/null
@@ -1,113 +0,0 @@
-"""
- pygments.styles.gh_dark
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- GitHub's Dark colorscheme-based theme for Pygments
- Colors extracted from https://github.com/primer/primitives
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, Error, Number, Operator, \
- Generic, Text, Literal, String, Token
-
-
-__all__ = ['GhDarkStyle']
-
-
-# vars are defined to match the defs in
-# - [GitHub's VS Code theme](https://github.com/primer/github-vscode-theme) and
-# - [Primer styles](https://github.com/primer/primitives)
-RED_2 = "#ffa198"
-RED_3 = "#ff7b72"
-RED_9 = "#490202"
-ORANGE_2 = "#ffa657"
-ORANGE_3 = "#f0883e"
-GREEN_1 = "#7ee787"
-GREEN_2 = "#56d364"
-GREEN_7 = "#0f5323"
-BLUE_1 = "#a5d6ff"
-BLUE_2 = "#79c0ff"
-PURPLE_2 = "#d2a8ff"
-GRAY_3 = "#8b949e"
-GRAY_4 = "#6e7681"
-FG_SUBTLE = "#6e7681"
-FG_DEFAULT = "#e6edf3"
-BG_DEFAULT = "#0d1117"
-DANGER_FG = "#f85149"
-
-
-class GhDarkStyle(Style):
- """
- GitHub's Dark colorscheme-based theme for Pygments.
- """
-
- name = 'github-dark'
-
- background_color = BG_DEFAULT
-
- # has transparency in VS Code theme as `colors.codemirror.activelineBg`
- highlight_color = GRAY_4
-
- line_number_special_color = FG_DEFAULT
- line_number_special_background_color = FG_SUBTLE
-
- line_number_color = GRAY_4
- line_number_background_color = BG_DEFAULT
-
- styles = {
- Token: FG_DEFAULT,
-
- Error: DANGER_FG,
-
- Keyword: RED_3,
- Keyword.Constant: BLUE_2,
- Keyword.Pseudo: BLUE_2,
-
- Name: FG_DEFAULT,
- Name.Class: "bold "+ORANGE_3,
- Name.Constant: "bold "+BLUE_2,
- Name.Decorator: 'bold '+PURPLE_2,
- Name.Entity: ORANGE_2,
- Name.Exception: "bold "+ORANGE_3,
- Name.Function: 'bold '+PURPLE_2,
- Name.Label: "bold "+BLUE_2,
- Name.Namespace: RED_3,
- Name.Property: BLUE_2,
- Name.Tag: GREEN_1,
- Name.Variable: BLUE_2,
-
- Literal: BLUE_1,
- Literal.Date: BLUE_2,
- String: BLUE_1,
- String.Affix: BLUE_2,
- String.Delimiter: BLUE_2,
- String.Escape: BLUE_2,
- String.Heredoc: BLUE_2,
- String.Regex: BLUE_2,
- Number: BLUE_1,
-
- Comment: 'italic '+GRAY_3,
- Comment.Preproc: "bold " + GRAY_3,
- Comment.Special: "bold italic " + GRAY_3,
-
- Operator: 'bold ' + RED_3,
-
- Generic: FG_DEFAULT,
- Generic.Deleted: f"bg:{RED_9} {RED_2}",
- Generic.Emph: "italic",
- Generic.Error: RED_2,
- Generic.Heading: "bold "+BLUE_2,
- Generic.Inserted: f'bg:{GREEN_7} {GREEN_2}',
- Generic.Output: GRAY_3,
- Generic.Prompt: GRAY_3,
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Subheading: BLUE_2,
- Generic.Traceback: RED_3,
- Generic.Underline: "underline",
-
- Text.Whitespace: FG_SUBTLE,
- }
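For context, a style class like GhDarkStyle above is not normally imported directly; formatters accept the registered style name ('github-dark' here). A minimal usage sketch with the standard Pygments API (the snippet itself is not part of this diff):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

# 'github-dark' is the name declared by GhDarkStyle above.
html = highlight('print("hi")', PythonLexer(), HtmlFormatter(style='github-dark'))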
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/gruvbox.py b/venv/lib/python3.11/site-packages/pygments/styles/gruvbox.py
deleted file mode 100644
index c05f314..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/gruvbox.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""
- pygments.styles.gruvbox
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of the "gruvbox" vim theme.
- https://github.com/morhetz/gruvbox
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic
-
-
-__all__ = ['GruvboxDarkStyle', 'GruvboxLightStyle']
-
-
-class GruvboxDarkStyle(Style):
- """
- Pygments version of the "gruvbox" dark vim theme.
- """
-
- name = 'gruvbox-dark'
-
- background_color = '#282828'
- highlight_color = '#ebdbb2'
-
- styles = {
- Token: '#dddddd',
-
- Comment: 'italic #928374',
- Comment.Preproc: '#8ec07c',
- Comment.Special: 'bold italic #ebdbb2',
-
- Keyword: '#fb4934',
- Operator.Word: '#fb4934',
-
- String: '#b8bb26',
- String.Escape: '#fe8019',
-
- Number: '#d3869b',
-
- Name.Builtin: '#fe8019',
- Name.Variable: '#83a598',
- Name.Constant: '#d3869b',
- Name.Class: '#8ec07c',
- Name.Function: '#8ec07c',
- Name.Namespace: '#8ec07c',
- Name.Exception: '#fb4934',
- Name.Tag: '#8ec07c',
- Name.Attribute: '#fabd2f',
- Name.Decorator: '#fb4934',
-
- Generic.Heading: 'bold #ebdbb2',
- Generic.Subheading: 'underline #ebdbb2',
- Generic.Deleted: 'bg:#fb4934 #282828',
- Generic.Inserted: 'bg:#b8bb26 #282828',
- Generic.Error: '#fb4934',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: '#a89984',
- Generic.Output: '#f2e5bc',
- Generic.Traceback: '#fb4934',
-
- Error: 'bg:#fb4934 #282828'
- }
-
-
-class GruvboxLightStyle(Style):
- """
- Pygments version of the "gruvbox" Light vim theme.
- """
-
- name = 'gruvbox-light'
-
- background_color = '#fbf1c7'
- highlight_color = '#3c3836'
-
- styles = {
- Comment: 'italic #928374',
- Comment.Preproc: '#427b58',
- Comment.Special: 'bold italic #3c3836',
-
- Keyword: '#9d0006',
- Operator.Word: '#9d0006',
-
- String: '#79740e',
- String.Escape: '#af3a03',
-
- Number: '#8f3f71',
-
- Name.Builtin: '#af3a03',
- Name.Variable: '#076678',
- Name.Constant: '#8f3f71',
- Name.Class: '#427b58',
- Name.Function: '#427b58',
- Name.Namespace: '#427b58',
- Name.Exception: '#9d0006',
- Name.Tag: '#427b58',
- Name.Attribute: '#b57614',
- Name.Decorator: '#9d0006',
-
- Generic.Heading: 'bold #3c3836',
- Generic.Subheading: 'underline #3c3836',
- Generic.Deleted: 'bg:#9d0006 #fbf1c7',
- Generic.Inserted: 'bg:#79740e #fbf1c7',
- Generic.Error: '#9d0006',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#7c6f64',
- Generic.Output: '#32302f',
- Generic.Traceback: '#9d0006',
-
- Error: 'bg:#9d0006 #fbf1c7'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/igor.py b/venv/lib/python3.11/site-packages/pygments/styles/igor.py
deleted file mode 100644
index 797e367..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/igor.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
- pygments.styles.igor
- ~~~~~~~~~~~~~~~~~~~~
-
- Igor Pro default style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String
-
-
-__all__ = ['IgorStyle']
-
-
-class IgorStyle(Style):
- """
- Pygments version of the official colors for Igor Pro procedures.
- """
-
- name = 'igor'
-
- styles = {
- Comment: 'italic #FF0000',
- Keyword: '#0000FF',
- Name.Function: '#C34E00',
- Name.Decorator: '#CC00A3',
- Name.Class: '#007575',
- String: '#009C00'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/inkpot.py b/venv/lib/python3.11/site-packages/pygments/styles/inkpot.py
deleted file mode 100644
index 817d97f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/inkpot.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""
- pygments.styles.inkpot
- ~~~~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Text, Other, Keyword, Name, Comment, String, \
- Error, Number, Operator, Generic, Whitespace, Punctuation
-
-
-__all__ = ['InkPotStyle']
-
-
-class InkPotStyle(Style):
- name = 'inkpot'
-
- background_color = "#1e1e27"
-
- styles = {
- Text: "#cfbfad",
- Other: "#cfbfad",
- Whitespace: "#434357",
- Comment: "#cd8b00",
- Comment.Preproc: "#409090",
- Comment.PreprocFile: "bg:#404040 #ffcd8b",
- Comment.Special: "#808bed",
-
- Keyword: "#808bed",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "#ff8bff",
-
- Operator: "#666666",
-
- Punctuation: "#cfbfad",
-
- Name: "#cfbfad",
- Name.Attribute: "#cfbfad",
- Name.Builtin.Pseudo: '#ffff00',
- Name.Builtin: "#808bed",
- Name.Class: "#ff8bff",
- Name.Constant: "#409090",
- Name.Decorator: "#409090",
- Name.Exception: "#ff0000",
- Name.Function: "#c080d0",
- Name.Label: "#808bed",
- Name.Namespace: "#ff0000",
- Name.Variable: "#cfbfad",
-
- String: "bg:#404040 #ffcd8b",
- String.Doc: "#808bed",
-
- Number: "#f0ad6d",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "bg:#6e2e2e #ffffff"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/lightbulb.py b/venv/lib/python3.11/site-packages/pygments/styles/lightbulb.py
deleted file mode 100644
index 25c4b15..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/lightbulb.py
+++ /dev/null
@@ -1,110 +0,0 @@
-"""
- pygments.styles.lightbulb
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- A minimal dark theme based on the Lightbulb theme for VSCode.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import (
- Comment,
- Error,
- Generic,
- Keyword,
- Literal,
- Name,
- Number,
- Operator,
- Punctuation,
- String,
- Token,
-)
-
-
-__all__ = ['LightbulbStyle']
-
-
-COLORS = {
- "bg": "#1d2331",
- "blue_1": "#73D0FF",
- "gray_1": "#7e8aa1",
- "gray_2": "#3c4354",
- "gray_3": "#6e7681",
- "red_1": "#f88f7f",
- "red_2": "#3d1e20",
- "orange_1": "#FFAD66",
- "orange_2": "#F29E74",
- "yellow_1": "#FFD173",
- "white": "#d4d2c8",
- "magenta_1": "#DFBFFF",
- "green_1": "#D5FF80",
- "green_2": "#19362c",
- "cyan_1": "#95E6CB",
-}
-
-
-class LightbulbStyle(Style):
- """
- A minimal dark theme based on the Lightbulb theme for VSCode.
- """
-
- name = 'lightbulb'
-
- background_color = COLORS['bg']
- highlight_color = COLORS['gray_3']
-
- line_number_color = COLORS['gray_2']
- line_number_special_color = COLORS['gray_2']
-
- styles = {
- Comment: COLORS["gray_1"],
- Comment.Hashbang: "italic " + COLORS['red_1'],
- Comment.Preproc: "bold " + COLORS['orange_1'],
- Comment.Special: "italic " + COLORS['gray_1'],
- Error: COLORS['red_1'],
- Generic.Deleted: f"bg:{COLORS['red_2']} #f88f7f",
- Generic.Emph: "italic",
- Generic.Error: "#f88f7f",
- Generic.Inserted: f"bg:{COLORS['green_2']} #6ad4af",
- Generic.Output: COLORS['gray_1'],
- Generic.Strong: "bold",
- Generic.Traceback: COLORS['red_1'],
- Keyword: COLORS['orange_1'],
- Keyword.Constant: COLORS['orange_1'],
- Keyword.Declaration: COLORS['orange_1'],
- Keyword.Namespace: COLORS['orange_1'],
- Keyword.Reserved: COLORS['orange_1'],
- Keyword.Type: COLORS['blue_1'],
- Literal: COLORS['green_1'],
- Name: COLORS['white'],
- Name.Attribute: COLORS['yellow_1'],
- Name.Builtin: COLORS['yellow_1'],
- Name.Builtin.Pseudo: "#5CCFE6",
- Name.Class: COLORS['blue_1'],
- Name.Constant: COLORS['yellow_1'],
- Name.Decorator: "bold italic " + COLORS['gray_1'],
- Name.Entity: COLORS['cyan_1'],
- Name.Exception: COLORS['blue_1'],
- Name.Function: COLORS['yellow_1'],
- Name.Function.Magic: COLORS['yellow_1'],
- Name.Other: COLORS['white'],
- Name.Property: COLORS['yellow_1'],
- Name.Tag: "#5CCFE6",
- Name.Variable: COLORS['white'],
- Number: COLORS['magenta_1'],
- Operator: COLORS['orange_1'],
- Operator.Word: COLORS['orange_1'],
- Punctuation: COLORS['white'],
- String: COLORS['green_1'],
- String.Affix: COLORS['orange_2'],
- String.Doc: COLORS['gray_1'],
- String.Escape: COLORS['cyan_1'],
- String.Interpol: COLORS['cyan_1'],
- String.Other: COLORS['cyan_1'],
- String.Regex: COLORS['cyan_1'],
- String.Symbol: COLORS['magenta_1'],
- Token: COLORS['white'],
- }
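The style strings built above from the COLORS palette ("bold " + COLORS['orange_1'], f"bg:{...} ...") are parsed by the Style machinery into per-token dictionaries. A short sketch of how to inspect the parsed result (standard Pygments API, not from this diff):

from pygments.styles import get_style_by_name
from pygments.token import Keyword

style = get_style_by_name('lightbulb')
# Returns a dict with 'color', 'bold', 'italic', 'bgcolor', ... fields.
print(style.style_for_token(Keyword))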
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/lilypond.py b/venv/lib/python3.11/site-packages/pygments/styles/lilypond.py
deleted file mode 100644
index 1218ec9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/lilypond.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
- pygments.styles.lilypond
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- LilyPond-specific style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token
-
-
-__all__ = ['LilyPondStyle']
-
-
-class LilyPondStyle(Style):
- """
- Style for the LilyPond language.
-
- .. versionadded:: 2.11
- """
-
- name = 'lilypond'
-
- # Don't show it in the gallery; it's intended for LilyPond
- # input only and doesn't produce good output for Python code.
- web_style_gallery_exclude = True
-
- styles = {
- Token.Text: "",
- Token.Keyword: "bold",
- Token.Comment: "italic #A3AAB2",
- Token.String: "#AB0909",
- Token.String.Escape: "#C46C6C",
- Token.String.Symbol: "noinherit",
- Token.Pitch: "", #"#911520",
- Token.Number: "#976806", # includes durations
- # A bare 11 is not distinguishable from a number, so we highlight
- # it the same way.
- Token.ChordModifier: "#976806",
- Token.Name.Lvalue: "#08547A",
- Token.Name.BackslashReference: "#08547A",
- Token.Name.Builtin.MusicCommand: "bold #08547A",
- Token.Name.Builtin.PaperVariable: "bold #6C5A05",
- Token.Name.Builtin.HeaderVariable: "bold #6C5A05",
- Token.Name.Builtin.MusicFunction: "bold #08547A",
- Token.Name.Builtin.Clef: "bold #08547A",
- Token.Name.Builtin.Scale: "bold #08547A",
- Token.Name.Builtin.RepeatType: "#08547A",
- Token.Name.Builtin.Dynamic: "#68175A",
- Token.Name.Builtin.Articulation: "#68175A",
- Token.Name.Builtin.SchemeFunction: "bold #A83401",
- Token.Name.Builtin.SchemeBuiltin: "bold",
- Token.Name.Builtin.MarkupCommand: "bold #831E71",
- Token.Name.Builtin.Context: "bold #038B8B",
- Token.Name.Builtin.ContextProperty: "#038B8B",
- Token.Name.Builtin.Grob: "bold #0C7441",
- Token.Name.Builtin.GrobProperty: "#0C7441",
- Token.Name.Builtin.Translator: "bold #6200A4",
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/lovelace.py b/venv/lib/python3.11/site-packages/pygments/styles/lovelace.py
deleted file mode 100644
index 279ff07..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/lovelace.py
+++ /dev/null
@@ -1,100 +0,0 @@
-"""
- pygments.styles.lovelace
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lovelace by Miikka Salminen
-
- Pygments style by Miikka Salminen (https://github.com/miikkas)
- A desaturated, somewhat subdued style created for the Lovelace interactive
- learning environment.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Punctuation, Generic, Whitespace
-
-
-__all__ = ['LovelaceStyle']
-
-
-class LovelaceStyle(Style):
- """
- The style used in Lovelace interactive learning environment. Tries to avoid
- the "angry fruit salad" effect with desaturated and dim colours.
- """
- name = 'lovelace'
-
- _KW_BLUE = '#2838b0'
- _NAME_GREEN = '#388038'
- _DOC_ORANGE = '#b85820'
- _OW_PURPLE = '#a848a8'
- _FUN_BROWN = '#785840'
- _STR_RED = '#b83838'
- _CLS_CYAN = '#287088'
- _ESCAPE_LIME = '#709030'
- _LABEL_CYAN = '#289870'
- _EXCEPT_YELLOW = '#908828'
-
- styles = {
- Whitespace: '#a89028',
- Comment: 'italic #888888',
- Comment.Hashbang: _CLS_CYAN,
- Comment.Multiline: '#888888',
- Comment.Preproc: 'noitalic '+_LABEL_CYAN,
-
- Keyword: _KW_BLUE,
- Keyword.Constant: 'italic #444444',
- Keyword.Declaration: 'italic',
- Keyword.Type: 'italic',
-
- Operator: '#666666',
- Operator.Word: _OW_PURPLE,
-
- Punctuation: '#888888',
-
- Name.Attribute: _NAME_GREEN,
- Name.Builtin: _NAME_GREEN,
- Name.Builtin.Pseudo: 'italic',
- Name.Class: _CLS_CYAN,
- Name.Constant: _DOC_ORANGE,
- Name.Decorator: _CLS_CYAN,
- Name.Entity: _ESCAPE_LIME,
- Name.Exception: _EXCEPT_YELLOW,
- Name.Function: _FUN_BROWN,
- Name.Function.Magic: _DOC_ORANGE,
- Name.Label: _LABEL_CYAN,
- Name.Namespace: _LABEL_CYAN,
- Name.Tag: _KW_BLUE,
- Name.Variable: '#b04040',
- Name.Variable.Global:_EXCEPT_YELLOW,
- Name.Variable.Magic: _DOC_ORANGE,
-
- String: _STR_RED,
- String.Affix: '#444444',
- String.Char: _OW_PURPLE,
- String.Delimiter: _DOC_ORANGE,
- String.Doc: 'italic '+_DOC_ORANGE,
- String.Escape: _ESCAPE_LIME,
- String.Interpol: 'underline',
- String.Other: _OW_PURPLE,
- String.Regex: _OW_PURPLE,
-
- Number: '#444444',
-
- Generic.Deleted: '#c02828',
- Generic.Emph: 'italic',
- Generic.Error: '#c02828',
- Generic.Heading: '#666666',
- Generic.Subheading: '#444444',
- Generic.Inserted: _NAME_GREEN,
- Generic.Output: '#666666',
- Generic.Prompt: '#444444',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Traceback: _KW_BLUE,
-
- Error: 'bg:'+_OW_PURPLE,
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/manni.py b/venv/lib/python3.11/site-packages/pygments/styles/manni.py
deleted file mode 100644
index 1eb0e69..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/manni.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
- pygments.styles.manni
- ~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by the terminal highlighting style.
-
- This is a port of the style used in the `php port`_ of pygments
- by Manni. The style is called 'default' there.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['ManniStyle']
-
-
-class ManniStyle(Style):
- """
- A colorful style, inspired by the terminal highlighting style.
- """
- name = 'manni'
-
- background_color = '#f0f3f3'
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: 'italic #0099FF',
- Comment.Preproc: 'noitalic #009999',
- Comment.Special: 'bold',
-
- Keyword: 'bold #006699',
- Keyword.Pseudo: 'nobold',
- Keyword.Type: '#007788',
-
- Operator: '#555555',
- Operator.Word: 'bold #000000',
-
- Name.Builtin: '#336666',
- Name.Function: '#CC00FF',
- Name.Class: 'bold #00AA88',
- Name.Namespace: 'bold #00CCFF',
- Name.Exception: 'bold #CC0000',
- Name.Variable: '#003333',
- Name.Constant: '#336600',
- Name.Label: '#9999FF',
- Name.Entity: 'bold #999999',
- Name.Attribute: '#330099',
- Name.Tag: 'bold #330099',
- Name.Decorator: '#9999FF',
-
- String: '#CC3300',
- String.Doc: 'italic',
- String.Interpol: '#AA0000',
- String.Escape: 'bold #CC3300',
- String.Regex: '#33AAAA',
- String.Symbol: '#FFCC33',
- String.Other: '#CC3300',
-
- Number: '#FF6600',
-
- Generic.Heading: 'bold #003300',
- Generic.Subheading: 'bold #003300',
- Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
- Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
- Generic.Error: '#FF0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: 'bold #000099',
- Generic.Output: '#AAAAAA',
- Generic.Traceback: '#99CC66',
-
- Error: 'bg:#FFAAAA #AA0000'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/material.py b/venv/lib/python3.11/site-packages/pygments/styles/material.py
deleted file mode 100644
index db0952d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/material.py
+++ /dev/null
@@ -1,124 +0,0 @@
-"""
- pygments.styles.material
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Mimic the Material theme color scheme.
-
- https://github.com/material-theme/vsc-material-theme
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Escape, \
- Error, Text, Number, Operator, Generic, Punctuation, Literal
-
-
-__all__ = ['MaterialStyle']
-
-
-class MaterialStyle(Style):
- """
- This style mimics the Material Theme color scheme.
- """
- name = 'material'
-
- dark_teal = '#263238'
- white = '#FFFFFF'
- black = '#000000'
- red = '#FF5370'
- orange = '#F78C6C'
- yellow = '#FFCB6B'
- green = '#C3E88D'
- cyan = '#89DDFF'
- blue = '#82AAFF'
- paleblue = '#B2CCD6'
- purple = '#C792EA'
- brown = '#C17E70'
- pink = '#F07178'
- violet = '#BB80B3'
- foreground = '#EEFFFF'
- faded = '#546E7A'
-
- background_color = dark_teal
- highlight_color = '#2C3B41'
- line_number_color = '#37474F'
- line_number_background_color = dark_teal
- line_number_special_color = '#607A86'
- line_number_special_background_color = dark_teal
-
- styles = {
- Text: foreground,
- Escape: cyan,
- Error: red,
-
- Keyword: violet,
- Keyword.Constant: cyan,
- Keyword.Declaration: violet,
- Keyword.Namespace: 'italic ' + cyan,
- Keyword.Pseudo: cyan,
- Keyword.Type: violet,
-
- Name: foreground,
- Name.Attribute: violet,
- Name.Builtin: blue,
- Name.Builtin.Pseudo: cyan,
- Name.Class: yellow,
- Name.Constant: foreground,
- Name.Decorator: blue,
- Name.Entity: cyan,
- Name.Exception: yellow,
- Name.Function: blue,
- Name.Function.Magic: blue,
- Name.Label: blue,
- Name.Property: yellow,
- Name.Namespace: yellow,
- Name.Other: foreground,
- Name.Tag: red,
- Name.Variable: cyan,
- Name.Variable.Class: cyan,
- Name.Variable.Global: cyan,
- Name.Variable.Instance: cyan,
- Name.Variable.Magic: blue,
-
- Literal: green,
- Literal.Date: green,
-
- String: green,
- String.Affix: violet,
- String.Backtick: green,
- String.Char: green,
- String.Delimiter: foreground,
- String.Doc: 'italic ' + faded,
- String.Double: green,
- String.Escape: foreground,
- String.Heredoc: green,
- String.Interpol: cyan,
- String.Other: green,
- String.Regex: cyan,
- String.Single: green,
- String.Symbol: cyan,
-
- Number: orange,
-
- Operator: cyan,
- Operator.Word: 'italic ' + cyan,
-
- Punctuation: cyan,
-
- Comment: 'italic ' + faded,
-
- Generic: foreground,
- Generic.Deleted: red,
- Generic.Emph: cyan,
- Generic.Error: red,
- Generic.Heading: green,
- Generic.Inserted: green,
- Generic.Output: faded,
- Generic.Prompt: yellow,
- Generic.Strong: red,
- Generic.EmphStrong: yellow,
- Generic.Subheading: cyan,
- Generic.Traceback: red,
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/monokai.py b/venv/lib/python3.11/site-packages/pygments/styles/monokai.py
deleted file mode 100644
index 2ae51bc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/monokai.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""
- pygments.styles.monokai
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Mimic the Monokai color scheme. Based on tango.py.
-
- http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Token, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-__all__ = ['MonokaiStyle']
-
-
-class MonokaiStyle(Style):
- """
- This style mimics the Monokai color scheme.
- """
- name = 'monokai'
-
- background_color = "#272822"
- highlight_color = "#49483e"
-
- styles = {
- # No corresponding class for the following:
- Token: "#f8f8f2", # class: ''
- Whitespace: "", # class: 'w'
- Error: "#ed007e bg:#1e0010", # class: 'err'
- Other: "", # class 'x'
-
- Comment: "#959077", # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: "#66d9ef", # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: "#ff4689", # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: "", # class: 'kt'
-
- Operator: "#ff4689", # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: "#f8f8f2", # class: 'p'
-
- Name: "#f8f8f2", # class: 'n'
- Name.Attribute: "#a6e22e", # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: "#a6e22e", # class: 'nc' - to be revised
- Name.Constant: "#66d9ef", # class: 'no' - to be revised
- Name.Decorator: "#a6e22e", # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: "#a6e22e", # class: 'ne'
- Name.Function: "#a6e22e", # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: "", # class: 'nn' - to be revised
- Name.Other: "#a6e22e", # class: 'nx'
- Name.Tag: "#ff4689", # class: 'nt' - like a keyword
- Name.Variable: "", # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: "#ae81ff", # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: "#ae81ff", # class: 'l'
- Literal.Date: "#e6db74", # class: 'ld'
-
- String: "#e6db74", # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: "", # class: 'sc'
- String.Doc: "", # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: "#ae81ff", # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: "", # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
-
- Generic: "", # class: 'g'
- Generic.Deleted: "#ff4689", # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "", # class: 'gh'
- Generic.Inserted: "#a6e22e", # class: 'gi'
- Generic.Output: "#66d9ef", # class: 'go'
- Generic.Prompt: "bold #ff4689", # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.EmphStrong: "bold italic", # class: 'ges'
- Generic.Subheading: "#959077", # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
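The short class names in the comments above ('c', 'k', 'gd', ...) are the CSS classes that HtmlFormatter emits for each token type; get_style_defs() renders a style dict like this one into CSS rules for those classes. A minimal sketch (standard Pygments API, not from this diff):

from pygments.formatters import HtmlFormatter

css = HtmlFormatter(style='monokai').get_style_defs('.highlight')
print(css[:400])  # the output contains rules such as ".highlight .c { ... }"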
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/murphy.py b/venv/lib/python3.11/site-packages/pygments/styles/murphy.py
deleted file mode 100644
index 0c5cc6d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/murphy.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""
- pygments.styles.murphy
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Murphy's style from CodeRay.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['MurphyStyle']
-
-
-class MurphyStyle(Style):
- """
- Murphy's style from CodeRay.
- """
- name = 'murphy'
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "#666 italic",
- Comment.Preproc: "#579 noitalic",
- Comment.Special: "#c00 bold",
-
- Keyword: "bold #289",
- Keyword.Pseudo: "#08f",
- Keyword.Type: "#66f",
-
- Operator: "#333",
- Operator.Word: "bold #000",
-
- Name.Builtin: "#072",
- Name.Function: "bold #5ed",
- Name.Class: "bold #e9e",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "bold #F00",
- Name.Variable: "#036",
- Name.Variable.Instance: "#aaf",
- Name.Variable.Class: "#ccf",
- Name.Variable.Global: "#f84",
- Name.Constant: "bold #5ed",
- Name.Label: "bold #970",
- Name.Entity: "#800",
- Name.Attribute: "#007",
- Name.Tag: "#070",
- Name.Decorator: "bold #555",
-
- String: "bg:#e0e0ff",
- String.Char: "#88F bg:",
- String.Doc: "#D42 bg:",
- String.Interpol: "bg:#eee",
- String.Escape: "bold #666",
- String.Regex: "bg:#e0e0ff #000",
- String.Symbol: "#fc8 bg:",
- String.Other: "#f88",
-
- Number: "bold #60E",
- Number.Integer: "bold #66f",
- Number.Float: "bold #60E",
- Number.Hex: "bold #058",
- Number.Oct: "bold #40E",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "#F00 bg:#FAA"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/native.py b/venv/lib/python3.11/site-packages/pygments/styles/native.py
deleted file mode 100644
index 11f83db..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/native.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
- pygments.styles.native
- ~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of my "native" vim theme.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-
-
-__all__ = ['NativeStyle']
-
-
-class NativeStyle(Style):
- """
- Pygments version of the "native" vim theme.
- """
- name = 'native'
-
- background_color = '#202020'
- highlight_color = '#404040'
- line_number_color = '#aaaaaa'
-
- styles = {
- Token: '#d0d0d0',
- Whitespace: '#666666',
-
- Comment: 'italic #ababab',
- Comment.Preproc: 'noitalic bold #ff3a3a',
- Comment.Special: 'noitalic bold #e50808 bg:#520000',
-
- Keyword: 'bold #6ebf26',
- Keyword.Pseudo: 'nobold',
- Operator.Word: 'bold #6ebf26',
-
- String: '#ed9d13',
- String.Other: '#ffa500',
-
- Number: '#51b2fd',
-
- Name.Builtin: '#2fbccd',
- Name.Variable: '#40ffff',
- Name.Constant: '#40ffff',
- Name.Class: 'underline #71adff',
- Name.Function: '#71adff',
- Name.Namespace: 'underline #71adff',
- Name.Exception: '#bbbbbb',
- Name.Tag: 'bold #6ebf26',
- Name.Attribute: '#bbbbbb',
- Name.Decorator: '#ffa500',
-
- Generic.Heading: 'bold #ffffff',
- Generic.Subheading: 'underline #ffffff',
- Generic.Deleted: '#d22323',
- Generic.Inserted: '#589819',
- Generic.Error: '#d22323',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: '#aaaaaa',
- Generic.Output: '#cccccc',
- Generic.Traceback: '#d22323',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/nord.py b/venv/lib/python3.11/site-packages/pygments/styles/nord.py
deleted file mode 100644
index e5cff24..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/nord.py
+++ /dev/null
@@ -1,156 +0,0 @@
-"""
- pygments.styles.nord
- ~~~~~~~~~~~~~~~~~~~~
-
- pygments version of the "nord" theme by Arctic Ice Studio
- https://www.nordtheme.com/
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Number, \
- Operator, Generic, Whitespace, Punctuation, Text, Token
-
-
-__all__ = ['NordStyle', 'NordDarkerStyle']
-
-
-class NordStyle(Style):
- """
- Pygments version of the "nord" theme by Arctic Ice Studio.
- """
- name = 'nord'
-
- line_number_color = "#D8DEE9"
- line_number_background_color = "#242933"
- line_number_special_color = "#242933"
- line_number_special_background_color = "#D8DEE9"
-
- background_color = "#2E3440"
- highlight_color = "#3B4252"
-
- styles = {
- Token: "#d8dee9",
-
- Whitespace: '#d8dee9',
- Punctuation: '#eceff4',
-
- Comment: 'italic #616e87',
- Comment.Preproc: '#5e81ac',
-
- Keyword: 'bold #81a1c1',
- Keyword.Pseudo: 'nobold #81a1c1',
- Keyword.Type: 'nobold #81a1c1',
-
- Operator: 'bold #81a1c1',
- Operator.Word: 'bold #81a1c1',
-
- Name: '#d8dee9',
- Name.Builtin: '#81a1c1',
- Name.Function: '#88c0d0',
- Name.Class: '#8fbcbb',
- Name.Namespace: '#8fbcbb',
- Name.Exception: '#bf616a',
- Name.Variable: '#d8dee9',
- Name.Constant: '#8fbcbb',
- Name.Entity: '#d08770',
- Name.Attribute: '#8fbcbb',
- Name.Tag: '#81a1c1',
- Name.Decorator: '#d08770',
-
- String: '#a3be8c',
- String.Doc: '#616e87',
- String.Interpol: '#a3be8c',
- String.Escape: '#ebcb8b',
- String.Regex: '#ebcb8b',
- String.Symbol: '#a3be8c',
- String.Other: '#a3be8c',
-
- Number: '#b48ead',
-
- Generic.Heading: 'bold #88c0d0',
- Generic.Subheading: 'bold #88c0d0',
- Generic.Deleted: '#bf616a',
- Generic.Inserted: '#a3be8c',
- Generic.Error: '#bf616a',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: 'bold #616e88',
- Generic.Output: '#d8dee9',
- Generic.Traceback: '#bf616a',
-
- Error: '#bf616a',
- Text: '#d8dee9',
- }
-
-
-class NordDarkerStyle(Style):
- """
- Pygments version of a darker "nord" theme by Arctic Ice Studio
- """
- name = 'nord-darker'
-
- line_number_color = "#D8DEE9"
- line_number_background_color = "#242933"
- line_number_special_color = "#242933"
- line_number_special_background_color = "#D8DEE9"
-
- background_color = "#242933"
- highlight_color = "#3B4252"
-
- styles = {
- Token: "#d8dee9",
-
- Whitespace: '#d8dee9',
- Punctuation: '#eceff4',
-
- Comment: 'italic #616e87',
- Comment.Preproc: '#5e81ac',
-
- Keyword: 'bold #81a1c1',
- Keyword.Pseudo: 'nobold #81a1c1',
- Keyword.Type: 'nobold #81a1c1',
-
- Operator: 'bold #81a1c1',
- Operator.Word: 'bold #81a1c1',
-
- Name: '#d8dee9',
- Name.Builtin: '#81a1c1',
- Name.Function: '#88c0d0',
- Name.Class: '#8fbcbb',
- Name.Namespace: '#8fbcbb',
- Name.Exception: '#bf616a',
- Name.Variable: '#d8dee9',
- Name.Constant: '#8fbcbb',
- Name.Entity: '#d08770',
- Name.Attribute: '#8fbcbb',
- Name.Tag: '#81a1c1',
- Name.Decorator: '#d08770',
-
- String: '#a3be8c',
- String.Doc: '#616e87',
- String.Interpol: '#a3be8c',
- String.Escape: '#ebcb8b',
- String.Regex: '#ebcb8b',
- String.Symbol: '#a3be8c',
- String.Other: '#a3be8c',
-
- Number: '#b48ead',
-
- Generic.Heading: 'bold #88c0d0',
- Generic.Subheading: 'bold #88c0d0',
- Generic.Deleted: '#bf616a',
- Generic.Inserted: '#a3be8c',
- Generic.Error: '#bf616a',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: 'bold #616e88',
- Generic.Output: '#d8dee9',
- Generic.Traceback: '#bf616a',
-
- Error: '#bf616a',
- Text: '#d8dee9',
- }
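NordDarkerStyle's dict above is nearly identical to NordStyle's, differing mainly in background_color. A hypothetical, roughly equivalent formulation (illustrative only, not the upstream code) would subclass instead, similar to how SolarizedLightStyle later in this diff derives from SolarizedDarkStyle:

from pygments.styles.nord import NordStyle

class NordDarkerAlt(NordStyle):  # hypothetical name, not a registered style
    name = 'nord-darker'
    background_color = '#242933'  # everything else inherited from NordStyle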
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/onedark.py b/venv/lib/python3.11/site-packages/pygments/styles/onedark.py
deleted file mode 100644
index b145ce9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/onedark.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""
- pygments.styles.onedark
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- One Dark Theme for Pygments by Tobias Zoghaib (https://github.com/TobiZog)
-
- Inspired by one-dark-ui for the code editor Atom
- (https://atom.io/themes/one-dark-ui).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Token
-
-
-__all__ = ['OneDarkStyle']
-
-
-class OneDarkStyle(Style):
- """
- Theme inspired by One Dark Pro for Atom.
-
- .. versionadded:: 2.11
- """
- name = 'one-dark'
-
- background_color = '#282C34'
-
- styles = {
- Token: '#ABB2BF',
-
- Punctuation: '#ABB2BF',
- Punctuation.Marker: '#ABB2BF',
-
- Keyword: '#C678DD',
- Keyword.Constant: '#E5C07B',
- Keyword.Declaration: '#C678DD',
- Keyword.Namespace: '#C678DD',
- Keyword.Reserved: '#C678DD',
- Keyword.Type: '#E5C07B',
-
- Name: '#E06C75',
- Name.Attribute: '#E06C75',
- Name.Builtin: '#E5C07B',
- Name.Class: '#E5C07B',
- Name.Function: 'bold #61AFEF',
- Name.Function.Magic: 'bold #56B6C2',
- Name.Other: '#E06C75',
- Name.Tag: '#E06C75',
- Name.Decorator: '#61AFEF',
- Name.Variable.Class: '',
-
- String: '#98C379',
-
- Number: '#D19A66',
-
- Operator: '#56B6C2',
-
- Comment: '#7F848E'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/paraiso_dark.py b/venv/lib/python3.11/site-packages/pygments/styles/paraiso_dark.py
deleted file mode 100644
index 8cc231f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/paraiso_dark.py
+++ /dev/null
@@ -1,124 +0,0 @@
-"""
- pygments.styles.paraiso_dark
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Paraíso (Dark) by Jan T. Sott
-
- Pygments template by Jan T. Sott (https://github.com/idleberg)
- Created with Base16 Builder by Chris Kempson
- (https://github.com/chriskempson/base16-builder).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-__all__ = ['ParaisoDarkStyle']
-
-
-BACKGROUND = "#2f1e2e"
-CURRENT_LINE = "#41323f"
-SELECTION = "#4f424c"
-FOREGROUND = "#e7e9db"
-COMMENT = "#776e71"
-RED = "#ef6155"
-ORANGE = "#f99b15"
-YELLOW = "#fec418"
-GREEN = "#48b685"
-AQUA = "#5bc4bf"
-BLUE = "#06b6ef"
-PURPLE = "#815ba4"
-
-
-class ParaisoDarkStyle(Style):
- name = 'paraiso-dark'
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- styles = {
- # No corresponding class for the following:
- Text: FOREGROUND, # class: ''
- Whitespace: "", # class: 'w'
- Error: RED, # class: 'err'
- Other: "", # class 'x'
-
- Comment: COMMENT, # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: PURPLE, # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: AQUA, # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: YELLOW, # class: 'kt'
-
- Operator: AQUA, # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: FOREGROUND, # class: 'p'
-
- Name: FOREGROUND, # class: 'n'
- Name.Attribute: BLUE, # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: YELLOW, # class: 'nc' - to be revised
- Name.Constant: RED, # class: 'no' - to be revised
- Name.Decorator: AQUA, # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: RED, # class: 'ne'
- Name.Function: BLUE, # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: YELLOW, # class: 'nn' - to be revised
- Name.Other: BLUE, # class: 'nx'
- Name.Tag: AQUA, # class: 'nt' - like a keyword
- Name.Variable: RED, # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: ORANGE, # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: ORANGE, # class: 'l'
- Literal.Date: GREEN, # class: 'ld'
-
- String: GREEN, # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: FOREGROUND, # class: 'sc'
- String.Doc: COMMENT, # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: ORANGE, # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: ORANGE, # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: RED, # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
- Generic.Inserted: GREEN, # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "bold " + COMMENT, # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.EmphStrong: "bold italic", # class: 'ges'
- Generic.Subheading: "bold " + AQUA, # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/paraiso_light.py b/venv/lib/python3.11/site-packages/pygments/styles/paraiso_light.py
deleted file mode 100644
index ac76bad..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/paraiso_light.py
+++ /dev/null
@@ -1,124 +0,0 @@
-"""
- pygments.styles.paraiso_light
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Paraíso (Light) by Jan T. Sott
-
- Pygments template by Jan T. Sott (https://github.com/idleberg)
- Created with Base16 Builder by Chris Kempson
- (https://github.com/chriskempson/base16-builder).
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-__all__ = ['ParaisoLightStyle']
-
-
-BACKGROUND = "#e7e9db"
-CURRENT_LINE = "#b9b6b0"
-SELECTION = "#a39e9b"
-FOREGROUND = "#2f1e2e"
-COMMENT = "#8d8687"
-RED = "#ef6155"
-ORANGE = "#f99b15"
-YELLOW = "#fec418"
-GREEN = "#48b685"
-AQUA = "#5bc4bf"
-BLUE = "#06b6ef"
-PURPLE = "#815ba4"
-
-
-class ParaisoLightStyle(Style):
- name = 'paraiso-light'
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- styles = {
- # No corresponding class for the following:
- Text: FOREGROUND, # class: ''
- Whitespace: "", # class: 'w'
- Error: RED, # class: 'err'
- Other: "", # class 'x'
-
- Comment: COMMENT, # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: PURPLE, # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: AQUA, # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: YELLOW, # class: 'kt'
-
- Operator: AQUA, # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: FOREGROUND, # class: 'p'
-
- Name: FOREGROUND, # class: 'n'
- Name.Attribute: BLUE, # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: YELLOW, # class: 'nc' - to be revised
- Name.Constant: RED, # class: 'no' - to be revised
- Name.Decorator: AQUA, # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: RED, # class: 'ne'
- Name.Function: BLUE, # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: YELLOW, # class: 'nn' - to be revised
- Name.Other: BLUE, # class: 'nx'
- Name.Tag: AQUA, # class: 'nt' - like a keyword
- Name.Variable: RED, # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: ORANGE, # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: ORANGE, # class: 'l'
- Literal.Date: GREEN, # class: 'ld'
-
- String: GREEN, # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: FOREGROUND, # class: 'sc'
- String.Doc: COMMENT, # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: ORANGE, # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: ORANGE, # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: RED, # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
- Generic.Inserted: GREEN, # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "bold " + COMMENT, # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.EmphStrong: "bold italic", # class: 'ges'
- Generic.Subheading: "bold " + AQUA, # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/pastie.py b/venv/lib/python3.11/site-packages/pygments/styles/pastie.py
deleted file mode 100644
index 2892660..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/pastie.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
- pygments.styles.pastie
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the `pastie`_ default style.
-
- .. _pastie: http://pastie.caboo.se/
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['PastieStyle']
-
-
-class PastieStyle(Style):
- """
- Style similar to the pastie default style.
- """
-
- name = 'pastie'
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: '#888888',
- Comment.Preproc: 'bold #cc0000',
- Comment.Special: 'bg:#fff0f0 bold #cc0000',
-
- String: 'bg:#fff0f0 #dd2200',
- String.Regex: 'bg:#fff0ff #008800',
- String.Other: 'bg:#f0fff0 #22bb22',
- String.Symbol: '#aa6600',
- String.Interpol: '#3333bb',
- String.Escape: '#0044dd',
-
- Operator.Word: '#008800',
-
- Keyword: 'bold #008800',
- Keyword.Pseudo: 'nobold',
- Keyword.Type: '#888888',
-
- Name.Class: 'bold #bb0066',
- Name.Exception: 'bold #bb0066',
- Name.Function: 'bold #0066bb',
- Name.Property: 'bold #336699',
- Name.Namespace: 'bold #bb0066',
- Name.Builtin: '#003388',
- Name.Variable: '#336699',
- Name.Variable.Class: '#336699',
- Name.Variable.Instance: '#3333bb',
- Name.Variable.Global: '#dd7700',
- Name.Constant: 'bold #003366',
- Name.Tag: 'bold #bb0066',
- Name.Attribute: '#336699',
- Name.Decorator: '#555555',
- Name.Label: 'italic #336699',
-
- Number: 'bold #0000DD',
-
- Generic.Heading: '#333',
- Generic.Subheading: '#666',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/perldoc.py b/venv/lib/python3.11/site-packages/pygments/styles/perldoc.py
deleted file mode 100644
index 071821b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/perldoc.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""
- pygments.styles.perldoc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the style used in the `perldoc`_ code blocks.
-
- .. _perldoc: http://perldoc.perl.org/
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['PerldocStyle']
-
-
-class PerldocStyle(Style):
- """
- Style similar to the style used in the perldoc code blocks.
- """
-
- name = 'perldoc'
-
- background_color = '#eeeedd'
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: '#228B22',
- Comment.Preproc: '#1e889b',
- Comment.Special: '#8B008B bold',
-
- String: '#CD5555',
- String.Heredoc: '#1c7e71 italic',
- String.Regex: '#1c7e71',
- String.Other: '#cb6c20',
-
- Number: '#B452CD',
-
- Operator.Word: '#8B008B',
-
- Keyword: '#8B008B bold',
- Keyword.Type: '#00688B',
-
- Name.Class: '#008b45 bold',
- Name.Exception: '#008b45 bold',
- Name.Function: '#008b45',
- Name.Namespace: '#008b45 underline',
- Name.Variable: '#00688B',
- Name.Constant: '#00688B',
- Name.Decorator: '#707a7c',
- Name.Tag: '#8B008B bold',
- Name.Attribute: '#658b00',
- Name.Builtin: '#658b00',
-
- Generic.Heading: 'bold #000080',
- Generic.Subheading: 'bold #800080',
- Generic.Deleted: '#aa0000',
- Generic.Inserted: '#00aa00',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/rainbow_dash.py b/venv/lib/python3.11/site-packages/pygments/styles/rainbow_dash.py
deleted file mode 100644
index 82bfed5..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/rainbow_dash.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""
- pygments.styles.rainbow_dash
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- A bright and colorful syntax highlighting `theme`_.
-
- .. _theme: http://sanssecours.github.io/Rainbow-Dash.tmbundle
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Comment, Error, Generic, Name, Number, Operator, \
- String, Text, Whitespace, Keyword
-
-
-__all__ = ['RainbowDashStyle']
-
-
-BLUE_LIGHT = '#0080ff'
-BLUE = '#2c5dcd'
-GREEN = '#00cc66'
-GREEN_LIGHT = '#ccffcc'
-GREEN_NEON = '#00cc00'
-GREY = '#aaaaaa'
-GREY_LIGHT = '#cbcbcb'
-GREY_DARK = '#4d4d4d'
-PURPLE = '#5918bb'
-RED = '#cc0000'
-RED_DARK = '#c5060b'
-RED_LIGHT = '#ffcccc'
-RED_BRIGHT = '#ff0000'
-WHITE = '#ffffff'
-TURQUOISE = '#318495'
-ORANGE = '#ff8000'
-
-
-class RainbowDashStyle(Style):
- """
- A bright and colorful syntax highlighting theme.
- """
-
- name = 'rainbow_dash'
-
- background_color = WHITE
-
- styles = {
- Comment: 'italic {}'.format(BLUE_LIGHT),
- Comment.Preproc: 'noitalic',
- Comment.Special: 'bold',
-
- Error: 'bg:{} {}'.format(RED, WHITE),
-
- Generic.Deleted: 'border:{} bg:{}'.format(RED_DARK, RED_LIGHT),
- Generic.Emph: 'italic',
- Generic.Error: RED_BRIGHT,
- Generic.Heading: 'bold {}'.format(BLUE),
- Generic.Inserted: 'border:{} bg:{}'.format(GREEN_NEON, GREEN_LIGHT),
- Generic.Output: GREY,
- Generic.Prompt: 'bold {}'.format(BLUE),
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Subheading: 'bold {}'.format(BLUE),
- Generic.Traceback: RED_DARK,
-
- Keyword: 'bold {}'.format(BLUE),
- Keyword.Pseudo: 'nobold',
- Keyword.Type: PURPLE,
-
- Name.Attribute: 'italic {}'.format(BLUE),
- Name.Builtin: 'bold {}'.format(PURPLE),
- Name.Class: 'underline',
- Name.Constant: TURQUOISE,
- Name.Decorator: 'bold {}'.format(ORANGE),
- Name.Entity: 'bold {}'.format(PURPLE),
- Name.Exception: 'bold {}'.format(PURPLE),
- Name.Function: 'bold {}'.format(ORANGE),
- Name.Tag: 'bold {}'.format(BLUE),
-
- Number: 'bold {}'.format(PURPLE),
-
- Operator: BLUE,
- Operator.Word: 'bold',
-
- String: GREEN,
- String.Doc: 'italic',
- String.Escape: 'bold {}'.format(RED_DARK),
- String.Other: TURQUOISE,
- String.Symbol: 'bold {}'.format(RED_DARK),
-
- Text: GREY_DARK,
-
- Whitespace: GREY_LIGHT
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/rrt.py b/venv/lib/python3.11/site-packages/pygments/styles/rrt.py
deleted file mode 100644
index 3376d58..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/rrt.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
- pygments.styles.rrt
- ~~~~~~~~~~~~~~~~~~~
-
- pygments "rrt" theme, based on Zap and Emacs defaults.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Comment, Name, Keyword, String, Number
-
-
-__all__ = ['RrtStyle']
-
-
-class RrtStyle(Style):
- """
- Minimalistic "rrt" theme, based on Zap and Emacs defaults.
- """
-
- name = 'rrt'
-
- background_color = '#000000'
- highlight_color = '#0000ff'
-
- styles = {
- Token: '#dddddd',
- Comment: '#00ff00',
- Name.Function: '#ffff00',
- Name.Variable: '#eedd82',
- Name.Constant: '#7fffd4',
- Keyword: '#ff0000',
- Comment.Preproc: '#e5e5e5',
- String: '#87ceeb',
- Keyword.Type: '#ee82ee',
- Number: '#ff00ff',
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/sas.py b/venv/lib/python3.11/site-packages/pygments/styles/sas.py
deleted file mode 100644
index 4d19b22..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/sas.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""
- pygments.styles.sas
- ~~~~~~~~~~~~~~~~~~~
-
- Style inspired by SAS' enhanced program editor. Note: this is not
- meant to be a complete style; it's merely meant to mimic SAS'
- program editor syntax highlighting.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Other, Whitespace, Generic
-
-
-__all__ = ['SasStyle']
-
-
-class SasStyle(Style):
- """
- Style inspired by SAS' enhanced program editor. Note: this is not
- meant to be a complete style; it's merely meant to mimic SAS'
- program editor syntax highlighting.
- """
-
- name = 'sas'
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: 'italic #008800',
- String: '#800080',
- Number: 'bold #2c8553',
- Other: 'bg:#ffffe0',
- Keyword: '#2c2cff',
- Keyword.Reserved: 'bold #353580',
- Keyword.Constant: 'bold',
- Name.Builtin: '#2c2cff',
- Name.Function: 'bold italic',
- Name.Variable: 'bold #2c2cff',
- Generic: '#2c2cff',
- Generic.Emph: '#008800',
- Generic.Error: '#d30202',
- Error: 'bg:#e3d2d2 #a61717'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/solarized.py b/venv/lib/python3.11/site-packages/pygments/styles/solarized.py
deleted file mode 100644
index 6a1f812..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/solarized.py
+++ /dev/null
@@ -1,144 +0,0 @@
-"""
- pygments.styles.solarized
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Solarized by Camil Staps
-
- A Pygments style for the Solarized themes (licensed under MIT).
- See: https://github.com/altercation/solarized
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Comment, Error, Generic, Keyword, Name, Number, \
- Operator, String, Token
-
-
-__all__ = ['SolarizedLightStyle', 'SolarizedDarkStyle']
-
-
-def make_style(colors):
- return {
- Token: colors['base0'],
-
- Comment: 'italic ' + colors['base01'],
- Comment.Hashbang: colors['base01'],
- Comment.Multiline: colors['base01'],
- Comment.Preproc: 'noitalic ' + colors['magenta'],
- Comment.PreprocFile: 'noitalic ' + colors['base01'],
-
- Keyword: colors['green'],
- Keyword.Constant: colors['cyan'],
- Keyword.Declaration: colors['cyan'],
- Keyword.Namespace: colors['orange'],
- Keyword.Type: colors['yellow'],
-
- Operator: colors['base01'],
- Operator.Word: colors['green'],
-
- Name.Builtin: colors['blue'],
- Name.Builtin.Pseudo: colors['blue'],
- Name.Class: colors['blue'],
- Name.Constant: colors['blue'],
- Name.Decorator: colors['blue'],
- Name.Entity: colors['blue'],
- Name.Exception: colors['blue'],
- Name.Function: colors['blue'],
- Name.Function.Magic: colors['blue'],
- Name.Label: colors['blue'],
- Name.Namespace: colors['blue'],
- Name.Tag: colors['blue'],
- Name.Variable: colors['blue'],
- Name.Variable.Global:colors['blue'],
- Name.Variable.Magic: colors['blue'],
-
- String: colors['cyan'],
- String.Doc: colors['base01'],
- String.Regex: colors['orange'],
-
- Number: colors['cyan'],
-
- Generic: colors['base0'],
- Generic.Deleted: colors['red'],
- Generic.Emph: 'italic',
- Generic.Error: colors['red'],
- Generic.Heading: 'bold',
- Generic.Subheading: 'underline',
- Generic.Inserted: colors['green'],
- Generic.Output: colors['base0'],
- Generic.Prompt: 'bold ' + colors['blue'],
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Traceback: colors['blue'],
-
- Error: 'bg:' + colors['red'],
- }
-
-
-DARK_COLORS = {
- 'base03': '#002b36',
- 'base02': '#073642',
- 'base01': '#586e75',
- 'base00': '#657b83',
- 'base0': '#839496',
- 'base1': '#93a1a1',
- 'base2': '#eee8d5',
- 'base3': '#fdf6e3',
- 'yellow': '#b58900',
- 'orange': '#cb4b16',
- 'red': '#dc322f',
- 'magenta': '#d33682',
- 'violet': '#6c71c4',
- 'blue': '#268bd2',
- 'cyan': '#2aa198',
- 'green': '#859900',
-}
-
-LIGHT_COLORS = {
- 'base3': '#002b36',
- 'base2': '#073642',
- 'base1': '#586e75',
- 'base0': '#657b83',
- 'base00': '#839496',
- 'base01': '#93a1a1',
- 'base02': '#eee8d5',
- 'base03': '#fdf6e3',
- 'yellow': '#b58900',
- 'orange': '#cb4b16',
- 'red': '#dc322f',
- 'magenta': '#d33682',
- 'violet': '#6c71c4',
- 'blue': '#268bd2',
- 'cyan': '#2aa198',
- 'green': '#859900',
-}
-
-
-class SolarizedDarkStyle(Style):
- """
- The solarized style, dark.
- """
-
- name = 'solarized-dark'
-
- styles = make_style(DARK_COLORS)
- background_color = DARK_COLORS['base03']
- highlight_color = DARK_COLORS['base02']
- line_number_color = DARK_COLORS['base01']
- line_number_background_color = DARK_COLORS['base02']
-
-
-class SolarizedLightStyle(SolarizedDarkStyle):
- """
- The solarized style, light.
- """
-
- name = 'solarized-light'
-
- styles = make_style(LIGHT_COLORS)
- background_color = LIGHT_COLORS['base03']
- highlight_color = LIGHT_COLORS['base02']
- line_number_color = LIGHT_COLORS['base01']
- line_number_background_color = LIGHT_COLORS['base02']
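The make_style() factory above maps a palette dict onto the token rules, and each style class just binds a palette plus a few colors. A sketch of deriving a further variant the same way (hypothetical class and palette tweak, assuming the module is importable as pygments.styles.solarized):

from pygments.style import Style
from pygments.styles.solarized import make_style, DARK_COLORS

class SolarizedDarkHighContrast(Style):  # hypothetical example
    name = 'solarized-dark-high-contrast'  # not a registered Pygments style
    styles = make_style({**DARK_COLORS, 'base0': '#ffffff'})  # brighter default text
    background_color = DARK_COLORS['base03']
    highlight_color = DARK_COLORS['base02']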
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/staroffice.py b/venv/lib/python3.11/site-packages/pygments/styles/staroffice.py
deleted file mode 100644
index b2cfba9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/staroffice.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
- pygments.styles.staroffice
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the StarOffice style, also used in OpenOffice and LibreOffice.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Comment, Error, Literal, Name, Token
-
-
-__all__ = ['StarofficeStyle']
-
-
-class StarofficeStyle(Style):
- """
- Style similar to the StarOffice style, also used in OpenOffice and LibreOffice.
- """
- name = 'staroffice'
-
-
- styles = {
- Token: '#000080', # Blue
- Comment: '#696969', # DimGray
- Error: '#800000', # Maroon
- Literal: '#EE0000', # Red
- Name: '#008000', # Green
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/stata_dark.py b/venv/lib/python3.11/site-packages/pygments/styles/stata_dark.py
deleted file mode 100644
index c2d0f19..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/stata_dark.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
- pygments.styles.stata_dark
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Dark style inspired by Stata's do-file editor. Note this is not
- meant to be a complete style, just for Stata's file formats.
-
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Keyword, Name, Comment, String, Error, \
- Number, Operator, Whitespace, Generic
-
-
-__all__ = ['StataDarkStyle']
-
-
-class StataDarkStyle(Style):
- name = 'stata-dark'
-
- background_color = "#232629"
- highlight_color = "#49483e"
-
- styles = {
- Token: '#cccccc',
- Whitespace: '#bbbbbb',
- Error: 'bg:#e3d2d2 #a61717',
- String: '#51cc99',
- Number: '#4FB8CC',
- Operator: '',
- Name.Function: '#6a6aff',
- Name.Other: '#e2828e',
- Keyword: 'bold #7686bb',
- Keyword.Constant: '',
- Comment: 'italic #777777',
- Name.Variable: 'bold #7AB4DB',
- Name.Variable.Global: 'bold #BE646C',
- Generic.Prompt: '#ffffff',
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/stata_light.py b/venv/lib/python3.11/site-packages/pygments/styles/stata_light.py
deleted file mode 100644
index 5e03456..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/stata_light.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
- pygments.styles.stata_light
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Light Style inspired by Stata's do-file editor. Note this is not
- meant to be a complete style, just for Stata's file formats.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Whitespace, Text
-
-
-__all__ = ['StataLightStyle']
-
-
-class StataLightStyle(Style):
- """
- Light mode style inspired by Stata's do-file editor. This is not
- meant to be a complete style, just for use with Stata.
- """
-
- name = 'stata-light'
-
- styles = {
- Text: '#111111',
- Whitespace: '#bbbbbb',
- Error: 'bg:#e3d2d2 #a61717',
- String: '#7a2424',
- Number: '#2c2cff',
- Operator: '',
- Name.Function: '#2c2cff',
- Name.Other: '#be646c',
- Keyword: 'bold #353580',
- Keyword.Constant: '',
- Comment: 'italic #008800',
- Name.Variable: 'bold #35baba',
- Name.Variable.Global: 'bold #b5565e',
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/tango.py b/venv/lib/python3.11/site-packages/pygments/styles/tango.py
deleted file mode 100644
index 787a697..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/tango.py
+++ /dev/null
@@ -1,143 +0,0 @@
-"""
- pygments.styles.tango
- ~~~~~~~~~~~~~~~~~~~~~
-
- The Crunchy default style, inspired by the color palette from
- the Tango Icon Theme Guidelines.
-
- http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines
-
- Butter: #fce94f #edd400 #c4a000
- Orange: #fcaf3e #f57900 #ce5c00
- Chocolate: #e9b96e #c17d11 #8f5902
- Chameleon: #8ae234 #73d216 #4e9a06
- Sky Blue: #729fcf #3465a4 #204a87
- Plum: #ad7fa8 #75507b #5c35cc
- Scarlet Red:#ef2929 #cc0000 #a40000
- Aluminium: #eeeeec #d3d7cf #babdb6
- #888a85 #555753 #2e3436
-
- Not all of the above colors are used; other colors added:
- very light grey: #f8f8f8 (for background)
-
- This style can be used as a template as it includes all the known
- Token types, unlike most (if not all) of the styles included in the
- Pygments distribution.
-
- However, since Crunchy is intended to be used by beginners, we have strived
- to create a style that glosses over subtle distinctions between different
- categories.
-
- Taking Python as an example, comments (Comment.*) and docstrings (String.Doc)
- have been chosen to have the same style. Similarly, keywords (Keyword.*)
- and Operator.Word (and, or, in) have been assigned the same style.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-__all__ = ['TangoStyle']
-
-
-class TangoStyle(Style):
- """
- The Crunchy default style, inspired by the color palette from
- the Tango Icon Theme Guidelines.
- """
-
- name = 'tango'
-
- background_color = "#f8f8f8"
-
- styles = {
- # No corresponding class for the following:
- #Text: "", # class: ''
- Whitespace: "#f8f8f8", # class: 'w'
- Error: "#a40000 border:#ef2929", # class: 'err'
- Other: "#000000", # class 'x'
-
- Comment: "italic #8f5902", # class: 'c'
- Comment.Multiline: "italic #8f5902", # class: 'cm'
- Comment.Preproc: "italic #8f5902", # class: 'cp'
- Comment.Single: "italic #8f5902", # class: 'c1'
- Comment.Special: "italic #8f5902", # class: 'cs'
-
- Keyword: "bold #204a87", # class: 'k'
- Keyword.Constant: "bold #204a87", # class: 'kc'
- Keyword.Declaration: "bold #204a87", # class: 'kd'
- Keyword.Namespace: "bold #204a87", # class: 'kn'
- Keyword.Pseudo: "bold #204a87", # class: 'kp'
- Keyword.Reserved: "bold #204a87", # class: 'kr'
- Keyword.Type: "bold #204a87", # class: 'kt'
-
- Operator: "bold #ce5c00", # class: 'o'
- Operator.Word: "bold #204a87", # class: 'ow' - like keywords
-
- Punctuation: "bold #000000", # class: 'p'
-
- # because special names such as Name.Class, Name.Function, etc.
- # are not recognized as such later in the parsing, we choose them
- # to look the same as ordinary variables.
- Name: "#000000", # class: 'n'
- Name.Attribute: "#c4a000", # class: 'na' - to be revised
- Name.Builtin: "#204a87", # class: 'nb'
- Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
- Name.Class: "#000000", # class: 'nc' - to be revised
- Name.Constant: "#000000", # class: 'no' - to be revised
- Name.Decorator: "bold #5c35cc", # class: 'nd' - to be revised
- Name.Entity: "#ce5c00", # class: 'ni'
- Name.Exception: "bold #cc0000", # class: 'ne'
- Name.Function: "#000000", # class: 'nf'
- Name.Property: "#000000", # class: 'py'
- Name.Label: "#f57900", # class: 'nl'
- Name.Namespace: "#000000", # class: 'nn' - to be revised
- Name.Other: "#000000", # class: 'nx'
- Name.Tag: "bold #204a87", # class: 'nt' - like a keyword
- Name.Variable: "#000000", # class: 'nv' - to be revised
- Name.Variable.Class: "#000000", # class: 'vc' - to be revised
- Name.Variable.Global: "#000000", # class: 'vg' - to be revised
- Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
-
- # since the tango light blue does not show up well in text, we choose
- # a pure blue instead.
- Number: "bold #0000cf", # class: 'm'
- Number.Float: "bold #0000cf", # class: 'mf'
- Number.Hex: "bold #0000cf", # class: 'mh'
- Number.Integer: "bold #0000cf", # class: 'mi'
- Number.Integer.Long: "bold #0000cf", # class: 'il'
- Number.Oct: "bold #0000cf", # class: 'mo'
-
- Literal: "#000000", # class: 'l'
- Literal.Date: "#000000", # class: 'ld'
-
- String: "#4e9a06", # class: 's'
- String.Backtick: "#4e9a06", # class: 'sb'
- String.Char: "#4e9a06", # class: 'sc'
- String.Doc: "italic #8f5902", # class: 'sd' - like a comment
- String.Double: "#4e9a06", # class: 's2'
- String.Escape: "#4e9a06", # class: 'se'
- String.Heredoc: "#4e9a06", # class: 'sh'
- String.Interpol: "#4e9a06", # class: 'si'
- String.Other: "#4e9a06", # class: 'sx'
- String.Regex: "#4e9a06", # class: 'sr'
- String.Single: "#4e9a06", # class: 's1'
- String.Symbol: "#4e9a06", # class: 'ss'
-
- Generic: "#000000", # class: 'g'
- Generic.Deleted: "#a40000", # class: 'gd'
- Generic.Emph: "italic #000000", # class: 'ge'
- Generic.Error: "#ef2929", # class: 'gr'
- Generic.Heading: "bold #000080", # class: 'gh'
- Generic.Inserted: "#00A000", # class: 'gi'
- Generic.Output: "italic #000000", # class: 'go'
- Generic.Prompt: "#8f5902", # class: 'gp'
- Generic.Strong: "bold #000000", # class: 'gs'
- Generic.EmphStrong: "bold italic #000000", # class: 'ges'
- Generic.Subheading: "bold #800080", # class: 'gu'
- Generic.Traceback: "bold #a40000", # class: 'gt'
- }
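The per-token comments above ('c', 'k', 'nf', ...) refer to the short CSS class names that formatters emit for each token type (see STANDARD_TYPES in pygments/token.py further down in this diff). A minimal sketch of how those classes surface in generated CSS, assuming Pygments is installed:

    from pygments.formatters import HtmlFormatter

    # Emit the CSS rules for the 'tango' style; each rule targets one of the
    # short class names noted in the comments above ('.c', '.k', '.nf', ...).
    css = HtmlFormatter(style='tango').get_style_defs('.highlight')
    print('\n'.join(css.splitlines()[:10]))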
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/trac.py b/venv/lib/python3.11/site-packages/pygments/styles/trac.py
deleted file mode 100644
index 5f5c319..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/trac.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""
- pygments.styles.trac
- ~~~~~~~~~~~~~~~~~~~~
-
- Port of the default trac highlighter design.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-__all__ = ['TracStyle']
-
-
-class TracStyle(Style):
- """
- Port of the default trac highlighter design.
- """
-
- name = 'trac'
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: 'italic #999988',
- Comment.Preproc: 'bold noitalic #999999',
- Comment.Special: 'bold #999999',
-
- Operator: 'bold',
-
- String: '#bb8844',
- String.Regex: '#808000',
-
- Number: '#009999',
-
- Keyword: 'bold',
- Keyword.Type: '#445588',
-
- Name.Builtin: '#999999',
- Name.Function: 'bold #990000',
- Name.Class: 'bold #445588',
- Name.Exception: 'bold #990000',
- Name.Namespace: '#555555',
- Name.Variable: '#008080',
- Name.Constant: '#008080',
- Name.Tag: '#000080',
- Name.Attribute: '#008080',
- Name.Entity: '#800080',
-
- Generic.Heading: '#999999',
- Generic.Subheading: '#aaaaaa',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.EmphStrong: 'bold italic',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/vim.py b/venv/lib/python3.11/site-packages/pygments/styles/vim.py
deleted file mode 100644
index 1a0828f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/vim.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
- pygments.styles.vim
- ~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by vim.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace, Token
-
-
-__all__ = ['VimStyle']
-
-
-class VimStyle(Style):
- """
- Styles somewhat like vim 7.0
- """
-
- name = 'vim'
-
- background_color = "#000000"
- highlight_color = "#222222"
-
- styles = {
- Token: "#cccccc",
- Whitespace: "",
- Comment: "#000080",
- Comment.Preproc: "",
- Comment.Special: "bold #cd0000",
-
- Keyword: "#cdcd00",
- Keyword.Declaration: "#00cd00",
- Keyword.Namespace: "#cd00cd",
- Keyword.Pseudo: "",
- Keyword.Type: "#00cd00",
-
- Operator: "#3399cc",
- Operator.Word: "#cdcd00",
-
- Name: "",
- Name.Class: "#00cdcd",
- Name.Builtin: "#cd00cd",
- Name.Exception: "bold #666699",
- Name.Variable: "#00cdcd",
-
- String: "#cd0000",
- Number: "#cd00cd",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#cd0000",
- Generic.Inserted: "#00cd00",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/vs.py b/venv/lib/python3.11/site-packages/pygments/styles/vs.py
deleted file mode 100644
index b3b98c0..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/vs.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
- pygments.styles.vs
- ~~~~~~~~~~~~~~~~~~
-
- Simple style with MS Visual Studio colors.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Operator, Generic
-
-
-__all__ = ['VisualStudioStyle']
-
-
-class VisualStudioStyle(Style):
- name = 'vs'
-
- background_color = "#ffffff"
-
- styles = {
- Comment: "#008000",
- Comment.Preproc: "#0000ff",
- Keyword: "#0000ff",
- Operator.Word: "#0000ff",
- Keyword.Type: "#2b91af",
- Name.Class: "#2b91af",
- String: "#a31515",
-
- Generic.Heading: "bold",
- Generic.Subheading: "bold",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.EmphStrong: "bold italic",
- Generic.Prompt: "bold",
-
- Error: "border:#FF0000"
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/xcode.py b/venv/lib/python3.11/site-packages/pygments/styles/xcode.py
deleted file mode 100644
index 87b1323..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/xcode.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
- pygments.styles.xcode
- ~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the `Xcode` default theme.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Literal
-
-
-__all__ = ['XcodeStyle']
-
-
-class XcodeStyle(Style):
- """
- Style similar to the Xcode default colouring theme.
- """
-
- name = 'xcode'
-
- styles = {
- Comment: '#177500',
- Comment.Preproc: '#633820',
-
- String: '#C41A16',
- String.Char: '#2300CE',
-
- Operator: '#000000',
-
- Keyword: '#A90D91',
-
- Name: '#000000',
- Name.Attribute: '#836C28',
- Name.Class: '#3F6E75',
- Name.Function: '#000000',
- Name.Builtin: '#A90D91',
- # In Obj-C code this token is used to colour Cocoa types
- Name.Builtin.Pseudo: '#5B269A',
- Name.Variable: '#000000',
- Name.Tag: '#000000',
- Name.Decorator: '#000000',
- # Workaround for a BUG here: lexer treats multiline method signatures as labels
- Name.Label: '#000000',
-
- Literal: '#1C01CE',
- Number: '#1C01CE',
- Error: '#000000',
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/styles/zenburn.py b/venv/lib/python3.11/site-packages/pygments/styles/zenburn.py
deleted file mode 100644
index 6751c08..0000000
--- a/venv/lib/python3.11/site-packages/pygments/styles/zenburn.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""
- pygments.styles.zenburn
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Low contrast color scheme Zenburn.
-
- See: https://kippura.org/zenburnpage/
- https://github.com/jnurmine/Zenburn
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Name, Operator, Keyword, Generic, Comment, \
- Number, String, Literal, Punctuation, Error
-
-
-__all__ = ['ZenburnStyle']
-
-
-class ZenburnStyle(Style):
- """
- Low contrast Zenburn style.
- """
-
- name = 'zenburn'
-
- background_color = '#3f3f3f'
- highlight_color = '#484848'
- line_number_color = '#5d6262'
- line_number_background_color = '#353535'
- line_number_special_color = '#7a8080'
- line_number_special_background_color = '#353535'
-
- styles = {
- Token: '#dcdccc',
- Error: '#e37170 bold',
-
- Keyword: '#efdcbc',
- Keyword.Type: '#dfdfbf bold',
- Keyword.Constant: '#dca3a3',
- Keyword.Declaration: '#f0dfaf',
- Keyword.Namespace: '#f0dfaf',
-
- Name: '#dcdccc',
- Name.Tag: '#e89393 bold',
- Name.Entity: '#cfbfaf',
- Name.Constant: '#dca3a3',
- Name.Class: '#efef8f',
- Name.Function: '#efef8f',
- Name.Builtin: '#efef8f',
- Name.Builtin.Pseudo: '#dcdccc',
- Name.Attribute: '#efef8f',
- Name.Exception: '#c3bf9f bold',
-
- Literal: '#9fafaf',
-
- String: '#cc9393',
- String.Doc: '#7f9f7f',
- String.Interpol: '#dca3a3 bold',
-
- Number: '#8cd0d3',
- Number.Float: '#c0bed1',
-
- Operator: '#f0efd0',
-
- Punctuation: '#f0efd0',
-
- Comment: '#7f9f7f italic',
- Comment.Preproc: '#dfaf8f bold',
- Comment.PreprocFile: '#cc9393',
- Comment.Special: '#dfdfdf bold',
-
- Generic: '#ecbcbc bold',
- Generic.Emph: '#ffffff bold',
- Generic.Output: '#5b605e bold',
- Generic.Heading: '#efefef bold',
- Generic.Deleted: '#c3bf9f bg:#313c36',
- Generic.Inserted: '#709080 bg:#313c36 bold',
- Generic.Traceback: '#80d4aa bg:#2f2f2f bold',
- Generic.Subheading: '#efefef bold',
- }
diff --git a/venv/lib/python3.11/site-packages/pygments/token.py b/venv/lib/python3.11/site-packages/pygments/token.py
deleted file mode 100644
index bdf2e8e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/token.py
+++ /dev/null
@@ -1,214 +0,0 @@
-"""
- pygments.token
- ~~~~~~~~~~~~~~
-
- Basic token types and the standard tokens.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-class _TokenType(tuple):
- parent = None
-
- def split(self):
- buf = []
- node = self
- while node is not None:
- buf.append(node)
- node = node.parent
- buf.reverse()
- return buf
-
- def __init__(self, *args):
- # no need to call super.__init__
- self.subtypes = set()
-
- def __contains__(self, val):
- return self is val or (
- type(val) is self.__class__ and
- val[:len(self)] == self
- )
-
- def __getattr__(self, val):
- if not val or not val[0].isupper():
- return tuple.__getattribute__(self, val)
- new = _TokenType(self + (val,))
- setattr(self, val, new)
- self.subtypes.add(new)
- new.parent = self
- return new
-
- def __repr__(self):
- return 'Token' + (self and '.' or '') + '.'.join(self)
-
- def __copy__(self):
- # These instances are supposed to be singletons
- return self
-
- def __deepcopy__(self, memo):
- # These instances are supposed to be singletons
- return self
-
-
-Token = _TokenType()
-
-# Special token types
-Text = Token.Text
-Whitespace = Text.Whitespace
-Escape = Token.Escape
-Error = Token.Error
-# Text that doesn't belong to this lexer (e.g. HTML in PHP)
-Other = Token.Other
-
-# Common token types for source code
-Keyword = Token.Keyword
-Name = Token.Name
-Literal = Token.Literal
-String = Literal.String
-Number = Literal.Number
-Punctuation = Token.Punctuation
-Operator = Token.Operator
-Comment = Token.Comment
-
-# Generic types for non-source code
-Generic = Token.Generic
-
-# String and some others are not direct children of Token.
-# alias them:
-Token.Token = Token
-Token.String = String
-Token.Number = Number
-
-
-def is_token_subtype(ttype, other):
- """
- Return True if ``ttype`` is a subtype of ``other``.
-
- Exists for backwards compatibility. Use ``ttype in other`` now.
- """
- return ttype in other
-
-
-def string_to_tokentype(s):
- """
- Convert a string into a token type::
-
- >>> string_to_tokentype('String.Double')
- Token.Literal.String.Double
- >>> string_to_tokentype('Token.Literal.Number')
- Token.Literal.Number
- >>> string_to_tokentype('')
- Token
-
- Tokens that are already tokens are returned unchanged:
-
- >>> string_to_tokentype(String)
- Token.Literal.String
- """
- if isinstance(s, _TokenType):
- return s
- if not s:
- return Token
- node = Token
- for item in s.split('.'):
- node = getattr(node, item)
- return node
-
-
-# Map standard token types to short names, used in CSS class naming.
-# If you add a new item, please be sure to run this file to perform
-# a consistency check for duplicate values.
-STANDARD_TYPES = {
- Token: '',
-
- Text: '',
- Whitespace: 'w',
- Escape: 'esc',
- Error: 'err',
- Other: 'x',
-
- Keyword: 'k',
- Keyword.Constant: 'kc',
- Keyword.Declaration: 'kd',
- Keyword.Namespace: 'kn',
- Keyword.Pseudo: 'kp',
- Keyword.Reserved: 'kr',
- Keyword.Type: 'kt',
-
- Name: 'n',
- Name.Attribute: 'na',
- Name.Builtin: 'nb',
- Name.Builtin.Pseudo: 'bp',
- Name.Class: 'nc',
- Name.Constant: 'no',
- Name.Decorator: 'nd',
- Name.Entity: 'ni',
- Name.Exception: 'ne',
- Name.Function: 'nf',
- Name.Function.Magic: 'fm',
- Name.Property: 'py',
- Name.Label: 'nl',
- Name.Namespace: 'nn',
- Name.Other: 'nx',
- Name.Tag: 'nt',
- Name.Variable: 'nv',
- Name.Variable.Class: 'vc',
- Name.Variable.Global: 'vg',
- Name.Variable.Instance: 'vi',
- Name.Variable.Magic: 'vm',
-
- Literal: 'l',
- Literal.Date: 'ld',
-
- String: 's',
- String.Affix: 'sa',
- String.Backtick: 'sb',
- String.Char: 'sc',
- String.Delimiter: 'dl',
- String.Doc: 'sd',
- String.Double: 's2',
- String.Escape: 'se',
- String.Heredoc: 'sh',
- String.Interpol: 'si',
- String.Other: 'sx',
- String.Regex: 'sr',
- String.Single: 's1',
- String.Symbol: 'ss',
-
- Number: 'm',
- Number.Bin: 'mb',
- Number.Float: 'mf',
- Number.Hex: 'mh',
- Number.Integer: 'mi',
- Number.Integer.Long: 'il',
- Number.Oct: 'mo',
-
- Operator: 'o',
- Operator.Word: 'ow',
-
- Punctuation: 'p',
- Punctuation.Marker: 'pm',
-
- Comment: 'c',
- Comment.Hashbang: 'ch',
- Comment.Multiline: 'cm',
- Comment.Preproc: 'cp',
- Comment.PreprocFile: 'cpf',
- Comment.Single: 'c1',
- Comment.Special: 'cs',
-
- Generic: 'g',
- Generic.Deleted: 'gd',
- Generic.Emph: 'ge',
- Generic.Error: 'gr',
- Generic.Heading: 'gh',
- Generic.Inserted: 'gi',
- Generic.Output: 'go',
- Generic.Prompt: 'gp',
- Generic.Strong: 'gs',
- Generic.Subheading: 'gu',
- Generic.EmphStrong: 'ges',
- Generic.Traceback: 'gt',
-}
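The _TokenType machinery above makes token types lazily created singletons arranged in a hierarchy, with ``in`` performing the subtype test. A brief sketch of that behaviour, assuming Pygments is installed:

    from pygments.token import (
        Token, Keyword, STANDARD_TYPES, string_to_tokentype, is_token_subtype,
    )

    # Attribute access creates and caches singleton subtypes.
    assert Keyword.Constant in Keyword                   # subtype membership
    assert Keyword.Constant not in Token.Name            # unrelated branch
    assert is_token_subtype(Keyword.Constant, Keyword)   # legacy helper, same test

    # Strings map onto the same singletons; STANDARD_TYPES gives the CSS short name.
    assert string_to_tokentype('Keyword.Constant') is Keyword.Constant
    assert STANDARD_TYPES[Keyword.Constant] == 'kc'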
diff --git a/venv/lib/python3.11/site-packages/pygments/unistring.py b/venv/lib/python3.11/site-packages/pygments/unistring.py
deleted file mode 100644
index 39f6bae..0000000
--- a/venv/lib/python3.11/site-packages/pygments/unistring.py
+++ /dev/null
@@ -1,153 +0,0 @@
-"""
- pygments.unistring
- ~~~~~~~~~~~~~~~~~~
-
- Strings of all Unicode characters of a certain category.
- Used for matching in Unicode-aware languages. Run to regenerate.
-
- Inspired by chartypes_create.py from the MoinMoin project.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-Cc = '\x00-\x1f\x7f-\x9f'
-
-Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
-
-Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\u
fefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U00
01d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
-
-Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd'
-
-Cs = '\ud800-\udbff\\\udc00\udc01-\udfff'
-
-Ll = 'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\
ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943'
-
-Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1'
-
-Lo = '\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00
010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
-
-Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
-
-Lu = 'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\u
a724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921'
-
-Mc = '\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
-
-Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
-
-Mn = '\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f
-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef'
-
-Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959'
-
-Nl = '\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e'
-
-No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c'
-
-Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
-
-Pd = '\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
-
-Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
-
-Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
-
-Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
-
-Po = "!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f"
-
-Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
-
-Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0'
-
-Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff'
-
-Sm = '+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
-
-So = '\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d'
-
-Zl = '\u2028'
-
-Zp = '\u2029'
-
-Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
-
-xid_continue = '0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1
fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U000113
44\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
-
-xid_start = 'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801
\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001
b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
-
-cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
-
-# Generated from unidata 11.0.0
-
-def combine(*args):
- return ''.join(globals()[cat] for cat in args)
-
-
-def allexcept(*args):
- newcats = cats[:]
- for arg in args:
- newcats.remove(arg)
- return ''.join(globals()[cat] for cat in newcats)
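These two helpers are the public surface of the table module being deleted here (pygments/unistring.py, judging by the surrounding diff): lexers splice the category strings into regular-expression character classes. A minimal usage sketch, assuming the package is importable; the patterns below are illustrative and not taken from the deleted file:

    import re
    from pygments import unistring as uni

    # One character class covering every Unicode letter plus letter-like numerals,
    # built by concatenating the per-category strings defined in this module.
    letters = uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl')
    word_re = re.compile('[%s]+' % letters)
    print(word_re.findall('naïve Ⅻ café!'))    # ['naïve', 'Ⅻ', 'café']

    # A rough identifier matcher using the xid_* shortcuts defined above.
    ident_re = re.compile('[%s][%s]*' % (uni.xid_start, uni.xid_continue))
    print(bool(ident_re.fullmatch('café_9')))  # True
    print(bool(ident_re.fullmatch('9lives')))  # False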
-
-
-def _handle_runs(char_list): # pragma: no cover
- buf = []
- for c in char_list:
- if len(c) == 1:
- if buf and buf[-1][1] == chr(ord(c)-1):
- buf[-1] = (buf[-1][0], c)
- else:
- buf.append((c, c))
- else:
- buf.append((c, c))
- for a, b in buf:
- if a == b:
- yield a
- else:
- yield '%s-%s' % (a, b)
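`_handle_runs` is what compresses the raw per-category character lists into the compact range notation seen in the strings above. A tiny sketch of its behaviour, importing the private helper only for the demo:

    from pygments.unistring import _handle_runs

    # Consecutive code points collapse into 'a-b' ranges; non-adjacent entries
    # and pre-escaped ones such as '\\[' are passed through verbatim.
    print(''.join(_handle_runs(list('ABCDZ'))))      # A-DZ
    print(''.join(_handle_runs(['(', '\\[', '{'])))  # (\[{  (the start of Ps above)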
-
-
-if __name__ == '__main__': # pragma: no cover
- import unicodedata
-
- categories = {'xid_start': [], 'xid_continue': []}
-
- with open(__file__, encoding='utf-8') as fp:
- content = fp.read()
-
- header = content[:content.find('Cc =')]
- footer = content[content.find("def combine("):]
-
- for code in range(0x110000):
- c = chr(code)
- cat = unicodedata.category(c)
- if ord(c) == 0xdc00:
-            # Hack to avoid this character combining with the preceding high
-            # surrogate, 0xdbff, when doing a repr.
- c = '\\' + c
- elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
- # Escape regex metachars.
- c = '\\' + c
- categories.setdefault(cat, []).append(c)
- # XID_START and XID_CONTINUE are special categories used for matching
- # identifiers in Python 3.
- if c.isidentifier():
- categories['xid_start'].append(c)
- if ('a' + c).isidentifier():
- categories['xid_continue'].append(c)
-
- with open(__file__, 'w', encoding='utf-8') as fp:
- fp.write(header)
-
- for cat in sorted(categories):
- val = ''.join(_handle_runs(categories[cat]))
- fp.write('%s = %a\n\n' % (cat, val))
-
- cats = sorted(categories)
- cats.remove('xid_start')
- cats.remove('xid_continue')
- fp.write('cats = %r\n\n' % cats)
-
- fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
-
- fp.write(footer)
diff --git a/venv/lib/python3.11/site-packages/pygments/util.py b/venv/lib/python3.11/site-packages/pygments/util.py
deleted file mode 100644
index 941fdb9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/util.py
+++ /dev/null
@@ -1,330 +0,0 @@
-"""
- pygments.util
- ~~~~~~~~~~~~~
-
- Utility functions.
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from io import TextIOWrapper
-
-
-split_path_re = re.compile(r'[/\\ ]')
-doctype_lookup_re = re.compile(r'''
- <!DOCTYPE\s+(
- [a-zA-Z_][a-zA-Z0-9]*
- (?: \s+ # optional in HTML5
- [a-zA-Z_][a-zA-Z0-9]*\s+
- "[^"]*")?
- )
- [^>]*>
-''', re.DOTALL | re.MULTILINE | re.VERBOSE)
-tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>',
- re.IGNORECASE | re.DOTALL | re.MULTILINE)
-xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)
-
-
-class ClassNotFound(ValueError):
- """Raised if one of the lookup functions didn't find a matching class."""
-
-
-class OptionError(Exception):
- """
- This exception will be raised by all option processing functions if
- the type or value of the argument is not correct.
- """
-
-def get_choice_opt(options, optname, allowed, default=None, normcase=False):
- """
-    If the value of the key `optname` in the dictionary `options` is not in
-    the sequence `allowed`, raise an `OptionError`; otherwise return it.
- """
- string = options.get(optname, default)
- if normcase:
- string = string.lower()
- if string not in allowed:
- raise OptionError('Value for option %s must be one of %s' %
- (optname, ', '.join(map(str, allowed))))
- return string
-
-
-def get_bool_opt(options, optname, default=None):
- """
-    Intuitively, this is `options.get(optname, default)`, but restricted to
-    Boolean values. Booleans may also be given as strings, so that Boolean
-    values can be passed in from command-line arguments. If the key `optname`
-    is present in the dictionary `options` but not associated with a Boolean,
-    an `OptionError` is raised. If it is absent, `default` is returned instead.
-
- The valid string values for ``True`` are ``1``, ``yes``, ``true`` and
- ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off``
- (matched case-insensitively).
- """
- string = options.get(optname, default)
- if isinstance(string, bool):
- return string
- elif isinstance(string, int):
- return bool(string)
- elif not isinstance(string, str):
- raise OptionError('Invalid type %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
- elif string.lower() in ('1', 'yes', 'true', 'on'):
- return True
- elif string.lower() in ('0', 'no', 'false', 'off'):
- return False
- else:
- raise OptionError('Invalid value %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
-
-
-def get_int_opt(options, optname, default=None):
- """As :func:`get_bool_opt`, but interpret the value as an integer."""
- string = options.get(optname, default)
- try:
- return int(string)
- except TypeError:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
- except ValueError:
- raise OptionError('Invalid value %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
-
-def get_list_opt(options, optname, default=None):
- """
-    If the value of the key `optname` in the dictionary `options` is a string,
-    split it at whitespace and return the resulting list. If it is already a
-    list or a tuple, it is returned as a list; any other type raises an
-    `OptionError`.
- """
- val = options.get(optname, default)
- if isinstance(val, str):
- return val.split()
- elif isinstance(val, (list, tuple)):
- return list(val)
- else:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give a list value' % (
- val, optname))
-
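The four option getters above are how lexers and formatters coerce their loosely typed **options dict. A short sketch of the typical pattern; the option names and values are made up:

    from pygments.util import (get_bool_opt, get_choice_opt, get_int_opt,
                               get_list_opt)

    options = {'linenos': 'yes', 'style': 'default',
               'tabsize': '8', 'filenames': '*.py *.pyi'}

    linenos = get_bool_opt(options, 'linenos', False)            # True
    style = get_choice_opt(options, 'style', ['default', 'bw'])  # 'default'
    tabsize = get_int_opt(options, 'tabsize', 4)                 # 8
    patterns = get_list_opt(options, 'filenames', [])            # ['*.py', '*.pyi']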
-
-def docstring_headline(obj):
- if not obj.__doc__:
- return ''
- res = []
- for line in obj.__doc__.strip().splitlines():
- if line.strip():
- res.append(" " + line.strip())
- else:
- break
- return ''.join(res).lstrip()
-
-
-def make_analysator(f):
- """Return a static text analyser function that returns float values."""
- def text_analyse(text):
- try:
- rv = f(text)
- except Exception:
- return 0.0
- if not rv:
- return 0.0
- try:
- return min(1.0, max(0.0, float(rv)))
- except (ValueError, TypeError):
- return 0.0
- text_analyse.__doc__ = f.__doc__
- return staticmethod(text_analyse)
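`make_analysator` turns a free-form `analyse_text` heuristic into the clamped, exception-safe static method the lexer-guessing machinery expects; in the package itself this wrapping is applied by the lexer metaclass. A minimal sketch with a made-up heuristic:

    from pygments.util import make_analysator

    def looks_like_php(text):
        # Free-form heuristic: may return any number, or even raise.
        return text.count('<?php') * 3

    class FakeLexer:
        analyse_text = make_analysator(looks_like_php)

    print(FakeLexer.analyse_text('<?php echo 1; ?>'))  # 1.0 (clamped to [0.0, 1.0])
    print(FakeLexer.analyse_text('plain prose'))       # 0.0 (falsy result)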
-
-
-def shebang_matches(text, regex):
- r"""Check if the given regular expression matches the last part of the
- shebang if one exists.
-
- >>> from pygments.util import shebang_matches
- >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
- True
- >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
- True
- >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
- False
- >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
- False
- >>> shebang_matches('#!/usr/bin/startsomethingwith python',
- ... r'python(2\.\d)?')
- True
-
- It also checks for common windows executable file extensions::
-
- >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
- True
-
-    Shebang parameters (such as ``'-f'`` or ``'--foo'``) are ignored, so
-    ``'perl'`` matches the same shebangs as ``'perl -e'``.
-
-    Note that the regular expression is automatically matched against the
-    whole interpreter name (i.e. it is wrapped in ``'^$'``).
- """
- index = text.find('\n')
- if index >= 0:
- first_line = text[:index].lower()
- else:
- first_line = text.lower()
- if first_line.startswith('#!'):
- try:
- found = [x for x in split_path_re.split(first_line[2:].strip())
- if x and not x.startswith('-')][-1]
- except IndexError:
- return False
- regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
- if regex.search(found) is not None:
- return True
- return False
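In practice `shebang_matches` is called from a lexer's `analyse_text` heuristic; the regex below is illustrative only:

    from pygments.util import shebang_matches

    def analyse_text(text):
        return 1.0 if shebang_matches(text, r'(ba|z|k)?sh') else 0.0

    print(analyse_text('#!/bin/bash\necho hi'))  # 1.0
    print(analyse_text('#!/usr/bin/perl\n'))     # 0.0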
-
-
-def doctype_matches(text, regex):
- """Check if the doctype matches a regular expression (if present).
-
-    Note that this method only checks the first part of a DOCTYPE,
-    e.g. 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'.
- """
- m = doctype_lookup_re.search(text)
- if m is None:
- return False
- doctype = m.group(1)
- return re.compile(regex, re.I).match(doctype.strip()) is not None
-
-
-def html_doctype_matches(text):
- """Check if the file looks like it has a html doctype."""
- return doctype_matches(text, r'html')
-
-
-_looks_like_xml_cache = {}
-
-
-def looks_like_xml(text):
- """Check if a doctype exists or if we have some tags."""
- if xml_decl_re.match(text):
- return True
- key = hash(text)
- try:
- return _looks_like_xml_cache[key]
- except KeyError:
- m = doctype_lookup_re.search(text)
- if m is not None:
- return True
- rv = tag_re.search(text[:1000]) is not None
- _looks_like_xml_cache[key] = rv
- return rv
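A quick sketch of the two content sniffers above (`looks_like_xml` memoizes its tag-scan result per text hash):

    from pygments.util import html_doctype_matches, looks_like_xml

    print(looks_like_xml('<?xml version="1.0"?><root/>'))        # True
    print(looks_like_xml('no markup here'))                      # False
    print(html_doctype_matches('<!DOCTYPE html><html></html>'))  # True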
-
-
-def surrogatepair(c):
- """Given a unicode character code with length greater than 16 bits,
- return the two 16 bit surrogate pair.
- """
- # From example D28 of:
- # http://www.unicode.org/book/ch03.pdf
- return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
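A worked example of the surrogate-pair arithmetic, cross-checked against Python's own UTF-16 encoder:

    from pygments.util import surrogatepair

    # U+1F600 lies above the Basic Multilingual Plane, so it needs two code units.
    hi, lo = surrogatepair(0x1F600)
    print(hex(hi), hex(lo))                        # 0xd83d 0xde00
    print('\U0001F600'.encode('utf-16-le').hex())  # 3dd800de (the same pair, little-endian)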
-
-
-def format_lines(var_name, seq, raw=False, indent_level=0):
- """Formats a sequence of strings for output."""
- lines = []
- base_indent = ' ' * indent_level * 4
- inner_indent = ' ' * (indent_level + 1) * 4
- lines.append(base_indent + var_name + ' = (')
- if raw:
- # These should be preformatted reprs of, say, tuples.
- for i in seq:
- lines.append(inner_indent + i + ',')
- else:
- for i in seq:
- # Force use of single quotes
- r = repr(i + '"')
- lines.append(inner_indent + r[:-2] + r[-1] + ',')
- lines.append(base_indent + ')')
- return '\n'.join(lines)
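`format_lines` renders a sequence as a tuple literal for generated source files; a quick demo of the output shape:

    from pygments.util import format_lines

    print(format_lines('KEYWORDS', ['if', 'else'], indent_level=1))
    #     KEYWORDS = (
    #         'if',
    #         'else',
    #     )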
-
-
-def duplicates_removed(it, already_seen=()):
- """
- Returns a list with duplicates removed from the iterable `it`.
-
- Order is preserved.
- """
- lst = []
- seen = set()
- for i in it:
- if i in seen or i in already_seen:
- continue
- lst.append(i)
- seen.add(i)
- return lst
-
-
-class Future:
- """Generic class to defer some work.
-
- Handled specially in RegexLexerMeta, to support regex string construction at
- first use.
- """
- def get(self):
- raise NotImplementedError
-
-
-def guess_decode(text):
- """Decode *text* with guessed encoding.
-
- First try UTF-8; this should fail for non-UTF-8 encodings.
- Then try the preferred locale encoding.
- Fall back to latin-1, which always works.
- """
- try:
- text = text.decode('utf-8')
- return text, 'utf-8'
- except UnicodeDecodeError:
- try:
- import locale
- prefencoding = locale.getpreferredencoding()
- text = text.decode()
- return text, prefencoding
- except (UnicodeDecodeError, LookupError):
- text = text.decode('latin1')
- return text, 'latin1'
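A short demonstration of the fallback chain in `guess_decode`: valid UTF-8 is reported as such, while other byte sequences fall through the later branches:

    from pygments.util import guess_decode

    print(guess_decode('héllo'.encode('utf-8')))    # ('héllo', 'utf-8')
    print(guess_decode('héllo'.encode('latin-1')))  # ('héllo', 'latin1') on a typical setup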
-
-
-def guess_decode_from_terminal(text, term):
- """Decode *text* coming from terminal *term*.
-
- First try the terminal encoding, if given.
- Then try UTF-8. Then try the preferred locale encoding.
- Fall back to latin-1, which always works.
- """
- if getattr(term, 'encoding', None):
- try:
- text = text.decode(term.encoding)
- except UnicodeDecodeError:
- pass
- else:
- return text, term.encoding
- return guess_decode(text)
-
-
-def terminal_encoding(term):
- """Return our best guess of encoding for the given *term*."""
- if getattr(term, 'encoding', None):
- return term.encoding
- import locale
- return locale.getpreferredencoding()
-
-
-class UnclosingTextIOWrapper(TextIOWrapper):
- # Don't close underlying buffer on destruction.
- def close(self):
- self.flush()