From 6165952ce2f03c45187d2c293565100eb2cfdfa6 Mon Sep 17 00:00:00 2001 From: Mark McGranaghan Date: Tue, 27 Dec 2016 08:10:47 -0800 Subject: [PATCH] Update vendor'd Pygments to 2.1.3 --- vendor/pygments/.hgignore | 19 + vendor/pygments/.hgtags | 31 + vendor/pygments/AUTHORS | 73 +- vendor/pygments/CHANGES | 304 +- vendor/pygments/LICENSE | 2 +- vendor/pygments/MANIFEST.in | 2 +- vendor/pygments/Makefile | 25 +- vendor/pygments/PKG-INFO | 21 +- vendor/pygments/Pygments.egg-info/PKG-INFO | 21 +- vendor/pygments/Pygments.egg-info/SOURCES.txt | 510 +- vendor/pygments/README.rst | 39 + vendor/pygments/TODO | 3 - vendor/pygments/doc/Makefile | 153 + vendor/pygments/doc/_static/favicon.ico | Bin 0 -> 16958 bytes vendor/pygments/doc/_static/logo_new.png | Bin 0 -> 40944 bytes vendor/pygments/doc/_static/logo_only.png | Bin 0 -> 16424 bytes .../pygments/doc/_templates/docssidebar.html | 3 + .../pygments/doc/_templates/indexsidebar.html | 25 + .../doc/_themes/pygments14/layout.html | 98 + .../doc/_themes/pygments14/static/bodybg.png | Bin 0 -> 51903 bytes .../doc/_themes/pygments14/static/docbg.png | Bin 0 -> 61296 bytes .../_themes/pygments14/static/listitem.png | Bin 0 -> 207 bytes .../doc/_themes/pygments14/static/logo.png | Bin 0 -> 26933 bytes .../doc/_themes/pygments14/static/pocoo.png | Bin 0 -> 2154 bytes .../pygments14/static/pygments14.css_t | 401 + .../doc/_themes/pygments14/theme.conf | 15 + vendor/pygments/doc/conf.py | 241 + vendor/pygments/doc/docs/api.rst | 316 + vendor/pygments/doc/docs/authors.rst | 4 + vendor/pygments/doc/docs/changelog.rst | 1 + .../src/cmdline.txt => doc/docs/cmdline.rst} | 28 +- .../docs/filterdevelopment.rst} | 9 +- .../src/filters.txt => doc/docs/filters.rst} | 9 +- .../docs/formatterdevelopment.rst} | 2 +- .../docs/formatters.rst} | 12 +- vendor/pygments/doc/docs/index.rst | 66 + .../integrate.txt => doc/docs/integrate.rst} | 32 +- .../{docs/src/java.txt => doc/docs/java.rst} | 14 +- vendor/pygments/doc/docs/lexerdevelopment.rst | 681 ++ .../src/lexers.txt => doc/docs/lexers.rst} | 16 +- .../moinmoin.txt => doc/docs/moinmoin.rst} | 0 .../src/plugins.txt => doc/docs/plugins.rst} | 0 .../docs/quickstart.rst} | 41 +- .../docs/rstdirective.rst} | 0 .../src/styles.txt => doc/docs/styles.rst} | 6 +- .../src/tokens.txt => doc/docs/tokens.rst} | 25 +- .../src/unicode.txt => doc/docs/unicode.rst} | 35 +- vendor/pygments/doc/download.rst | 41 + vendor/pygments/doc/faq.rst | 139 + vendor/pygments/doc/index.rst | 54 + vendor/pygments/doc/languages.rst | 152 + vendor/pygments/doc/make.bat | 190 + vendor/pygments/{docs => doc}/pygmentize.1 | 0 vendor/pygments/docs/build/api.html | 458 - vendor/pygments/docs/build/authors.html | 355 - vendor/pygments/docs/build/changelog.html | 930 -- vendor/pygments/docs/build/cmdline.html | 353 - .../docs/build/filterdevelopment.html | 282 - vendor/pygments/docs/build/filters.html | 412 - .../docs/build/formatterdevelopment.html | 374 - vendor/pygments/docs/build/formatters.html | 927 -- vendor/pygments/docs/build/index.html | 261 - vendor/pygments/docs/build/installation.html | 281 - vendor/pygments/docs/build/integrate.html | 268 - vendor/pygments/docs/build/java.html | 271 - .../pygments/docs/build/lexerdevelopment.html | 691 -- vendor/pygments/docs/build/lexers.html | 5359 ----------- vendor/pygments/docs/build/moinmoin.html | 245 - vendor/pygments/docs/build/plugins.html | 294 - vendor/pygments/docs/build/quickstart.html | 390 - vendor/pygments/docs/build/rstdirective.html | 229 - vendor/pygments/docs/build/styles.html | 341 - 
vendor/pygments/docs/build/tokens.html | 541 -- vendor/pygments/docs/build/unicode.html | 249 - vendor/pygments/docs/generate.py | 472 - vendor/pygments/docs/src/api.txt | 270 - vendor/pygments/docs/src/authors.txt | 5 - vendor/pygments/docs/src/changelog.txt | 5 - vendor/pygments/docs/src/index.txt | 69 - vendor/pygments/docs/src/installation.txt | 71 - vendor/pygments/docs/src/lexerdevelopment.txt | 551 -- vendor/pygments/external/autopygmentize | 119 +- .../external/lasso-builtins-generator-9.lasso | 169 +- .../pygments/external/markdown-processor.py | 30 +- vendor/pygments/external/moin-parser.py | 2 +- vendor/pygments/external/rst-directive-old.py | 77 - vendor/pygments/external/rst-directive.py | 5 +- vendor/pygments/ez_setup.py | 564 +- vendor/pygments/pygmentize | 5 +- vendor/pygments/pygments/__init__.py | 21 +- vendor/pygments/pygments/cmdline.py | 428 +- vendor/pygments/pygments/console.py | 2 +- vendor/pygments/pygments/filter.py | 6 +- vendor/pygments/pygments/filters/__init__.py | 76 +- vendor/pygments/pygments/formatter.py | 11 +- .../pygments/pygments/formatters/__init__.py | 132 +- .../pygments/pygments/formatters/_mapping.py | 115 +- vendor/pygments/pygments/formatters/bbcode.py | 2 +- vendor/pygments/pygments/formatters/html.py | 180 +- vendor/pygments/pygments/formatters/img.py | 72 +- vendor/pygments/pygments/formatters/irc.py | 182 + vendor/pygments/pygments/formatters/latex.py | 144 +- vendor/pygments/pygments/formatters/other.py | 63 +- vendor/pygments/pygments/formatters/rtf.py | 91 +- vendor/pygments/pygments/formatters/svg.py | 5 +- .../pygments/pygments/formatters/terminal.py | 58 +- .../pygments/formatters/terminal256.py | 115 +- vendor/pygments/pygments/lexer.py | 244 +- vendor/pygments/pygments/lexers/__init__.py | 146 +- .../{_asybuiltins.py => _asy_builtins.py} | 14 +- .../{_clbuiltins.py => _cl_builtins.py} | 34 +- .../pygments/lexers/_cocoa_builtins.py | 72 + .../pygments/lexers/_csound_builtins.py | 1346 +++ .../{_lassobuiltins.py => _lasso_builtins.py} | 8100 ++++++++--------- .../{_luabuiltins.py => _lua_builtins.py} | 82 +- vendor/pygments/pygments/lexers/_mapping.py | 539 +- .../pygments/pygments/lexers/_mql_builtins.py | 1172 +++ .../pygments/lexers/_openedge_builtins.py | 2547 ++++++ .../pygments/lexers/_openedgebuiltins.py | 562 -- .../{_phpbuiltins.py => _php_builtins.py} | 1737 +++- .../pygments/lexers/_postgres_builtins.py | 808 +- .../pygments/lexers/_scilab_builtins.py | 3106 ++++++- .../pygments/lexers/_sourcemod_builtins.py | 1163 +++ .../pygments/lexers/_sourcemodbuiltins.py | 1072 --- .../pygments/lexers/_stan_builtins.py | 396 +- .../pygments/pygments/lexers/_vim_builtins.py | 1939 ++++ .../pygments/pygments/lexers/_vimbuiltins.py | 13 - .../pygments/pygments/lexers/actionscript.py | 240 + vendor/pygments/pygments/lexers/agile.py | 1921 +--- vendor/pygments/pygments/lexers/algebra.py | 221 + vendor/pygments/pygments/lexers/ambient.py | 76 + vendor/pygments/pygments/lexers/apl.py | 101 + vendor/pygments/pygments/lexers/archetype.py | 318 + vendor/pygments/pygments/lexers/asm.py | 178 +- vendor/pygments/pygments/lexers/automation.py | 374 + vendor/pygments/pygments/lexers/basic.py | 500 + vendor/pygments/pygments/lexers/business.py | 592 ++ vendor/pygments/pygments/lexers/c_cpp.py | 248 + vendor/pygments/pygments/lexers/c_like.py | 541 ++ vendor/pygments/pygments/lexers/chapel.py | 101 + vendor/pygments/pygments/lexers/compiled.py | 3509 +------ vendor/pygments/pygments/lexers/configs.py | 827 ++ vendor/pygments/pygments/lexers/console.py 
| 114 + vendor/pygments/pygments/lexers/csound.py | 366 + vendor/pygments/pygments/lexers/css.py | 524 ++ vendor/pygments/pygments/lexers/d.py | 251 + vendor/pygments/pygments/lexers/dalvik.py | 43 +- vendor/pygments/pygments/lexers/data.py | 530 ++ vendor/pygments/pygments/lexers/diff.py | 106 + vendor/pygments/pygments/lexers/dotnet.py | 296 +- vendor/pygments/pygments/lexers/dsls.py | 694 ++ vendor/pygments/pygments/lexers/dylan.py | 289 + vendor/pygments/pygments/lexers/ecl.py | 125 + vendor/pygments/pygments/lexers/eiffel.py | 65 + vendor/pygments/pygments/lexers/elm.py | 121 + vendor/pygments/pygments/lexers/erlang.py | 511 ++ vendor/pygments/pygments/lexers/esoteric.py | 219 + vendor/pygments/pygments/lexers/ezhil.py | 68 + vendor/pygments/pygments/lexers/factor.py | 344 + vendor/pygments/pygments/lexers/fantom.py | 250 + vendor/pygments/pygments/lexers/felix.py | 273 + vendor/pygments/pygments/lexers/fortran.py | 206 + vendor/pygments/pygments/lexers/foxpro.py | 6 +- vendor/pygments/pygments/lexers/functional.py | 2601 +----- vendor/pygments/pygments/lexers/go.py | 101 + .../pygments/lexers/grammar_notation.py | 131 + vendor/pygments/pygments/lexers/graph.py | 80 + vendor/pygments/pygments/lexers/graphics.py | 553 ++ vendor/pygments/pygments/lexers/haskell.py | 840 ++ vendor/pygments/pygments/lexers/haxe.py | 936 ++ vendor/pygments/pygments/lexers/hdl.py | 342 +- vendor/pygments/pygments/lexers/hexdump.py | 97 + vendor/pygments/pygments/lexers/html.py | 601 ++ vendor/pygments/pygments/lexers/idl.py | 269 + vendor/pygments/pygments/lexers/igor.py | 280 + vendor/pygments/pygments/lexers/inferno.py | 96 + vendor/pygments/pygments/lexers/installers.py | 322 + .../pygments/pygments/lexers/int_fiction.py | 1343 +++ vendor/pygments/pygments/lexers/iolang.py | 63 + vendor/pygments/pygments/lexers/j.py | 146 + vendor/pygments/pygments/lexers/javascript.py | 1440 +++ vendor/pygments/pygments/lexers/julia.py | 206 + vendor/pygments/pygments/lexers/jvm.py | 1088 ++- vendor/pygments/pygments/lexers/lisp.py | 2365 +++++ vendor/pygments/pygments/lexers/make.py | 202 + vendor/pygments/pygments/lexers/markup.py | 502 + vendor/pygments/pygments/lexers/math.py | 1655 +--- vendor/pygments/pygments/lexers/matlab.py | 663 ++ vendor/pygments/pygments/lexers/ml.py | 769 ++ vendor/pygments/pygments/lexers/modeling.py | 358 + vendor/pygments/pygments/lexers/modula2.py | 1561 ++++ vendor/pygments/pygments/lexers/nimrod.py | 159 + vendor/pygments/pygments/lexers/nit.py | 64 + vendor/pygments/pygments/lexers/nix.py | 136 + vendor/pygments/pygments/lexers/oberon.py | 105 + vendor/pygments/pygments/lexers/objective.py | 501 + vendor/pygments/pygments/lexers/ooc.py | 85 + vendor/pygments/pygments/lexers/other.py | 3685 +------- vendor/pygments/pygments/lexers/parasail.py | 79 + vendor/pygments/pygments/lexers/parsers.py | 327 +- vendor/pygments/pygments/lexers/pascal.py | 644 ++ vendor/pygments/pygments/lexers/pawn.py | 199 + vendor/pygments/pygments/lexers/perl.py | 615 ++ vendor/pygments/pygments/lexers/php.py | 245 + vendor/pygments/pygments/lexers/praat.py | 295 + vendor/pygments/pygments/lexers/prolog.py | 306 + vendor/pygments/pygments/lexers/python.py | 859 ++ vendor/pygments/pygments/lexers/qvt.py | 150 + vendor/pygments/pygments/lexers/r.py | 453 + vendor/pygments/pygments/lexers/rdf.py | 272 + vendor/pygments/pygments/lexers/rebol.py | 431 + vendor/pygments/pygments/lexers/resource.py | 84 + vendor/pygments/pygments/lexers/roboconf.py | 82 + ...botframeworklexer.py => robotframework.py} | 47 +- 
vendor/pygments/pygments/lexers/ruby.py | 519 ++ vendor/pygments/pygments/lexers/rust.py | 190 + vendor/pygments/pygments/lexers/scripting.py | 1203 +++ vendor/pygments/pygments/lexers/shell.py | 718 +- vendor/pygments/pygments/lexers/smalltalk.py | 195 + vendor/pygments/pygments/lexers/snobol.py | 83 + vendor/pygments/pygments/lexers/special.py | 18 +- vendor/pygments/pygments/lexers/sql.py | 305 +- .../pygments/pygments/lexers/supercollider.py | 90 + vendor/pygments/pygments/lexers/tcl.py | 145 + vendor/pygments/pygments/lexers/templates.py | 684 +- vendor/pygments/pygments/lexers/testing.py | 207 + vendor/pygments/pygments/lexers/text.py | 1848 +--- vendor/pygments/pygments/lexers/textedit.py | 169 + vendor/pygments/pygments/lexers/textfmts.py | 297 + vendor/pygments/pygments/lexers/theorem.py | 454 + .../pygments/pygments/lexers/trafficscript.py | 54 + vendor/pygments/pygments/lexers/urbi.py | 133 + vendor/pygments/pygments/lexers/web.py | 3425 +------ vendor/pygments/pygments/lexers/webmisc.py | 979 ++ vendor/pygments/pygments/lexers/x10.py | 69 + vendor/pygments/pygments/modeline.py | 43 + vendor/pygments/pygments/plugin.py | 2 +- vendor/pygments/pygments/regexopt.py | 92 + vendor/pygments/pygments/scanner.py | 2 +- vendor/pygments/pygments/sphinxext.py | 157 + vendor/pygments/pygments/style.py | 7 +- vendor/pygments/pygments/styles/__init__.py | 9 +- vendor/pygments/pygments/styles/algol.py | 63 + vendor/pygments/pygments/styles/algol_nu.py | 63 + vendor/pygments/pygments/styles/arduino.py | 98 + vendor/pygments/pygments/styles/autumn.py | 2 +- vendor/pygments/pygments/styles/borland.py | 2 +- vendor/pygments/pygments/styles/bw.py | 2 +- vendor/pygments/pygments/styles/colorful.py | 2 +- vendor/pygments/pygments/styles/default.py | 2 +- vendor/pygments/pygments/styles/emacs.py | 2 +- vendor/pygments/pygments/styles/friendly.py | 2 +- vendor/pygments/pygments/styles/fruity.py | 2 +- vendor/pygments/pygments/styles/igor.py | 29 + vendor/pygments/pygments/styles/lovelace.py | 93 + vendor/pygments/pygments/styles/manni.py | 2 +- vendor/pygments/pygments/styles/monokai.py | 8 +- vendor/pygments/pygments/styles/murphy.py | 2 +- vendor/pygments/pygments/styles/native.py | 2 +- .../pygments/pygments/styles/paraiso_dark.py | 125 + .../pygments/pygments/styles/paraiso_light.py | 125 + vendor/pygments/pygments/styles/pastie.py | 2 +- vendor/pygments/pygments/styles/perldoc.py | 2 +- vendor/pygments/pygments/styles/rrt.py | 2 +- vendor/pygments/pygments/styles/tango.py | 2 +- vendor/pygments/pygments/styles/trac.py | 2 +- vendor/pygments/pygments/styles/vim.py | 2 +- vendor/pygments/pygments/styles/vs.py | 2 +- vendor/pygments/pygments/styles/xcode.py | 51 + vendor/pygments/pygments/token.py | 15 +- vendor/pygments/pygments/unistring.py | 201 +- vendor/pygments/pygments/util.py | 214 +- vendor/pygments/requirements.txt | 5 + vendor/pygments/scripts/check_sources.py | 123 +- vendor/pygments/scripts/debug_lexer.py | 244 + .../scripts/detect_missing_analyse_text.py | 9 +- vendor/pygments/scripts/find_codetags.py | 205 - vendor/pygments/scripts/find_error.py | 171 +- vendor/pygments/scripts/get_vimkw.py | 43 +- vendor/pygments/scripts/reindent.py | 291 - vendor/pygments/scripts/vim2pygments.py | 16 +- vendor/pygments/setup.cfg | 4 + vendor/pygments/setup.py | 55 +- vendor/pygments/tests/.coverage | Bin 0 -> 13152 bytes vendor/pygments/tests/cover/coverage_html.js | 376 + vendor/pygments/tests/cover/jquery.hotkeys.js | 99 + .../pygments/tests/cover/jquery.isonscreen.js | 53 + 
vendor/pygments/tests/cover/jquery.min.js | 166 + .../tests/cover/jquery.tablesorter.min.js | 2 + vendor/pygments/tests/cover/keybd_closed.png | Bin 0 -> 264 bytes vendor/pygments/tests/cover/keybd_open.png | Bin 0 -> 267 bytes vendor/pygments/tests/cover/status.dat | 5179 +++++++++++ vendor/pygments/tests/cover/style.css | 300 + .../examplefiles/99_bottles_of_beer.chpl | 179 + vendor/pygments/tests/examplefiles/Blink.ino | 24 + .../tests/examplefiles/CPDictionary.j | 0 vendor/pygments/tests/examplefiles/Deflate.fs | 578 ++ vendor/pygments/tests/examplefiles/Error.pmod | 38 + .../pygments/tests/examplefiles/Errors.scala | 5 + .../pygments/tests/examplefiles/FakeFile.pike | 360 + .../Get-CommandDefinitionHtml.ps1 | 66 + .../tests/examplefiles/IPDispatchC.nc | 104 + .../tests/examplefiles/IPDispatchP.nc | 671 ++ vendor/pygments/tests/examplefiles/RoleQ.pm6 | 23 + .../tests/examplefiles/abnf_example1.abnf | 22 + .../tests/examplefiles/abnf_example2.abnf | 9 + vendor/pygments/tests/examplefiles/ahcon.f | 340 + vendor/pygments/tests/examplefiles/all.nit | 1986 ++++ .../{ANTLRv3.g => antlr_ANTLRv3.g} | 0 .../tests/examplefiles/autoit_submit.au3 | 2 + .../pygments/tests/examplefiles/automake.mk | 7 + .../pygments/tests/examplefiles/batchfile.bat | 49 - .../tests/examplefiles/bnf_example1.bnf | 15 + vendor/pygments/tests/examplefiles/char.scala | 4 + .../examplefiles/clojure-weird-keywords.clj | 5 + vendor/pygments/tests/examplefiles/core.cljs | 52 + vendor/pygments/tests/examplefiles/demo.cfm | 14 +- .../pygments/tests/examplefiles/demo.css.in | 6 + vendor/pygments/tests/examplefiles/demo.hbs | 12 + vendor/pygments/tests/examplefiles/demo.js.in | 6 + .../pygments/tests/examplefiles/demo.thrift | 14 + .../pygments/tests/examplefiles/demo.xul.in | 7 + .../pygments/tests/examplefiles/docker.docker | 5 + .../tests/examplefiles/eg_example1.eg | 155 + .../tests/examplefiles/ember.handlebars | 33 + vendor/pygments/tests/examplefiles/es6.js | 46 + vendor/pygments/tests/examplefiles/eval.rs | 606 ++ .../pygments/tests/examplefiles/example.als | 217 + .../pygments/tests/examplefiles/example.bat | 209 + vendor/pygments/tests/examplefiles/example.bc | 53 + vendor/pygments/tests/examplefiles/example.c | 2 +- .../tests/examplefiles/example.ceylon | 39 +- .../pygments/tests/examplefiles/example.chai | 6 + .../pygments/tests/examplefiles/example.clay | 33 + .../pygments/tests/examplefiles/example.cob | 936 -- .../tests/examplefiles/example.coffee | 27 + vendor/pygments/tests/examplefiles/example.e | 124 + .../pygments/tests/examplefiles/example.elm | 58 + .../pygments/tests/examplefiles/example.ezt | 32 + .../pygments/tests/examplefiles/example.f90 | 8 + .../tests/examplefiles/example.feature | 16 + .../pygments/tests/examplefiles/example.fish | 580 ++ vendor/pygments/tests/examplefiles/example.gd | 23 + vendor/pygments/tests/examplefiles/example.gi | 64 + .../pygments/tests/examplefiles/example.golo | 113 + .../tests/examplefiles/example.groovy | 2 + vendor/pygments/tests/examplefiles/example.hs | 31 + vendor/pygments/tests/examplefiles/example.hx | 192 + .../pygments/tests/examplefiles/example.i6t | 32 + .../pygments/tests/examplefiles/example.i7x | 45 + vendor/pygments/tests/examplefiles/example.j | 564 ++ .../pygments/tests/examplefiles/example.java | 16 + .../pygments/tests/examplefiles/example.jcl | 31 + .../tests/examplefiles/example.jsonld | 27 + .../pygments/tests/examplefiles/example.kal | 75 + .../pygments/tests/examplefiles/example.lagda | 19 + .../tests/examplefiles/example.liquid | 42 + 
vendor/pygments/tests/examplefiles/example.ma | 8 + .../pygments/tests/examplefiles/example.mac | 6 + .../pygments/tests/examplefiles/example.mq4 | 187 + .../pygments/tests/examplefiles/example.mqh | 123 + vendor/pygments/tests/examplefiles/example.ni | 57 + .../pygments/tests/examplefiles/example.nix | 80 + .../pygments/tests/examplefiles/example.pcmk | 115 + vendor/pygments/tests/examplefiles/example.pp | 8 + .../pygments/tests/examplefiles/example.praat | 245 + .../pygments/tests/examplefiles/example.red | 257 + .../pygments/tests/examplefiles/example.reds | 150 + .../pygments/tests/examplefiles/example.rexx | 50 + .../pygments/tests/examplefiles/example.rkt | 706 +- .../pygments/tests/examplefiles/example.rts | 118 + .../pygments/tests/examplefiles/example.scd | 76 + vendor/pygments/tests/examplefiles/example.sh | 22 + .../pygments/tests/examplefiles/example.slim | 31 + .../pygments/tests/examplefiles/example.sls | 51 + .../pygments/tests/examplefiles/example.stan | 177 +- .../pygments/tests/examplefiles/example.tap | 37 + vendor/pygments/tests/examplefiles/example.tf | 162 + .../pygments/tests/examplefiles/example.thy | 751 ++ .../tests/examplefiles/example.todotxt | 9 + vendor/pygments/tests/examplefiles/example.ts | 11 + .../pygments/tests/examplefiles/example.ttl | 43 + .../tests/examplefiles/example.weechatlog | 4 +- .../pygments/tests/examplefiles/example.x10 | 9 + .../pygments/tests/examplefiles/example1.cadl | 149 + .../tests/examplefiles/exampleScript.cfc | 241 + .../tests/examplefiles/exampleTag.cfc | 18 + .../pygments/tests/examplefiles/example_coq.v | 4 + .../tests/examplefiles/example_elixir.ex | 562 +- .../tests/examplefiles/ezhil_primefactors.n | 152 + .../tests/examplefiles/garcia-wachs.kk | 70 +- .../tests/examplefiles/grammar-test.p6 | 22 + .../tests/examplefiles/hash_syntax.rb | 5 + vendor/pygments/tests/examplefiles/hello.at | 6 + vendor/pygments/tests/examplefiles/hello.golo | 5 + vendor/pygments/tests/examplefiles/hello.lsl | 12 + .../tests/examplefiles/hexdump_debugexe | 309 + vendor/pygments/tests/examplefiles/hexdump_hd | 310 + .../tests/examplefiles/hexdump_hexcat | 247 + .../tests/examplefiles/hexdump_hexdump | 310 + vendor/pygments/tests/examplefiles/hexdump_od | 310 + .../pygments/tests/examplefiles/hexdump_xxd | 309 + .../examplefiles/{File.hy => hybris_File.hy} | 0 .../{mg_sample.pro => idl_sample.pro} | 0 .../pygments/tests/examplefiles/iex_example | 23 + vendor/pygments/tests/examplefiles/import.hs | 4 - .../pygments/tests/examplefiles/inet_pton6.dg | 48 +- .../tests/examplefiles/inform6_example | 375 + .../pygments/tests/examplefiles/interp.scala | 10 + vendor/pygments/tests/examplefiles/irc.lsp | 0 .../pygments/tests/examplefiles/language.hy | 165 + vendor/pygments/tests/examplefiles/limbo.b | 456 + .../tests/examplefiles/livescript-demo.ls | 4 +- vendor/pygments/tests/examplefiles/main.cmake | 3 + .../pygments/tests/examplefiles/markdown.lsp | 0 .../pygments/tests/examplefiles/matlab_sample | 4 + .../tests/examplefiles/modula2_test_cases.def | 354 + .../pygments/tests/examplefiles/noexcept.cpp | 8 + .../tests/examplefiles/objc_example.m | 192 +- .../tests/examplefiles/objc_example2.m | 24 - .../{example.p => openedge_example} | 0 .../pygments/tests/examplefiles/pacman.conf | 49 + vendor/pygments/tests/examplefiles/pacman.ijs | 1107 +++ .../pygments/tests/examplefiles/pawn_example | 25 + .../tests/examplefiles/pkgconfig_example.pc | 18 + .../tests/examplefiles/py3tb_test.py3tb | 4 + .../tests/examplefiles/pycon_ctrlc_traceback | 118 + 
.../tests/examplefiles/pycon_test.pycon | 5 +- .../tests/examplefiles/qbasic_example | 2 + .../tests/examplefiles/r6rs-comments.scm | 23 + .../tests/examplefiles/resourcebundle_demo | 9 + .../tests/examplefiles/roboconf.graph | 40 + .../tests/examplefiles/roboconf.instances | 24 + ...tframework.txt => robotframework_test.txt} | 1 + .../tests/examplefiles/rql-queries.rql | 34 + .../tests/examplefiles/rust_example.rs | 233 - .../pygments/tests/examplefiles/sample.qvto | 4 + .../pygments/tests/examplefiles/scope.cirru | 237 + .../pygments/tests/examplefiles/simple.camkes | 38 + .../examplefiles/{simple.md => simple.croc} | 0 vendor/pygments/tests/examplefiles/sparql.rq | 48 + vendor/pygments/tests/examplefiles/subr.el | 4868 ++++++++++ .../pygments/tests/examplefiles/swig_java.swg | 1329 +++ .../tests/examplefiles/swig_std_vector.i | 225 + .../tests/examplefiles/tads3_example.t | 1248 +++ vendor/pygments/tests/examplefiles/termcap | 1340 +++ vendor/pygments/tests/examplefiles/terminfo | 1445 +++ .../pygments/tests/examplefiles/test-3.0.xq | 185 + .../tests/examplefiles/test-exist-update.xq | 75 + vendor/pygments/tests/examplefiles/test.R | 42 +- vendor/pygments/tests/examplefiles/test.adls | 313 + vendor/pygments/tests/examplefiles/test.agda | 109 + vendor/pygments/tests/examplefiles/test.apl | 26 + vendor/pygments/tests/examplefiles/test.bb | 95 + vendor/pygments/tests/examplefiles/test.bpl | 140 + vendor/pygments/tests/examplefiles/test.cadl | 32 + vendor/pygments/tests/examplefiles/test.csd | 260 + vendor/pygments/tests/examplefiles/test.cyp | 123 + vendor/pygments/tests/examplefiles/test.ebnf | 31 + vendor/pygments/tests/examplefiles/test.ecl | 58 - vendor/pygments/tests/examplefiles/test.fan | 0 .../pygments/tests/examplefiles/test.gradle | 20 + vendor/pygments/tests/examplefiles/test.idr | 101 + vendor/pygments/tests/examplefiles/test.lean | 217 + vendor/pygments/tests/examplefiles/test.mask | 41 + vendor/pygments/tests/examplefiles/test.odin | 43 + vendor/pygments/tests/examplefiles/test.orc | 257 + vendor/pygments/tests/examplefiles/test.p6 | 252 + vendor/pygments/tests/examplefiles/test.pan | 54 + vendor/pygments/tests/examplefiles/test.php | 12 +- vendor/pygments/tests/examplefiles/test.pig | 148 + vendor/pygments/tests/examplefiles/test.psl | 182 + vendor/pygments/tests/examplefiles/test.pwn | 253 + .../pygments/tests/examplefiles/test.pypylog | 839 -- vendor/pygments/tests/examplefiles/test.r3 | 34 +- vendor/pygments/tests/examplefiles/test.rsl | 111 + vendor/pygments/tests/examplefiles/test.sco | 10 + vendor/pygments/tests/examplefiles/test.shen | 137 + vendor/pygments/tests/examplefiles/test.swift | 65 + vendor/pygments/tests/examplefiles/test.zep | 33 + vendor/pygments/tests/examplefiles/test2.odin | 30 + .../tests/examplefiles/test_basic.adls | 28 + vendor/pygments/tests/examplefiles/twig_test | 4612 ++++++++++ vendor/pygments/tests/examplefiles/type.lisp | 16 + vendor/pygments/tests/examplefiles/unicode.go | 10 + vendor/pygments/tests/examplefiles/unicode.js | 6 + .../examplefiles/{test.bas => vbnet_test.bas} | 0 .../tests/examplefiles/vctreestatus_hg | 4 + vendor/pygments/tests/examplefiles/vimrc | 21 + vendor/pygments/tests/examplefiles/vpath.mk | 16 + .../pygments/tests/examplefiles/yahalom.cpsa | 34 + vendor/pygments/tests/old_run.py | 138 - vendor/pygments/tests/run.py | 51 +- vendor/pygments/tests/string_asserts.py | 22 + vendor/pygments/tests/support.py | 2 + vendor/pygments/tests/test_basic_api.py | 163 +- vendor/pygments/tests/test_cfm.py | 46 + 
vendor/pygments/tests/test_clexer.py | 234 +- vendor/pygments/tests/test_cmdline.py | 281 +- vendor/pygments/tests/test_examplefiles.py | 111 +- vendor/pygments/tests/test_ezhil.py | 182 + vendor/pygments/tests/test_html_formatter.py | 70 +- vendor/pygments/tests/test_inherit.py | 94 + vendor/pygments/tests/test_irc_formatter.py | 30 + vendor/pygments/tests/test_java.py | 78 + vendor/pygments/tests/test_latex_formatter.py | 13 +- vendor/pygments/tests/test_lexers_other.py | 80 + vendor/pygments/tests/test_objectiveclexer.py | 81 + vendor/pygments/tests/test_perllexer.py | 4 +- vendor/pygments/tests/test_qbasiclexer.py | 43 + vendor/pygments/tests/test_regexlexer.py | 9 +- vendor/pygments/tests/test_regexopt.py | 76 + vendor/pygments/tests/test_rtf_formatter.py | 109 + vendor/pygments/tests/test_ruby.py | 145 + vendor/pygments/tests/test_shell.py | 89 + vendor/pygments/tests/test_smarty.py | 40 + vendor/pygments/tests/test_string_asserts.py | 35 + .../pygments/tests/test_terminal_formatter.py | 51 + vendor/pygments/tests/test_textfmts.py | 41 + vendor/pygments/tests/test_token.py | 14 +- vendor/pygments/tests/test_unistring.py | 48 + vendor/pygments/tests/test_using_api.py | 2 +- vendor/pygments/tests/test_util.py | 98 +- vendor/pygments/tox.ini | 7 + 524 files changed, 106264 insertions(+), 46754 deletions(-) create mode 100644 vendor/pygments/.hgignore create mode 100644 vendor/pygments/.hgtags create mode 100644 vendor/pygments/README.rst create mode 100644 vendor/pygments/doc/Makefile create mode 100644 vendor/pygments/doc/_static/favicon.ico create mode 100644 vendor/pygments/doc/_static/logo_new.png create mode 100644 vendor/pygments/doc/_static/logo_only.png create mode 100644 vendor/pygments/doc/_templates/docssidebar.html create mode 100644 vendor/pygments/doc/_templates/indexsidebar.html create mode 100644 vendor/pygments/doc/_themes/pygments14/layout.html create mode 100644 vendor/pygments/doc/_themes/pygments14/static/bodybg.png create mode 100644 vendor/pygments/doc/_themes/pygments14/static/docbg.png create mode 100644 vendor/pygments/doc/_themes/pygments14/static/listitem.png create mode 100644 vendor/pygments/doc/_themes/pygments14/static/logo.png create mode 100644 vendor/pygments/doc/_themes/pygments14/static/pocoo.png create mode 100644 vendor/pygments/doc/_themes/pygments14/static/pygments14.css_t create mode 100644 vendor/pygments/doc/_themes/pygments14/theme.conf create mode 100644 vendor/pygments/doc/conf.py create mode 100644 vendor/pygments/doc/docs/api.rst create mode 100644 vendor/pygments/doc/docs/authors.rst create mode 100644 vendor/pygments/doc/docs/changelog.rst rename vendor/pygments/{docs/src/cmdline.txt => doc/docs/cmdline.rst} (86%) rename vendor/pygments/{docs/src/filterdevelopment.txt => doc/docs/filterdevelopment.rst} (88%) rename vendor/pygments/{docs/src/filters.txt => doc/docs/filters.rst} (85%) rename vendor/pygments/{docs/src/formatterdevelopment.txt => doc/docs/formatterdevelopment.rst} (98%) rename vendor/pygments/{docs/src/formatters.txt => doc/docs/formatters.rst} (90%) create mode 100644 vendor/pygments/doc/docs/index.rst rename vendor/pygments/{docs/src/integrate.txt => doc/docs/integrate.rst} (54%) rename vendor/pygments/{docs/src/java.txt => doc/docs/java.rst} (82%) create mode 100644 vendor/pygments/doc/docs/lexerdevelopment.rst rename vendor/pygments/{docs/src/lexers.txt => doc/docs/lexers.rst} (89%) rename vendor/pygments/{docs/src/moinmoin.txt => doc/docs/moinmoin.rst} (100%) rename vendor/pygments/{docs/src/plugins.txt => 
doc/docs/plugins.rst} (100%) rename vendor/pygments/{docs/src/quickstart.txt => doc/docs/quickstart.rst} (80%) rename vendor/pygments/{docs/src/rstdirective.txt => doc/docs/rstdirective.rst} (100%) rename vendor/pygments/{docs/src/styles.txt => doc/docs/styles.rst} (95%) rename vendor/pygments/{docs/src/tokens.txt => doc/docs/tokens.rst} (94%) rename vendor/pygments/{docs/src/unicode.txt => doc/docs/unicode.rst} (53%) create mode 100644 vendor/pygments/doc/download.rst create mode 100644 vendor/pygments/doc/faq.rst create mode 100644 vendor/pygments/doc/index.rst create mode 100644 vendor/pygments/doc/languages.rst create mode 100644 vendor/pygments/doc/make.bat rename vendor/pygments/{docs => doc}/pygmentize.1 (100%) delete mode 100644 vendor/pygments/docs/build/api.html delete mode 100644 vendor/pygments/docs/build/authors.html delete mode 100644 vendor/pygments/docs/build/changelog.html delete mode 100644 vendor/pygments/docs/build/cmdline.html delete mode 100644 vendor/pygments/docs/build/filterdevelopment.html delete mode 100644 vendor/pygments/docs/build/filters.html delete mode 100644 vendor/pygments/docs/build/formatterdevelopment.html delete mode 100644 vendor/pygments/docs/build/formatters.html delete mode 100644 vendor/pygments/docs/build/index.html delete mode 100644 vendor/pygments/docs/build/installation.html delete mode 100644 vendor/pygments/docs/build/integrate.html delete mode 100644 vendor/pygments/docs/build/java.html delete mode 100644 vendor/pygments/docs/build/lexerdevelopment.html delete mode 100644 vendor/pygments/docs/build/lexers.html delete mode 100644 vendor/pygments/docs/build/moinmoin.html delete mode 100644 vendor/pygments/docs/build/plugins.html delete mode 100644 vendor/pygments/docs/build/quickstart.html delete mode 100644 vendor/pygments/docs/build/rstdirective.html delete mode 100644 vendor/pygments/docs/build/styles.html delete mode 100644 vendor/pygments/docs/build/tokens.html delete mode 100644 vendor/pygments/docs/build/unicode.html delete mode 100755 vendor/pygments/docs/generate.py delete mode 100644 vendor/pygments/docs/src/api.txt delete mode 100644 vendor/pygments/docs/src/authors.txt delete mode 100644 vendor/pygments/docs/src/changelog.txt delete mode 100644 vendor/pygments/docs/src/index.txt delete mode 100644 vendor/pygments/docs/src/installation.txt delete mode 100644 vendor/pygments/docs/src/lexerdevelopment.txt delete mode 100644 vendor/pygments/external/rst-directive-old.py mode change 100755 => 100644 vendor/pygments/ez_setup.py create mode 100644 vendor/pygments/pygments/formatters/irc.py rename vendor/pygments/pygments/lexers/{_asybuiltins.py => _asy_builtins.py} (99%) rename vendor/pygments/pygments/lexers/{_clbuiltins.py => _cl_builtins.py} (97%) create mode 100644 vendor/pygments/pygments/lexers/_cocoa_builtins.py create mode 100644 vendor/pygments/pygments/lexers/_csound_builtins.py rename vendor/pygments/pygments/lexers/{_lassobuiltins.py => _lasso_builtins.py} (93%) rename vendor/pygments/pygments/lexers/{_luabuiltins.py => _lua_builtins.py} (80%) create mode 100644 vendor/pygments/pygments/lexers/_mql_builtins.py create mode 100644 vendor/pygments/pygments/lexers/_openedge_builtins.py delete mode 100644 vendor/pygments/pygments/lexers/_openedgebuiltins.py rename vendor/pygments/pygments/lexers/{_phpbuiltins.py => _php_builtins.py} (70%) create mode 100644 vendor/pygments/pygments/lexers/_sourcemod_builtins.py delete mode 100644 vendor/pygments/pygments/lexers/_sourcemodbuiltins.py create mode 100644 
vendor/pygments/pygments/lexers/_vim_builtins.py delete mode 100644 vendor/pygments/pygments/lexers/_vimbuiltins.py create mode 100644 vendor/pygments/pygments/lexers/actionscript.py create mode 100644 vendor/pygments/pygments/lexers/algebra.py create mode 100644 vendor/pygments/pygments/lexers/ambient.py create mode 100644 vendor/pygments/pygments/lexers/apl.py create mode 100644 vendor/pygments/pygments/lexers/archetype.py create mode 100644 vendor/pygments/pygments/lexers/automation.py create mode 100644 vendor/pygments/pygments/lexers/basic.py create mode 100644 vendor/pygments/pygments/lexers/business.py create mode 100644 vendor/pygments/pygments/lexers/c_cpp.py create mode 100644 vendor/pygments/pygments/lexers/c_like.py create mode 100644 vendor/pygments/pygments/lexers/chapel.py create mode 100644 vendor/pygments/pygments/lexers/configs.py create mode 100644 vendor/pygments/pygments/lexers/console.py create mode 100644 vendor/pygments/pygments/lexers/csound.py create mode 100644 vendor/pygments/pygments/lexers/css.py create mode 100644 vendor/pygments/pygments/lexers/d.py create mode 100644 vendor/pygments/pygments/lexers/data.py create mode 100644 vendor/pygments/pygments/lexers/diff.py create mode 100644 vendor/pygments/pygments/lexers/dsls.py create mode 100644 vendor/pygments/pygments/lexers/dylan.py create mode 100644 vendor/pygments/pygments/lexers/ecl.py create mode 100644 vendor/pygments/pygments/lexers/eiffel.py create mode 100644 vendor/pygments/pygments/lexers/elm.py create mode 100644 vendor/pygments/pygments/lexers/erlang.py create mode 100644 vendor/pygments/pygments/lexers/esoteric.py create mode 100644 vendor/pygments/pygments/lexers/ezhil.py create mode 100644 vendor/pygments/pygments/lexers/factor.py create mode 100644 vendor/pygments/pygments/lexers/fantom.py create mode 100644 vendor/pygments/pygments/lexers/felix.py create mode 100644 vendor/pygments/pygments/lexers/fortran.py create mode 100644 vendor/pygments/pygments/lexers/go.py create mode 100644 vendor/pygments/pygments/lexers/grammar_notation.py create mode 100644 vendor/pygments/pygments/lexers/graph.py create mode 100644 vendor/pygments/pygments/lexers/graphics.py create mode 100644 vendor/pygments/pygments/lexers/haskell.py create mode 100644 vendor/pygments/pygments/lexers/haxe.py create mode 100644 vendor/pygments/pygments/lexers/hexdump.py create mode 100644 vendor/pygments/pygments/lexers/html.py create mode 100644 vendor/pygments/pygments/lexers/idl.py create mode 100644 vendor/pygments/pygments/lexers/igor.py create mode 100644 vendor/pygments/pygments/lexers/inferno.py create mode 100644 vendor/pygments/pygments/lexers/installers.py create mode 100644 vendor/pygments/pygments/lexers/int_fiction.py create mode 100644 vendor/pygments/pygments/lexers/iolang.py create mode 100644 vendor/pygments/pygments/lexers/j.py create mode 100644 vendor/pygments/pygments/lexers/javascript.py create mode 100644 vendor/pygments/pygments/lexers/julia.py create mode 100644 vendor/pygments/pygments/lexers/lisp.py create mode 100644 vendor/pygments/pygments/lexers/make.py create mode 100644 vendor/pygments/pygments/lexers/markup.py create mode 100644 vendor/pygments/pygments/lexers/matlab.py create mode 100644 vendor/pygments/pygments/lexers/ml.py create mode 100644 vendor/pygments/pygments/lexers/modeling.py create mode 100644 vendor/pygments/pygments/lexers/modula2.py create mode 100644 vendor/pygments/pygments/lexers/nimrod.py create mode 100644 vendor/pygments/pygments/lexers/nit.py create mode 100644 
vendor/pygments/pygments/lexers/nix.py create mode 100644 vendor/pygments/pygments/lexers/oberon.py create mode 100644 vendor/pygments/pygments/lexers/objective.py create mode 100644 vendor/pygments/pygments/lexers/ooc.py create mode 100644 vendor/pygments/pygments/lexers/parasail.py create mode 100644 vendor/pygments/pygments/lexers/pascal.py create mode 100644 vendor/pygments/pygments/lexers/pawn.py create mode 100644 vendor/pygments/pygments/lexers/perl.py create mode 100644 vendor/pygments/pygments/lexers/php.py create mode 100644 vendor/pygments/pygments/lexers/praat.py create mode 100644 vendor/pygments/pygments/lexers/prolog.py create mode 100644 vendor/pygments/pygments/lexers/python.py create mode 100644 vendor/pygments/pygments/lexers/qvt.py create mode 100644 vendor/pygments/pygments/lexers/r.py create mode 100644 vendor/pygments/pygments/lexers/rdf.py create mode 100644 vendor/pygments/pygments/lexers/rebol.py create mode 100644 vendor/pygments/pygments/lexers/resource.py create mode 100644 vendor/pygments/pygments/lexers/roboconf.py rename vendor/pygments/pygments/lexers/{_robotframeworklexer.py => robotframework.py} (93%) create mode 100644 vendor/pygments/pygments/lexers/ruby.py create mode 100644 vendor/pygments/pygments/lexers/rust.py create mode 100644 vendor/pygments/pygments/lexers/scripting.py create mode 100644 vendor/pygments/pygments/lexers/smalltalk.py create mode 100644 vendor/pygments/pygments/lexers/snobol.py create mode 100644 vendor/pygments/pygments/lexers/supercollider.py create mode 100644 vendor/pygments/pygments/lexers/tcl.py create mode 100644 vendor/pygments/pygments/lexers/testing.py create mode 100644 vendor/pygments/pygments/lexers/textedit.py create mode 100644 vendor/pygments/pygments/lexers/textfmts.py create mode 100644 vendor/pygments/pygments/lexers/theorem.py create mode 100644 vendor/pygments/pygments/lexers/trafficscript.py create mode 100644 vendor/pygments/pygments/lexers/urbi.py create mode 100644 vendor/pygments/pygments/lexers/webmisc.py create mode 100644 vendor/pygments/pygments/lexers/x10.py create mode 100644 vendor/pygments/pygments/modeline.py create mode 100644 vendor/pygments/pygments/regexopt.py create mode 100644 vendor/pygments/pygments/sphinxext.py create mode 100644 vendor/pygments/pygments/styles/algol.py create mode 100644 vendor/pygments/pygments/styles/algol_nu.py create mode 100644 vendor/pygments/pygments/styles/arduino.py create mode 100644 vendor/pygments/pygments/styles/igor.py create mode 100644 vendor/pygments/pygments/styles/lovelace.py create mode 100644 vendor/pygments/pygments/styles/paraiso_dark.py create mode 100644 vendor/pygments/pygments/styles/paraiso_light.py create mode 100644 vendor/pygments/pygments/styles/xcode.py create mode 100644 vendor/pygments/requirements.txt create mode 100755 vendor/pygments/scripts/debug_lexer.py delete mode 100755 vendor/pygments/scripts/find_codetags.py mode change 100755 => 120000 vendor/pygments/scripts/find_error.py delete mode 100755 vendor/pygments/scripts/reindent.py mode change 100644 => 100755 vendor/pygments/scripts/vim2pygments.py create mode 100644 vendor/pygments/tests/.coverage create mode 100644 vendor/pygments/tests/cover/coverage_html.js create mode 100644 vendor/pygments/tests/cover/jquery.hotkeys.js create mode 100644 vendor/pygments/tests/cover/jquery.isonscreen.js create mode 100644 vendor/pygments/tests/cover/jquery.min.js create mode 100644 vendor/pygments/tests/cover/jquery.tablesorter.min.js create mode 100644 
vendor/pygments/tests/cover/keybd_closed.png create mode 100644 vendor/pygments/tests/cover/keybd_open.png create mode 100644 vendor/pygments/tests/cover/status.dat create mode 100644 vendor/pygments/tests/cover/style.css create mode 100644 vendor/pygments/tests/examplefiles/99_bottles_of_beer.chpl create mode 100644 vendor/pygments/tests/examplefiles/Blink.ino mode change 100755 => 100644 vendor/pygments/tests/examplefiles/CPDictionary.j create mode 100644 vendor/pygments/tests/examplefiles/Deflate.fs create mode 100644 vendor/pygments/tests/examplefiles/Error.pmod create mode 100644 vendor/pygments/tests/examplefiles/FakeFile.pike create mode 100644 vendor/pygments/tests/examplefiles/Get-CommandDefinitionHtml.ps1 create mode 100644 vendor/pygments/tests/examplefiles/IPDispatchC.nc create mode 100644 vendor/pygments/tests/examplefiles/IPDispatchP.nc create mode 100644 vendor/pygments/tests/examplefiles/RoleQ.pm6 create mode 100644 vendor/pygments/tests/examplefiles/abnf_example1.abnf create mode 100644 vendor/pygments/tests/examplefiles/abnf_example2.abnf create mode 100644 vendor/pygments/tests/examplefiles/ahcon.f create mode 100644 vendor/pygments/tests/examplefiles/all.nit rename vendor/pygments/tests/examplefiles/{ANTLRv3.g => antlr_ANTLRv3.g} (100%) create mode 100644 vendor/pygments/tests/examplefiles/automake.mk delete mode 100644 vendor/pygments/tests/examplefiles/batchfile.bat create mode 100644 vendor/pygments/tests/examplefiles/bnf_example1.bnf create mode 100644 vendor/pygments/tests/examplefiles/char.scala create mode 100644 vendor/pygments/tests/examplefiles/clojure-weird-keywords.clj create mode 100644 vendor/pygments/tests/examplefiles/core.cljs create mode 100644 vendor/pygments/tests/examplefiles/demo.css.in create mode 100644 vendor/pygments/tests/examplefiles/demo.hbs create mode 100644 vendor/pygments/tests/examplefiles/demo.js.in create mode 100644 vendor/pygments/tests/examplefiles/demo.thrift create mode 100644 vendor/pygments/tests/examplefiles/demo.xul.in create mode 100644 vendor/pygments/tests/examplefiles/docker.docker create mode 100644 vendor/pygments/tests/examplefiles/eg_example1.eg create mode 100644 vendor/pygments/tests/examplefiles/ember.handlebars create mode 100644 vendor/pygments/tests/examplefiles/es6.js create mode 100644 vendor/pygments/tests/examplefiles/eval.rs create mode 100644 vendor/pygments/tests/examplefiles/example.als create mode 100644 vendor/pygments/tests/examplefiles/example.bat create mode 100644 vendor/pygments/tests/examplefiles/example.bc create mode 100644 vendor/pygments/tests/examplefiles/example.chai create mode 100644 vendor/pygments/tests/examplefiles/example.clay create mode 100644 vendor/pygments/tests/examplefiles/example.coffee create mode 100644 vendor/pygments/tests/examplefiles/example.e create mode 100644 vendor/pygments/tests/examplefiles/example.elm create mode 100644 vendor/pygments/tests/examplefiles/example.ezt create mode 100644 vendor/pygments/tests/examplefiles/example.f90 create mode 100644 vendor/pygments/tests/examplefiles/example.feature create mode 100644 vendor/pygments/tests/examplefiles/example.fish create mode 100644 vendor/pygments/tests/examplefiles/example.gd create mode 100644 vendor/pygments/tests/examplefiles/example.gi create mode 100644 vendor/pygments/tests/examplefiles/example.golo create mode 100644 vendor/pygments/tests/examplefiles/example.groovy create mode 100644 vendor/pygments/tests/examplefiles/example.hs create mode 100644 vendor/pygments/tests/examplefiles/example.hx create 
mode 100644 vendor/pygments/tests/examplefiles/example.i6t create mode 100644 vendor/pygments/tests/examplefiles/example.i7x create mode 100644 vendor/pygments/tests/examplefiles/example.j create mode 100644 vendor/pygments/tests/examplefiles/example.java create mode 100644 vendor/pygments/tests/examplefiles/example.jcl create mode 100644 vendor/pygments/tests/examplefiles/example.jsonld create mode 100644 vendor/pygments/tests/examplefiles/example.kal create mode 100644 vendor/pygments/tests/examplefiles/example.lagda create mode 100644 vendor/pygments/tests/examplefiles/example.liquid create mode 100644 vendor/pygments/tests/examplefiles/example.ma create mode 100644 vendor/pygments/tests/examplefiles/example.mac create mode 100644 vendor/pygments/tests/examplefiles/example.mq4 create mode 100644 vendor/pygments/tests/examplefiles/example.mqh create mode 100644 vendor/pygments/tests/examplefiles/example.ni create mode 100644 vendor/pygments/tests/examplefiles/example.nix create mode 100644 vendor/pygments/tests/examplefiles/example.pcmk create mode 100644 vendor/pygments/tests/examplefiles/example.pp create mode 100644 vendor/pygments/tests/examplefiles/example.praat create mode 100644 vendor/pygments/tests/examplefiles/example.red create mode 100644 vendor/pygments/tests/examplefiles/example.reds create mode 100644 vendor/pygments/tests/examplefiles/example.rexx create mode 100644 vendor/pygments/tests/examplefiles/example.rts create mode 100644 vendor/pygments/tests/examplefiles/example.scd create mode 100644 vendor/pygments/tests/examplefiles/example.sh create mode 100644 vendor/pygments/tests/examplefiles/example.slim create mode 100644 vendor/pygments/tests/examplefiles/example.sls create mode 100644 vendor/pygments/tests/examplefiles/example.tap create mode 100644 vendor/pygments/tests/examplefiles/example.tf create mode 100644 vendor/pygments/tests/examplefiles/example.thy create mode 100644 vendor/pygments/tests/examplefiles/example.todotxt create mode 100644 vendor/pygments/tests/examplefiles/example.ttl create mode 100644 vendor/pygments/tests/examplefiles/example.x10 create mode 100644 vendor/pygments/tests/examplefiles/example1.cadl create mode 100644 vendor/pygments/tests/examplefiles/exampleScript.cfc create mode 100644 vendor/pygments/tests/examplefiles/exampleTag.cfc create mode 100644 vendor/pygments/tests/examplefiles/example_coq.v create mode 100644 vendor/pygments/tests/examplefiles/ezhil_primefactors.n create mode 100644 vendor/pygments/tests/examplefiles/grammar-test.p6 create mode 100644 vendor/pygments/tests/examplefiles/hash_syntax.rb create mode 100644 vendor/pygments/tests/examplefiles/hello.at create mode 100644 vendor/pygments/tests/examplefiles/hello.golo create mode 100644 vendor/pygments/tests/examplefiles/hello.lsl create mode 100644 vendor/pygments/tests/examplefiles/hexdump_debugexe create mode 100644 vendor/pygments/tests/examplefiles/hexdump_hd create mode 100644 vendor/pygments/tests/examplefiles/hexdump_hexcat create mode 100644 vendor/pygments/tests/examplefiles/hexdump_hexdump create mode 100644 vendor/pygments/tests/examplefiles/hexdump_od create mode 100644 vendor/pygments/tests/examplefiles/hexdump_xxd rename vendor/pygments/tests/examplefiles/{File.hy => hybris_File.hy} (100%) rename vendor/pygments/tests/examplefiles/{mg_sample.pro => idl_sample.pro} (100%) create mode 100644 vendor/pygments/tests/examplefiles/iex_example delete mode 100644 vendor/pygments/tests/examplefiles/import.hs create mode 100644 
vendor/pygments/tests/examplefiles/inform6_example create mode 100644 vendor/pygments/tests/examplefiles/interp.scala mode change 100755 => 100644 vendor/pygments/tests/examplefiles/irc.lsp create mode 100644 vendor/pygments/tests/examplefiles/language.hy create mode 100644 vendor/pygments/tests/examplefiles/limbo.b mode change 100755 => 100644 vendor/pygments/tests/examplefiles/markdown.lsp create mode 100644 vendor/pygments/tests/examplefiles/modula2_test_cases.def create mode 100644 vendor/pygments/tests/examplefiles/noexcept.cpp delete mode 100644 vendor/pygments/tests/examplefiles/objc_example2.m rename vendor/pygments/tests/examplefiles/{example.p => openedge_example} (100%) create mode 100644 vendor/pygments/tests/examplefiles/pacman.conf create mode 100644 vendor/pygments/tests/examplefiles/pacman.ijs create mode 100644 vendor/pygments/tests/examplefiles/pawn_example create mode 100644 vendor/pygments/tests/examplefiles/pkgconfig_example.pc create mode 100644 vendor/pygments/tests/examplefiles/py3tb_test.py3tb create mode 100644 vendor/pygments/tests/examplefiles/pycon_ctrlc_traceback create mode 100644 vendor/pygments/tests/examplefiles/qbasic_example create mode 100644 vendor/pygments/tests/examplefiles/r6rs-comments.scm create mode 100644 vendor/pygments/tests/examplefiles/resourcebundle_demo create mode 100644 vendor/pygments/tests/examplefiles/roboconf.graph create mode 100644 vendor/pygments/tests/examplefiles/roboconf.instances rename vendor/pygments/tests/examplefiles/{robotframework.txt => robotframework_test.txt} (95%) create mode 100644 vendor/pygments/tests/examplefiles/rql-queries.rql delete mode 100644 vendor/pygments/tests/examplefiles/rust_example.rs create mode 100644 vendor/pygments/tests/examplefiles/sample.qvto create mode 100644 vendor/pygments/tests/examplefiles/scope.cirru create mode 100644 vendor/pygments/tests/examplefiles/simple.camkes rename vendor/pygments/tests/examplefiles/{simple.md => simple.croc} (100%) create mode 100644 vendor/pygments/tests/examplefiles/sparql.rq create mode 100644 vendor/pygments/tests/examplefiles/subr.el create mode 100644 vendor/pygments/tests/examplefiles/swig_java.swg create mode 100644 vendor/pygments/tests/examplefiles/swig_std_vector.i create mode 100644 vendor/pygments/tests/examplefiles/tads3_example.t create mode 100644 vendor/pygments/tests/examplefiles/termcap create mode 100644 vendor/pygments/tests/examplefiles/terminfo create mode 100644 vendor/pygments/tests/examplefiles/test-3.0.xq create mode 100644 vendor/pygments/tests/examplefiles/test-exist-update.xq create mode 100644 vendor/pygments/tests/examplefiles/test.adls create mode 100644 vendor/pygments/tests/examplefiles/test.agda create mode 100644 vendor/pygments/tests/examplefiles/test.apl create mode 100644 vendor/pygments/tests/examplefiles/test.bb create mode 100644 vendor/pygments/tests/examplefiles/test.bpl create mode 100644 vendor/pygments/tests/examplefiles/test.cadl create mode 100644 vendor/pygments/tests/examplefiles/test.csd create mode 100644 vendor/pygments/tests/examplefiles/test.cyp create mode 100644 vendor/pygments/tests/examplefiles/test.ebnf delete mode 100644 vendor/pygments/tests/examplefiles/test.ecl mode change 100755 => 100644 vendor/pygments/tests/examplefiles/test.fan create mode 100644 vendor/pygments/tests/examplefiles/test.gradle create mode 100644 vendor/pygments/tests/examplefiles/test.idr create mode 100644 vendor/pygments/tests/examplefiles/test.lean create mode 100644 vendor/pygments/tests/examplefiles/test.mask create 
mode 100644 vendor/pygments/tests/examplefiles/test.odin create mode 100644 vendor/pygments/tests/examplefiles/test.orc create mode 100644 vendor/pygments/tests/examplefiles/test.p6 create mode 100644 vendor/pygments/tests/examplefiles/test.pan create mode 100644 vendor/pygments/tests/examplefiles/test.pig create mode 100644 vendor/pygments/tests/examplefiles/test.psl create mode 100644 vendor/pygments/tests/examplefiles/test.pwn create mode 100644 vendor/pygments/tests/examplefiles/test.rsl create mode 100644 vendor/pygments/tests/examplefiles/test.sco create mode 100644 vendor/pygments/tests/examplefiles/test.shen create mode 100644 vendor/pygments/tests/examplefiles/test.swift create mode 100644 vendor/pygments/tests/examplefiles/test.zep create mode 100644 vendor/pygments/tests/examplefiles/test2.odin create mode 100644 vendor/pygments/tests/examplefiles/test_basic.adls create mode 100644 vendor/pygments/tests/examplefiles/twig_test create mode 100644 vendor/pygments/tests/examplefiles/unicode.go create mode 100644 vendor/pygments/tests/examplefiles/unicode.js rename vendor/pygments/tests/examplefiles/{test.bas => vbnet_test.bas} (100%) create mode 100644 vendor/pygments/tests/examplefiles/vctreestatus_hg create mode 100644 vendor/pygments/tests/examplefiles/vimrc create mode 100644 vendor/pygments/tests/examplefiles/vpath.mk create mode 100644 vendor/pygments/tests/examplefiles/yahalom.cpsa delete mode 100644 vendor/pygments/tests/old_run.py create mode 100644 vendor/pygments/tests/string_asserts.py create mode 100644 vendor/pygments/tests/test_cfm.py create mode 100644 vendor/pygments/tests/test_ezhil.py create mode 100644 vendor/pygments/tests/test_inherit.py create mode 100644 vendor/pygments/tests/test_irc_formatter.py create mode 100644 vendor/pygments/tests/test_java.py create mode 100644 vendor/pygments/tests/test_lexers_other.py create mode 100644 vendor/pygments/tests/test_objectiveclexer.py create mode 100644 vendor/pygments/tests/test_qbasiclexer.py create mode 100644 vendor/pygments/tests/test_regexopt.py create mode 100644 vendor/pygments/tests/test_rtf_formatter.py create mode 100644 vendor/pygments/tests/test_ruby.py create mode 100644 vendor/pygments/tests/test_shell.py create mode 100644 vendor/pygments/tests/test_smarty.py create mode 100644 vendor/pygments/tests/test_string_asserts.py create mode 100644 vendor/pygments/tests/test_terminal_formatter.py create mode 100644 vendor/pygments/tests/test_textfmts.py create mode 100644 vendor/pygments/tests/test_unistring.py create mode 100644 vendor/pygments/tox.ini diff --git a/vendor/pygments/.hgignore b/vendor/pygments/.hgignore new file mode 100644 index 0000000..850baf1 --- /dev/null +++ b/vendor/pygments/.hgignore @@ -0,0 +1,19 @@ +syntax: glob +*.egg +*.pyc +*.pyo +.*.sw[op] +.idea/ +.ropeproject +.project +.tags +.tox +Pygments.egg-info/* +TAGS +build/* +dist/* +doc/_build +TAGS +tests/.coverage +tests/cover +tests/examplefiles/output diff --git a/vendor/pygments/.hgtags b/vendor/pygments/.hgtags new file mode 100644 index 0000000..33b32ca --- /dev/null +++ b/vendor/pygments/.hgtags @@ -0,0 +1,31 @@ +634420aa4221cc1eb2b3753bd571166bd9e611d4 0.9 +942ecbb5c84ca5d57ae82f5697775973f4e12717 0.10 +63632d0340958d891176db20fe9a32a56abcd5ea 0.11 +13834ec94d2c5a90a68bc2c2a327abd962c486bc 0.11.1 +a5748745272afffd725570e068a560d46e28dc1f 1.0 +5a794a620dc711a219722a7af94d9d2e95cda26d 1.1 +dd81c35efd95292de4965153c66c8bbfe435f1c4 1.1.1 +e7691aa4f473a2cdaa2e5b7bfed8aec196719aca 0.5.1 +6f53364d63ddb8bd9532bb6ea402e3af05275b03 0.5 
+11efe99c11e601071c3a77910b9fca769de66fbf 0.6 +99df0a7404d168b05626ffced6fd16edcf58c145 0.7 +d0b08fd569d3d9dafec4c045a7d8876442b3ef64 0.7.1 +1054522d1dda9c7899516ead3e65e5e363fdf30d 0.8 +066e56d8f5caa31e15386fff6f938bedd85a8732 0.8.1 +bae0833cae75e5a641abe3c4b430fa384cd9d258 1.2 +f6e5acee4f761696676e05a9112c91a5a5670b49 1.2.1 +580c5ce755486bc92c79c50f80cfc79924e15140 1.2.2 +c62867700c9e98cc2988c62f298ec54cee9b6927 1.3 +3a3846c2503db85bb70a243c8bc702629c4bce57 1.3.1 +8ad6d35dd2ab0530a1e2c088ab7fe0e00426b5f9 1.4 +eff3aee4abff2b72564ddfde77fcc82adbba52ad 1.5 +2c262bfc66b05a8aecc1109c3acc5b9447a5213c 1.6rc1 +7c962dcb484cb73394aec7f41709940340dc8a9c 1.6 +da509a68ea620bbb8ee3f5d5cf7761375d8f4451 2.0rc1 +ed3206a773e9cb90a0edeabee8ef6b56b5b9a53c 2.0 +94e1e056c92d97e3a54759f9216e8deff22efbdd 2.0.1 +142a870bf0f1822414649ae26f433b112a5c92d5 2.0.2 +34530db252d35d7ef57a8dbb9fce7bcc46f6ba6b 2.1 +2935c3a59672e8ae74ffb7ea66ea6567f49782f6 2.1.1 +8e7ebc56153cf899067333bff4f15ae98758a2e1 2.1.2 +88527db663dce0729c2cd6e3bc2f3c657ae39254 2.1.3 diff --git a/vendor/pygments/AUTHORS b/vendor/pygments/AUTHORS index 9447bd0..9318d43 100644 --- a/vendor/pygments/AUTHORS +++ b/vendor/pygments/AUTHORS @@ -6,76 +6,117 @@ Major developers are Tim Hatch and Armin Ronacher Other contributors, listed alphabetically, are: * Sam Aaron -- Ioke lexer -* Kumar Appaiah -- Debian control lexer * Ali Afshar -- image formatter +* Thomas Aglassinger -- Easytrieve, JCL and Rexx lexers +* Muthiah Annamalai -- Ezhil lexer +* Kumar Appaiah -- Debian control lexer * Andreas Amann -- AppleScript lexer * Timothy Armstrong -- Dart lexer fixes * Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers * Jeremy Ashkenas -- CoffeeScript lexer +* José Joaquín Atria -- Praat lexer * Stefan Matthias Aust -- Smalltalk lexer +* Lucas Bajolet -- Nit lexer * Ben Bangert -- Mako lexers * Max Battcher -- Darcs patch lexer +* Thomas Baruchel -- APL lexer +* Tim Baumann -- (Literate) Agda lexer * Paul Baumgart, 280 North, Inc. -- Objective-J lexer * Michael Bayer -- Myghty lexers +* Thomas Beale -- Archetype lexers * John Benediktsson -- Factor lexer +* Trevor Bergeron -- mIRC formatter +* Vincent Bernat -- LessCSS lexer * Christopher Bertels -- Fancy lexer +* Sébastien Bigaret -- QVT Operational lexer * Jarrett Billingsley -- MiniD lexer * Adam Blinkinsop -- Haskell, Redcode lexers * Frits van Bommel -- assembler lexers * Pierre Bourdon -- bugfixes +* chebee7i -- Python traceback lexer improvements * Hiram Chirino -- Scaml and Jade lexers * Ian Cooper -- VGL lexer +* David Corbett -- Inform, Jasmin, and TADS 3 lexers * Leaf Corcoran -- MoonScript lexer -* Christian Jann -- ShellSession lexer * Christopher Creutzig -- MuPAD lexer +* Daniël W. Crompton -- Pike lexer * Pete Curry -- bugfixes -* Owen Durni -- haXe lexer +* Bryan Davis -- EBNF lexer +* Bruno Deferrari -- Shen lexer +* Giedrius Dubinskas -- HTML formatter improvements +* Owen Durni -- Haxe lexer +* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer +* James Edwards -- Terraform lexer * Nick Efford -- Python 3 lexer * Sven Efftinge -- Xtend lexer * Artem Egorkine -- terminal256 formatter +* Matthew Fernandez -- CAmkES lexer +* Michael Ficarra -- CPSA lexer * James H. Fisher -- PostScript lexer +* William S. 
Fulton -- SWIG lexer * Carlos Galdino -- Elixir and Elixir Console lexers * Michael Galloy -- IDL lexer * Naveen Garg -- Autohotkey lexer * Laurent Gautier -- R/S lexer * Alex Gaynor -- PyPy log lexer +* Richard Gerkin -- Igor Pro lexer * Alain Gilbert -- TypeScript lexer +* Alex Gilding -- BlitzBasic lexer * Bertrand Goetzmann -- Groovy lexer * Krzysiek Goj -- Scala lexer * Matt Good -- Genshi, Cheetah lexers +* Michał Górny -- vim modeline support +* Alex Gosse -- TrafficScript lexer * Patrick Gotthardt -- PHP namespaces support * Olivier Guibe -- Asymptote lexer * Jordi Gutiérrez Hermoso -- Octave lexer +* Florian Hahn -- Boogie lexer * Martin Harriman -- SNOBOL lexer * Matthew Harrison -- SVG formatter * Steven Hazel -- Tcl lexer +* Dan Michael Heggø -- Turtle lexer * Aslak Hellesøy -- Gherkin lexer * Greg Hendershott -- Racket lexer +* Justin Hendrick -- ParaSail lexer * David Hess, Fish Software, Inc. -- Objective-J lexer * Varun Hiremath -- Debian control lexer +* Rob Hoelz -- Perl 6 lexer * Doug Hogan -- Mscgen lexer * Ben Hollis -- Mason lexer -* Dustin Howett -- Logos lexer +* Max Horn -- GAP lexer * Alastair Houghton -- Lexer inheritance facility * Tim Howard -- BlitzMax lexer +* Dustin Howett -- Logos lexer * Ivan Inozemtsev -- Fantom lexer +* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session, MSDOS session, BC * Brian R. Jackson -- Tea lexer +* Christian Jann -- ShellSession lexer * Dennis Kaarsemaker -- sources.list lexer +* Dmitri Kabak -- Inferno Limbo lexer * Igor Kalnitsky -- vhdl lexer +* Alexander Kit -- MaskJS lexer * Pekka Klärck -- Robot Framework lexer +* Gerwin Klein -- Isabelle lexer * Eric Knibbe -- Lasso lexer +* Stepan Koltsov -- Clay lexer * Adam Koprowski -- Opa lexer * Benjamin Kowarsch -- Modula-2 lexer +* Domen Kožar -- Nix lexer +* Oleh Krekel -- Emacs Lisp lexer * Alexander Kriegisch -- Kconfig and AspectJ lexers * Marek Kubica -- Scheme lexer * Jochen Kupperschmidt -- Markdown processor * Gerd Kurzbach -- Modelica lexer * Jon Larimer, Google Inc. -- Smali lexer * Olov Lassus -- Dart lexer +* Matt Layman -- TAP lexer * Sylvestre Ledru -- Scilab lexer * Mark Lee -- Vala lexer +* Valentin Lorentz -- C++ lexer improvements * Ben Mabey -- Gherkin lexer * Angus MacArthur -- QML lexer +* Louis Mandel -- X10 lexer +* Louis Marchand -- Eiffel lexer * Simone Margaritelli -- Hybris lexer * Kirk McDonald -- D lexer * Gordon McGregor -- SystemVerilog lexer @@ -83,6 +124,7 @@ Other contributors, listed alphabetically, are: * Brian McKenna -- F# lexer * Charles McLaughlin -- Puppet lexer * Lukas Meuser -- BBCode formatter, Lua lexer +* Cat Miller -- Pig lexer * Paul Miller -- LiveScript lexer * Hong Minhee -- HTTP lexer * Michael Mior -- Awk lexer @@ -91,24 +133,32 @@ Other contributors, listed alphabetically, are: * Jon Morton -- Rust lexer * Paulo Moura -- Logtalk lexer * Mher Movsisyan -- DTD lexer +* Dejan Muhamedagic -- Crmsh lexer * Ana Nelson -- Ragel, ANTLR, R console lexers * Nam T. 
Nguyen -- Monokai style * Jesper Noehr -- HTML formatter "anchorlinenos" * Mike Nolta -- Julia lexer * Jonas Obrist -- BBCode lexer +* Edward O'Callaghan -- Cryptol lexer * David Oliva -- Rebol lexer -* Jon Parise -- Protocol buffers lexer -* Ronny Pfannschmidt -- BBCode lexer +* Pat Pannuto -- nesC lexer +* Jon Parise -- Protocol buffers and Thrift lexers * Benjamin Peterson -- Test suite refactoring +* Ronny Pfannschmidt -- BBCode lexer * Dominik Picheta -- Nimrod lexer +* Andrew Pinkham -- RTF Formatter Refactoring * Clément Prévost -- UrbiScript lexer +* Elias Rabel -- Fortran fixed form lexer +* raichoo -- Idris lexer * Kashif Rasul -- CUDA lexer * Justin Reidy -- MXML lexer * Norman Richards -- JSON lexer +* Corey Richardson -- Rust lexer updates * Lubomir Rintel -- GoodData MAQL and CL lexers * Andre Roberge -- Tango style * Konrad Rudolph -- LaTeX formatter enhancements * Mario Ruggier -- Evoque lexers +* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements * Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers * Matteo Sasso -- Common Lisp lexer * Joe Schafer -- Ada lexer @@ -123,21 +173,30 @@ Other contributors, listed alphabetically, are: * Jerome St-Louis -- eC lexer * James Strachan -- Kotlin lexer * Tom Stuart -- Treetop lexer +* Colin Sullivan -- SuperCollider lexer +* Edoardo Tenani -- Arduino lexer * Tiberius Teng -- default style overhaul * Jeremy Thurgood -- Erlang, Squid config lexers * Brian Tiffin -- OpenCOBOL lexer +* Bob Tolbert -- Hy lexer * Erick Tryzelaar -- Felix lexer +* Alexander Udalov -- Kotlin lexer improvements +* Thomas Van Doren -- Chapel lexer * Daniele Varrazzo -- PostgreSQL lexers * Abe Voelker -- OpenEdge ABL lexer * Pepijn de Vos -- HTML formatter CTags support -* Whitney Young -- ObjectiveC lexer * Matthias Vallentin -- Bro lexer +* Linh Vu Hong -- RSL lexer * Nathan Weizenbaum -- Haml and Sass lexers +* Nathan Whetsell -- Csound lexers * Dietmar Winkler -- Modelica lexer * Nils Winter -- Smalltalk lexer * Davy Wybiral -- Clojure lexer +* Whitney Young -- ObjectiveC lexer * Diego Zamboni -- CFengine3 lexer * Enrique Zamudio -- Ceylon lexer * Alex Zimin -- Nemerle lexer +* Rob Zimmerman -- Kal lexer +* Vincent Zurczak -- Roboconf lexer Many thanks for all contributions! diff --git a/vendor/pygments/CHANGES b/vendor/pygments/CHANGES index c2a3528..478970d 100644 --- a/vendor/pygments/CHANGES +++ b/vendor/pygments/CHANGES @@ -2,9 +2,305 @@ Pygments changelog ================== Issue numbers refer to the tracker at -, +, pull request numbers to the requests at -. +. 
+ +Version 2.1.3 +------------- +(released Mar 2, 2016) + +- Fixed regression in Bash lexer (PR#563) + + +Version 2.1.2 +------------- +(released Feb 29, 2016) + +- Fixed Python 3 regression in image formatter (#1215) +- Fixed regression in Bash lexer (PR#562) + + +Version 2.1.1 +------------- +(released Feb 14, 2016) + +- Fixed Jython compatibility (#1205) +- Fixed HTML formatter output with leading empty lines (#1111) +- Added a mapping table for LaTeX encodings and added utf8 (#1152) +- Fixed image formatter font searching on Macs (#1188) +- Fixed deepcopy-ing of Token instances (#1168) +- Fixed Julia string interpolation (#1170) +- Fixed statefulness of HttpLexer between get_tokens calls +- Many smaller fixes to various lexers + + +Version 2.1 +----------- +(released Jan 17, 2016) + +- Added lexers: + + * Emacs Lisp (PR#431) + * Arduino (PR#442) + * Modula-2 with multi-dialect support (#1090) + * Fortran fixed format (PR#213) + * Archetype Definition language (PR#483) + * Terraform (PR#432) + * Jcl, Easytrieve (PR#208) + * ParaSail (PR#381) + * Boogie (PR#420) + * Turtle (PR#425) + * Fish Shell (PR#422) + * Roboconf (PR#449) + * Test Anything Protocol (PR#428) + * Shen (PR#385) + * Component Pascal (PR#437) + * SuperCollider (PR#472) + * Shell consoles (Tcsh, PowerShell, MSDOS) (PR#479) + * Elm and J (PR#452) + * Crmsh (PR#440) + * Praat (PR#492) + * CSound (PR#494) + * Ezhil (PR#443) + * Thrift (PR#469) + * QVT Operational (PR#204) + * Hexdump (PR#508) + * CAmkES Configuration (PR#462) + +- Added styles: + + * Lovelace (PR#456) + * Algol and Algol-nu (#1090) + +- Added formatters: + + * IRC (PR#458) + * True color (24-bit) terminal ANSI sequences (#1142) + (formatter alias: "16m") + +- New "filename" option for HTML formatter (PR#527). + +- Improved performance of the HTML formatter for long lines (PR#504). + +- Updated autopygmentize script (PR#445). + +- Fixed style inheritance for non-standard token types in HTML output. + +- Added support for async/await to Python 3 lexer. + +- Rewrote linenos option for TerminalFormatter (it's better, but slightly + different output than before) (#1147). + +- Javascript lexer now supports most of ES6 (#1100). + +- Cocoa builtins updated for iOS 8.1 (PR#433). + +- Combined BashSessionLexer and ShellSessionLexer, new version should support + the prompt styles of either. + +- Added option to pygmentize to show a full traceback on exceptions. + +- Fixed incomplete output on Windows and Python 3 (e.g. when using iPython + Notebook) (#1153). + +- Allowed more traceback styles in Python console lexer (PR#253). + +- Added decorators to TypeScript (PR#509). + +- Fix highlighting of certain IRC logs formats (#1076). + + +Version 2.0.2 +------------- +(released Jan 20, 2015) + +- Fix Python tracebacks getting duplicated in the console lexer (#1068). + +- Backquote-delimited identifiers are now recognized in F# (#1062). + + +Version 2.0.1 +------------- +(released Nov 10, 2014) + +- Fix an encoding issue when using ``pygmentize`` with the ``-o`` option. + + +Version 2.0 +----------- +(released Nov 9, 2014) + +- Default lexer encoding is now "guess", i.e. UTF-8 / Locale / Latin1 is + tried in that order. + +- Major update to Swift lexer (PR#410). + +- Multiple fixes to lexer guessing in conflicting cases: + + * recognize HTML5 by doctype + * recognize XML by XML declaration + * don't recognize C/C++ as SystemVerilog + +- Simplified regexes and builtin lists.
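For reference only (not part of the vendored patch): a minimal sketch of how two of the 2.1 additions listed above are used -- the HTML formatter's new "filename" option (PR#527) and the IRC formatter (PR#458) -- assuming this vendored copy is importable as ``pygments``::

    # Illustrative only; both formatters below ship with Pygments 2.1+.
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter, IRCFormatter

    source = 'print("hello, world")\n'

    # "filename" (PR#527) adds a filename label above the highlighted code.
    html = highlight(source, PythonLexer(), HtmlFormatter(filename="hello.py"))

    # The IRC formatter (PR#458) emits mIRC-style colour codes instead of HTML.
    irc = highlight(source, PythonLexer(), IRCFormatter())

    print(html)
    print(irc)

The true-color terminal output added in 2.1 is reached from the command line instead, e.g. ``pygmentize -f 16m hello.py`` on a terminal that supports 24-bit ANSI sequences.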
+ + +Version 2.0rc1 +-------------- +(released Oct 16, 2014) + +- Dropped Python 2.4 and 2.5 compatibility. This is in favor of single-source + compatibility between Python 2.6, 2.7 and 3.3+. + +- New website and documentation based on Sphinx (finally!) + +- Lexers added: + + * APL (#969) + * Agda and Literate Agda (PR#203) + * Alloy (PR#355) + * AmbientTalk + * BlitzBasic (PR#197) + * ChaiScript (PR#24) + * Chapel (PR#256) + * Cirru (PR#275) + * Clay (PR#184) + * ColdFusion CFC (PR#283) + * Cryptol and Literate Cryptol (PR#344) + * Cypher (PR#257) + * Docker config files + * EBNF (PR#193) + * Eiffel (PR#273) + * GAP (PR#311) + * Golo (PR#309) + * Handlebars (PR#186) + * Hy (PR#238) + * Idris and Literate Idris (PR#210) + * Igor Pro (PR#172) + * Inform 6/7 (PR#281) + * Intel objdump (PR#279) + * Isabelle (PR#386) + * Jasmin (PR#349) + * JSON-LD (PR#289) + * Kal (PR#233) + * Lean (PR#399) + * LSL (PR#296) + * Limbo (PR#291) + * Liquid (#977) + * MQL (PR#285) + * MaskJS (PR#280) + * Mozilla preprocessors + * Mathematica (PR#245) + * NesC (PR#166) + * Nit (PR#375) + * Nix (PR#267) + * Pan + * Pawn (PR#211) + * Perl 6 (PR#181) + * Pig (PR#304) + * Pike (PR#237) + * QBasic (PR#182) + * Red (PR#341) + * ResourceBundle (#1038) + * Rexx (PR#199) + * Rql (PR#251) + * Rsl + * SPARQL (PR#78) + * Slim (PR#366) + * Swift (PR#371) + * Swig (PR#168) + * TADS 3 (PR#407) + * Todo.txt todo lists + * Twig (PR#404) + +- Added a helper to "optimize" regular expressions that match one of many + literal words; this can save 20% and more lexing time with lexers that + highlight many keywords or builtins. + +- New styles: "xcode" and "igor", similar to the default highlighting of + the respective IDEs. + +- The command-line "pygmentize" tool now tries a little harder to find the + correct encoding for files and the terminal (#979). + +- Added "inencoding" option for lexers to override "encoding" analogous + to "outencoding" (#800). + +- Added line-by-line "streaming" mode for pygmentize with the "-s" option. + (PR#165) Only fully works for lexers that have no constructs spanning + lines! + +- Added an "envname" option to the LaTeX formatter to select a replacement + verbatim environment (PR#235). + +- Updated the Makefile lexer to yield a little more useful highlighting. + +- Lexer aliases passed to ``get_lexer_by_name()`` are now case-insensitive. + +- File name matching in lexers and formatters will now use a regex cache + for speed (PR#205). + +- Pygments will now recognize "vim" modelines when guessing the lexer for + a file based on content (PR#118). + +- Major restructure of the ``pygments.lexers`` module namespace. There are now + many more modules with less lexers per module. Old modules are still around + and re-export the lexers they previously contained. + +- The NameHighlightFilter now works with any Name.* token type (#790). + +- Python 3 lexer: add new exceptions from PEP 3151. + +- Opa lexer: add new keywords (PR#170). + +- Julia lexer: add keywords and underscore-separated number + literals (PR#176). + +- Lasso lexer: fix method highlighting, update builtins. Fix + guessing so that plain XML isn't always taken as Lasso (PR#163). + +- Objective C/C++ lexers: allow "@" prefixing any expression (#871). + +- Ruby lexer: fix lexing of Name::Space tokens (#860) and of symbols + in hashes (#873). + +- Stan lexer: update for version 2.4.0 of the language (PR#162, PR#255, PR#377). + +- JavaScript lexer: add the "yield" keyword (PR#196). + +- HTTP lexer: support for PATCH method (PR#190). 
+ +- Koka lexer: update to newest language spec (PR#201). + +- Haxe lexer: rewrite and support for Haxe 3 (PR#174). + +- Prolog lexer: add different kinds of numeric literals (#864). + +- F# lexer: rewrite with newest spec for F# 3.0 (#842), fix a bug with + dotted chains (#948). + +- Kotlin lexer: general update (PR#271). + +- Rebol lexer: fix comment detection and analyse_text (PR#261). + +- LLVM lexer: update keywords to v3.4 (PR#258). + +- PHP lexer: add new keywords and binary literals (PR#222). + +- external/markdown-processor.py updated to newest python-markdown (PR#221). + +- CSS lexer: some highlighting order fixes (PR#231). + +- Ceylon lexer: fix parsing of nested multiline comments (#915). + +- C family lexers: fix parsing of indented preprocessor directives (#944). + +- Rust lexer: update to 0.9 language version (PR#270, PR#388). + +- Elixir lexer: update to 0.15 language version (PR#392). + +- Fix swallowing incomplete tracebacks in Python console lexer (#874). + Version 1.6 ----------- @@ -259,7 +555,7 @@ Version 1.3 * Ada * Coldfusion * Modula-2 - * haXe + * Haxe * R console * Objective-J * Haml and Sass @@ -318,7 +614,7 @@ Version 1.2 * CMake * Ooc * Coldfusion - * haXe + * Haxe * R console - Added options for rendering LaTeX in source code comments in the diff --git a/vendor/pygments/LICENSE b/vendor/pygments/LICENSE index 1e09119..10b8e91 100644 --- a/vendor/pygments/LICENSE +++ b/vendor/pygments/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2006-2013 by the respective authors (see AUTHORS file). +Copyright (c) 2006-2015 by the respective authors (see AUTHORS file). All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/vendor/pygments/MANIFEST.in b/vendor/pygments/MANIFEST.in index 312c150..cfec4e9 100644 --- a/vendor/pygments/MANIFEST.in +++ b/vendor/pygments/MANIFEST.in @@ -2,5 +2,5 @@ include pygmentize include external/* include Makefile CHANGES LICENSE AUTHORS TODO ez_setup.py recursive-include tests * -recursive-include docs * +recursive-include doc * recursive-include scripts * diff --git a/vendor/pygments/Makefile b/vendor/pygments/Makefile index f24dd08..efae857 100644 --- a/vendor/pygments/Makefile +++ b/vendor/pygments/Makefile @@ -4,7 +4,7 @@ # # Combines scripts for common tasks. # -# :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. +# :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. # :license: BSD, see LICENSE for details. # @@ -19,9 +19,9 @@ all: clean-pyc check test check: @$(PYTHON) scripts/detect_missing_analyse_text.py || true + @pyflakes pygments | grep -v 'but unused' || true @$(PYTHON) scripts/check_sources.py -i build -i dist -i pygments/lexers/_mapping.py \ - -i docs/build -i pygments/formatters/_mapping.py -i pygments/unistring.py \ - -i pygments/lexers/_vimbuiltins.py + -i docs/build -i pygments/formatters/_mapping.py -i pygments/unistring.py clean: clean-pyc -rm -rf build @@ -36,15 +36,12 @@ codetags: @$(PYTHON) scripts/find_codetags.py -i tests/examplefiles -i scripts/pylintrc \ -i scripts/find_codetags.py -o codetags.html . -docs: docs/build - -docs/build: docs/src/*.txt - $(PYTHON) docs/generate.py html docs/build $? - touch docs/build +docs: + make -C doc html mapfiles: - (cd pygments/lexers; $(PYTHON) _mapping.py) (cd pygments/formatters; $(PYTHON) _mapping.py) + (cd pygments/lexers; $(PYTHON) _mapping.py) pylint: @pylint --rcfile scripts/pylintrc pygments @@ -53,7 +50,13 @@ reindent: @$(PYTHON) scripts/reindent.py -r -B . 
test: - @$(PYTHON) tests/run.py $(TESTS) + @$(PYTHON) tests/run.py -d $(TEST) test-coverage: - @$(PYTHON) tests/run.py -C $(TESTS) + @$(PYTHON) tests/run.py -d --with-coverage --cover-package=pygments --cover-erase $(TEST) + +tox-test: + @tox -- $(TEST) + +tox-test-coverage: + @tox -- --with-coverage --cover-package=pygments --cover-erase $(TEST) diff --git a/vendor/pygments/PKG-INFO b/vendor/pygments/PKG-INFO index 6e9739d..4e75234 100644 --- a/vendor/pygments/PKG-INFO +++ b/vendor/pygments/PKG-INFO @@ -1,34 +1,27 @@ Metadata-Version: 1.1 Name: Pygments -Version: 1.6 +Version: 2.1.3 Summary: Pygments is a syntax highlighting package written in Python. Home-page: http://pygments.org/ Author: Georg Brandl Author-email: georg@python.org License: BSD License -Description: - Pygments +Description: Pygments ~~~~~~~~ Pygments is a syntax highlighting package written in Python. - It is a generic syntax highlighter for general use in all kinds of software - such as forum systems, wikis or other applications that need to prettify - source code. Highlights are: + It is a generic syntax highlighter suitable for use in code hosting, forums, + wikis or other applications that need to prettify source code. Highlights + are: - * a wide range of common languages and markup formats is supported + * a wide range of over 300 languages and other text formats is supported * special attention is paid to details, increasing quality by a fair amount * support for new languages and formats are added easily * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image formats that PIL supports and ANSI sequences * it is usable as a command-line tool and as a library - * ... and it highlights even Brainfuck! - The `Pygments tip`_ is installable with ``easy_install Pygments==dev``. - - .. _Pygments tip: - http://bitbucket.org/birkenfeld/pygments-main/get/default.zip#egg=Pygments-dev - - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. Keywords: syntax highlighting diff --git a/vendor/pygments/Pygments.egg-info/PKG-INFO b/vendor/pygments/Pygments.egg-info/PKG-INFO index 6e9739d..4e75234 100644 --- a/vendor/pygments/Pygments.egg-info/PKG-INFO +++ b/vendor/pygments/Pygments.egg-info/PKG-INFO @@ -1,34 +1,27 @@ Metadata-Version: 1.1 Name: Pygments -Version: 1.6 +Version: 2.1.3 Summary: Pygments is a syntax highlighting package written in Python. Home-page: http://pygments.org/ Author: Georg Brandl Author-email: georg@python.org License: BSD License -Description: - Pygments +Description: Pygments ~~~~~~~~ Pygments is a syntax highlighting package written in Python. - It is a generic syntax highlighter for general use in all kinds of software - such as forum systems, wikis or other applications that need to prettify - source code. Highlights are: + It is a generic syntax highlighter suitable for use in code hosting, forums, + wikis or other applications that need to prettify source code. Highlights + are: - * a wide range of common languages and markup formats is supported + * a wide range of over 300 languages and other text formats is supported * special attention is paid to details, increasing quality by a fair amount * support for new languages and formats are added easily * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image formats that PIL supports and ANSI sequences * it is usable as a command-line tool and as a library - * ... and it highlights even Brainfuck! 
- The `Pygments tip`_ is installable with ``easy_install Pygments==dev``. - - .. _Pygments tip: - http://bitbucket.org/birkenfeld/pygments-main/get/default.zip#egg=Pygments-dev - - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. Keywords: syntax highlighting diff --git a/vendor/pygments/Pygments.egg-info/SOURCES.txt b/vendor/pygments/Pygments.egg-info/SOURCES.txt index dd49c40..5155bcc 100644 --- a/vendor/pygments/Pygments.egg-info/SOURCES.txt +++ b/vendor/pygments/Pygments.egg-info/SOURCES.txt @@ -1,69 +1,70 @@ +.hgignore +.hgtags AUTHORS CHANGES LICENSE MANIFEST.in Makefile +README.rst TODO ez_setup.py pygmentize +requirements.txt setup.cfg setup.py +tox.ini Pygments.egg-info/PKG-INFO Pygments.egg-info/SOURCES.txt Pygments.egg-info/dependency_links.txt Pygments.egg-info/entry_points.txt Pygments.egg-info/not-zip-safe Pygments.egg-info/top_level.txt -docs/generate.py -docs/pygmentize.1 -docs/build/api.html -docs/build/authors.html -docs/build/changelog.html -docs/build/cmdline.html -docs/build/filterdevelopment.html -docs/build/filters.html -docs/build/formatterdevelopment.html -docs/build/formatters.html -docs/build/index.html -docs/build/installation.html -docs/build/integrate.html -docs/build/java.html -docs/build/lexerdevelopment.html -docs/build/lexers.html -docs/build/moinmoin.html -docs/build/plugins.html -docs/build/quickstart.html -docs/build/rstdirective.html -docs/build/styles.html -docs/build/tokens.html -docs/build/unicode.html -docs/src/api.txt -docs/src/authors.txt -docs/src/changelog.txt -docs/src/cmdline.txt -docs/src/filterdevelopment.txt -docs/src/filters.txt -docs/src/formatterdevelopment.txt -docs/src/formatters.txt -docs/src/index.txt -docs/src/installation.txt -docs/src/integrate.txt -docs/src/java.txt -docs/src/lexerdevelopment.txt -docs/src/lexers.txt -docs/src/moinmoin.txt -docs/src/plugins.txt -docs/src/quickstart.txt -docs/src/rstdirective.txt -docs/src/styles.txt -docs/src/tokens.txt -docs/src/unicode.txt +doc/Makefile +doc/conf.py +doc/download.rst +doc/faq.rst +doc/index.rst +doc/languages.rst +doc/make.bat +doc/pygmentize.1 +doc/_static/favicon.ico +doc/_static/logo_new.png +doc/_static/logo_only.png +doc/_templates/docssidebar.html +doc/_templates/indexsidebar.html +doc/_themes/pygments14/layout.html +doc/_themes/pygments14/theme.conf +doc/_themes/pygments14/static/bodybg.png +doc/_themes/pygments14/static/docbg.png +doc/_themes/pygments14/static/listitem.png +doc/_themes/pygments14/static/logo.png +doc/_themes/pygments14/static/pocoo.png +doc/_themes/pygments14/static/pygments14.css_t +doc/docs/api.rst +doc/docs/authors.rst +doc/docs/changelog.rst +doc/docs/cmdline.rst +doc/docs/filterdevelopment.rst +doc/docs/filters.rst +doc/docs/formatterdevelopment.rst +doc/docs/formatters.rst +doc/docs/index.rst +doc/docs/integrate.rst +doc/docs/java.rst +doc/docs/lexerdevelopment.rst +doc/docs/lexers.rst +doc/docs/moinmoin.rst +doc/docs/plugins.rst +doc/docs/quickstart.rst +doc/docs/rstdirective.rst +doc/docs/styles.rst +doc/docs/tokens.rst +doc/docs/unicode.rst external/autopygmentize external/lasso-builtins-generator-9.lasso external/markdown-processor.py external/moin-parser.py external/pygments.bashcomp -external/rst-directive-old.py external/rst-directive.py pygments/__init__.py pygments/cmdline.py @@ -71,8 +72,11 @@ pygments/console.py pygments/filter.py pygments/formatter.py pygments/lexer.py +pygments/modeline.py pygments/plugin.py 
+pygments/regexopt.py pygments/scanner.py +pygments/sphinxext.py pygments/style.py pygments/token.py pygments/unistring.py @@ -83,6 +87,7 @@ pygments/formatters/_mapping.py pygments/formatters/bbcode.py pygments/formatters/html.py pygments/formatters/img.py +pygments/formatters/irc.py pygments/formatters/latex.py pygments/formatters/other.py pygments/formatters/rtf.py @@ -90,38 +95,133 @@ pygments/formatters/svg.py pygments/formatters/terminal.py pygments/formatters/terminal256.py pygments/lexers/__init__.py -pygments/lexers/_asybuiltins.py -pygments/lexers/_clbuiltins.py -pygments/lexers/_lassobuiltins.py -pygments/lexers/_luabuiltins.py +pygments/lexers/_asy_builtins.py +pygments/lexers/_cl_builtins.py +pygments/lexers/_cocoa_builtins.py +pygments/lexers/_csound_builtins.py +pygments/lexers/_lasso_builtins.py +pygments/lexers/_lua_builtins.py pygments/lexers/_mapping.py -pygments/lexers/_openedgebuiltins.py -pygments/lexers/_phpbuiltins.py +pygments/lexers/_mql_builtins.py +pygments/lexers/_openedge_builtins.py +pygments/lexers/_php_builtins.py pygments/lexers/_postgres_builtins.py -pygments/lexers/_robotframeworklexer.py pygments/lexers/_scilab_builtins.py -pygments/lexers/_sourcemodbuiltins.py +pygments/lexers/_sourcemod_builtins.py pygments/lexers/_stan_builtins.py -pygments/lexers/_vimbuiltins.py +pygments/lexers/_vim_builtins.py +pygments/lexers/actionscript.py pygments/lexers/agile.py +pygments/lexers/algebra.py +pygments/lexers/ambient.py +pygments/lexers/apl.py +pygments/lexers/archetype.py pygments/lexers/asm.py +pygments/lexers/automation.py +pygments/lexers/basic.py +pygments/lexers/business.py +pygments/lexers/c_cpp.py +pygments/lexers/c_like.py +pygments/lexers/chapel.py pygments/lexers/compiled.py +pygments/lexers/configs.py +pygments/lexers/console.py +pygments/lexers/csound.py +pygments/lexers/css.py +pygments/lexers/d.py pygments/lexers/dalvik.py +pygments/lexers/data.py +pygments/lexers/diff.py pygments/lexers/dotnet.py +pygments/lexers/dsls.py +pygments/lexers/dylan.py +pygments/lexers/ecl.py +pygments/lexers/eiffel.py +pygments/lexers/elm.py +pygments/lexers/erlang.py +pygments/lexers/esoteric.py +pygments/lexers/ezhil.py +pygments/lexers/factor.py +pygments/lexers/fantom.py +pygments/lexers/felix.py +pygments/lexers/fortran.py pygments/lexers/foxpro.py pygments/lexers/functional.py +pygments/lexers/go.py +pygments/lexers/grammar_notation.py +pygments/lexers/graph.py +pygments/lexers/graphics.py +pygments/lexers/haskell.py +pygments/lexers/haxe.py pygments/lexers/hdl.py +pygments/lexers/hexdump.py +pygments/lexers/html.py +pygments/lexers/idl.py +pygments/lexers/igor.py +pygments/lexers/inferno.py +pygments/lexers/installers.py +pygments/lexers/int_fiction.py +pygments/lexers/iolang.py +pygments/lexers/j.py +pygments/lexers/javascript.py +pygments/lexers/julia.py pygments/lexers/jvm.py +pygments/lexers/lisp.py +pygments/lexers/make.py +pygments/lexers/markup.py pygments/lexers/math.py +pygments/lexers/matlab.py +pygments/lexers/ml.py +pygments/lexers/modeling.py +pygments/lexers/modula2.py +pygments/lexers/nimrod.py +pygments/lexers/nit.py +pygments/lexers/nix.py +pygments/lexers/oberon.py +pygments/lexers/objective.py +pygments/lexers/ooc.py pygments/lexers/other.py +pygments/lexers/parasail.py pygments/lexers/parsers.py +pygments/lexers/pascal.py +pygments/lexers/pawn.py +pygments/lexers/perl.py +pygments/lexers/php.py +pygments/lexers/praat.py +pygments/lexers/prolog.py +pygments/lexers/python.py +pygments/lexers/qvt.py +pygments/lexers/r.py +pygments/lexers/rdf.py 
+pygments/lexers/rebol.py +pygments/lexers/resource.py +pygments/lexers/roboconf.py +pygments/lexers/robotframework.py +pygments/lexers/ruby.py +pygments/lexers/rust.py +pygments/lexers/scripting.py pygments/lexers/shell.py +pygments/lexers/smalltalk.py +pygments/lexers/snobol.py pygments/lexers/special.py pygments/lexers/sql.py +pygments/lexers/supercollider.py +pygments/lexers/tcl.py pygments/lexers/templates.py +pygments/lexers/testing.py pygments/lexers/text.py +pygments/lexers/textedit.py +pygments/lexers/textfmts.py +pygments/lexers/theorem.py +pygments/lexers/trafficscript.py +pygments/lexers/urbi.py pygments/lexers/web.py +pygments/lexers/webmisc.py +pygments/lexers/x10.py pygments/styles/__init__.py +pygments/styles/algol.py +pygments/styles/algol_nu.py +pygments/styles/arduino.py pygments/styles/autumn.py pygments/styles/borland.py pygments/styles/bw.py @@ -130,10 +230,14 @@ pygments/styles/default.py pygments/styles/emacs.py pygments/styles/friendly.py pygments/styles/fruity.py +pygments/styles/igor.py +pygments/styles/lovelace.py pygments/styles/manni.py pygments/styles/monokai.py pygments/styles/murphy.py pygments/styles/native.py +pygments/styles/paraiso_dark.py +pygments/styles/paraiso_light.py pygments/styles/pastie.py pygments/styles/perldoc.py pygments/styles/rrt.py @@ -141,29 +245,143 @@ pygments/styles/tango.py pygments/styles/trac.py pygments/styles/vim.py pygments/styles/vs.py +pygments/styles/xcode.py scripts/check_sources.py +scripts/debug_lexer.py scripts/detect_missing_analyse_text.py scripts/epydoc.css -scripts/find_codetags.py scripts/find_error.py scripts/get_vimkw.py scripts/pylintrc -scripts/reindent.py scripts/vim2pygments.py -tests/old_run.py +tests/.coverage tests/run.py +tests/string_asserts.py +tests/string_asserts.pyc tests/support.py +tests/support.pyc tests/test_basic_api.py +tests/test_basic_api.pyc +tests/test_cfm.py +tests/test_cfm.pyc tests/test_clexer.py +tests/test_clexer.pyc tests/test_cmdline.py +tests/test_cmdline.pyc tests/test_examplefiles.py +tests/test_examplefiles.pyc +tests/test_ezhil.py +tests/test_ezhil.pyc tests/test_html_formatter.py +tests/test_html_formatter.pyc +tests/test_inherit.py +tests/test_inherit.pyc +tests/test_irc_formatter.py +tests/test_irc_formatter.pyc +tests/test_java.py +tests/test_java.pyc tests/test_latex_formatter.py +tests/test_latex_formatter.pyc +tests/test_lexers_other.py +tests/test_lexers_other.pyc +tests/test_objectiveclexer.py +tests/test_objectiveclexer.pyc tests/test_perllexer.py +tests/test_perllexer.pyc +tests/test_qbasiclexer.py +tests/test_qbasiclexer.pyc tests/test_regexlexer.py +tests/test_regexlexer.pyc +tests/test_regexopt.py +tests/test_regexopt.pyc +tests/test_rtf_formatter.py +tests/test_rtf_formatter.pyc +tests/test_ruby.py +tests/test_ruby.pyc +tests/test_shell.py +tests/test_shell.pyc +tests/test_smarty.py +tests/test_smarty.pyc +tests/test_string_asserts.py +tests/test_string_asserts.pyc +tests/test_terminal_formatter.py +tests/test_terminal_formatter.pyc +tests/test_textfmts.py +tests/test_textfmts.pyc tests/test_token.py +tests/test_token.pyc +tests/test_unistring.py +tests/test_unistring.pyc tests/test_using_api.py +tests/test_using_api.pyc tests/test_util.py +tests/test_util.pyc +tests/__pycache__/string_asserts.cpython-33.pyc +tests/__pycache__/string_asserts.cpython-35.pyc +tests/__pycache__/support.cpython-33.pyc +tests/__pycache__/support.cpython-35.pyc +tests/__pycache__/test_basic_api.cpython-33.pyc +tests/__pycache__/test_basic_api.cpython-35.pyc 
+tests/__pycache__/test_cfm.cpython-33.pyc +tests/__pycache__/test_cfm.cpython-35.pyc +tests/__pycache__/test_clexer.cpython-33.pyc +tests/__pycache__/test_clexer.cpython-35.pyc +tests/__pycache__/test_cmdline.cpython-33.pyc +tests/__pycache__/test_cmdline.cpython-35.pyc +tests/__pycache__/test_examplefiles.cpython-33.pyc +tests/__pycache__/test_examplefiles.cpython-35.pyc +tests/__pycache__/test_ezhil.cpython-35.pyc +tests/__pycache__/test_html_formatter.cpython-33.pyc +tests/__pycache__/test_html_formatter.cpython-35.pyc +tests/__pycache__/test_inherit.cpython-33.pyc +tests/__pycache__/test_inherit.cpython-35.pyc +tests/__pycache__/test_irc_formatter.cpython-35.pyc +tests/__pycache__/test_java.cpython-33.pyc +tests/__pycache__/test_java.cpython-35.pyc +tests/__pycache__/test_latex_formatter.cpython-33.pyc +tests/__pycache__/test_latex_formatter.cpython-35.pyc +tests/__pycache__/test_lexers_other.cpython-33.pyc +tests/__pycache__/test_lexers_other.cpython-35.pyc +tests/__pycache__/test_objectiveclexer.cpython-33.pyc +tests/__pycache__/test_objectiveclexer.cpython-35.pyc +tests/__pycache__/test_perllexer.cpython-33.pyc +tests/__pycache__/test_perllexer.cpython-35.pyc +tests/__pycache__/test_qbasiclexer.cpython-33.pyc +tests/__pycache__/test_qbasiclexer.cpython-35.pyc +tests/__pycache__/test_regexlexer.cpython-33.pyc +tests/__pycache__/test_regexlexer.cpython-35.pyc +tests/__pycache__/test_regexopt.cpython-33.pyc +tests/__pycache__/test_regexopt.cpython-35.pyc +tests/__pycache__/test_rtf_formatter.cpython-33.pyc +tests/__pycache__/test_rtf_formatter.cpython-35.pyc +tests/__pycache__/test_ruby.cpython-33.pyc +tests/__pycache__/test_ruby.cpython-35.pyc +tests/__pycache__/test_shell.cpython-33.pyc +tests/__pycache__/test_shell.cpython-35.pyc +tests/__pycache__/test_smarty.cpython-33.pyc +tests/__pycache__/test_smarty.cpython-35.pyc +tests/__pycache__/test_string_asserts.cpython-33.pyc +tests/__pycache__/test_string_asserts.cpython-35.pyc +tests/__pycache__/test_terminal_formatter.cpython-35.pyc +tests/__pycache__/test_textfmts.cpython-33.pyc +tests/__pycache__/test_textfmts.cpython-35.pyc +tests/__pycache__/test_token.cpython-33.pyc +tests/__pycache__/test_token.cpython-35.pyc +tests/__pycache__/test_unistring.cpython-33.pyc +tests/__pycache__/test_unistring.cpython-35.pyc +tests/__pycache__/test_using_api.cpython-33.pyc +tests/__pycache__/test_using_api.cpython-35.pyc +tests/__pycache__/test_util.cpython-33.pyc +tests/__pycache__/test_util.cpython-35.pyc +tests/cover/coverage_html.js +tests/cover/jquery.hotkeys.js +tests/cover/jquery.isonscreen.js +tests/cover/jquery.min.js +tests/cover/jquery.tablesorter.min.js +tests/cover/keybd_closed.png +tests/cover/keybd_open.png +tests/cover/status.dat +tests/cover/style.css tests/dtds/HTML4-f.dtd tests/dtds/HTML4-s.dtd tests/dtds/HTML4.dcl @@ -172,24 +390,36 @@ tests/dtds/HTML4.soc tests/dtds/HTMLlat1.ent tests/dtds/HTMLspec.ent tests/dtds/HTMLsym.ent -tests/examplefiles/ANTLRv3.g +tests/examplefiles/99_bottles_of_beer.chpl tests/examplefiles/AcidStateAdvanced.hs tests/examplefiles/AlternatingGroup.mu tests/examplefiles/BOM.js +tests/examplefiles/Blink.ino tests/examplefiles/CPDictionary.j tests/examplefiles/Config.in.cache tests/examplefiles/Constants.mo tests/examplefiles/DancingSudoku.lhs +tests/examplefiles/Deflate.fs +tests/examplefiles/Error.pmod tests/examplefiles/Errors.scala -tests/examplefiles/File.hy +tests/examplefiles/FakeFile.pike +tests/examplefiles/Get-CommandDefinitionHtml.ps1 +tests/examplefiles/IPDispatchC.nc 
+tests/examplefiles/IPDispatchP.nc tests/examplefiles/Intro.java tests/examplefiles/Makefile tests/examplefiles/Object.st tests/examplefiles/OrderedMap.hx +tests/examplefiles/RoleQ.pm6 tests/examplefiles/SmallCheck.hs tests/examplefiles/Sorting.mod tests/examplefiles/Sudoku.lhs +tests/examplefiles/abnf_example1.abnf +tests/examplefiles/abnf_example2.abnf tests/examplefiles/addressbook.proto +tests/examplefiles/ahcon.f +tests/examplefiles/all.nit +tests/examplefiles/antlr_ANTLRv3.g tests/examplefiles/antlr_throws tests/examplefiles/apache2.conf tests/examplefiles/as3_test.as @@ -197,69 +427,137 @@ tests/examplefiles/as3_test2.as tests/examplefiles/as3_test3.as tests/examplefiles/aspx-cs_example tests/examplefiles/autoit_submit.au3 +tests/examplefiles/automake.mk tests/examplefiles/badcase.java -tests/examplefiles/batchfile.bat tests/examplefiles/bigtest.nsi +tests/examplefiles/bnf_example1.bnf tests/examplefiles/boot-9.scm tests/examplefiles/ca65_example tests/examplefiles/cbmbas_example tests/examplefiles/cells.ps tests/examplefiles/ceval.c +tests/examplefiles/char.scala tests/examplefiles/cheetah_example.html tests/examplefiles/classes.dylan +tests/examplefiles/clojure-weird-keywords.clj tests/examplefiles/condensed_ruby.rb tests/examplefiles/coq_RelationClasses +tests/examplefiles/core.cljs tests/examplefiles/database.pytb tests/examplefiles/de.MoinMoin.po tests/examplefiles/demo.ahk tests/examplefiles/demo.cfm +tests/examplefiles/demo.css.in +tests/examplefiles/demo.hbs +tests/examplefiles/demo.js.in +tests/examplefiles/demo.thrift +tests/examplefiles/demo.xul.in tests/examplefiles/django_sample.html+django +tests/examplefiles/docker.docker tests/examplefiles/dwarf.cw +tests/examplefiles/eg_example1.eg +tests/examplefiles/ember.handlebars tests/examplefiles/erl_session +tests/examplefiles/es6.js tests/examplefiles/escape_semicolon.clj +tests/examplefiles/eval.rs tests/examplefiles/evil_regex.js tests/examplefiles/example.Rd +tests/examplefiles/example.als +tests/examplefiles/example.bat +tests/examplefiles/example.bc tests/examplefiles/example.bug tests/examplefiles/example.c tests/examplefiles/example.ceylon +tests/examplefiles/example.chai +tests/examplefiles/example.clay tests/examplefiles/example.cls tests/examplefiles/example.cob +tests/examplefiles/example.coffee tests/examplefiles/example.cpp +tests/examplefiles/example.e +tests/examplefiles/example.elm +tests/examplefiles/example.ezt +tests/examplefiles/example.f90 +tests/examplefiles/example.feature +tests/examplefiles/example.fish +tests/examplefiles/example.gd +tests/examplefiles/example.gi +tests/examplefiles/example.golo +tests/examplefiles/example.groovy tests/examplefiles/example.gs tests/examplefiles/example.gst +tests/examplefiles/example.hs +tests/examplefiles/example.hx +tests/examplefiles/example.i6t +tests/examplefiles/example.i7x +tests/examplefiles/example.j tests/examplefiles/example.jag +tests/examplefiles/example.java +tests/examplefiles/example.jcl +tests/examplefiles/example.jsonld +tests/examplefiles/example.kal tests/examplefiles/example.kt +tests/examplefiles/example.lagda +tests/examplefiles/example.liquid tests/examplefiles/example.lua +tests/examplefiles/example.ma +tests/examplefiles/example.mac tests/examplefiles/example.monkey tests/examplefiles/example.moo tests/examplefiles/example.moon +tests/examplefiles/example.mq4 +tests/examplefiles/example.mqh tests/examplefiles/example.msc +tests/examplefiles/example.ni tests/examplefiles/example.nim +tests/examplefiles/example.nix 
tests/examplefiles/example.ns2 -tests/examplefiles/example.p tests/examplefiles/example.pas +tests/examplefiles/example.pcmk +tests/examplefiles/example.pp +tests/examplefiles/example.praat tests/examplefiles/example.prg tests/examplefiles/example.rb +tests/examplefiles/example.red +tests/examplefiles/example.reds tests/examplefiles/example.reg +tests/examplefiles/example.rexx tests/examplefiles/example.rhtml tests/examplefiles/example.rkt tests/examplefiles/example.rpf +tests/examplefiles/example.rts +tests/examplefiles/example.scd +tests/examplefiles/example.sh tests/examplefiles/example.sh-session tests/examplefiles/example.shell-session +tests/examplefiles/example.slim +tests/examplefiles/example.sls tests/examplefiles/example.sml tests/examplefiles/example.snobol tests/examplefiles/example.stan +tests/examplefiles/example.tap tests/examplefiles/example.tea +tests/examplefiles/example.tf +tests/examplefiles/example.thy +tests/examplefiles/example.todotxt tests/examplefiles/example.ts +tests/examplefiles/example.ttl tests/examplefiles/example.u tests/examplefiles/example.weechatlog +tests/examplefiles/example.x10 tests/examplefiles/example.xhtml tests/examplefiles/example.xtend tests/examplefiles/example.yaml +tests/examplefiles/example1.cadl tests/examplefiles/example2.aspx tests/examplefiles/example2.msc +tests/examplefiles/exampleScript.cfc +tests/examplefiles/exampleTag.cfc +tests/examplefiles/example_coq.v tests/examplefiles/example_elixir.ex tests/examplefiles/example_file.fy +tests/examplefiles/ezhil_primefactors.n tests/examplefiles/firefox.mak tests/examplefiles/flipflop.sv tests/examplefiles/foo.sce @@ -273,13 +571,28 @@ tests/examplefiles/genshi_example.xml+genshi tests/examplefiles/genshitext_example.genshitext tests/examplefiles/glsl.frag tests/examplefiles/glsl.vert +tests/examplefiles/grammar-test.p6 +tests/examplefiles/hash_syntax.rb +tests/examplefiles/hello.at +tests/examplefiles/hello.golo +tests/examplefiles/hello.lsl tests/examplefiles/hello.smali tests/examplefiles/hello.sp +tests/examplefiles/hexdump_debugexe +tests/examplefiles/hexdump_hd +tests/examplefiles/hexdump_hexcat +tests/examplefiles/hexdump_hexdump +tests/examplefiles/hexdump_od +tests/examplefiles/hexdump_xxd tests/examplefiles/html+php_faulty.php tests/examplefiles/http_request_example tests/examplefiles/http_response_example -tests/examplefiles/import.hs +tests/examplefiles/hybris_File.hy +tests/examplefiles/idl_sample.pro +tests/examplefiles/iex_example tests/examplefiles/inet_pton6.dg +tests/examplefiles/inform6_example +tests/examplefiles/interp.scala tests/examplefiles/intro.ik tests/examplefiles/ints.php tests/examplefiles/intsyn.fun @@ -292,7 +605,9 @@ tests/examplefiles/jbst_example2.jbst tests/examplefiles/jinjadesignerdoc.rst tests/examplefiles/json.lasso tests/examplefiles/json.lasso9 +tests/examplefiles/language.hy tests/examplefiles/lighttpd_config.conf +tests/examplefiles/limbo.b tests/examplefiles/linecontinuation.py tests/examplefiles/livescript-demo.ls tests/examplefiles/logos_example.xm @@ -303,9 +618,9 @@ tests/examplefiles/matlab_noreturn tests/examplefiles/matlab_sample tests/examplefiles/matlabsession_sample.txt tests/examplefiles/metagrammar.treetop -tests/examplefiles/mg_sample.pro tests/examplefiles/minehunt.qml tests/examplefiles/minimal.ns2 +tests/examplefiles/modula2_test_cases.def tests/examplefiles/moin_SyntaxReference.txt tests/examplefiles/multiline_regexes.rb tests/examplefiles/nanomsg.intr @@ -313,104 +628,161 @@ tests/examplefiles/nasm_aoutso.asm 
tests/examplefiles/nasm_objexe.asm tests/examplefiles/nemerle_sample.n tests/examplefiles/nginx_nginx.conf +tests/examplefiles/noexcept.cpp tests/examplefiles/numbers.c tests/examplefiles/objc_example.m -tests/examplefiles/objc_example2.m +tests/examplefiles/openedge_example +tests/examplefiles/pacman.conf +tests/examplefiles/pacman.ijs +tests/examplefiles/pawn_example tests/examplefiles/perl_misc tests/examplefiles/perl_perl5db tests/examplefiles/perl_regex-delims tests/examplefiles/perlfunc.1 tests/examplefiles/phpMyAdmin.spec tests/examplefiles/phpcomplete.vim +tests/examplefiles/pkgconfig_example.pc tests/examplefiles/pleac.in.rb tests/examplefiles/postgresql_test.txt tests/examplefiles/pppoe.applescript tests/examplefiles/psql_session.txt tests/examplefiles/py3_test.txt +tests/examplefiles/py3tb_test.py3tb +tests/examplefiles/pycon_ctrlc_traceback tests/examplefiles/pycon_test.pycon tests/examplefiles/pytb_test2.pytb tests/examplefiles/pytb_test3.pytb tests/examplefiles/python25-bsd.mak +tests/examplefiles/qbasic_example tests/examplefiles/qsort.prolog tests/examplefiles/r-console-transcript.Rout +tests/examplefiles/r6rs-comments.scm tests/examplefiles/ragel-cpp_rlscan tests/examplefiles/ragel-cpp_snippet tests/examplefiles/regex.js +tests/examplefiles/resourcebundle_demo tests/examplefiles/reversi.lsp -tests/examplefiles/robotframework.txt +tests/examplefiles/roboconf.graph +tests/examplefiles/roboconf.instances +tests/examplefiles/robotframework_test.txt +tests/examplefiles/rql-queries.rql tests/examplefiles/ruby_func_def.rb -tests/examplefiles/rust_example.rs +tests/examplefiles/sample.qvto tests/examplefiles/scilab.sci +tests/examplefiles/scope.cirru tests/examplefiles/session.dylan-console tests/examplefiles/sibling.prolog -tests/examplefiles/simple.md +tests/examplefiles/simple.camkes +tests/examplefiles/simple.croc tests/examplefiles/smarty_example.html tests/examplefiles/source.lgt tests/examplefiles/sources.list +tests/examplefiles/sparql.rq tests/examplefiles/sphere.pov tests/examplefiles/sqlite3.sqlite3-console tests/examplefiles/squid.conf tests/examplefiles/string.jl tests/examplefiles/string_delimiters.d tests/examplefiles/stripheredoc.sh +tests/examplefiles/subr.el +tests/examplefiles/swig_java.swg +tests/examplefiles/swig_std_vector.i +tests/examplefiles/tads3_example.t +tests/examplefiles/termcap +tests/examplefiles/terminfo +tests/examplefiles/test-3.0.xq +tests/examplefiles/test-exist-update.xq tests/examplefiles/test.R tests/examplefiles/test.adb +tests/examplefiles/test.adls +tests/examplefiles/test.agda +tests/examplefiles/test.apl tests/examplefiles/test.asy tests/examplefiles/test.awk -tests/examplefiles/test.bas +tests/examplefiles/test.bb tests/examplefiles/test.bmx tests/examplefiles/test.boo +tests/examplefiles/test.bpl tests/examplefiles/test.bro +tests/examplefiles/test.cadl tests/examplefiles/test.cs +tests/examplefiles/test.csd tests/examplefiles/test.css tests/examplefiles/test.cu +tests/examplefiles/test.cyp tests/examplefiles/test.d tests/examplefiles/test.dart tests/examplefiles/test.dtd +tests/examplefiles/test.ebnf tests/examplefiles/test.ec -tests/examplefiles/test.ecl tests/examplefiles/test.eh tests/examplefiles/test.erl tests/examplefiles/test.evoque tests/examplefiles/test.fan tests/examplefiles/test.flx tests/examplefiles/test.gdc +tests/examplefiles/test.gradle tests/examplefiles/test.groovy tests/examplefiles/test.html +tests/examplefiles/test.idr tests/examplefiles/test.ini tests/examplefiles/test.java tests/examplefiles/test.jsp 
+tests/examplefiles/test.lean tests/examplefiles/test.maql +tests/examplefiles/test.mask tests/examplefiles/test.mod tests/examplefiles/test.moo tests/examplefiles/test.myt tests/examplefiles/test.nim +tests/examplefiles/test.odin tests/examplefiles/test.opa +tests/examplefiles/test.orc +tests/examplefiles/test.p6 +tests/examplefiles/test.pan tests/examplefiles/test.pas tests/examplefiles/test.php +tests/examplefiles/test.pig tests/examplefiles/test.plot tests/examplefiles/test.ps1 +tests/examplefiles/test.psl +tests/examplefiles/test.pwn tests/examplefiles/test.pypylog tests/examplefiles/test.r3 tests/examplefiles/test.rb tests/examplefiles/test.rhtml +tests/examplefiles/test.rsl tests/examplefiles/test.scaml +tests/examplefiles/test.sco +tests/examplefiles/test.shen tests/examplefiles/test.ssp +tests/examplefiles/test.swift tests/examplefiles/test.tcsh tests/examplefiles/test.vb tests/examplefiles/test.vhdl tests/examplefiles/test.xqy tests/examplefiles/test.xsl +tests/examplefiles/test.zep +tests/examplefiles/test2.odin tests/examplefiles/test2.pypylog +tests/examplefiles/test_basic.adls tests/examplefiles/truncated.pytb +tests/examplefiles/twig_test tests/examplefiles/type.lisp tests/examplefiles/underscore.coffee tests/examplefiles/unicode.applescript +tests/examplefiles/unicode.go +tests/examplefiles/unicode.js tests/examplefiles/unicodedoc.py tests/examplefiles/unix-io.lid +tests/examplefiles/vbnet_test.bas +tests/examplefiles/vctreestatus_hg +tests/examplefiles/vimrc +tests/examplefiles/vpath.mk tests/examplefiles/webkit-transition.css tests/examplefiles/while.pov tests/examplefiles/wiki.factor tests/examplefiles/xml_example +tests/examplefiles/yahalom.cpsa tests/examplefiles/zmlrpc.f90 tests/support/tags \ No newline at end of file diff --git a/vendor/pygments/README.rst b/vendor/pygments/README.rst new file mode 100644 index 0000000..350e242 --- /dev/null +++ b/vendor/pygments/README.rst @@ -0,0 +1,39 @@ +README for Pygments +=================== + +This is the source of Pygments. It is a generic syntax highlighter that +supports over 300 languages and text formats, for use in code hosting, forums, +wikis or other applications that need to prettify source code. + +Installing +---------- + +... works as usual, use ``python setup.py install``. + +Documentation +------------- + +... can be found online at http://pygments.org/ or created by :: + + cd doc + make html + +Development +----------- + +... takes place on `Bitbucket +`_, where the Mercurial +repository, tickets and pull requests can be viewed. + +Continuous testing runs on drone.io: + +.. image:: https://drone.io/bitbucket.org/birkenfeld/pygments-main/status.png + :target: https://drone.io/bitbucket.org/birkenfeld/pygments-main + +The authors +----------- + +Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*. + +Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of +the `Pocoo `_ team and **Tim Hatch**. diff --git a/vendor/pygments/TODO b/vendor/pygments/TODO index 3ff63a3..88076f3 100644 --- a/vendor/pygments/TODO +++ b/vendor/pygments/TODO @@ -1,9 +1,6 @@ Todo ==== -- suggested new lexers - * IPython sessions - - lexers that need work: * review perl lexer (numerous bugs, but so far no one had complaints ;) * readd property support for C# lexer? 
that is, find a regex that doesn't diff --git a/vendor/pygments/doc/Makefile b/vendor/pygments/doc/Makefile new file mode 100644 index 0000000..7fb7541 --- /dev/null +++ b/vendor/pygments/doc/Makefile @@ -0,0 +1,153 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = PYTHONPATH=.. sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Pygments.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Pygments.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/Pygments" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Pygments" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/vendor/pygments/doc/_static/favicon.ico b/vendor/pygments/doc/_static/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..777f617dd904043a7ae5ae7469fc2b2afd18cee3 GIT binary patch literal 16958 zcmeI330xIb-^cHM*<}&Aa6uMDbH%3~bKez75p%&b746APGwaPB^D$FVD|^$@OmoSl zH1`(G4FP2p1i6|E;tqFmxq<~bSm1ar@9#g{;Tdm0rdYn8kH@~w+%t2Q-|zhAoHOGm zg+hrx-McIBcaY+Vo(e?+g+lQJK$apFQ2yie*n!6mJa*tOc0h^tYQnxNHQh(~|KAez zSq-#64;*CBZ_20ZlIDB87eyZFM0!*}2V6jXU<3_i&~Go`49Ew9&MB|Dd#Mhpi|VAh zAJuq}#@rEoq&_zWPlIqUKnDG83qpXW4LYa1>h7gFs4l9L>XzaLr;kh`?HWMiNMraU z7zieVd0?T8xj3H)B0zJ{%yv$BbWMIQm8CkK1ym>1O>IzHk4hX!+9^f@Ku0hZd;qq9 zgT5YZGul7#?+hQ;ju~pTIuUtafN7wg4Lbh1G&aJa?eI0nk#yXX! 
[... base85-encoded image data for doc/_static/favicon.ico ...]

literal 0
HcmV?d00001

diff --git a/vendor/pygments/doc/_static/logo_new.png b/vendor/pygments/doc/_static/logo_new.png
new file mode 100644
index 0000000000000000000000000000000000000000..0ae4b2095814ac2c57901369af7d0c880d4a185a
GIT binary patch
literal 40944
[... base85-encoded image data for doc/_static/logo_new.png ...]

literal 0
HcmV?d00001

diff --git a/vendor/pygments/doc/_static/logo_only.png b/vendor/pygments/doc/_static/logo_only.png
new file mode 100644
index 0000000000000000000000000000000000000000..fdebcc474a87ef939de63bed9371f6beb099bc4e
GIT binary patch
literal 16424
[... base85-encoded image data for doc/_static/logo_only.png ...]

literal 0
HcmV?d00001

diff --git a/vendor/pygments/doc/_templates/docssidebar.html b/vendor/pygments/doc/_templates/docssidebar.html
new file mode 100644
index 0000000..913acaa
--- /dev/null
+++ b/vendor/pygments/doc/_templates/docssidebar.html
@@ -0,0 +1,3 @@
+{% if pagename != 'docs/index' %}
+« Back to docs index
+{% endif %}
diff --git a/vendor/pygments/doc/_templates/indexsidebar.html b/vendor/pygments/doc/_templates/indexsidebar.html
new file mode 100644
index 0000000..2995455
--- /dev/null
+++ b/vendor/pygments/doc/_templates/indexsidebar.html
@@ -0,0 +1,25 @@
+Download
+{% if version.endswith('(hg)') %}
+This documentation is for version {{ version }}, which is
+  not released yet.
+You can use it from the
+  Mercurial repo or look for
+  released versions in the Python
+  Package Index.
+{% else %}
+Current version: {{ version }}
+Get Pygments from the Python Package
+Index, or install it with:
+pip install Pygments
+{% endif %}
+
+Questions? Suggestions?
+
+Clone at Bitbucket
+or come to the #pocoo channel on FreeNode.
+You can also open an issue at the
+  tracker.
+
+
+
diff --git a/vendor/pygments/doc/_themes/pygments14/layout.html b/vendor/pygments/doc/_themes/pygments14/layout.html
new file mode 100644
index 0000000..2cc03e0
--- /dev/null
+++ b/vendor/pygments/doc/_themes/pygments14/layout.html
@@ -0,0 +1,98 @@
+{#
+    sphinxdoc/layout.html
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Sphinx layout template for the sphinxdoc theme.
+
+    :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+#}
+{%- extends "basic/layout.html" %}
+
+{# put the sidebar before the body #}
+{% block sidebar1 %}{{ sidebar() }}{% endblock %}
+{% block sidebar2 %}{% endblock %}
+
+{% block relbar1 %}{% endblock %}
+{% block relbar2 %}{% endblock %}
+
+{% block extrahead %}
+
+{{ super() }}
+{%- if not embedded %}
+
+
+{%- endif %}
+{% endblock %}
+
+{% block header %}
+
+
+{% endblock %}
+
+{% block footer %}
+
+{# closes "outerwrapper" div #}
+{% endblock %}
+
+{% block sidebarrel %}
+{% endblock %}
+
+{% block sidebarsourcelink %}
+{% endblock %}
diff --git a/vendor/pygments/doc/_themes/pygments14/static/bodybg.png b/vendor/pygments/doc/_themes/pygments14/static/bodybg.png
new file mode 100644
index 0000000000000000000000000000000000000000..46892b801ac1088cdb7091f230bcb0eec1bfbe85
GIT binary patch
literal 51903
[... base85-encoded image data for doc/_themes/pygments14/static/bodybg.png ...]
zZi9~lvD$w2KBunrUsc=NGCdaL*p#uFWsz>$EY+dvGbYb3BN$blzvK_{hk*kEX~LX- zB0&avH`ptj{+LM>LUC~Z={tIPG6kpu28?oL8X73xC}_})^aRBrgIDh85Q=gEw2@pX zA=ts&LP%CJqA#zkL1c{NHAH05H;i@K%4R>+*qJ8-^W>Fgp=(?hnF;muo|g^}{L>jo z*Oa&}g_TYKZ~V%k4whm9H^JfnZgp3=n4`!ta@F9dmhz^uV@F@PTENB#s$;6V9r}D4 zMRmaAC^N zi|QQWU`R4L{#W5p*_GvuSS>%uLd!LqFc_Ii)hrq!%jOEGR)Qs)IU;e$p%8;~NKsOY zSv-{kus3BiRMuyJOoDGBsnH_yePl52Y5AyQQVJ@OL2k;){IImbypKGi#Era0UM~3^ zoQq!7U6Y(1qkgOcy6~EVg_#Amk?*d@KK#|w^45l!5st|a8Z#s_C%a_lEd{+&wok++ z^4nQ|7`fvw9xBsUuSXURtW^N|9uOQDHLn6t9WqG?S_7)fe8-W*-6F6fJ!hXxv*x}U zQO;P}S2h3VWPRTke{1L_#`cP(BCCg;{S7Q=gll#i#96 zHgpB4xqY#s8oh!$r6fyln96N>w=H}yMFD~>>-T~Ob}e3R#{~RNwl8(E@(u?I?~7N!35@Fpu~{61Ji#1KZLIB#_Ok`^$-kU8fI}wG zN|=^{?Z1H=GmO`L%L+r^q#i78z{ByQXLdjr3{S{-g=hg&iGVWKfTsd2m3YdI=c^I& zi7q@Hj5cydRt^>x0h)j%Bg@g(J5%1I&&BL2RSZG-A9nKHTZHN%dP)nwC_FnI9WO)% zcX$Lv;5?>fdt`V?1JNDvNd#ckPf^8mvNFLu?KUJ2HdZ2=NF&S8w$4&sw$L+?eXyKk zG?OTxpO~vcm`|gp4$pT6m07T>AFC}~U|x$tIaK20X0>B5$P6kMP${c(2^T`!)Kc}E zOPtc9hyXqV3D;V-wJUj3xgt^$F36HIE;Me_hAW1W)B zV-0+nLXxM2;vpW7q{;4C+Nfq%ZP$gbaIs2coJ1s={lKaV;+AV^jJRvU7y@e$gOQZI z+*-->KiLXPnbtNhq%6fSuzAMWrXD(xA~5$KvAC_TcTX>~)WnhEm^)vf$tJQ^mwm54 zQ)cItkqj2LC6JAkjZJzh%W;DgM%<;OtE*dR>2)3G)jn?y%T{R*la0}~3JqikYRQ1|suF@IEx(KIH3r@=rCsBpIHyS*8cmbV^=?Y#q zDod35nIG8k$>m$(?JZ!`T9opVJIL--<6O6IrPJfmR!L5V)11fHQcrvAu^{z8v>gVG zAj1ywwp2u-!&jYmydApY1$o}>98|e32eezOlD*`%ro^+N{+QyS5G+T|)f` zm3+Q;fk!xwt5e$UsC@!q8@tiG(fBD<%piWR%a)shA$V%5}p*Y zGip4k&;4&Ne0I^ZnKv4*&>gI%?hYveuV!`S_jlF?(t2}ZlAe#t(cr`JbSnL-GBwgxetV`9o2zVdLGopX^_M9;7Pkne7w|LDnBce9oDK zEF$TvebS~*$FhKv)v`En>==vcv+uww!lEm<`Hk*%XZa&luL)C#$Jc_ods_oK-;K6r zLGC#HjW2hyvy+;({2Eehyva{X#h1A?!EBYvQUI>Q$9R$Cu)(FyLMpAkhS-fXX9aJb zNAvNft8Mq5VFTJ8J{#r~T2~<#z}ianZ9#Kzt7<28LkwTD#l)e3>J10*XUdstwXw5x zWMrlzG!Yo;Zm{h0uopgdS)`Az$K6ZIDUR1u4$;nOicwf-;!QwA%b;V^_GO720N45l z1{h1EA>Ak}L}oP&r8kmMteu+yU-c-OkGI0Wu1QMAHnjp!`!%olQI%gUkS4Py2_Gae zDLhDMoW1NV!=~v+9F&=A11g|-m`%n$V-e&}CX`A>)k_P-9sKTM1QpJ4Ag@K`AJ1RM zb1gljFk`X4g{_VVd@&(EDK3Z`!JH{%_dE|ez4_kdd!gqhSKzVqbMfmHY?@had0kNi zXKSpRo$yLfej(^IIC<66)m>^I9c$i=dUVZAuX9NmjC5qNGf^aws&R_(>Sa*9&-b=z zQpx0LeB5ihvZY_R*vm`VL!$@Kk(S;M#jNAX03ef+9#xZTJi=3TEPE%S%0PAsVa)KOQ}yuwHb%80uP78V(c8sdCj zD26IDU4c-kspY^xOi9V@wKrQjFG~utw{ zm|PWV{5YcC9CCzZ5$8$eB?U8vF(y~}D|}h#AZq~2NXn@I@f1kj1muZ^N4SgnD9tc# z1;*ONF`BP_Tb;$FdFH)kIc&&@;E6DCgeL7Mw%=r5)iZjsrSwSO)TgT0uVegwW(I;^@9$p^lKMa~!z|6`OZAhFO9e z1-nhlcC2Tr*lRp4sz5+wd(l1a4E2{xhSy$KqDK5^n+sq z9fFP3$&q)?nSpJkzm}jqjM6=?tyNXx&&ZXte87#ILS2>bGj+Qz^?uc{8l2K7HiDqp z(duDo@l*0aSalK-TF7Zk7h4A$3NE3Zjt;j&UOC9>sN&|A3M^>3oP!>F6_84VXSJB` ztrg@loj6eS4)}`=`y$Z*(~dsUXC#np1F$T~opg)~5V;0BxzYCa1Il`B_v6UqrSP?P z+=0Y{++h#M4u+?-b{)On_tkz+6kS^grvXM-J3j|01?7(8j_XZtJZ$g*v})&mR(-y5 zjLaj6x)f(dLmf(u>;!wrd>`;D zsUWf?!Y$a%RX%(TmxZX*~hS1{8<1Fke#3_lpL5HHMC7$IVtJ3abHnamnbdT!x|CO z3nH|j#k({|Nxz_6<#ppJTS)oW;_F&3+m}DZJ(ybtJk-)8N##LCZtE^-acj-4#N;ki( zKjTr+s>9of?2d;jdfNTC(agjLN-m0L*5c=Zom*naBQc`q){i+DpcUQygix<6>$1At z)x>}UU{B9jO2T3iN2Waw(ri1s0wIO2(2{u`;^S$NxCq)V-9fk@FbG2vDWc;BpcBA? z{bDoohejH_+kU?%?M z`!|cP3*5YPt=jGw3->&n3a3BlhMIMmd1ZChTCI(nCXlBkYQ7Y$Er4Xrqz%R@^#I9> zB7#R>@#pOQ2H|$uPk>vMNPR#WW zN<_f5u}BjlYifM1cHohd#ZN{(zmCOtohHZkaq-GZJRu>ap*zNFpWG-Q_-fA@zOa!j zB=Tgw1qmbd^l)?4AJP(`NY+yxVh}8De>wyB5!MZ@fBoyfe^n&#MzC@1aoCAg!fDcs}xcvgp}r> zG<9m4EJ%I;?rAwD=Qq}-U%Zrv9901P)Dehr6w7(o;M-^QrD`?!45wWz42PhW;vXD9Z{4R)U8N*lMRXSLg z-UAUQpl=ectm2HPupT9Ri(iXMvc^;89$v?K^)uj@5g30YR(Gm|)tDilD^)`QNp9g! 
z+Cbt@mWp{nHj|dKSF500T7-pnJs_frkvcBbVxXGd&yVCiJ3W|1@-nF_+Eu6+Js&p2 z{%n9vR*bW!Ve^H&>rZe27`Ez^_=1;A>4HchPLNKy{Wc*tQ>}}K(vJ0&gRZ#&892;4 zKbNq%)I%VDma-tz>dM1Mcx}&4g+~u{z_w?^mZ%OIMWt96>T z7vs1@B+MPtk;S=%-?1>I&dS47Cu-~wx7s3d6e@pVN^eS`!7!Fb%9VHb~?eTSLNvCKTxz(TK#-f#m5fRFPy1tv*%T)%S^Jx?f&$5E{^uOCrezz&} zBd00mWTqKx=nAks4BP2KbXqdDL>_H6{XeF6u!svfZX&xB{$P4d@X6S=wt4_~C9a`` zr2zDN349r%6RxAVtW;(0v9MdeUN%r^tsog69O?6SlJ`P-h|G&Ls~x#B04A6NiBx3g zWo?kUVtm7EBdO*DTr_dl+Rf4%gxbI_>J|Eh!-wn@NFwJslrRl2a3}s{+N6XaDE2Oz zo2)tMX3>*HZTH`A;uE z?{179I!Ba6fUY%iT(zE6F*+30_-G%WEF=X4DV^s+5_@@;P$iV-l#ae9db!jL@}_AS zU}tXV{m&qyXA60#shGdh#$8hDq|p(m5H!5_@d~&Qg<@`yKbhEJ)F)rYuCZwFZPIX| z?z4h)7uXAULrzJfjkJqg6)87Y1&FzX_D zLo}j%7^Ak@{y?XwA-zy>$p_VtWvGTIhV=vaqZO7{&GvRp;_?ug zIzU3#Vel&%9A?$3ZG&~#D7($!h0T*}X`m&aafi`HXfhXm`w)wVUU#c$Rztm1wLvkF1 zto}2vtGxr0-?ZmV2P%PEiC4Tzc`A8%ba6B@iJGUFLi{f4sJK6v{MPC(3=%>#q1QJy z&Q;FM^g+E1BTPzF?~Qp2iK;4V?U+~L+c3HE0wz7WdhF(DS%ht%JV-?nwS+sQbKEU) zfhwjDrvE3=WL)#J-OFRWT&W+rYFR$ejkT0%8JVb}jD+UvgyTgc8w^bSW_m;FnkN_( zwK+*!L&qevs*`M9n5^G!8&HSRGU`lKg^gQwclBtyjtw`)eB?oHAbmY5cx>1gLJjiP zVqg(RVGsnWuFyp^uFE+XM~BG8B{$gFG{8Q+rEqV!Wnf9IEaAGu}?TU1Vc zI=-cO2AObZjoZIVdV}WxRkI4?eUIX6pcs#zQoO^x%0PESWlcu)^~^(HUD^$*Ona`8tMf zBf74G=25e01g9W+ZPt-JC?2>H0xgg_suP(4xHkBF`9jAvCeNp!v9Xv3^=h(R(YcY$ z-xWH3rWwL{jQN~nwhgO26OJr~rtA-*UtV&K=VUd7n{)<9oO4-qYz%>gSDoli1&?E6 z`qAuNJ#ApK%DFL!?uXXpu7gipL3ngj>z;H?0dWR|_+iIg*P2wswXLxOrKSp;eKb-J zMbYF+J3ZX59wR7vA!&!zqPA>;Wt#5bXQVH3iY9eL!YXfmDMv5^K67uLi~5H5g*YvFFoYKNY3b9 z+0&q|_*QvFo$DTTSgOCe=hX;Ltv|W4pe*LR!40(!c7!zXD@*!;00FF!SFr-4(|%ZL zUq@rJihQ}%F>>Ru@hUJs-NqJkC|Q*Hg<*6@baYJM8N+-4Qd=`LsFxO17q(6_@GUG#LW&;`I8lJb$tbF> z1|OV_#@dj0Z0&HMvs2kHaZ%UTlkB2ewE}--eBX)t3F@zYV?__<`Y5wwVHczHW|Wec|&7u%UvOu}=2!{%GGjmT- z$k^TZ0_P_gCSWDUi-a$oC*u&RVJzYj1#0zvW8~I)x}F7C3nD;eO{bTJ>2x8kyt_>Hpo(dWs86W%pq!XcVhnpm z8JZa?Lp|5M_zTY-I>#g;F(WD){Za3sAU2l0z!{8p$B8g6UImuXFYDILlgwk4+qE@Bdb=!Yfoz zHT&<*SUGh(9PM9#L^!vVlEAWePjAUT zEaj?>Op>m>7^NUnc^SZrxcbvUdz9;YL-_C#E84PC?J|l4SVL+7$TlQNm5|RiA(v6K ziM8){yeuqE7whnV7Dx=4YW{T9anDl0g(7#swOfavW$6yAE6piv%DR1VK6xFdp-xpx zdSAn)3oFg(4t_B8)998)d-kP_edC3k;gBP%TJ3jw204m=TG@3Xijkh-bG9ZNpz<@# zsGm}K0Z{VH*IB>fxppTt(>Kj0^-3Z^nA4tc6mZMPF_OJNl+oNOI~jDG`nO?;;tZOY zl9&#-2gjDJud%{xzPa^uBa&Hpn>rsCFHnjI4@Xk~RtP-2jnL~4d6c85vb0JvmQ{qZ zW-m>(k3FYxm4e$VNHsAX|1bQLu>!_b{Hv}(%`P6&#jp&Fcslcc5R4^7n|;o33%#AI zTJkUx1U54ayf6X8A<$Xk5NZg>M{ph8)g=OxCpk_?1;gKpK*{|4 z)jkGaUSN-`zY#{*n$F<|fOe`T}1v^Q)_(6S55M zOjb;EsHu^@lGk;T`AVzNz->wUpiL_t)DgiU<9g?MW?yqfIwHC;6+(V$=iIx{?@h>&XTJmH0>Kw}d#_k&@M z+#9^RGS538i8N7RdH^Eoukk^TGy4b+m#lhsz>&T{JN@s>BU$5&2H0yA4B8$uhT*Hq z#hg@qv-J-QHPjjfku}fJR;L`!tu%+&a{Z8&X?)!ScP3DlF5uWs#kOtRsMt=v*iI@{ zv2A^^ZQHhO+m%$3y6Nej>6v@i+~084S$mzc_kN!rZD2-q>qkHw4dy6|)qy;ts2hg~ zHFVn?MXz5eu+bWPtFIXyN%arqy6|Uhy^S+4yGM6h>@Yf?+!MJzdvgj8lLvEA%%(I4 z3@=UzP+23s))&wRJ?p%#E%zls3hM=rH3^)U3@b@xcAKGtkz$rCVK+uO;VNvp2)8a1 z`&KH0;Cd)#N>bAvQ#9*B|HYOJAF%ywpe%ej!YvPmNF1GkuhZFa@=>Y@dtxospjM`) z8Spq#2{(pQhZ7@B8E(UB@&Uy(!Ig0h-zP~f?)Z+&=;RUpQV1+yk_{sB$j8h#w@#s@ z(S*X%b%Oe{mE_?}`l9BQgeMT2$9RpB$5-feL|}pqnn)Xj1^WQiHqOERC-g9iOfnV8 zb-uPCAW^eSp*GBn9Ggf9lN>fVP?#!*4OE}#O;fbck^&%F2!e)A2C^;%adEt-bTjU( zW{mgO(zPHyK9ND+7Mc!%d*l6x9_{=aaV^_woz~ec0~uW4N{ZDGVoUw&G0%DXw;*ST zPP(_>nc#HF4o)f2z|_+29nPrSi!_-{eVU;m!7Opg41jF5W@2@0l3sydUZC66~TQF^dsiv=b4OMKNkt9(xVpDCWX zhPp&{3tdtNkyiF%O&KT=8RimaG@{bLxM-*?YQi(6%Nf3*qh$C)4AlDX5t(VGEG1n9 zMhjuk7tI8;xrW?X!@BWuKkO)a`Cw{vq}6teT(yQ*F5aY`x0V6f|VI= z7zeFsi0DR1F@&n@AtgZ}(D7r}jrd!4cnE6Um;}ugZL5E&BMLD2r|@s`^LmO7VA~mFzQK^<8+m-abi+8~g~pICEreIn|RtA4gs#0WwAA+zG=% 
zS#n31Al*=bQ_nz}(CO;^Wzpqw2v#z?!jFW6ySw{?E5k?yZ;QNo6Ndwi4a&g!*3k_M8LA}8J=`tN$fof47k|bICnOL=RQ0= zSy7>#5J&guV}n$ z@wK!~t?042@jJ@t=Ub@4r$3bYhnw!kcc>olnqfwLUcS{cooK5oQjG$_BQuVVkzk9) z>J_T=OV7501Y(s*3~F1Y9_D!IX(Kh2R@Fo*c_Jxe}q7c z8R%70L{jeAb%prQSTiJB^*9|kN_}g@pA?uaHkA|(0@PfvdFjVwl(b89fH%FTatML- zle3v2OdM$WDBDfo!MfH zj01m(E4T7(g(@5yEI_K7)avOvd}{^~Rd7)`EcOQVK~CIB z02S%4$%Slt=ZgL2e!$SoPi~nqZ#%n@Xj_|mYy>uT+vk4nSDys&BXy%8`af&&k$h`? zp5McX&(2KO&Y&LSyOe&@MeNm6XGqZLFG%kpO?v42kiSLvw*ZAwT~&EW*?&Tvxq@9) z`fQ9j`k(|e@gr>Pw2mRyGdHzVAULALFDpgp;6wnkY4{X6vQHok5S_euO@kce9hVGFnq78{fzIjPhs!dQfA>1GP9Sq_<5AYe$S0D{AA| zt+6~I{%Z2cC?!YFBFOv$6R98wN;8Aw|E*HyQR<>sJQwYML-y=ihAf&a3isAMzcnsr zmGRy;%NZH6jb(pbe#$Y;(enxfZAu}!-LjRBt?6`d6P?V z%*+m&aGnIr-+7G%jQ0x0F3p@3`~5c7W?*0J;Lhr*+&}D)D1xa>@IYIz;C)e%pdtF?4ucL-pkBU#`yUCLj(ZBJ$%%5}QK5|N*&#M2!4WYVk10Z{hAh$j1GqL+v zKr+NKC9s>7-D9ReK*fS}L%#Rf4(1hQCFBu@&TH-FftV*2`XmZh5J?fpl}l=DJ$he7 zkdC#am6RX49xg6Oq;HXc9{$4%AyZX{_Mwucw*X^vJIUCEzwFB*=bCCQD<|fVGH2H1 zym24YpO(~Ul(IJPT{#6Ms-nI}LdwQ3qBwW347$e<{|6Pq>iqi;fUvDL>>6lt6Sln= z4t`(KK1lSy>q3qaZu(mOC52psaVmCr$Xk4$c2U9Eob;kBm`HTNR&+db40=byxrLr$ zB3LOd*QZ_b1W(E15BU^*E$v^*x=X6)cFvI zk6c+F+ah+1g)Eq=y)@kJbANQte~l6B{@o#&`$H4HL^P0EqTih;Yb?FCA{JAqkIVU^GqIihksha@oBveQClAJhU*(? zXk$h_p}p?Ay{Kgw!^^mgbq#`4VfU#{7Uc^^|Sm8 zm?8-+89Mj8gX2%x7?-Murzuk$nz|v?i#Da}vFk z6;+pPC->+VXPAD)aLPAZv;lvAj!>^Y$lRcDhBcUGrRo|?YX)y*0@iwj74LZx_%0mOj5tn1}0RI)kgAIU&c zd&cGRk@OTYcOfU^AF)%0x?2ePG(V85m zRN+a!pWGRZ1FJ@#JQUF#yz^Z%gqv*U*xiTlFRD&c@H&`3kWj1l&)7-}Trn$YM!VNMh9LyPa-_&DG48M%&7l5mn-FV$LuNWLMBXEHDqEqW<} z{ZIw~k}Z5@aEr>c;LCnVk#qgQ2>k~g+-L?6ko+Rzim8Q@%t_Wce8j2OywP{h^3i{v z^UGaTb^mUH#x{QGA;%l?IEXmD(bHyoK4WN;u*Gk+6Y5e+lwg~KrivyRb8bw1&1ZFi`t$|CfEM}>PkotD|S;X{4BW6fLs#yof}pF;&t#(3DbVX) zTM?3Ep4X5cP+^=SJ2W4si;7T>++}b%mCs3uF#0$9iOMBz${OW_1T(OH zrm-|PUe}xH?sPn37$h8i=HkTF-rpeL+4Qc_r;wxhFYQMGd{Lbkx8o?@0Z*q#DF#f;(UqK9=1?EomUycipeu*v3V>(Ql!*I5qcY z)K)G0PAbXr3^-_|LHoEIn zj^d!bIAL;Bq(Bc)h{oaLTQt4qo_#mq7bC}1PjYL$m|rgU@W+g>x~2;E&8J;<*RHb|w2kje#K{8x2~-a9%=hKK=@R*4SkdicnG7Q5TRfZ8#oOL6Dw#EKFip-CAh#h-uZwNhfnZ{*; z!ZJQkLL(uAz$y?wWl?Fp)s{%g=NRu>-XEp0NN;JTO^*uIUphf9-n*@y)qXnKuhR}b zN_;Or(y)?hWsfd8u;9^43)P%eK}evsL>>ZVg7yU5!7VeUgJ8}1I0+1kS|PV+IJ)~^ z%j!|2Jxd(Y(u!UK19RNQA-Z_Aq3=bPcUTql)f5#)DRhOTn;&WzIFGWxVC*xXPV!Tg z=0#}eBD?f-CiNksx_@Y}TogR1ph%^9+;$Eh2url&<6H(P;=?9j6B$6my~MhFpu*@(<1)M7t$ zif{F<`tgN$WV38B?&Q{{(Fup68T2g0GfxlPpiT2TY3dQW@cJXjgW*9?RCd@c{e^*x zQ{5N1Iec_f>NHY;#nbG(WNRyo_-4>6Kxcm|+&Wp%$I5(T4x0E=e$Qc%-oR`~90iyd zbn3jV$|7rI$FQAMNRccN$F|7L=`z~-OVrAIJxUt_c!W8T-m6#MRefc`qr3EBzG-Q3DR35N0N!m_yg z-L~AwsU;SK)cE7FUX&DB?N$=}MEWcfqpJ{M(AjVG#@vTJZ0S>I^70vmM$I$I2(+_- z$Y08ty^yjFBtaE9^d-X^RNB_a-JZyP#MYSe|w$ zImzq`FBbn?F+^myely$=2NRJLYI`aQ&~AIaV{5g@w8FfKu% z>;v3bYoPacVm^Nc=~!|J3a=i($5rCd@Wzmm zw8so~T3;Hu4=UWue_8TMg=esSFTnhqkkj!F|ywB zv3d9tSRTaxF52^Od%^8ls4Y?PFTDeIuz!>PlCmyRDsC=Hxem0YDE%5bvO`?J@wZP6 ztBPwO5oLm3QojJ5(rXGPnUDGlJ^_Dn4v;6fZ?UJ{7A7jxCoW)kYyiO-k`>a{<#22JajKxai(STR_T$xL5p)dYTvm%ofHx@XRodNIEV z{tvTbgn34}`+b{XPH#ujrLnod!Dv!4RUN#`N4<{20zaz1iCrvw=IiaAbl6={WH zyFDcmJs?5_?Y~hSz|KZl;cUeICMyQUSU!n8Y{^EaAS1Y_WN(okp*v*4-2cKlEb{+> zbrg{(I@LW4zqfdF0I7Hlge{UO=2|@Y!^F;^LSe84UF1|&@Pf7G7+1;^h)`&>!K`f; zm3qVL0esGAt_$S0Os(Lywb5#>cD@aooEley7HCCVIL=GcC=sL}p;1W6vZ9|ZK$1=td|sQ5Ph z@l)!V<O>&(K0^1TJ6}ro%h|oK*)8;r6Ld=7{+%-cfAIQT_C-%fDm(q3hh`U42r3fWL=mDN@T_KtB6=OpzJ(C?#SA1R;ZlB%jL`_g(NU-Rwzoh zRr1Hm%|Dh2wK+3$xafacD-p7_w>HzSCFKNlJe$6r6}|?Z2RT-)fQGbzk-YeX?9KdaK_OW#$@0_g^goXGS;XQr=eE<00Gv| z8VA5m$>W6~>SOdcEXFW{sG+~wx_88-6O^ed(awasw|!ayEa;FOB#mHzkMBK=h%dCX 
ziIMd*GkOE=<(9!CaL5XN+pgeBFqeRz)tsV2)|DwnO^{LsmTs69vy8O!DsRDNlu@i$Lr(Lqw5qc_aU=qJp?cKa}f zb~xdZPf5(;VSCI#R+c|lW<_ttQsS{Wv~X$R1k-KZ(-Z*1r5`^*y3n4scAxid(O3vm zuk;hpv#8*B0vP2AWQDq0Z3oc0I!GdTV6H2E%9pj%1K}Tz-(_}sYa3dAD-QAQ#X+*u zL~vk#<;L%#+2Q|Gji@d&>en67a#3dwx!`Cu95qtqx{dU=P55d38PFRrQdwNHfA@3m zwV^<+a{D{YZ=5}#gjnd9g^8NF+8brD+KMZ2xplOt>=#Ah$az~ee=3Fy;PDmxKs|7p zT%Nj`8qEZ)9>#kORs7o%|KF(&y?tVl*XZ_4kx9>=why3WVV`hGw)OIeRDVWS3lkC+ zl7f*69c!4f8#IcA%;`3KvUkj|0X;Y^CYTS1q4iBAd0G?r-ehLlN^dx-IK#d)7Op$< z|F${|E<$wKvdjM`)`7N1AHiaI@&5ISwkXAsZ;VJf#{Gbshd7f(J2L-u`jWkRzi(u! z#X2Focrj-aAf(YH*%KVJW19$fi!TX(Xz8hj*aTid5ArC2jUSozDLl%w;aUK^pr;E~ zg${!fkwgLf-quDnDx|2h67H(@S&0Ozd`wG8aCc`)8lRsr=vFsd4=#AeA3=Ka9U|#U zTN#^c+l(ULa(^1VpE}*9@cmVwJ-J~fe3s+#C(unWP{xsa+$4b6Ph}Zg8mXTA-%f{L zIOBgh9ZxF%-RZFFyNV!h$6c}ptZbW?+~Se_2KT~Qmso&aqv-~H$s;i?u#Ak0(c-PH zypns{aQ<>S`cGC5jZYSdtp|xSIaJn@vrE8_K$28ynE@HkK5pDLs+_~Fb%9O3YxK~j3$rhVTfFhk1^d-sl1iaw0Y!CzU#^I|97TC zEvx}$qcCi74%_SqM;6TH{w3K|b$paV9ROjZCW+r4w=Q0nqBGNJ*Wj(gU-}uz@3g!{ zyMGm0Z1LeM7cM(3eA(r#UIIuIeJ@(`Wf1^e@RwSOR3zy<8~uao@J9Xz)q$~)0v`5i z{TvDn1zfJ)X}P}kww=e7Keh>jK(nHZH!DHBB=A5PP^G;0KK`XUX1NcGSV+U?bd(hw zJVprHrYP4!>y3Y4_I4h@@m7S(nc#rzn08*_GYhc%ehV_*Uqf9+25^C+jpB;y)b^gW z&iBm~)%}#`X#VK$AYTit^Uitp<)!@w{7dW5Qto`=Cg%NLv<^9cJgViU0<2G*s$wAzNx!{* z;P-!P9hJt8Id{QY2meFsz)Dx{3cy`Fd9`CjV)2BWRzhzHcN`n2ZgwMTUM8;;xXk3? zgdOnKE{499Wum@9uTHTeAqa1)CiGFiL54Wy%_vh*wlLrb!K+)_kmeQ{uYQFq5rqp!9P97E9QVm_b;=9^~>zQNU0fzq}X^Q-)8#m z;B62N)*j9SHnv#Y96-y7S>UWCyODlX@PDZtIuDxz)@T40Tm_rMrP3N44PO6^U>7<8 z+nc9o?M?j#V_QT+;$MlZ7?o~sR-1zxU%xwNyVcp%B_DZE=g8uTxD4u<0hE|$V-)L? zE}i4uIpG6hEvUhEKf84$8;j%AMjm+MN_De*ENpZc{md0ogM;PzDXEBcO90vwi*Svb z{I2{em+@{4A-jWxZp37j;e!?qBhgrpARRRlP3rK*-qMi%+T*E9@^+gECZ>og5BJ22mIw7 z3$mg91yf)9f1+SgKaVJ}M!*Rs}q@a#5o{P5FmKrAZ4AhewbKhB;<8ssdya1sn{3+&f?afV9!4E&DCo?u5bpGhUMaNxCmf#!{U}(RVGr& zb@C1#a!&J18mrZeSUhiS$%R-bTu6c(A?WB{;P09owr%NIYZxR;k8(v$qRz|u!__j{ zf3t7D++Y|d&e}-BLOOd9S7)*6-eY~&8yKYLw}Q?vi2~DikF&d|pE0TMt>_`0LbIZy zoFq9yUyYOnffK_gV5MS8z(N_z^@sEdMNQ_62PQR#MS&IWrCnGLdKpRoMq7VZ(M@*q z0P9_Gcft(K;QM$6qh_tPF{777zcH--oVhRDmYh!W)7Uz>g~y2qb5 zeG?3rbA^3eTMq@HCE9`C{LzdX;Y6f>uy$r3Dwm80@`k^#LW{C zG|Y$uy`CC^K-W}iU_N*-uBb$TyJlLprElGhlJRW?hhuTlx2A?#)r zBhJV^-rMjw$6N;Cc`acoaHW2VQeAcIBcE$Q05#n5k)6v1e`w|DeS~PyMrttcvj77fu`=6_ZW>9Q(I9=1>DV=#V>xjLL&56Tl&@gUMf8tkr?g#Cb({{_oFM?;(f#O~^Xr}T#3))RI_;hylH0VDeZ*;- zKaBK6JZ{q=0?^BTDdR_(%CuZqvGA1c5Wud0^2X_lJKKiW2R|lnS}`vC)u&SJFkm_= z$_(EdHc+I+)o``-nyt&o7e+~=e(wu}laZuiZ zq|^NXy3jGjE??K1R7=g8j*0Ost;*xAuLI=I>Cb~yA~yYY8#(E7HjB*M9ix7jQY9go z&C}!wHlH*HhV^?`t6FSAf=F0CM}l`MyI^>`p!7U~5o9onCN0!I3hUHACXG^}MY!Gp zB<~zLM{CoJzQ~S;EuEP#rzqxKI5Xb2v~W!s!HRD>1&unuzE1U|y2mSVP(35(s*G$e zpe?}bxzJb^scf9TM0uq;UY=3=*|5d<*9J){GQ2Wtu|h^3oI)lTevJ;MkZb)p;WGAJ z^LxkHZ;KbL%7GPuGvHEX@n$Z0vVFz?bX0=7(pL@>`{Q;OPz}i$`d%)AZEwAz*TY`X zW%-r^9^YY+;5*AKVlJ)#Nc?aoX|OvgXv;H81zx+Qp`A%tS?S&h~Rfuv(x+3ltzWTuZ{HvHB zEFMp@{i-8Xp_hh?e7pvMrB&-ia+9nBvMHQj{DSkjEt_X&cmfBWt#_?u<|`){M##{b z{$c9j>7z3n<+us4OHHCbD)|!o*Gc(h6kUUM2#bzHn4|=@M{Hkj_-xOMp5Gn`WBbC? 
z(q;ydNv@}f(aemC2rHFWmNNW0FrhEYUbW!}+F7$(m6a!*y~H$#zn$S3HHKyiYa=~PA-iy1c!c*bf( zQP$kRfJsgQJNdrrda(PIexER1!E;oYDRXRLw|;aBdUOh>HfItr;NK?>~D zJr2?0`tXutit-RoxSkWwcCmgr3u-ZPc;ln{46)1d8H^!z;OP4Twzml2bc0If_kF(6 zVzko*n|CfzKv>i0ZUN?7l!~>Lh;BEP2Qs=DxX`_V*z1jx9JB; z=lWNAs7ujY8W2(W*1 zBx)p}2Qe6?0AAvbG<98J`i=2fpqBZ+$PPnp_d5vC;Y!k3!Y4G}IxbHx0@KIxqpHe4 z*uo|%wwVcQscR@)N~@sJMEzPT`SFqgi*r!HMBzwniT_vZux+Y@8)&-~oooUL?3e1L zIi+$2AFWmh32-f!{@$%N*iR@cthPs#>$FXAn@cx+-+({3fN=g`7GePs%d6tda%V;zMQJD{O`{5y*dsj5pKUz)Q>HvMGevq?4|5s!h1OLtqGi;6W}Vz$EE6KU_&|={K+Rig4zOR(Q3dv8^Wl0d)VB z;y@3ojzca)5*0p-T;gQiA3Q})`}S+%E~5-$?f!OMec%m?=Rr02q*6MVHrZgF_|sw> zy%A2MT{!cox%$g&-dsqxv`xR(59OWUC}#sKIdJ%_9?TQFIb7QRG3jm+!51 zoO(omw!u+B#SK=Ty)m5Pb{T7jL_veaZ@|Q1Q!MJKw+#^0p?LB9H4{JA<;IL|4qN!b z%RtOpQW!F3)H=oMD$2V)E(M_Pj%*V{$idJDBn3KIqSo)hWr-P;3L`)ycJaL+)8JA9 zd+_f2`MwJ*e*6E)9lY!9rj*OP*44@bv*-DVep0!XOOBGxc>i7QDCPgZjsw%hB3N- z;<(Uk~*F-M7~eX0}>#*PVTW2 zq5JFh8;bK2DZJ4K(qtA^kI_lDhngoZn>_w}@0{+*U_6){L6rjB?-;3qnH%W>!FJ)Z zI7p#J#TOrqG;vFFj04~Pqyz?ki}Rd)Ccj_pm?MeRhVhD;)#hzr5%22`6lc@%6GrbhZ4RHNc{u~0W6jx}Ca_8CqpJ+#3rNudkyb3L;(}_o#5$u}M`C~k9uBP#% zT!O!itdzW=n^?55{2NXeC0KqRdjH(%SR*0QZRqg}bu4pzPDOcO&bCq636%;zWh{ts zT*8EgMrS$1^<2VvOO~+Y{fQYVP)c6JorXoHmemEBwIPVmsUBvA@?{)KIICmFK^XlxOn!KQp@Vt3kk z(2IQD+ zGt+D3a@FsCd+q)S)(nTqvl75pXMuq3RoYk;x(2M~l=vm5k3YUb;Oo>c6V$r%)s3;N z{%A_}fyAPdOjp3f^re%9@vL09<|ic{NYxVXG`0 ztbEXtI?{7fDu4FG|&4PcV~d3^SEqaK~?Ua)Y`CVmo+z$&~WX zK*J|#4O$uk*Pp$OVH{Uf5zU6Q{C}xmj7LCBS#sw=*H8WMa?L{T|B&MhHT93FA;*HF zd1ho!ga5y`wOjgdHN0bDc+ z8K~|O6_3lxB+M9brAJS&7h><9)Olc{0PggGNNbh{3TF=Q+#`CdpWjCzb5a>iL>ci5 z&b&+RjJJF8$E44OUk|}e>{PUPloWEx&2u{`+m!#IcjyHGX;xDPAr_(HrKIgrGNWnv zMidv8kkK}az-hw)6vt%*Mqrn2TFhRTv3*2gqO(9R!IF~g<}|R zcIh0hAb|8wmNFapJBzb%?xq%UT;5<=zB6}|UP21)Z7_C)_6$skxT9c$f+83}>&H_u ztFA|upcql?_V8Kpn0WH3G_khPh9+ug>@Di#yO7t(NT74jfWm5wtx2079t4bv`qV|BUs4)6P>IXJ&91z2 z)s{mglbUK1yfku7h&sKC(FAH70P3z`acG{#&9E-&h)gZQQdU8gPy&0$KN2)rzBln^Ci_n6=^b zaat*k*~!+WZ_-)MpBEF9E#nZ)mFr~S^V(Sj9A&?A7nCcnONyrFG$qL%)&F*AwKKFe zn1O)ARcXeWt*2vtVOu<;+J|mthpgU2OoJ*L_g>h$P%8ngqG}=Bs?e)EGd;M-NlE1hX>TMzxTQbu-3E{L^;6w4ltSXf>16OXiLk8zt=8`_*X zQDE_EVl^~idp%k!uygPX2QiU(AYo#>|LnbR*(fwHRY;@Skld%xt}BX=^X^lTy2~nu z9gk)}+WAsiSX-`?>Yu8I@%KzrdIk*`+y6!N;G`$PndecFvEvzOwih?ytR9)yGYB1U z0nH_vFIP3uN zyo3;;%jCOeb_fj|P49Y|_+~k3GXe7=4vhEQ91A3Ubxza(Ka9K-39Fo+^k#+(g6JKmrJ3Y(}m}R(E^ZhY9lU>kR=V$ zG(Ny{BM8%ZH^cEskz=*`T4SgL+yb!DrO0`bVj)8dcFsMr zz%L+l}%@y2Su*%&uSh)!TTS@!e4)-HTogZw$C3)w4=0_Mi-08@qZSddjHNC^u*zYn9wF zVL5(Wk;G_~6Mm`UUDib6ZuiwmBhH`blog87;RNNX%mx&;b4Mg6^Zm$^UC;jznk6M< zen~%$t>zYe0n=ws(IY`{%!RY9n1;)9#e#|E6T?Sncg9GssR~ZE-tS#IN|nU~l!RIwPF~P@6HRbPyb_s(e>X?q)ND}>~(LRtiPe>@VzBs`q@HpQv2Cx=?=WLjRJ*cJ)O z!~gCX97A&Wt|!Dz96yD-IJ>0=#n4Gs!R3r9z>Kswo!TIsP0s5RbH#rr2@<4ayJmd@ zbd=?@??TLef|^H28VtY}(37I+psvV5538Mi_+FEJw0C-PiHiFH)x)Un^?h3RCu5A_ zU2|-;5r7tzzuWAZ_K7sDo2-g1!F69{lQlKyyV~D;+m>rWu54>)cfW)m;2ZtBW%`3u z-ee$Iy>;W8^qEg#;byFbWf@9Vno*m(pX$$krbiI^eq3Ij--_!fj(hEyh8UjfU*-rW z`Bbr&AUKKl;3XU-o(de_^w-8dlHky7O|QEOez*4*7j7-O)=hyQn`N}XBw%+7SX_VQ z73H(S#~DuF?}$?Cl*Q(^0=E$y`eQ>#>jTx zscG4ge;?cS_wY)z0J*qCHp+{V_h<^GGNHK^-ft~*|GSoTmvLh|^IzG$5^HE15Pfv1 z{+(I-sF0?!5L9j;U01xzn<*hsY@F?@W3v+-e2VqI*DFmex|1iWn2*BYzUxr*JA_mM zM(i-sVTx;O#gh3~L0<)8wGwFILvKwj>Ue$R7FEE@66AjX0a_NJh;gltsXVsxeF5fR zcdM)!nE*-SQYs&bCuH`7L<8Jm%J&)n`+hoM@DU?EkLd=I%;^+y6o)e!uN_snIjPC` zT!T?igYrMsAYI|%(jZIX`Y|M@iH$VS1xQm|^9b-g!yyD*ty3A93-KAAZtmUHBQj|~ zfPbILGn33Pg|xsC8rw^OGO`9gMoj`AW4|?zIJ72+$QKF5k#Khi(kI3LS{Fqqk4&fY zgLDaucdQ16V-aYgy!c?3)0;T+${)yZJRHu)6`1$J zmL=hVA6jzT!sVFSGLaY&$z{43Em6y;?Ows8{>2yIW39|4>5eGFB^#sh((`G6NXR6y>U`FN1Q2-r 
zpK}arvd}qT>#kscX7JxF)B5gNID$21;Qd#@&4jJx)TGUYQ2uG}s!0hkPcrPXa!q1aIOh1R8n{n~b4Sa1GYW9ycOOM*`grbC1P zd|{ZO?9RsjMtkIYN6F!pzE|zj(Q)&jWdz^vV4L}GQ=O~-u#GC*F!f=Rwzu)=G0w2Q zdUo9gS<+!{q;W&PG0?dFPfidUwYIF7$^9fSLEv}3V z9~xCj_{`uJMI@bnwex3&Vq1!<-bmd$K>@Lxs~C^wSiyQV(-0S8811!fDgh9R@`ll@ zNUiJlb*@7DbPycS;n9sdwh8@fmxlBLxiiY8ZCTlXf3!iBiwW(S;cwOGCwjndof&?Q z=&+G8v&|_pR)MqjPL>S+?qvH9)h5y0>xMttmKIcBG{%mjs^@gs`sA?1*RUCjwd^JR z@?K-V4c${sA$|nUwhaY%L71SYO3{uXeJE?$7*AJutO}BtE_i9w?4;B|maS@`w+Uxe z3$}ZXrTJfK-4gRM-id6iczDsHfX4j6typnN>DAL zwp3F=sHI{rwMFdvZpf(EDV+ozDIKv*MQt6eLA6D#rIupsrMb?Xd+yA9m{0Gg=O1{_ zd7j^gu4Us*h~)TsYsuRx1(|FX;^5H>_nC^q=o(31lQEBNUuj|i_sXXkE2-a7&BM{l z3T5P^y~59s-62y>=eb1n58SF@VFBD!SN^<2(=BW&Dz0O}`g8Ig8^tK*l1fb1Pe zW%pvk{%MK+CLXY5bC*c+h%^M(C}*g^vqak`gNL2N9LI!3jq2+C6RYPG>nQQTQdw^& ziNjybO*%eAieV?~@06Un`7Q=8(}q1P=Z&naXqscD$Ta*hDKz$RNc_3&H{Sll8xd!X zhM;}M7`fnh=MxwdtFlx05(9ip$y@F}tvrn`aP*m_r<5;CUUYxjFv9wMSu5|%gjx=e7FX8(M;umMifjrd<-8rNZaMw)WL{V zc7nAuug1a7ux6G1R6dmJ%17?LUucMFm|@rBKGLt!SfNF8C2yb?rS#xd5C6t|3|IZd zeC${;{RK)jt}%s$b^U+U(i-SCcDqllwJECa*J7MYk-aVL>yqJS$1XKD(7X%vE1K+4_9zDrDqAl9&DGw*$23jjm`caXV$FTD!(gK+3?*W&r@ zl03UXsQgc=2eC)^k0c`syoPF;RR-dM=YVSr-7)c2lEu-78MqP)j?W~H3a3`>Bi$5D zK1#ZgrIqC6(1i@aGU!>ObA9o$jB^LCFW+nGV8d7W7Y5WEv93Y0_W&q@W|+=hsUb)^;i~v zuf!Jpy0UvSDRnxsHY50~&h~*iOVy*r9+lXDbT0NvYIJSMj9G~<9!bNShnMOXm``$= zT3P{NYh^m?q&|#7H)SQ)GAlOQP%Zf7$%-TU5~nG_zChEZ`3PWL5Gsv&YQ2Rq7!;B1 zwW@!zQX9Z*EP{w6O7bBB&K4;Vw(Y{n@9xMEM2VV3nVtoasVxr`#Ua~ zKfvbZVq(>Ii;+hm@8QlCYlHe~hHoamO1Uf76N@flw;v(tQN2l7CncUg*opxj-2Ca=E^(`>~W zxDeRjROtU>vVkA?=l!3V{RuSL;E;gb)@8FDzV*BuWhP$f+43Y~qf1o&OUMSm%`wz} zk?{`I64bGFu%RS3Hm~d7ed}SBVuk#n$$20#)y8N6n_=)xpXce?szKBJe2W9xOwKob z4?*brS0kuiYHUpH1>ZrzC3R!pY$ADtCv}#L`k(bN+OqO@>mxrdEx0msBKES3_zV>- z_CkoB7w0*N@-59morjzj8AP!bK1I;s`CkRu$r`m3G2bdb^pv!v|8L_vS(ODoI7WBx}<=OqW_{&Xv~+)FUZ#$F!@V22;ty z$M~pYXx)B)4>UXV4+O_`R6zy(L3NuMHFd(aPMo}DxH|vKGtj880LulQCYwKvJ=^(UWa4j7;98;B= e{*SLG9n%K6rv8|15+s4M2*qLObjoy5J?;qW_s%xsNr>pP1 z=Q+>kL@6mqA|ntWfPsM_OG}BVe2wG(9cU0=XXnbJ=&u3ZQA*q8Yn1xmAu(Yq^xs8y z#~+qoP0k28APV2fY zc&eR3XiB%0QIw2^uyRndy_q0Yt_GknM9KNPEQP=CQ}4}()2z>pOYgnD1Sh|`Q(T;| z)?UQxHtCJu?tR{U&WXSn_Ry>BXW5ch<9*)jpQy;lK^rZ2M&T<;v6(LD zHX>?k29z$h4Ojj*e)(Z1f-+=}WOpE-9k2ZGnX&Ea`tHPkq2-+*Pk4Gn;}`i0(z=qn zKo=S{P>YDkV`}pVyX~;9ld~5mKW+|9@<2*pA|TZk^nNSo(4O5qbaH6hf!ucHJ_xW1 zqZN|KGygU_=HlCWDdx`CHGDE=IpJ}~|FZ5}*R3g(U`ekzqW73OKr)SV z$d_zC>*C4tb-3etn)Erk=h1wTdXOl4k9DZ(Kz^DtrJ3W$)isVDiyJeH%Xq2ML9GA& ztnF#$*}LvFdoXb@>M`;}{sd3|+o^2e1yt);y}UhmIk?c@G1GT2o!*Azppi#V9YGBK z;dZJoN-E&IV_A3X@oy*l=FsEc>>cfM_$`F*J`1jnF#>fbrO2sa0nzmP*L6J$p7}S%oOoXt0hn?u1N69 zee%rborlx})z|Hm-GpWz;7RgA z--k$|-Y)t^pG8N+DpbC$w{QvcSb5PLlCj#?2zAl5Aj<*ghV13eO!obJb2ajIG#w3C zxrA`3^Y`opwRjGD5(n|_X)k!Sy7aopUc2uwwwu2YbMbmj0fxO`2C02vSQBMGDltQ1 zP{3;$GJTf(N83Q1R@~1XzI@G$Y-66#py*L~k6f;8l+B#NS4scFQ+_^0_cDG%mxw|J;iG=nhGjD!JbqnoA(c=n~vGyjyRVRJ}+7$(Szo;zxR<8`C2 z`9Q|(!_+*89QxVxWCN4z}f+O9l8<|$l8kCDDAWPJgLnLsUhYMF_tEye%R&JdtFDNghu#bA zTkbAb{S;&i{?721wI;w$kbuZi`_M7J@yB)g)8xrgm*YxTy#pz?Q7>$F8t3ri)!$pv zxQjqdF$V%mf*tn!jx#9|QZDtQ;*pBksiUvOphiGp(Ua&}>BQp1_FeHw`IA3}lqnt? 
z7ic6v;vn`t+L|ZrO}#CRv2#fqH6{l0HxY8%RI-zBUi6Uvl!K3XD1S|^O+$5+ zu~4&BszxLbW;>RjC-d1QuOJ8_tfigMVJtbGsDmSJJdvD#>r~<1&q5c!6g~)M!A`j= zYrYHQj;&=kTxC2dkJf>eB|q9gW>CI)%dLM$^MdIdcp$cRoFmYf#)>jZ5O?WzCf@gwTFW0Wj4NN=gsa`}#WcWA(jJ6#ZZN)YY zEiveq(EHAw+iPyk&Uc0TKaRi5HjTjY^_@t*NA?+dHCjgUsI40cdIIV=Sy*{Ec(%Ou zZ!sgD%-3Tr5Z9fF;>O4q=(cOQ7c8x!5NN~bOv7vV0~v!(sG>1C)T=&+C0#4`56bK2XE7XG z(NJh)aAx6;s=E%O09!UutN3gQyczrt>?dMQ`!~~{KCJ4@t{(&Ye(*svFeGS}KF{TITd$G?< z2!$hPsP~4DAJ&#|G5GHouE_|kJa7D&48b;h2JfBt?;kYNq|AN;-fE4O$^Hlx<2v{W zP~Hm2Vs^7Zh35}N+{k{RY)GHGS>f|OPz$u~?UvrCpno#a(-Pp)y+wb6J)yoiadT+d zgt?yi#v4z#0H`pwV==}NA0%vHYP;mUuwS~;0>d-JGtdWLJT~X}@!(8-+){`eWiiEM zgrvV9PyxM*Hos1`z;{Sz#&Wz_D-Z1+3>qZW16b8pud5sL$0Gq0Aa_adWONCJR-CD0 zt}6}cNf;H~?Dxi}GUqJk%$UP3j#}<2f^LwIQ2ftsTZWiPyRmO3VQe4mxCYwfSp?>o z{1I)w;X7b~8+AQwj5wTNFd4<%Z)AqMRFC0U)tcl(bCdMyZs{yndYxuwFIu_3Z@A(+ zz_(Wz2^hB__#!mZNpdClpR9p~+K4W1gcW=lA1hn8xVGbpokA7<)uWvZ@#eYvZ_Jrp zu@LGwo!@3h7z%i*?k>HPUI=eIb@Vd-eJ{~c3hW%&xt~o~`;l;qP)|pq@k}2O{4I8} z8nnK54u`6XSs`K`_|9-SsW~i7s|Yn;v8~HcQfcJozfO2fbtNUT@e5gwxrKkTh-{^U zKry9DJOgvOY83YzzT~HMH`C9`Zl59G$^CuGPpQ-e8u$xOA|$*yhlm->xvKM;ZMi;| z$6txeM9C@9@am0&R0b5b7TtKh-RCd>{=wVG{dbFp|DEgvmu#3xPUNX=?kONLODUg$~*+0w$rSgWWcsT8Ogof zcu1d%5!Ku$*WS@hgVF3^1Ft&IUalio?h|XQHX_Ma+##&&8mUC9Xw&fzOh3|#0H*vI zU633I01BFA+`@e8-2KZ|5KF|$M!7#~vNKx&TOyPAy%0Nng8M{gWzYi2q;ZyL-viyS zKs>8ZEF)2?k4)I`)F#okQi)iB`M?QE-QP3h&udz6y@wam13YbkUf2drGB zG#yE0lBpL}CDKW`RpYB56D}k0J8>JtQbUJX2djB;#2N;6X{;e^8p}Hh3i+pw(o4DJ zM!HXKJC}ARsVcmbI`@3tYQ$O?WAjC$H#gtB8&zHlHP#_!BejnETq3DU=y@}wkH6XZ z_Bf>HeQc#RdwM>)i;&xdU(W09@DD`lpUHA7EczpEKY-(^ar5dfTZq|NVuVD~23rFVgXq2E!e#+8w71NNgVxtiKqH5 z-&_B{&4LwCR1JWB^SIkLXEOcHy%(qa07;)lb1F5d+wkt{yadyU0mXE9oC#l1^|AHe z?&_!NdmRXIR8ro^&aMg6P$QlWJ;8uczYCNem<>$Rb%)H80|V$@1{#Lr4Mg!M48Z}4 zHIwGScT**Yw(*L+^N85ck=w+TamY$qq)md^g}-oc*Zg!jbIb`el!D{!x}O83K92O@-KH8Gni;!g<9mU0A8Fy}fFD zEAf~Vu$k{kVt&XY6u|j4&!o(YY{@r{#_Ipq~J%jiVBG;lxfSg*~^Fny&+|y6)>Pki4A{ z*_sTGb}+Io?cd^JUX^oKmDddu?$xN!*-#r>2(QRcLZ|&Lgt^_i@9OZQrmswHO%EXc z!+7`=51D-5tc6=e3hm5F51!gO==hAtvhEqsE$)_g-hFcG@9ENxlXoY(&b}TKkG%VE zr$d_`x_zhNM5!PlV@ylP%%yAmJvOxlFq?1MUkFssoy(grh%I)akRw(giYM+)&D49v z{8E)eFRuR74iPK!D6~QKhFkP`Plprbjtu4z4tzforhlGYmOFAO`9yy0O2ZG1tO{Wh zp3F%1a>th85J8WYTUwxTEHFyWa5bFw2Ax#CY0>qQrEsu z-Nd?Errw!B#cjCF{Lib8YJe=jc4*g?KT^vWf>6FNgIXvr1Oq_MW!+Xxoe>36$oR{d4a0TeJo+lso0l<1-G!R0<8GLIr}E=ha-Tg{Dd?mSH`KMoU)%t##)UhGT9A)4xWE39$XI=nm_~YZvd5{ z)nFd8WtD6;mUpry;XGgh`GUzk8IR{#70e-U=G=Jh@CeCl+Bt4^VHF`?a_CBUHdD;k zT{d0(2G6A(F8`ht?#EgQa=2P+yV$c+cRg$Kz%Nn@&JXoBez-QDKnxy-xbmU7_#goc zjc3|JkKelWRqz)~f3qe#7vbC=IMvMyZmgSff=?O$wHgw3bNoFcM+9)MMF3s%*V4S& z_R{g&fQoqbnqUy5){NR4J(|=?ESToFepuc zN!$82or0?=j=>`YXbu}`a{pofEwk#{Fc>CilLod5488sqG~)UlQi93mSX4@^#;m9)uJOmL1Wfb_>l}7gb@PfSWP&=?as1lKivHqoedlo zDhO1m;KG*;vMUViJ^C^zobssOYS}X~ADU7akx-8?b2MQcdOzhd zHo&NVyE~uPK{sh#M>W8^eZ*WvBBA;&v<-)mh5kj!7@@U0ix@@YyhL2cJJr~m;_HD+ z>-LaBZRI1}@S7!5hoFn-Du-wQL+NVT21lw0H_j0rO1!z+zQkZz^!hmu3f}PgqJ7RJ zKCbxOYHuLphEamtG*KEtW|5VQu8temK7dvCLZL470fiYF8S z=P8mwd~^659#}vcrab#B{?wvM_(LIwW_IV;H|7w>Z?kXl zsp8H1Gm{FKIY~?74$zh-55fyNVh9iqMI=(@3_{}OGsm!XEr`Gmy9Mu~p8&so&f>th z#Wydyr-%AOz|Opi+usjcu8^=t;M~}LZ@L!a`Xh8_qyW`otO-#xf%s)F@m_9kxPakj z0X50IiXV(P_OZuU8S)TES=3Jj8Fx*tYXLvr{_3bn6*DGDrk7Sj(+3n#tB~;RO5q&b z0G?c)eo?!1X<&~BF+Bk~D>z^SEhqdXFlNiUOW^p~0gNY*^VMu}BaT7C?I?n`z-ZHxe_^BAVt4vT~~Q%xqxHi;Uzm{vF0lR=Q$a z)nDDowuR={h*a9)k|YgZ?4=o07c*f88Oz<;{#Hi8YxR)uu^RV z6%?IrV7g6+&=$ra3bgH%%_1`)9GMF+EmoVWYlh|lE(5haXkUL_X;FPebtcYFF0@^mHK`ee+-{l=_r?0Hv$N9rNHYC+ljo2{`&iLtP~ zNe8mi@-zb;(u9UT&(LUyVp0ikfV;iYJoU{XIAKn~ti0)jXI^A{^;~^jhW^56lLaAR 
zfMwuZk=9;eXuVmvN>rRMXX7VqnBzJG`NsfOBRsDiSs=8Wpv|=r+d8_^et9A!5Za*y zGiBE}7V#6Vl{1BSj)|oXA%q9%sro#Vf$`W9^w+$v`bXGL_1jGL+p_kj71+Lyz0MJu ztLMGye$eFr$>51OA1n1Uve^6qB9-gU05Ns4{_Mt4xZU0rO%a!Ma(O6o90g_G0rMZvAM1mv za6)}(Cz*)JEk~{*tTo#{d}o17<8A*l6nE*(8@SlvC>h!>KJZJT=brm;N}|hTt_|5z z8;ZOu7YvZ6O0C2vGTr*!sQM_zzGiTeMyR05n|a_S>!I0wM1cD66mlU={zUQ})Ja93 z*X>9MH|xLE;(DSgXt|7J2zO2B)@K-VHFKC%>8NJn&ht0g>Z++_789!^3oHk}FTY;r zUk{0wx(&JYZ!Jn`76K^{cu9>Mbd`!^puq=~yu&Gw=SMgU8_tYKM%w5fqf(l4$tkUR|^|ZLA+Xzt5M5yD5#f)~9qzv&$-r5V#^9_H1H+sQlnU zA|91S*r0b-Ag(J`ZLx)FI*AO>Eyb74PYt#^bGj<#d!$EjCG(1DzWD}wsbpimYTs&p zXXM^0+x!s{w%PzIY|CUIrxn6SV$Z~%m_Lz6?-8SW zWL_<;Mj%Ip8KH_CQb%Rk9KbrU)$#91Ga(2lp5RkBqBn>a0 z3?}D7(6G^*+BI@@*SP7tdF}RAq`h!%jRG%9(U@C-ch26at2?Jh*oW#>FI__YVVUg&_DhOG zFK;S=1!JR!UPb2;#;-v&1W&@|B(LJY-kjgs>0OF6( zht~N`l`iB@$DRGsJDnE_re#;^^;m}kwqy81+^@~U1f4R+jHqaXq80(C|KuloC6RMG zEUWGW$I~+65PXnpDCux{(|F`}j()7Mu+TDuxG*7Z`7wWmBtUz-$@RW(Knl$mfS>Hl`h^9NUyayqNpm8p%5 zy=KfekOVO)0S1ZqDz=f42xD@d1qJmlv~4R|Ag<2@3SkCIbJprw)VjCNXBTYm6q`P* zu=jkQu$>2?p1*rrgFnJ;lsN`X!UR-d(h?NrsAkF;VhohLG-?!Q#;J4o|GtL2MqfBX z=yKYCJEe&>>Suwp{a_(1Z?w0Ug_+`A-bc~)SL6N_jt)C33%6kwsb*(CxacRVHDeut z-#+!dgqO|HnJe?vg>)st$%ep&5wtb$$vAlAHHM>#-7nSYhtlxPH^d#WftROSdy9(o zxn(rw2HX-QOA2vwNJ=R92~eEejL_-jsgP&bgWrL%xfXW6tFQ9!h5v=Sf z5G?yvLnMzXk$Tr6Eqwi_F%IwzpZ3S)gz74a%f3l!356uZI;rr6elxlSWGZ9dX&u$= zrQ?I+QNfDh`whvnQcmO&q#=FUWM=$#mq~kedG?K|yrf5(--d%u)jJm2&lDELx+RDFX>4B+;fkh$^(JT4Ev#~NbdXaiH4c+0 z0=nhIx+ueFvx;#diS0csF=R7Wdvoo~ve(0sAX3#JQAUuZeVQ_1N?|8>724AbU5drT z;=fjm{eNN{)o*%EtgQz+4VxX?FWx_h&wc&A8hRm;vo{VzTU(p^5Y|j!$CUa?KSrkf z7wVp|l2j*(__D7{@W>bL$%;HqI<7`l5D~v+vnaWnJT6(|N&brI* zTiOYj!*QmdDi}zVM2L9@%UHDenMSLecrfBLMP6<6Xw5f8-(>v_i`-yNXbo zk?e{TkYgjeKTl3yR@>dlR{55jM_CibX2d&$ny^kDo#FA3o$AQC7*}4Y5Nq<07Momq z!VfMnB;;r2sLd7v%_5!0YMS9cNAMJ3^RM>P$i)$qs>WQ^p|q0rONomRu;uB$Y6=bUbOi1e-rI--TAw_(klTuB;>VD= zaXhrN#%EX+b44U1zFc8TgdM z;tE)Oh31Iyh?u_n)P!;`>7$dUNH&z^Cz8q$?FfV?mtIR{s=Uc0-u#M^2y>~%Fjh*R zMe`hbS3^N>Mol=NqLaJ*gbJsf4l-CIBVnvph|AiPmJH8CcNkV!a}sncug#&snn8e2 zi6^%=1D~+0?!P^XYl&!>I9C8y1g5=ahW@=$3Vei!bORh41A=JS4)Wr<2!0cI91%QT`^D2)K}`)*9ZB;9>$k;0@$Ql=pz91$Cl1X`Y&_ zReKN#X^5>8Ie=HKCKya&k{dc7X3=`D7gLE@IijscvoP029H7O zln=i4>{hDQa*9cKVg7;pfQ=16J>s|0!{Db9&J|40np&+ywTfNKf&X|=`7&iODq>CU zw77}kLal=>R-i@#UUt^hSbtB$h@Yk-6Jt1mnt0KUoR2G}6bQ{7rc2|ne_rxzR2ZiS zYf8fmg%^&O0=$uUG3Ur6QC3NdcEs(Q& zRju$G2Doh{bKaJ0_%nH!O73pZNU*vD!X!Oo8l4YIkM*l7LQ|WY5WdvpT;d+UI&GfJuhegYo(F&N+&80@TV;xukIB&i!6@+k?K) zQ|TT|#H+G~*7x+ERqJ(USGQ3H{}+7Xdzeq7L!+4Hf#X(On<|Ww#UH(OI>VE7!YB<4 zA@z8TkLCN9D&8aMKYvR-8HJ*F#dNg}@@F;%h_KoJXJ|G zB2)92^_90I3N4A4#98DKzdnUOQc8Eee0@tE8YSSg_OO)aGvSNOFW7x)rjkEFCAAS> z(249n>%Lzt_-r1lgzR)YQ0c29Cf+(|N_;kdI=-}Px zxO-o`?YI{T zSQSAJ>x?)ClnvkPFczFKZMMmmBzX=}ZP$xzNBmyv1 zUj@ob(Zrt!=hR#6s>Zp^QW=Of&XqrVkEc$w{WY;v_vsqde{H%>HH+El3|$spe(o{e z<=)g>zmJBjq9OY|zzJsf+`>MUzXj(%o&1^yYm&{%Lpbnd+d&-SkTzAP#+w{(bISff zBvX=hly{&-gwiEWuQyBMLnZrx^4eW(+lk#q{|^G7$#&0s(7kukgGa_S=ET$r#9R;E zoDJX3*>qQx+TKfRe)0CF-Yxv<>_7dR08FEz`uU!QNY33Du5Sk1w)@N^N>puYT-}WS z(?_^!JZfva8qT4!m{QFpWHE^fo5L~Mm@(rA2G}jQv$+G?r_*YH2)Q0}YP&#NDN(Q^$AklcCkd6hkr^|T~9+&{{u1;`2%nL zPby}8_zaVMGlPU{Ah*;$Dz!AzWFH)*u=W0h)7H-3k_M?MtE%PoSPa(}Gu3yV(O3>x zbkIXubEi6K)%qsX#w!t-rgj7NrBuOLKD3tQ9zH~W=Lu3REpAt8ug%#B%S>$_EK*KW z00qef@d6|ji27_pJCE>@@2l?0D-T9WzLQjzM2pf31mQ73;GWVCNZd&g>=TqX31(KK zWI^}Yy42yPYpIy#;#GS#Jg}tcm(rObrK?8q7kwm3c{!?0{Pnu^P-<4&M%d!eAg+AO zETE*(vmqe}A%lAowAR^Ss*RL|KQH?tWdf={tz_CXEo6_-(7E@eN13&*o0xe0_2((M zzJJd++5EtM?fAv`wIUpugKK~{i8vBdlbS&BF@jNu8c4CMcxK)G1fs#bVgQqiTCoBK z!Fat5)^4B#QFz56GgVo_N>IHN+ITdP&6O=m3ag?a{v_UlAF5mesr{KkOUKuK=BjT3 
z)ha5DcX7#&hdgHe>0j#we-9>YSN&V+{?qK>NjdWcTRrAf0&Hdp$&PprGq=qm0`zqaG^2!@Rf?))kmp3j!Gaz2j;uLN{+guBDkyw&3)AA}0t`z;SP z&rOCkF1@hg`-SMCe&no~{$4myIe%NZRM=e~7Brrr8s7@)MYWK~B2$0Cks7-V{gtaU ztwCALV0i&vF{mc%1E36kgA%FqmdSig1pBIf*}tyT%(`^BE9AeY{>XDJPRklva?Bq^Hb66GVgupZq*sxaW9|5i3TbQ+A7r&Q7rcsLMYG@|)2_ZP^|RliBtxK^ zl`m~7uo~GtSq_;jkW$lHP2+{<7hRGE!ZEg`GAg$+9DpG(1WxbdYOYEDTL?3>xO?mX5Y0pjt>WX7K=c|hwz{k2h|_z% zqet5%6Mgw#vp8BGTs@Fe`e0(>HGiomZxeqwSvuN$k^!s1X3* zNQUfX%t>cU)=*?YsT?!^J-M^jjBEq3Vki6;VY|qYTRX|)X)?1gBC-{%XAj2~Vq%z1 zq0oEg{(WHt!79H5#G_}xT!*d6!%lyDZLoi(U{Xv7$qjsv6 z(0l&@<}S?Z=B_C$D5&EDs}fI@Ja1L+29}_;_kzm50}LN4qxihXk?B6*<`rNLugnM*dx0r_$#R;OKlQIjKM+S?Mwst-k^`i)x9M zGItaH?iWA_HMLYtUUZVO3YRABA}MbF(78VfL^(-M1zXVTz%Xd5ywAbM61`P%s__gZ zDaDDZFsY0Ff`Y$rSB&+`pNOf9n8jn&J$%{Z}q9#zb9^z4+-)w5Aw8JbX9`LK7nX1S0 zOf(V`V43Ae@Oo{nKQ2@2X`1NDauI88LoY1~iu8M8KM#M=rPI4SViuaXMIgf^T8RJI zhN-~~iDeNI_EC(>B|TaPZymERk@_Rn8w=%XuBoBm1{vHhUvbtX9jw5MH(WRAt3BvLDW3e_!uBNUj zbN}m5`e{A2D6!9k-b*M+J~ie0x1gm0Pd5Zgc+ALji}E7gt>&79#pLEfRiSoT?H z`fr@ejt3TC0~|jI8ThmLz}#>h*Nn!}p4XzK=<|bBmaDs?ZFZ3{xK(L@vHN)86k3zT z2vZ>YxI+>ZN*0k;f=~DLszySFoa0gZHUE{Z!W9p3uN?32i$t{}r5q$yw*s7|CUe$P ze9nag<0d^HPp6C;?nC~=>duvou_8WK;^CFR*YrtPt%XUM{;ZMd0th=Ks1lx#agT_q za&^6XG&Ov*KN#N!ndkXS8W?3Egwv-W2~2#}Xwi8NM09){oY1gYsIvX4+P}Yo;XwY6 z*Qg#ZP3^3h*#s zx=JCY>zk03&T-0|$PUJB3}K8Iy7qrhuivJWjiCf0T+G zxpe!vyHaTjjsJ3SggS})x%YkSQ8<=FBd4H&hoPx7$j?G@bn^Sb`j&*g8I^Wl6oTa& zMLeTJ&>9TH7!dx}`%7KC8{RFEptEVu^uP@?d$0JQf5T7S;!~O;}xy~I|A@aT~q18)#{S+(`zK7ciM|}LA|9Fn1ku0rkcep3^uEh z+uE#SY=dpKCdY$ei6;5dV~x%Yy6WH9(VTwIT@Bc&GCEfXf$*x8t&YB^>|-}5uSkI| zZ=6>d~9t{T}QaNGYlI5 zzhhgSsd~)hON05G`ls~lIT@YpOd6Y0qlWj@_Zcx2w{hQQJr#W->8W8M)ZK@7pT$-O z7Di&0Q!K7o_}u3aOO9O^SEM9o{rc{gf=kWV1WU-o*<2q6a@6Uil&cZX63RmJgP2GafJD1;%_3-Y9VuE zxf@DX?fKvP(W9medm-WK*5?gSX-I-J^fa5X^ZbhM7e^KX69R}%5SBwS>q&jHi!Drs zNY0V|5Q~StE@}q!`FFbi5oXu_VSjvzbFLLw4W`0~q7xxJ+VR+$kw6PJ-xt2^iH$_y z$lr~_&ZhW#Hr(xr_mz;N9@MA4mQPR+^*_=p;gtZ<#hp{>nlYEuD@(M$rSk}iY-?w@ zXR#JzW-0j!oKMxLL1hS2%n;BBjK>>h_v;|JA78Nv3F`qzJHe?lt_AI|jv-YqE;Wc2 znPit`J9jsIvqB83K7x~jpKADmIt5-b;_MkMX2K}=SE%al>SXV(?gFwki!cWuPZcw} zH6rr2xqtr%UD;bReJEj2GF5$RetgB5s&mP(CTr@6SG9sAYGv`wWgg3O5T9hkU#o2} zxRZV3o}>C(F%8}R#I-la&5WXF4f^hWr1s48yiAt&df&F1c}H)S;!&lc4t5IXu{X<1 z{YBS*R7-zavbS0|l4k&=k|b5IR1+Nu-xiz-xY!F4lr$~ZG?KFX8kO6a@%g7VwQ#pWpk7V4yJqcE2rZRJE5 zsUcH`tk}$ch7d*5#hUHe4c+5sTx;F4SxjlXV8vmei`Vxq<{1sOY@_t$u@E?nm2ZU- zx_HrnOrXe)E!){mVIu(_frLsG5r`_7*TKL+aHW(N% zsBSi_i*STlP<~Oe^Rk@2-@jTsX~xD^6cmlou&nrFgN}M}xChd!{zWwC34a;EKry(A#Tn z*Z+^)74*Mt^*p3p&Mm{o(2LbLbKmCQs8Or#`NE_*R}}18h+=Bq#QdrIhMl>AsCka% z+uO5<5$qAD|MIUt*ZOTy-`MbXY+;Hk9Z8Pa}b)LzgPK2oX* zQ~lHBO}Lv+okyzv0Y9>VL~$cy9q@2<5b8VK*sVW4{RL3Q^gGp0=-gejES#M2O}-7XHiDQ6bkWH|>)U62B1tw!$Xu zbT$$1=T@HjN=T$MO_s%Zn2xLI;A+ZyD2=BKK#!;8o&z@Kp51qs{Q8yyporZ#m@qKz z;^Vp*3vi?LKC@9DpGecU@k9)z4H-IH4TsR=2027W#3-bihKjK0OO79&_0__qY<`o~ zgeVC{{E}+onDFtp>D672*RT+o}f#GBf`9-@BI|Mp1FR1lH^YK zSZE-H_`Q#P2!!|MyqYu5H@hLJSF<(>lHZ&UNqY^l5z;0Xc^GCOESpj@r~E655kUQe zW+iroeh}=i9cTvl&-1Fp7Yr1a34@Lolai#h1EX9+1Pfmff)k?gML4ugM2`_~vhf0o zj919TSR09UOmwM<&C-sz5tXby70!jq;N zQV%0Q7z!p+D5D~P&Sl zt>s2BZLwRi+Z<;k0Npw5+TVUwc$0D&At|KLJBhB%V#<>0RrxV71flfB?XgB``k!%# z*r&J!(BG|a*>BchaU0V2fodQ&A9a=P7K4i{R*kSEHjNReKe?g?*co)Lh-&!kwVeHZ z%Nq?MUB*Qd_`i?_G=kfHaX@e?qQR9~Z7frP^U6n^_wyX5LD+jn*4ND=d^Xm*{WjI?Ey z+Un~dN^ANoF}~)$WnMDGjk@d6Q%~uRq+Wi71jSR0FxmBT=wjQ?S6 z7frbi<`yj@xp6a4tDJt}1^GgcqOUibuN^&o*X8fjzS+mbiBel^1_z2}n;#g9SXGx= z{88kM7TrZW1Rrg?cXqq+UH0sBN}0-xAeZu}NTAr(F-lUNSZ#X8bJbA~TWX-vwpbG_ zDvi(v-^?@dXkeqbaB_`PMH3(D!4C$5Zi0<{BB4Z24%zis!-Lnv(0(%_#35SGOQu|> 
zCI8J9mq}V-Lf8X~ugzPC@bR^2+Q%N3Ac`jGbU^e(hBakLObjVV_Le6z?nRBgUlCbb zKP~9-aFerY=SzotR@%O{)F0~pP$3oMar!UrBcSE?<)!O}2n#oJ{N24~zG`{;@Zr*u z0zYy^zJEB)7W<2J&=I>Q75h`n9`>DF_5~$pcXyH|Xq9bOW%D5GTMI&JNN~VPR5cI! zXyAd6M%ZV+s_OC)aog2w=lVu$5i||;B*>YEKVLX`x3rW8gW1T5Ki9)D=Q$yj*(>~a zNpLh4GMrP2S82T~b~1zf>%aK5KPO$)@ zrz4*8o&VtCcRB7O_WN4T!;i$z%TxXg%`n;7w59^*5DU$fC_7!laA|KWlCzeoYzftzXU$dBCU&G`A_J6LeOZh#zn?}gOZN!AxeD9p<@<1zk{Qw&Gf#W+VJrVc< z67|rsAKi>#Y(lxsU(_YxE`&!YpXcs}>1#mlSit{>jq|+JHO>jngfxaLDyU6S@5*0l z&+pADC95wJO7YiI*NVSGYvyQ&NM+hMXH%fK^e5FLioVzDqSO+2^U?9znQ#88nO%c= zB}U8DSe($0F%0?}7^oIg=^Rc!?kI=rFl9;@QuIh<4-S?`l}*Q``zC`YV(v`#6$d6| zJD2hoE(=X}}ZH0`RjHa#oIL3qz;a)frbP1y@~s$6Lu5vXXj`Vhf~D zu(8*OApkDlEc{TMR{E{fK}V`di$+#N-OnB>TnYPqDZtx*_EbZVO19z zChxfxT{QIe1f576ox^Cy%X29r*s-g{)XnO;GIF8~6{dYMn-f|wQyHU16Mp<7P3{%bO< zDaZQFx+r!-qauyo0iANhHG)*V4vOTL-Hx@cgHFD(I&l_;{FJ_g%f#Y$hCj?N^WlDL zTun+btG~d|r5ko4QolE-e+WtyiqMKsK<`5QlTR~>+WD25BD%n{={V5q)UErMbm3Hs zSusvj2jB*Z&V={;Je}GT+w^{3bqMXE%rEac5ux7QR?dlCYyxMxD88%0GWIIUS z8ofG2PNGh$;ZCP*i-1i5#@OaVVB5EnH~3FTKmVKc?!BrLmlBEO>b%luPi2hW)#=Yv zMu(u0d|?MZZ?)k(J;%oRkAf*SW^lga3Bh2NE8ewm-W6 zRvoKL{c6w8k9X}s+l5oe>p6m=6+`z<{+{EvLRM9b!yk68Jlte{&)E`$6oX`oIA^2B?YXF!f|xf z9R?0m-D}8iho8Y$^m)MK$|6*3|IZl;nc-HMw#ZOnL7UzYBcqq(q(JI!Mfp1|t#EY8PAR(&pLo!X^qA?5U z8xGw;(&|*#Xnod3#iqrzyilcR|M>k{L(9mY==Jy?5Y_M)BCXD>?LWh{Ai}2x`!Dv{ zO>fM@m`I(?s%YcB|E&4m0tYAa2&;B`$8S}iRl8a26z(`M`s3N5h9*IWc3OBZ0mTvQ zsk??(2@!Srz!B|3iK(2nS>rgIcvB<6v<(xzcR#6qB=bN~M4_o7fH+|1#dVkYiP*f- zAo*e{4?DYQ$dSM3HS3>ZWdi1_x=8Vs_X5FYwt zeXYof^vlM&BlCHeh6JzV53$kE`;LBqc8W+Uz3Iz0J0_4)RU!~PGLGQCi1$4A7^U{0 zc(klyjyC}>{T{fVDJeAz62oA#y>m}B&pk~KW|o}~oW=lCAF!ga(9R-cZV34U_tw-> z`?RPSXl3>o(Dk!&XBQR?`8d;tojCcB^Dh{C?CRQ5Mi7mu9Zz~Iqy)J6Wi5yqzoqd9 zR;%Ue-Qr_kF>tkM_#&{Zr)=+jLTgSpPMd+aB?o|2Jw`TxCW=aoCx_8gn~aUX#;)M9 zCRw?1!)z#9dcA+5OjsnZL5Vb_e9K!B0fZsi#t+!b^bB!JXKQEa)&SUi`*I6Xa{L{e z>w>}sVNAe$*VL3IL5r`Ja@o5>mu0IG{^4Nd_eL+{SG z^u2WlW)#o5aR-8M90`n=Q@F*QOZ8aNu zUXvq=l#%T}ij%vRIx%^M%ZGFFB$F=;etduen)CzI7*L0wMm(T;TG z3zQMErC`~j<%=mFA58p90E~V1JI9jJ6pDy2#Zv|!EH-`zb%3>*z#X!js5(dRMCQu* zLyJU(BQJ)GSr+~SEhhhx{Q#GN6|4=E!`Lb^wIir3%@i+z4S%t6{Zq*sqEB_W(hl5% zR~ejgTt&=@>%MfHy%i+AP|AB!N#Mt}anU-J8-OYr+}&Bc$h6OP$>f!ZrBfxAUF+BL z)t|H`Tir~T!)lXe9NJq_0$xUe_hB&JC^@q*@&HM>45sH0;T^VKn|1;k*CrnWK)|%vQ8BDCym`JABw*np>;b;KBt?{+yRx2@0!sah3fMQHv}K+rYGwK zDL3$Klk~W6iE4F|=Z=f#=4}q-lV^a~M&QG6Vh%xEHl`O0m$D!`dCigLq#^T7V-C_w z#dO(&%NvraONPvl?LV7)W40n6U47qFXmO727xmLljZMx8~9 z19`y`!I$A?{9TXY1(8M{;;dOlmu9T8It8<`WR1!7-{;><{63_ru+HPPdLS6XE`h-h zb&KVKy=eAfi)(MIAiR0~!5;{9YvREuhyg#zSQ-stKSjKm=kGG6Zr;9Q*?KqguXF@u zw{47LZ>#r>ey111J@fjA{oNrN%Lqcw1o9^hjl*E7)N|Io_TYx>rAI-Il36i2KTa+I zOjj6Mv|(R0k!#%kcW?GqO*em93hooRzYN8E!f@mrot7V_a3=`_>g!4)YEG$3v6W+nqBwPCRB5;AZK74>@S|S(XaYuQu^zgf zVM&C5&d(G@ClOltMV!P(raXGx3XfWT_zSrlVC$GUwGNAWDi`=(UO)EzM(k1@$uuc^ z6}HTnNZw-2I4k9XAJq?bvYs$(YqVrT6*ZotR4TZ2b|)jA!srZ*#E~5{$A3GM!$eCR zI;y!p@}=&}QX5kC;uK1pIib~UmK9HDgreTxAwZRHAhQ*bBYsMuTz&P|iyC53O_zU< z$;4osg5s4Cr{*+MFr6?wJg^ZI%PDDig-9QUw|C>-QLwMOkE+fsTilv(rtBt?I;;r& zflH~#np+JS&4b|pyM{@?%BgMS;;<>-;2LWe)YP!j)DcoKUBj~5x~ecPdC0qcN=p9i z@b@Gzo~{S02D~j@p`nIxr@tQ|HFYBGwDt8+4_{8|{q&;MV79S2b=Cu7SN1c&4in0l zwEURRq$Av8%dH}R+V+dvL3YTu$;rA=P&B!Q=opXHy~_tID!%hA)|`hA+n-Iph^8md7@?ozw=k1vcL-qcSGZucvlk$a`pLzDEv~5TRIdJM#T_aJ z1vyt!=gyU>p}o?Ev~6c$4tXgWwl5H@N`fVbAM(|opkRZ~ zx4lF7q}(7;`eD6N9e##|cT9QkyVWw@o~yUP7(Yz7a*_hK<= z-gXlyn~U{~ss0Upu%b~NS*ezj2a)L5upMVs|0r=(kGt8E>h>u%a6#2td=~%)LgQmC zX9wP&i8tR_97oUX00tSo;E4$`l^i6qTKeU$dV6E*Kj+IMC|y1K*0 
zbQaep)PZTsf)=#HLaKaA#RZEWY7}D7pE*9ZGwdeXq8aKG#ZF;tz|%8Za^zIfsM^F;iOauiuI3*>dQZ^x&sF`=KMr1Ke%`U-(>FU`$;x@&Ep0AN}VdM z&Jaxtl|(PWBW-_GK#HllU~O*KNyR`nHD;*@^LJ^nDFr)eEr>Go#}PL^04t0iBpeqD z`%vAYdQS3QzUIcU+x0Ub*-6d@p<_xy^DKERtvAXm_OU62@m#m5_TQH92%AsG5k#kE zr0(el97_ZP|M6CmZ3H0D%oi{7=K4-+OM@hzRfEfIw}}&rT7r2=1{i$E1Pxj=}5Cc55W%#uAnATgB^(*(Mj-*lq%icsf#gbr|9MaPh zH6gq1s29FRIjtvfh;H^3!bNe}SaT0I6-SU~Q_169;-i z!FhOFHt7ASqg=Lu7lq?sh z1PKSEdiB(%5xCds)`Y%L{H9VHMMNoiGc3lNwUotetCXXD60-;rps7Qhk$a8`6;~;J zd6%}Xcj(4^1YJ;qD+&1+a9BV1jmj&kYMY(|(fvchUq!s29!7e?V8~^n1^A4Xs#rKj z^N$wbd$%V~cLnZhl8FzssSYA4(N!UyDLjm%D%`>p>CkKWn>>2-_R<6gB3^TVstbNq z{GQ9GlNIi}V|iJ7CuPbZCiUBgju<9o{X&}x*wSThKh{qV`U^r?mn`ouxj@PV61UNI>B$9SN}I9cZcYi}Br2Syv> z<*VxEb9>jPtOpaHVU4NR_uIuMr!@t~N}_H{H5DBF4y=DLt}y0Hma|OgbfQUg_)6pc z2*|JVgsA=+q{NX9j9`$t7;jxUZuLd)*oA&HG`fBVud4QG|2PKr{v^*|lQ9v!tL+ST zb*-ump1mjvWIph5iTn%VPu=;$=EzbSJVs#~zQcHW!y70IhbKpT%Fe)@3G&$NpEDkS z9ZAbUi&kTzG~K;_A`IL|Lbj4Zd!bETAUlNR6SBchtTf}MH_?eunu;bNPL;iZ`9=S} zQfYBecbrm>c@Mp4MFX9N2m+!u`q}csRWzeiI7j#QCy)dWB|B6cFn%MSpo=ikbHe&iN?{XyKdP%F<0+76b@Xy82R%~%-1VnjuNJI-xDO|x+d506?P$Z%_o zh}*98BL5=}1Zj$v2K8#6!j%hZMM~mkjJpGl-$gb}{Pu}3d-sHmRt*S;?%B$VNCxoM zHi%>p_(w?ljO<%m1~fznuIM|9WW-BEK$AG@GsS-wr_=CZ!oA~e6KA&!_khXAx*caP z%h=c34T&w|i~31Cw!S(vz>Ips$#)0=w&0fcSZ`N%Vk-O)DC}4AW+XPcb+ZoKu;y$U z;Eo{$7lzn9+(Q>Un*7a?OXzM5Fza$7BUwP2&yQD`>$5wbT&uY{7FD4OK-9%*Zo+>z z_8{45pgzO{c&UqclC9kM>}HrdVdRgjxGpxp*I0GiT^+$DgKL7tg(ZE3YbnJ|gUmUi zS;B>mNr{=HOtWRybc9?r8XQn<(_df6v!(i6E_v9MvLN1S>Ko4yDE!6StQs*#)6I?a zcBw$M)#DvXD>&6{)QmOvqecKXzyr^G9^d&;8{@Oa`acU094UiZ6fq*Uwmf$AQO84B z-Ro-gj~a_5`Akt zL`iBibHVwWw_|uR+J+Y9w1oPqyycY`B6o2g-W-S-M{WD~wG8ICc%k6-%VFWP5$;?9 zr%F(u>0P3rLPt-xgY~Qwo~%|f3`%jeget#p-5KS|_2eJc7&e%XD(bsyoUyL`HPWU{ z;AD`y@Glk=Dp-48HQ}#{Ud&o*y*-Ul9KN6ANdcGfR8GFl;-ZP^z%djy9X=9#+M#HR zjg$Mc&pP)-CE?0((ZTo&7V0#3~eu;ZzuR}k>{LTtw>2WdGnMLg(TZTUV5 z70pWa7>r6`5V@3XmPypMdATkr3N55Qpsjx_|3Tbm`fVC8Du9x+NhQMOeHuUGsk1v@ zh-}rGF*L#!v++s{v`h4ZV*k5wk54CWZ4{L~#Kqgo`Yi1FjUw zsd?~l?kJe2OVb<=!Cq zz*d`aFi{B*c-Om;L!Fp_t$IB6r(PH~kT{ zO?@pwr}r~BM{-Q^9fIkY(XpFFX}TfXQ3O7-tu2D zP~t)A*eI5st==uTEYu%EMCerl5JhDCBhYAa;&@C~*6vxyhX$wWof;NBGY&R?Q%{@Ng6I;415w>I35fX^)rf^nukXf6 zgA&cjYdVEiJbG?`c(xPY6D$XcNNCb;A-~AF(|_Z0DCgIoCfVB=hlMQUiz3tX+^xZ> zes|ezAP{??&84{i#FrCk}uYz!tAa;18mJ)zCme#koyc;(63aZQRd8 zY?=S_)NjuS7ye;dtyKUzXg^d-bn7dItA}p(vkA#Z{K+ald=uZ|R-U}Q^b^Vov3gr+ z!*ZTUoOWztus@?C0`UEX6$gq8UT<-Q;5$|MyDL@FWVBG2y%cL6@$^qiY@SYwPk21| z5z_-_)lnFwXE`bk1lAi&t)Y=HFoF|&NgOQCqPk&K@wKZ+o?Ai_bc$JsAPB}ZLL*$> zNvl083VSNeLr8pJQkVr~PTf?P=`C_k<97&Fb5{&eG7b`rKoI0JYKezU0^5*-rk&3P zD6I=rsmz9Y;Elx+*q{}(kisl2WSd+27}U=%n+^-nsdD)>^h@=Iwx5#FJ$3{FTWfZO z`p1}a#Cusq{B4+3%BSc5aKj|}Z~d4`dbcHi{b=WY(zeu_n;dGH{~+!ZiRLu`9(AWZ zwnFU)+-|G=gfazFQI_$|L9BWW+2+hhBdEQE5>+%YZk!z4^T8;J!dV^iBer=}j-yZ# z#x;nySN-{eaKi|L0a#ComSALwuP}*WUW2$D%HG}tTQ04o_?{trw@&oo(?9-fCGG$m zJTMPQhm6$btPGk)15v4Nf>j18JqdzQg4o{1RvWJfc2*1p4g%O5zTc_pSvtsqKYut? 
zKdi^bTfP!dhM0G(u1orozyqpAag9FLB8mBYE**ivHM>d%$bs1^q8ed@hV}5c49L+H z(f~;fZ&f%LQy4pDxD!odHL(k5&I>#ng{+OnTb}y$K^2==f3ruBiKKVd_R5hub;pNr zE_&bNgM|Y%C-MMw7-C@Hw_{~RV~buat*t*>OKKH)i&;ZulSb&q65yPHNN+ruxaqpc zp!!sq{Q_^WrO%~qcetIlTl+VC23TO5N6Rg0F=(7}V$Z~fy-Pd4H*IOFFX=Pj-f-vB z-prSmCm7;su?ikN^U~OFAaptuFE0NxKdet2cU4}`Sln9pHk)?es?kX8EKb%slqFuh zPf1z&U`)!;)QTNF;dC(>vI2~u<`V-;c-T-vV)31-*Gz|~>n5ujfP3n#q)qoaO+*vg zce4(>(aHGwRyBXJO@){~gVFU9&kDn-V#%VrQtm^ksJgxRgi=poslL}^=ZzU|ct&?SdwB6^SeQ#cs zykhObKSvG0+0El&fhoEri{0esD-Yq)q!{HCWR)XOz_TGyQ^i-lPnBZt=8}}ZXTS0V zoc#lg0~~I0s1@^y+A<2=KXs+|uRO$-2NUhYij?H3K<4hcP6seL zRR)N%+cY$Z>&9SBF*T>Z?wn~)R#m7VlmpHh?!q;*f>6Rrdz$f;6z4s)o0R{9>Cnj+V!=0N#EgE)5rRm5 zblzSZS5_oSxAY|@DD@L_e6X@K>G){90AxUUMQ9=E1qCIH-lS7Uo`JG;P_l4XZUxS@13ZzK^lv$ z0__$$zpCFmU!NNgdZp$OvsL4{X-rq-IqO~C6?NevqgrAaHl1HeC8=TZyS`oc4Pi}e z5^X;V_UMGRZn7}@#|jOk)RUrv4INHTRf{?kx^Rh;VwOq^t8n*6tkV%0OIg zw|2#VangMJYJVkRCRc<5@me9R-QZQ8+W|G&EqWp58i6pk<&aFxQ7EyYP$iDeOp&sf z|E8wpR?smyc8hlPt_Q@f`6uaOt_en=3`ZHG8AgG3YQvZkPtKtQ*lhpfMY`IJ5aS4{ z(oxU`7w>B?p$>-{TLFKhknv5<7E~nhW8atYo^r8K$=pQk#w1QAU!e@#3#HPMJRk9r zrg6MC>A=b^VsD2dEZG0bLS9Wwu17^cg@$E~Cf+4C(`6i+OaoGouZ#8W)qjxC+G!3F z0utiGRn;}SAh~M=g6oN|!%jXb4W&bS$ai_!Jk=m#KH&YEWWi9pGbh-%@Wk@UuP|}u zKE4B*cb`?coMH?xU;6Hr@&GB>?qm9OY38MCgZ`S=?gC8Wl*OtFX>D+4sbpK3adu4y z-{Lz_+X%1*eG?A1t+rgLr@~Bn+ePqeB{W~ryX?o;)9&5<(<1YJVbk+G8|df^US$DK zbK4LVkeQJDIC}AJhF{#>`Pz`mtWqQ+x4inU45jT!W7q}&Cmj~WXH4oZA#H)W?WLngkGQYx`d+H0Kjt?FgkKc%PyC4 z7TczEQDSNWAa>Q`6rI{Axc6JP^|Q(#lN2Cr;QzddDG|8j`n-AG+iw)e%LtF<*s2G< z*cAw`hwn~rY<*Gb0!bPVmJ==dZ6=o4-(9a+A)D2hUwH_}OtM$jli_8=u@4H3>ax}4 z;UVdkZqScBc?r0;wB!V5!#7>|-hJNbeddt;{ZFgJ@_7ruH%{ECxwn8}QNAr&Xj0$1 zG8%HBl762ZHtti4t%=p9nM(>FXi^^qGl0P_pqWQ~Yn-O&GB`9e8pNUlIXtRJM|t4L z{z1*PtfW4?Nk@`+a`o=lTfM-B8npG&HL865AW6Y6hM#cgL$-`Ct1UVM9;6Am6@xietG`Y%!UL*bg%sL}ie{2tL?hmW``xQKFAMJ~o`wirpUx}T&6Q8ueFsyn8L$$0Y*>;8uBw7lE)we}g2IyEDC)%hx=A4dm4#@Sd?ZWt{e zahr3RN+V66wU*QsWV=s)vtaX|!W|qZW};Ut4A|$|F&I`zGHtME14&benpib7ia-0I zIaTw%qNk4$nD}nl-`ba4ktIi7)m1fEHAlJB`f1*3@H>-F2$2^$)%9mvyCjHxwosMg z?ovqAB@z_XLmwm65l|B68OniRDp(K*Oe?WjY`=3R{+Brw;bnSAitd`Wr?2JyHZ`%n z?R$4G302&36?F+(o73)^@p8JSi15$%fOjFg&pG13zakf@dj>X)lpEihW5zWs3uc3T z<#kFYbAA;pD`c-q3x90!*8NEA=F;RjU zvoA);^<;haZI;p2fKmv?AlA?Dk{*^HSobQ7N)3ErL!> zR2H`VQ60)O32|+>{@wxo;QS;Juu^ASK2BEB6x1RVD6f0g=d zf9SsScnW>Kzs{Y_E$N~}BLz@9Hcs3VM_9Uma(whdtT(sycDAiq(tn-t(h6Z1;qpF1a+U(&Wv_g64r z`OA!lXa~nXeU91Tmi*4k8(b(6!my8kc)0K2ll_I4d*Ef%^0=yt*h6UlbMDb&YwYY$ z&?DxPx~a~*FZrY6wr=qE5Li#XpfIufm*WfO%lqWjSIuRvcEw+W4|$Iu!AZ-=KD-^A zxkIm4heOM=u-7VnHrJ$2*}KOd&KwWk&t|tdt^fG-jn4$SWB*xY35jpn*I<6jACOFCLz8?Nis$U7`VbJ2FCl zlx4u)F4q1^^Gv#Ml1tkOJ&G7>CK$T`&-H~R5#U%%SoTy3N)r*28%dc48lHBw(yW8r zFD2p2LBnMSQABQ&W@eOTw%dMkH#%0b#V~c?r{F5n#k8`dW-{+Zle=tOf3PwMo-;5?zj2NRg2xaqg}maVyiM?ay1wW)Rji74g2-VZApohk{`?JV4tq`R zH4QROG!#YziZ%|>sN6H4pfzEFD^73}Y4CKfqCv*_@lilHVz zac0L2XF`*uCs2DU2-^^^M`qG$UECsgTtn{6tNJUkU>x>8w0gu5gpMlK4Z(+HoMT!W zN~}+U&avryV5Y#qK{uUMs0fx&-#*Ly7N5I=T*W`r1x%Q(&wM&>fDd(;;6$-7LANm& zA9C-R%R6*@iMa}R2=|_WzO({S-mn5-VSI|kT+RDBZ^a3i^+xbj@Ov`{?1iXOYRiLb zGOdvB2>0&rT4h<7lB7txc!d_87UH>R@C21kp!%2q7$-6-^5`b`7Tn>jeu5@MLOX2Y zZkL?7$dvVkp^o9Pg4JMel^u-yb0FM|Qz1!fTL7LM<`D_Q)lM%dvS^7zwI_D_UZ*_= zZ9=>_Lw-Dys9ormJj776^_%LAnb%xISiPP?ctax*)#QfUpTl59&|;~W!~?SqjM~!I z%nAo5?w!?kmDW?s#ugUT(IJ+!z{yYEeN&W!Uj z8r$?L*A}_8bsH-nah=uHa!Brdm=M+wXD|Ucu$WcbN$9(nX!FZ1B}^RDv-4?Y!5$k- z`Dbq7RVolx!Qun9n^>^@O^sVuhK^6@Jf>1*hTXF)nqN5;cSF`gKKvH)9~IwTUW+1_ z2^@&7Q5M|qLr}yO;TPLJ=$(MKe>O=KlOD|0D*cp)z>D<`K!-^!&Q13vFxlVn4Vie( z05hchlk<*a%#}XbxoQjR*-&7+P5L9UBh%G%`Th^LUv^YF2X<7cN})d7;GFqH7pas& 
ztbPVEd@6Ijf;e9=qJ3z#d5n;<3j!w*fo7lC&%8A!6Fan= z?4Yzo1IUXyDG+_@Hk_>e+v2-D_lGZ$Y-A2rO*%-t{xu4)0#Nug2(k-Ok2Np3_a0AN zN@em*Br zNShV;VwIs`oimR3Chq~7YoFQyJ}yK&8Gq6!6ne&jUaD0gDvuY9vG~1A*ijHNo`0WW z9tZfbYGdKsfXD+qIn#D_+*fV-y6VYyi`zd;l^^NM0t_a(ADVy}vI;7jSoIPG>Md)J z^qW=_wC$pO6X0V^j%%|2bW29p)|bs!ZjjVp;*$7=D+?tp>#Z(_H2~4InadP3R zScB}b{_V5EGrMNJMaX$N=h{eJgmUzpelP$|iX?ke@~7s2;6WKD^4dk`YE@zSBTNl~ zh3wAh>9_g8ZxJ>=B;P@|WxIc9oS`^+r$nY~NNq7PORktgwaIlbF?;SHMYU$D3co2R zcqJKG{QRWbUcG_Dq)pHnK8G*-#U_99{j84t#9+1eRjE1$bM&y|0C>Sw@VT`I*NiYQ z-sRPu<7$HMj+*QPfFJgANnJ-zBSZU1x7Iw%teu?u{AZX{k4yr^pjAcbz{qzl%c)89 z-tr54Yi%BMT~Rm+Bj}s^J84Tit?p%ODH|RN8rgMVCQmk!CIQX#2F1i7&_oyGVjX0Z zP;KstU==O1wLZ_MmW2!EK0FAPq(=JKzq;R!{NQD0liyHt30y$_?tAth@yNcipPg!9 zEHY+L0rSvOs^)xKMvSCw3`bt-!e{Mtf@9CxG3uF*+eG#vBnAE6-{1*p7Gaazj=Vn9 z%X)AW)}gxdwxK5qbC+O=X$1 z@=L<5lsZVVQX3mTKZ;85M-`!ciHSqhFv_Kob%Ox7N~D@4ZB86A zCf;z{x+#!Z$&o9Th-Td7K6z}vVYb(uipma$PN%Z|Plc3;NyEv4xa?$X$4Y*a{&m~O z=Y1E5UXqmb6?Q-2O!6KDC~NAQvmP0!Lk0lm5f$jzwS$&2lTA#S@GQSwg_<8pyrB5&MKP{V*efgINUp=L%`^W(&`%GoL>RD;%fE>0jl8=^GCTSRe z`18{G?xAAn_W=sFE8Q9PKasO}cJ~^eBg&<`myjLG`06)AFK5R5_CAdE$1oJM< za~O!y{=^RfvtN{g@!c7HC-Hv+&Trms-iXoLA2o;Y554r9MW0?TKCwUG(boKjzUsG;%}+!=N<2~?hQM(wqYW)AOG(4vk(#eXz$`{qkP^P2yJNC5gwX9rYawDiEJM#NZE{Ts}yBD5ib=0u0>;pxlGjq58-3dlQ5KY`r(O#k`?+kptQQ zRr~qr0k*fNdnXZ4F?=ImWe`)<*ca^6=#Xjp)F$8P5#>b?odhVnQ71)flxuu$Ff5 z)46AfNzHK75#C4N))%%+tG>O3VsjaZRXT7S+1NrA8V4N$60e|TycodYvrYDSruf(> zO;*+RvkuCzD6lQ+D7|PDuqwNURv3K@teeSoW)#Osx}~vcB`*| zR3h#oILL+JhO_BqFqDkN2+6YYPGKT*^P+dhEtt;=yRt8mp=hBSPkSL#V+zy%jU`#` zH<#d;IJ{Xz2$c;>l3VEHN|HzehmwEsF1oAeY+PC2@Je(%I6@J|kcXW?R}sw8)p;~P z>Z2g(xB;wyW|Sw^fMGt)v_^+ZdJSKS3>~M3tO3bSIdgClkaJ+#-_!03J2wh$kae-C z66=4b_%%&hima2d^z!)F$1!^6+L#GDqiGgWW*tPg9w=y;R;|Jdzn{k3d9B?XM=R5g zc?G0dO;bzM1TptR!lIKJtJEL`I^P2Ht;Kzvu!)Xxb`%Clc6*)(FXhemF8ihN6VpJrEEVRty@~ zr1*6{8*r#6`9rx?66xtKTL+R#LO^%?p5M~jmuY9;V>>3V8T*yNV06}i0CR>4choR~ ztHcs?EQaf}wtkDrC9P~asfT$T*fB8cDj*X;5>p(kI{i%qo=t6C$|Up&ePPKii6iJB zk6ili>b6|!3tOGRze5F1T6fl&*=4+k0{q|Q(20mMP<`t?0Ynza(^&)_^-tjEW4v>=eJQ3B_a z?~EHiRt#6R5+Z8^k-b~k>XP(4S87F-<*0xPU6<$juNvTvqMM5HWu#Al=N)>N2e1(G z8Dc&1H^|z)13%-3gpt-&e)|^=3$kBa#0@4^Qge59sf&&T#T1_Eq~RVnsbrvm3bGzZ zH^qBwMrGSOa+c7HVv!+}?&1L2aHj!2v?LV+;~C5$i{*i?G_-8wCBa82t@z%G;h}!a>b(--?0^LN2EPgGOH>_M=3{ zI4m&{+?FG#t@2{SW&N{Gh=i)Qp0Yo$NN2%!lT3c^5&^bUplkr-LGWKX7CsnX; zN^SSJsQsT!81wm!7zrnh_5af~mNJhQL+n%*<{KO0x?H^eF7;o)2`rx*Xw=z)cp)yC zu&<|XVHXpwyeC0{qn#&QPPsK~nc^g27HHIDnO~C+brWo;R`ZX@os6cX`<|34YOI== z$c;+|AfyhGP)yR0dI4)XqSEkX3X=dD%)+mU-x7;%PWjg}uYDPPIQy1fc7T!dB@Zla zkgA5DmUk|xRk0^)wBq~zivhxHDP2bDwUq`>9LscvDGd$$NvcRY^PuWn8PKRG<|@eeQi7xKD~b^mMMom@r%SkD5Ew%_QeiPJ$YMT5Nfhzgy2u+f zP7v_`6rMX@M&+Mh$NSjGJAaT2d~RQDh329c{^#E4DMJ3-`{cf*Tn}GZZGTMQ<;pR= zR9#&@oP&GfY*4Fi7jT#T(=`(0fYyIC;MGuxP_A$6E8Q|KuT_S>)H!pk@B?eBC8^yi z@@n&J;D}WgwazNUW0N6yZp)P-9yz=69vBfgIbwU8<=$4OdIGeaYKsu~ zEK(NrORysnvdnDN{}M+QOfgp0Hts9Ui}%&E;g5qT$TgK`RQAqbX+q?V*3&^XTO(P3 z?LV}y0jtH(BiGpKP9GK9*65vpIe_7a`?0j47KO2e_sfxXihkTKWrPceZn)}M=`rjJ z_y4L=sJ$r!=j@ip^W)h8f$?uAN0H?yWS=xKP_r>5nH>!%hz)XMyTAX{qKzYW_Fl zEFc0zTAO=+H20pc{hI%JER{#Ds#o5&5+-woAbIt_R@Gq2il6))w07LON+Rk_Mbe72 zKMJJI)E3Lr7PJbhI>Umxmsi@+bZK8_=n4b7Uu)V4=bTh&iMUyHUDoCM&`Zfn7PWyG zb4KiVte0qf2Ov@=87Rno@8X)$==l5K#523}U9J~l8GI4;o4jQg-0X>1Ox44>+J8ND zjwR}3HgErYPD7#u$S2>HJJ#QHzLxx!zh)02VoXQEcyQv!fQZHTP+MtZpK zOAr54h&mW5sa`{C2e}WYP9d~gfLE8q(-L80`A5<5MA=OuTi%Q-P$$-2MW*ex{|I+= zMuEcJu+x9S-Dt}!{@=jbKCI@(EqB%NO&~`}sJgZk@^OdwmTw~geg0h8LuU6C)V5Q4 zWG>LPn}TaHmT$@F-H6kv6CuWh=3ohJt0zr_JUWvhdaT{~J$PEfqwB!E_^bc_iFMP% z+ph;Sdp!WF9wlOIuE)U0z`yAduhH-aR#$gnJ=Xcgo~tHE-UsNO(w1laQJP#}f_pNI 
zkT+I)^4@`?P20k?^;>?_VSg7`ZIcVQ+C0k^ULMm#z+=Cu*>#ZU#4WRMhlo3 zrzMyo-l4t&gl0WP{0-Bpl*<0>#9fUHXP@-GmpX@liV8mb6Yu()46wksq-Hd?Ak9Vc zc6J78HCGMz2`!J2elFDxDTD9;?l+Q=gAwDf9bXNXp0N}*&4#&PX0**8n|gCYb9OF~ zzzZA>%|P%#Eyb&=yl&Xa+A$x0qt_$f*iYjBYObtJId8B4|7;_!+&@QA!6qesZ3xsu zARg*jwc7blX4O+Mx_7}Gohz*^gakptL)HB(2;Jrf?sc>=+1D(p3%!t+0!hnArOPwd z|0!o167|i&B!QlYdQ7Up*CA>K;>S`!NR@TNx0=hXiYP& z-`=ivlf~Xch#}!$gdG~^Qnk|2NQE^4P-u8V@M{pPkUh=WWHfk^KP{xc*YkGQ+$c*2 zNMWt04;IyVrCj{Yj`q|k%!Hqmf!KU(lo20EE+Kp;@WRbS#46s39~?t(Nv1~9)J!d? z;HVRc*G_xe@tN2Bag8#eou0jUrLNoz!*4%o)2a_2y>d)W)&5d6(y+sTl4{kODI&;j z%Ai$9p3JXIuw|r7gj8+9fyx>yhlBfBzNSr+t%)nUwoAd48$Z)&5t?;5xpQK|YA52R zf@M|RiDzf41|P#Ak8tUs$Q-({LPAW-6X50rWq=(N6KB)mLW7S!Y)+K;9ol7Gzm@O+ ziLSI+1r4scp(d(1ARrF;(&8d&5wa0iA#F%Kwx_02;lvoo{wJUE4T%{}y7x8p7BAA7 z!MgsNF^}nRKm@d(Zhh3h>&h!m^(jq~kMmN!Va<}9O!^{C+`M$J23IcOoXXh{KjXaf z8uE+iIoX&egvUov`!E4j`7Hi$?HF90z&G zpFI1HFLrkVzo94CGvCZyb0+(dW?un*s@cDP+d`uDSgV zvYIHiU_HKhsk{Nbwm!G5;y-7XTcY*|zE~{2t;?_O-;fc>!~kus5m7BDdQ;BRPfswVNS*n7oIbd-AbK1%|RkHS}nk#MR_) zd^d9I)c>Fu6T|*z#yaApZnGZQj>B&tEamsQx9ycu)Is4#-`D}VI^Qt5?xgO?h5TI! zLbCqsq5ic}r^Fa-7}pnt)+*^yckZ;9gsZj^RghRQ{epy(!Bhyv099&iyE+TYruSHj z3yxB5ooN9UPeI*ydZpCr7FUD^`~7g9>c3mTg zd*|a>7t*J`2dq+s8-BkSQ?Qsr2&p6{UMjlr_V7E*M1Dc{~5lhKBsR*(As{ZcrEO7#~>3f;aUgh zz*9UG27}zE1*p(Hw9!M@=3p4FhWHcc_}C+~aZ?%gG`Y%GZ@e=clId$@n0rPUUk zIu$^Xj97_lyG`>7mMVQZ+H`#PtuoorxEy*4QV~(N=J#M0{SaKx!HA-xi+P+0HReOj zA$!&&Ii_RMn|dK`yw}Z<;qaHo;Av)cKbJj;j(iBC#V~0*j$w%&nQ9juaA=^tbC(JVeaeAj}j3?~t`eB}8omAdPyecWrSdgjTYlbQ9a(yYoV zNzr>DVJVD0yEeh5g3i!v8zTcOp1vumCGCE#>V#$WyqX4`16%XEykXjR;Msm=h35|` z|AVuPIk3q&ZXFs=4i066@(|7t%N?5TL?C^6w0xxvgz9c2m;Yo3)m(Mi!h>$QcczSa zBZqFW(I&7s!$Kn*{wGu_g!ej`7emzX1hg5+6Y$OAR_Aw>+APpudq2^()Em+xkJYbI+9AzRHWHFt#(i#*>CZh#ml+Ctg*yVJ-&P?L4q%c7OU}Ls z(WnMl4)=-1J!!Rpva6qM66Km}{JutN5ASeSP{E0ibb`B>O}vFYfh64*`nfNj&^~Cp)w1kwd{hP(UiF}5{25nR( zBXrHCMLeJK2Pbgn>HQi1)>@+|LRo0dyH?@LCH7l~Zh~HHcjJz28=+`LtX+VmkP0L5 zGzn}!N%4Br;Y5!sR4`O`!^gqfTjy{qwx|o)+7#1Qvo8nD>rJ`^z%nGcHuW~B{7pms zF}g!^z(=KyT$0=ly%?PgEIIH3qqmnyKfqjCpnqfNo~z#h*Xfu6CY%&g>@Ljj96;S! zMl_ZTnJdNFR?VO3$+hXu^Rp9`z)%L{R&nIA2Ykr1H#2K;oEm1GyyNK8!Uny|y?3{Uw`!x|eitF+!cK z@#!dLAfK3$w1yjMcP7^=WWnUy)#n|6B|dWR8d;Z$gVeqRJKjNaj4XY17VdefMLRBR zM(ioF#}uJtYSuvVbaEbNwZs=yCI2n9y0`FiG}Km%yx^8YP4`EOoel>Ory8wkDtAw= z3lhrlBzs}y$YY*7Rb&mdCwdAe7=If0^J($b_VoBY=8kyK5Ca_k6^VVzNTw|Vk0gSS zlN8cQBrVR$qeUw!IK2o+Rxhb(!y{_J4V+gnbMX+VPlC(v z{RNwtTijG<_MDUqJ^?2hGK+CTz}xoYcT6;)X;a$>MJ90ME9HNAu;C%f8`QdIKxE+i zWv`utPYT;t+G+oC@8#RNGa@#!PPk8P$8j9&5i+M z5(G>W1~zf=xn`kCmCC~pDKdXSa41JQp03SFyubi>!ha7XVF#tiS!EMTdR(v9i zlJSTx>W+VpdO4xflEXH_aU5ZAM`cz{JOM}yGAG~ZIMp=aHXe)>ONf~vHiU&;`62(lBG5R7vE1-{aQ=5hfC27Pm{-UNT;QPJ0B=WF!oUdV(q@DRvTOaWqAWUIZ~UEgh=>}v=1`G6t&$dxc&sH zLAL~H9x3HsS#$tJD)x(Pk305n+R@8U;w*E@2d$1&;qOMHjs;XhO>TQi`JE^hI|S0^ zcj?F3qUDxx#Dm5Gxc<^GDQ#Ki%dn(ntuMf8%$GQfnF8mR&+U?Hm>~xWT@g6wfwwK! 
zoUM>ISc2-k_qd`4D8g)|r@l_an50odac=Rb=l|*@`#1ZlyzA2(P96mD#)Byg%^cxj z_bX<4gQrOC`?7st3Da6@Fu;ZmBf@n18_u6%(tww(8WsVh-=4M!c>(|u#q~;rWGuG4k+c-dLF1{sxZb_K;+a%sMbvJ5YuyV@oRH8e8rRjVYTzE zF2t)zLM`yajvWDNZp29yo@`aQyhP>s!VaovZSW9xEcy_8nK_J{JZ3GwUE}x}d^+M^ zwrdML7OCbPk?cxxQiDzWwPNKmNl!y1-5H?PbWW@i3Dern0=^!j&lo;Zk8;UYJ|q*X zr3ZJdKy0}}oB$Nl8;?7;S6U>8NJCGSt%Deo3fEL(kx9ZYzlFrFJW)H3xurp2c19Iq=@3t=?j8LyiIO2!UWhCq3Yd zQBMue9{?2uMqEThaIOE3emVjiFPjb7Wt~3~f%TIUZdr!kvkiL!4>v~?pONf<%h0d^ zr}!`ERr*664kqW?rxF9jFxLuD^Uw}*HM1;y;RviuU`V{b01E}a5`W>Sa1J(rd#Ih6 z>_H6r)!bCL2-Gql)C2{>*n4?z1EbNs3MZ74^hpZVGYqk#`qGRUzey3gW<)T{h2U12 z7;3!S85FmqF3JbzV(sKj#uH08Fr)AFKKeJLDi{BryHEwnG0Ryv8VJC@)Bb|uAR`#_ zn-0oA$HRlzZqK2%#O@y~n?huS6e$SHqkd5|pvHgwf`pcakoOdD7vGdEtkK|;3*1q5=|32^Acas!Q%g;6r{;xo zY>BG3?rY~EnsIB;>1VKi{shiT1ZUZQ#HN;>^G5fLkPnLFT)ZVI?arXVz{0jH^S66! zg0kE`F`u1CPOYRbVou*T(SH=viKv)1HUOX`*_gK5)OPKiC}%i~{WSrkYUQu&y`)CA{*ENcJ$PfKj8}t0=>( z1&oddloB@0igZ7S$RSf6 z3086Qj}dVA$H%p}-x~_Z%jDwG)0{!jUtTV-CS1l!^LRPz@_Bnuo&N)xvT2F#JKS+g z?cT-+HCB8ky?jEL=9G*e2k+J9Z^H&;!JT|Z{HCvOWeHaCo1eHpm#Lffk7YVO_HWCS zvSd7E%yK`$Q+07z6rqny#nYtf`IihL^Z1h5{MWHcgtVS_J$#taOAyMSt{7uIbqoYA z7Ikkd-i!!rx>~(Ytnr%~KX_}MAJG@rgl*x))rf*`3z?FZ#;EM`ig`y(Jn>Ul@KL*u zfK4?K)H}tYS>dR+3foYRT4rLHz90`ICh#hO3-I;z*rp@>UOW-MXobO3zpg*dD&;vE znzA2&LYi^@#@i1{6J{$EglQiqePlaxXZS(3mG=75nI5Ym!T_UQmYRF>3DP{(DR2?M0AR(MX$fR ze|JUpOfiN;_WqsgJ$4=#Yc%AxLYA|DcPj?UP}nTbH4&%*hA&e zw<>%@$#izx2$53y(tB7N$@e>td3R^JwVT}iv}q@sT@y>Hj>iD7f+#O$jm9LWqd+sa z+FT!H#o8{fkQ-m~71PBf;SGzX7`w<3QQ&W>pI=vbYEIqlIa-|j2N+6&KYZ-i z0$$SPwxV2JXghew6PGiUy@x9JiDF>mpX(We65zMV(ym1Zjs`t?VV#>GsPiwF40@yU z7M$v#9SJra(DQQpqD1q+$CTGD(Q_^W6JUk>&9DE`L&G)gd4rNe{YRI-J==FDTN}ZT zN{n{Kl~fP{Z$iVBgrX(t7Q7qU%|1;)(Q10n>dNYU?40b336=E-L5*~WZVHD;S8K|uC3|pk->3S;qlkK zh_(yXyw_#pt#$y#xt+4+> zviB34WDKaYj=QI_kM`YII&hUUhA!9Zd-vBN)r8)=g1-Ule};U_$uS_{ya zQZs_Fj$U!;Q9Tv$UAijPQe9{muD%f!0k`w*@fX{u{_El0NZpa9dThcX_vbogOQN`g zM90z=jjEa}%a9 zm(G*QQBr+QyF^l4ABvoCw`Qy#inuVp5|JD~J(W3028yc#cSw5NW8&Z-_w(I4mtQrB!?*5f0& zUIjcDU8pmKWb0zIbyI-pyc}9*c2}!&o(xW6-XF2%QRsxPdEh$CT>_G^Z0Y}h$XF+d zP<$Kui<-Jrx2|6!Wt(v``@YVyg|I}3b2M3Xp{Mqw7t7oH@>;M4S#?ooS}sDu!UHz+ z62YAlR`)`(UV<*^YTLAAki+pY!(VW*AR1|uozxk219Is0-N3yyokae4rx?rJhbd9w zC$!x*dl4;A|9Gb;U}{&GsBne82GPiGc$jFyZiCh>ffXRjSU%0;Tq=hZ;$O1Tx+7xw zc8%-i{Z>Li{O`Jf*V5$hYJS4ieu2KL8oByZ)6|rXS8p{j{f2Agv2Ug{B%5H|+$R0X z<#lW1#p?w{_5liXyGdiZ;BHds@*}crjYsD4eM3{VW?!x#)uf2tF&o*gcmBgTWdZjO zU@if^#~4$D?sYxCDN3S;y$IG*Z>gJyCQ(do)|c%YQ8TUz^R(z^vNrQ2$?2 zs~22o<3b!5U;^&9oyyQ)toaOJxrqkA+l&hA**$jq0m8u_oEi3(yg!oHrXpQjz~Is= zCg}O20Bs}S6k>{eiTv|Oh=>Vva|i-FhRzk0)hhW%9dcCt3adfk8Ss+x`g1z1AIEuB zp9OS{c#%QwCPi;*m5TeCS_b8vB~WVrjA!5puZC_aiwUqHaf&L2kcDFO;2@62hKd;i zI}FtZ=upZq`O+B(A>;FsQ(RHS=L-?Tt>&r~s|f~#N+Ag!%$63jw{SUh}O*$94T+f4gOK2~C*%Qdex6W%dsd&kI#sBrazg<=TUH2tAu@Z*c zjs@MmIkxCPJ~D0LXV8B0m*hOiyVxbwx;(0=Ns9djZ^c6XIdpgY@TUthsSAO11Bc2& z#B}C^o@5Q?7O(|(^kHX||4^f$Uym>DlSQp(N?;U_y2Ma+2)?RKNC%i3NRJ9`Sg0{- z+LK1Al803~w0mX;A92OOEb;OGmKL941@)h$Tanw;HI%Vuj2@&J{6^+FS7yY#g^#*X z-ZJuQB(5(QM@knNdoPh{>mGm@7l;PZMN8o4l|a#k7qVy5kyR&b5P2}B^29?0K{hMZ zS28v`mgD}#hrNXFIK2y_dXNhE(Fkw(TCdKlOjxs>_9$L&Po5n~wR_AJ`5a}>W@CXI z4}W^0PIs<#Op55B4Nc?$GslDqMnS@H|)4VMYF#0(io^i)Bsa!ypgL z9Mf+M_wczgV6e9R8s-O{%hl&=$rE0DUtQqbh+cDih|f9X!310 z@=v2Ab9@j|_=c;Uc+~J-Wiql3p2)hERF*{Xv7EM6rT6U8bVx*D{Grno@?9Wa z_XY)P7^}%qW)-Bhy`o^H z_1u48*=t5rN0T;z4#1RH9^ipk`0k7)E%~B}XWiT8rCps7Aw)P;a==5Zjqp910Nwiy z#+gdBHRDP7NpayC=yey$If(g&>5=$$p5ov?EOoJLSCI6O2? 
z6FjJJ&i*@UF`AO*6ot5)F?NS}I_6MDlnn+3ldmtWV)__U=qvwMZM_Ps1M5F21}~P4 z9@S36&{AAU+aG&4I)NGVR2dz|YWuOK{MGL?RS?pF#^pM}3w)i=$STh!&)OcK6kc-~ zfjX?WH~6Zd)5Blt8t+r~k5!6tOCD_XS>+Lr#4<){>i+nQ8|Qh6WguY}%5a34a`-kk zkg>1xXfkh`ehXQE#!$JBDMwaHa}ZOx3L&29E?GYGcN7L~eH!@OCo>F*t`TmQtzl0( zF@j|yl6FFV_TQF*9}XC)jV;>ulntS9DlXaa5oN%GH!Zlv2EGFt(vc?G$Mn3+q_165kE8^@#J}h z81`C$u2N5&;s~bL$Xv1|`BdWR*epoVtutFSmltZsFpt{~?o!q_qr*TbSti50mP95H zI_)nj47No47O4F11J;2I)V#T$2J)x4Z{Hb+Br&;E@+m$h1DJ?9!lcV?RqJ`ZOs_l3CkT(? zcC5^$Ku4Ped4jjP+%627XmR(K?4!#tP^_Z3+TCnKfTeVP2i~b*V{{T}dUJ&;DlFsF z_KekesauqbzZ~aiqbAX%xW_JwzgNZL)KEvsvfkEIj3rev$D27Kw{5dO3d#Bmg7*2& zy3F)%@rMH$>hPK|)9ucA^tUMlrVEBiB1}Gh7_@ zJ~cRZ%L05&@4;Xht)Kir%3zOT*nYxKxq>gNnT8k$!Dd$p-^#k_-=A53LmfKmnctAW0FgFI3H=XqzL~utFIIS zI3c&2Day9*%i!!Eq5VHeN7NcGEV7k;$&j}OEPD!q^f4sJCdFbR2#i-bjgEqG<4O-_ zC=py`KB{`ccJ`p-t{@7U#{)^aogyH7AV_P&&HX-T*<-&8kw@pb7&q^oO1 z@-rpEHn6*onixutrz1x8myGFxxC++kCXmBK#slya-)_8pH5gpxQ3d)aqJp7LD`yBu z`WC>WetPId-CVi)r6tMOAudFypuzx1TS2dx*c6gggO=)yCuj#mno{z@zO47`gpRi0 zI!3A{Zo4w-ASh}jjA-k`(gki}*_dP5*7dPBjigP)NP+HTfSu6bl?}nEESsfO^l)Wa z>)66(nt{&nu}6_ti5r>I(9l-Ut?%7jZlZC@{)Bz)a`>v7SpunXpyQmG{UCjzbz{c& z6bJAb3bt$~ETbY@4L-)P`-r7iRH?rTL8g-FqU-Hrkr&+k&EM>5^g#8={I$wgbb!*klDC3uW}Yu?-FAJ z`SNG5#+u0SRySHLp-C|fpobY>Ap2?jSyBO6R;kQzIcfFo|0ic-oW zaiMA2ll-S?p?FRi7kX=fKpSCzVPv0%gq8Zse7qhTFos9orUrVRz(y1@e!Abd3ug)i zq^s@|8~a7oXf)ZRKCMO+-inH9T?3AaM^8Is2YxJcZoKV7t8y_vw7<;VI~&;G?;KR z?_u9A!~E*566ms@EE+BewuBVtsT5gNe52f) zY;g8mjpJR%)bFwsPixIZdEWQ^ACJ_<6<1!Vhg@En;=oi(vSV;l%T&`X5O5-8P9!+z zl%pM6@`|ZBdK+g2D8zt;&4e24M=2I|X?a;o6HThnyBS+!C~{?tqUugXObbi^`C3p0 z@Ia-?0p<}Zk_Rum0EOMhE=sB9PL|4+Ts=)F_6c#&Ogbn$FcRPE#9;TXMnGJ3SJm|X zVObH_JeQIwSb|6LCjgH=f8Hj$UI8ZpQ#l_EG$cwYJ@d(JBK0Txn@TMjuJ{e#u|M}c zzJlShJ9`J>`rm^(tJqV{JJxlO*+p5~gh$oS4O*DHFBluunY5J)`I*2`vs8#`3gY{h6j!m**&^vooK>#>=j4$-fwU1T09P1-?bIbc;wpU%F1LUJR} z(N6kdh}~h42PZ+#zzo&J&&>5mA(=7n`9P&qzxh!}Lz4Z35VL%)(ga7(2i75Jeno55 zY=aWe8CsJ979L{=ixjTd&#!t?;mn;|nRdPdvhh*f_2TMtnPqmK&eN{mg4&A4m8)Xb z^1Y!uPYEsWi?W+JS`m)NnhneA%`(N%F&dQ_J<=f+uw~p)_Ka0?%ECDA+RCPYaW%GL zOBA!&2W>cc6z;E0s`%J+oD3OL3Dq6`Dg-vtH;siU{2LUN?s^X)GbV?63Zqc!F>LvF zZf#8kipohbhmR>bZ&PCDpIAeRF;t(4 zd7AD>`3dNBzg7|lN~Z5_PViLZ@|u>j23t~Cs)2&h_d-`Na$V~X?VPnS_1~9v&dbS+ zQt4@!;u$b{O?d?i2IAXk)|C`N8uk7vTgkWu=JkZ~L{)KSS(->_EJ+!~CbqulQ5c&^ zN$axJC0%XCCOe}4(4;OanuB1!{L9n7yl#{dDgi|9euAMF*LnYoCUtJ_Rih5sS{I4w zGj{`ivd8wI(7spN0Pql{b^ds(>p*WcH*M&2H|sFxq%;LIbl5hg?e&jX2oL1Qy zZY2Ek>W?4;kE5VoxFj+kdQ~ygeDlV9^;#|I@+SZCvJevgQEm-z(@Rlq2>y7F&UON7 zjQ|u^@Nk40a8x4lk7<6nC8yMidPtMhoC{wUK1wDKAYSy`Pc$7t z_X@D_|LjHv>nXn6jlFIRY=-=aJ4R(eeOk*)^rc#lfOjLwOrN8v58al_<806NmC~|& zJQQ~n8#cukB6q&vC&USyoRy9o!%eU-E*t2wIaEY>x{>wyf(LV7Wa~g}AKhx1Rgmjb zbzeW&4_y4@-R1jj3tVEjNtX9z2jY44#-G7&w#06K5D@9z<_SL{C@&cC+~z^nz+Q;2 zNj;025?sEXyFG`{-8_#m$lcD5MFGw1kTw>|9KKY^$U%$|`^ux4A-M{Y*Leb#JkqW) zj<;T`vGR625&Ao5%F#tEa;Ywj2uS4&L3J)1ZVl;QlK077A=6co%}m2Q7v~+UpMfxD z&RRw@0yeXpVpB?3DpkOjWvfqAgWTYuLa_@`!T|=@XzpsjX~b6aE8gX!WxIH6hI9}% zizDbA77pJ!QAIY~k%_u_m4SJ8h^FFaxg?SP7WF|_Toz{Kqh6W=JRD;JYqlA4%K2eB zB>tLTZtc6R_E%W))T~KfW96Uipf_P{iX`BS^+5HVfrH={;E|B`E&de`MoYFa90uDo zn$L6ssP{oiR?dc|Mp<~8`USNFvPpc)f1S{ff1FSyR^_lg>q$>WEYVLMpeA!Mwp5C*spD+ z*%PHVryI|iN3%-OHoV=U1=m`9lA;+4mchoE-Rau0k#+|%G(M(@q10<*)ZI!|Da{dd z6Wk}+73yoeycDx*px!2R%$eg zDwfGr&QAsr7XaM^f?bGQlMcPo4wWzaz$*Sa>QK=l zjKP_(Grn~EZ7~{ysn}YqiuW{C-*q-?U6)^N^TqB%^M#n$EgwiUE530C76)X{yYs0& z0WuqY&@}WVWM*DRIEmFieIApeibBcu=$QTTcoHDc;zQ^dswD3tj4b#-HZ z#Ao{){F3PP=UeTI&&A6JW5(V2S&~Cli#D1f1O;=U9ZQ!&Rq+1JCxqhUzZ|I079v9% zaF^EA@=?qsZg_2vwQxvD2XNHZ)1w-1ybnHHBlh_`dMw4!CVp 
zFAf`X{Wg<^(NqJ*lmG2=YwvmsI(xI`I}fUAd`$l&p&1KGoTd}2H^X=Svhy-6#7bI6 zalbx{Q#<>1P=Rwf({qC=E9Vo4do==+XFBUv)A*R7p{cCgIUDiNTmji;AL`9|>&^)Gl30ad!A8z?MZ!MA4JomB>BP*xicY0R>4Sy^2)lqxJ5csOfb~~mvCQ2ug z=cDEsi@Feg?fN!8z_2iqvRsL*<`pN6#6#EX>J+((c!Td*eApXC>p?XZ^3zd&gFuQ` zysLB%WuYIW!kwLX{EI$%kJno=37g(LRGk%!fFYmUjIQWw7Dl{P8xi|8KZwJ+-oM3B z2wkZKkZ7d^{n7@%{nIne>qfUJlHy``AsbB9UvBp?CL(k)EatvZ>4*=38AILR&P`}s zfD0)IPjpUTz)TWy>6MYo-M2(>{AVSF3nBswlB$WpA@T}kCL z2u~^A7c~TxL8Vtpt`aQcszJ3mOBaxb=O$f4KIuTqo?oMvf~ZE-a8$%0$qL{w!J`)m z{CFt_Df}ePe(GoJ&tY!KN(NU}14PGnDv1c7Fa%3?*(Jc9zz&d3beffW zM+{M^KPId3Uz4>-0Vq2l%9zkYWTR0=;GTTL7(P@RWGJHzw~2^7XCoaWM;?EW*5^3*Y@ zmI^0bw9FzI0>~>+rqaigL!dZ8f8j|FkU|GA8bT$Yf_#M6$a{dC2Ztq7!o`vX*8y>5 zbv<(ki%monso91ySuh9dT?#*YrA=dzcDQ%Zi^ARrq3IDnD9;orugSIDS&($g$swXVC?96rDiL~xXz({MnW`B6q$&pH!Lrc zlFR@`%YS&(YJv*s0+?^*O{LIRSC;G2u0H|ByRWN(>@d7^7bb+%Sue|q9jEzW@1KQa z%V2YH^Jjprk6jibmVzMeyAeiYm!zm1%a=Zw=(8u$UoJBObr@qkPj!`%Xl4vdyp5=p zxC-U++|-Y%hzP@B%uoamwPRUO+3ERU7cJJVi<67R5Nkmxf659ov!-)KFMlc!gkjgT zWKFfRUVQz{6?%m5T@n&A96YLc;C?TT?-S^uWwJy$%#7K3WY`Ulc%>8z*aZjv-C-R^ z^X0z3C)5;fl$4IX3uA!;dt_3jpgwCMl1Lf6sdLD#<|L3`uMC=p*86{3ul-~ae|F6D z&`xhuP*Vs9b0tSu+QsM>7)2(f?fTqt!Z9{PZ@+7a4SaOP^fpn$$T|1M-$k3wnMf*1_RDXb1ni<87-_8F8 zyf!#szsWQW6-}M+Me5csKUp|(u)M5tKU{py zP`nthGdUjVg`(wFoaUB*TSp<<;)Hn%Uk$T~n2pP?vgTejn$dLTLqNxe59MXHe$FfG z19vI!&1pzc4<`JrEY7p`65i|5sEsBkt0Ng|nVK_4CX z&>E#lXZpLdYjPd%$SK?{_0kf}Ni1i|h4PM{=O_#pPB~F?e~%UtEHbvXq_z*hT&m@o z)W$84U8!2vA0j}#8#hmQnm62ozM&C0O8WX^ARMEBF2asCgI268^`p+!_`{YjRdo&n zQd$1@ir}E^HwW5F`MckhqFSP>(A4AMPwMv8+b5nJ1}-HyL=I)Ilh0ieZj!{U+A=4Bz}B6y3A@85qx zoqTp?W0ObBUyXxB)Z|9LQxwGgz)OassOezr`X0p{d7vFb4lc1zNYG2uy!0Y<-6h|z;^X>Ga zcx|P-?!giHpi?Ntin|t(yY{9^oCPjcqLvLpeWS@}0IdHpVB(Uf2&ZP=T2qTA3-n(Y zDIq4fx42pp$@x-eSJNoa^q}8($h5%AQ6PcFqi=HM$)C3ClnSf- z;)WN|zc0+KlRZvd@@j=S(-?oLlIEMNs$YQUsiZ%7=fJwdJx7YB^#6! 
zM*=m`4DALPn(#sC6V_J0JV$)iFQlaG(y!-MRJMm0RVhkn**Phl@dc#JD-+=c^f7O; zXbBDKqXbN?j^0uB$qqs{#~jVQR0_)e^V1DN=3sM`Ts(kUpo zfe}2su@+V5&I<6`+PXo8n#TmcO9TW&?=VYg=p3Tg<(ZAC1gAE~vP}A6q+mL*!$D?> z%1)C#7+6D8*+{tGp;g5?nKErn#h|d|fTBU09}HRt3#xv6x89RcZKx>_sG=|l2L)Aj zzo2T%Z3g)-m%P?4b4k%%_4{I8*iGEA+6T{f9XR5mObC;TsN}c>)9BH_{r4eJS4b^bQuFRUwdbma|OkGk$?^oUdGG;9kP#xr)A6GwTN@D11 znu}&AM5C?Pi%>^vF?Cdggu+S@mSC;;knt*tPwz)?(z6zyMgOp*DDQe{wRcPOltF75 z$`G|j*ih$Z`qB>ApWrhk*fAh2@Ag@b)-LVYO z{occPw3JyY|TAsVhUVg*;3A=X~@DI0Oi4#JA1^-+=9du66OM!+=7O>&7X@Z`<* z6gX~S%YxwZSlQ1++%YLUi$lIDiw!rgqY~Q`S6%A`zVKu`H7`^&*q+hjfxmla_73JC zLxzl~It&3;Jh=W~^ZFw2z9GHr(%UrJ1leP;f^Q2s>34n(8gSP}Y6>SG!j1Hbg~xu% zSw>i#2TFUOF8>_{zfp=<5dX^vj1%XVo-i|PG(7r2O7RK^#XCN{n!t+Ban@`vcDiA{ zx7OGtpHpT?KrD$+{S*}M^JvZ8KCnL9-YmuL6ip!?Oy}nVv~b(4S7)KngQwSEPB=&Q zMif&y=*$WS5IQ?QHo&*`oucGr1VxlBIP$hTE*-tffvXLZ49-VszxnUIbd@M}2w8*7 zrn_qEi->X}ptY*LcCKg-t5=bZH*Da=Snb|Qr<5_YPP}|4F0X=95B?bT6Ef(SdJz?B z-ep&@=~dL5#{)?>40CxdPxg($7>XKofMYFYam&0UT(+7{n8!W82Jl*6&nn`$ z_-5$4fX-yp*k75|Uk!MDH}ERwnF9VBEt)e+Eu^V?G9M4$`Mj6sdIr|_Rsf~ za{>-ykza0h;+69ib&H4srQeO$==8@GlFmQxx;*m2WsD>+wv~HFJ%}lFTkImPaF~sF z3+Ig7xLa{@?$7CSEH*eO6mLQJ0_-*(=`9S{VaC~2E(dI@Pg`f}UvVyu{pN>nRJgYK8nKG~@OmAC*J zN5h~a{MgS6Wb2_P{Fy|(ig2~GYdzZE=3@<-=jCy8!nm+PxS<2J&ue-^{5=pCz`b(7 z{0IJrTB>j2iG5T_J%mfI^x{H)-UG1NML7P;ufi%>|BGKG6U^gscaruINenUpOMAVgdOo<)r@kbb4{J>u58X>i_VoLk4PZ zI&8g1V=zCX6wory1}KHiPeA+wKI^$5K*X;*M(wN5Enq^OO^`~8 z3wZ&~gFQK_Z<^&5c|xZ^x6FJ_QuSKvR+zx7^pEyFJenrromK>R>b5BBjE=+WV#6~$dg2s&9vLAQjQK@D;n`t5}or-u|g4si%Zh+EU=i(ApO!z%R5+OOG;NC?`@6V3fibW$5gHse8Oe4R37Y(&>?gj?k z&p(z~iuN*TJ}KQS;`#J{@vp1o9Ivv?{d)nF@dsqz$<`T-&t=lMHHwmqEb5QXoE8tR zcUTeL?|Z5dE6+YA-rqN6w2Jn*CLpkjwA%1g#|8j7h_6oA8_w!kX6#r{FALEu@ZK!uV%)9!2NS>ZB~lcQR7`bw=8 zQlyT5vhcn2M|l1@CpB6A*XFc#B8^-qC+uddemagg64Cyg0vH62@w%zj6Ss-|3QzKg z9KJu#bkATe*3e%s*mZA)gugoHnnyL%gI$ji(sy0qXRRWqV&6+cmfmCgh*!1WCLn4} zz7jn`A{I{P-yG@EVpm2kA3-z)Muo88l{R#LK#J*r6>0-U{|pXASRr;c4NY~S38PpM znXTgtZ2`1vXgZ=)JJ_qrD2T)M{bWBdR3N!P1fm+<>%3McL&JY{D_lJUi20TFiiUkr zGtr!>8+2}*HHX31_DuyQ+n9%Fj5dK@A2F3=j&O-x6C^@w4RO@X>#Iw|+P?8IrNV=VcTmjm_7wGx zDp!-kA`-z{OS$52Kki$p-KoiOp<@;0v1|hfp}C z$8boHwh)9yZN@k|vn-QIPb9LJD2%IPg(3`)$wW;)w)alsX(WpW=|PFzy`(iA8;26L zO41n=l^s@o@ZDiHW6lM3sW2#mH7=lx85aa=VC-J!{RD@*yuOYlh-LBfBopxP9e}Yg zv4fKT$*$)P#%4E2CH&^CSUQ`iV@v6-rd5XdC5cT>epwmba)gM<#qFGer|*{hDVERDj}fJ1oFSkuL{N zuZDd}7C1V125BSwn!6gy?|+{(O+t#_b58GG@>lr`lj*`jVsz7qDXk-Q*;rXfREfhh zd*;FCUBncY0bPrhVywQwi;qM5R{y;zegmy6QcDR`Q7WyTq51rRsKfRHC_u6{8B4_-PH{6`5hV@XMu6&jC?aqsgVGiq%dKyCeuQNv=a6z zxQ8UCYYy?Ja*kHMfbJkYrMu?+dJDK!aYhEsV3A3u$o;15JXbcK9-qSC1rkGV=O1(l zCtYF3D++W`zp=zb{&i8a8~(be1QZgKaq5p+5vEs6$6tHia`Za4bt?xDqqEg@Bfc(t@wy;VX&-3;2F-P zdr9*5Z2(BkQPS71$)iE0Jzsn!nyf5(XXej(TpmtmcR@YFEqqR4%N!nij2F-%1E5I< zz_cV{&->`4j-d0@V~{veMnXRzD#BJ@5<2#2}sJhFx9ZDySMSIzr+--8jY7Jx-5rD`AJy0GYg_yHh-6HC(WkdpIzpo zq4>FaD<$82DqJjIu9(&eix(00I#f&7^BU%gKD^7Etvlu_?*pw9@O!ZLpbUq)Hn?rp zq|dzYxlV5u^BnoD+M&h# zvhT5lg*{+^eo3#4?j_n+OUW(2l5CKUreimckB@){9SKbk^UC=L zsoO$Y)9R=x6k6%F8%OAhb6{<@X*=u+_F=BXh`d9_KPN@fr=Z$}jx8Bj!CTeHl>GyG z!ihn;XJ2U{sN@se&_-#8W%`B^3Im+8I&Dmhf5=he^)D}O|D{R8HIi|xZf@!+S4RaPiYEfb1_K`{eN)&USvY_dL99THdu&Dl7co6JZfddD=Gy%c94X9 zazx`(fn|+S3B+j}3r%6@A_E4`1xx(_z(-d{mC@hbGU+PY4m>cw6*QmiBchGqY6VRs zS7Wx!hZo*on484lDC{5b)WuRsx35?%ucd@ENt(q#merMxEpN&MCjVx6S-aJbKV~s2 zB&?cF4f6_+?vM0!itSvOcr$XY39PZtfnO?DmY*=$UN1Hjs;>)zZocHkrnr{N+duOY z<~K5A_zT30`BmfK1f4#*k+C9#A4>#hReaW&j`L27LDS4y^dq^yB(4X1;BypI|N}Ret8Q z(oF__vJeh|8O)b4uEuLP4|*8P!9i-6!G^nzwdTyozO{5>BdM@M|1)Jw%`>M-J_r_c3r#Pl+|Hv|B3r-tt5*w>yg-trpG2MmzZ^x9TKbdL1XyXP5 
z$*2-nd9@4IRxzm}g@FlIBUfg9QSVaAtVdELCh1XFyzA@E!feu#fb(1i8~D~1yh zu$qn%W?Qxu>l2)^sfikfNiIPW9ZY>;byiTUSRA2^UQjs3V(vuRdWfo$I z35cpuieP1vi99&_KXu(zP}^bmCvY5!7cExY-QC^Y9fAcbUZl9YySsaFFTvf47K*!; z5`5G5-JSXG?Ce#po=I|(oacAW`F@riIUFF9^!G{aVbIxB6-5|mEX>QgBbbCszmUvR ziCZv__V{@)t64w3{He?gfsfK>6GE7+nt>up;OI!C)jsE8Tl-2CFNqO4=7}4w!XIrN z^oK+(Cd~jI9g3VOtuXDQDgwbk;a9T0LVEFB^c#*;)FJ&)&WLxy=@nbwN2(Gv-t(R5 z1aH-2`WS|)!KhH>8SBXTO0=3IXgT+3h5i^C1QN18dm12%+GJ;$lLsdsNc+lET$B3Y z2N2D6d@j2fmY$soEQK+5(xsAmTMr%Pu#mn{BOt8SK0$1p>BC7lOe(w~d=zxNMNLS2 zFr57k(O)%4LmT~3?ab1kV4fjz&v*2CExZQL&#ae$Mb~M;D@{LW$X}*n%9?q;T^ieR zFoNd0#hq7;Ix)1*u+A5~a?(R$3pxZFMx_k9k8j&= z7&-5E5tyP9VB$`7IwZ>MxgCMsI?A31nyF@k!R9_EhL`|nCIeS$#5e?)iO4E-@ zak@NIht=`&YG^&MIAJrlx8!kJ%A>l7Al#lZO!S{Q(wv*tzM{P4|4o(RaSi;VN;Q(s z|4Ws2@PVUn!Kq~G)Pc~LxPr(Y0b?^_W5n_Lnp5;`Nk+PSM25dVkOzu81<%?kr!5f& zRqcL*I?40Is7*hE^!vzg=B5X;f8q)C-=fuJ1uBR=!Yy~S(mW459uxR`&C`1ouiY~Rr8xZ(q zqp=22Xj@HZ0WOQOndka(umSxWf}z=}wN)EQpkwV)DSmu+8QMzfUxHSYllfxgzzd)t zf1YtO1UHSJH)$UQb5+{Yb57^;xkH%|SKJN#rbWLpTxRy$8*^wDgLb0rNHd5MVZ^S4 zkw^VRPB?JWw2^rVYdaFO|Zs75Z@N6GC%}jfd7A z$6PzdkC3^$QYjizh4tvw7=~w!EhD%p{eh~i@;my&>cEXDvzO#T?)MMA!MzN>UgwT? z>cbZCX`_`}V=#s^p*s%gyzD_h9D_mb*Vy}k1q}%3*`+1t?Wt51MjshxS?46a7>e!I zvk%!gRFur5E|?HBQxfx$j(Ti1#|>!gMp#T{*6b_COH5KzN$jrO9p3ZR8ls83xlbfL z5kdGN1_(mw4R6RVynIqg{70UCd?5Hoo_<0D^nE5osbM3}q?zEl)pHz3A1I0Dt}t31 z?kF`6H4ox_QV>!N-HJz|pJHb!kQsMiN*UsC^|R+AlvkWt1|RBlW%vpP zcyai*6VNU21jtk-<4kd(#hpz3VEF|(2C>Ry5X}(SEdED$kygG*1mXv`l7%bG8>jS|z zu?GG|cnoJoqO9=`=3GXbuTvM^jg+tB>y2W1ebkza2Q$7=S)2#ViKylLxlf6;b zXk*gNZ{B=oW5?m5xX?VOIC|r;UA#crA4&N@aOxK>I@Q1ZK;Bjxy$KahY6E0*Y!H|> z6M}dN_ZTZH_p_xd0YxR!M~_jSpzek({pmofTKoI0`jCAWvcl3; zBCmtPY#5X09;^a5(Bb~yGgJMv5Bj%lF}WOYa(_b_<9n%4CrX$6+Ix&mlK9E|E$Rdr zR#yZd2&G8K0w4Sm>UM(@JTgpTp)qyQvDp^(7^GWtX3DPJFAVOMWUO5MSj3qKj>X=h zhyTDih?w+9L)0cNA?~l5ANC_4WP(4$t{n7X+BBleD#i*INkq^0Siv9hg#-7`O|`s% zv+)UK7RG=AH>QSJMGB9+ppFj6HgotfFq1A$TY8Zqc{SzOek#THE&6P2CF zfxEL!#0HTia@>vR`4)E59OY{Ai}yQDwxEJ8nW`o$`X8q&QDGTz*jv2&CqAifZ&jgM zS$dOlam=KSK;ppb&YBiEUI$o}jgpODw60ZO#4Y+hNXl(UY36PBXrgj9Jzz1!n}ULg?ev7*8U&cbH9zu zTYtqJu52v6nbWrVD-uosp_GUEh}4PjjlV_dEx?Cd@2vt$Y_|}ZvaA_WSBN$c8wouH_o^JX0atsQkkjRxn+)ojeEP z0!;G}mF3Xf002J?It#1Xg8MwCl`0Xb8Qz=AoCkQPt#1q$1w&-6UmJ}Y+eP5(7d+1; z#TWTWTMm1$IU&V31O6h+x>CEEYjPR5Z`=_p`6w z4{C;OJ!{nW>Pg|4^;x;{mCZle?u@nXG&jEux8|t3MYm~SknAuinQ1t5!0l&#+ngLt zW?n!_qZ8(H;0R;--8-LDVmq8(O!3ek^;BGs%D8P4eP{uiq4-#E5t0ft^%#6c-#(2| zk0`5^f(-R+_3642l%Z0p#3%oGzmPXvIG0tbQMJtdsL5f8jZcHX@`v}ues7kjAiq>T zd*?4S;&dSanTwaL6??a)`BZM8WZgvsZQV0j@PnE{6*NQCS^7U&y?dXIe%hh zLY$Btetodv*io{61s{J;TdFlZe|$oA&yJ34wf=7@v;N%N{M<&zAn6_L>xHIGd3=Vh z1`0C`hu50=vqE+zFU|@wHHOG&F6`Khfm`*hY#K<@w`x{Rn`gZ~E5{*h-mKd-W_bpu*o-wT^@-V+H%xNM3&OYrh|l8 zB7BIK2nhc#~WoH%VMaRBYg2r^{7P<$cuwPnCsQ;K4(B`wf&Zp)$g5a&X|6 zTN{*wc&id7sYbyfS{gH^Q1)|dY@M4T?Sh@U8n=Ez=aY@14P;+%%rS{GidysGPRWup zWW&eK)NAqmOa}Q_+o>O_hEM1G?B_Jif%a_YsSJVPQ7dEz`+zgoD8k5*VA!h58sck; z!P+c^xRyjji$qp#D;_j%{q1JB(nzQb@KUhM0j>)U?w8f z2w3`%n%23DoW>xIF#(_ns}ei&vHNZR4^)GCMyMz=Q51`+pw7h?x1#xp_4^hiT9^B! zI?agA^N`LuA~_HGxlwyzm!1v|f2WFZni#k$Ua#om zB_`;@Rw@J}XNdMdV_b;rCuRrr45_oR@{*NYqXQ?;K~u_nZA ze%GzoxK7OL_+DLI3?I}aXcD*^h6kIeG3|I3h%5g)kN%KEfd^LMGw2#7o~TsI{tJ(` zJP{f%f`f26;k7ZnrtOJ&*~oZ5D4hFZ{YkXpJ?@$SmrN`_8{Vy<%E-i9e57Y0BR8fV z84`JSNTTL8{CKP0v<{%UKpprp7zGeIQN3EjyGZmi0a>jgeYSyC1b3WED_|x+1W6xs z&`#$KAAUSm!xRf4D*a_UuVZz?NJWN+oj4m=fxOh%P-?Vv?{7s$T)O|Q@jS4L1o{Ox z<^vGB?xWq`wLEJ%!gyiL2aPR{W@%`zPx(&~`u+@OKhJ7j4%KtPbJ?;3hhx0$66hyxJOU2Bw}?=U+2K@>~V4? 
zPSMBkyT1ew-a6x!Ft5a-JwBF&gx)?@BpO}bHwj}xG$*&=2;EUKfhcB`y9FyC+pTo?0!g<5FU-sFvUHZ3~%l3#DH2*A9+x00|w~GFV%RiI0tjvYR zZ~d>`Z&vSA_s_KF&@IEC54^Sv#|t_mN`A}>&Rbm?Ka`&(*}vh|6?t6ZJ{0@1t&scF@c@dF7TZ@af_dGayA%JAmD3b-)cd)zfW@+x7KRV?YRE6{%T`*AaVMiao*G$CSbvx>h(>7UX) z{W|<@{M+c;O8Nyw8@2x5*+mhb_4p06zra4XyWCfNx3Sz}C%1IjDA|!|&-=?tz=6!x z*`>L^3Bx?nMMi*kr_PJbn=(N}Z)>3EntPRrRhKk~3hTMed)v?U^R?GAd;iq?chL(c4pVN$^Nkl)N6w8%u<_}w z^P@E69g1iK&C*xfzh(4qnVwyh3%+t>mojrD0lRcT04XlTVBW|!IJE}O_iyF45qgHk z=z*sWOue4sj#$gnO$qtsLqLE+p-Vn2O6~kANimO7?Su-9EzKiLqC*JkM}*~TV~7%w z2PJY16CU~CO_BtEgMTUjr9x;ae=ZSAN!CB3pb@vsgk{kIs30j%Cdhd?9KOeX;C68G zYi^SL%kG2%)A5%qJUG6OkScbF%&uUV!JouX^h1S(CT>z_|IGUlNzN5!!U!E1p_3Oj9fWTOV@W58vdei zG;5YFUeOe?4)>P;a2K{Ij@icmDsxxYCaOJy;jzxsz~O7$ATuO?q@2PQCsOB^YHUGp zM1Vw;K7~9~b%S^E_JZI!t$hA~@tgAS8HnLjCuG`uPFJda7*Wj&=ya_zRe1%vTkK)+ znX}UvhR8;-z<tI^ALzWU8|@oWPn9v@k(HU&*|wB*v@QbjF@i}; za#a&oPNguoDT4qqdMQ~71xv$4TNP`ygnwl+rY&Q+)_79MZPM*5jjGjt{z?=O%|%En zHH}(1723s$hyTo-%77@Z82xmLCxLU^sohvA+8~lnjK}4j1}8#W0uS#gNd7*5=PFJS~E2St|azMFAa@3 zvHO!U>+YW)JCixb#b~;=#Pf>4+#CD*BjZs~X-4}^H5YZBGTy44TAwXN3(Yl-ZC*Z_ z{t>^%u}K9N*NXmk> zhTYZ$vm4&>n?|lGzM|ucb?@Zml7tkr@2>*+mFL1y)NHa#GYb=`l1KXGuT159X=c{3 zZ)AB$M-bJsQEL{Wddkm<8{-@|a320jB**^Kawl9r?WL^#c;nh+F8=yc<5=w9ea(qS z;W6XgjQC%90QQXfncr|;f1UDoqtCX2Bs#*;M(A)nB|t}kabFHoj}hFR+-xlDVgJ}j zTB>6zM%WYkd7IPV;|A)KgjHj6Uos3mkAN#9=p>O=^?YJY*b2AFf{@+36w^a0FUg_6 zZ@RFIaR3ES^J_m!N28mx!kVy@hhouIv79IcR{Q{6pq%Bd?@7ph%WafFx%?FzsA}6Z{$g131nj2bd5hgQ-{?D*ERn*M)8IL zSV~h3!)?G>y-4g<#t-t!Ng5xm>Riz1?BSSYYOgCFFp9AoEzheh5WFDti9}cNSM#@Y z4HMn_`E0!|2QN#E0DoHYt(=?k;m_muzP0iMbr0>XA84*0I<4q+fed0G)Un)Eks96- z`GRWl77Rq>{lybCA-Y|O1pZFKpE+#Nzk`J}zGzRQ=i$5VdaR(fXa(VLoCiVIrT(yX zn$L&f^U5ZlYCPTpLauJ#Z1>e*A^GG(qo(2&vE4PqbYeCz#uyCL*Wtt8JGo}cfHO%) zaz6Sx_I`xrbgH~bv9zyM4u;$ezcXXJ%+6sWTOeay#E4NQH9k-<*@LjYPLhaHqAuqj zIO*(==`JXT;kqeK*;Ds`l%Ff1SF3gep%IX4rmFUDq5u&yZehWPWKGj+>?R?|t+l^N zpqUWR&ofc51w$=aKWl|pGbVa0t5MB3N{{ZQ+*CioFZP+dn*-bRC#W&A21TPT304wC zJhd?i28(l{EE{1$ZbGo=&-pux?Y}H??|4V=6f3s$qtJ=>*EEfFQ8oz)4$~MY>6k(* z-ilY?b=;PPA7!OFha2(B0?@amGm%Oh_#X2RK{&&Bsbx(E zbu^dPpE$-PwZzyF;U?u%rtYAAWZMRh5^UB7&+D}0{sf*c2nXHnWIuI(eK$CO@ii^c zE`i^*fvKzrURpB4FN}_YfjUX2CY&zV@JAbAW-S=g#mjyoRWZo~KP~0oil)wT`Y9A@ z^?Fv2EbN}oA1-1%U5SuWkI1h}U?*n4!eWzP>WU- zQEc)18TSSz@BdSa-COD3a-W@}c5^*)J<>e}1!DZF;I=k8Op;CTx__{XBar?259;LY z{Lz~(WE5cdgO%tl6dDs%t?{h5ziIaF(>m!o>Bdjv!L2JNoUt3byC!}{>c=aZ2b%l& zWbnwbuODrks8ffzPrxJofoNVXly^7jOCtR##KkijHg20fml%+4KO6t& zzE5oAV$k9CY)UqD0SW8i8TetSL>T>3qqg@t<_&2WsZ2Z87Ty~VK$Ymf3izT!*uEkC z7Ck|T4?oDqZ==4&ci+r+=@-fL-WkeH_(eedmFiWHIQjUW(qti$4&Tq_ZDlUqgC^4j zfQ=Lv|NOrYr|=QxO3!9i=9voCxBsLZO-Ki8Ps1a4-3vdpDWnN$ENPB>hHO0qFYIq1i##ajtFt!=$YoKQ58|eNW)|HEt^6dEkXo{_2~SeD%#R?eoe+rN ze5C%stxSuboL3?++3-aou%vFHSF>G(L}tu~48#KcLerkFI$5Ox+s(jJMK)Xfbg+9n zVwsA=hdW7}0r!40@cY{Qj8^E-%!zKtyzF}dL$PepfQtn{AZViGquweGiiZN|hJGT< zQ(Ld(cSZUXQf3l@H(Vx@mUo2d&*Mk25|S3t7k7u@L_~2)NjX^)8B{ldT2ja;OX3Qp z_iQ!YPBcy@wl)ZUQ2V#Ugh8lvr7+VJBChbvyxA+Gme^!HrhDtkR@B!I;ylQM*qIqi zDwszkZox<*NdD4}RN_Zp$b#U*vQf0Vus)wr`69{khgy+2NYffNbAY7RddVW-BRS{4 z9cZJT1qXa=$>I}S{mNYFF(e?Nhb0MI<@p3PsD|KaSXpUK?F`CCHNy23?5(jk`cSOC zUoPmNt2I1afhx?P!N@5si+#@4Lm9*lBJ-^E`t0s4t^-4XB1L|JF@Swp4uQXwC zl}=VXI4qPyHW3(jC%yPHmwPiJBni4rxJwj`3$ze{rzk#S5NH~*Br;;*M-tzi#U4$- zxN58#vvg~gtPUue{gU`Tq%SF$)Pd1a43?KC8CW!4GOtv46-Qu1;zcw>k49oq7!k!0)P6 zZv~1~7IFy`V}9^B_|@c~n43xJtmJH>#8Cva+`I+yfoL8l2JZd3-}qLcZeMG=&NNJT zM|wn5KRp7UNB2bQm3!U^HMXu}z0<1-6d$l9#%c1z23!`Skhae576@@2j6Lkjg}EFB zcu7;9ZnO5Kth%jqLC;k9X-OJ^V7nLm5baW*W0lV1J$7Q;PtY=kg`S0!=zk_w^PwKZ zn7R;0vvBBB2wd14k%w0tMO_4lu%qRudVA6{;;1o-D8bR9Z){R-Gec&Y 
zB?PN;002=FwB&qJOWi^|yhR#?a5A|$o{QERX5^^)67Ce_Et_}?p~d8!z_B=RUeSbL zNx=c~g%r*51;t%8x#+oKpNvA9d2#SY{$2yB_X}a{5kktn9X3NaB|;-YqzjCedxWrE z4C;NU$cBO>YIrLAxY^L3lj@RnBZ4v4;1Kr#j2k-kDOM;5SDa!UdvlYOQ&@StF`ee$ z68lm}dL3qFZJyg{tmpl;nTwxiFNL6B>ZNB}N%bi|%2_4Wq=QxL_4;T$)|x5IT-VhS z^)gOyxBe?r5kQFCJx07|7Cqi!9?!U&QkU^*z_#f}JfS!5Wn}hl@rGQU%1->CIzA=3 zN-K7roCb>dkzD;g9Z3ODk{{1Sg|4WyKz$EYBMrn_Sl;~ZOmz+A=!6j$kag{t#_-MZ=cV>s!WsM3Czza7Xk8^qE+pi}R@&!U} z9_gtJ2N|<78}0+ v7l!~hFEbmT02^CnuENs)b->XXU}x?9|39!ccxwwe03|Q2B2_P87W}^ev|o0l literal 0 HcmV?d00001 diff --git a/vendor/pygments/doc/_themes/pygments14/static/listitem.png b/vendor/pygments/doc/_themes/pygments14/static/listitem.png new file mode 100644 index 0000000000000000000000000000000000000000..e45715f914df0b9ce5650cd81f826565d794bc6f GIT binary patch literal 207 zcmeAS@N?(olHy`uVBq!ia0vp^oIuRO!3HEZ#7tid5-9M9ECz~A24Tjhjus6-LG}_) zUsv|KjDqY+yxD&zFaw41JY5_^BrYc>NGv&UGh(5%v^3ic2_Tqf;9+ScrKuZn^uU2H zQyfn*9&-yU`CQ%jRM<_jbEVs=+4%-On`#az=vrO%C^ha<()jUzeq*EHImaYp10cwd vh@M+!5EQwitFh7Z?ulO_J-(9;uViOPP?5c=x_{|>pv?@Pu6{1-oD!M<&SgTz literal 0 HcmV?d00001 diff --git a/vendor/pygments/doc/_themes/pygments14/static/logo.png b/vendor/pygments/doc/_themes/pygments14/static/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..2c1a24dc7dc7e8d989ff3c98eee2c233074e3c0e GIT binary patch literal 26933 zcmb4L5DQ2> z#H8W}i39izii@0{I|PD;_um&BB;z{~_$88uyplB1DjY6072L^-x=aX!0wOOZuIaP* z*T~x+zwbWK>RSK931vjMsw5WuIbs1XOb!o5jVhObD-I`(NFYUtZ7UiR?zAVBk$X** zOBXICcXFi8*4%J$|JHlMZ9i9TVfSKTZ1rgMXtYC+bF9sB70%~$y*msE>y6O=`^Dzf zOW)UMuRWPbKHvLws9UzAc;Pu7Eopslx>>E97kqdiHA$~D>10L5#I({)t(bdefU$FP zOBX3;j@MeN&`^fMcRk);ZyX(sG-`vlPyA4y;Klp;U&UVtWTMF2PgLNwweyD6ld_tV zyihQjaHsDzBV<_wJAy}&wDa;@4SEFs{McSwk>L0$u2jl`$LXPdG}{(elm@b2y&}mCxdpKKhdA!nU zO0*+wQ_RlAr70*VSoNLDcJgm_%+$JAZ~Wrmy@1%2xO9CC^@tR znx7{9cjfktVTCq=Ej2n9H9D^aQc`Pa>!E~1N6U9zuLV65taruIqDNRn7S%7*M2oWIWH%tD`TO+} zDsi4V#oMM_j0|uy$q`cv_8&i~e&*?;d?#AdA2am+SY)tm@t@(a)opJa>6&Jue!Ig$ zjX^z0^44}jc{%gW-bB8s9w8{0?81Tqhk=oi3NcMRJw20$e@o8&*GEg<7V|&M{F!#T z^GVu{cg}R;3KHXmBB#1!YwbYvtTT>5xo$VHOMB%zbXC_1HTIGBB< z-;HyzQhQ|zGwaY4(02@yUgFUz(%%cc{}tQuTEL^J*L9o4kC>R4$9nYpZ44d#8==zv zosn;8Tie@L8YP;lj45(Z=$%@HHm~1n% zW%75Tk!yAHJlH))&m`HDe-Wc{cS+NGwwmEEQmp2;rn;Xg>O~kn(`s4mOkpQmjfK9D zq$f&#sfEwL@!gr`M2RDbuVMR}hBTR=SFB;H*HOUx_nT6W?sJt!_hb-O!=^5@7^HYc zQzxfY$qMbX03F7Z--Ck(k1fg1DHJ8e7{hPT6wT9jc$!kUWz5SPe)>GNU`*iYVpHhu?#`nC41V3kgTJ4`vD~>+Y zRFPl9gh5YUvCmfW#fMnmgze+KX&jTfZ>4p-4fm6B76@WSJ7J;PpC-Q$GF-^oevEM; zQ7TPt)9n$j9DWxgx8f&RGV0vdSQ10T<1lY>hv&-x<8xULCT4s6MrhUWp@=%Ajt8CP z+CTh0`;DCre!6GO4pSF4wst=EyB0s2x{?|t*`&#@@X;CS2~s&+^_-WK;RU~p6#_TL z*4JE*2jVkgN$e@#1qp=cD2R~!G3PEQxRSM13S^$J(F>OlqeqQ-FFi0hl%^sbxWan# z!MnrD&Shtqcz$6)*u~pBs?s?IyXWh-Z=F9CKK|E0vjjOBRw6Ev|9!l_{WI5xRr^!5 zJP!fCoEBR`L9=|8ybq39I^85@VxpjKF*!FyTs!RFzql}+ODH_#+WWiljXU*D-$wh( zl?LAWvp0<92oaVEfdw8@xLU-%IQ73CidZvm(71Jr6}XvtLh_|$9HN-5h>UBC+tKF2zlj`b59}-yc$&U)9i_~^_q}L z`EMRGWWhx-=`c%;)^R1Uo4>jfrn>%GUrKt2NgZkTM z199CLpx_|wWS?LUVjy+mxd_Vj( z!zhQ8*NGXeN;KfEPRqli?s2xlV1(CUetqZ9AC7SK(t_mDO5=7C3rEKaMtb^>_m@*D zXv@pX1rTg9vRJiznVII{D9qPA0Gw6(URVvCp1Oohk!h?( zH8$WD2a475Yd{10#Gp~Q{yX6<5uXbM#PsT5#tXciQYxDX>-}za(2fRFfgWb4sW~kI zQ%|?Gv&)z(*Q*_7HEJnH9C{qaf%x+71G1G4tf3ks;U1#h#|QStEbnwp!d85kJicR?4g7}3+$Y3|S@jKAN4EPG)}Px2Od$zzG$ zeRYF*hNnKJF z)I`Swc@=etW@4)fV$OR#W{T96rdlA&4skM*CK*x65|?Ck8AQZ3VEK9k(W1Nhk55LOzQ^E(^`Z?7ZZ zKEuk&>ScdlpB@MLdnJaHVok<9S=JaSs$$JjhiY9z2YY)zv)^ARO#}o5cc;@_RKD}Q zJJ}jY;-rW5PET{aWnd^d3Wj^3Nkc5+Jwt7^lNHpKMXfC4+%jEFy&84c|d z=TwnOHZ9?&i$eiDLzp^0vvJ#vc3_~$ds9=2y2i#64$p&Wm!k|DGc&VHaH|5cv`L=W 
zs1g6bM@UJHz^1Y$qMhsNT3+_Zs@gHt`&+uQpZl9rXVN-ZF; zsJg_X#YoWNb(B${UB>Cb7tDd~?zK0bTYG)Hq7aEiZoCFMce%f$*Q+_Q}+^8Y?y*k50v-MH6+k;`qq z7r|a;vbm}s!L}E~lVGf0velSGKtx>F-r6c3`Td)yRFm;A`D5B9A2I?wpMb#A*70$N z)p(BhzUKg;7=!2C&Nt`8I&nI({#@x#gh6+=w*ehC6I7&}oV7o~!^8EfUdS*Y_Pe@x zcxZyAa`{Fy@L|Cu=#hXb0r~0ai7)WM=ft4V>1b?pl&5mYeR6U&1B}r=di)sUwZMBv z!+;A;D@)6Fg5Jk})l^koBp6dtXVMl$$cJiN+}#bvo85P3SPUA5D~2j0n2veB}Xu4gX5~}UzDtXe=)1QC+`h|)B zFP?c&9UYCr%)(Lx-Y0O$@1$KvcWNEeM_#0)>DiH$rdULLg<8NdavWlnK>v$ge@5|zx!oU1o?&q)4jCwSiV%u<Jcm^@04Xs>~yubJvh4l?As_;c{tU9Cb<=({eKF>{JB5rs@glAP;&eP3iJf^Fw z>ltXEk~4OmnVfx`=r7_nzkL1Lv_6_Ccnt>E;?UF-9Wa z#r?mdW4Go9H7x|#TJ(P0KfP3GefHcIf5hWh5dZ;bK|%4PqIYa(Vq}ysH~*9I-$sWe>(5B3vVAYu!Xbu0=FmF+G+jR}=fw^` zB&)e^@9+taP^;Y;rRB_X^n8G-GQ-&GcrlN>Y;-#X-ACo%y4n)9nHHKwhK2A}#_W}y z-zJ>BPIbYyQ3_N|OHfTGR+UuLf5vA(s~j&^LFC^$Zz3OE{+D&xNw|x?#uGH1D;XuT z$7>O3w!XVdHzRCiVd0^7-71kA4Cs^$fF>lkxaueOoQ}#n*GSh zvyZ^cDLEw%3JO9%K=6PnDYYBx>e5f1eCI06G*6JDPLZQdhvcE7qt`@8n8k@vsA|ue zZSU?b0TP$R=Y4Fb0)k5$=EJ3cfx4+*6AdsC!H(P;9p=hZ@25jU!uod#@VOSXG z=x*Q=I72lAiW;C}?(gqUX;&KFZUVVMc4vFr@ypQmcuImQJ;G1+1*!!46f>eDPx9!# zKj^8J?pK0p63Ekb^=1GXlaq%qF)@E>t&9y0LVvNN z)59zcv|dCE@&L3!Oi67hI!;4D0lB|Exfbv}-@-zT2rVgR{nTd;`E9AEjY?qvCWHZD z06rmMgGr5ryuQ{plQnK&!s|~>%`9M6mXM#c_S0;0Q=>B&S%txCCZVUkfRlt9gyg`8 zN5$cKAY?kw{N#L!a5$|})5XQ7lLo<;7%f~rYy#x|^_-?yHau7mK4{6PXomZm7 z!+T%w^RJZlGj5PsI3I1N7~d^McWWXxQ=L6K$p#6)iQUVhTtuf0ncvYSX~@YTr?Z>E zCnY6u%7{KJ&(87yBr5a((8%f8SvL^r5>isGyg|J?KqMSD<>lpXgVpV%kQqv^TgliV zAt50qc2#98l3_0~0wJp1CGECTw29f-&WYc?`Ke9m;jTOrlaluH3ktGnGp5YxH#!-e z9=D#PVoOZoqe_RNy<%m@!NJi6O*DV=&mY=B3l2OZJZo`rIOS}Sc4q)-S-^VWAz_ec z?h16({FN%v9K@CAVn+wnRiz5u{qW&KAv!6Kq71V9%o7hCUH{|F*=A&PbTkHW$hzpB~1dC@5 zz)QpcPr-^mf6NWrKX_hb3HyzHCVH3m>r?{DQ0sqt-l?Ik{`V3WSCSrokc8W=j-QUs z+CGU^iNo*epiiDUQ3_fjZDCQckj`n1K0GplB=X_X#z9R@4b8~62nJhVHfZ?E46D-! 
z7>);by6J(kG%|7o?s4KnweExI&FRK%_!ka$b1$!kDFCT#O&uK{1MTb%R2CK%wzFv7 zzLoR`pd<*6tBkqJ;_rO*Gs4T4$^^&=ouoj-fETl}q6PIng!~OmzWsgqa0xShcs0fp zwaUs$@)yOX2N$3L_e7vsyOGh&@{%qtE{b4BOLc;ug+l%}?6Wg7DQH+&I-m_#tNHlw z)i*X?tH{aiv84?WAJt+@e96pY(e3c}wej(}=^|v&r#C58DRmhOAc{YY6cSotyxzUI zw%GkcMnRa2xr+=}v|=AoxS5nk;I9smC2~PGon1MyNJ<`I-VaiVE5}ZhQ-|pPV+TOD2}qlfXna z=*blHCfSQ;m5n8O2a}A#B#4{`3&w-9~89(1-%k} z!_(9FVDiU-{alj5KlkFri=;oBo0%R%BkbtkGc&^qRkDwflCikPBb>IDPdc9inclw5 z|Lhc%=UaF&Rj72Rt--ZC@{M;15edm8KUrIV-qXY5VPS18D1@gepH{D2x7ys<&CRX; ztyF>>0x9XYz*?)3yQ0EFL<9tE&~96NFLt)E$VD#aRA?QXoyi6KZ%*OB+8i8I;t>(e za|;VUewWClm(58{O+9|ErnJNA_m%dtM*?2^!OI>s*}mCC&`u#NtVL&0MI$y&Ep&H#eVKUtd?g z!kDundO4E8ZE$dM(g}gsNvguZ!CB0f>-_``>ssep5G*~;e@jnCL6P&z4QS{ZkkgBc z`@yB9d%6G(bzn_gBcc(0DaA#xKS{{Wer_~mWutrh>Q$NsShp)sLD0#xGS<3`7nD-m zmV?P6JUl#Tp4)&TQKk)D+ah6mlC7&MC@2sS5&3u>FW*Wip(J>YG=aZ8vVZ?Rsc^13 z>F?jaD4^>QECyKOxRdo)-}PH?q>Ec#SH|Q3YeqzbiK%RvVqkGm;15{!%y#Fs4#sRt z0Oz@=sS(1zTXRu-;=V+I8yOJ?8GyehmuFT(7q+~f=7RoepP?owc%+p_d z3~R<4w4Eds9;ql2ByMC53m=s-mXS2o57G>C7RdaE{+E+$qv(^(gWuTPY+H&3eo0{G zKq-!s_R1KCl=x66{l}K!YfTx$ag@^W^oKH~vR>Sm5z@SiD=~XFO43fxf5PMLN^#vQnGSAbL&raGD;!xaXa^BeB*FUq$OIGzux85Z?-YYM!fd3iy?Rco$I3%OcQ zKM77%^(JbNCU;A*$>Oee_5X8eviUi{f8ZGrFGdtVF?AGUKZ*6AA z^9<+*Dq63qElo{NNq@4F))l@T@lW1{JQZcusv966A>BttM2NGQQn?>Ym(VdC+VDCq zT8z0G^5=Jyl$5vuaSLx}EZf9d+^&vg8>WyZ!UslgtQRKUPxjxxe`6ydA~xUN-Tj(z z^}qtv9|jP7xw}oEiwk>;_er3AUkrf?E*_q+haj7zq$JR?l%^A9jdErv6qS^&U%h(e zJxTL%et!NP1XJ^jIT6fe--ag&^YyL}_sq;pf~%4?T3Xt308<}a?=JTt6*AJu-P=QH zod`dQSm!u6IsNw5*Ik-LlBxkQaRpT96VPkUK(ne8co93h7I6Lg+Jwch_)JoyWGD#8 zBw_XTbAK+zL_0CMNFarZ>7f~mo9+_@sA-vOuf;K!?b0VW*s~Ny?KKbfcC*y+W>#`8 z()~zQkdh-ZYvCoRj|qh{gHE^wFX9hN;2JcCuZBm1^4(!P`C z$RYYEv;Tg*K14#5{=fv*>vzH%e}Xp&aHnv=XWWkA*lRsKJx}22D&eC>cn1as>Pe2% z(JLqnWfv)?*O&q*GBX?Hb+njiNmMA;sMgop>%mG-Z=?SzRKR@)9rVi&Vq-x|l}4?7 zTAp=radD5^Zq2Ik1%Wi-$wHsR02L!UTwhmJ{${Q>t5~Ax<#TgtW~8LLH&?aRcb6== zG||%1qWzT`C}LE3`T2NR+x+cqZBH|7Ou^kLk39VR#6W5FNEootZn^3k7}!?>4s*jC z@9Qb>;NQ8dcM*WLxqV<_WmU)on=XhWIgB=L|4?#}#L^2WELg#2DmJ#0;^VW22Y)Fk zsmmZe1WRjcS*B(nF#Pgg__}wQOcO-1kGoJx3t3EzKq5M>r=^t>q>URPM zp~fAsUD*Uo+ywxR9nYE^UA;`d+3VwtjqbfDduwa!&bo+cus3Q=sBu&L=m1_}0XTnU zwZ9)pT%4`4)0Z}J(8PrdqS|)t#J0bC{{-Xky{HNQ09y%_v7ZdruoG1I4i zQ*FN!6Qllw_p&(?;u|~Ky+8Ui-*i@0A6vIE@#f8&*2jm3VL*8sru$-nZdMMv3qeya zxeNmcKwz5d4h;n*q^gQ-1?XgdLPJB(dE7$QyLi|9-%edC*~lI-rj#1+x6%zU%EXbS zUkMRfGcz!tq{&g=EhVGW%2~ernukEx;@?W~FC=SNYp-)=k&IK<5V;J^?{{T?85i z1_oYvdHEmD_!3RV+A^K0)%oi8vKMEYCWa;#iqw{O8lw1>&0OU!+kL7TCc{EB#W717 zQ*U$x)Kc68Lh$gVV1*Ly?_wi)NGGMm>H^Ax*5px-vo&A^NME5pr$nm|ox&90n zj`rbSB`7qbNK~{84BA!@k5GlufZ@0Opaay0TY%f+v!Ya4SXj21ht%6~JOe(irwb~- zfB$|jt`d%djg2iGJMY7rxylhK6^NX?eD%zQ>;Ry&9NAHAc(}Njd&UM8+HXPkSFVf# zHm|7rV#D6v9_RBFDg?A|(S!TP0(AxqEG#UOYV*DlVZVRqv(*wpjrSrN>r&m5Q;%qA zStyZ-Z+!%Us&-yVc#4cquAvzYN1y3ruJMxW2Vg=RQm^}#f+LkpUO-6ZRSdjO)Sll) zM5N&sw_sC@+bGgcrRLBXJ&qwj%*2vQHO-8>&qxOt)_6VL3@=YbKX7qFI(O4e8(7CP zd=(WH8@9H#&?n-0`QzUD`ud_lrul-bAMdU@fcnoW%LhdD=YM+>o0?kB&E6-gF7^5t z6}6e0d|VFmr_I7Ty1Kfw!J3e`mQ3kPnL_k1FanRp^a%kkSz&in(#!gUCZ7FJ8rIzG z%eyjA3LgRiW=a67aEm76$ao>FG`@}7_VclbJwFErN5QkF$yMJg69>I!x9v2bggJWB zZsVpvBY-sW1^_VOR2Bnm;AcIK00KVuuuDzzy79fsS>-pRT+IWI1pU0pBZ>N5n+V}t z1kDlgJzK=EYDZb(&zB-Pb@w8%G9NY_e~x_gvf1}!e#hh5AglfFA1R|{3#@@<51M!Y z)wG_$eY~*K!cxlP=mB`T4A*Vge`Fs^^?(*y^$GX16<SNJ$+&S zV5W@qzt)(7`2-YCPVeJo;=DE22Ac_UHP7oQ8g3R!YC1Y+r1)2nZEfe!Ttn7IWMt%z zMl}`#oF)b^b#dAxE%uG}im&|cR;{4XegU5oi)jj~uA%W6eCeTqsN7f{Fv;3Rk-;{8_lo-u^IDQ?1k4PT#8 z@%@WObhKT$$X!35SOxrh=j2~}U+Y+&E>N!vE1LAleVUOd-!dUbzEr{^;a)y+a+v2R ziuL(urS$t;N^@el=s6VS%UAYpws&TODiH8$yCSKqs_G;jNO={KJRf6Y>qI>dG$TB) 
zu20rvfipG^JSxZL=4~CD9}Nv6{O^CE_XWe%kNHS2M5Lv?8l!fG*f5;;nx9oPp54|j zsDZA{?)7(`8-SQqfD4vJhKKPNyTxfJ0679R{0OvwDrS?82M0O1-%(?k1<4yQ^bA3o zc@u(fo*xN^uDGZ@2^JIeqlyQ!-;?TFD16}Ysu8)D&Kv18IcE`jVPJ1!VTIWJyV%IE z)~azaJ^dg3TgoG}x1P&lDZhtx?~S16%Q-Ju?bjH6SDRd4gnxM`RQmR2f^%vfh~Wbo zYu$%OM;OqO5is0EX0IAu$fB@BZcHr$UTT1w^Y05BSm!%#b|xew?46vf*##`TX4Pup z?(U9CK!D^k2{-EV-CFDT0c$sR4*bu#BOhEBr475?fygv!64SuHHjRt-4C?4j_(THOI87cg~1an5_qFf~X2t)grffA>;kQhO8p z0jDs^P(H}ks&G^i)Y5JYRWf-nl10lf37bOrXznaNPk_dkyQ5~^Ov;_h_RZO^p-C{- zU$GieG=kePrpCnpGA*+z3Cz;YuBCf!uCe+lrU)>0^5WLvF#QS{p!Jda`S?)J)~T0S zHNyw!Uew?9iHrLD?CPR$pU9IA1g75Lt|wNAm?{-M>X^OYY>U;D&s0v*;TJSa%+z=w z#0dvq0o-$y0EImPVeJj`(B0PVu8!{5r%gT{;PH8H^h8Jlr-|E+ekL)Q`No7O?jU#> zWhuK0lv1b z3oLUMNsGXaWOoR)D)7}+&S&>ctHyveLOm}KfPrtiUh!TN)~3f-s{2gK%{_Ct+U|#8 z(BMFv|9Av6K`CH}f6H4cM#sR&Ri*cbOgAMrm6b6$t+Z&xtqTbW`NKW@o;!H;0*o}9 zSG?2z>?0x~u9XxO|CsbXu6l(|MjiLw0aCXHIXwdx?gMIND*Dd{GE&k*(D92qO&L&8 zQ41^d>j&L{`t`ol03N3Ap1o0w`Q;9gB`rIa2rH%1xuixc`k=!-7>a0i|8juPK*TY( z?M?oUiR01Po*M6D(^<;D!a^gJeKr%F?m8VrSK$?UXpD1we2mQ-IR@?z-?rv!EQgd@ zyxik9U4TF0_D=+e{NJ*(*Pzb$0WZbG9gin~A!pNGCoLx@hkaUq(-wo`Qh*U zyFrDW&Y!0nKexd~EZOXje|7*X)7#f)30w!pXw=+922{i9VXI2{#09sd21m=!)FL8J zuVCsW+1}32z|J<#w-o?;EBs3t5aYIw6TW@3S65YCAD!EJ^&&#zEdH3@hyq(z9eQh% zqJ#XORc&T%y=(dYy+--<@jNK)_$p5M^vavBTmD2h-xvh~(5LN^P=qePsxwl><;`A9k-rC76HDdr0b3aVRZQKF^bgMyv|>QTOqFYZ^Am!Esam(AF1 zG&n9XA0~dRWwhZE5ByT}0@!YTEo<9u{J6Nd%RU2b0u{vH=gaH#IP23jOWrrKO}0 z5)%{Sfb9xfMSf(;@zBG0Q<~3vwEr37! zddD1}i{4hcV|Wg8NBx*qZf-Y*kw9VBC`bSUdm5deO5Gdtk6JS`r# z*$P^#p{6Pf7|~B^u^WlW#rCwVTfY0-77yui*tY557MAtlZVfAFISnV8xEXG1dO(pfx-_)su1amp>v@D;PghzFWc500O4Ow>2-4I@e zuK#U7+-DHY30eBr*_&91Wi@0iSFd&Tpc1*n<<_y5{KXkm0egG?P*ICbvDb;~rk z(L6K#@zX#t47D1`SeY{t3B?Wtl%NDeuQr0 zccx6|L>UTQEK~R(G&mH4ow!hEI|aMTbOp-v1>j{Xg06phgBN!`UtC`Hf_wA>lJb+f zjLa{uW<$(+{m(ZyH*6A;lJ`P~d2wu{fd4I)byn)j$jCIl7WCqRgKC6mAwf&(kAvLZ z-9^D9Z1>#yoj|;}nZ{;P`Hw_061{sMiSAw~@Zn|`T288QD5V~1HA?mlXHGq-)Lf&? zNcw2G1E;}d!(bdu!fx5;tHC`Lr{x5;@7ZJOSfKRI=n}Ok(9-?PtD7dbjZr9jK~gq7 zkqlP?ACQ7$YYDSv8!!44=me07cB}O356mPaLgzrdp@IRn{r5G>%%kxvfe*qW=U_PsQn~EWeo0;W4mS_~kIV?Aw{d;=6Uo&X& zD3Jyk%jm3s7FV0`qWZv4+swou
    iqU(V_fBwX^d}2|D=Bk$roABOZOF$hQ9i8_E zrD5vP`FVM))n!@{2yc=C-S+zTcOhjcl|?}W z=v;NDr>Bt$sm$*>ea`awz>2asEjO9D?u}D^5`DV)$x1=dLOxjwi6)*EcuaJ;07Ab6Rb?g^5wn&$<5U>*EIfUG4VvmO=F4RPx~A z_JR{MT{$(i1XGYNe8Z7G%>4H4&&16p4wiUzC=D)(T8XAK(3(Cvqo{y`Bp8qgFkn{5 zL86s{iD}5N^L+r2678d<+~i^EeH8|cl@{QEosm>euCJRqrz;i$pUi^|gx)+x=ruH^ zGGDwzpMMk>?4O);Q?|0&v5ngtd@0!1*Ov+)?2Q67?skc0=`y%?nbGh3wQB&z$$<(f z(N;KGX}x9xzN6K^z(DO45(FUaI9prWGH`kAl;q^zERleWiLC#E?+AC~> z@!vxac76>ABtfn&23!4- zdF~%@@j+08-D>#jK8Q{% zWm=ZJqnX)+dWE*aZK^6iXqS^2-1a->Ae@<_!WE8fqv-7fkf##l5^g*VdysI3$NCa>LNjaFkj1hpB@jZe9oTG%NtDt5J1rt)R&Q_m*38x_*V%=lJ*&KQJG% z>k(ix>cDUo2bmQ9A`pM&l%`6MFe!wnYHA|X>mPw+%HDq(nLfZ?`i6#oOI7L1JugfT zo~q=NXzzh&w=;(ytA1;-y1h9YBfz7NK{)e8-Ls=)a!nP6dXCq)YvuXqg#@vN<;deA2K*LA9UTSXMYK0feDR|lx2-{;0gfSGob>k)$i@?%ckoyRH0@L4iCd683^!jm-5EUgrcu%3qa21 zaImC-=}I|EC>@i8tJoBr0;&M1;}$40VZKN=ZL+!x84{@0ATUailt4rRi88#5!8yC~ zmtmUQ96$w3+H0L4K&yYag&d9z>+B3t(b1V>k;uiaON9PrM+Z&XSX>vJd=Lko2d(mV zqP?S|$GFVQ70kz(7<;r7C?dE7Fg`wKg0nwAK(HzyqYyg(dbo9Jy&IGmH$V;bE zZ6>iB1;ULqvze^Ip+BN)=jle7{j)slOJP;8!qk$Yp;S_co#la*3(dIR^N&Rx)frlR ze(M&Vjzt=lRo)Z5AwYmyA=SF&M>e(~H&R2ZlI8!Xm0}HuA~qr6EE^?d?(&uqhz*wG zyp{PrJreW8e6t>gW#i_?A@X=Rg$F`|^^QR3@8IU)88su4smyLS==@0$o=ikaN(T~c z`6t8aoPwYUvgL|U14A>+51jaFt&jHXGkr7kMkxi9+sP}`oCHb)pOrLKs~P^r8Vj{Spq2xJ67 zsP?`9K%jt*=coHP;3o;GFWsjmC$}--qYe$O0Ob91O!5stgzc7QW|VKkLA3S1oAfwd z_B}gVs&5SZ&gU8$c)Oj}0b0wknWd!`vRL738*B+v8=H?{0_x}+vn1&1>#t^UT8|Qe z`~Vg>9>YYHAO{X^=~yL1W{9Dc~z0ePvvp&63pnL$PSXW6d@#xO}B zgIc-`&8$5I5bDUI9VwQiEthh@+7dOwCFF!?4;N!^kttD+a49tk6{~sJ*_))18HRXmY;Ao#o*r&r*!vi70J9Vcgwei% zgB5G(LcZrmcg))r@x#N4Vc;+$W*QVGVPLSt4pOhJ0JZi3qInoF0?ah?!DNP;!>Z2J z_)nkQxw*MVz6*FBOn_;e2*CAROR4=s=;CgpUYk#KCJbiT|Me@;=+cs(BCu2RG{Zwe z#PvZBE0FW0Wn?*mD+@E0(V^bC%*!!$oo(83@^Li#l$!wNXNSGq<+6<9}pA0BS1 z`v4m7<3xWy4s`G#@MeLAb5|qaJU|p!UHhS>Ucgy2piop<2AHz4^4zbu*Q8n+?f)^J z7LR>+h@6s==TftK(Pt3Cwlgz3!-S~R>nm~V(*Vm$hd46&9WQS@091cpOUMNpWp^&p zO}`Y{8o6_qJrt%ZX&u$m}ngv>58N6W_4rHAOH0^Y#& z^!hIpd3&p}^ubXMoJ@KyWff(z(=E;CTJPrSaxTAlB;<_#vOVY>GUIr+LW78`>b7OF zRDQfhOIY#TFS488_pns)k|Dk&VKN~8t?z!2$8;r)R*)$Ei?>r9yoMR!%{UUP(9p>Y zR^Z7*x{>V`*dKrivIMsd!^RJ`BlWc01sErUP9=+e+s(Im6A+I!;>`Y_m{-!O+=pbdOx*%0wb7psDg29V;RU@%xHPaCZGI+}O6U z%)F^z&9@sy!cDJvVRWxY@9<-|Li*B0LN05IIPYc^!C|p(|9Nt9a`5{SS%3G!!tEJw zcJhs9IFJxiDo4J4|1M-Oht*xfOcO3eLq~Uk5!Fu`{b2--MYrWSKTj0%1g`&GioM#5 zt?qU=PhC{szbXMV5&^>TDY?-$?f6#iBVceb#eVrl)%MWbV{K4nL z4*4BA)|tYVN8RbsPE`**$iSVr>CjIY57}!gICxMh{=*0N;>8r}&}))RqHpKLq$W#n zQnIqnvo!P&aKK8`31nXHeOdd&7zm8oeBRLj^~_RAs+SG;qiun}5oqvikK^ff1Wrgk za6h=iztlwbx{`qvBtJvw3!`HY-Wpj!OZ%VV1I|NBMGgG+{DMOk;?oU{l~}} zi!jR2Z_GtgXKTdpdbS-odjCErS-?X2`jbYARDH4S?RCbe8tT`Dk>x*tTzd`XJAsnA zp4C1dA6Hd{rgwF9)q=?6_I!rfS9oxoFTLx+^9j&icNcZ_#0W3U7)9EP|5{Sz@mgmm z-RZQy4xYci|9@FPYA&wJ21Q!V$=7Kg`^-UZ#DZ$=^VzyQ6w9hJ?gfMh{e;ZC!&BSO zoZdr^XUjnp`)v05J}{Y&+RM zKF-R?4iKx*{sf}C-sk<~LFimuT;;CkBVfv9yhK1?#zaSt9UdL6{8Bdd^XGewpOF~O z!ts{-;2f{knc@&9ekC8mre0WrU_g8QpPolPR_=aeMa#7EfwH`$R2gc>()?}EEFGwe zCI_}yYpkG6lTc>AGReTfQ2ctTT+|Xx<%^WSqF~t2qEcal2CW5+Vo3eaVq+nP^HKu_ zV2tlt!A!CSd|VM6&?tk#GLqBN<@w^uunUjY*MDr6RdswYbG`ZWiY*I-Q(FsbF5iV? 
z*XuH;Ha9oZD8MEufImIRjt(@jXHGn3R8z*A6oy|-~+BBrXEO&UGN1x`9IYv ze&6oi{7hTomf!nTv-Z21f^!)j=Zd|Ga*9V^vN{zZ?!fqHXGX3#1+Y>Wm_a>73n$Y^iU>Nh*hr>1qlTY1&4mJa#@ zO?*c-{tX5gY1`_r_lq(SQ)>U01#rks?g4>^fqSrFl>s!J1)@ZjpB_9s{QAjByh-4# zrK_6SMB5;4n%%`Yj>O>kt*xlN29efFLBZDFV4|h9#A@E)h6@p!to1CM zS%6`L8yQe1XFMLrtL8wqRd0skL?+J^ywe{A;P}Y{o@L6(m=sLlAoOxiUO7}JC-j2^ zYI(fo6!kad_nBrR_>tQAOM?I1=3UN}fxIfj)Ydk=!kJ`hMjM2DGyrW8`Nr>FxCX$Z z392~gtoZ=k^@E_Rg}J5W)h8*cXh2dew@aDdw(z`Y)JC(=;{oRK!*6hKYB3fdxFnEh zc7gbW{^_tP--4e(3zcySCt3+F?Ikt0@c~}B@g@-gFR-@0>fT7ZYDLxAXbq&y?{f4&PPQXD6 zw~VZk5{B}MiWw-(%?}`OFW@6w|H2Y|P?ZA*muNN^{`*%5Gr`0EG`veS++@OLr!jP9 zwGO}QlyvcU%)fVke3pR^<`8<>Lx^F*B>w8x=dkJ}ZG2kAGnN6&#bMVzPpaaR1e*BBITc`?UG3E`0hrDmB_(C)CR84I^W#!UWpK*<# zDe@{tT{R(uci4!x-P-2%>?p51r!h4Lgt>-9yg^iKajhc`0Wp}a__@hxf42ISmbdQ% z61C;U3FDT39kzB58wtbyZf$E`d}&Q|$Bd0;xR~p0N5E)~ZvstG4;duKM)Efk+j`rn z3?%rW`1zKgi0wHl2sdF;=Kd~sGqOJgdZ#l&)rDpZ58+Kc8yiynuO|Gw`dB(JxK@QOfP zaQtW-Vp09>-8&0#Ff$Jr`B|zZnzSho)PKXUM38@g*l?5M5>M>w`-e&7Qq9r^plu#~ z0OFcGuq89$if^qR58jH3KD)WPzEf35`w1AxI4Gx@fPfrLMrYvKRYMwPcdo&|D(LU| z`G0L)cQ}^s+rN>m%urTJlB`6i?3rYR%#4)mC?(=iM3NOHB;;#l6`5s3kx`PAk(Cfa zDSQ1s&-?e=anv8J=N{K}o#**kPs7nNm#)dfK|0mmF(@NnU0w9-?T4e$Gzj$IbhtoV zr&-<9jr&x;`@*(4F1H&NbzB)IIU@(P->8{9md^FN`KdpTzcJCNjGrrIb<~@dcsIE` zdSJh`Zm!7Y;|zh*-nCj=b-C`R>>|HVc4@*p*-An8}`eSTwKnK4;1otx>|36Eh-TFE=sy-0t;F zZ@x{^t;w+)rlA!T6`XMCTVkGD6|>xGmEaHGWx@VzLnjqsFtN1{ni?Cw^j&F_x_iU2 zAXGO~y3nA1Frm`1L#Q6lLnr^Z|9kk%N>W&`Wm z1i7n`iSxOpMe&BF%TB8M&pAGC_;9Huers3a!i&|GHRE&s40KGEjodw?E(*e}dgM%H z^ZR(?PYTEM%Kx-_Pj z58=mqSf-BtHaSIFgB#f#%Qpn)$d%k8tvTDYii)K`v<8b%QqL*7_lcU0!0NKvIyz@1 zIYk~!-^`7+(();IJ5D^aZ$OxfkyMgAVWH^MF!}KbmD`^2O;&Z*+#=!3{?1ow2bPCh zEp8py@M9(z9=KiG8`6?=nVq9)+hE4M8B!SGc5;hpim)a@7uleV<-CjE>z+9`t`x(W%5fjDt{e`uYMz@ZMzpE+oBf(2r1b%Vqg9mk zy=i%+w~hl#n`M8RB55<@_XqwPQ1F>~9qp2#YMOW4E9hN5=|$pHDj!kzY3iG60>AE* z_63N>zYMOlmHoi|~s7`EYdhSQ1`M4Tja+sg=!@ZzZBK z?!6`lNgHc6Fu_GCKp9(k37$A1AEe^k%KC7CPVlMAMWscWu4cPKIJ|gi z@9TD5fu|rkNzQZNp8dnl`?m>*@K_<}Ug!on79JDBGWGA@Wmm$^;)^$$)U7CBjLNIO z4&{1~tuM~kF_L-!gstAXQCoc=+I6yf!^yjZrI?kDJ|yU7kO*hQ_bGL1RSK>bBFfh} z%eAe1eQfVI9QdM3oth(3SXyvr_ian-+$U+Qj+e5J$uY3?2!<-sJJ!}J>$+)J_v+dP zykGqLCpfjT^6G~ag{w7}09LAJvXN1H;k|qLsEJA?yNsM1x?_rp=`}Tv*?4$P)WRp9 zRa`8c2@$1bUKjt5i}It;A4spOL#@3Pdajbk(B@36%ul()x^rrBYKjUb;?VZ-O%ro- zUlyVldbM5B(sP*}Ws)^Zl+Yc$hg)CH#KhzR_FweW{mCdKhl0jsN_aa{~SH z;rQt{;q1S{m<1e;q-=x17hKu&12Imt4UDe&xw#ZpR#xxY+8pn^b>%%@Njoe=@BmMe`-NMJsO$V?@h6h|!JVeLwIHR*y&?9c|Y>F1>>zw%a3^@x) zP*Tc5{r5C2EzRe1j?vC_{~eS~UiE-e!de}Rr4sc*p>cYKkJGN!R|bOjWC8+uj0Weu z?d;OSCK!4;Q$7Flnq5V|P*tk^3jRXyYwYE1O&OSMLB!zUw$}E!Vxl>K;>2=wqTHUR8PM!Yn zB7pnQpx8ufH7O^eRe|;*ueQx1pX8KDbhT~5!!sPCVN?|FmfhEHAFni1d_*?sP}=EkPNo^#mK-gGbHcPKJq86q=X%YBV}|I;p?ca*=?{6 za}q(>+53j%^#>tRrHE^W=juK6wlJ#9%*>BHJ(VH4nF9t8vK^L{6^EpUCWHkkIO1{3#yA%{e?zXN!^QHuRp z^Z?f(C@#JgMC%Payz0vn_w0aoj}vKop^WdiAae%brIwRZHf^FF4NWk`>7p~#(uurg zCMNM=BTJ4UhT6;yL9j-D13un?+f)%^6O@zVG8Z#@gPHz2xY&)#nhyg~E}uoR$<89L zOP5x+5CdE3=_z=dnjx7vkk&cD4|x)%D0*;ZX~wx@XSuJO6k-$ix_Hrd=Gc8KC0VFc zwc(krgYoW8LXVO( z1NVLnTeY2&;g;RGbJVDR!Eg}J!F;ePAGNtv@H0J;*5#rIV}NHAufm*RVBiKzqTXR% z!7VssvWVa5vH$ti{R@7cCRV{1Z#3)Z;+ncveXcjjwYPWQ@9(^fzrCNR5f!i4IV?26 z!ET&rtDKw8{SsZXW#Ps{C4btJQ~P%nXW#ff?EhaO+lS-UeZSUwh1VFxMJersrb@-e zTc^&+8mW+4=YG$RMUj1#?RcAeK|VTyw&@{=EtK}EpkL&m9@-qAm{^t(G8f^vsh0js zLQIU;U>D0%DoRSxO6x*sVP;=KKNO}QFaKZV%a?;g7v&Sspsl~y26wf+6t^aT0G_PD zq>B@u_J%>_ZT6z9Y(VBARUH;%NnT!FzqO4GBY57CY%@6~ z5;zTn*y!=_-}@dLs0K}?7@!YFX$-2!AX~&+|Mo`;r&u^RgeD43UJnQuhAes(Mpex} z%WI!-28Yw7>k>h@wZ}z;g$o8JPxceU0R+S?wv5ijV!DZ<36k{R(JcptHcV#cbkod& 
z7AiuUqcp8P#GJs;S91mSNl4&TSFMm~(J3JN&2xYLSbXZ~v3Pj@ekoVOvtyuHW-F~{ zqH)QcFY`WGSz7)A%WxI$#&h(93OKVKkeEVPV*;aNV_9$_j+~d7G zH~J47)|7$>1k+vtK4ck}XnW=A)pkFN3VAiUme$rAZygl$lVqCDgCm+>aRtj?+SS$N zh1&ZS{+cJus*Y#5Y2>3akgohE0(P(GrB z-ogCxr%xFk>Se70DWh3M&G?+q)YO#l_4DWJWPbiAS<12~DQI>VK`2op=U0lc+u}n* zX_%E#NutnIo7)D$@Ou|eQGcM8L|TKOb*;L(y3|ijF4R5-KI)P2aXDwOqy`Ni;dL_k zh)q-sR)a*;kp*^ITpS$lAot@uF2puNPC0o97dsY%Clzshb-Mh-)4V(<#w|DRK-F_( zqPCXoC#?Xh_Z>&xC{>*lLFAfDA}3}T>uB@y1sQ2@ChuV{jr#O*?|kuk=GXuHZc0z` z84j{j2oIf+qu=RnVAnaNnio89$atveS-6d4}Rx?g2ERn zOFm}i=oG*R*H(PkQA!%WewEWOFvR~@cfO=g>n``On}fp>5V)&mo{_L$Y@W#Ir~{A{ zeU-gzF=>&7~^Z!ju4+I4f zBK!CM@tgY*o~p8|J8NGK8d(*(E(qW2n(FJHAV4!(84{q|kKJVfW(-5h$qaQ91!33z zSh3DdN=b-?E~%xZ#kp2%Fz(*HZs|m^?zXmkA*}f&bVmdMXA$+Kv9`qe@xKOBmyZm> zi2QP1h=M=5{2G>fVyJ82fzySLKA*gqzH+<|4Wit4`GJs;iZQfV{Zo^ZRW`Mpy&WCT zqwtzh1us8)oQ9%r2YJ&8d0zc86VU)UadGz;`)Wt&9l%5z^&THn5S5fBXmI^o+k9YtzBE%C&F0bjCY!X2|Tb|?0H`XpVF znfdm0SC=U-KmSHKsO;M>NqLa6d%+=m^Y3_jSZGK{9jM22|Hkr8>}x!XL$H^ z%lw0V5pP>0BqdMdFZ=qM&~3SSh#11i9%%khP)oLN*+lTKuE zJG^?0dRD34rZ&9u{d;$q{Ui__p_IK!K2>qYE-28Vcvj`ZX%GeZ6@YbE@O$<|aBgm{ zuDN;t0YO3gGIKF~!MJ9IbVJ{3=t(9Jh;Y>2(^K-ep5B(~K5;3jizh*nU)~*C!A=On zZm`3B;8l-Vu1T+@r6t3j-Me@C)%X`l_?+r=8d%@{fS=s#A`5{xE+$3_6|#G{pV3S{ ziXF~EMKzs%cP}+4K9T^3{y^0{O1?cD*F%WVYi(-UcUnO2_mO}FtI;bLFE0H8$zFix zFp&HfZS2CHaE+T|W9MIG@11{ggfvZamLRa?WKry;V00AlJ9A+FehPp8)nt%GhNn(t zOeq)*UG$Tyq2E&P+H7QP&0Aocn^syXYlp5vIFwzUx5)=G2lW@0;>d$_G%pGcrdl{0 ziMJgm#pyJX?*5vSe&5lKvgP%ho%8Q$ZziytNSeiO6zS7eJy4+EsF0QYL)f_x= z^5iI$N(zjGXR>q9@=tv~6Qw&Y) zu!e5jSFYnxo!2+MhG%BC)V#|2Y`%_HWf_W?APp`L&HdNvVDs|d)hVHC!dOKn{r}CK zYIg-T9x&Foq`2P61Ls{7%T0vAq;mtST^83BVv{;}(7C+4+;i&qO|5$?d{TWSl)cKU zJ9F&WL!0G<28|y{zEBt1{c59wzjjT3&tzXU;rmswuCze1H2#d1`IgGP*4M*;E(C~@?|V8Hr7i)S$VLft?gGc@Jr3wMUK7!m2|_M1Ti@|>9eo7bva_=rK(Xy4>d4yfM}Sk5E3G#)vGJB6z|au0D0UR6Yfm3zxaD8~`N=Bi znPK^;B3npbO^~l!djaJfP>gngQY*X}AG#XJ!aICU$;1&6^P*n$f*M8T%`dS>T@EKUjJTc`ny z+Wh!P6Z1wFen$zSsd<>+qL*#?_H4w*xAe*jypvd9yE2!s{_f1|nGmOVep28M5``D5 z`?Ua91;arKjUTVfA0@=IN^u(In@MIfl;u+g)e50sBa2!nI*pg|gDoNol{N z4e0fa-*Bmu-Mp5VSM*i-X6{a%!%7?j+2Kha9G3DLU|kK;5u)4{cIm!;wkybuQ~n#F z4Z|`Mmye*a)35#8PzWdNNmEl(K}>Ue-4U7h)pxP%5)t`Xm+V&ys4E7OzMtLSvG=wk z@*8bxdNwwE6ac7@j`eG-DJ6fvTyEjZJ%7BRK4`@)s|N)OK57kZL{%>tVQb>2DHJekw8TDEUR? z6+Htu$q~ZB68`4y@l1U4EmwzOhwuE^~oXLc7uAvvc6;_5_AiJ*9io60*CQyZcI9{;_qi zG#au@>y7Wkn%yFmLrRVZ$m#E3B<$ljbGT*JlH#l-VjRWZ&8 zvAE{C`ugt=(XDjyEEhg(6ARan7K3Gi{Le=yh>hiTlE)K65ko60|3E~d4l>@VZ$}RH z`Z93e2;7H~oOQ!dL>3pBi)|}?_Us+mdhF`qv5hXh5F-4(gWOCNICr=6Hc^MVN_(;a zlnY{U5s>Q3l#C(>y2s@22(YWGT3EO;j>$v@bNgH5HC}&w@1D1ixcGE-RMfWCK`Z7+ ztrN%rG_n1m}-wMzB^Zzs$7ZNJMG{z4?h*eyn` zK-dojrKDKU4WRyMWOO{5EZuDWaTHiN(3KOHTEs11{5%Vf)jKT5W0=$aU|&2xlHyTq zSx|Vcq$dKgwB<#oQv_LxF?u6);#Yv`%WRc}_CKl*e(j$(C6slQbo}Y!) 
z6SiRNB!XrrBxFn2=?O+6zD8w6cW306^CIP18O^)mnp0d#EipjSqm3jd(~A$ zJg55l`m_;C5b$MS-~+G(J{;bkUFMGyL5SvksOW!X?KbNkrLKv~S6^0DZ3F-u=?3a( zUTSG02iv-9$b(GYf0+>227Xk{AT=RQ8U zHTeG0r8(Oxlc!T3hMwG^yJK@v-@1V2<0NspcCEowbXWB1%Lio_vv>}jA094y%d>LD zZbpnIB<9YAa=F_oe;IZe(yZMxHf&FpC+f{r9NYDTtqMksraMDYMmIK3eZlty+&qyW z!8h(*`2x<3$4jM)`-Fvi!3i9K)5Z!A_LBj%J1Fb4&AF%J+!dffc97)+ z(5!lKsQq-#3c)de@7XBDA9ngHS&4AegODx!yL=t2$q3`6)UMB;@j-`>+F3w(!!G~+ z{q1r3*@v)lxN&@5yb~2QaEry5$1-~5PzYK}%5JwSqAB{D$7}z5ViCRHlB%(L>%_^j z!1a|b3v+WiUOv9F^P`Bp0)^bRftA8U!PoPs8<4^@zU#z(V_*SD>=H;I*nC1$Z$>2zM# zS+wOJx!&2FwYPtL_44I!Pb7<;K23Uy+iz`cJxA332!?-p!fzi^_#_fq9iN-)Z3}Av zd!I?d5WGk2XJP@BbfQ}l8nOmP>kn*)lc#_D{8_<5?2?h;0K!b~?(TjG3wVc~@Fnhu zx08T9adu-uD_6O@s;+=o1FMUak(Ofamiat^Q zl>$Jr8+~Y9nFLJ`DET-}695UXl%2}M^qD;&FVF=h0qN+$tx^j9hmW`cJpDC_Q?#$2 z-%4L^ue7-sXXm$XkDQ&HnjXPk_Jf{XI9_<)K6ML=9d_JTqT9zycJlCitqp%uPU0Bo zeUo{oRW8M(o`*H*j>`#@@rp5N@47*QQ&iQGn;peDE&GU`iTZW{TlHVoDRa-63k%g$ ziudli`#}47e<|}_-)(4#!boc)0?$xey78`m0-v~>s1&f$RqOZiXWSEyp5oDqa2mKB zPIgz%?&RT=u)fVxI>yC&%=e=o6g4)`1iPQs3ov?E#HpwF?wcE(N*B z+MD1uB1sU^^MxubtWaXA{14@x3q5=}Fw$csxwF6$DbR!70q_3-M^dM)e|`k3XXV~= zZ~7u8CtZ4B-@1n}H6Mpt0wOW0Rd#Ws(;0=Sh~(^Q5A}MOsHo^eV0@yX_CBa|!`zr4 z8OT?HZv^qP0Euz9z>HCdZ9@8%T5RV9ncAeJou_DmI}SrAtL*7H+ZQ0Njp29=Fr&LF zT_V8q5^D*r%STcotR=2CO6e)t)df)_$-23b|Iod7R!{oRF0$irRI_u=oYYZ zSyJ?AEepz`-a1TY7>dxt{1-E6(-J_>+tdNXIUg?PF*t18;&n6Y9HoJGsbwRB$|_UxjnvpsX)B9~Y$+aKrq#@m_0vYb78b`P9qX6Jp+ zdwJg9`+c9w_j!K5g|d{TEG-}vt#x1#(E)f9I1{Lpw4o)()20=F#PfmUfzyG-2CfWL z4tWjO2h;<5fP2cWcVPE>2Y3)z3d{yRDQRWdqV@w+5fNtsoq+z5<^T(Umm;Fg5pPiu z(H^)87zhkWox0Fg^(L^%;H^C(fr^OF0lx>TJg#SeWx(fw(|}`vF2HJ_4meHHsw2wv zBBDRA2xtpT0*(W22aW+cdYrQ)tpR!f8-bM(@wFqyq9URn@RWZJ2Hpc61J>Ks&IV5O z`2muijEHLBRiHcYxTK2@YoH>c7x1!wUIu&wxE{D0*bVHEv@jxu0#kqvz;Ti`M??ki z1aJ}X2VjV#&4;V&%?8>5qk*2lw}A(M$-q3|8USa#ub5BG@UeOyyUlTINkkmqDl94@ zMgr4-b0odgL~EJ~i~?pFkiP-FfER(2fi;p&%DQK|v*BkXy`DP$UEp4z7UCV=!MV5BqH*MR_j1?&Jm0L%i;&2j5!puOGgeb4(NviekkR0b818x9nfJF}YEe6yAU;=QR zzy2E$ap@si)5$=8;66$7eYhIHcHfmeVr5g|$cD7Hmy z0#*T)CW(U`6Q%$k1%3#;;LJ7EAk_lj@XK~K`<{@)Qu{{4l?^=mE;sQ2eti(DTH!EJ z?fZ&rQIhsV#8PLZ4Zs$lO479vafhwqbTj5t9LIV|+GZ6;10yBv%ToPUgZ*ekbOh#G ze+S?!;3mhldSF+Iq&6hq0fqv@BH|%Q&lJ=30N(Pp3-ChXLXy@2-?!zQZL7L2i!HC0 zG&9Es*b7`65i8w)n&{wOWx9~ou^}~vhCb@s5%CBx!a7eaHZyRRnL zS%cqqke8bXbak?L8klQK`fZ9JZqW#8;1iNMK-_YEllyEL~izWMS)ALEq1YmE}+{v`@NeQ({4%6N*ZX3d)0MeA9p4C8Kipu zB}Dh~##94MX|p8!q<=&tU$C#YeAhCd;gocb zONAvV;0rpHB1Tz0 zepLZ=C04YH%p?HK0M?omN6L1=JIPFK^>2=6w`m5Tjxo(|PJR z!P_y7nR1GVL8b!RfZ@&Q-h~9}-Bb`C(L|ssf$0&^y#bKgyGX7F?w7QuRDjxSpz6uA zgY%JHsr?IpFE?;)KXa%$moAhFQ12M1rj$jivP!FS8n`wA=aoXtvJ?}j)wYOwyGBhO zI<{t&9>WgccGm-)vsyc)0hFXV;PsR>T%L!T9qwf%J)?FJad8$%m1Nc}0IZYrTuA|C znUUHb6%mIz9kw%7Tql4JH$WXD;wUnU%1-gQDa9hSg$61;71b@zpq|VYCq?ZQUV?a& zvtoDitvWI*Yf29&Pb0mSDrJV{*-}ZYsy-FC$5`i(6mMHvsul&H)c*JK)NyH6_DiG( znbmbkw%c0DKt0F)f6b_J8#kXWLADD*W|2k`pXO3TX%3)j`Tr^K8}fAH5o8v#rmdIb!cWUSNvlg5D7QyurYxZ?nPt)W%3Wz9oA4)!EX}oi z`bVi?oUBO6S6(&|t;nh?OCKoHgdeBCYSRQ=yT~*Jw*#Ltm-=O~fvRdo-D80-nslyj zqOKKWCKfh&uI_=u#r2T%KA9=@6KVPsd$r@#UdnP{iHNr%Vtz}Gp{hi%`V;VzvdNUC gEM+N6S(+sO1^9v8ds1+#WB>pF07*qoM6N<$f?9#*$^ZZW literal 0 HcmV?d00001 diff --git a/vendor/pygments/doc/_themes/pygments14/static/pygments14.css_t b/vendor/pygments/doc/_themes/pygments14/static/pygments14.css_t new file mode 100644 index 0000000..5c37aaf --- /dev/null +++ b/vendor/pygments/doc/_themes/pygments14/static/pygments14.css_t @@ -0,0 +1,401 @@ +/* + * pygments14.css + * ~~~~~~~~~~~~~~ + * + * Sphinx stylesheet -- pygments14 theme. Heavily copied from sphinx13. 
+ * + * :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; + font-size: 14px; + text-align: center; + background-image: url(bodybg.png); + background-color: {{ theme_background }}; + color: black; + padding: 0; + /* + border-right: 1px solid {{ theme_border }}; + border-left: 1px solid {{ theme_border }}; + */ + + margin: 0 auto; + min-width: 780px; + max-width: 1080px; +} + +.outerwrapper { + background-image: url(docbg.png); + background-attachment: fixed; +} + +.pageheader { + text-align: left; + padding: 10px 15px; +} + +.pageheader ul { + float: right; + color: white; + list-style-type: none; + padding-left: 0; + margin-top: 40px; + margin-right: 10px; +} + +.pageheader li { + float: left; + margin: 0 0 0 10px; +} + +.pageheader li a { + border-radius: 3px; + padding: 8px 12px; + color: {{ theme_darkgray }}; + text-shadow: 0 0 5px rgba(0, 0, 0, 0.2); +} + +.pageheader li a:hover { + background-color: {{ theme_yellow }}; + color: black; + text-shadow: none; +} + +div.document { + text-align: left; + /*border-left: 1em solid {{ theme_lightyellow }};*/ +} + +div.bodywrapper { + margin: 0 12px 0 240px; + background-color: white; +/* border-right: 1px solid {{ theme_border }}; */ +} + +div.body { + margin: 0; + padding: 0.5em 20px 20px 20px; +} + +div.related { + font-size: 1em; + color: {{ theme_darkgray }}; +} + +div.related ul { + background-image: url(relbg.png); + background-repeat: repeat-y; + background-color: {{ theme_yellow }}; + height: 1.9em; + /* + border-top: 1px solid {{ theme_border }}; + border-bottom: 1px solid {{ theme_border }}; + */ +} + +div.related ul li { + margin: 0 5px 0 0; + padding: 0; + float: left; +} + +div.related ul li.right { + float: right; + margin-right: 5px; +} + +div.related ul li a { + margin: 0; + padding: 0 5px 0 5px; + line-height: 1.75em; + color: {{ theme_darkgray }}; + /*text-shadow: 0px 0px 1px rgba(0, 0, 0, 0.5);*/ +} + +div.related ul li a:hover { + text-decoration: underline; + text-shadow: 0px 0px 1px rgba(255, 255, 255, 0.5); +} + +div.sphinxsidebarwrapper { + position: relative; + top: 0px; + padding: 0; +} + +div.sphinxsidebar { + margin: 0; + padding: 0 0px 15px 15px; + width: 210px; + float: left; + font-size: 1em; + text-align: left; +} + +div.sphinxsidebar .logo { + font-size: 1.8em; + color: #666; + font-weight: 300; + text-align: center; +} + +div.sphinxsidebar .logo img { + vertical-align: middle; +} + +div.sphinxsidebar input { + border: 1px solid #aaa; + font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; + font-size: 1em; +} + +div.sphinxsidebar h3 { + font-size: 1.5em; + /* border-top: 1px solid {{ theme_border }}; */ + margin-top: 1em; + margin-bottom: 0.5em; + padding-top: 0.5em; +} + +div.sphinxsidebar h4 { + font-size: 1.2em; + margin-bottom: 0; +} + +div.sphinxsidebar h3, div.sphinxsidebar h4 { + margin-right: -15px; + margin-left: -15px; + padding-right: 14px; + padding-left: 14px; + color: #333; + font-weight: 300; + /*text-shadow: 0px 0px 0.5px rgba(0, 0, 0, 0.4);*/ +} + +div.sphinxsidebarwrapper > h3:first-child { + margin-top: 0.5em; + border: none; +} + +div.sphinxsidebar h3 a { + color: #333; +} + +div.sphinxsidebar ul { + color: #444; + margin-top: 7px; + padding: 0; 
+ line-height: 130%; +} + +div.sphinxsidebar ul ul { + margin-left: 20px; + list-style-image: url(listitem.png); +} + +div.footer { + color: {{ theme_darkgray }}; + text-shadow: 0 0 .2px rgba(255, 255, 255, 0.8); + padding: 2em; + text-align: center; + clear: both; + font-size: 0.8em; +} + +/* -- body styles ----------------------------------------------------------- */ + +p { + margin: 0.8em 0 0.5em 0; +} + +a { + color: {{ theme_darkgreen }}; + text-decoration: none; +} + +a:hover { + color: {{ theme_darkyellow }}; +} + +div.body a { + text-decoration: underline; +} + +h1 { + margin: 10px 0 0 0; + font-size: 2.4em; + color: {{ theme_darkgray }}; + font-weight: 300; +} + +h2 { + margin: 1.em 0 0.2em 0; + font-size: 1.5em; + font-weight: 300; + padding: 0; + color: {{ theme_darkgreen }}; +} + +h3 { + margin: 1em 0 -0.3em 0; + font-size: 1.3em; + font-weight: 300; +} + +div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { + text-decoration: none; +} + +div.body h1 a tt, div.body h2 a tt, div.body h3 a tt, div.body h4 a tt, div.body h5 a tt, div.body h6 a tt { + color: {{ theme_darkgreen }} !important; + font-size: inherit !important; +} + +a.headerlink { + color: {{ theme_green }} !important; + font-size: 12px; + margin-left: 6px; + padding: 0 4px 0 4px; + text-decoration: none !important; + float: right; +} + +a.headerlink:hover { + background-color: #ccc; + color: white!important; +} + +cite, code, tt { + font-family: 'Consolas', 'DejaVu Sans Mono', + 'Bitstream Vera Sans Mono', monospace; + font-size: 14px; + letter-spacing: -0.02em; +} + +tt { + background-color: #f2f2f2; + border: 1px solid #ddd; + border-radius: 2px; + color: #333; + padding: 1px; +} + +tt.descname, tt.descclassname, tt.xref { + border: 0; +} + +hr { + border: 1px solid #abc; + margin: 2em; +} + +a tt { + border: 0; + color: {{ theme_darkgreen }}; +} + +a tt:hover { + color: {{ theme_darkyellow }}; +} + +pre { + font-family: 'Consolas', 'DejaVu Sans Mono', + 'Bitstream Vera Sans Mono', monospace; + font-size: 13px; + letter-spacing: 0.015em; + line-height: 120%; + padding: 0.5em; + border: 1px solid #ccc; + border-radius: 2px; + background-color: #f8f8f8; +} + +pre a { + color: inherit; + text-decoration: underline; +} + +td.linenos pre { + padding: 0.5em 0; +} + +div.quotebar { + background-color: #f8f8f8; + max-width: 250px; + float: right; + padding: 0px 7px; + border: 1px solid #ccc; + margin-left: 1em; +} + +div.topic { + background-color: #f8f8f8; +} + +table { + border-collapse: collapse; + margin: 0 -0.5em 0 -0.5em; +} + +table td, table th { + padding: 0.2em 0.5em 0.2em 0.5em; +} + +div.admonition, div.warning { + font-size: 0.9em; + margin: 1em 0 1em 0; + border: 1px solid #86989B; + border-radius: 2px; + background-color: #f7f7f7; + padding: 0; +} + +div.admonition p, div.warning p { + margin: 0.5em 1em 0.5em 1em; + padding: 0; +} + +div.admonition pre, div.warning pre { + margin: 0.4em 1em 0.4em 1em; +} + +div.admonition p.admonition-title, +div.warning p.admonition-title { + margin-top: 1em; + padding-top: 0.5em; + font-weight: bold; +} + +div.warning { + border: 1px solid #940000; +/* background-color: #FFCCCF;*/ +} + +div.warning p.admonition-title { +} + +div.admonition ul, div.admonition ol, +div.warning ul, div.warning ol { + margin: 0.1em 0.5em 0.5em 3em; + padding: 0; +} + +.viewcode-back { + font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; +} + +div.viewcode-block:target { + background-color: #f4debf; + 
border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} diff --git a/vendor/pygments/doc/_themes/pygments14/theme.conf b/vendor/pygments/doc/_themes/pygments14/theme.conf new file mode 100644 index 0000000..fffe66d --- /dev/null +++ b/vendor/pygments/doc/_themes/pygments14/theme.conf @@ -0,0 +1,15 @@ +[theme] +inherit = basic +stylesheet = pygments14.css +pygments_style = friendly + +[options] +green = #66b55e +darkgreen = #36852e +darkgray = #666666 +border = #66b55e +yellow = #f4cd00 +darkyellow = #d4ad00 +lightyellow = #fffbe3 +background = #f9f9f9 +font = PT Sans diff --git a/vendor/pygments/doc/conf.py b/vendor/pygments/doc/conf.py new file mode 100644 index 0000000..51a9161 --- /dev/null +++ b/vendor/pygments/doc/conf.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# +# Pygments documentation build configuration file +# + +import sys, os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +import pygments + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'pygments.sphinxext'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Pygments' +copyright = u'2015, Georg Brandl' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = pygments.__version__ +# The full version, including alpha/beta/rc tags. +release = version + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +#pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. 
+#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'pygments14' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +html_theme_path = ['_themes'] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +html_favicon = '_static/favicon.ico' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +html_sidebars = {'index': 'indexsidebar.html', + 'docs/*': 'docssidebar.html'} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Pygmentsdoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). 
+latex_documents = [ + ('index', 'Pygments.tex', u'Pygments Documentation', + u'Georg Brandl', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'pygments', u'Pygments Documentation', + [u'Georg Brandl'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'Pygments', u'Pygments Documentation', + u'Georg Brandl', 'Pygments', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + + +# Example configuration for intersphinx: refer to the Python standard library. +#intersphinx_mapping = {'http://docs.python.org/': None} diff --git a/vendor/pygments/doc/docs/api.rst b/vendor/pygments/doc/docs/api.rst new file mode 100644 index 0000000..123a464 --- /dev/null +++ b/vendor/pygments/doc/docs/api.rst @@ -0,0 +1,316 @@ +.. -*- mode: rst -*- + +===================== +The full Pygments API +===================== + +This page describes the Pygments API. + +High-level API +============== + +.. module:: pygments + +Functions from the :mod:`pygments` module: + +.. function:: lex(code, lexer) + + Lex `code` with the `lexer` (must be a `Lexer` instance) + and return an iterable of tokens. Currently, this only calls + `lexer.get_tokens()`. + +.. function:: format(tokens, formatter, outfile=None) + + Format a token stream (iterable of tokens) `tokens` with the + `formatter` (must be a `Formatter` instance). The result is + written to `outfile`, or if that is ``None``, returned as a + string. + +.. function:: highlight(code, lexer, formatter, outfile=None) + + This is the most high-level highlighting function. + It combines `lex` and `format` in one function. + + +.. module:: pygments.lexers + +Functions from :mod:`pygments.lexers`: + +.. function:: get_lexer_by_name(alias, **options) + + Return an instance of a `Lexer` subclass that has `alias` in its + aliases list. The lexer is given the `options` at its + instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is + found. + +.. function:: get_lexer_for_filename(fn, **options) + + Return a `Lexer` subclass instance that has a filename pattern + matching `fn`. The lexer is given the `options` at its + instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename + is found. 
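+
+For example, a minimal use of the high-level functions above might look like
+the following sketch (the ``'python'`` lexer alias and the ``'html'``
+formatter alias are only illustrative choices):
+
+.. sourcecode:: python
+
+    from pygments import highlight
+    from pygments.lexers import get_lexer_by_name
+    from pygments.formatters import get_formatter_by_name
+
+    code = 'print "Hello World"'
+    # Look up a lexer and a formatter by one of their registered aliases.
+    lexer = get_lexer_by_name('python')
+    formatter = get_formatter_by_name('html')
+    # highlight() combines lex() and format(); with no outfile given,
+    # the formatted result is returned as a string.
+    print(highlight(code, lexer, formatter))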
+ +.. function:: get_lexer_for_mimetype(mime, **options) + + Return a `Lexer` subclass instance that has `mime` in its mimetype + list. The lexer is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if not lexer for that mimetype + is found. + +.. function:: guess_lexer(text, **options) + + Return a `Lexer` subclass instance that's guessed from the text in + `text`. For that, the :meth:`.analyse_text()` method of every known lexer + class is called with the text as argument, and the lexer which returned the + highest value will be instantiated and returned. + + :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can + handle the content. + +.. function:: guess_lexer_for_filename(filename, text, **options) + + As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames` + or `alias_filenames` that matches `filename` are taken into consideration. + + :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can + handle the content. + +.. function:: get_all_lexers() + + Return an iterable over all registered lexers, yielding tuples in the + format:: + + (longname, tuple of aliases, tuple of filename patterns, tuple of mimetypes) + + .. versionadded:: 0.6 + + +.. module:: pygments.formatters + +Functions from :mod:`pygments.formatters`: + +.. function:: get_formatter_by_name(alias, **options) + + Return an instance of a :class:`.Formatter` subclass that has `alias` in its + aliases list. The formatter is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that + alias is found. + +.. function:: get_formatter_for_filename(fn, **options) + + Return a :class:`.Formatter` subclass instance that has a filename pattern + matching `fn`. The formatter is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename + is found. + + +.. module:: pygments.styles + +Functions from :mod:`pygments.styles`: + +.. function:: get_style_by_name(name) + + Return a style class by its short name. The names of the builtin styles + are listed in :data:`pygments.styles.STYLE_MAP`. + + Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is + found. + +.. function:: get_all_styles() + + Return an iterable over all registered styles, yielding their names. + + .. versionadded:: 0.6 + + +.. module:: pygments.lexer + +Lexers +====== + +The base lexer class from which all lexers are derived is: + +.. class:: Lexer(**options) + + The constructor takes a \*\*keywords dictionary of options. + Every subclass must first process its own options and then call + the `Lexer` constructor, since it processes the `stripnl`, + `stripall` and `tabsize` options. + + An example looks like this: + + .. sourcecode:: python + + def __init__(self, **options): + self.compress = options.get('compress', '') + Lexer.__init__(self, **options) + + As these options must all be specifiable as strings (due to the + command line usage), there are various utility functions + available to help with that, see `Option processing`_. + + .. method:: get_tokens(text) + + This method is the basic interface of a lexer. It is called by + the `highlight()` function. It must process the text and return an + iterable of ``(tokentype, value)`` pairs from `text`. + + Normally, you don't need to override this method. 
The default + implementation processes the `stripnl`, `stripall` and `tabsize` + options and then yields all tokens from `get_tokens_unprocessed()`, + with the ``index`` dropped. + + .. method:: get_tokens_unprocessed(text) + + This method should process the text and return an iterable of + ``(index, tokentype, value)`` tuples where ``index`` is the starting + position of the token within the input text. + + This method must be overridden by subclasses. + + .. staticmethod:: analyse_text(text) + + A static method which is called for lexer guessing. It should analyse + the text and return a float in the range from ``0.0`` to ``1.0``. + If it returns ``0.0``, the lexer will not be selected as the most + probable one, if it returns ``1.0``, it will be selected immediately. + + .. note:: You don't have to add ``@staticmethod`` to the definition of + this method, this will be taken care of by the Lexer's metaclass. + + For a list of known tokens have a look at the :doc:`tokens` page. + + A lexer also can have the following attributes (in fact, they are mandatory + except `alias_filenames`) that are used by the builtin lookup mechanism. + + .. attribute:: name + + Full name for the lexer, in human-readable form. + + .. attribute:: aliases + + A list of short, unique identifiers that can be used to lookup + the lexer from a list, e.g. using `get_lexer_by_name()`. + + .. attribute:: filenames + + A list of `fnmatch` patterns that match filenames which contain + content for this lexer. The patterns in this list should be unique among + all lexers. + + .. attribute:: alias_filenames + + A list of `fnmatch` patterns that match filenames which may or may not + contain content for this lexer. This list is used by the + :func:`.guess_lexer_for_filename()` function, to determine which lexers + are then included in guessing the correct one. That means that + e.g. every lexer for HTML and a template language should include + ``\*.html`` in this list. + + .. attribute:: mimetypes + + A list of MIME types for content that can be lexed with this + lexer. + + +.. module:: pygments.formatter + +Formatters +========== + +A formatter is derived from this class: + + +.. class:: Formatter(**options) + + As with lexers, this constructor processes options and then must call the + base class :meth:`__init__`. + + The :class:`Formatter` class recognizes the options `style`, `full` and + `title`. It is up to the formatter class whether it uses them. + + .. method:: get_style_defs(arg='') + + This method must return statements or declarations suitable to define + the current style for subsequent highlighted text (e.g. CSS classes + in the `HTMLFormatter`). + + The optional argument `arg` can be used to modify the generation and + is formatter dependent (it is standardized because it can be given on + the command line). + + This method is called by the ``-S`` :doc:`command-line option `, + the `arg` is then given by the ``-a`` option. + + .. method:: format(tokensource, outfile) + + This method must format the tokens from the `tokensource` iterable and + write the formatted version to the file object `outfile`. + + Formatter options can control how exactly the tokens are converted. + + .. versionadded:: 0.7 + A formatter must have the following attributes that are used by the + builtin lookup mechanism. + + .. attribute:: name + + Full name for the formatter, in human-readable form. + + .. attribute:: aliases + + A list of short, unique identifiers that can be used to lookup + the formatter from a list, e.g. 
using :func:`.get_formatter_by_name()`. + + .. attribute:: filenames + + A list of :mod:`fnmatch` patterns that match filenames for which this + formatter can produce output. The patterns in this list should be unique + among all formatters. + + +.. module:: pygments.util + +Option processing +================= + +The :mod:`pygments.util` module has some utility functions usable for option +processing: + +.. exception:: OptionError + + This exception will be raised by all option processing functions if + the type or value of the argument is not correct. + +.. function:: get_bool_opt(options, optname, default=None) + + Interpret the key `optname` from the dictionary `options` as a boolean and + return it. Return `default` if `optname` is not in `options`. + + The valid string values for ``True`` are ``1``, ``yes``, ``true`` and + ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off`` + (matched case-insensitively). + +.. function:: get_int_opt(options, optname, default=None) + + As :func:`get_bool_opt`, but interpret the value as an integer. + +.. function:: get_list_opt(options, optname, default=None) + + If the key `optname` from the dictionary `options` is a string, + split it at whitespace and return it. If it is already a list + or a tuple, it is returned as a list. + +.. function:: get_choice_opt(options, optname, allowed, default=None) + + If the key `optname` from the dictionary is not in the sequence + `allowed`, raise an error, otherwise return it. + + .. versionadded:: 0.8 diff --git a/vendor/pygments/doc/docs/authors.rst b/vendor/pygments/doc/docs/authors.rst new file mode 100644 index 0000000..f8373f0 --- /dev/null +++ b/vendor/pygments/doc/docs/authors.rst @@ -0,0 +1,4 @@ +Full contributor list +===================== + +.. include:: ../../AUTHORS diff --git a/vendor/pygments/doc/docs/changelog.rst b/vendor/pygments/doc/docs/changelog.rst new file mode 100644 index 0000000..f264cab --- /dev/null +++ b/vendor/pygments/doc/docs/changelog.rst @@ -0,0 +1 @@ +.. include:: ../../CHANGES diff --git a/vendor/pygments/docs/src/cmdline.txt b/vendor/pygments/doc/docs/cmdline.rst similarity index 86% rename from vendor/pygments/docs/src/cmdline.txt rename to vendor/pygments/doc/docs/cmdline.rst index a48a5c2..165af96 100644 --- a/vendor/pygments/docs/src/cmdline.txt +++ b/vendor/pygments/doc/docs/cmdline.rst @@ -4,8 +4,8 @@ Command Line Interface ====================== -You can use Pygments from the shell, provided you installed the `pygmentize` -script:: +You can use Pygments from the shell, provided you installed the +:program:`pygmentize` script:: $ pygmentize test.py print "Hello World" @@ -28,7 +28,7 @@ written to stdout. The ``-f`` option selects a formatter (as with ``-l``, it can also be omitted if an output file name is given and has a supported extension). If no output file name is given and ``-f`` is omitted, the -`TerminalFormatter` is used. +:class:`.TerminalFormatter` is used. The above command could therefore also be given as:: @@ -82,14 +82,15 @@ Usage is as follows:: generates a CSS style sheet (because you selected the HTML formatter) for the "colorful" style prepending a ".syntax" selector to all style rules. -For an explanation what ``-a`` means for `a particular formatter`_, look for -the `arg` argument for the formatter's `get_style_defs()` method. +For an explanation what ``-a`` means for :doc:`a particular formatter +`, look for the `arg` argument for the formatter's +:meth:`.get_style_defs()` method. 
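For comparison, the same style sheet can be produced from the library, which is what the ``-S``/``-a`` combination maps to; a short sketch (the ``.syntax`` selector is just the example value from above):

.. sourcecode:: python

    from pygments.formatters import HtmlFormatter

    # Library equivalent of: pygmentize -f html -S colorful -a .syntax
    formatter = HtmlFormatter(style='colorful')
    print(formatter.get_style_defs('.syntax'))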
Getting lexer names ------------------- -*New in Pygments 1.0.* +.. versionadded:: 1.0 The ``-N`` option guesses a lexer name for a given filename, so that :: @@ -125,7 +126,7 @@ will print the help for the Python lexer, etc. A note on encodings ------------------- -*New in Pygments 0.9.* +.. versionadded:: 0.9 Pygments tries to be smart regarding encodings in the formatting process: @@ -135,13 +136,14 @@ Pygments tries to be smart regarding encodings in the formatting process: * If you give an ``outencoding`` option, it will override ``encoding`` as the output encoding. +* If you give an ``inencoding`` option, it will override ``encoding`` + as the input encoding. + * If you don't give an encoding and have given an output file, the default - encoding for lexer and formatter is ``latin1`` (which will pass through - all non-ASCII characters). + encoding for lexer and formatter is the terminal encoding or the default + locale encoding of the system. As a last resort, ``latin1`` is used (which + will pass through all non-ASCII characters). * If you don't give an encoding and haven't given an output file (that means output is written to the console), the default encoding for lexer and - formatter is the terminal encoding (`sys.stdout.encoding`). - - -.. _a particular formatter: formatters.txt + formatter is the terminal encoding (``sys.stdout.encoding``). diff --git a/vendor/pygments/docs/src/filterdevelopment.txt b/vendor/pygments/doc/docs/filterdevelopment.rst similarity index 88% rename from vendor/pygments/docs/src/filterdevelopment.txt rename to vendor/pygments/doc/docs/filterdevelopment.rst index c60e1e8..fbcd0a0 100644 --- a/vendor/pygments/docs/src/filterdevelopment.txt +++ b/vendor/pygments/doc/docs/filterdevelopment.rst @@ -4,11 +4,11 @@ Write your own filter ===================== -*New in Pygments 0.7.* +.. versionadded:: 0.7 Writing own filters is very easy. All you have to do is to subclass the `Filter` class and override the `filter` method. Additionally a -filter is instanciated with some keyword arguments you can use to +filter is instantiated with some keyword arguments you can use to adjust the behavior of your filter. @@ -58,7 +58,7 @@ You can also use the `simplefilter` decorator from the `pygments.filter` module: @simplefilter - def uncolor(lexer, stream, options): + def uncolor(self, lexer, stream, options): class_too = get_bool_opt(options, 'classtoo') for ttype, value in stream: if ttype is Name.Function or (class_too and @@ -67,4 +67,5 @@ You can also use the `simplefilter` decorator from the `pygments.filter` module: yield ttype, value The decorator automatically subclasses an internal filter class and uses the -decorated function for filtering. +decorated function as a method for filtering. (That's why there is a `self` +argument that you probably won't end up using in the method.) diff --git a/vendor/pygments/docs/src/filters.txt b/vendor/pygments/doc/docs/filters.rst similarity index 85% rename from vendor/pygments/docs/src/filters.txt rename to vendor/pygments/doc/docs/filters.rst index 522f633..ff2519a 100644 --- a/vendor/pygments/docs/src/filters.txt +++ b/vendor/pygments/doc/docs/filters.rst @@ -4,7 +4,7 @@ Filters ======= -*New in Pygments 0.7.* +.. versionadded:: 0.7 You can filter token streams coming from lexers to improve or annotate the output. 
For example, you can highlight special words in comments, convert @@ -31,12 +31,11 @@ To get a list of all registered filters by name, you can use the `get_all_filters()` function from the `pygments.filters` module that returns an iterable for all known filters. -If you want to write your own filter, have a look at `Write your own filter`_. - -.. _Write your own filter: filterdevelopment.txt +If you want to write your own filter, have a look at :doc:`Write your own filter +`. Builtin Filters =============== -[builtin_filter_docs] +.. pygmentsdoc:: filters diff --git a/vendor/pygments/docs/src/formatterdevelopment.txt b/vendor/pygments/doc/docs/formatterdevelopment.rst similarity index 98% rename from vendor/pygments/docs/src/formatterdevelopment.txt rename to vendor/pygments/doc/docs/formatterdevelopment.rst index 83a13b6..2bfac05 100644 --- a/vendor/pygments/docs/src/formatterdevelopment.txt +++ b/vendor/pygments/doc/docs/formatterdevelopment.rst @@ -4,7 +4,7 @@ Write your own formatter ======================== -As well as creating `your own lexer `_, writing a new +As well as creating :doc:`your own lexer `, writing a new formatter for Pygments is easy and straightforward. A formatter is a class that is initialized with some keyword arguments (the diff --git a/vendor/pygments/docs/src/formatters.txt b/vendor/pygments/doc/docs/formatters.rst similarity index 90% rename from vendor/pygments/docs/src/formatters.txt rename to vendor/pygments/doc/docs/formatters.rst index 7a59064..9e7074e 100644 --- a/vendor/pygments/docs/src/formatters.txt +++ b/vendor/pygments/doc/docs/formatters.rst @@ -12,8 +12,6 @@ Common options All formatters support these options: `encoding` - *New in Pygments 0.6.* - If given, must be an encoding name (such as ``"utf-8"``). This will be used to convert the token strings (which are Unicode strings) to byte strings in the output (default: ``None``). @@ -30,19 +28,21 @@ All formatters support these options: supports Unicode arguments to `write()`. Using a regular file object wouldn't work. -`outencoding` - *New in Pygments 0.7.* + .. versionadded:: 0.6 +`outencoding` When using Pygments from the command line, any `encoding` option given is passed to the lexer and the formatter. This is sometimes not desirable, for example if you want to set the input encoding to ``"guess"``. Therefore, `outencoding` has been introduced which overrides `encoding` for the formatter if given. + .. versionadded:: 0.7 + Formatter classes ================= -All these classes are importable from `pygments.formatters`. +All these classes are importable from :mod:`pygments.formatters`. -[builtin_formatter_docs] +.. pygmentsdoc:: formatters diff --git a/vendor/pygments/doc/docs/index.rst b/vendor/pygments/doc/docs/index.rst new file mode 100644 index 0000000..30d5c08 --- /dev/null +++ b/vendor/pygments/doc/docs/index.rst @@ -0,0 +1,66 @@ +Pygments documentation +====================== + +**Starting with Pygments** + +.. toctree:: + :maxdepth: 1 + + ../download + quickstart + cmdline + +**Builtin components** + +.. toctree:: + :maxdepth: 1 + + lexers + filters + formatters + styles + +**Reference** + +.. toctree:: + :maxdepth: 1 + + unicode + tokens + api + +**Hacking for Pygments** + +.. toctree:: + :maxdepth: 1 + + lexerdevelopment + formatterdevelopment + filterdevelopment + plugins + +**Hints and tricks** + +.. toctree:: + :maxdepth: 1 + + rstdirective + moinmoin + java + integrate + +**About Pygments** + +.. 
toctree:: + :maxdepth: 1 + + changelog + authors + + +If you find bugs or have suggestions for the documentation, please look +:ref:`here ` for info on how to contact the team. + +.. XXX You can download an offline version of this documentation from the + :doc:`download page `. + diff --git a/vendor/pygments/docs/src/integrate.txt b/vendor/pygments/doc/docs/integrate.rst similarity index 54% rename from vendor/pygments/docs/src/integrate.txt rename to vendor/pygments/doc/docs/integrate.rst index 6f8c125..77daaa4 100644 --- a/vendor/pygments/docs/src/integrate.txt +++ b/vendor/pygments/doc/docs/integrate.rst @@ -4,27 +4,13 @@ Using Pygments in various scenarios =================================== -PyGtk ------ - -Armin has written a piece of sample code that shows how to create a Gtk -`TextBuffer` object containing Pygments-highlighted text. - -See the article here: http://lucumr.pocoo.org/cogitations/2007/05/30/pygments-gtk-rendering/ - -Wordpress ---------- - -He also has a snippet that shows how to use Pygments in WordPress: - -http://lucumr.pocoo.org/cogitations/2007/05/30/pygments-in-wordpress/ - Markdown -------- Since Pygments 0.9, the distribution ships Markdown_ preprocessor sample code -that uses Pygments to render source code in `external/markdown-processor.py`. -You can copy and adapt it to your liking. +that uses Pygments to render source code in +:file:`external/markdown-processor.py`. You can copy and adapt it to your +liking. .. _Markdown: http://www.freewisdom.org/projects/python-markdown/ @@ -42,7 +28,13 @@ Bash completion The source distribution contains a file ``external/pygments.bashcomp`` that sets up completion for the ``pygmentize`` command in bash. -Java ----- +Wrappers for other languages +---------------------------- -See the `Java quickstart `_ document. +These libraries provide Pygments highlighting for users of other languages +than Python: + +* `pygments.rb `_, a pygments wrapper for Ruby +* `Clygments `_, a pygments wrapper for + Clojure +* `PHPygments `_, a pygments wrapper for PHP diff --git a/vendor/pygments/docs/src/java.txt b/vendor/pygments/doc/docs/java.rst similarity index 82% rename from vendor/pygments/docs/src/java.txt rename to vendor/pygments/doc/docs/java.rst index 5eb6196..f553463 100644 --- a/vendor/pygments/docs/src/java.txt +++ b/vendor/pygments/doc/docs/java.rst @@ -2,18 +2,18 @@ Use Pygments in Java ===================== -Thanks to `Jython `__ it is possible to use Pygments in +Thanks to `Jython `_ it is possible to use Pygments in Java. -This page is a simple tutorial to get an idea of how this is working. You can -then look at the `Jython documentation `__ for more -advanced use. +This page is a simple tutorial to get an idea of how this works. You can +then look at the `Jython documentation `_ for more +advanced uses. Since version 1.5, Pygments is deployed on `Maven Central -`__ as a JAR so is Jython -which makes it a lot easier to create the Java project. +`_ as a JAR, as is Jython +which makes it a lot easier to create a Java project. -Here is an example of a `Maven `__ ``pom.xml`` file for a +Here is an example of a `Maven `_ ``pom.xml`` file for a project running Pygments: .. sourcecode:: xml diff --git a/vendor/pygments/doc/docs/lexerdevelopment.rst b/vendor/pygments/doc/docs/lexerdevelopment.rst new file mode 100644 index 0000000..2c86844 --- /dev/null +++ b/vendor/pygments/doc/docs/lexerdevelopment.rst @@ -0,0 +1,681 @@ +.. -*- mode: rst -*- + +.. 
highlight:: python + +==================== +Write your own lexer +==================== + +If a lexer for your favorite language is missing in the Pygments package, you +can easily write your own and extend Pygments. + +All you need can be found inside the :mod:`pygments.lexer` module. As you can +read in the :doc:`API documentation `, a lexer is a class that is +initialized with some keyword arguments (the lexer options) and that provides a +:meth:`.get_tokens_unprocessed()` method which is given a string or unicode +object with the data to lex. + +The :meth:`.get_tokens_unprocessed()` method must return an iterator or iterable +containing tuples in the form ``(index, token, value)``. Normally you don't +need to do this since there are base lexers that do most of the work and that +you can subclass. + + +RegexLexer +========== + +The lexer base class used by almost all of Pygments' lexers is the +:class:`RegexLexer`. This class allows you to define lexing rules in terms of +*regular expressions* for different *states*. + +States are groups of regular expressions that are matched against the input +string at the *current position*. If one of these expressions matches, a +corresponding action is performed (such as yielding a token with a specific +type, or changing state), the current position is set to where the last match +ended and the matching process continues with the first regex of the current +state. + +Lexer states are kept on a stack: each time a new state is entered, the new +state is pushed onto the stack. The most basic lexers (like the `DiffLexer`) +just need one state. + +Each state is defined as a list of tuples in the form (`regex`, `action`, +`new_state`) where the last item is optional. In the most basic form, `action` +is a token type (like `Name.Builtin`). That means: When `regex` matches, emit a +token with the match text and type `tokentype` and push `new_state` on the state +stack. If the new state is ``'#pop'``, the topmost state is popped from the +stack instead. To pop more than one state, use ``'#pop:2'`` and so on. +``'#push'`` is a synonym for pushing the current state on the stack. + +The following example shows the `DiffLexer` from the builtin lexers. Note that +it contains some additional attributes `name`, `aliases` and `filenames` which +aren't required for a lexer. They are used by the builtin lexer lookup +functions. :: + + from pygments.lexer import RegexLexer + from pygments.token import * + + class DiffLexer(RegexLexer): + name = 'Diff' + aliases = ['diff'] + filenames = ['*.diff'] + + tokens = { + 'root': [ + (r' .*\n', Text), + (r'\+.*\n', Generic.Inserted), + (r'-.*\n', Generic.Deleted), + (r'@.*\n', Generic.Subheading), + (r'Index.*\n', Generic.Heading), + (r'=.*\n', Generic.Heading), + (r'.*\n', Text), + ] + } + +As you can see this lexer only uses one state. When the lexer starts scanning +the text, it first checks if the current character is a space. If this is true +it scans everything until newline and returns the data as a `Text` token (which +is the "no special highlighting" token). + +If this rule doesn't match, it checks if the current char is a plus sign. And +so on. + +If no rule matches at the current position, the current char is emitted as an +`Error` token that indicates a lexing error, and the position is increased by +one. 
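To watch this state machine at work, you can feed a few lines of diff text to the lexer and inspect the token stream; a small sketch (the sample diff content is invented, and the builtin `DiffLexer` from `pygments.lexers` stands in for the hand-written class):

.. sourcecode:: python

    from pygments.lexers import DiffLexer

    sample = "Index: demo.txt\n+an added line\n-a removed line\n a context line\n"

    for tokentype, value in DiffLexer().get_tokens(sample):
        print('%s %r' % (tokentype, value))

    # prints, roughly:
    #   Token.Generic.Heading  'Index: demo.txt\n'
    #   Token.Generic.Inserted '+an added line\n'
    #   Token.Generic.Deleted  '-a removed line\n'
    #   Token.Text             ' a context line\n'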
+ + +Adding and testing a new lexer +============================== + +To make Pygments aware of your new lexer, you have to perform the following +steps: + +First, change to the current directory containing the Pygments source code: + +.. code-block:: console + + $ cd .../pygments-main + +Select a matching module under ``pygments/lexers``, or create a new module for +your lexer class. + +Next, make sure the lexer is known from outside of the module. All modules in +the ``pygments.lexers`` specify ``__all__``. For example, ``esoteric.py`` sets:: + + __all__ = ['BrainfuckLexer', 'BefungeLexer', ...] + +Simply add the name of your lexer class to this list. + +Finally the lexer can be made publicly known by rebuilding the lexer mapping: + +.. code-block:: console + + $ make mapfiles + +To test the new lexer, store an example file with the proper extension in +``tests/examplefiles``. For example, to test your ``DiffLexer``, add a +``tests/examplefiles/example.diff`` containing a sample diff output. + +Now you can use pygmentize to render your example to HTML: + +.. code-block:: console + + $ ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff + +Note that this explicitly calls the ``pygmentize`` in the current directory +by preceding it with ``./``. This ensures your modifications are used. +Otherwise a possibly already installed, unmodified version without your new +lexer would have been called from the system search path (``$PATH``). + +To view the result, open ``/tmp/example.html`` in your browser. + +Once the example renders as expected, you should run the complete test suite: + +.. code-block:: console + + $ make test + +It also tests that your lexer fulfills the lexer API and certain invariants, +such as that the concatenation of all token text is the same as the input text. + + +Regex Flags +=========== + +You can either define regex flags locally in the regex (``r'(?x)foo bar'``) or +globally by adding a `flags` attribute to your lexer class. If no attribute is +defined, it defaults to `re.MULTILINE`. For more information about regular +expression flags see the page about `regular expressions`_ in the Python +documentation. + +.. _regular expressions: http://docs.python.org/library/re.html#regular-expression-syntax + + +Scanning multiple tokens at once +================================ + +So far, the `action` element in the rule tuple of regex, action and state has +been a single token type. Now we look at the first of several other possible +values. + +Here is a more complex lexer that highlights INI files. INI files consist of +sections, comments and ``key = value`` pairs:: + + from pygments.lexer import RegexLexer, bygroups + from pygments.token import * + + class IniLexer(RegexLexer): + name = 'INI' + aliases = ['ini', 'cfg'] + filenames = ['*.ini', '*.cfg'] + + tokens = { + 'root': [ + (r'\s+', Text), + (r';.*?$', Comment), + (r'\[.*?\]$', Keyword), + (r'(.*?)(\s*)(=)(\s*)(.*?)$', + bygroups(Name.Attribute, Text, Operator, Text, String)) + ] + } + +The lexer first looks for whitespace, comments and section names. Later it +looks for a line that looks like a key, value pair, separated by an ``'='`` +sign, and optional whitespace. + +The `bygroups` helper yields each capturing group in the regex with a different +token type. First the `Name.Attribute` token, then a `Text` token for the +optional whitespace, after that a `Operator` token for the equals sign. Then a +`Text` token for the whitespace again. The rest of the line is returned as +`String`. 
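A quick way to confirm how `bygroups` distributes the captured groups is to run the lexer over a single assignment line; a small sketch (the builtin `IniLexer`, whose rules closely match the ones above, is used, and the sample line is invented):

.. sourcecode:: python

    from pygments.lexers import IniLexer

    for tokentype, value in IniLexer().get_tokens("name = value\n"):
        print('%s %r' % (tokentype, value))

    # prints, roughly:
    #   Token.Name.Attribute 'name'
    #   Token.Text           ' '
    #   Token.Operator       '='
    #   Token.Text           ' '
    #   Token.Literal.String 'value'
    #   Token.Text           '\n'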
+ +Note that for this to work, every part of the match must be inside a capturing +group (a ``(...)``), and there must not be any nested capturing groups. If you +nevertheless need a group, use a non-capturing group defined using this syntax: +``(?:some|words|here)`` (note the ``?:`` after the beginning parenthesis). + +If you find yourself needing a capturing group inside the regex which shouldn't +be part of the output but is used in the regular expressions for backreferencing +(eg: ``r'(<(foo|bar)>)(.*?)()'``), you can pass `None` to the bygroups +function and that group will be skipped in the output. + + +Changing states +=============== + +Many lexers need multiple states to work as expected. For example, some +languages allow multiline comments to be nested. Since this is a recursive +pattern it's impossible to lex just using regular expressions. + +Here is a lexer that recognizes C++ style comments (multi-line with ``/* */`` +and single-line with ``//`` until end of line):: + + from pygments.lexer import RegexLexer + from pygments.token import * + + class CppCommentLexer(RegexLexer): + name = 'Example Lexer with states' + + tokens = { + 'root': [ + (r'[^/]+', Text), + (r'/\*', Comment.Multiline, 'comment'), + (r'//.*?$', Comment.Singleline), + (r'/', Text) + ], + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ] + } + +This lexer starts lexing in the ``'root'`` state. It tries to match as much as +possible until it finds a slash (``'/'``). If the next character after the slash +is an asterisk (``'*'``) the `RegexLexer` sends those two characters to the +output stream marked as `Comment.Multiline` and continues lexing with the rules +defined in the ``'comment'`` state. + +If there wasn't an asterisk after the slash, the `RegexLexer` checks if it's a +Singleline comment (i.e. followed by a second slash). If this also wasn't the +case it must be a single slash, which is not a comment starter (the separate +regex for a single slash must also be given, else the slash would be marked as +an error token). + +Inside the ``'comment'`` state, we do the same thing again. Scan until the +lexer finds a star or slash. If it's the opening of a multiline comment, push +the ``'comment'`` state on the stack and continue scanning, again in the +``'comment'`` state. Else, check if it's the end of the multiline comment. If +yes, pop one state from the stack. + +Note: If you pop from an empty stack you'll get an `IndexError`. (There is an +easy way to prevent this from happening: don't ``'#pop'`` in the root state). + +If the `RegexLexer` encounters a newline that is flagged as an error token, the +stack is emptied and the lexer continues scanning in the ``'root'`` state. This +can help producing error-tolerant highlighting for erroneous input, e.g. when a +single-line string is not closed. + + +Advanced state tricks +===================== + +There are a few more things you can do with states: + +- You can push multiple states onto the stack if you give a tuple instead of a + simple string as the third item in a rule tuple. For example, if you want to + match a comment containing a directive, something like: + + .. code-block:: text + + /* rest of comment */ + + you can use this rule:: + + tokens = { + 'root': [ + (r'/\* <', Comment, ('comment', 'directive')), + ... 
+ ], + 'directive': [ + (r'[^>]*', Comment.Directive), + (r'>', Comment, '#pop'), + ], + 'comment': [ + (r'[^*]+', Comment), + (r'\*/', Comment, '#pop'), + (r'\*', Comment), + ] + } + + When this encounters the above sample, first ``'comment'`` and ``'directive'`` + are pushed onto the stack, then the lexer continues in the directive state + until it finds the closing ``>``, then it continues in the comment state until + the closing ``*/``. Then, both states are popped from the stack again and + lexing continues in the root state. + + .. versionadded:: 0.9 + The tuple can contain the special ``'#push'`` and ``'#pop'`` (but not + ``'#pop:n'``) directives. + + +- You can include the rules of a state in the definition of another. This is + done by using `include` from `pygments.lexer`:: + + from pygments.lexer import RegexLexer, bygroups, include + from pygments.token import * + + class ExampleLexer(RegexLexer): + tokens = { + 'comments': [ + (r'/\*.*?\*/', Comment), + (r'//.*?\n', Comment), + ], + 'root': [ + include('comments'), + (r'(function )(\w+)( {)', + bygroups(Keyword, Name, Keyword), 'function'), + (r'.', Text), + ], + 'function': [ + (r'[^}/]+', Text), + include('comments'), + (r'/', Text), + (r'\}', Keyword, '#pop'), + ] + } + + This is a hypothetical lexer for a language that consist of functions and + comments. Because comments can occur at toplevel and in functions, we need + rules for comments in both states. As you can see, the `include` helper saves + repeating rules that occur more than once (in this example, the state + ``'comment'`` will never be entered by the lexer, as it's only there to be + included in ``'root'`` and ``'function'``). + +- Sometimes, you may want to "combine" a state from existing ones. This is + possible with the `combined` helper from `pygments.lexer`. + + If you, instead of a new state, write ``combined('state1', 'state2')`` as the + third item of a rule tuple, a new anonymous state will be formed from state1 + and state2 and if the rule matches, the lexer will enter this state. + + This is not used very often, but can be helpful in some cases, such as the + `PythonLexer`'s string literal processing. + +- If you want your lexer to start lexing in a different state you can modify the + stack by overriding the `get_tokens_unprocessed()` method:: + + from pygments.lexer import RegexLexer + + class ExampleLexer(RegexLexer): + tokens = {...} + + def get_tokens_unprocessed(self, text, stack=('root', 'otherstate')): + for item in RegexLexer.get_tokens_unprocessed(text, stack): + yield item + + Some lexers like the `PhpLexer` use this to make the leading ``', Name.Tag), + ], + 'script-content': [ + (r'(.+?)(<\s*/\s*script\s*>)', + bygroups(using(JavascriptLexer), Name.Tag), + '#pop'), + ] + } + +Here the content of a ```` end tag is processed by the `JavascriptLexer`, +while the end tag is yielded as a normal token with the `Name.Tag` type. + +Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule. +Here, two states are pushed onto the state stack, ``'script-content'`` and +``'tag'``. That means that first ``'tag'`` is processed, which will lex +attributes and the closing ``>``, then the ``'tag'`` state is popped and the +next state on top of the stack will be ``'script-content'``. + +Since you cannot refer to the class currently being defined, use `this` +(imported from `pygments.lexer`) to refer to the current lexer class, i.e. +``using(this)``. 
This construct may seem unnecessary, but this is often the +most obvious way of lexing arbitrary syntax between fixed delimiters without +introducing deeply nested states. + +The `using()` helper has a special keyword argument, `state`, which works as +follows: if given, the lexer to use initially is not in the ``"root"`` state, +but in the state given by this argument. This does not work with advanced +`RegexLexer` subclasses such as `ExtendedRegexLexer` (see below). + +Any other keywords arguments passed to `using()` are added to the keyword +arguments used to create the lexer. + + +Delegating Lexer +================ + +Another approach for nested lexers is the `DelegatingLexer` which is for example +used for the template engine lexers. It takes two lexers as arguments on +initialisation: a `root_lexer` and a `language_lexer`. + +The input is processed as follows: First, the whole text is lexed with the +`language_lexer`. All tokens yielded with the special type of ``Other`` are +then concatenated and given to the `root_lexer`. The language tokens of the +`language_lexer` are then inserted into the `root_lexer`'s token stream at the +appropriate positions. :: + + from pygments.lexer import DelegatingLexer + from pygments.lexers.web import HtmlLexer, PhpLexer + + class HtmlPhpLexer(DelegatingLexer): + def __init__(self, **options): + super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options) + +This procedure ensures that e.g. HTML with template tags in it is highlighted +correctly even if the template tags are put into HTML tags or attributes. + +If you want to change the needle token ``Other`` to something else, you can give +the lexer another token type as the third parameter:: + + DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options) + + +Callbacks +========= + +Sometimes the grammar of a language is so complex that a lexer would be unable +to process it just by using regular expressions and stacks. + +For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead +of token types (`bygroups` and `using` are nothing else but preimplemented +callbacks). The callback must be a function taking two arguments: + +* the lexer itself +* the match object for the last matched rule + +The callback must then return an iterable of (or simply yield) ``(index, +tokentype, value)`` tuples, which are then just passed through by +`get_tokens_unprocessed()`. The ``index`` here is the position of the token in +the input string, ``tokentype`` is the normal token type (like `Name.Builtin`), +and ``value`` the associated part of the input string. + +You can see an example here:: + + from pygments.lexer import RegexLexer + from pygments.token import Generic + + class HypotheticLexer(RegexLexer): + + def headline_callback(lexer, match): + equal_signs = match.group(1) + text = match.group(2) + yield match.start(), Generic.Headline, equal_signs + text + equal_signs + + tokens = { + 'root': [ + (r'(=+)(.*?)(\1)', headline_callback) + ] + } + +If the regex for the `headline_callback` matches, the function is called with +the match object. Note that after the callback is done, processing continues +normally, that is, after the end of the previous match. The callback has no +possibility to influence the position. + +There are not really any simple examples for lexer callbacks, but you can see +them in action e.g. in the `SMLLexer` class in `ml.py`_. + +.. 
_ml.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ml.py + + +The ExtendedRegexLexer class +============================ + +The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for +the funky syntax rules of languages such as Ruby. + +But fear not; even then you don't have to abandon the regular expression +approach: Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`. +All features known from RegexLexers are available here too, and the tokens are +specified in exactly the same way, *except* for one detail: + +The `get_tokens_unprocessed()` method holds its internal state data not as local +variables, but in an instance of the `pygments.lexer.LexerContext` class, and +that instance is passed to callbacks as a third argument. This means that you +can modify the lexer state in callbacks. + +The `LexerContext` class has the following members: + +* `text` -- the input text +* `pos` -- the current starting position that is used for matching regexes +* `stack` -- a list containing the state stack +* `end` -- the maximum position to which regexes are matched, this defaults to + the length of `text` + +Additionally, the `get_tokens_unprocessed()` method can be given a +`LexerContext` instead of a string and will then process this context instead of +creating a new one for the string argument. + +Note that because you can set the current position to anything in the callback, +it won't be automatically be set by the caller after the callback is finished. +For example, this is how the hypothetical lexer above would be written with the +`ExtendedRegexLexer`:: + + from pygments.lexer import ExtendedRegexLexer + from pygments.token import Generic + + class ExHypotheticLexer(ExtendedRegexLexer): + + def headline_callback(lexer, match, ctx): + equal_signs = match.group(1) + text = match.group(2) + yield match.start(), Generic.Headline, equal_signs + text + equal_signs + ctx.pos = match.end() + + tokens = { + 'root': [ + (r'(=+)(.*?)(\1)', headline_callback) + ] + } + +This might sound confusing (and it can really be). But it is needed, and for an +example look at the Ruby lexer in `ruby.py`_. + +.. _ruby.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ruby.py + + +Handling Lists of Keywords +========================== + +For a relatively short list (hundreds) you can construct an optimized regular +expression directly using ``words()`` (longer lists, see next section). This +function handles a few things for you automatically, including escaping +metacharacters and Python's first-match rather than longest-match in +alternations. Feel free to put the lists themselves in +``pygments/lexers/_$lang_builtins.py`` (see examples there), and generated by +code if possible. + +An example of using ``words()`` is something like:: + + from pygments.lexer import RegexLexer, words, Name + + class MyLexer(RegexLexer): + + tokens = { + 'root': [ + (words(('else', 'elseif'), suffix=r'\b'), Name.Builtin), + (r'\w+', Name), + ], + } + +As you can see, you can add ``prefix`` and ``suffix`` parts to the constructed +regex. + + +Modifying Token Streams +======================= + +Some languages ship a lot of builtin functions (for example PHP). The total +amount of those functions differs from system to system because not everybody +has every extension installed. In the case of PHP there are over 3000 builtin +functions. That's an incredibly huge amount of functions, much more than you +want to put into a regular expression. 
+ +But because only `Name` tokens can be function names this is solvable by +overriding the ``get_tokens_unprocessed()`` method. The following lexer +subclasses the `PythonLexer` so that it highlights some additional names as +pseudo keywords:: + + from pygments.lexers.python import PythonLexer + from pygments.token import Name, Keyword + + class MyPythonLexer(PythonLexer): + EXTRA_KEYWORDS = set(('foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs')) + + def get_tokens_unprocessed(self, text): + for index, token, value in PythonLexer.get_tokens_unprocessed(self, text): + if token is Name and value in self.EXTRA_KEYWORDS: + yield index, Keyword.Pseudo, value + else: + yield index, token, value + +The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions. diff --git a/vendor/pygments/docs/src/lexers.txt b/vendor/pygments/doc/docs/lexers.rst similarity index 89% rename from vendor/pygments/docs/src/lexers.txt rename to vendor/pygments/doc/docs/lexers.rst index 016de6c..9262efb 100644 --- a/vendor/pygments/docs/src/lexers.txt +++ b/vendor/pygments/doc/docs/lexers.rst @@ -18,35 +18,37 @@ Currently, **all lexers** support these options: `ensurenl` Make sure that the input ends with a newline (default: ``True``). This is required for some lexers that consume input linewise. - *New in Pygments 1.3.* + + .. versionadded:: 1.3 `tabsize` If given and greater than 0, expand tabs in the input (default: ``0``). `encoding` - *New in Pygments 0.6.* - If given, must be an encoding name (such as ``"utf-8"``). This encoding will be used to convert the input string to Unicode (if it is not already - a Unicode string). The default is ``"latin1"``. + a Unicode string). The default is ``"guess"``. If this option is set to ``"guess"``, a simple UTF-8 vs. Latin-1 detection is used, if it is set to ``"chardet"``, the - `chardet library `__ is used to + `chardet library `_ is used to guess the encoding of the input. + .. versionadded:: 0.6 + The "Short Names" field lists the identifiers that can be used with the `get_lexer_by_name()` function. These lexers are builtin and can be imported from `pygments.lexers`: -[builtin_lexer_docs] +.. pygmentsdoc:: lexers + Iterating over all lexers ------------------------- -*New in Pygments 0.6.* +.. versionadded:: 0.6 To get all lexers (both the builtin and the plugin ones), you can use the `get_all_lexers()` function from the `pygments.lexers` diff --git a/vendor/pygments/docs/src/moinmoin.txt b/vendor/pygments/doc/docs/moinmoin.rst similarity index 100% rename from vendor/pygments/docs/src/moinmoin.txt rename to vendor/pygments/doc/docs/moinmoin.rst diff --git a/vendor/pygments/docs/src/plugins.txt b/vendor/pygments/doc/docs/plugins.rst similarity index 100% rename from vendor/pygments/docs/src/plugins.txt rename to vendor/pygments/doc/docs/plugins.rst diff --git a/vendor/pygments/docs/src/quickstart.txt b/vendor/pygments/doc/docs/quickstart.rst similarity index 80% rename from vendor/pygments/docs/src/quickstart.txt rename to vendor/pygments/doc/docs/quickstart.rst index 4040910..dba7698 100644 --- a/vendor/pygments/docs/src/quickstart.txt +++ b/vendor/pygments/doc/docs/quickstart.rst @@ -58,8 +58,8 @@ can be produced by: print HtmlFormatter().get_style_defs('.highlight') -The argument to `get_style_defs` is used as an additional CSS selector: the output -may look like this: +The argument to :func:`get_style_defs` is used as an additional CSS selector: +the output may look like this: .. 
sourcecode:: css @@ -71,9 +71,9 @@ may look like this: Options ======= -The `highlight()` function supports a fourth argument called `outfile`, it must be -a file object if given. The formatted output will then be written to this file -instead of being returned as a string. +The :func:`highlight()` function supports a fourth argument called *outfile*, it +must be a file object if given. The formatted output will then be written to +this file instead of being returned as a string. Lexers and formatters both support options. They are given to them as keyword arguments either to the class or to the lookup method: @@ -103,9 +103,9 @@ Important options include: For an overview of builtin lexers and formatters and their options, visit the -`lexer `_ and `formatters `_ lists. +:doc:`lexer ` and :doc:`formatters ` lists. -For a documentation on filters, see `this page `_. +For a documentation on filters, see :doc:`this page `. Lexer and formatter lookup @@ -131,9 +131,9 @@ one of the following methods: All these functions accept keyword arguments; they will be passed to the lexer as options. -A similar API is available for formatters: use `get_formatter_by_name()` and -`get_formatter_for_filename()` from the `pygments.formatters` module -for this purpose. +A similar API is available for formatters: use :func:`.get_formatter_by_name()` +and :func:`.get_formatter_for_filename()` from the :mod:`pygments.formatters` +module for this purpose. Guessing lexers @@ -153,16 +153,17 @@ or some template tags), use these functions: >>> guess_lexer_for_filename('test.py', 'print "Hello World!"') -`guess_lexer()` passes the given content to the lexer classes' `analyse_text()` -method and returns the one for which it returns the highest number. +:func:`.guess_lexer()` passes the given content to the lexer classes' +:meth:`analyse_text()` method and returns the one for which it returns the +highest number. All lexers have two different filename pattern lists: the primary and the -secondary one. The `get_lexer_for_filename()` function only uses the primary -list, whose entries are supposed to be unique among all lexers. -`guess_lexer_for_filename()`, however, will first loop through all lexers and -look at the primary and secondary filename patterns if the filename matches. +secondary one. The :func:`.get_lexer_for_filename()` function only uses the +primary list, whose entries are supposed to be unique among all lexers. +:func:`.guess_lexer_for_filename()`, however, will first loop through all lexers +and look at the primary and secondary filename patterns if the filename matches. If only one lexer matches, it is returned, else the guessing mechanism of -`guess_lexer()` is used with the matching lexers. +:func:`.guess_lexer()` is used with the matching lexers. As usual, keyword arguments to these functions are given to the created lexer as options. @@ -171,7 +172,8 @@ as options. Command line usage ================== -You can use Pygments from the command line, using the `pygmentize` script:: +You can use Pygments from the command line, using the :program:`pygmentize` +script:: $ pygmentize test.py @@ -199,4 +201,5 @@ it can be created with:: where ``default`` is the style name. -More options and tricks and be found in the `command line reference `_. +More options and tricks and be found in the :doc:`command line reference +`. 
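Putting the lookup functions from this quickstart together, a minimal sketch (the filename and code snippet are invented):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import get_lexer_for_filename, guess_lexer
    from pygments.formatters import get_formatter_by_name

    code = 'print "Hello World!"\n'

    # The primary filename patterns are unique, so '*.py' picks the Python lexer.
    lexer = get_lexer_for_filename('test.py', stripall=True)
    # Without a filename we could fall back to content-based guessing:
    #   lexer = guess_lexer(code)

    formatter = get_formatter_by_name('terminal')
    print(highlight(code, lexer, formatter))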
diff --git a/vendor/pygments/docs/src/rstdirective.txt b/vendor/pygments/doc/docs/rstdirective.rst similarity index 100% rename from vendor/pygments/docs/src/rstdirective.txt rename to vendor/pygments/doc/docs/rstdirective.rst diff --git a/vendor/pygments/docs/src/styles.txt b/vendor/pygments/doc/docs/styles.rst similarity index 95% rename from vendor/pygments/docs/src/styles.txt rename to vendor/pygments/doc/docs/styles.rst index e3e9cfb..d56db0d 100644 --- a/vendor/pygments/docs/src/styles.txt +++ b/vendor/pygments/doc/docs/styles.rst @@ -21,6 +21,7 @@ option in form of a string: .. sourcecode:: pycon >>> from pygments.styles import get_style_by_name + >>> from pygments.formatters import HtmlFormatter >>> HtmlFormatter(style='colorful').style @@ -30,6 +31,7 @@ Or you can also import your own style (which must be a subclass of .. sourcecode:: pycon >>> from yourapp.yourmodule import YourStyle + >>> from pygments.formatters import HtmlFormatter >>> HtmlFormatter(style=YourStyle).style @@ -68,7 +70,7 @@ they can be used for a variety of formatters.) To make the style usable for Pygments, you must -* either register it as a plugin (see `the plugin docs `_) +* either register it as a plugin (see :doc:`the plugin docs `) * or drop it into the `styles` subpackage of your Pygments distribution one style class per style, where the file name is the style name and the class name is `StylenameClass`. For example, if your style should be called @@ -132,7 +134,7 @@ To get a list of known styles you can use this snippet: Getting a list of available styles ================================== -*New in Pygments 0.6.* +.. versionadded:: 0.6 Because it could be that a plugin registered a style, there is a way to iterate over all styles: diff --git a/vendor/pygments/docs/src/tokens.txt b/vendor/pygments/doc/docs/tokens.rst similarity index 94% rename from vendor/pygments/docs/src/tokens.txt rename to vendor/pygments/doc/docs/tokens.rst index 4900a9a..6455a50 100644 --- a/vendor/pygments/docs/src/tokens.txt +++ b/vendor/pygments/doc/docs/tokens.rst @@ -4,7 +4,9 @@ Builtin Tokens ============== -Inside the `pygments.token` module, there is a special object called `Token` +.. module:: pygments.token + +In the :mod:`pygments.token` module, there is a special object called `Token` that is used to create token types. You can create a new token type by accessing an attribute of `Token`: @@ -30,8 +32,8 @@ As of Pygments 0.7 you can also use the ``in`` operator to perform set tests: >>> Comment in Comment.Multi False -This can be useful in `filters`_ and if you write lexers on your own without -using the base lexers. +This can be useful in :doc:`filters ` and if you write lexers on your +own without using the base lexers. You can also split a token type into a hierarchy, and get the parent of it: @@ -55,7 +57,7 @@ For some tokens aliases are already defined: >>> String Token.Literal.String -Inside the `pygments.token` module the following aliases are defined: +Inside the :mod:`pygments.token` module the following aliases are defined: ============= ============================ ==================================== `Text` `Token.Text` for any type of text data @@ -87,7 +89,7 @@ The `is_token_subtype()` function in the `pygments.token` module can be used to test if a token type is a subtype of another (such as `Name.Tag` and `Name`). (This is the same as ``Name.Tag in Name``. The overloaded `in` operator was newly introduced in Pygments 0.7, the function still exists for backwards -compatiblity.) +compatibility.) 
With Pygments 0.7, it's also possible to convert strings to token types (for example if you want to supply a token from the command line): @@ -158,7 +160,7 @@ Name Tokens other languages constants are uppercase by definition (Ruby). `Name.Decorator` - Token type for decorators. Decorators are synatic elements in the Python + Token type for decorators. Decorators are syntactic elements in the Python language. Similar syntax elements exist in C# and Java. `Name.Entity` @@ -251,6 +253,9 @@ Literals `Number` Token type for any number literal. +`Number.Bin` + Token type for binary literals (e.g. ``0b101010``). + `Number.Float` Token type for float literals (e.g. ``42.0``). @@ -280,7 +285,7 @@ Operators Punctuation =========== -*New in Pygments 0.7.* +.. versionadded:: 0.7 `Punctuation` For any punctuation which is not an operator (e.g. ``[``, ``(``...) @@ -292,6 +297,10 @@ Comments `Comment` Token type for any comment. +`Comment.Hashbang` + Token type for hashbang comments (i.e. first lines of files that start with + ``#!``). + `Comment.Multiline` Token type for multiline comments. @@ -345,5 +354,3 @@ highlight a programming language but a patch file. `Generic.Traceback` Marks the token value as a part of an error traceback. - -.. _filters: filters.txt diff --git a/vendor/pygments/docs/src/unicode.txt b/vendor/pygments/doc/docs/unicode.rst similarity index 53% rename from vendor/pygments/docs/src/unicode.txt rename to vendor/pygments/doc/docs/unicode.rst index dc6394a..17853a3 100644 --- a/vendor/pygments/docs/src/unicode.txt +++ b/vendor/pygments/doc/docs/unicode.rst @@ -3,15 +3,23 @@ Unicode and Encodings ===================== Since Pygments 0.6, all lexers use unicode strings internally. Because of that -you might encounter the occasional `UnicodeDecodeError` if you pass strings with the -wrong encoding. +you might encounter the occasional :exc:`UnicodeDecodeError` if you pass strings +with the wrong encoding. -Per default all lexers have their input encoding set to `latin1`. -If you pass a lexer a string object (not unicode), it tries to decode the data -using this encoding. -You can override the encoding using the `encoding` lexer option. If you have the -`chardet`_ library installed and set the encoding to ``chardet`` if will ananlyse -the text and use the encoding it thinks is the right one automatically: +Per default all lexers have their input encoding set to `guess`. This means +that the following encodings are tried: + +* UTF-8 (including BOM handling) +* The locale encoding (i.e. the result of `locale.getpreferredencoding()`) +* As a last resort, `latin1` + +If you pass a lexer a byte string object (not unicode), it tries to decode the +data using this encoding. + +You can override the encoding using the `encoding` or `inencoding` lexer +options. If you have the `chardet`_ library installed and set the encoding to +``chardet`` if will analyse the text and use the encoding it thinks is the +right one automatically: .. sourcecode:: python @@ -39,11 +47,12 @@ Unicode string with this encoding before writing it. This is the case for `sys.stdout`, for example. The other formatters don't have that behavior. Another note: If you call Pygments via the command line (`pygmentize`), -encoding is handled differently, see `the command line docs `_. +encoding is handled differently, see :doc:`the command line docs `. -*New in Pygments 0.7*: the formatters now also accept an `outencoding` option -which will override the `encoding` option if given. 
This makes it possible to -use a single options dict with lexers and formatters, and still have different -input and output encodings. +.. versionadded:: 0.7 + The formatters now also accept an `outencoding` option which will override + the `encoding` option if given. This makes it possible to use a single + options dict with lexers and formatters, and still have different input and + output encodings. .. _chardet: http://chardet.feedparser.org/ diff --git a/vendor/pygments/doc/download.rst b/vendor/pygments/doc/download.rst new file mode 100644 index 0000000..cf32f48 --- /dev/null +++ b/vendor/pygments/doc/download.rst @@ -0,0 +1,41 @@ +Download and installation +========================= + +The current release is version |version|. + +Packaged versions +----------------- + +You can download it `from the Python Package Index +`_. For installation of packages from +PyPI, we recommend `Pip `_, which works on all +major platforms. + +Under Linux, most distributions include a package for Pygments, usually called +``pygments`` or ``python-pygments``. You can install it with the package +manager as usual. + +Development sources +------------------- + +We're using the `Mercurial `_ version control +system. You can get the development source using this command:: + + hg clone http://bitbucket.org/birkenfeld/pygments-main pygments + +Development takes place at `Bitbucket +`_, you can browse the source +online `here `_. + +The latest changes in the development source code are listed in the `changelog +`_. + +.. Documentation + ------------- + +.. XXX todo + + You can download the documentation either as + a bunch of rst files from the Mercurial repository, see above, or + as a tar.gz containing rendered HTML files:

+
+    pygmentsdocs.tar.gz

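Tying together the encoding behaviour described in the command-line and Unicode sections above, here is a minimal sketch of feeding a Latin-1 byte string to a lexer and re-encoding the formatter output (the sample bytes and option values are invented):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import get_formatter_by_name

    # An invented byte string containing a Latin-1 encoded 'e acute'.
    data = b'# caf\xe9\nprint "ok"\n'

    # 'encoding' tells the lexer how to decode byte input;
    # 'outencoding' makes the formatter return encoded bytes again.
    lexer = get_lexer_by_name('python', encoding='latin1')
    formatter = get_formatter_by_name('text', outencoding='utf-8')

    print(highlight(data, lexer, formatter))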
    diff --git a/vendor/pygments/doc/faq.rst b/vendor/pygments/doc/faq.rst new file mode 100644 index 0000000..f375828 --- /dev/null +++ b/vendor/pygments/doc/faq.rst @@ -0,0 +1,139 @@ +:orphan: + +Pygments FAQ +============= + +What is Pygments? +----------------- + +Pygments is a syntax highlighting engine written in Python. That means, it will +take source code (or other markup) in a supported language and output a +processed version (in different formats) containing syntax highlighting markup. + +Its features include: + +* a wide range of common :doc:`languages and markup formats ` is supported +* new languages and formats are added easily +* a number of output formats is available, including: + + - HTML + - ANSI sequences (console output) + - LaTeX + - RTF + +* it is usable as a command-line tool and as a library +* parsing and formatting is fast + +Pygments is licensed under the BSD license. + +Where does the name Pygments come from? +--------------------------------------- + +*Py* of course stands for Python, while *pigments* are used for coloring paint, +and in this case, source code! + +What are the system requirements? +--------------------------------- + +Pygments only needs a standard Python install, version 2.6 or higher or version +3.3 or higher for Python 3. No additional libraries are needed. + +How can I use Pygments? +----------------------- + +Pygments is usable as a command-line tool as well as a library. + +From the command-line, usage looks like this (assuming the pygmentize script is +properly installed):: + + pygmentize -f html /path/to/file.py + +This will print a HTML-highlighted version of /path/to/file.py to standard output. + +For a complete help, please run ``pygmentize -h``. + +Usage as a library is thoroughly demonstrated in the Documentation section. + +How do I make a new style? +-------------------------- + +Please see the :doc:`documentation on styles `. + +How can I report a bug or suggest a feature? +-------------------------------------------- + +Please report bugs and feature wishes in the tracker at Bitbucket. + +You can also e-mail the author or use IRC, see the contact details. + +I want this support for this language! +-------------------------------------- + +Instead of waiting for others to include language support, why not write it +yourself? All you have to know is :doc:`outlined in the docs +`. + +Can I use Pygments for programming language processing? +------------------------------------------------------- + +The Pygments lexing machinery is quite powerful can be used to build lexers for +basically all languages. However, parsing them is not possible, though some +lexers go some steps in this direction in order to e.g. highlight function names +differently. + +Also, error reporting is not the scope of Pygments. It focuses on correctly +highlighting syntactically valid documents, not finding and compensating errors. + +Who uses Pygments? +------------------ + +This is an (incomplete) list of projects and sites known to use the Pygments highlighter. 
+ +* `Wikipedia `_ +* `BitBucket `_, a Mercurial and Git hosting site +* `The Sphinx documentation builder `_, for embedded source examples +* `rst2pdf `_, a reStructuredText to PDF converter +* `Codecov `_, a code coverage CI service +* `Trac `_, the universal project management tool +* `AsciiDoc `_, a text-based documentation generator +* `ActiveState Code `_, the Python Cookbook successor +* `ViewVC `_, a web-based version control repository browser +* `BzrFruit `_, a Bazaar branch viewer +* `QBzr `_, a cross-platform Qt-based GUI front end for Bazaar +* `Review Board `_, a collaborative code reviewing tool +* `Diamanda `_, a Django powered wiki system with support for Pygments +* `Progopedia `_ (`English `_), + an encyclopedia of programming languages +* `Bruce `_, a reStructuredText presentation tool +* `PIDA `_, a universal IDE written in Python +* `BPython `_, a curses-based intelligent Python shell +* `PuDB `_, a console Python debugger +* `XWiki `_, a wiki-based development framework in Java, using Jython +* `roux `_, a script for running R scripts + and creating beautiful output including graphs +* `hurl `_, a web service for making HTTP requests +* `wxHTMLPygmentizer `_ is + a GUI utility, used to make code-colorization easier +* `Postmarkup `_, a BBCode to XHTML generator +* `WpPygments `_, and `WPygments + `_, highlighter plugins for WordPress +* `Siafoo `_, a tool for sharing and storing useful code and programming experience +* `D source `_, a community for the D programming language +* `dpaste.com `_, another Django pastebin +* `Django snippets `_, a pastebin for Django code +* `Fayaa `_, a Chinese pastebin +* `Incollo.com `_, a free collaborative debugging tool +* `PasteBox `_, a pastebin focused on privacy +* `hilite.me `_, a site to highlight code snippets +* `patx.me `_, a pastebin +* `Fluidic `_, an experiment in + integrating shells with a GUI +* `pygments.rb `_, a pygments wrapper for Ruby +* `Clygments `_, a pygments wrapper for + Clojure +* `PHPygments `_, a pygments wrapper for PHP + + +If you have a project or web site using Pygments, drop me a line, and I'll add a +link here. + diff --git a/vendor/pygments/doc/index.rst b/vendor/pygments/doc/index.rst new file mode 100644 index 0000000..2611404 --- /dev/null +++ b/vendor/pygments/doc/index.rst @@ -0,0 +1,54 @@ +Welcome! +======== + +This is the home of Pygments. It is a generic syntax highlighter suitable for +use in code hosting, forums, wikis or other applications that need to prettify +source code. Highlights are: + +* a wide range of over 300 languages and other text formats is supported +* special attention is paid to details that increase highlighting quality +* support for new languages and formats are added easily; most languages use a + simple regex-based lexing mechanism +* a number of output formats is available, among them HTML, RTF, LaTeX and ANSI + sequences +* it is usable as a command-line tool and as a library +* ... and it highlights even Perl 6! + +Read more in the :doc:`FAQ list ` or the :doc:`documentation `, +or `download the latest release `_. + +.. _contribute: + +Contribute +---------- + +Like every open-source project, we are always looking for volunteers to help us +with programming. Python knowledge is required, but don't fear: Python is a very +clear and easy to learn language. + +Development takes place on `Bitbucket +`_, where the Mercurial +repository, tickets and pull requests can be viewed. 
+ +Our primary communication instrument is the IRC channel **#pocoo** on the +Freenode network. To join it, let your IRC client connect to +``irc.freenode.net`` and do ``/join #pocoo``. + +If you found a bug, just open a ticket in the Bitbucket tracker. Be sure to log +in to be notified when the issue is fixed -- development is not fast-paced as +the library is quite stable. You can also send an e-mail to the developers, see +below. + +The authors +----------- + +Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*. + +Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of +the `Pocoo `_ team and **Tim Hatch**. + +.. toctree:: + :maxdepth: 1 + :hidden: + + docs/index diff --git a/vendor/pygments/doc/languages.rst b/vendor/pygments/doc/languages.rst new file mode 100644 index 0000000..a495d15 --- /dev/null +++ b/vendor/pygments/doc/languages.rst @@ -0,0 +1,152 @@ +:orphan: + +Supported languages +=================== + +Pygments supports an ever-growing range of languages. Watch this space... + +Programming languages +--------------------- + +* ActionScript +* Ada +* ANTLR +* AppleScript +* Assembly (various) +* Asymptote +* Awk +* Befunge +* Boo +* BrainFuck +* C, C++ +* C# +* Clojure +* CoffeeScript +* ColdFusion +* Common Lisp +* Coq +* Cryptol (incl. Literate Cryptol) +* `Cython `_ +* `D `_ +* Dart +* Delphi +* Dylan +* Erlang +* `Ezhil `_ Ezhil - A Tamil programming language +* Factor +* Fancy +* Fortran +* F# +* GAP +* Gherkin (Cucumber) +* GL shaders +* Groovy +* `Haskell `_ (incl. Literate Haskell) +* IDL +* Io +* Java +* JavaScript +* Lasso +* LLVM +* Logtalk +* `Lua `_ +* Matlab +* MiniD +* Modelica +* Modula-2 +* MuPad +* Nemerle +* Nimrod +* Objective-C +* Objective-J +* Octave +* OCaml +* PHP +* `Perl `_ +* PovRay +* PostScript +* PowerShell +* Prolog +* `Python `_ 2.x and 3.x (incl. console sessions and tracebacks) +* `REBOL `_ +* `Red `_ +* Redcode +* `Ruby `_ (incl. irb sessions) +* Rust +* S, S-Plus, R +* Scala +* Scheme +* Scilab +* Smalltalk +* SNOBOL +* Tcl +* Vala +* Verilog +* VHDL +* Visual Basic.NET +* Visual FoxPro +* XQuery +* Zephir + +Template languages +------------------ + +* Cheetah templates +* `Django `_ / `Jinja + `_ templates +* ERB (Ruby templating) +* `Genshi `_ (the Trac template language) +* JSP (Java Server Pages) +* `Myghty `_ (the HTML::Mason based framework) +* `Mako `_ (the Myghty successor) +* `Smarty `_ templates (PHP templating) +* Tea + +Other markup +------------ + +* Apache config files +* Bash shell scripts +* BBCode +* CMake +* CSS +* Debian control files +* Diff files +* DTD +* Gettext catalogs +* Gnuplot script +* Groff markup +* HTML +* HTTP sessions +* INI-style config files +* IRC logs (irssi style) +* Lighttpd config files +* Makefiles +* MoinMoin/Trac Wiki markup +* MySQL +* Nginx config files +* POV-Ray scenes +* Ragel +* Redcode +* ReST +* Robot Framework +* RPM spec files +* SQL, also MySQL, SQLite +* Squid configuration +* TeX +* tcsh +* Vim Script +* Windows batch files +* XML +* XSLT +* YAML + +... that's all? +--------------- + +Well, why not write your own? Contributing to Pygments is easy and fun. Take a look at the +:doc:`docs on lexer development ` and +:ref:`contact details `. + +Note: the languages listed here are supported in the development version. The +latest release may lack a few of them. 
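To give a flavor of what "writing your own" lexer involves, here is a minimal regex-based sketch (the IniLikeLexer class, its aliases and its token rules are invented for this illustration only; the real API is covered in the lexer development docs)::

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Comment, Name, Operator, String, Text

    class IniLikeLexer(RegexLexer):
        """Toy lexer for an INI-like format, shown only as an illustration."""
        name = 'INI-like (example)'
        aliases = ['ini-example']
        filenames = ['*.ini-example']

        tokens = {
            'root': [
                (r'\s+', Text),                   # whitespace
                (r'[;#].*$', Comment.Single),     # ; or # comments
                (r'\[[^\]]+\]', Name.Namespace),  # [section] headers
                (r'([^=\n]+)(=)(.*)$',            # key=value lines
                 bygroups(Name.Attribute, Operator, String)),
            ]
        }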
diff --git a/vendor/pygments/doc/make.bat b/vendor/pygments/doc/make.bat new file mode 100644 index 0000000..8803c98 --- /dev/null +++ b/vendor/pygments/doc/make.bat @@ -0,0 +1,190 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Pygments.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Pygments.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. 
+ goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +:end diff --git a/vendor/pygments/docs/pygmentize.1 b/vendor/pygments/doc/pygmentize.1 similarity index 100% rename from vendor/pygments/docs/pygmentize.1 rename to vendor/pygments/doc/pygmentize.1 diff --git a/vendor/pygments/docs/build/api.html b/vendor/pygments/docs/build/api.html deleted file mode 100644 index 1225e28..0000000 --- a/vendor/pygments/docs/build/api.html +++ /dev/null @@ -1,458 +0,0 @@ - - - - The full Pygments API — Pygments - - - - -
    -

    Pygments

    -

    The full Pygments API

    -


    This page describes the Pygments API.

    -
    -

    High-level API

    -

    Functions from the pygments module:

    -
    -
    def lex(code, lexer):
    -
    Lex code with the lexer (must be a Lexer instance) -and return an iterable of tokens. Currently, this only calls -lexer.get_tokens().
    -
    def format(tokens, formatter, outfile=None):
    -
    Format a token stream (iterable of tokens) tokens with the -formatter (must be a Formatter instance). The result is -written to outfile, or if that is None, returned as a -string.
    -
    def highlight(code, lexer, formatter, outfile=None):
    -
    This is the most high-level highlighting function. -It combines lex and format in one function.
    -
    -
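To make the relationship between the three functions concrete, a small usage sketch (the code string and the choice of Python lexer and HTML formatter are only examples)::

    from pygments import lex, format, highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = 'print("Hello World")'

    # highlight() is shorthand for format(lex(code, lexer), formatter)
    html = highlight(code, PythonLexer(), HtmlFormatter())

    # the same result using the two lower-level functions
    tokens = lex(code, PythonLexer())        # iterable of (tokentype, value) pairs
    html2 = format(tokens, HtmlFormatter())  # no outfile given, so a string is returned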

    Functions from pygments.lexers:

    -
    -
    def get_lexer_by_name(alias, **options):
    -

    Return an instance of a Lexer subclass that has alias in its -aliases list. The lexer is given the options at its -instantiation.

    -

    Will raise pygments.util.ClassNotFound if no lexer with that alias is -found.

    -
    -
    def get_lexer_for_filename(fn, **options):
    -

    Return a Lexer subclass instance that has a filename pattern -matching fn. The lexer is given the options at its -instantiation.

    -

    Will raise pygments.util.ClassNotFound if no lexer for that filename is -found.

    -
    -
    def get_lexer_for_mimetype(mime, **options):
    -

    Return a Lexer subclass instance that has mime in its mimetype -list. The lexer is given the options at its instantiation.

    -

Will raise pygments.util.ClassNotFound if no lexer for that mimetype is -found.

    -
    -
    def guess_lexer(text, **options):
    -

    Return a Lexer subclass instance that's guessed from the text -in text. For that, the analyse_text() method of every known -lexer class is called with the text as argument, and the lexer -which returned the highest value will be instantiated and returned.

    -

    pygments.util.ClassNotFound is raised if no lexer thinks it can handle the -content.

    -
    -
    def guess_lexer_for_filename(filename, text, **options):
    -

    As guess_lexer(), but only lexers which have a pattern in filenames -or alias_filenames that matches filename are taken into consideration.

    -

    pygments.util.ClassNotFound is raised if no lexer thinks it can handle the -content.

    -
    -
    def get_all_lexers():
    -

    Return an iterable over all registered lexers, yielding tuples in the -format:

    -
    -(longname, tuple of aliases, tuple of filename patterns, tuple of mimetypes)
    -
    -

    New in Pygments 0.6.

    -
    -
    -
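A short sketch of how these lookup functions are typically called (the file name and source snippet below are invented)::

    from pygments.lexers import (get_lexer_by_name, get_lexer_for_filename,
                                 guess_lexer, get_all_lexers)
    from pygments.util import ClassNotFound

    lexer = get_lexer_by_name('python', stripall=True)  # options are passed through
    lexer = get_lexer_for_filename('example.rb')        # chosen by filename pattern
    lexer = guess_lexer('#!/usr/bin/env python\nprint("hi")\n')

    try:
        get_lexer_by_name('no-such-language')
    except ClassNotFound:
        pass  # raised when nothing matches

    for longname, aliases, patterns, mimetypes in get_all_lexers():
        pass  # iterate over everything that is registered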

    Functions from pygments.formatters:

    -
    -
    def get_formatter_by_name(alias, **options):
    -

    Return an instance of a Formatter subclass that has alias in its -aliases list. The formatter is given the options at its -instantiation.

    -

    Will raise pygments.util.ClassNotFound if no formatter with that alias is -found.

    -
    -
    def get_formatter_for_filename(fn, **options):
    -

    Return a Formatter subclass instance that has a filename pattern -matching fn. The formatter is given the options at its -instantiation.

    -

    Will raise pygments.util.ClassNotFound if no formatter for that filename -is found.

    -
    -
    -

    Functions from pygments.styles:

    -
    -
    def get_style_by_name(name):
    -

    Return a style class by its short name. The names of the builtin styles -are listed in pygments.styles.STYLE_MAP.

    -

    Will raise pygments.util.ClassNotFound if no style of that name is found.

    -
    -
    def get_all_styles():
    -

    Return an iterable over all registered styles, yielding their names.

    -

    New in Pygments 0.6.

    -
    -
    -
    -
    -
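The formatter and style lookups work the same way; a brief sketch (the concrete names are only examples)::

    from pygments.formatters import get_formatter_by_name, get_formatter_for_filename
    from pygments.styles import get_style_by_name, get_all_styles

    formatter = get_formatter_by_name('html', linenos=True)
    formatter = get_formatter_for_filename('out.html', style='colorful')

    style = get_style_by_name('colorful')  # a Style class, usable as a formatter option
    print(sorted(get_all_styles()))        # short names of every registered style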

    Lexers

    -

    A lexer (derived from pygments.lexer.Lexer) has the following functions:

    -
    -
    def __init__(self, **options):
    -

    The constructor. Takes a **keywords dictionary of options. -Every subclass must first process its own options and then call -the Lexer constructor, since it processes the stripnl, -stripall and tabsize options.

    -

    An example looks like this:

    -
    def __init__(self, **options):
    -    self.compress = options.get('compress', '')
    -    Lexer.__init__(self, **options)
    -
    -

    As these options must all be specifiable as strings (due to the -command line usage), there are various utility functions -available to help with that, see Option processing.

    -
    -
    def get_tokens(self, text):
    -

    This method is the basic interface of a lexer. It is called by -the highlight() function. It must process the text and return an -iterable of (tokentype, value) pairs from text.

    -

    Normally, you don't need to override this method. The default -implementation processes the stripnl, stripall and tabsize -options and then yields all tokens from get_tokens_unprocessed(), -with the index dropped.

    -
    -
    def get_tokens_unprocessed(self, text):
    -

    This method should process the text and return an iterable of -(index, tokentype, value) tuples where index is the starting -position of the token within the input text.

    -

    This method must be overridden by subclasses.

    -
    -
    def analyse_text(text):
    -
A static method which is called for lexer guessing. It should analyse -the text and return a float in the range from 0.0 to 1.0. -If it returns 0.0, the lexer will not be selected as the most -probable one; if it returns 1.0, it will be selected immediately.
    -
    -

For a list of known tokens, have a look at the Tokens page.

    -

A lexer can also have the following attributes (in fact, they are mandatory -except for alias_filenames) that are used by the builtin lookup mechanism.

    -
    -
    name
    -
    Full name for the lexer, in human-readable form.
    -
    aliases
    -
    A list of short, unique identifiers that can be used to lookup -the lexer from a list, e.g. using get_lexer_by_name().
    -
    filenames
    -
    A list of fnmatch patterns that match filenames which contain -content for this lexer. The patterns in this list should be unique among -all lexers.
    -
    alias_filenames
    -
    A list of fnmatch patterns that match filenames which may or may not -contain content for this lexer. This list is used by the -guess_lexer_for_filename() function, to determine which lexers are -then included in guessing the correct one. That means that e.g. every -lexer for HTML and a template language should include \*.html in -this list.
    -
    mimetypes
    -
    A list of MIME types for content that can be lexed with this -lexer.
    -
    -
    -
    -
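To make the lexer interface concrete, a deliberately tiny sketch (the class is hypothetical and exists only to show the attributes and the get_tokens_unprocessed() contract)::

    from pygments.lexer import Lexer
    from pygments.token import Text

    class PlainLexer(Lexer):
        """Hypothetical lexer that emits the whole input as a single token."""
        name = 'Plain (example)'
        aliases = ['plain-example']
        filenames = ['*.plain-example']
        mimetypes = ['text/x-plain-example']

        def get_tokens_unprocessed(self, text):
            # a single (index, tokentype, value) tuple covering the entire input
            yield 0, Text, text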

    Formatters

    -

    A formatter (derived from pygments.formatter.Formatter) has the following -functions:

    -
    -
    def __init__(self, **options):
    -

    As with lexers, this constructor processes options and then must call -the base class __init__.

    -

    The Formatter class recognizes the options style, full and -title. It is up to the formatter class whether it uses them.

    -
    -
    def get_style_defs(self, arg=''):
    -

    This method must return statements or declarations suitable to define -the current style for subsequent highlighted text (e.g. CSS classes -in the HTMLFormatter).

    -

    The optional argument arg can be used to modify the generation and -is formatter dependent (it is standardized because it can be given on -the command line).

    -

    This method is called by the -S command-line option, the arg -is then given by the -a option.

    -
    -
    def format(self, tokensource, outfile):
    -

    This method must format the tokens from the tokensource iterable and -write the formatted version to the file object outfile.

    -

    Formatter options can control how exactly the tokens are converted.

    -
    -
    -

    A formatter must have the following attributes that are used by the -builtin lookup mechanism. (New in Pygments 0.7.)

    -
    -
    name
    -
    Full name for the formatter, in human-readable form.
    -
    aliases
    -
    A list of short, unique identifiers that can be used to lookup -the formatter from a list, e.g. using get_formatter_by_name().
    -
    filenames
    -
    A list of fnmatch patterns that match filenames for which this formatter -can produce output. The patterns in this list should be unique among -all formatters.
    -
    -
    -
    -
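A correspondingly small formatter sketch (hypothetical; it merely counts characters per token type, but it shows the attributes and the format() contract)::

    from pygments.formatter import Formatter

    class TokenCountFormatter(Formatter):
        """Hypothetical formatter that reports character counts per token type."""
        name = 'Token count (example)'
        aliases = ['tokencount-example']
        filenames = []

        def format(self, tokensource, outfile):
            counts = {}
            for ttype, value in tokensource:
                counts[ttype] = counts.get(ttype, 0) + len(value)
            for ttype in sorted(counts, key=str):
                outfile.write('%s\t%d\n' % (ttype, counts[ttype]))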

    Option processing

    -

    The pygments.util module has some utility functions usable for option -processing:

    -
    -
    class OptionError
    -
    This exception will be raised by all option processing functions if -the type or value of the argument is not correct.
    -
    def get_bool_opt(options, optname, default=None):
    -

    Interpret the key optname from the dictionary options -as a boolean and return it. Return default if optname -is not in options.

    -

    The valid string values for True are 1, yes, -true and on, the ones for False are 0, -no, false and off (matched case-insensitively).

    -
    -
    def get_int_opt(options, optname, default=None):
    -
    As get_bool_opt, but interpret the value as an integer.
    -
    def get_list_opt(options, optname, default=None):
    -
    If the key optname from the dictionary options is a string, -split it at whitespace and return it. If it is already a list -or a tuple, it is returned as a list.
    -
    def get_choice_opt(options, optname, allowed, default=None):
    -
    If the key optname from the dictionary is not in the sequence -allowed, raise an error, otherwise return it. New in Pygments 0.8.
    -
    -
    - -
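A sketch of how these helpers are usually called from a constructor (the filter below is invented purely to demonstrate the calls; all option values may arrive as strings from the command line)::

    from pygments.filter import Filter
    from pygments.util import (get_bool_opt, get_int_opt,
                               get_list_opt, get_choice_opt)

    class TrimFilter(Filter):
        """Hypothetical filter used only to show option parsing."""

        def __init__(self, **options):
            Filter.__init__(self, **options)
            self.enabled = get_bool_opt(options, 'enabled', True)
            self.maxlen = get_int_opt(options, 'maxlen', 40)
            self.kinds = get_list_opt(options, 'kinds', [])  # parsed here, not used below
            self.mode = get_choice_opt(options, 'mode', ['cut', 'keep'], 'cut')

        def filter(self, lexer, stream):
            for ttype, value in stream:
                if self.enabled and self.mode == 'cut' and len(value) > self.maxlen:
                    value = value[:self.maxlen]
                yield ttype, value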
- - -
\ No newline at end of file
diff --git a/vendor/pygments/docs/build/authors.html b/vendor/pygments/docs/build/authors.html
deleted file mode 100644
index 98190b7..0000000
--- a/vendor/pygments/docs/build/authors.html
+++ /dev/null
@@ -1,355 +0,0 @@
- - - - Authors — Pygments - - - - -
    -

    Pygments

    -

    Authors


    Pygments is written and maintained by Georg Brandl <georg@python.org>.

    -

    Major developers are Tim Hatch <tim@timhatch.com> and Armin Ronacher -<armin.ronacher@active-4.com>.

    -

    Other contributors, listed alphabetically, are:

    -
      -
    • Sam Aaron -- Ioke lexer
    • -
    • Kumar Appaiah -- Debian control lexer
    • -
    • Ali Afshar -- image formatter
    • -
    • Andreas Amann -- AppleScript lexer
    • -
    • Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
    • -
    • Jeremy Ashkenas -- CoffeeScript lexer
    • -
    • Stefan Matthias Aust -- Smalltalk lexer
    • -
    • Ben Bangert -- Mako lexers
    • -
    • Max Battcher -- Darcs patch lexer
    • -
    • Paul Baumgart, 280 North, Inc. -- Objective-J lexer
    • -
    • Michael Bayer -- Myghty lexers
    • -
    • John Benediktsson -- Factor lexer
    • -
    • Christopher Bertels -- Fancy lexer
    • -
    • Jarrett Billingsley -- MiniD lexer
    • -
    • Adam Blinkinsop -- Haskell, Redcode lexers
    • -
    • Frits van Bommel -- assembler lexers
    • -
    • Pierre Bourdon -- bugfixes
    • -
    • Hiram Chirino -- Scaml and Jade lexers
    • -
    • Ian Cooper -- VGL lexer
    • -
    • Leaf Corcoran -- MoonScript lexer
    • -
    • Christopher Creutzig -- MuPAD lexer
    • -
    • Pete Curry -- bugfixes
    • -
    • Owen Durni -- haXe lexer
    • -
    • Nick Efford -- Python 3 lexer
    • -
    • Sven Efftinge -- Xtend lexer
    • -
    • Artem Egorkine -- terminal256 formatter
    • -
    • James H. Fisher -- PostScript lexer
    • -
    • Carlos Galdino -- Elixir and Elixir Console lexers
    • -
    • Michael Galloy -- IDL lexer
    • -
    • Naveen Garg -- Autohotkey lexer
    • -
    • Laurent Gautier -- R/S lexer
    • -
    • Alex Gaynor -- PyPy log lexer
    • -
    • Alain Gilbert -- TypeScript lexer
    • -
    • Bertrand Goetzmann -- Groovy lexer
    • -
    • Krzysiek Goj -- Scala lexer
    • -
    • Matt Good -- Genshi, Cheetah lexers
    • -
    • Patrick Gotthardt -- PHP namespaces support
    • -
    • Olivier Guibe -- Asymptote lexer
    • -
    • Jordi Gutiérrez Hermoso -- Octave lexer
    • -
    • Martin Harriman -- SNOBOL lexer
    • -
    • Matthew Harrison -- SVG formatter
    • -
    • Steven Hazel -- Tcl lexer
    • -
    • Aslak Hellesøy -- Gherkin lexer
    • -
    • Greg Hendershott -- Racket lexer
    • -
    • David Hess, Fish Software, Inc. -- Objective-J lexer
    • -
    • Varun Hiremath -- Debian control lexer
    • -
    • Doug Hogan -- Mscgen lexer
    • -
    • Ben Hollis -- Mason lexer
    • -
    • Alastair Houghton -- Lexer inheritance facility
    • -
    • Tim Howard -- BlitzMax lexer
    • -
    • Ivan Inozemtsev -- Fantom lexer
    • -
    • Brian R. Jackson -- Tea lexer
    • -
    • Dennis Kaarsemaker -- sources.list lexer
    • -
    • Igor Kalnitsky -- vhdl lexer
    • -
    • Pekka Klärck -- Robot Framework lexer
    • -
    • Eric Knibbe -- Lasso lexer
    • -
    • Adam Koprowski -- Opa lexer
    • -
    • Benjamin Kowarsch -- Modula-2 lexer
    • -
    • Alexander Kriegisch -- Kconfig and AspectJ lexers
    • -
    • Marek Kubica -- Scheme lexer
    • -
    • Jochen Kupperschmidt -- Markdown processor
    • -
    • Gerd Kurzbach -- Modelica lexer
    • -
    • Jon Larimer, Google Inc. -- Smali lexer
    • -
    • Olov Lassus -- Dart lexer
    • -
    • Sylvestre Ledru -- Scilab lexer
    • -
    • Mark Lee -- Vala lexer
    • -
    • Ben Mabey -- Gherkin lexer
    • -
    • Angus MacArthur -- QML lexer
    • -
    • Simone Margaritelli -- Hybris lexer
    • -
    • Kirk McDonald -- D lexer
    • -
    • Gordon McGregor -- SystemVerilog lexer
    • -
    • Stephen McKamey -- Duel/JBST lexer
    • -
    • Brian McKenna -- F# lexer
    • -
    • Charles McLaughlin -- Puppet lexer
    • -
    • Lukas Meuser -- BBCode formatter, Lua lexer
    • -
    • Paul Miller -- LiveScript lexer
    • -
    • Hong Minhee -- HTTP lexer
    • -
    • Michael Mior -- Awk lexer
    • -
    • Bruce Mitchener -- Dylan lexer rewrite
    • -
    • Reuben Morais -- SourcePawn lexer
    • -
    • Jon Morton -- Rust lexer
    • -
    • Paulo Moura -- Logtalk lexer
    • -
    • Mher Movsisyan -- DTD lexer
    • -
    • Ana Nelson -- Ragel, ANTLR, R console lexers
    • -
    • Nam T. Nguyen -- Monokai style
    • -
    • Jesper Noehr -- HTML formatter "anchorlinenos"
    • -
    • Mike Nolta -- Julia lexer
    • -
    • Jonas Obrist -- BBCode lexer
    • -
    • David Oliva -- Rebol lexer
    • -
    • Jon Parise -- Protocol buffers lexer
    • -
    • Ronny Pfannschmidt -- BBCode lexer
    • -
    • Benjamin Peterson -- Test suite refactoring
    • -
    • Dominik Picheta -- Nimrod lexer
    • -
    • Clément Prévost -- UrbiScript lexer
    • -
    • Kashif Rasul -- CUDA lexer
    • -
    • Justin Reidy -- MXML lexer
    • -
    • Norman Richards -- JSON lexer
    • -
    • Lubomir Rintel -- GoodData MAQL and CL lexers
    • -
    • Andre Roberge -- Tango style
    • -
    • Konrad Rudolph -- LaTeX formatter enhancements
    • -
    • Mario Ruggier -- Evoque lexers
    • -
    • Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers
    • -
    • Matteo Sasso -- Common Lisp lexer
    • -
    • Joe Schafer -- Ada lexer
    • -
    • Ken Schutte -- Matlab lexers
    • -
    • Tassilo Schweyer -- Io, MOOCode lexers
    • -
    • Ted Shaw -- AutoIt lexer
    • -
    • Joerg Sieker -- ABAP lexer
    • -
    • Robert Simmons -- Standard ML lexer
    • -
    • Kirill Simonov -- YAML lexer
    • -
    • Alexander Smishlajev -- Visual FoxPro lexer
    • -
    • Steve Spigarelli -- XQuery lexer
    • -
    • Jerome St-Louis -- eC lexer
    • -
    • James Strachan -- Kotlin lexer
    • -
    • Tom Stuart -- Treetop lexer
    • -
    • Tiberius Teng -- default style overhaul
    • -
    • Jeremy Thurgood -- Erlang, Squid config lexers
    • -
    • Brian Tiffin -- OpenCOBOL lexer
    • -
    • Erick Tryzelaar -- Felix lexer
    • -
    • Daniele Varrazzo -- PostgreSQL lexers
    • -
    • Abe Voelker -- OpenEdge ABL lexer
    • -
    • Pepijn de Vos -- HTML formatter CTags support
    • -
    • Whitney Young -- ObjectiveC lexer
    • -
    • Matthias Vallentin -- Bro lexer
    • -
    • Nathan Weizenbaum -- Haml and Sass lexers
    • -
    • Dietmar Winkler -- Modelica lexer
    • -
    • Nils Winter -- Smalltalk lexer
    • -
    • Davy Wybiral -- Clojure lexer
    • -
    • Diego Zamboni -- CFengine3 lexer
    • -
    • Enrique Zamudio -- Ceylon lexer
    • -
    • Alex Zimin -- Nemerle lexer
    • -
    -

    Many thanks for all contributions!

    - -
- - -
\ No newline at end of file
diff --git a/vendor/pygments/docs/build/changelog.html b/vendor/pygments/docs/build/changelog.html
deleted file mode 100644
index 0f8f603..0000000
--- a/vendor/pygments/docs/build/changelog.html
+++ /dev/null
@@ -1,930 +0,0 @@
- - - - Changelog — Pygments - - - - -
    -

    Pygments

    -

    Changelog


    Issue numbers refer to the tracker at -<http://bitbucket.org/birkenfeld/pygments-main/issues>, -pull request numbers to the requests at -<http://bitbucket.org/birkenfeld/pygments-main/pull-requests/merged>.

    -
    -

    Version 1.6rc1

    -

    (released Jan 9, 2013)

    -
      -
    • Lexers added:
        -
      • AspectJ (PR#90)
      • -
      • AutoIt (PR#122)
      • -
      • BUGS-like languages (PR#89)
      • -
      • Ceylon (PR#86)
      • -
      • Croc (new name for MiniD)
      • -
      • CUDA (PR#75)
      • -
      • Dg (PR#116)
      • -
      • IDL (PR#115)
      • -
      • Jags (PR#89)
      • -
      • Julia (PR#61)
      • -
      • Kconfig (#711)
      • -
      • Lasso (PR#95, PR#113)
      • -
      • LiveScript (PR#84)
      • -
      • Monkey (PR#117)
      • -
      • Mscgen (PR#80)
      • -
      • NSIS scripts (PR#136)
      • -
      • OpenCOBOL (PR#72)
      • -
      • QML (PR#123)
      • -
      • Puppet (PR#133)
      • -
      • Racket (PR#94)
      • -
      • Rdoc (PR#99)
      • -
      • Robot Framework (PR#137)
      • -
      • RPM spec files (PR#124)
      • -
      • Rust (PR#67)
      • -
      • Smali (Dalvik assembly)
      • -
      • SourcePawn (PR#39)
      • -
      • Stan (PR#89)
      • -
      • Treetop (PR#125)
      • -
      • TypeScript (PR#114)
      • -
      • VGL (PR#12)
      • -
      • Visual FoxPro (#762)
      • -
      • Windows Registry (#819)
      • -
      • Xtend (PR#68)
      • -
      -
    • -
    • The HTML formatter now supports linking to tags using CTags files, when the -python-ctags package is installed (PR#87).
    • -
    • The HTML formatter now has a "linespans" option that wraps every line in a -<span> tag with a specific id (PR#82).
    • -
    • When deriving a lexer from another lexer with token definitions, definitions -for states not in the child lexer are now inherited. If you override a state -in the child lexer, an "inherit" keyword has been added to insert the base -state at that position (PR#141).
    • -
    • The C family lexers now inherit token definitions from a common base class, -removing code duplication (PR#141).
    • -
    • Use "colorama" on Windows for console color output (PR#142).
    • -
    • Fix Template Haskell highlighting (PR#63).
    • -
    • Fix some S/R lexer errors (PR#91).
    • -
    • Fix a bug in the Prolog lexer with names that start with 'is' (#810).
    • -
    • Rewrite Dylan lexer, add Dylan LID lexer (PR#147).
    • -
    • Add a Java quickstart document (PR#146).
    • -
    • Add a "external/autopygmentize" file that can be used as .lessfilter (#802).
    • -
    -
    -
    -

    Version 1.5

    -

    (codename Zeitdilatation, released Mar 10, 2012)

    -
      -
    • Lexers added:
        -
      • Awk (#630)
      • -
      • Fancy (#633)
      • -
      • PyPy Log
      • -
      • eC
      • -
      • Nimrod
      • -
      • Nemerle (#667)
      • -
      • F# (#353)
      • -
      • Groovy (#501)
      • -
      • PostgreSQL (#660)
      • -
      • DTD
      • -
      • Gosu (#634)
      • -
      • Octave (PR#22)
      • -
      • Standard ML (PR#14)
      • -
      • CFengine3 (#601)
      • -
      • Opa (PR#37)
      • -
      • HTTP sessions (PR#42)
      • -
      • JSON (PR#31)
      • -
      • SNOBOL (PR#30)
      • -
      • MoonScript (PR#43)
      • -
      • ECL (PR#29)
      • -
      • Urbiscript (PR#17)
      • -
      • OpenEdge ABL (PR#27)
      • -
      • SystemVerilog (PR#35)
      • -
      • Coq (#734)
      • -
      • PowerShell (#654)
      • -
      • Dart (#715)
      • -
      • Fantom (PR#36)
      • -
      • Bro (PR#5)
      • -
      • NewLISP (PR#26)
      • -
      • VHDL (PR#45)
      • -
      • Scilab (#740)
      • -
      • Elixir (PR#57)
      • -
      • Tea (PR#56)
      • -
      • Kotlin (PR#58)
      • -
      -
    • -
    • Fix Python 3 terminal highlighting with pygmentize (#691).
    • -
    • In the LaTeX formatter, escape special &, < and > chars (#648).
    • -
    • In the LaTeX formatter, fix display problems for styles with token -background colors (#670).
    • -
    • Enhancements to the Squid conf lexer (#664).
    • -
    • Several fixes to the reStructuredText lexer (#636).
    • -
    • Recognize methods in the ObjC lexer (#638).
    • -
    • Fix Lua "class" highlighting: it does not have classes (#665).
    • -
    • Fix degenerate regex in Scala lexer (#671) and highlighting bugs (#713, 708).
    • -
    • Fix number pattern order in Ocaml lexer (#647).
    • -
    • Fix generic type highlighting in ActionScript 3 (#666).
    • -
    • Fixes to the Clojure lexer (PR#9).
    • -
    • Fix degenerate regex in Nemerle lexer (#706).
    • -
    • Fix infinite looping in CoffeeScript lexer (#729).
    • -
    • Fix crashes and analysis with ObjectiveC lexer (#693, #696).
    • -
    • Add some Fortran 2003 keywords.
    • -
    • Fix Boo string regexes (#679).
    • -
    • Add "rrt" style (#727).
    • -
    • Fix infinite looping in Darcs Patch lexer.
    • -
    • Lots of misc fixes to character-eating bugs and ordering problems in many -different lexers.
    • -
    -
    -
    -

    Version 1.4

    -

    (codename Unschärfe, released Jan 03, 2011)

    -
      -
    • Lexers added:
        -
      • Factor (#520)
      • -
      • PostScript (#486)
      • -
      • Verilog (#491)
      • -
      • BlitzMax Basic (#478)
      • -
      • Ioke (#465)
      • -
      • Java properties, split out of the INI lexer (#445)
      • -
      • Scss (#509)
      • -
      • Duel/JBST
      • -
      • XQuery (#617)
      • -
      • Mason (#615)
      • -
      • GoodData (#609)
      • -
      • SSP (#473)
      • -
      • Autohotkey (#417)
      • -
      • Google Protocol Buffers
      • -
      • Hybris (#506)
      • -
      -
    • -
    • Do not fail in analyse_text methods (#618).
    • -
    • Performance improvements in the HTML formatter (#523).
    • -
    • With the noclasses option in the HTML formatter, some styles -present in the stylesheet were not added as inline styles.
    • -
    • Four fixes to the Lua lexer (#480, #481, #482, #497).
    • -
    • More context-sensitive Gherkin lexer with support for more i18n translations.
    • -
    • Support new OO keywords in Matlab lexer (#521).
    • -
    • Small fix in the CoffeeScript lexer (#519).
    • -
    • A bugfix for backslashes in ocaml strings (#499).
    • -
    • Fix unicode/raw docstrings in the Python lexer (#489).
    • -
    • Allow PIL to work without PIL.pth (#502).
    • -
    • Allow seconds as a unit in CSS (#496).
    • -
    • Support application/javascript as a JavaScript mime type (#504).
    • -
    • Support Offload C++ Extensions as -keywords in the C++ lexer (#484).
    • -
    • Escape more characters in LaTeX output (#505).
    • -
    • Update Haml/Sass lexers to version 3 (#509).
    • -
    • Small PHP lexer string escaping fix (#515).
    • -
    • Support comments before preprocessor directives, and unsigned/ -long long literals in C/C++ (#613, #616).
    • -
    • Support line continuations in the INI lexer (#494).
    • -
    • Fix lexing of Dylan string and char literals (#628).
    • -
    • Fix class/procedure name highlighting in VB.NET lexer (#624).
    • -
    -
    -
    -

    Version 1.3.1

    -

    (bugfix release, released Mar 05, 2010)

    -
      -
    • The pygmentize script was missing from the distribution.
    • -
    -
    -
    -

    Version 1.3

    -

    (codename Schneeglöckchen, released Mar 01, 2010)

    -
      -
    • Added the ensurenl lexer option, which can be used to suppress the -automatic addition of a newline to the lexer input.
    • -
    • Lexers added:
        -
      • Ada
      • -
      • Coldfusion
      • -
      • Modula-2
      • -
      • haXe
      • -
      • R console
      • -
      • Objective-J
      • -
      • Haml and Sass
      • -
      • CoffeeScript
      • -
      -
    • -
    • Enhanced reStructuredText highlighting.
    • -
    • Added support for PHP 5.3 namespaces in the PHP lexer.
    • -
    • Added a bash completion script for pygmentize, to the external/ -directory (#466).
    • -
    • Fixed a bug in do_insertions() used for multi-lexer languages.
    • -
    • Fixed a Ruby regex highlighting bug (#476).
    • -
    • Fixed regex highlighting bugs in Perl lexer (#258).
    • -
    • Add small enhancements to the C lexer (#467) and Bash lexer (#469).
    • -
    • Small fixes for the Tcl, Debian control file, Nginx config, -Smalltalk, Objective-C, Clojure, Lua lexers.
    • -
    • Gherkin lexer: Fixed single apostrophe bug and added new i18n keywords.
    • -
    -
    -
    -

    Version 1.2.2

    -

    (bugfix release, released Jan 02, 2010)

    -
      -
    • Removed a backwards incompatibility in the LaTeX formatter that caused -Sphinx to produce invalid commands when writing LaTeX output (#463).
    • -
    • Fixed a forever-backtracking regex in the BashLexer (#462).
    • -
    -
    -
    -

    Version 1.2.1

    -

    (bugfix release, released Jan 02, 2010)

    -
      -
    • Fixed mishandling of an ellipsis in place of the frames in a Python -console traceback, resulting in clobbered output.
    • -
    -
    -
    -

    Version 1.2

    -

    (codename Neujahr, released Jan 01, 2010)

    -
      -
    • Dropped Python 2.3 compatibility.
    • -
    • Lexers added:
        -
      • Asymptote
      • -
      • Go
      • -
      • Gherkin (Cucumber)
      • -
      • CMake
      • -
      • Ooc
      • -
      • Coldfusion
      • -
      • haXe
      • -
      • R console
      • -
      -
    • -
    • Added options for rendering LaTeX in source code comments in the -LaTeX formatter (#461).
    • -
    • Updated the Logtalk lexer.
    • -
    • Added line_number_start option to image formatter (#456).
    • -
    • Added hl_lines and hl_color options to image formatter (#457).
    • -
    • Fixed the HtmlFormatter's handling of noclasses=True to not output any -classes (#427).
    • -
    • Added the Monokai style (#453).
    • -
    • Fixed LLVM lexer identifier syntax and added new keywords (#442).
    • -
    • Fixed the PythonTracebackLexer to handle non-traceback data in header or -trailer, and support more partial tracebacks that start on line 2 (#437).
    • -
    • Fixed the CLexer to not highlight ternary statements as labels.
    • -
    • Fixed lexing of some Ruby quoting peculiarities (#460).
    • -
    • A few ASM lexer fixes (#450).
    • -
    -
    -
    -

    Version 1.1.1

    -

    (bugfix release, released Sep 15, 2009)

    -
      -
    • Fixed the BBCode lexer (#435).
    • -
    • Added support for new Jinja2 keywords.
    • -
    • Fixed test suite failures.
    • -
    • Added Gentoo-specific suffixes to Bash lexer.
    • -
    -
    -
    -

    Version 1.1

    -

    (codename Brillouin, released Sep 11, 2009)

    -
      -
    • Ported Pygments to Python 3. This needed a few changes in the way -encodings are handled; they may affect corner cases when used with -Python 2 as well.
    • -
    • Lexers added:
        -
      • Antlr/Ragel, thanks to Ana Nelson
      • -
      • (Ba)sh shell
      • -
      • Erlang shell
      • -
      • GLSL
      • -
      • Prolog
      • -
      • Evoque
      • -
      • Modelica
      • -
      • Rebol
      • -
      • MXML
      • -
      • Cython
      • -
      • ABAP
      • -
      • ASP.net (VB/C#)
      • -
      • Vala
      • -
      • Newspeak
      • -
      -
    • -
    • Fixed the LaTeX formatter's output so that output generated for one style -can be used with the style definitions of another (#384).
    • -
    • Added "anchorlinenos" and "noclobber_cssfile" (#396) options to HTML -formatter.
    • -
    • Support multiline strings in Lua lexer.
    • -
    • Rewrite of the JavaScript lexer by Pumbaa80 to better support regular -expression literals (#403).
    • -
    • When pygmentize is asked to highlight a file for which multiple lexers -match the filename, use the analyse_text guessing engine to determine the -winner (#355).
    • -
    • Fixed minor bugs in the JavaScript lexer (#383), the Matlab lexer (#378), -the Scala lexer (#392), the INI lexer (#391), the Clojure lexer (#387) -and the AS3 lexer (#389).
    • -
    • Fixed three Perl heredoc lexing bugs (#379, #400, #422).
    • -
    • Fixed a bug in the image formatter which misdetected lines (#380).
    • -
    • Fixed bugs lexing extended Ruby strings and regexes.
    • -
    • Fixed a bug when lexing git diffs.
    • -
    • Fixed a bug lexing the empty commit in the PHP lexer (#405).
    • -
    • Fixed a bug causing Python numbers to be mishighlighted as floats (#397).
    • -
    • Fixed a bug when backslashes are used in odd locations in Python (#395).
    • -
    • Fixed various bugs in Matlab and S-Plus lexers, thanks to Winston Chang (#410, -#411, #413, #414) and fmarc (#419).
    • -
    • Fixed a bug in Haskell single-line comment detection (#426).
    • -
    • Added new-style reStructuredText directive for docutils 0.5+ (#428).
    • -
    -
    -
    -

    Version 1.0

    -

    (codename Dreiundzwanzig, released Nov 23, 2008)

    -
      -
    • Don't use join(splitlines()) when converting newlines to \n, -because that doesn't keep all newlines at the end when the -stripnl lexer option is False.

      -
    • -
    • Added -N option to command-line interface to get a lexer name -for a given filename.

      -
    • -
    • Added Tango style, written by Andre Roberge for the Crunchy project.

      -
    • -
    • Added Python3TracebackLexer and python3 option to -PythonConsoleLexer.

      -
    • -
    • Fixed a few bugs in the Haskell lexer.

      -
    • -
    • Fixed PythonTracebackLexer to be able to recognize SyntaxError and -KeyboardInterrupt (#360).

      -
    • -
    • Provide one formatter class per image format, so that surprises like:

      -
      -pygmentize -f gif -o foo.gif foo.py
      -
      -

      creating a PNG file are avoided.

      -
    • -
    • Actually use the font_size option of the image formatter.

      -
    • -
    • Fixed numpy lexer that it doesn't listen for *.py any longer.

      -
    • -
    • Fixed HTML formatter so that text options can be Unicode -strings (#371).

      -
    • -
    • Unified Diff lexer supports the "udiff" alias now.

      -
    • -
    • Fixed a few issues in Scala lexer (#367).

      -
    • -
    • RubyConsoleLexer now supports simple prompt mode (#363).

      -
    • -
    • JavascriptLexer is smarter about what constitutes a regex (#356).

      -
    • -
    • Add Applescript lexer, thanks to Andreas Amann (#330).

      -
    • -
    • Make the codetags more strict about matching words (#368).

      -
    • -
    • NginxConfLexer is a little more accurate on mimetypes and -variables (#370).

      -
    • -
    -
    -
    -

    Version 0.11.1

    -

    (released Aug 24, 2008)

    -
      -
    • Fixed a Jython compatibility issue in pygments.unistring (#358).
    • -
    -
    -
    -

    Version 0.11

    -

    (codename Straußenei, released Aug 23, 2008)

    -

    Many thanks go to Tim Hatch for writing or integrating most of the bug -fixes and new features.

    -
      -
    • Lexers added:
        -
      • Nasm-style assembly language, thanks to delroth
      • -
      • YAML, thanks to Kirill Simonov
      • -
      • ActionScript 3, thanks to Pierre Bourdon
      • -
      • Cheetah/Spitfire templates, thanks to Matt Good
      • -
      • Lighttpd config files
      • -
      • Nginx config files
      • -
      • Gnuplot plotting scripts
      • -
      • Clojure
      • -
      • POV-Ray scene files
      • -
      • Sqlite3 interactive console sessions
      • -
      • Scala source files, thanks to Krzysiek Goj
      • -
      -
    • -
    • Lexers improved:
        -
      • C lexer highlights standard library functions now and supports C99 -types.
      • -
      • Bash lexer now correctly highlights heredocs without preceding -whitespace.
      • -
      • Vim lexer now highlights hex colors properly and knows a couple -more keywords.
      • -
      • Irc logs lexer now handles xchat's default time format (#340) and -correctly highlights lines ending in >.
      • -
      • Support more delimiters for perl regular expressions (#258).
      • -
      • ObjectiveC lexer now supports 2.0 features.
      • -
      -
    • -
    • Added "Visual Studio" style.
    • -
    • Updated markdown processor to Markdown 1.7.
    • -
    • Support roman/sans/mono style defs and use them in the LaTeX -formatter.
    • -
    • The RawTokenFormatter is no longer registered to *.raw and it's -documented that tokenization with this lexer may raise exceptions.
    • -
    • New option hl_lines to HTML formatter, to highlight certain -lines.
    • -
    • New option prestyles to HTML formatter.
    • -
    • New option -g to pygmentize, to allow lexer guessing based on -filetext (can be slowish, so file extensions are still checked -first).
    • -
• guess_lexer() now makes its decision much faster due to a cache -of whether data is xml-like (a check which is used in several -versions of analyse_text()). Several lexers also have more -accurate analyse_text() now.
    • -
    -
    -
    -

    Version 0.10

    -

    (codename Malzeug, released May 06, 2008)

    -
      -
    • Lexers added:
        -
      • Io
      • -
      • Smalltalk
      • -
      • Darcs patches
      • -
      • Tcl
      • -
      • Matlab
      • -
      • Matlab sessions
      • -
      • FORTRAN
      • -
      • XSLT
      • -
      • tcsh
      • -
      • NumPy
      • -
      • Python 3
      • -
      • S, S-plus, R statistics languages
      • -
      • Logtalk
      • -
      -
    • -
    • In the LatexFormatter, the commandprefix option is now by default -'PY' instead of 'C', since the latter resulted in several collisions -with other packages. Also, the special meaning of the arg -argument to get_style_defs() was removed.
    • -
    • Added ImageFormatter, to format code as PNG, JPG, GIF or BMP. -(Needs the Python Imaging Library.)
    • -
    • Support doc comments in the PHP lexer.
    • -
    • Handle format specifications in the Perl lexer.
    • -
    • Fix comment handling in the Batch lexer.
    • -
    • Add more file name extensions for the C++, INI and XML lexers.
    • -
    • Fixes in the IRC and MuPad lexers.
    • -
    • Fix function and interface name highlighting in the Java lexer.
    • -
    • Fix at-rule handling in the CSS lexer.
    • -
    • Handle KeyboardInterrupts gracefully in pygmentize.
    • -
    • Added BlackWhiteStyle.
    • -
    • Bash lexer now correctly highlights math, does not require -whitespace after semicolons, and correctly highlights boolean -operators.
    • -
    • Makefile lexer is now capable of handling BSD and GNU make syntax.
    • -
    -
    -
    -

    Version 0.9

    -

    (codename Herbstzeitlose, released Oct 14, 2007)

    -
      -
    • Lexers added:
        -
      • Erlang
      • -
      • ActionScript
      • -
      • Literate Haskell
      • -
      • Common Lisp
      • -
      • Various assembly languages
      • -
      • Gettext catalogs
      • -
      • Squid configuration
      • -
      • Debian control files
      • -
      • MySQL-style SQL
      • -
      • MOOCode
      • -
      -
    • -
    • Lexers improved:
        -
      • Greatly improved the Haskell and OCaml lexers.
      • -
      • Improved the Bash lexer's handling of nested constructs.
      • -
      • The C# and Java lexers exhibited abysmal performance with some -input code; this should now be fixed.
      • -
      • The IRC logs lexer is now able to colorize weechat logs too.
      • -
      • The Lua lexer now recognizes multi-line comments.
      • -
      • Fixed bugs in the D and MiniD lexer.
      • -
      -
    • -
    • The encoding handling of the command line mode (pygmentize) was -enhanced. You shouldn't get UnicodeErrors from it anymore if you -don't give an encoding option.
    • -
    • Added a -P option to the command line mode which can be used to -give options whose values contain commas or equals signs.
    • -
    • Added 256-color terminal formatter.
    • -
    • Added an experimental SVG formatter.
    • -
    • Added the lineanchors option to the HTML formatter, thanks to -Ian Charnas for the idea.
    • -
    • Gave the line numbers table a CSS class in the HTML formatter.
    • -
    • Added a Vim 7-like style.
    • -
    -
    -
    -

    Version 0.8.1

    -

    (released Jun 27, 2007)

    -
      -
    • Fixed POD highlighting in the Ruby lexer.
    • -
    • Fixed Unicode class and namespace name highlighting in the C# lexer.
    • -
    • Fixed Unicode string prefix highlighting in the Python lexer.
    • -
    • Fixed a bug in the D and MiniD lexers.
    • -
    • Fixed the included MoinMoin parser.
    • -
    -
    -
    -

    Version 0.8

    -

    (codename Maikäfer, released May 30, 2007)

    -
      -
    • Lexers added:
        -
      • Haskell, thanks to Adam Blinkinsop
      • -
      • Redcode, thanks to Adam Blinkinsop
      • -
      • D, thanks to Kirk McDonald
      • -
      • MuPad, thanks to Christopher Creutzig
      • -
      • MiniD, thanks to Jarrett Billingsley
      • -
      • Vim Script, by Tim Hatch
      • -
      -
    • -
    • The HTML formatter now has a second line-numbers mode in which it -will just integrate the numbers in the same <pre> tag as the -code.
    • -
    • The CSharpLexer now is Unicode-aware, which means that it has an -option that can be set so that it correctly lexes Unicode -identifiers allowed by the C# specs.
    • -
    • Added a RaiseOnErrorTokenFilter that raises an exception when the -lexer generates an error token, and a VisibleWhitespaceFilter that -converts whitespace (spaces, tabs, newlines) into visible -characters.
    • -
    • Fixed the do_insertions() helper function to yield correct -indices.
    • -
    • The ReST lexer now automatically highlights source code blocks in -".. sourcecode:: language" and ".. code:: language" directive -blocks.
    • -
    • Improved the default style (thanks to Tiberius Teng). The old -default is still available as the "emacs" style (which was an alias -before).
    • -
    • The get_style_defs method of HTML formatters now uses the -cssclass option as the default selector if it was given.
    • -
    • Improved the ReST and Bash lexers a bit.
    • -
    • Fixed a few bugs in the Makefile and Bash lexers, thanks to Tim -Hatch.
    • -
    • Fixed a bug in the command line code that disallowed -O options -when using the -S option.
    • -
    • Fixed a bug in the RawTokenFormatter.
    • -
    -
    -
    -

    Version 0.7.1

    -

    (released Feb 15, 2007)

    -
      -
    • Fixed little highlighting bugs in the Python, Java, Scheme and -Apache Config lexers.
    • -
    • Updated the included manpage.
    • -
    • Included a built version of the documentation in the source tarball.
    • -
    -
    -
    -

    Version 0.7

    -

    (codename Faschingskrapfn, released Feb 14, 2007)

    -
      -
    • Added a MoinMoin parser that uses Pygments. With it, you get -Pygments highlighting in Moin Wiki pages.
    • -
    • Changed the exception raised if no suitable lexer, formatter etc. is -found in one of the get_*_by_* functions to a custom exception, -pygments.util.ClassNotFound. It is, however, a subclass of -ValueError in order to retain backwards compatibility.
    • -
    • Added a -H command line option which can be used to get the -docstring of a lexer, formatter or filter.
    • -
    • Made the handling of lexers and formatters more consistent. The -aliases and filename patterns of formatters are now attributes on -them.
    • -
    • Added an OCaml lexer, thanks to Adam Blinkinsop.
    • -
    • Made the HTML formatter more flexible, and easily subclassable in -order to make it easy to implement custom wrappers, e.g. alternate -line number markup. See the documentation.
    • -
    • Added an outencoding option to all formatters, making it possible -to override the encoding (which is used by lexers and formatters) -when using the command line interface. Also, if using the terminal -formatter and the output file is a terminal and has an encoding -attribute, use it if no encoding is given.
    • -
    • Made it possible to just drop style modules into the styles -subpackage of the Pygments installation.
    • -
    • Added a "state" keyword argument to the using helper.
    • -
    • Added a commandprefix option to the LatexFormatter which allows -to control how the command names are constructed.
    • -
    • Added quite a few new lexers, thanks to Tim Hatch:
        -
      • Java Server Pages
      • -
      • Windows batch files
      • -
      • Trac Wiki markup
      • -
      • Python tracebacks
      • -
      • ReStructuredText
      • -
      • Dylan
      • -
      • and the Befunge esoteric programming language (yay!)
      • -
      -
    • -
    • Added Mako lexers by Ben Bangert.
    • -
    • Added "fruity" style, another dark background originally vim-based -theme.
    • -
    • Added sources.list lexer by Dennis Kaarsemaker.
    • -
    • Added token stream filters, and a pygmentize option to use them.
    • -
    • Changed behavior of in Operator for tokens.
    • -
    • Added mimetypes for all lexers.
    • -
    • Fixed some problems lexing Python strings.
    • -
    • Fixed tickets: #167, #178, #179, #180, #185, #201.
    • -
    -
    -
    -

    Version 0.6

    -

    (codename Zimtstern, released Dec 20, 2006)

    -
      -
    • Added option for the HTML formatter to write the CSS to an external -file in "full document" mode.
    • -
    • Added RTF formatter.
    • -
    • Added Bash and Apache configuration lexers (thanks to Tim Hatch).
    • -
    • Improved guessing methods for various lexers.
    • -
    • Added @media support to CSS lexer (thanks to Tim Hatch).
    • -
    • Added a Groff lexer (thanks to Tim Hatch).
    • -
    • License change to BSD.
    • -
    • Added lexers for the Myghty template language.
    • -
    • Added a Scheme lexer (thanks to Marek Kubica).
    • -
    • Added some functions to iterate over existing lexers, formatters and -lexers.
    • -
    • The HtmlFormatter's get_style_defs() can now take a list as an -argument to generate CSS with multiple prefixes.
    • -
    • Support for guessing input encoding added.
    • -
    • Encoding support added: all processing is now done with Unicode -strings, input and output are converted from and optionally to byte -strings (see the encoding option of lexers and formatters).
    • -
    • Some improvements in the C(++) lexers handling comments and line -continuations.
    • -
    -
    -
    -

    Version 0.5.1

    -

    (released Oct 30, 2006)

    -
      -
    • Fixed traceback in pygmentize -L (thanks to Piotr Ozarowski).
    • -
    -
    -
    -

    Version 0.5

    -

    (codename PyKleur, released Oct 30, 2006)

    -
      -
    • Initial public release.
    • -
    -
    - -
- - -
\ No newline at end of file
diff --git a/vendor/pygments/docs/build/cmdline.html b/vendor/pygments/docs/build/cmdline.html
deleted file mode 100644
index 51eeed5..0000000
--- a/vendor/pygments/docs/build/cmdline.html
+++ /dev/null
@@ -1,353 +0,0 @@
- - - - Command Line Interface — Pygments - - - - -
    -

    Pygments

    -

    Command Line Interface


    You can use Pygments from the shell, provided you installed the pygmentize -script:

    -
    -$ pygmentize test.py
    -print "Hello World"
    -
    -

    will print the file test.py to standard output, using the Python lexer -(inferred from the file name extension) and the terminal formatter (because -you didn't give an explicit formatter name).

    -

    If you want HTML output:

    -
    -$ pygmentize -f html -l python -o test.html test.py
    -
    -

    As you can see, the -l option explicitly selects a lexer. As seen above, if you -give an input file name and it has an extension that Pygments recognizes, you can -omit this option.

    -

    The -o option gives an output file name. If it is not given, output is -written to stdout.

    -

    The -f option selects a formatter (as with -l, it can also be omitted -if an output file name is given and has a supported extension). -If no output file name is given and -f is omitted, the -TerminalFormatter is used.

    -

    The above command could therefore also be given as:

    -
    -$ pygmentize -o test.html test.py
    -
    -

    To create a full HTML document, including line numbers and stylesheet (using the -"emacs" style), highlighting the Python file test.py to test.html:

    -
    -$ pygmentize -O full,style=emacs -o test.html test.py
    -
    -
    -

    Options and filters

    -

    Lexer and formatter options can be given using the -O option:

    -
    -$ pygmentize -f html -O style=colorful,linenos=1 -l python test.py
    -
    -

    Be sure to enclose the option string in quotes if it contains any special shell -characters, such as spaces or expansion wildcards like *. If an option -expects a list value, separate the list entries with spaces (you'll have to -quote the option value in this case too, so that the shell doesn't split it).

    -

    Since the -O option argument is split at commas and expects the split values -to be of the form name=value, you can't give an option value that contains -commas or equals signs. Therefore, an option -P is provided (as of Pygments -0.9) that works like -O but can only pass one option per -P. Its value -can then contain all characters:

    -
    -$ pygmentize -P "heading=Pygments, the Python highlighter" ...
    -
    -

    Filters are added to the token stream using the -F option:

    -
    -$ pygmentize -f html -l pascal -F keywordcase:case=upper main.pas
    -
    -

    As you see, options for the filter are given after a colon. As for -O, the -filter name and options must be one shell word, so there may not be any spaces -around the colon.

    -
    -
    -
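The same options and filters can also be set through the library API; a sketch that mirrors the commands above (main.pas is the same example file)::

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import get_formatter_by_name

    lexer = get_lexer_by_name('pascal')
    lexer.add_filter('keywordcase', case='upper')  # same filter and option as above

    formatter = get_formatter_by_name('html', style='colorful', linenos=True)

    with open('main.pas') as f:
        print(highlight(f.read(), lexer, formatter))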

    Generating styles

Formatters normally don't output full style information. For example, the HTML formatter by default only outputs <span> tags with class attributes. Therefore, there's a special -S option for generating style definitions. Usage is as follows:

$ pygmentize -f html -S colorful -a .syntax

generates a CSS style sheet (because you selected the HTML formatter) for the "colorful" style, prepending a ".syntax" selector to all style rules.

For an explanation of what -a means for a particular formatter, look for the arg argument of the formatter's get_style_defs() method.

    Getting lexer names

New in Pygments 1.0.

The -N option guesses a lexer name for a given filename, so that

$ pygmentize -N setup.py

will print out python. It won't highlight anything yet. If no specific lexer is known for that filename, text is printed.
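The same lookup is available from Python; a minimal sketch using the library API (the setup.py filename is just an example):

from pygments.lexers import get_lexer_for_filename

lexer = get_lexer_for_filename('setup.py')   # raises pygments.util.ClassNotFound if nothing matches
print(lexer.name)                            # prints "Python"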

    Getting help

The -L option lists lexers, formatters, styles and filters, along with their short names and supported file name extensions. If you want to see only one category, give it as an argument:

$ pygmentize -L filters

will list only the installed filters.

The -H option will give you detailed information (the same that can be found in this documentation) about a lexer, formatter or filter. Usage is as follows:

$ pygmentize -H formatter html

will print the help for the HTML formatter, while

$ pygmentize -H lexer python

will print the help for the Python lexer, etc.

    A note on encodings

New in Pygments 0.9.

Pygments tries to be smart regarding encodings in the formatting process:

• If you give an encoding option, it will be used as the input and output encoding.
• If you give an outencoding option, it will override encoding as the output encoding.
• If you don't give an encoding and have given an output file, the default encoding for lexer and formatter is latin1 (which will pass through all non-ASCII characters).
• If you don't give an encoding and haven't given an output file (that means output is written to the console), the default encoding for lexer and formatter is the terminal encoding (sys.stdout.encoding).
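The same options can be set from library code as well; a minimal sketch (the test.py file name and the UTF-8 choice are just examples):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

with open('test.py', 'rb') as f:             # read raw bytes; the lexer decodes them
    code = f.read()

lexer = PythonLexer(encoding='utf-8')        # input encoding
formatter = HtmlFormatter(encoding='utf-8')  # output encoding (outencoding would override it)
result = highlight(code, lexer, formatter)   # encoded bytes, ready to write to a file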
diff --git a/vendor/pygments/docs/build/filterdevelopment.html b/vendor/pygments/docs/build/filterdevelopment.html
deleted file mode 100644
index 25ffa59..0000000
--- a/vendor/pygments/docs/build/filterdevelopment.html
+++ /dev/null

Write your own filter

    New in Pygments 0.7.

Writing your own filters is very easy. All you have to do is to subclass the Filter class and override the filter method. Additionally, a filter is instantiated with some keyword arguments you can use to adjust the behavior of your filter.

    Subclassing Filters

As an example, we write a filter that converts all Name.Function tokens to normal Name tokens to make the output less colorful.

from pygments.util import get_bool_opt
from pygments.token import Name
from pygments.filter import Filter

class UncolorFilter(Filter):

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.class_too = get_bool_opt(options, 'classtoo')

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype is Name.Function or (self.class_too and
                                          ttype is Name.Class):
                ttype = Name
            yield ttype, value

Some notes on the lexer argument: it can be quite confusing, since it doesn't need to be a lexer instance. If a filter was added by using the add_filter() function of lexers, that lexer is registered for the filter. In that case lexer will refer to the lexer that has registered the filter. It can be used to access options passed to a lexer. Because it could be None, you always have to check for that case if you access it.
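To see the filter in action, you can register it on a lexer and highlight a snippet; a minimal sketch using the UncolorFilter defined above (the sample code string is arbitrary):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

lexer = PythonLexer()
lexer.add_filter(UncolorFilter(classtoo=True))   # the filter class from the example above
print(highlight('def foo():\n    pass\n', lexer, TerminalFormatter()))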

    Using a decorator

You can also use the simplefilter decorator from the pygments.filter module:

from pygments.util import get_bool_opt
from pygments.token import Name
from pygments.filter import simplefilter


@simplefilter
def uncolor(lexer, stream, options):
    class_too = get_bool_opt(options, 'classtoo')
    for ttype, value in stream:
        if ttype is Name.Function or (class_too and
                                      ttype is Name.Class):
            ttype = Name
        yield ttype, value

The decorator automatically subclasses an internal filter class and uses the decorated function for filtering.
diff --git a/vendor/pygments/docs/build/filters.html b/vendor/pygments/docs/build/filters.html
deleted file mode 100644
index bf1c9f5..0000000
--- a/vendor/pygments/docs/build/filters.html
+++ /dev/null

Filters

    New in Pygments 0.7.

You can filter token streams coming from lexers to improve or annotate the output. For example, you can highlight special words in comments, convert keywords to upper or lowercase to enforce a style guide, etc.

To apply a filter, you can use the add_filter() method of a lexer:

>>> from pygments.lexers import PythonLexer
>>> l = PythonLexer()
>>> # add a filter given by a string and options
>>> l.add_filter('codetagify', case='lower')
>>> l.filters
[<pygments.filters.CodeTagFilter object at 0xb785decc>]
>>> from pygments.filters import KeywordCaseFilter
>>> # or give an instance
>>> l.add_filter(KeywordCaseFilter(case='lower'))

The add_filter() method takes keyword arguments which are forwarded to the constructor of the filter.

To get a list of all registered filters by name, you can use the get_all_filters() function from the pygments.filters module, which returns an iterable over all known filters.
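For example, a minimal sketch that prints the name of every registered filter:

from pygments.filters import get_all_filters

for name in get_all_filters():
    print(name)   # e.g. codetagify, keywordcase, whitespace, ...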

    If you want to write your own filter, have a look at Write your own filter.

    -
    -

    Builtin Filters

    -

    RaiseOnErrorTokenFilter

    -
    -

    Raise an exception when the lexer generates an error token.

    -

    Options accepted:

    -
    -
    excclass : Exception class
    -
    The exception class to raise. -The default is pygments.filters.ErrorToken.
    -
    -

    New in Pygments 0.8.

Name: raiseonerror

    VisibleWhitespaceFilter

    -
    -

    Convert tabs, newlines and/or spaces to visible characters.

    -

    Options accepted:

    -
    -
    spaces : string or bool
    -
If this is a one-character string, spaces will be replaced by this string. If it is another true value, spaces will be replaced by · (unicode MIDDLE DOT). If it is a false value, spaces will not be replaced. The default is False.
    -
    tabs : string or bool
    -
    The same as for spaces, but the default replacement character is » -(unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value -is False. Note: this will not work if the tabsize option for the -lexer is nonzero, as tabs will already have been expanded then.
    -
    tabsize : int
    -
    If tabs are to be replaced by this filter (see the tabs option), this -is the total number of characters that a tab should be expanded to. -The default is 8.
    -
    newlines : string or bool
    -
    The same as for spaces, but the default replacement character is -(unicode PILCROW SIGN). The default value is False.
    -
    wstokentype : bool
    -
    If true, give whitespace the special Whitespace token type. This allows -styling the visible whitespace differently (e.g. greyed out), but it can -disrupt background colors. The default is True.
    -
    -

    New in Pygments 0.8.

Name: whitespace

    TokenMergeFilter

    -
    -

    Merges consecutive tokens with the same token type in the output stream of a -lexer.

    -

    New in Pygments 1.2.

Name: tokenmerge

    NameHighlightFilter

    -
    -

    Highlight a normal Name token with a different token type.

    -

    Example:

    -
    -filter = NameHighlightFilter(
    -    names=['foo', 'bar', 'baz'],
    -    tokentype=Name.Function,
    -)
    -
    -

    This would highlight the names "foo", "bar" and "baz" -as functions. Name.Function is the default token type.

    -

    Options accepted:

    -
    -
    names : list of strings
    -
    A list of names that should be given the different token type. -There is no default.
    -
    tokentype : TokenType or string
    -
    A token type or a string containing a token type name that is -used for highlighting the strings in names. The default is -Name.Function.
    -
Name: highlight

    GobbleFilter

    -
    -

    Gobbles source code lines (eats initial characters).

    -

    This filter drops the first n characters off every line of code. This -may be useful when the source code fed to the lexer is indented by a fixed -amount of space that isn't desired in the output.

    -

    Options accepted:

    -
    -
    n : int
    -
    The number of characters to gobble.
    -
    -

    New in Pygments 1.2.

Name: gobble

    CodeTagFilter

    -
    -

    Highlight special code tags in comments and docstrings.

    -

    Options accepted:

    -
    -
    codetags : list of strings
    -
    A list of strings that are flagged as code tags. The default is to -highlight XXX, TODO, BUG and NOTE.
    -
Name: codetagify

    KeywordCaseFilter

    -
    -

Convert keywords to lowercase or uppercase, or capitalize them, which means first letter uppercase, rest lowercase.

    -

This can be useful, e.g., if you highlight Pascal code and want to adapt the code to your style guide.

    -

    Options accepted:

    -
    -
    case : string
    -
    The casing to convert keywords to. Must be one of 'lower', -'upper' or 'capitalize'. The default is 'lower'.
    -
Name: keywordcase
diff --git a/vendor/pygments/docs/build/formatterdevelopment.html b/vendor/pygments/docs/build/formatterdevelopment.html
deleted file mode 100644
index 1485dd1..0000000
--- a/vendor/pygments/docs/build/formatterdevelopment.html
+++ /dev/null

Write your own formatter

As well as creating your own lexer, writing a new formatter for Pygments is easy and straightforward.

A formatter is a class that is initialized with some keyword arguments (the formatter options) and that must provide a format() method. Additionally, a formatter should provide a get_style_defs() method that returns the style definitions from the style in a form usable for the formatter's output format.

    -
    -

    Quickstart

    -

The most basic formatter shipped with Pygments is the NullFormatter. It just sends the value of a token to the output stream:

from pygments.formatter import Formatter

class NullFormatter(Formatter):
    def format(self, tokensource, outfile):
        for ttype, value in tokensource:
            outfile.write(value)

As you can see, the format() method is passed two parameters: tokensource and outfile. The first is an iterable of (token_type, value) tuples, the latter a file-like object with a write() method.

Because this formatter is so basic, it doesn't override the get_style_defs() method.

    -
    -
    -

    Styles

    -

Styles aren't instantiated, but their metaclass provides some class functions so that you can access the style definitions easily.

Styles are iterable and yield tuples in the form (ttype, d) where ttype is a token and d is a dict with the following keys:

    -
    -
    'color'
    -
    Hexadecimal color value (eg: 'ff0000' for red) or None if not -defined.
    -
    'bold'
    -
    True if the value should be bold
    -
    'italic'
    -
    True if the value should be italic
    -
    'underline'
    -
    True if the value should be underlined
    -
    'bgcolor'
    -
Hexadecimal color value for the background (eg: 'eeeeee' for light gray) or None if not defined.
    -
    'border'
    -
    Hexadecimal color value for the border (eg: '0000aa' for a dark -blue) or None for no border.
    -
    -

Additional keys might appear in the future; formatters should ignore all keys they don't support.
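As a quick illustration of the iteration protocol described above, a minimal sketch that dumps the foreground colors of a style (the 'default' style name is just an example):

from pygments.styles import get_style_by_name

style = get_style_by_name('default')
for ttype, d in style:
    if d['color']:
        print(ttype, '#' + d['color'])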

    -
    -
    -

    HTML 3.2 Formatter

    -

For a more complex example, let's implement an HTML 3.2 formatter. We don't use CSS but inline markup (<u>, <font>, etc.). Because this isn't good style, this formatter isn't in the standard library ;-)

    -
    from pygments.formatter import Formatter
    -
    -class OldHtmlFormatter(Formatter):
    -
    -    def __init__(self, **options):
    -        Formatter.__init__(self, **options)
    -
    -        # create a dict of (start, end) tuples that wrap the
    -        # value of a token so that we can use it in the format
    -        # method later
    -        self.styles = {}
    -
    -        # we iterate over the `_styles` attribute of a style item
    -        # that contains the parsed style values.
    -        for token, style in self.style:
    -            start = end = ''
    -            # a style item is a tuple in the following form:
    -            # colors are readily specified in hex: 'RRGGBB'
    -            if style['color']:
    -                start += '<font color="#%s">' % style['color']
    -                end = '</font>' + end
    -            if style['bold']:
    -                start += '<b>'
    -                end = '</b>' + end
    -            if style['italic']:
    -                start += '<i>'
    -                end = '</i>' + end
    -            if style['underline']:
    -                start += '<u>'
    -                end = '</u>' + end
    -            self.styles[token] = (start, end)
    -
    -    def format(self, tokensource, outfile):
    -        # lastval is a string we use for caching
    -        # because it's possible that an lexer yields a number
    -        # of consecutive tokens with the same token type.
    -        # to minimize the size of the generated html markup we
    -        # try to join the values of same-type tokens here
    -        lastval = ''
    -        lasttype = None
    -
    -        # wrap the whole output with <pre>
    -        outfile.write('<pre>')
    -
    -        for ttype, value in tokensource:
    -            # if the token type doesn't exist in the stylemap
    -            # we try it with the parent of the token type
    -            # eg: parent of Token.Literal.String.Double is
    -            # Token.Literal.String
    -            while ttype not in self.styles:
    -                ttype = ttype.parent
    -            if ttype == lasttype:
    -                # the current token type is the same of the last
    -                # iteration. cache it
    -                lastval += value
    -            else:
    -                # not the same token as last iteration, but we
    -                # have some data in the buffer. wrap it with the
    -                # defined style and write it to the output file
    -                if lastval:
    -                    stylebegin, styleend = self.styles[lasttype]
    -                    outfile.write(stylebegin + lastval + styleend)
    -                # set lastval/lasttype to current values
    -                lastval = value
    -                lasttype = ttype
    -
    -        # if something is left in the buffer, write it to the
    -        # output file, then close the opened <pre> tag
    -        if lastval:
    -            stylebegin, styleend = self.styles[lasttype]
    -            outfile.write(stylebegin + lastval + styleend)
    -        outfile.write('</pre>\n')
    -
    -

The comments should explain it. Again, this formatter doesn't override the get_style_defs() method. If we had used CSS classes instead of inline HTML markup, we would need to generate the CSS first. For that purpose the get_style_defs() method exists:

    -
    -
    -

    Generating Style Definitions

    -

Some formatters like the LatexFormatter and the HtmlFormatter don't output inline markup but reference either macros or CSS classes. Because the definitions of those are not part of the output, the get_style_defs() method exists. It is passed one parameter (if it's used and how it's used is up to the formatter) and has to return a string or None.
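For instance, the CSS rules of an HtmlFormatter can be written to a separate stylesheet; a minimal sketch (the file name and the ".syntax" selector are arbitrary):

from pygments.formatters import HtmlFormatter

with open('syntax.css', 'w') as f:
    f.write(HtmlFormatter(style='colorful').get_style_defs('.syntax'))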
diff --git a/vendor/pygments/docs/build/formatters.html b/vendor/pygments/docs/build/formatters.html
deleted file mode 100644
index 02bfa5c..0000000
--- a/vendor/pygments/docs/build/formatters.html
+++ /dev/null

Available formatters

    This page lists all builtin formatters.

    -
    -

    Common options

    -

    All formatters support these options:

    -
    -
    encoding
    -

    New in Pygments 0.6.

    -

    If given, must be an encoding name (such as "utf-8"). This will -be used to convert the token strings (which are Unicode strings) -to byte strings in the output (default: None). -It will also be written in an encoding declaration suitable for the -document format if the full option is given (e.g. a meta -content-type directive in HTML or an invocation of the inputenc -package in LaTeX).

    -

    If this is "" or None, Unicode strings will be written -to the output file, which most file-like objects do not support. -For example, pygments.highlight() will return a Unicode string if -called with no outfile argument and a formatter that has encoding -set to None because it uses a StringIO.StringIO object that -supports Unicode arguments to write(). Using a regular file object -wouldn't work.

    -
    -
    outencoding
    -

    New in Pygments 0.7.

    -

    When using Pygments from the command line, any encoding option given is -passed to the lexer and the formatter. This is sometimes not desirable, -for example if you want to set the input encoding to "guess". -Therefore, outencoding has been introduced which overrides encoding -for the formatter if given.

    -
    -
    -
    -
    -

    Formatter classes

    -

    All these classes are importable from pygments.formatters.

    -
    -

    BBCodeFormatter

    -
    -

    Format tokens with BBcodes. These formatting codes are used by many -bulletin boards, so you can highlight your sourcecode with pygments before -posting it there.

    -

    This formatter has no support for background colors and borders, as there -are no common BBcode tags for that.

    -

    Some board systems (e.g. phpBB) don't support colors in their [code] tag, -so you can't use the highlighting together with that tag. -Text in a [code] tag usually is shown with a monospace font (which this -formatter can do with the monofont option) and no spaces (which you -need for indentation) are removed.

    -

    Additional options accepted:

    -
    -
    style
    -
    The style to use, can be a string or a Style subclass (default: -'default').
    -
    codetag
    -
    If set to true, put the output into [code] tags (default: -false)
    -
    monofont
    -
    If set to true, add a tag to show the code with a monospace font -(default: false).
    -
    - --- - - - - - -
    Short names:bbcode, bb
    Filename patterns:None
    -
    -
    -
    -

    BmpImageFormatter

    -
    -

    Create a bitmap image from source code. This uses the Python Imaging Library to -generate a pixmap from the source code.

    -

    New in Pygments 1.0. (You could create bitmap images before by passing a -suitable image_format option to the ImageFormatter.)

    - --- - - - - - -
    Short names:bmp, bitmap
    Filename patterns:*.bmp
    -
    -
    -
    -

    GifImageFormatter

    -
    -

    Create a GIF image from source code. This uses the Python Imaging Library to -generate a pixmap from the source code.

    -

    New in Pygments 1.0. (You could create GIF images before by passing a -suitable image_format option to the ImageFormatter.)

    - --- - - - - - -
    Short names:gif
    Filename patterns:*.gif
    -
    -
    -
    -

    HtmlFormatter

    -
    -

    Format tokens as HTML 4 <span> tags within a <pre> tag, wrapped -in a <div> tag. The <div>'s CSS class can be set by the cssclass -option.

    -

    If the linenos option is set to "table", the <pre> is -additionally wrapped inside a <table> which has one row and two -cells: one containing the line numbers and one containing the code. -Example:

    -
    <div class="highlight" >
    -<table><tr>
    -  <td class="linenos" title="click to toggle"
    -    onclick="with (this.firstChild.style)
    -             { display = (display == '') ? 'none' : '' }">
    -    <pre>1
    -    2</pre>
    -  </td>
    -  <td class="code">
    -    <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
    -      <span class="Ke">pass</span>
    -    </pre>
    -  </td>
    -</tr></table></div>
    -
    -

    (whitespace added to improve clarity).

    -

    Wrapping can be disabled using the nowrap option.

    -

    A list of lines can be specified using the hl_lines option to make these -lines highlighted (as of Pygments 0.11).

    -

    With the full option, a complete HTML 4 document is output, including -the style definitions inside a <style> tag, or in a separate file if -the cssfile option is given.

    -

    When tagsfile is set to the path of a ctags index file, it is used to -generate hyperlinks from names to their definition. You must enable -anchorlines and run ctags with the -n option for this to work. The -python-ctags module from PyPI must be installed to use this feature; -otherwise a RuntimeError will be raised.

    -

    The get_style_defs(arg='') method of a HtmlFormatter returns a string -containing CSS rules for the CSS classes used by the formatter. The -argument arg can be used to specify additional CSS selectors that -are prepended to the classes. A call fmter.get_style_defs('td .code') -would result in the following CSS classes:

    -
    td .code .kw { font-weight: bold; color: #00FF00 }
    -td .code .cm { color: #999999 }
    -...
    -
    -

    If you have Pygments 0.6 or higher, you can also pass a list or tuple to the -get_style_defs() method to request multiple prefixes for the tokens:

    -
    formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
    -
    -

    The output would then look like this:

    -
    div.syntax pre .kw,
    -pre.syntax .kw { font-weight: bold; color: #00FF00 }
    -div.syntax pre .cm,
    -pre.syntax .cm { color: #999999 }
    -...
    -
    -

    Additional options accepted:

    -
    -
    nowrap
    -
    If set to True, don't wrap the tokens at all, not even inside a <pre> -tag. This disables most other options (default: False).
    -
    full
    -
    Tells the formatter to output a "full" document, i.e. a complete -self-contained document (default: False).
    -
    title
    -
    If full is true, the title that should be used to caption the -document (default: '').
    -
    style
    -
    The style to use, can be a string or a Style subclass (default: -'default'). This option has no effect if the cssfile -and noclobber_cssfile option are given and the file specified in -cssfile exists.
    -
    noclasses
    -
    If set to true, token <span> tags will not use CSS classes, but -inline styles. This is not recommended for larger pieces of code since -it increases output size by quite a bit (default: False).
    -
    classprefix
    -
    Since the token types use relatively short class names, they may clash -with some of your own class names. In this case you can use the -classprefix option to give a string to prepend to all Pygments-generated -CSS class names for token types. -Note that this option also affects the output of get_style_defs().
    -
    cssclass
    -

    CSS class for the wrapping <div> tag (default: 'highlight'). -If you set this option, the default selector for get_style_defs() -will be this class.

    -

    New in Pygments 0.9: If you select the 'table' line numbers, the -wrapping table will have a CSS class of this string plus 'table', -the default is accordingly 'highlighttable'.

    -
    -
    cssstyles
    -
    Inline CSS styles for the wrapping <div> tag (default: '').
    -
    prestyles
    -
    Inline CSS styles for the <pre> tag (default: ''). New in -Pygments 0.11.
    -
    cssfile
    -
    If the full option is true and this option is given, it must be the -name of an external file. If the filename does not include an absolute -path, the file's path will be assumed to be relative to the main output -file's path, if the latter can be found. The stylesheet is then written -to this file instead of the HTML file. New in Pygments 0.6.
    -
    noclobber_cssfile
    -
    If cssfile is given and the specified file exists, the css file will -not be overwritten. This allows the use of the full option in -combination with a user specified css file. Default is False. -New in Pygments 1.1.
    -
    linenos
    -

    If set to 'table', output line numbers as a table with two cells, -one containing the line numbers, the other the whole code. This is -copy-and-paste-friendly, but may cause alignment problems with some -browsers or fonts. If set to 'inline', the line numbers will be -integrated in the <pre> tag that contains the code (that setting -is new in Pygments 0.8).

    -

    For compatibility with Pygments 0.7 and earlier, every true value -except 'inline' means the same as 'table' (in particular, that -means also True).

    -

    The default value is False, which means no line numbers at all.

    -

    Note: with the default ("table") line number mechanism, the line -numbers and code can have different line heights in Internet Explorer -unless you give the enclosing <pre> tags an explicit line-height -CSS property (you get the default line spacing with line-height: -125%).

    -
    -
    hl_lines
    -
    Specify a list of lines to be highlighted. New in Pygments 0.11.
    -
    linenostart
    -
    The line number for the first line (default: 1).
    -
    linenostep
    -
    If set to a number n > 1, only every nth line number is printed.
    -
    linenospecial
    -
    If set to a number n > 0, every nth line number is given the CSS -class "special" (default: 0).
    -
    nobackground
    -
    If set to True, the formatter won't output the background color -for the wrapping element (this automatically defaults to False -when there is no wrapping element [eg: no argument for the -get_syntax_defs method given]) (default: False). New in -Pygments 0.6.
    -
    lineseparator
    -
    This string is output between lines of code. It defaults to "\n", -which is enough to break a line inside <pre> tags, but you can -e.g. set it to "<br>" to get HTML line breaks. New in Pygments -0.7.
    -
    lineanchors
    -
    If set to a nonempty string, e.g. foo, the formatter will wrap each -output line in an anchor tag with a name of foo-linenumber. -This allows easy linking to certain lines. New in Pygments 0.9.
    -
    linespans
    -
    If set to a nonempty string, e.g. foo, the formatter will wrap each -output line in a span tag with an id of foo-linenumber. -This allows easy access to lines via javascript. New in Pygments 1.6.
    -
    anchorlinenos
    -
    If set to True, will wrap line numbers in <a> tags. Used in -combination with linenos and lineanchors.
    -
    tagsfile
    -
    If set to the path of a ctags file, wrap names in anchor tags that -link to their definitions. lineanchors should be used, and the -tags file should specify line numbers (see the -n option to ctags). -New in Pygments 1.6.
    -
    tagurlformat
    -
A string formatting pattern used to generate links to ctags definitions. Available variables are %(path)s, %(fname)s and %(fext)s. Defaults to an empty string, resulting in just #prefix-number links. New in Pygments 1.6.
    -
    -

    Subclassing the HTML formatter

    -

    New in Pygments 0.7.

    -

    The HTML formatter is now built in a way that allows easy subclassing, thus -customizing the output HTML code. The format() method calls -self._format_lines() which returns a generator that yields tuples of (1, -line), where the 1 indicates that the line is a line of the -formatted source code.

    -

If the nowrap option is set, the generator is then iterated over and the resulting HTML is output.

    -

    Otherwise, format() calls self.wrap(), which wraps the generator with -other generators. These may add some HTML code to the one generated by -_format_lines(), either by modifying the lines generated by the latter, -then yielding them again with (1, line), and/or by yielding other HTML -code before or after the lines, with (0, html). The distinction between -source lines and other code makes it possible to wrap the generator multiple -times.

    -

    The default wrap() implementation adds a <div> and a <pre> tag.

    -

    A custom HtmlFormatter subclass could look like this:

    -
    class CodeHtmlFormatter(HtmlFormatter):
    -
    -    def wrap(self, source, outfile):
    -        return self._wrap_code(source)
    -
    -    def _wrap_code(self, source):
    -        yield 0, '<code>'
    -        for i, t in source:
    -            if i == 1:
    -                # it's a line of formatted code
    -                t += '<br>'
    -            yield i, t
    -        yield 0, '</code>'
    -
    -

    This results in wrapping the formatted lines with a <code> tag, where the -source lines are broken using <br> tags.

    -

    After calling wrap(), the format() method also adds the "line numbers" -and/or "full document" wrappers if the respective options are set. Then, all -HTML yielded by the wrapped generator is output.

    - --- - - - - - -
    Short names:html
    Filename patterns:*.html, *.htm
    -
    -
    -
    -

    ImageFormatter

    -
    -

    Create a PNG image from source code. This uses the Python Imaging Library to -generate a pixmap from the source code.

    -

    New in Pygments 0.10.

    -

    Additional options accepted:

    -
    -
    image_format
    -

    An image format to output to that is recognised by PIL, these include:

    -
      -
    • "PNG" (default)
    • -
    • "JPEG"
    • -
    • "BMP"
    • -
    • "GIF"
    • -
    -
    -
    line_pad
    -

    The extra spacing (in pixels) between each line of text.

    -

    Default: 2

    -
    -
    font_name
    -

    The font name to be used as the base font from which others, such as -bold and italic fonts will be generated. This really should be a -monospace font to look sane.

    -

    Default: "Bitstream Vera Sans Mono"

    -
    -
    font_size
    -

    The font size in points to be used.

    -

    Default: 14

    -
    -
    image_pad
    -

    The padding, in pixels to be used at each edge of the resulting image.

    -

    Default: 10

    -
    -
    line_numbers
    -

    Whether line numbers should be shown: True/False

    -

    Default: True

    -
    -
    line_number_start
    -

    The line number of the first line.

    -

    Default: 1

    -
    -
    line_number_step
    -

    The step used when printing line numbers.

    -

    Default: 1

    -
    -
    line_number_bg
    -

    The background colour (in "#123456" format) of the line number bar, or -None to use the style background color.

    -

    Default: "#eed"

    -
    -
    line_number_fg
    -

    The text color of the line numbers (in "#123456"-like format).

    -

    Default: "#886"

    -
    -
    line_number_chars
    -

    The number of columns of line numbers allowable in the line number -margin.

    -

    Default: 2

    -
    -
    line_number_bold
    -

    Whether line numbers will be bold: True/False

    -

    Default: False

    -
    -
    line_number_italic
    -

    Whether line numbers will be italicized: True/False

    -

    Default: False

    -
    -
    line_number_separator
    -

    Whether a line will be drawn between the line number area and the -source code area: True/False

    -

    Default: True

    -
    -
    line_number_pad
    -

    The horizontal padding (in pixels) between the line number margin, and -the source code area.

    -

    Default: 6

    -
    -
    hl_lines
    -

    Specify a list of lines to be highlighted. New in Pygments 1.2.

    -

    Default: empty list

    -
    -
    hl_color
    -

    Specify the color for highlighting lines. New in Pygments 1.2.

    -

    Default: highlight color of the selected style

    -
    -
    - --- - - - - - -
    Short names:img, IMG, png
    Filename patterns:*.png
    -
    -
    -
    -

    JpgImageFormatter

    -
    -

    Create a JPEG image from source code. This uses the Python Imaging Library to -generate a pixmap from the source code.

    -

    New in Pygments 1.0. (You could create JPEG images before by passing a -suitable image_format option to the ImageFormatter.)

    - --- - - - - - -
    Short names:jpg, jpeg
    Filename patterns:*.jpg
    -
    -
    -
    -

    LatexFormatter

    -
    -

    Format tokens as LaTeX code. This needs the fancyvrb and color -standard packages.

    -

    Without the full option, code is formatted as one Verbatim -environment, like this:

    -
    \begin{Verbatim}[commandchars=\\{\}]
    -\PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
    -    \PY{k}{pass}
    -\end{Verbatim}
    -
    -

    The special command used here (\PY) and all the other macros it needs -are output by the get_style_defs method.

    -

With the full option, a complete LaTeX document is output, including the command definitions in the preamble.
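For example, a complete standalone document can be produced like this; a minimal sketch (the file name and sample code are placeholders):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import LatexFormatter

with open('test.tex', 'w') as f:
    f.write(highlight('print "Hello World"', PythonLexer(),
                      LatexFormatter(full=True, linenos=True)))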

    The get_style_defs() method of a LatexFormatter returns a string -containing \def commands defining the macros needed inside the -Verbatim environments.

    -

    Additional options accepted:

    -
    -
    style
    -
    The style to use, can be a string or a Style subclass (default: -'default').
    -
    full
    -
    Tells the formatter to output a "full" document, i.e. a complete -self-contained document (default: False).
    -
    title
    -
    If full is true, the title that should be used to caption the -document (default: '').
    -
    docclass
    -
    If the full option is enabled, this is the document class to use -(default: 'article').
    -
    preamble
    -
    If the full option is enabled, this can be further preamble commands, -e.g. \usepackage (default: '').
    -
    linenos
    -
    If set to True, output line numbers (default: False).
    -
    linenostart
    -
    The line number for the first line (default: 1).
    -
    linenostep
    -
    If set to a number n > 1, only every nth line number is printed.
    -
    verboptions
    -
    Additional options given to the Verbatim environment (see the fancyvrb -docs for possible values) (default: '').
    -
    commandprefix
    -

    The LaTeX commands used to produce colored output are constructed -using this prefix and some letters (default: 'PY'). -New in Pygments 0.7.

    -

    New in Pygments 0.10: the default is now 'PY' instead of 'C'.

    -
    -
    texcomments
    -
    If set to True, enables LaTeX comment lines. That is, LaTex markup -in comment tokens is not escaped so that LaTeX can render it (default: -False). New in Pygments 1.2.
    -
    mathescape
    -
    If set to True, enables LaTeX math mode escape in comments. That -is, '$...$' inside a comment will trigger math mode (default: -False). New in Pygments 1.2.
    -
    - --- - - - - - -
    Short names:latex, tex
    Filename patterns:*.tex
    -
    -
    -
    -

    NullFormatter

    -
    -

    Output the text unchanged without any formatting.

    - --- - - - - - -
    Short names:text, null
    Filename patterns:*.txt
    -
    -
    -
    -

    RawTokenFormatter

    -
    -

    Format tokens as a raw representation for storing token streams.

    -

The format is tokentype<TAB>repr(tokenstring)\n. The output can later be converted to a token stream with the RawTokenLexer, described in the lexer list.
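A minimal round-trip sketch (exact str/bytes handling differs between Python versions, so treat this as an outline rather than a canonical recipe):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.lexers.special import RawTokenLexer
from pygments.formatters import RawTokenFormatter, HtmlFormatter

raw = highlight('print "Hello World"', PythonLexer(), RawTokenFormatter())
# ... store the token stream, then later convert it back without re-lexing:
html = highlight(raw, RawTokenLexer(), HtmlFormatter())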

    Only two options are accepted:

    -
    -
    compress
    -
    If set to 'gz' or 'bz2', compress the output with the given -compression algorithm after encoding (default: '').
    -
    error_color
    -
    If set to a color name, highlight error tokens using that color. If -set but with no value, defaults to 'red'. -New in Pygments 0.11.
    -
    - --- - - - - - -
    Short names:raw, tokens
    Filename patterns:*.raw
    -
    -
    -
    -

    RtfFormatter

    -
    -

    Format tokens as RTF markup. This formatter automatically outputs full RTF -documents with color information and other useful stuff. Perfect for Copy and -Paste into Microsoft® Word® documents.

    -

    New in Pygments 0.6.

    -

    Additional options accepted:

    -
    -
    style
    -
    The style to use, can be a string or a Style subclass (default: -'default').
    -
    fontface
    -
The font family to use, for example Bitstream Vera Sans. Defaults to some generic font which is supposed to have fixed width.
    -
    - --- - - - - - -
    Short names:rtf
    Filename patterns:*.rtf
    -
    -
    -
    -

    SvgFormatter

    -
    -

    Format tokens as an SVG graphics file. This formatter is still experimental. -Each line of code is a <text> element with explicit x and y -coordinates containing <tspan> elements with the individual token styles.

    -

    By default, this formatter outputs a full SVG document including doctype -declaration and the <svg> root element.

    -

    New in Pygments 0.9.

    -

    Additional options accepted:

    -
    -
    nowrap
    -
    Don't wrap the SVG <text> elements in <svg><g> elements and -don't add a XML declaration and a doctype. If true, the fontfamily -and fontsize options are ignored. Defaults to False.
    -
    fontfamily
    -
    The value to give the wrapping <g> element's font-family -attribute, defaults to "monospace".
    -
    fontsize
    -
    The value to give the wrapping <g> element's font-size -attribute, defaults to "14px".
    -
    xoffset
    -
    Starting offset in X direction, defaults to 0.
    -
    yoffset
    -
    Starting offset in Y direction, defaults to the font size if it is given -in pixels, or 20 else. (This is necessary since text coordinates -refer to the text baseline, not the top edge.)
    -
    ystep
    -
    Offset to add to the Y coordinate for each subsequent line. This should -roughly be the text size plus 5. It defaults to that value if the text -size is given in pixels, or 25 else.
    -
    spacehack
    -
    Convert spaces in the source to &#160;, which are non-breaking -spaces. SVG provides the xml:space attribute to control how -whitespace inside tags is handled, in theory, the preserve value -could be used to keep all whitespace as-is. However, many current SVG -viewers don't obey that rule, so this option is provided as a workaround -and defaults to True.
    -
    - --- - - - - - -
    Short names:svg
    Filename patterns:*.svg
    -
    -
    -
    -

    Terminal256Formatter

    -
    -

Format tokens with ANSI color sequences, for output in a 256-color terminal or console. As in TerminalFormatter, color sequences are terminated at newlines, so that paging the output works correctly.

    -

    The formatter takes colors from a style defined by the style option -and converts them to nearest ANSI 256-color escape sequences. Bold and -underline attributes from the style are preserved (and displayed).

    -

    New in Pygments 0.9.

    -

    Options accepted:

    -
    -
    style
    -
    The style to use, can be a string or a Style subclass (default: -'default').
    -
    - --- - - - - - -
    Short names:terminal256, console256, 256
    Filename patterns:None
    -
    -
    -
    -

    TerminalFormatter

    -
    -

    Format tokens with ANSI color sequences, for output in a text console. -Color sequences are terminated at newlines, so that paging the output -works correctly.

    -

    The get_style_defs() method doesn't do anything special since there is -no support for common styles.

    -

    Options accepted:

    -
    -
    bg
    -
    Set to "light" or "dark" depending on the terminal's background -(default: "light").
    -
    colorscheme
    -
    A dictionary mapping token types to (lightbg, darkbg) color names or -None (default: None = use builtin colorscheme).
    -
Short names: terminal, console
Filename patterns: None
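A minimal usage sketch (the sample code string and the dark-background choice are arbitrary):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

print(highlight('print "Hello World"', PythonLexer(), TerminalFormatter(bg='dark')))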
diff --git a/vendor/pygments/docs/build/index.html b/vendor/pygments/docs/build/index.html
deleted file mode 100644
index 6ab5fd9..0000000
--- a/vendor/pygments/docs/build/index.html
+++ /dev/null

Overview

    Welcome to the Pygments documentation.

If you find bugs or have suggestions for the documentation, please look here for info on how to contact the team.

You can download an offline version of this documentation from the download page.
diff --git a/vendor/pygments/docs/build/installation.html b/vendor/pygments/docs/build/installation.html
deleted file mode 100644
index 6c32f77..0000000
--- a/vendor/pygments/docs/build/installation.html
+++ /dev/null

Installation

Pygments requires at least Python 2.4 to work correctly. Just to clarify: there won't ever be support for Python versions below 2.4. However, there are no other dependencies.

    -
    -

    Installing a released version

    -
    -

    As a Python egg (via easy_install)

    -

    You can install the most recent Pygments version using easy_install:

sudo easy_install Pygments

This will install a Pygments egg in your Python installation's site-packages directory.

    -
    -
    -

    From the tarball release

    -
1. Download the most recent tarball from the download page
2. Unpack the tarball
3. sudo python setup.py install

Note that the last command will automatically download and install setuptools if you don't already have it installed. This requires a working internet connection.

    -

    This will install Pygments into your Python installation's site-packages directory.

    -
    -
    -
    -

    Installing the development version

    -
    -

    If you want to play around with the code

    -
1. Install Mercurial
2. hg clone http://bitbucket.org/birkenfeld/pygments-main pygments
3. cd pygments
4. ln -s pygments /usr/lib/python2.X/site-packages
5. ln -s pygmentize /usr/local/bin

As an alternative to steps 4 and 5 you can also do python setup.py develop, which will install the package via setuptools in development mode.

    - -
    -
    - -
diff --git a/vendor/pygments/docs/build/integrate.html b/vendor/pygments/docs/build/integrate.html
deleted file mode 100644
index 3b7ddc7..0000000
--- a/vendor/pygments/docs/build/integrate.html
+++ /dev/null

Using Pygments in various scenarios
    -

    PyGtk

    -

    Armin has written a piece of sample code that shows how to create a Gtk -TextBuffer object containing Pygments-highlighted text.

    -

    See the article here: http://lucumr.pocoo.org/cogitations/2007/05/30/pygments-gtk-rendering/

    -
    -
    -

    Wordpress

    -

    He also has a snippet that shows how to use Pygments in WordPress:

    -

    http://lucumr.pocoo.org/cogitations/2007/05/30/pygments-in-wordpress/

    -
    -
    -

    Markdown

    -

    Since Pygments 0.9, the distribution ships Markdown preprocessor sample code -that uses Pygments to render source code in external/markdown-processor.py. -You can copy and adapt it to your liking.

    -
    -
    -

    TextMate

    -

Antonio Cangiano has created a Pygments bundle for TextMate that allows you to colorize code via a simple menu option. It can be found here.

    -
    -
    -

    Bash completion

    -

    The source distribution contains a file external/pygments.bashcomp that -sets up completion for the pygmentize command in bash.

    -
    -
    -

    Java

    -

    See the Java quickstart document.

    -
    - -
diff --git a/vendor/pygments/docs/build/java.html b/vendor/pygments/docs/build/java.html
deleted file mode 100644
index af0b6e1..0000000
--- a/vendor/pygments/docs/build/java.html
+++ /dev/null

Use Pygments in Java

Thanks to Jython it is possible to use Pygments in Java.

This page is a simple tutorial to get an idea of how this works. You can then look at the Jython documentation for more advanced use.

Since version 1.5, Pygments is deployed on Maven Central as a JAR, as is Jython, which makes it a lot easier to create the Java project.

    -

    Here is an example of a Maven pom.xml file for a -project running Pygments:

    -
    <?xml version="1.0" encoding="UTF-8"?>
    -
    -<project xmlns="http://maven.apache.org/POM/4.0.0"
    -         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    -         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
    -                             http://maven.apache.org/maven-v4_0_0.xsd">
    -  <modelVersion>4.0.0</modelVersion>
    -  <groupId>example</groupId>
    -  <artifactId>example</artifactId>
    -  <version>1.0-SNAPSHOT</version>
    -  <dependencies>
    -    <dependency>
    -      <groupId>org.python</groupId>
    -      <artifactId>jython-standalone</artifactId>
    -      <version>2.5.3</version>
    -    </dependency>
    -    <dependency>
    -      <groupId>org.pygments</groupId>
    -      <artifactId>pygments</artifactId>
    -      <version>1.5</version>
    -      <scope>runtime</scope>
    -    </dependency>
    -  </dependencies>
    -</project>
    -
    -

    The following Java example:

    -
    PythonInterpreter interpreter = new PythonInterpreter();
    -
    -// Set a variable with the content you want to work with
    -interpreter.set("code", code);
    -
    -// Simple use Pygments as you would in Python
    -interpreter.exec("from pygments import highlight\n"
    -    + "from pygments.lexers import PythonLexer\n"
    -    + "from pygments.formatters import HtmlFormatter\n"
    -    + "\nresult = highlight(code, PythonLexer(), HtmlFormatter())");
    -
    -// Get the result that has been set in a variable
    -System.out.println(interpreter.get("result", String.class));
    -
    -

    will print something like:

    -
    <div class="highlight">
    -<pre><span class="k">print</span> <span class="s">&quot;Hello World&quot;</span></pre>
    -</div>
    -
    - -
diff --git a/vendor/pygments/docs/build/lexerdevelopment.html b/vendor/pygments/docs/build/lexerdevelopment.html
deleted file mode 100644
index 3352d48..0000000
--- a/vendor/pygments/docs/build/lexerdevelopment.html
+++ /dev/null

Write your own lexer

If a lexer for your favorite language is missing in the Pygments package, you can easily write your own and extend Pygments.

All you need can be found inside the pygments.lexer module. As you can read in the API documentation, a lexer is a class that is initialized with some keyword arguments (the lexer options) and that provides a get_tokens_unprocessed() method which is given a string or unicode object with the data to parse.

The get_tokens_unprocessed() method must return an iterator or iterable containing tuples in the form (index, token, value). Normally you don't need to do this since there are numerous base lexers you can subclass.
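Before looking at the base classes, here is a deliberately tiny sketch of that interface: a hypothetical lexer that emits its whole input as a single Text token (the name and alias are made up for illustration only):

from pygments.lexer import Lexer
from pygments.token import Text

class EchoLexer(Lexer):
    name = 'Echo'        # hypothetical, not a builtin lexer
    aliases = ['echo']

    def get_tokens_unprocessed(self, text):
        yield 0, Text, text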
    -

    RegexLexer

    -

A very powerful (but quite easy to use) lexer is the RegexLexer. This lexer base class allows you to define lexing rules in terms of regular expressions for different states.

States are groups of regular expressions that are matched against the input string at the current position. If one of these expressions matches, a corresponding action is performed (normally yielding a token with a specific type), the current position is set to where the last match ended and the matching process continues with the first regex of the current state.

Lexer states are kept in a state stack: each time a new state is entered, the new state is pushed onto the stack. The most basic lexers (like the DiffLexer) just need one state.

Each state is defined as a list of tuples in the form (regex, action, new_state) where the last item is optional. In the most basic form, action is a token type (like Name.Builtin). That means: when regex matches, emit a token with the match text and type tokentype and push new_state on the state stack. If the new state is '#pop', the topmost state is popped from the stack instead. (To pop more than one state, use '#pop:2' and so on.) '#push' is a synonym for pushing the current state on the stack.

The following example shows the DiffLexer from the builtin lexers. Note that it contains some additional attributes name, aliases and filenames which aren't required for a lexer. They are used by the builtin lexer lookup functions.

    -
    from pygments.lexer import RegexLexer
    -from pygments.token import *
    -
    -class DiffLexer(RegexLexer):
    -    name = 'Diff'
    -    aliases = ['diff']
    -    filenames = ['*.diff']
    -
    -    tokens = {
    -        'root': [
    -            (r' .*\n', Text),
    -            (r'\+.*\n', Generic.Inserted),
    -            (r'-.*\n', Generic.Deleted),
    -            (r'@.*\n', Generic.Subheading),
    -            (r'Index.*\n', Generic.Heading),
    -            (r'=.*\n', Generic.Heading),
    -            (r'.*\n', Text),
    -        ]
    -    }
    -
    -

    As you can see this lexer only uses one state. When the lexer starts scanning -the text, it first checks if the current character is a space. If this is true -it scans everything until newline and returns the parsed data as Text token.

    -

    If this rule doesn't match, it checks if the current char is a plus sign. And -so on.

    -

    If no rule matches at the current position, the current char is emitted as an -Error token that indicates a parsing error, and the position is increased by -1.

    -
    -
    -

    Regex Flags

    -

You can either define regex flags in the regex (r'(?x)foo bar') or by adding a flags attribute to your lexer class. If no attribute is defined, it defaults to re.MULTILINE. For more information about regular expression flags see the regular expressions help page in the Python documentation.
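For example, a hypothetical lexer that adds IGNORECASE to the default flags might look like this (a minimal sketch):

import re
from pygments.lexer import RegexLexer
from pygments.token import Keyword, Text

class CaseInsensitiveLexer(RegexLexer):
    flags = re.MULTILINE | re.IGNORECASE   # replaces the default re.MULTILINE
    tokens = {
        'root': [
            (r'begin|end', Keyword),       # also matches BEGIN, End, ...
            (r'.|\n', Text),
        ]
    }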
    -
    -

    Scanning multiple tokens at once

    -

    Here is a more complex lexer that highlights INI files. INI files consist of -sections, comments and key = value pairs:

    -
    from pygments.lexer import RegexLexer, bygroups
    -from pygments.token import *
    -
    -class IniLexer(RegexLexer):
    -    name = 'INI'
    -    aliases = ['ini', 'cfg']
    -    filenames = ['*.ini', '*.cfg']
    -
    -    tokens = {
    -        'root': [
    -            (r'\s+', Text),
    -            (r';.*?$', Comment),
    -            (r'\[.*?\]$', Keyword),
    -            (r'(.*?)(\s*)(=)(\s*)(.*?)$',
    -             bygroups(Name.Attribute, Text, Operator, Text, String))
    -        ]
    -    }
    -
    -

    The lexer first looks for whitespace, comments and section names. And later it -looks for a line that looks like a key, value pair, separated by an '=' -sign, and optional whitespace.

    -

The bygroups helper makes sure that each group is yielded with a different token type. First the Name.Attribute token, then a Text token for the optional whitespace, after that an Operator token for the equals sign. Then a Text token for the whitespace again. The rest of the line is returned as String.

    -

    Note that for this to work, every part of the match must be inside a capturing -group (a (...)), and there must not be any nested capturing groups. If you -nevertheless need a group, use a non-capturing group defined using this syntax: -r'(?:some|words|here)' (note the ?: after the beginning parenthesis).

    -

If you find yourself needing a capturing group inside the regex which shouldn't be part of the output but is used in the regular expressions for backreferencing (eg: r'(<(foo|bar)>)(.*?)(</\2>)'), you can pass None to the bygroups function and that group will be skipped in the output.

    -
    -
    -

    Changing states

    -

    Many lexers need multiple states to work as expected. For example, some -languages allow multiline comments to be nested. Since this is a recursive -pattern it's impossible to lex just using regular expressions.

    -

    Here is the solution:

    -
    from pygments.lexer import RegexLexer
    -from pygments.token import *
    -
    -class ExampleLexer(RegexLexer):
    -    name = 'Example Lexer with states'
    -
    -    tokens = {
    -        'root': [
    -            (r'[^/]+', Text),
    -            (r'/\*', Comment.Multiline, 'comment'),
    -            (r'//.*?$', Comment.Singleline),
    -            (r'/', Text)
    -        ],
    -        'comment': [
    -            (r'[^*/]', Comment.Multiline),
    -            (r'/\*', Comment.Multiline, '#push'),
    -            (r'\*/', Comment.Multiline, '#pop'),
    -            (r'[*/]', Comment.Multiline)
    -        ]
    -    }
    -
    -

    This lexer starts lexing in the 'root' state. It tries to match as much as -possible until it finds a slash ('/'). If the next character after the slash -is a star ('*') the RegexLexer sends those two characters to the output -stream marked as Comment.Multiline and continues parsing with the rules -defined in the 'comment' state.

    -

    If there wasn't a star after the slash, the RegexLexer checks if it's a -singleline comment (eg: followed by a second slash). If this also wasn't the -case it must be a single slash (the separate regex for a single slash must also -be given, else the slash would be marked as an error token).

    -

    Inside the 'comment' state, we do the same thing again. Scan until the lexer -finds a star or slash. If it's the opening of a multiline comment, push the -'comment' state on the stack and continue scanning, again in the -'comment' state. Else, check if it's the end of the multiline comment. If -yes, pop one state from the stack.

    -

    Note: If you pop from an empty stack you'll get an IndexError. (There is an -easy way to prevent this from happening: don't '#pop' in the root state).

    -

If the RegexLexer encounters a newline that is flagged as an error token, the stack is emptied and the lexer continues scanning in the 'root' state. This helps produce error-tolerant highlighting for erroneous input, e.g. when a single-line string is not closed.

    -
    -
    -

    Advanced state tricks

    -

    There are a few more things you can do with states:

    -
      -
    • You can push multiple states onto the stack if you give a tuple instead of a simple string as the third item in a rule tuple. For example, if you want to match a comment containing a directive, something like:

      -
      -/* <processing directive>    rest of comment */
      -
      -

      you can use this rule:

      -
      tokens = {
      -    'root': [
      -        (r'/\* <', Comment, ('comment', 'directive')),
      -        ...
      -    ],
      -    'directive': [
      -        (r'[^>]*', Comment.Directive),
      -        (r'>', Comment, '#pop'),
      -    ],
      -    'comment': [
      -        (r'[^*]+', Comment),
      -        (r'\*/', Comment, '#pop'),
      -        (r'\*', Comment),
      -    ]
      -}
      -
      -

      When this encounters the above sample, first 'comment' and 'directive' are pushed onto the stack, then the lexer continues in the directive state until it finds the closing >, then it continues in the comment state until the closing */. Then, both states are popped from the stack again and lexing continues in the root state.

      -

      New in Pygments 0.9: The tuple can contain the special '#push' and '#pop' (but not '#pop:n') directives.

      -
    • -
    • You can include the rules of a state in the definition of another. This is done by using include from pygments.lexer:

      -
      from pygments.lexer import RegexLexer, bygroups, include
      -from pygments.token import *
      -
      -class ExampleLexer(RegexLexer):
      -    tokens = {
      -        'comments': [
      -            (r'/\*.*?\*/', Comment),
      -            (r'//.*?\n', Comment),
      -        ],
      -        'root': [
      -            include('comments'),
      -            (r'(function )(\w+)( {)',
      -             bygroups(Keyword, Name, Keyword), 'function'),
      -            (r'.', Text),
      -        ],
      -        'function': [
      -            (r'[^}/]+', Text),
      -            include('comments'),
      -            (r'/', Text),
      -            (r'}', Keyword, '#pop'),
      -        ]
      -    }
      -
      -

      This is a hypothetical lexer for a language that consists of functions and comments. Because comments can occur at toplevel and in functions, we need rules for comments in both states. As you can see, the include helper saves repeating rules that occur more than once (in this example, the state 'comments' will never be entered by the lexer, as it's only there to be included in 'root' and 'function').

      -
    • -
    • Sometimes, you may want to "combine" a state from existing ones. This is possible with the combined helper from pygments.lexer.

      -

      If you, instead of a new state, write combined('state1', 'state2') as the third item of a rule tuple, a new anonymous state will be formed from state1 and state2, and if the rule matches, the lexer will enter this state.

      -

      This is not used very often, but can be helpful in some cases, such as the PythonLexer's string literal processing (a small sketch follows this list).

      -
    • -
    • If you want your lexer to start lexing in a different state you can modify the stack by overloading the get_tokens_unprocessed() method:

      -
      from pygments.lexer import RegexLexer
      -
      -class MyLexer(RegexLexer):
      -    tokens = {...}
      -
      -    def get_tokens_unprocessed(self, text):
      -        stack = ['root', 'otherstate']
      -        for item in RegexLexer.get_tokens_unprocessed(self, text, stack):
      -            yield item
      -
      -

      Some lexers like the PhpLexer use this to make the leading <?php preprocessor comments optional. Note that you can crash the lexer easily by putting values into the stack that don't exist in the token map. Also, removing 'root' from the stack can result in strange errors!

      -
    • -
    • An empty regex at the end of a state list, combined with '#pop', can act as a return point from a state that doesn't have a clear end marker.

      -
    • -
    -
    -
    -
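
    Here is the small combined() sketch promised above (the state names and rules are assumptions chosen for illustration, loosely modelled on the PythonLexer): the rule for an opening double quote enters an anonymous state built from 'stringescape' and 'dqs'.

    from pygments.lexer import RegexLexer, combined
    from pygments.token import String, Text

    class StringLexer(RegexLexer):
        """Hypothetical fragment demonstrating combined()."""
        tokens = {
            'root': [
                # enter an anonymous state made from the rules of
                # 'stringescape' followed by those of 'dqs'
                (r'"', String.Double, combined('stringescape', 'dqs')),
                (r'[^"]+', Text),
            ],
            'stringescape': [
                (r'\\.', String.Escape),
            ],
            'dqs': [
                (r'"', String.Double, '#pop'),
                (r'[^\\"]+', String.Double),
            ],
        }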

    Using multiple lexers

    -

    Using multiple lexers for the same input can be tricky. One of the easiest combination techniques is shown here: You can replace the token type entry in a rule tuple (the second item) with a lexer class. The matched text will then be lexed with that lexer, and the resulting tokens will be yielded.

    -

    For example, look at this stripped-down HTML lexer:

    -
    import re  # needed for the flags attribute below
    from pygments.lexer import RegexLexer, bygroups, using
    -from pygments.token import *
    -from pygments.lexers.web import JavascriptLexer
    -
    -class HtmlLexer(RegexLexer):
    -    name = 'HTML'
    -    aliases = ['html']
    -    filenames = ['*.html', '*.htm']
    -
    -    flags = re.IGNORECASE | re.DOTALL
    -    tokens = {
    -        'root': [
    -            ('[^<&]+', Text),
    -            ('&.*?;', Name.Entity),
    -            (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
    -            (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
    -            (r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
    -        ],
    -        'script-content': [
    -            (r'(.+?)(<\s*/\s*script\s*>)',
    -             bygroups(using(JavascriptLexer), Name.Tag),
    -             '#pop'),
    -        ]
    -    }
    -
    -

    Here the content of a <script> tag is passed to a newly created instance of a JavascriptLexer and not processed by the HtmlLexer. This is done using the using helper that takes the other lexer class as its parameter.

    -

    Note the combination of bygroups and using. This makes sure that the content up to the </script> end tag is processed by the JavascriptLexer, while the end tag is yielded as a normal token with the Name.Tag type.

    -

    As an additional goodie, if the lexer class is replaced by this (imported from pygments.lexer), the "other" lexer will be the current one (because you cannot refer to the current class within the code that runs at class definition time).

    -

    Also note the (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')) rule. Here, two states are pushed onto the state stack, 'script-content' and 'tag'. That means that first 'tag' is processed, which will parse attributes and the closing >, then the 'tag' state is popped and the next state on top of the stack will be 'script-content'.

    -

    The using() helper has a special keyword argument, state, which works as follows: if given, the lexer to use initially is not in the "root" state, but in the state given by this argument. This only works with a RegexLexer.

    -

    Any other keyword arguments passed to using() are added to the keyword arguments used to create the lexer.
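
    For example, using() can also be combined with this and the state keyword to re-lex part of the match with the current lexer itself, starting in another state. The following is only a sketch; the rule and the 'inline' state are made up for illustration:

    from pygments.lexer import RegexLexer, bygroups, using, this
    from pygments.token import Keyword, Name, Text

    class DocExampleLexer(RegexLexer):
        """Hypothetical lexer re-lexing part of a match with itself."""
        tokens = {
            'root': [
                # the rest of the line is lexed with this same lexer,
                # but starting in the 'inline' state instead of 'root'
                (r'(title)(.*?$)',
                 bygroups(Keyword, using(this, state='inline'))),
                (r'.', Text),
            ],
            'inline': [
                (r'\w+', Name),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }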

    -
    -
    -

    Delegating Lexer

    -

    Another approach for nested lexers is the DelegatingLexer which is for example used for the template engine lexers. It takes two lexers as arguments on initialisation: a root_lexer and a language_lexer.

    -

    The input is processed as follows: First, the whole text is lexed with the language_lexer. All tokens yielded with a type of Other are then concatenated and given to the root_lexer. The language tokens of the language_lexer are then inserted into the root_lexer's token stream at the appropriate positions.

    -
    from pygments.lexer import DelegatingLexer
    -from pygments.lexers.web import HtmlLexer, PhpLexer
    -
    -class HtmlPhpLexer(DelegatingLexer):
    -    def __init__(self, **options):
    -        super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
    -
    -

    This procedure ensures that e.g. HTML with template tags in it is highlighted correctly even if the template tags are put into HTML tags or attributes.
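
    To make the Other mechanism concrete, here is a minimal, hypothetical template lexer (the class names and the {# ... #} comment syntax are invented for this sketch): everything it does not recognise is yielded as Other, so the DelegatingLexer can hand that text to the HtmlLexer.

    from pygments.lexer import RegexLexer, DelegatingLexer
    from pygments.lexers.web import HtmlLexer
    from pygments.token import Comment, Other

    class SimpleTemplateLexer(RegexLexer):
        """Hypothetical language lexer: only {# ... #} comments are special."""
        tokens = {
            'root': [
                (r'\{#.*?#\}', Comment.Preproc),  # single-line template comments
                (r'[^{]+', Other),                # raw text, handed to the root lexer
                (r'\{', Other),                   # a lone brace is raw text too
            ]
        }

    class HtmlTemplateLexer(DelegatingLexer):
        def __init__(self, **options):
            super(HtmlTemplateLexer, self).__init__(
                HtmlLexer, SimpleTemplateLexer, **options)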

    -

    If you want to change the needle token Other to something else, you can give the lexer another token type as the third parameter:

    -
    DelegatingLexer.__init__(self, MyLexer, OtherLexer, Text, **options)
    -
    -
    -
    -

    Callbacks

    -

    Sometimes the grammar of a language is so complex that a lexer would be unable to parse it just by using regular expressions and stacks.

    -

    For this, the RegexLexer allows callbacks to be given in rule tuples, instead of token types (bygroups and using are nothing else but preimplemented callbacks). The callback must be a function taking two arguments:

    -
      -
    • the lexer itself
    • -
    • the match object for the last matched rule
    • -
    -

    The callback must then return an iterable of (or simply yield) (index, tokentype, value) tuples, which are then just passed through by get_tokens_unprocessed(). The index here is the position of the token in the input string, tokentype is the normal token type (like Name.Builtin), and value the associated part of the input string.

    -

    You can see an example here:

    -
    from pygments.lexer import RegexLexer
    -from pygments.token import Generic
    -
    -class HypotheticLexer(RegexLexer):
    -
    -    def headline_callback(lexer, match):
    -        equal_signs = match.group(1)
    -        text = match.group(2)
    -        yield match.start(), Generic.Headline, equal_signs + text + equal_signs
    -
    -    tokens = {
    -        'root': [
    -            (r'(=+)(.*?)(\1)', headline_callback)
    -        ]
    -    }
    -
    -

    If the regex for the headline_callback matches, the function is called with the match object. Note that after the callback is done, processing continues normally, that is, after the end of the previous match. The callback cannot influence the position.

    -

    There are not really any simple examples for lexer callbacks, but you can see them in action e.g. in the compiled.py source code in the CLexer and JavaLexer classes.

    -
    -
    -

    The ExtendedRegexLexer class

    -

    The RegexLexer, even with callbacks, unfortunately isn't powerful enough for the funky syntax rules of some languages that will go unnamed, such as Ruby.

    -

    But fear not; even then you don't have to abandon the regular expression approach: Pygments has a subclass of RegexLexer, the ExtendedRegexLexer. All features known from RegexLexers are available here too, and the tokens are specified in exactly the same way, except for one detail:

    -

    The get_tokens_unprocessed() method holds its internal state data not as local variables, but in an instance of the pygments.lexer.LexerContext class, and that instance is passed to callbacks as a third argument. This means that you can modify the lexer state in callbacks.

    -

    The LexerContext class has the following members:

    -
      -
    • text -- the input text
    • -
    • pos -- the current starting position that is used for matching regexes
    • -
    • stack -- a list containing the state stack
    • -
    • end -- the maximum position to which regexes are matched; this defaults to the length of text
    • -
    -

    Additionally, the get_tokens_unprocessed() method can be given a LexerContext instead of a string and will then process this context instead of creating a new one for the string argument.

    -

    Note that because you can set the current position to anything in the callback, it won't automatically be set by the caller after the callback is finished. For example, this is how the hypothetical lexer above would be written with the ExtendedRegexLexer:

    -
    from pygments.lexer import ExtendedRegexLexer
    -from pygments.token import Generic
    -
    -class ExHypotheticLexer(ExtendedRegexLexer):
    -
    -    def headline_callback(lexer, match, ctx):
    -        equal_signs = match.group(1)
    -        text = match.group(2)
    -        yield match.start(), Generic.Headline, equal_signs + text + equal_signs
    -        ctx.pos = match.end()
    -
    -    tokens = {
    -        'root': [
    -            (r'(=+)(.*?)(\1)', headline_callback)
    -        ]
    -    }
    -
    -

    This might sound confusing (and it really can be). But it is needed, and for an example look at the Ruby lexer in agile.py.
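
    To make the context argument from above concrete, here is a small usage sketch (the input text is made up) that drives the ExHypotheticLexer with an explicitly constructed LexerContext:

    from pygments.lexer import LexerContext

    lexer = ExHypotheticLexer()           # the class defined above
    text = u'== one headline ==\n== another one ==\n'

    # start lexing at an explicit position with an explicit state stack
    ctx = LexerContext(text, 0)
    ctx.stack = ['root']

    for index, tokentype, value in lexer.get_tokens_unprocessed(context=ctx):
        print(index, tokentype, value)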

    -
    -
    -

    Filtering Token Streams

    -

    Some languages ship a lot of builtin functions (for example PHP). The total amount of those functions differs from system to system because not everybody has every extension installed. In the case of PHP there are over 3000 builtin functions. That's an incredibly huge amount of functions, much more than you can put into a regular expression.

    -

    But because only Name tokens can be function names, this is solvable by overriding the get_tokens_unprocessed() method. The following lexer subclasses the PythonLexer so that it highlights some additional names as pseudo keywords:

    -
    from pygments.lexers.agile import PythonLexer
    -from pygments.token import Name, Keyword
    -
    -class MyPythonLexer(PythonLexer):
    -    EXTRA_KEYWORDS = ['foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs']
    -
    -    def get_tokens_unprocessed(self, text):
    -        for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
    -            if token is Name and value in self.EXTRA_KEYWORDS:
    -                yield index, Keyword.Pseudo, value
    -            else:
    -                yield index, token, value
    -
    -

    The PhpLexer and LuaLexer use this method to resolve builtin functions.

    -

    Note: Do not confuse this with the filter system.

    -
    - -
    -
    \ No newline at end of file
    diff --git a/vendor/pygments/docs/build/lexers.html b/vendor/pygments/docs/build/lexers.html
    deleted file mode 100644
    index f8813fb..0000000
    --- a/vendor/pygments/docs/build/lexers.html
    +++ /dev/null
    @@ -1,5359 +0,0 @@
    -


    -

    Available lexers


    This page lists all available builtin lexers and the options they take.

    -

    Currently, all lexers support these options:

    -
    stripnl
        Strip leading and trailing newlines from the input (default: True).

    stripall
        Strip all leading and trailing whitespace from the input (default: False).

    ensurenl
        Make sure that the input ends with a newline (default: True). This is required for some lexers that consume input linewise. New in Pygments 1.3.

    tabsize
        If given and greater than 0, expand tabs in the input (default: 0).

    encoding
        New in Pygments 0.6.

        If given, must be an encoding name (such as "utf-8"). This encoding will be used to convert the input string to Unicode (if it is not already a Unicode string). The default is "latin1".

        If this option is set to "guess", a simple UTF-8 vs. Latin-1 detection is used; if it is set to "chardet", the chardet library is used to guess the encoding of the input.

    The "Short Names" field lists the identifiers that can be used with the -get_lexer_by_name() function.

    -

    These lexers are builtin and can be imported from pygments.lexers:
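
    For example (a minimal sketch; the chosen lexer and option values are arbitrary), a lexer is looked up by one of its short names, and any of the options above can be passed as keyword arguments:

    from pygments.lexers import get_lexer_by_name

    # look up a builtin lexer by its short name and pass lexer options
    lexer = get_lexer_by_name('python', stripall=True, tabsize=4)

    for tokentype, value in lexer.get_tokens('if True:\n\tpass\n'):
        print(tokentype, value)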

    -
    -

    Lexers for agile languages

    -

    CrocLexer

    -
    -

    For Croc source.

    - --- - - - - - - - -
    Short names:croc
    Filename patterns:*.croc
    Mimetypes:text/x-crocsrc
    -
    -

    DgLexer

    -
    -

    Lexer for dg, a functional and object-oriented programming language running on the CPython 3 VM.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:dg
    Filename patterns:*.dg
    Mimetypes:text/x-dg
    -
    -

    FactorLexer

    -
    -

    Lexer for the Factor language.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:factor
    Filename patterns:*.factor
    Mimetypes:text/x-factor
    -
    -

    FancyLexer

    -
    -

    Pygments Lexer For Fancy.

    -

    Fancy is a self-hosted, pure object-oriented, dynamic, class-based, concurrent general-purpose programming language running on Rubinius, the Ruby VM.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:fancy, fy
    Filename patterns:*.fy, *.fancypack
    Mimetypes:text/x-fancysrc
    -
    -

    IoLexer

    -
    -

    For Io (a small, prototype-based programming language) source.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:io
    Filename patterns:*.io
    Mimetypes:text/x-iosrc
    -
    -

    LuaLexer

    -
    -

    For Lua source code.

    -

    Additional options accepted:

    -
    -
    func_name_highlighting
    -
    If given and True, highlight builtin function names (default: True).
    -
    disabled_modules
    -

    If given, must be a list of module names whose function names should not be highlighted. By default all modules are highlighted.

    -

    To get a list of allowed modules have a look into the _luabuiltins module:

    -
    >>> from pygments.lexers._luabuiltins import MODULES
    ->>> MODULES.keys()
    -['string', 'coroutine', 'modules', 'io', 'basic', ...]
    -
    -
    -
    - --- - - - - - - - -
    Short names:lua
    Filename patterns:*.lua, *.wlua
    Mimetypes:text/x-lua, application/x-lua
    -
    -

    MiniDLexer

    -
    -

    For MiniD source. MiniD is now known as Croc.

    - --- - - - - - - - -
    Short names:minid
    Filename patterns:*.md
    Mimetypes:text/x-minidsrc
    -
    -

    MoonScriptLexer

    -
    -

    For MoonScript source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:moon, moonscript
    Filename patterns:*.moon
    Mimetypes:text/x-moonscript, application/x-moonscript
    -
    -

    PerlLexer

    -
    -

    For Perl source code.

    - --- - - - - - - - -
    Short names:perl, pl
    Filename patterns:*.pl, *.pm
    Mimetypes:text/x-perl, application/x-perl
    -
    -

    Python3Lexer

    -
    -

    For Python source code (version 3.0).

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:python3, py3
    Filename patterns:None
    Mimetypes:text/x-python3, application/x-python3
    -
    -

    Python3TracebackLexer

    -
    -

    For Python 3.0 tracebacks, with support for chained exceptions.

    -

    New in Pygments 1.0.

    - --- - - - - - - - -
    Short names:py3tb
    Filename patterns:*.py3tb
    Mimetypes:text/x-python3-traceback
    -
    -

    PythonConsoleLexer

    -
    -

    For Python console output or doctests, such as:

    -
    >>> a = 'foo'
    ->>> print a
    -foo
    ->>> 1 / 0
    -Traceback (most recent call last):
    -  File "<stdin>", line 1, in <module>
    -ZeroDivisionError: integer division or modulo by zero
    -
    -

    Additional options:

    -
    -
    python3
    -
    Use Python 3 lexer for code. Default is False. -New in Pygments 1.0.
    -
    - --- - - - - - - - -
    Short names:pycon
    Filename patterns:None
    Mimetypes:text/x-python-doctest
    -
    -

    PythonLexer

    -
    -

    For Python source code.

    - --- - - - - - - - -
    Short names:python, py, sage
    Filename patterns:*.py, *.pyw, *.sc, SConstruct, SConscript, *.tac, *.sage
    Mimetypes:text/x-python, application/x-python
    -
    -

    PythonTracebackLexer

    -
    -

    For Python tracebacks.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:pytb
    Filename patterns:*.pytb
    Mimetypes:text/x-python-traceback
    -
    -

    RubyConsoleLexer

    -
    -

    For Ruby interactive console (irb) output like:

    -
    irb(main):001:0> a = 1
    -=> 1
    -irb(main):002:0> puts a
    -1
    -=> nil
    -
    - --- - - - - - - - -
    Short names:rbcon, irb
    Filename patterns:None
    Mimetypes:text/x-ruby-shellsession
    -
    -

    RubyLexer

    -
    -

    For Ruby source code.

    - --- - - - - - - - -
    Short names:rb, ruby, duby
    Filename patterns:*.rb, *.rbw, Rakefile, *.rake, *.gemspec, *.rbx, *.duby
    Mimetypes:text/x-ruby, application/x-ruby
    -
    -

    TclLexer

    -
    -

    For Tcl source code.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:tcl
    Filename patterns:*.tcl
    Mimetypes:text/x-tcl, text/x-script.tcl, application/x-tcl
    -
    -
    -
    -

    Lexers for assembly languages

    -

    CObjdumpLexer

    -
    -

    For the output of 'objdump -Sr on compiled C files'

    - --- - - - - - - - -
    Short names:c-objdump
    Filename patterns:*.c-objdump
    Mimetypes:text/x-c-objdump
    -
    -

    Ca65Lexer

    -
    -

    For ca65 assembler sources.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:ca65
    Filename patterns:*.s
    Mimetypes:None
    -
    -

    CppObjdumpLexer

    -
    -

    For the output of 'objdump -Sr on compiled C++ files'

    - --- - - - - - - - -
    Short names:cpp-objdump, c++-objdumb, cxx-objdump
    Filename patterns:*.cpp-objdump, *.c++-objdump, *.cxx-objdump
    Mimetypes:text/x-cpp-objdump
    -
    -

    DObjdumpLexer

    -
    -

    For the output of 'objdump -Sr on compiled D files'

    - --- - - - - - - - -
    Short names:d-objdump
    Filename patterns:*.d-objdump
    Mimetypes:text/x-d-objdump
    -
    -

    GasLexer

    -
    -

    For Gas (AT&T) assembly code.

    - --- - - - - - - - -
    Short names:gas
    Filename patterns:*.s, *.S
    Mimetypes:text/x-gas
    -
    -

    LlvmLexer

    -
    -

    For LLVM assembly code.

    - --- - - - - - - - -
    Short names:llvm
    Filename patterns:*.ll
    Mimetypes:text/x-llvm
    -
    -

    NasmLexer

    -
    -

    For Nasm (Intel) assembly code.

    - --- - - - - - - - -
    Short names:nasm
    Filename patterns:*.asm, *.ASM
    Mimetypes:text/x-nasm
    -
    -

    ObjdumpLexer

    -
    -

    For the output of 'objdump -dr'

    - --- - - - - - - - -
    Short names:objdump
    Filename patterns:*.objdump
    Mimetypes:text/x-objdump
    -
    -
    -
    -

    Lexers for compiled languages

    -

    AdaLexer

    -
    -

    For Ada source code.

    -

    New in Pygments 1.3.

    - --- - - - - - - - -
    Short names:ada, ada95, ada2005
    Filename patterns:*.adb, *.ads, *.ada
    Mimetypes:text/x-ada
    -
    -

    BlitzMaxLexer

    -
    -

    For BlitzMax source code.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:blitzmax, bmax
    Filename patterns:*.bmx
    Mimetypes:text/x-bmx
    -
    -

    CLexer

    -
    -

    For C source code with preprocessor directives.

    - --- - - - - - - - -
    Short names:c
    Filename patterns:*.c, *.h, *.idc
    Mimetypes:text/x-chdr, text/x-csrc
    -
    -

    CobolFreeformatLexer

    -
    -

    Lexer for Free format OpenCOBOL code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:cobolfree
    Filename patterns:*.cbl, *.CBL
    Mimetypes:None
    -
    -

    CobolLexer

    -
    -

    Lexer for OpenCOBOL code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:cobol
    Filename patterns:*.cob, *.COB, *.cpy, *.CPY
    Mimetypes:text/x-cobol
    -
    -

    CppLexer

    -
    -

    For C++ source code with preprocessor directives.

    - --- - - - - - - - -
    Short names:cpp, c++
    Filename patterns:*.cpp, *.hpp, *.c++, *.h++, *.cc, *.hh, *.cxx, *.hxx, *.C, *.H, *.cp, *.CPP
    Mimetypes:text/x-c++hdr, text/x-c++src
    -
    -

    CudaLexer

    -
    -

    For NVIDIA CUDA™ -source.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:cuda, cu
    Filename patterns:*.cu, *.cuh
    Mimetypes:text/x-cuda
    -
    -

    CythonLexer

    -
    -

    For Pyrex and Cython source code.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:cython, pyx
    Filename patterns:*.pyx, *.pxd, *.pxi
    Mimetypes:text/x-cython, application/x-cython
    -
    -

    DLexer

    -
    -

    For D source.

    -

    New in Pygments 1.2.

    - --- - - - - - - - -
    Short names:d
    Filename patterns:*.d, *.di
    Mimetypes:text/x-dsrc
    -
    -

    DelphiLexer

    -
    -

    For Delphi (Borland Object Pascal), -Turbo Pascal and Free Pascal source code.

    -

    Additional options accepted:

    -
    -
    turbopascal
    -
    Highlight Turbo Pascal specific keywords (default: True).
    -
    delphi
    -
    Highlight Borland Delphi specific keywords (default: True).
    -
    freepascal
    -
    Highlight Free Pascal specific keywords (default: True).
    -
    units
    -
    A list of units that should be considered builtin, supported are -System, SysUtils, Classes and Math. -Default is to consider all of them builtin.
    -
    - --- - - - - - - - -
    Short names:delphi, pas, pascal, objectpascal
    Filename patterns:*.pas
    Mimetypes:text/x-pascal
    -
    -

    DylanLexer

    -
    -

    For the Dylan language.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:dylan
    Filename patterns:*.dylan, *.dyl, *.intr
    Mimetypes:text/x-dylan
    -
    -

    DylanLidLexer

    -
    -

    For Dylan LID (Library Interchange Definition) files.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:dylan-lid, lid
    Filename patterns:*.lid, *.hdp
    Mimetypes:text/x-dylan-lid
    -
    -

    ECLexer

    -
    -

    For eC source code with preprocessor directives.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:ec
    Filename patterns:*.ec, *.eh
    Mimetypes:text/x-echdr, text/x-ecsrc
    -
    -

    FantomLexer

    -
    -

    For Fantom source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:fan
    Filename patterns:*.fan
    Mimetypes:application/x-fantom
    -
    -

    FelixLexer

    -
    -

    For Felix source code.

    -

    New in Pygments 1.2.

    - --- - - - - - - - -
    Short names:felix, flx
    Filename patterns:*.flx, *.flxh
    Mimetypes:text/x-felix
    -
    -

    FortranLexer

    -
    -

    Lexer for FORTRAN 90 code.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:fortran
    Filename patterns:*.f, *.f90, *.F, *.F90
    Mimetypes:text/x-fortran
    -
    -

    GLShaderLexer

    -
    -

    GLSL (OpenGL Shader) lexer.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:glsl
    Filename patterns:*.vert, *.frag, *.geo
    Mimetypes:text/x-glslsrc
    -
    -

    GoLexer

    -
    -

    For Go source.

    - --- - - - - - - - -
    Short names:go
    Filename patterns:*.go
    Mimetypes:text/x-gosrc
    -
    -

    Modula2Lexer

    -
    -

    For Modula-2 source code.

    -

    Additional options that determine which keywords are highlighted:

    -
    -
    pim
    -
    Select PIM Modula-2 dialect (default: True).
    -
    iso
    -
    Select ISO Modula-2 dialect (default: False).
    -
    objm2
    -
    Select Objective Modula-2 dialect (default: False).
    -
    gm2ext
    -
    Also highlight GNU extensions (default: False).
    -
    -

    New in Pygments 1.3.

    - --- - - - - - - - -
    Short names:modula2, m2
    Filename patterns:*.def, *.mod
    Mimetypes:text/x-modula2
    -
    -

    MonkeyLexer

    -
    -

    For -Monkey -source code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:monkey
    Filename patterns:*.monkey
    Mimetypes:text/x-monkey
    -
    -

    NimrodLexer

    -
    -

    For Nimrod source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:nimrod, nim
    Filename patterns:*.nim, *.nimrod
    Mimetypes:text/x-nimrod
    -
    -

    ObjectiveCLexer

    -
    -

    For Objective-C source code with preprocessor directives.

    - --- - - - - - - - -
    Short names:objective-c, objectivec, obj-c, objc
    Filename patterns:*.m, *.h
    Mimetypes:text/x-objective-c
    -
    -

    ObjectiveCppLexer

    -
    -

    For Objective-C++ source code with preprocessor directives.

    - --- - - - - - - - -
    Short names:objective-c++, objectivec++, obj-c++, objc++
    Filename patterns:*.mm, *.hh
    Mimetypes:text/x-objective-c++
    -
    -

    OocLexer

    -
    -

    For Ooc source code

    -

    New in Pygments 1.2.

    - --- - - - - - - - -
    Short names:ooc
    Filename patterns:*.ooc
    Mimetypes:text/x-ooc
    -
    -

    PrologLexer

    -
    -

    Lexer for Prolog files.

    - --- - - - - - - - -
    Short names:prolog
    Filename patterns:*.prolog, *.pro, *.pl
    Mimetypes:text/x-prolog
    -
    -

    RustLexer

    -
    -

    Lexer for Mozilla's Rust programming language.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:rust
    Filename patterns:*.rs, *.rc
    Mimetypes:text/x-rustsrc
    -
    -

    ValaLexer

    -
    -

    For Vala source code with preprocessor directives.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:vala, vapi
    Filename patterns:*.vala, *.vapi
    Mimetypes:text/x-vala
    -
    -
    - -
    -

    Lexers for .net languages

    -

    BooLexer

    -
    -

    For Boo source code.

    - --- - - - - - - - -
    Short names:boo
    Filename patterns:*.boo
    Mimetypes:text/x-boo
    -
    -

    CSharpAspxLexer

    -
    -

    Lexer for highlighting C# within ASP.NET pages.

    - --- - - - - - - - -
    Short names:aspx-cs
    Filename patterns:*.aspx, *.asax, *.ascx, *.ashx, *.asmx, *.axd
    Mimetypes:None
    -
    -

    CSharpLexer

    -
    -

    For C# -source code.

    -

    Additional options accepted:

    -
    -
    unicodelevel
    -

    Determines which Unicode characters this lexer allows for identifiers. -The possible values are:

    -
      -
    • none -- only the ASCII letters and numbers are allowed. This -is the fastest selection.
    • -
    • basic -- all Unicode characters from the specification except -category Lo are allowed.
    • -
    • full -- all Unicode characters as specified in the C# specs -are allowed. Note that this means a considerable slowdown since the -Lo category has more than 40,000 characters in it!
    • -
    -

    The default value is basic.

    -

    New in Pygments 0.8.

    -
    -
    - --- - - - - - - - -
    Short names:csharp, c#
    Filename patterns:*.cs
    Mimetypes:text/x-csharp
    -
    -

    FSharpLexer

    -
    -

    For the F# language.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:fsharp
    Filename patterns:*.fs, *.fsi
    Mimetypes:text/x-fsharp
    -
    -

    NemerleLexer

    -
    -

    For Nemerle source code.

    -

    Additional options accepted:

    -
    -
    unicodelevel
    -

    Determines which Unicode characters this lexer allows for identifiers. -The possible values are:

    -
      -
    • none -- only the ASCII letters and numbers are allowed. This -is the fastest selection.
    • -
    • basic -- all Unicode characters from the specification except -category Lo are allowed.
    • -
    • full -- all Unicode characters as specified in the C# specs -are allowed. Note that this means a considerable slowdown since the -Lo category has more than 40,000 characters in it!
    • -
    -

    The default value is basic.

    -
    -
    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:nemerle
    Filename patterns:*.n
    Mimetypes:text/x-nemerle
    -
    -

    VbNetAspxLexer

    -
    -

    Lexer for highlighting Visual Basic.net within ASP.NET pages.

    - --- - - - - - - - -
    Short names:aspx-vb
    Filename patterns:*.aspx, *.asax, *.ascx, *.ashx, *.asmx, *.axd
    Mimetypes:None
    -
    -

    VbNetLexer

    -
    -

    For -Visual Basic.NET -source code.

    - --- - - - - - - - -
    Short names:vb.net, vbnet
    Filename patterns:*.vb, *.bas
    Mimetypes:text/x-vbnet, text/x-vba
    -
    -
    -
    -

    Simple lexer for Microsoft Visual FoxPro source code

    -

    FoxProLexer

    Lexer for Microsoft Visual FoxPro language.

    -
    -

    FoxPro syntax allows all keywords and function names to be shortened to 4 characters. Shortened forms are not recognized by this lexer.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:Clipper, XBase
    Filename patterns:*.PRG, *.prg
    Mimetypes:None
    -
    -
    -
    -

    Lexers for functional languages

    -

    CommonLispLexer

    -
    -

    A Common Lisp lexer.

    -

    New in Pygments 0.9.

    - --- - - - - - - - -
    Short names:common-lisp, cl
    Filename patterns:*.cl, *.lisp, *.el
    Mimetypes:text/x-common-lisp
    -
    -

    CoqLexer

    -
    -

    For the Coq theorem prover.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:coq
    Filename patterns:*.v
    Mimetypes:text/x-coq
    -
    -

    ElixirConsoleLexer

    -
    -

    For Elixir interactive console (iex) output like:

    -
    iex> [head | tail] = [1,2,3]
    -[1,2,3]
    -iex> head
    -1
    -iex> tail
    -[2,3]
    -iex> [head | tail]
    -[1,2,3]
    -iex> length [head | tail]
    -3
    -
    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:iex
    Filename patterns:None
    Mimetypes:text/x-elixir-shellsession
    -
    -

    ElixirLexer

    -
    -

    For the Elixir language.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:elixir, ex, exs
    Filename patterns:*.ex, *.exs
    Mimetypes:text/x-elixir
    -
    -

    ErlangLexer

    -
    -

    For the Erlang functional programming language.

    -

    Blame Jeremy Thurgood (http://jerith.za.net/).

    -

    New in Pygments 0.9.

    - --- - - - - - - - -
    Short names:erlang
    Filename patterns:*.erl, *.hrl, *.es, *.escript
    Mimetypes:text/x-erlang
    -
    -

    ErlangShellLexer

    -
    -

    Shell sessions in erl (for Erlang code).

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:erl
    Filename patterns:*.erl-sh
    Mimetypes:text/x-erl-shellsession
    -
    -

    HaskellLexer

    -
    -

    A Haskell lexer based on the lexemes defined in the Haskell 98 Report.

    -

    New in Pygments 0.8.

    - --- - - - - - - - -
    Short names:haskell, hs
    Filename patterns:*.hs
    Mimetypes:text/x-haskell
    -
    -

    KokaLexer

    -
    -

    Lexer for the Koka -language.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:koka
    Filename patterns:*.kk, *.kki
    Mimetypes:text/x-koka
    -
    -

    LiterateHaskellLexer

    -
    -

    For Literate Haskell (Bird-style or LaTeX) source.

    -

    Additional options accepted:

    -
    -
    litstyle
    -
    If given, must be "bird" or "latex". If not given, the style -is autodetected: if the first non-whitespace character in the source -is a backslash or percent character, LaTeX is assumed, else Bird.
    -
    -

    New in Pygments 0.9.

    - --- - - - - - - - -
    Short names:lhs, literate-haskell
    Filename patterns:*.lhs
    Mimetypes:text/x-literate-haskell
    -
    -

    NewLispLexer

    -
    -

    For newLISP. source code (version 10.3.0).

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:newlisp
    Filename patterns:*.lsp, *.nl
    Mimetypes:text/x-newlisp, application/x-newlisp
    -
    -

    OcamlLexer

    -
    -

    For the OCaml language.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:ocaml
    Filename patterns:*.ml, *.mli, *.mll, *.mly
    Mimetypes:text/x-ocaml
    -
    -

    OpaLexer

    -
    -

    Lexer for the Opa language (http://opalang.org).

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:opa
    Filename patterns:*.opa
    Mimetypes:text/x-opa
    -
    -

    RacketLexer

    -
    -

    Lexer for Racket source code (formerly known as -PLT Scheme).

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:racket, rkt
    Filename patterns:*.rkt, *.rktl
    Mimetypes:text/x-racket, application/x-racket
    -
    -

    SMLLexer

    -
    -

    For the Standard ML language.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:sml
    Filename patterns:*.sml, *.sig, *.fun
    Mimetypes:text/x-standardml, application/x-standardml
    -
    -

    SchemeLexer

    -
    -

    A Scheme lexer, parsing a stream and outputting the tokens -needed to highlight scheme code. -This lexer could be most probably easily subclassed to parse -other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp.

    -

    This parser is checked with pastes from the LISP pastebin -at http://paste.lisp.org/ to cover as much syntax as possible.

    -

    It supports the full Scheme syntax as defined in R5RS.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:scheme, scm
    Filename patterns:*.scm, *.ss
    Mimetypes:text/x-scheme, application/x-scheme
    -
    -
    -
    -

    Lexers for hardware descriptor languages

    -

    SystemVerilogLexer

    -
    -

    Extends verilog lexer to recognise all SystemVerilog keywords from IEEE -1800-2009 standard.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:sv
    Filename patterns:*.sv, *.svh
    Mimetypes:text/x-systemverilog
    -
    -

    VerilogLexer

    -
    -

    For verilog source code with preprocessor directives.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:v
    Filename patterns:*.v
    Mimetypes:text/x-verilog
    -
    -

    VhdlLexer

    -
    -

    For VHDL source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:vhdl
    Filename patterns:*.vhdl, *.vhd
    Mimetypes:text/x-vhdl
    -
    -
    -
    -

    Pygments lexers for JVM languages

    -

    AspectJLexer

    -
    -

    For AspectJ source code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:aspectj
    Filename patterns:*.aj
    Mimetypes:text/x-aspectj
    -
    -

    CeylonLexer

    -
    -

    For Ceylon source code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:ceylon
    Filename patterns:*.ceylon
    Mimetypes:text/x-ceylon
    -
    -

    ClojureLexer

    -
    -

    Lexer for Clojure source code.

    -

    New in Pygments 0.11.

    - --- - - - - - - - -
    Short names:clojure, clj
    Filename patterns:*.clj
    Mimetypes:text/x-clojure, application/x-clojure
    -
    -

    GosuLexer

    -
    -

    For Gosu source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:gosu
    Filename patterns:*.gs, *.gsx, *.gsp, *.vark
    Mimetypes:text/x-gosu
    -
    -

    GosuTemplateLexer

    -
    -

    For Gosu templates.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:gst
    Filename patterns:*.gst
    Mimetypes:text/x-gosu-template
    -
    -

    GroovyLexer

    -
    -

    For Groovy source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:groovy
    Filename patterns:*.groovy
    Mimetypes:text/x-groovy
    -
    -

    IokeLexer

    -
    -

    For Ioke (a strongly typed, dynamic, -prototype based programming language) source.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:ioke, ik
    Filename patterns:*.ik
    Mimetypes:text/x-iokesrc
    -
    -

    JavaLexer

    -
    -

    For Java source code.

    - --- - - - - - - - -
    Short names:java
    Filename patterns:*.java
    Mimetypes:text/x-java
    -
    -

    KotlinLexer

    -
    -

    For Kotlin -source code.

    -

    Additional options accepted:

    -
    -
    unicodelevel
    -

    Determines which Unicode characters this lexer allows for identifiers. -The possible values are:

    -
      -
    • none -- only the ASCII letters and numbers are allowed. This -is the fastest selection.
    • -
    • basic -- all Unicode characters from the specification except -category Lo are allowed.
    • -
    • full -- all Unicode characters as specified in the C# specs -are allowed. Note that this means a considerable slowdown since the -Lo category has more than 40,000 characters in it!
    • -
    -

    The default value is basic.

    -
    -
    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:kotlin
    Filename patterns:*.kt
    Mimetypes:text/x-kotlin
    -
    -

    ScalaLexer

    -
    -

    For Scala source code.

    - --- - - - - - - - -
    Short names:scala
    Filename patterns:*.scala
    Mimetypes:text/x-scala
    -
    -

    XtendLexer

    -
    -

    For Xtend source code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:xtend
    Filename patterns:*.xtend
    Mimetypes:text/x-xtend
    -
    -
    -
    -

    Lexers for math languages

    -

    BugsLexer

    -
    -

    Pygments Lexer for OpenBugs and WinBugs -models.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:bugs, winbugs, openbugs
    Filename patterns:*.bug
    Mimetypes:None
    -
    -

    IDLLexer

    -
    -

    Pygments Lexer for IDL (Interactive Data Language).

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:idl
    Filename patterns:*.pro
    Mimetypes:text/idl
    -
    -

    JagsLexer

    -
    -

    Pygments Lexer for JAGS.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:jags
    Filename patterns:*.jag, *.bug
    Mimetypes:None
    -
    -

    JuliaConsoleLexer

    -
    -

    For Julia console sessions. Modeled after MatlabSessionLexer.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:jlcon
    Filename patterns:None
    Mimetypes:None
    -
    -

    JuliaLexer

    -
    -

    For Julia source code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:julia, jl
    Filename patterns:*.jl
    Mimetypes:text/x-julia, application/x-julia
    -
    -

    MatlabLexer

    -
    -

    For Matlab source code.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:matlab
    Filename patterns:*.m
    Mimetypes:text/matlab
    -
    -

    MatlabSessionLexer

    -
    -

    For Matlab sessions. Modeled after PythonConsoleLexer. -Contributed by Ken Schutte <kschutte@csail.mit.edu>.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:matlabsession
    Filename patterns:None
    Mimetypes:None
    -
    -

    MuPADLexer

    -
    -

    A MuPAD lexer. -Contributed by Christopher Creutzig <christopher@creutzig.de>.

    -

    New in Pygments 0.8.

    - --- - - - - - - - -
    Short names:mupad
    Filename patterns:*.mu
    Mimetypes:None
    -
    -

    NumPyLexer

    -
    -

    A Python lexer recognizing Numerical Python builtins.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:numpy
    Filename patterns:None
    Mimetypes:None
    -
    -

    OctaveLexer

    -
    -

    For GNU Octave source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:octave
    Filename patterns:*.m
    Mimetypes:text/octave
    -
    -

    RConsoleLexer

    -
    -

    For R console transcripts or R CMD BATCH output files.

    - --- - - - - - - - -
    Short names:rconsole, rout
    Filename patterns:*.Rout
    Mimetypes:None
    -
    -

    RdLexer

    -
    -

    Pygments Lexer for R documentation (Rd) files

    -

    This is a very minimal implementation, highlighting little more -than the macros. A description of Rd syntax is found in Writing R -Extensions -and Parsing Rd files.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:rd
    Filename patterns:*.Rd
    Mimetypes:text/x-r-doc
    -
    -

    SLexer

    -
    -

    For S, S-plus, and R source code.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:splus, s, r
    Filename patterns:*.S, *.R, .Rhistory, .Rprofile
    Mimetypes:text/S-plus, text/S, text/x-r-source, text/x-r, text/x-R, text/x-r-history, text/x-r-profile
    -
    -

    ScilabLexer

    -
    -

    For Scilab source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:scilab
    Filename patterns:*.sci, *.sce, *.tst
    Mimetypes:text/scilab
    -
    -

    StanLexer

    -
    -

    Pygments Lexer for Stan models.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:stan
    Filename patterns:*.stan
    Mimetypes:None
    -
    -
    -
    -

    Lexers for other languages

    -

    ABAPLexer

    -
    -

    Lexer for ABAP, SAP's integrated language.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:abap
    Filename patterns:*.abap
    Mimetypes:text/x-abap
    -
    -

    AppleScriptLexer

    -
    -

    For AppleScript source code, -including AppleScript Studio. -Contributed by Andreas Amann <aamann@mac.com>.

    - --- - - - - - - - -
    Short names:applescript
    Filename patterns:*.applescript
    Mimetypes:None
    -
    -

    AsymptoteLexer

    -
    -

    For Asymptote source code.

    -

    New in Pygments 1.2.

    - --- - - - - - - - -
    Short names:asy, asymptote
    Filename patterns:*.asy
    Mimetypes:text/x-asymptote
    -
    -

    AutoItLexer

    -
    -

    For AutoIt files.

    -

    AutoIt is a freeware BASIC-like scripting language -designed for automating the Windows GUI and general scripting

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:autoit, Autoit
    Filename patterns:*.au3
    Mimetypes:text/x-autoit
    -
    -

    AutohotkeyLexer

    -
    -

    For autohotkey source code.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:ahk
    Filename patterns:*.ahk, *.ahkl
    Mimetypes:text/x-autohotkey
    -
    -

    AwkLexer

    -
    -

    For Awk scripts.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:awk, gawk, mawk, nawk
    Filename patterns:*.awk
    Mimetypes:application/x-awk
    -
    -

    BefungeLexer

    -
    -

    Lexer for the esoteric Befunge -language.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:befunge
    Filename patterns:*.befunge
    Mimetypes:application/x-befunge
    -
    -

    BrainfuckLexer

    -
    -

    Lexer for the esoteric BrainFuck -language.

    - --- - - - - - - - -
    Short names:brainfuck, bf
    Filename patterns:*.bf, *.b
    Mimetypes:application/x-brainfuck
    -
    -

    BroLexer

    -
    -

    For Bro scripts.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:bro
    Filename patterns:*.bro
    Mimetypes:None
    -
    -

    CbmBasicV2Lexer

    -
    -

    For CBM BASIC V2 sources.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:cbmbas
    Filename patterns:*.bas
    Mimetypes:None
    -
    -

    Cfengine3Lexer

    -
    -

    Lexer for CFEngine3 policy files.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:cfengine3, cf3
    Filename patterns:*.cf
    Mimetypes:None
    -
    -

    ECLLexer

    -
    -

    Lexer for the declarative big-data ECL -language.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:ecl
    Filename patterns:*.ecl
    Mimetypes:application/x-ecl
    -
    -

    GherkinLexer

    -
    -

    For Gherkin <http://github.com/aslakhellesoy/gherkin/> syntax.

    -

    New in Pygments 1.2.

    - --- - - - - - - - -
    Short names:Cucumber, cucumber, Gherkin, gherkin
    Filename patterns:*.feature
    Mimetypes:text/x-gherkin
    -
    -

    GnuplotLexer

    -
    -

    For Gnuplot plotting scripts.

    -

    New in Pygments 0.11.

    - --- - - - - - - - -
    Short names:gnuplot
    Filename patterns:*.plot, *.plt
    Mimetypes:text/x-gnuplot
    -
    -

    GoodDataCLLexer

    -
    -

    Lexer for GoodData-CL -script files.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:gooddata-cl
    Filename patterns:*.gdc
    Mimetypes:text/x-gooddata-cl
    -
    -

    HybrisLexer

    -
    -

    For Hybris source code.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:hybris, hy
    Filename patterns:*.hy, *.hyb
    Mimetypes:text/x-hybris, application/x-hybris
    -
    -

    KconfigLexer

    -
    -

    For Linux-style Kconfig files.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:kconfig, menuconfig, linux-config, kernel-config
    Filename patterns:Kconfig, *Config.in*, external.in*, standard-modules.in
    Mimetypes:text/x-kconfig
    -
    -

    LogtalkLexer

    -
    -

    For Logtalk source code.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:logtalk
    Filename patterns:*.lgt
    Mimetypes:text/x-logtalk
    -
    -

    MOOCodeLexer

    -
    -

    For MOOCode (the MOO scripting -language).

    -

    New in Pygments 0.9.

    - --- - - - - - - - -
    Short names:moocode
    Filename patterns:*.moo
    Mimetypes:text/x-moocode
    -
    -

    MaqlLexer

    -
    -

    Lexer for GoodData MAQL -scripts.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:maql
    Filename patterns:*.maql
    Mimetypes:text/x-gooddata-maql, application/x-gooddata-maql
    -
    -

    ModelicaLexer

    -
    -

    For Modelica source code.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:modelica
    Filename patterns:*.mo
    Mimetypes:text/x-modelica
    -
    -

    MscgenLexer

    -
    -

    For Mscgen files.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:mscgen, msc
    Filename patterns:*.msc
    Mimetypes:None
    -
    -

    NSISLexer

    -
    -

    For NSIS scripts.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:nsis, nsi, nsh
    Filename patterns:*.nsi, *.nsh
    Mimetypes:text/x-nsis
    -
    -

    NewspeakLexer

    -
    -

    For Newspeak <http://newspeaklanguage.org/> syntax.

    - --- - - - - - - - -
    Short names:newspeak
    Filename patterns:*.ns2
    Mimetypes:text/x-newspeak
    -
    -

    OpenEdgeLexer

    -
    -

    Lexer for OpenEdge ABL (formerly Progress) source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:openedge, abl, progress
    Filename patterns:*.p, *.cls
    Mimetypes:text/x-openedge, application/x-openedge
    -
    -

    PostScriptLexer

    -
    -

    Lexer for PostScript files.

    -

    The PostScript Language Reference published by Adobe at -<http://partners.adobe.com/public/developer/en/ps/PLRM.pdf> -is the authority for this.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:postscript
    Filename patterns:*.ps, *.eps
    Mimetypes:application/postscript
    -
    -

    PovrayLexer

    -
    -

    For Persistence of Vision Raytracer files.

    -

    New in Pygments 0.11.

    - --- - - - - - - - -
    Short names:pov
    Filename patterns:*.pov, *.inc
    Mimetypes:text/x-povray
    -
    -

    ProtoBufLexer

    -
    -

    Lexer for Protocol Buffer -definition files.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:protobuf
    Filename patterns:*.proto
    Mimetypes:None
    -
    -

    PuppetLexer

    -
    -

    For Puppet configuration DSL.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:puppet
    Filename patterns:*.pp
    Mimetypes:None
    -
    -

    RPMSpecLexer

    -
    -

    For RPM *.spec files

    -
    -


    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:spec
    Filename patterns:*.spec
    Mimetypes:text/x-rpm-spec
    -
    -

    RebolLexer

    -
    -

    A REBOL lexer.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:rebol
    Filename patterns:*.r, *.r3
    Mimetypes:text/x-rebol
    -
    -

    RedcodeLexer

    -
    -

    A simple Redcode lexer based on ICWS'94. -Contributed by Adam Blinkinsop <blinks@acm.org>.

    -

    New in Pygments 0.8.

    - --- - - - - - - - -
    Short names:redcode
    Filename patterns:*.cw
    Mimetypes:None
    -
    -

    RobotFrameworkLexer

    -
    -

    For Robot Framework test data.

    -

    Supports both space and pipe separated plain text formats.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:RobotFramework, robotframework
    Filename patterns:*.txt
    Mimetypes:text/x-robotframework
    -
    -

    SmalltalkLexer

    -
    -

    For Smalltalk syntax. -Contributed by Stefan Matthias Aust. -Rewritten by Nils Winter.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:smalltalk, squeak
    Filename patterns:*.st
    Mimetypes:text/x-smalltalk
    -
    -

    SnobolLexer

    -
    -

    Lexer for the SNOBOL4 programming language.

    -

    Recognizes the common ASCII equivalents of the original SNOBOL4 operators. -Does not require spaces around binary operators.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:snobol
    Filename patterns:*.snobol
    Mimetypes:text/x-snobol
    -
    -

    SourcePawnLexer

    -
    -

    For SourcePawn source code with preprocessor directives.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:sp
    Filename patterns:*.sp
    Mimetypes:text/x-sourcepawn
    -
    -

    UrbiscriptLexer

    -
    -

    For UrbiScript source code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:urbiscript
    Filename patterns:*.u
    Mimetypes:application/x-urbiscript
    -
    -

    VGLLexer

    -
    -

    For SampleManager VGL -source code.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:vgl
    Filename patterns:*.rpf
    Mimetypes:None
    -
    -
    -
    -

    Lexers for parser generators

    -

    AntlrActionScriptLexer

    -
    -

    ANTLR with ActionScript Target

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr-as, antlr-actionscript
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    AntlrCSharpLexer

    -
    -

    ANTLR with C# Target

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr-csharp, antlr-c#
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    AntlrCppLexer

    -
    -

    ANTLR with CPP Target

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr-cpp
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    AntlrJavaLexer

    -
    -

    ANTLR with Java Target

    -

    New in Pygments 1.1

    - --- - - - - - - - -
    Short names:antlr-java
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    AntlrLexer

    -
    -

    Generic ANTLR Lexer. -Should not be called directly, instead -use DelegatingLexer for your target language.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr
    Filename patterns:None
    Mimetypes:None
    -
    -

    AntlrObjectiveCLexer

    -
    -

    ANTLR with Objective-C Target

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr-objc
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    AntlrPerlLexer

    -
    -

    ANTLR with Perl Target

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr-perl
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    AntlrPythonLexer

    -
    -

    ANTLR with Python Target

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr-python
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    AntlrRubyLexer

    -
    -

    ANTLR with Ruby Target

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:antlr-ruby, antlr-rb
    Filename patterns:*.G, *.g
    Mimetypes:None
    -
    -

    RagelCLexer

    -
    -

    A lexer for Ragel in a C host file.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel-c
    Filename patterns:*.rl
    Mimetypes:None
    -
    -

    RagelCppLexer

    -
    -

    A lexer for Ragel in a CPP host file.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel-cpp
    Filename patterns:*.rl
    Mimetypes:None
    -
    -

    RagelDLexer

    -
    -

    A lexer for Ragel in a D host file.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel-d
    Filename patterns:*.rl
    Mimetypes:None
    -
    -

    RagelEmbeddedLexer

    -
    -

    A lexer for Ragel embedded in a host language file.

    -

This will only highlight Ragel statements. If you want host language highlighting then call the language-specific Ragel lexer.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel-em
    Filename patterns:*.rl
    Mimetypes:None
    -
    -

    RagelJavaLexer

    -
    -

    A lexer for Ragel in a Java host file.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel-java
    Filename patterns:*.rl
    Mimetypes:None
    -
    -

    RagelLexer

    -
    -

A pure Ragel lexer. Use this for fragments of Ragel. For .rl files, use RagelEmbeddedLexer instead (or one of the language-specific subclasses).

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel
    Filename patterns:None
    Mimetypes:None
    -
    -

    RagelObjectiveCLexer

    -
    -

    A lexer for Ragel in an Objective C host file.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel-objc
    Filename patterns:*.rl
    Mimetypes:None
    -
    -

    RagelRubyLexer

    -
    -

    A lexer for Ragel in a Ruby host file.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:ragel-ruby, ragel-rb
    Filename patterns:*.rl
    Mimetypes:None
    -
    -

    TreetopLexer

    -
    -

    A lexer for Treetop grammars.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:treetop
    Filename patterns:*.treetop, *.tt
    Mimetypes:None
    -
    -
    -
    -

    Lexers for various shells

    -

    BashLexer

    -
    -

    Lexer for (ba|k|)sh shell scripts.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:bash, sh, ksh
    Filename patterns:*.sh, *.ksh, *.bash, *.ebuild, *.eclass, .bashrc, bashrc, .bash\*, bash\*
    Mimetypes:application/x-sh, application/x-shellscript
    -
    -

    BashSessionLexer

    -
    -

    Lexer for simplistic shell sessions.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:console
    Filename patterns:*.sh-session
    Mimetypes:application/x-shell-session
    -
    -

    BatchLexer

    -
    -

    Lexer for the DOS/Windows Batch file format.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:bat
    Filename patterns:*.bat, *.cmd
    Mimetypes:application/x-dos-batch
    -
    -

    PowerShellLexer

    -
    -

    For Windows PowerShell code.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:powershell, posh, ps1
    Filename patterns:*.ps1
    Mimetypes:text/x-powershell
    -
    -

    TcshLexer

    -
    -

    Lexer for tcsh scripts.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:tcsh, csh
    Filename patterns:*.tcsh, *.csh
    Mimetypes:application/x-csh
    -
    -
    -
    -

    Special lexers

    -

    RawTokenLexer

    -
    -

Recreate a token stream formatted with the RawTokenFormatter. This lexer raises exceptions during parsing if the token stream in the file is malformed.

    -

    Additional options accepted:

    -
    -
    compress
    -
    If set to "gz" or "bz2", decompress the token stream with -the given compression algorithm before lexing (default: "").
    -
    - --- - - - - - - - -
    Short names:raw
    Filename patterns:None
    Mimetypes:application/x-pygments-tokens
    -
    -

    TextLexer

    -
    -

    "Null" lexer, doesn't highlight anything.

    - --- - - - - - - - -
    Short names:text
    Filename patterns:*.txt
    Mimetypes:text/plain
    -
    -
    - -
    -

    Lexers for various template engines' markup

    -

    CheetahHtmlLexer

    -
    -

Subclass of the CheetahLexer that highlights unlexed data with the HtmlLexer.

    - --- - - - - - - - -
    Short names:html+cheetah, html+spitfire
    Filename patterns:None
    Mimetypes:text/html+cheetah, text/html+spitfire
    -
    -

    CheetahJavascriptLexer

    -
    -

Subclass of the CheetahLexer that highlights unlexed data with the JavascriptLexer.

    - --- - - - - - - - -
    Short names:js+cheetah, javascript+cheetah, js+spitfire, javascript+spitfire
    Filename patterns:None
    Mimetypes:application/x-javascript+cheetah, text/x-javascript+cheetah, text/javascript+cheetah, application/x-javascript+spitfire, text/x-javascript+spitfire, text/javascript+spitfire
    -
    -

    CheetahLexer

    -
    -

Generic cheetah templates lexer. Code that isn't Cheetah markup is yielded as Token.Other. This also works for spitfire templates which use the same syntax.

    - --- - - - - - - - -
    Short names:cheetah, spitfire
    Filename patterns:*.tmpl, *.spt
    Mimetypes:application/x-cheetah, application/x-spitfire
    -
    -

    CheetahXmlLexer

    -
    -

Subclass of the CheetahLexer that highlights unlexed data with the XmlLexer.

    - --- - - - - - - - -
    Short names:xml+cheetah, xml+spitfire
    Filename patterns:None
    Mimetypes:application/xml+cheetah, application/xml+spitfire
    -
    -

    ColdfusionHtmlLexer

    -
    -

    Coldfusion markup in html

    - --- - - - - - - - -
    Short names:cfm
    Filename patterns:*.cfm, *.cfml, *.cfc
    Mimetypes:application/x-coldfusion
    -
    -

    ColdfusionLexer

    -
    -

    Coldfusion statements

    - --- - - - - - - - -
    Short names:cfs
    Filename patterns:None
    Mimetypes:None
    -
    -

    CssDjangoLexer

    -
    -

    Subclass of the DjangoLexer that highlights unlexed data with the -CssLexer.

    - --- - - - - - - - -
    Short names:css+django, css+jinja
    Filename patterns:None
    Mimetypes:text/css+django, text/css+jinja
    -
    -

    CssErbLexer

    -
    -

    Subclass of ErbLexer which highlights unlexed data with the CssLexer.

    - --- - - - - - - - -
    Short names:css+erb, css+ruby
    Filename patterns:None
    Mimetypes:text/css+ruby
    -
    -

    CssGenshiLexer

    -
    -

    A lexer that highlights CSS definitions in genshi text templates.

    - --- - - - - - - - -
    Short names:css+genshitext, css+genshi
    Filename patterns:None
    Mimetypes:text/css+genshi
    -
    -

    CssPhpLexer

    -
    -

    Subclass of PhpLexer which highlights unmatched data with the CssLexer.

    - --- - - - - - - - -
    Short names:css+php
    Filename patterns:None
    Mimetypes:text/css+php
    -
    -

    CssSmartyLexer

    -
    -

    Subclass of the SmartyLexer that highlights unlexed data with the -CssLexer.

    - --- - - - - - - - -
    Short names:css+smarty
    Filename patterns:None
    Mimetypes:text/css+smarty
    -
    -

    DjangoLexer

    -
    -

Generic django and jinja template lexer.

    -

It just highlights django/jinja code between the preprocessor directives, other data is left untouched by the lexer.

    - --- - - - - - - - -
    Short names:django, jinja
    Filename patterns:None
    Mimetypes:application/x-django-templating, application/x-jinja
    -
    -

    ErbLexer

    -
    -

    Generic ERB (Ruby Templating) -lexer.

    -

    Just highlights ruby code between the preprocessor directives, other data -is left untouched by the lexer.

    -

    All options are also forwarded to the RubyLexer.

    - --- - - - - - - - -
    Short names:erb
    Filename patterns:None
    Mimetypes:application/x-ruby-templating
    -
    -

    EvoqueHtmlLexer

    -
    -

    Subclass of the EvoqueLexer that highlights unlexed data with the -HtmlLexer.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:html+evoque
    Filename patterns:*.html
    Mimetypes:text/html+evoque
    -
    -

    EvoqueLexer

    -
    -

    For files using the Evoque templating system.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:evoque
    Filename patterns:*.evoque
    Mimetypes:application/x-evoque
    -
    -

    EvoqueXmlLexer

    -
    -

    Subclass of the EvoqueLexer that highlights unlexed data with the -XmlLexer.

    -

    New in Pygments 1.1.

    - --- - - - - - - - -
    Short names:xml+evoque
    Filename patterns:*.xml
    Mimetypes:application/xml+evoque
    -
    -

    GenshiLexer

    -
    -

A lexer that highlights genshi and kid XML templates.

    - --- - - - - - - - -
    Short names:genshi, kid, xml+genshi, xml+kid
    Filename patterns:*.kid
    Mimetypes:application/x-genshi, application/x-kid
    -
    -

    GenshiTextLexer

    -
    -

    A lexer that highlights genshi text -templates.

    - --- - - - - - - - -
    Short names:genshitext
    Filename patterns:None
    Mimetypes:application/x-genshi-text, text/x-genshi
    -
    -

    HtmlDjangoLexer

    -
    -

Subclass of the DjangoLexer that highlights unlexed data with the HtmlLexer.

    -

    Nested Javascript and CSS is highlighted too.

    - --- - - - - - - - -
    Short names:html+django, html+jinja
    Filename patterns:None
    Mimetypes:text/html+django, text/html+jinja
    -
    -

    HtmlGenshiLexer

    -
    -

A lexer that highlights genshi and kid HTML templates.

    - --- - - - - - - - -
    Short names:html+genshi, html+kid
    Filename patterns:None
    Mimetypes:text/html+genshi
    -
    -

    HtmlPhpLexer

    -
    -

    Subclass of PhpLexer that highlights unhandled data with the HtmlLexer.

    -

    Nested Javascript and CSS is highlighted too.

    - --- - - - - - - - -
    Short names:html+php
    Filename patterns:*.phtml
    Mimetypes:application/x-php, application/x-httpd-php, application/x-httpd-php3, application/x-httpd-php4, application/x-httpd-php5
    -
    -

    HtmlSmartyLexer

    -
    -

Subclass of the SmartyLexer that highlights unlexed data with the HtmlLexer.

    -

    Nested Javascript and CSS is highlighted too.

    - --- - - - - - - - -
    Short names:html+smarty
    Filename patterns:None
    Mimetypes:text/html+smarty
    -
    -

    JavascriptDjangoLexer

    -
    -

    Subclass of the DjangoLexer that highlights unlexed data with the -JavascriptLexer.

    - --- - - - - - - - -
    Short names:js+django, javascript+django, js+jinja, javascript+jinja
    Filename patterns:None
    Mimetypes:application/x-javascript+django, application/x-javascript+jinja, text/x-javascript+django, text/x-javascript+jinja, text/javascript+django, text/javascript+jinja
    -
    -

    JavascriptErbLexer

    -
    -

    Subclass of ErbLexer which highlights unlexed data with the -JavascriptLexer.

    - --- - - - - - - - -
    Short names:js+erb, javascript+erb, js+ruby, javascript+ruby
    Filename patterns:None
    Mimetypes:application/x-javascript+ruby, text/x-javascript+ruby, text/javascript+ruby
    -
    -

    JavascriptGenshiLexer

    -
    -

    A lexer that highlights javascript code in genshi text templates.

    - --- - - - - - - - -
    Short names:js+genshitext, js+genshi, javascript+genshitext, javascript+genshi
    Filename patterns:None
    Mimetypes:application/x-javascript+genshi, text/x-javascript+genshi, text/javascript+genshi
    -
    -

    JavascriptPhpLexer

    -
    -

    Subclass of PhpLexer which highlights unmatched data with the -JavascriptLexer.

    - --- - - - - - - - -
    Short names:js+php, javascript+php
    Filename patterns:None
    Mimetypes:application/x-javascript+php, text/x-javascript+php, text/javascript+php
    -
    -

    JavascriptSmartyLexer

    -
    -

    Subclass of the SmartyLexer that highlights unlexed data with the -JavascriptLexer.

    - --- - - - - - - - -
    Short names:js+smarty, javascript+smarty
    Filename patterns:None
    Mimetypes:application/x-javascript+smarty, text/x-javascript+smarty, text/javascript+smarty
    -
    -

    JspLexer

    -
    -

    Lexer for Java Server Pages.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:jsp
    Filename patterns:*.jsp
    Mimetypes:application/x-jsp
    -
    -

    LassoCssLexer

    -
    -

    Subclass of the LassoLexer which highlights unhandled data with the -CssLexer.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:css+lasso
    Filename patterns:None
    Mimetypes:text/css+lasso
    -
    -

    LassoHtmlLexer

    -
    -

    Subclass of the LassoLexer which highlights unhandled data with the -HtmlLexer.

    -

    Nested JavaScript and CSS is also highlighted.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:html+lasso
    Filename patterns:None
    Mimetypes:text/html+lasso, application/x-httpd-lasso, application/x-httpd-lasso[89]
    -
    -

    LassoJavascriptLexer

    -
    -

    Subclass of the LassoLexer which highlights unhandled data with the -JavascriptLexer.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:js+lasso, javascript+lasso
    Filename patterns:None
    Mimetypes:application/x-javascript+lasso, text/x-javascript+lasso, text/javascript+lasso
    -
    -

    LassoXmlLexer

    -
    -

    Subclass of the LassoLexer which highlights unhandled data with the -XmlLexer.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:xml+lasso
    Filename patterns:None
    Mimetypes:application/xml+lasso
    -
    -

    MakoCssLexer

    -
    -

Subclass of the MakoLexer that highlights unlexed data with the CssLexer.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:css+mako
    Filename patterns:None
    Mimetypes:text/css+mako
    -
    -

    MakoHtmlLexer

    -
    -

    Subclass of the MakoLexer that highlights unlexed data -with the HtmlLexer.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:html+mako
    Filename patterns:None
    Mimetypes:text/html+mako
    -
    -

    MakoJavascriptLexer

    -
    -

Subclass of the MakoLexer that highlights unlexed data with the JavascriptLexer.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:js+mako, javascript+mako
    Filename patterns:None
    Mimetypes:application/x-javascript+mako, text/x-javascript+mako, text/javascript+mako
    -
    -

    MakoLexer

    -
    -

Generic mako templates lexer. Code that isn't Mako markup is yielded as Token.Other.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:mako
    Filename patterns:*.mao
    Mimetypes:application/x-mako
    -
    -

    MakoXmlLexer

    -
    -

Subclass of the MakoLexer that highlights unlexed data with the XmlLexer.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:xml+mako
    Filename patterns:None
    Mimetypes:application/xml+mako
    -
    -

    MasonLexer

    -
    -

Generic mason templates lexer. Stolen from Myghty lexer. Code that isn't Mason markup is HTML.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:mason
    Filename patterns:*.m, *.mhtml, *.mc, *.mi, autohandler, dhandler
    Mimetypes:application/x-mason
    -
    -

    MyghtyCssLexer

    -
    -

Subclass of the MyghtyLexer that highlights unlexed data with the CssLexer.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:css+myghty
    Filename patterns:None
    Mimetypes:text/css+myghty
    -
    -

    MyghtyHtmlLexer

    -
    -

Subclass of the MyghtyLexer that highlights unlexed data with the HtmlLexer.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:html+myghty
    Filename patterns:None
    Mimetypes:text/html+myghty
    -
    -

    MyghtyJavascriptLexer

    -
    -

Subclass of the MyghtyLexer that highlights unlexed data with the JavascriptLexer.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:js+myghty, javascript+myghty
    Filename patterns:None
    Mimetypes:application/x-javascript+myghty, text/x-javascript+myghty, text/javascript+mygthy
    -
    -

    MyghtyLexer

    -
    -

Generic myghty templates lexer. Code that isn't Myghty markup is yielded as Token.Other.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:myghty
    Filename patterns:*.myt, autodelegate
    Mimetypes:application/x-myghty
    -
    -

    MyghtyXmlLexer

    -
    -

Subclass of the MyghtyLexer that highlights unlexed data with the XmlLexer.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:xml+myghty
    Filename patterns:None
    Mimetypes:application/xml+myghty
    -
    -

    RhtmlLexer

    -
    -

    Subclass of the ERB lexer that highlights the unlexed data with the -html lexer.

    -

    Nested Javascript and CSS is highlighted too.

    - --- - - - - - - - -
    Short names:rhtml, html+erb, html+ruby
    Filename patterns:*.rhtml
    Mimetypes:text/html+ruby
    -
    -

    SmartyLexer

    -
    -

    Generic Smarty template lexer.

    -

    Just highlights smarty code between the preprocessor directives, other -data is left untouched by the lexer.

    - --- - - - - - - - -
    Short names:smarty
    Filename patterns:*.tpl
    Mimetypes:application/x-smarty
    -
    -

    SspLexer

    -
    -

    Lexer for Scalate Server Pages.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:ssp
    Filename patterns:*.ssp
    Mimetypes:application/x-ssp
    -
    -

    TeaTemplateLexer

    -
    -

    Lexer for Tea Templates.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:tea
    Filename patterns:*.tea
    Mimetypes:text/x-tea
    -
    -

    VelocityHtmlLexer

    -
    -

Subclass of the VelocityLexer that highlights unlexed data with the HtmlLexer.

    - --- - - - - - - - -
    Short names:html+velocity
    Filename patterns:None
    Mimetypes:text/html+velocity
    -
    -

    VelocityLexer

    -
    -

    Generic Velocity template lexer.

    -

    Just highlights velocity directives and variable references, other -data is left untouched by the lexer.

    - --- - - - - - - - -
    Short names:velocity
    Filename patterns:*.vm, *.fhtml
    Mimetypes:None
    -
    -

    VelocityXmlLexer

    -
    -

Subclass of the VelocityLexer that highlights unlexed data with the XmlLexer.

    - --- - - - - - - - -
    Short names:xml+velocity
    Filename patterns:None
    Mimetypes:application/xml+velocity
    -
    -

    XmlDjangoLexer

    -
    -

    Subclass of the DjangoLexer that highlights unlexed data with the -XmlLexer.

    - --- - - - - - - - -
    Short names:xml+django, xml+jinja
    Filename patterns:None
    Mimetypes:application/xml+django, application/xml+jinja
    -
    -

    XmlErbLexer

    -
    -

    Subclass of ErbLexer which highlights data outside preprocessor -directives with the XmlLexer.

    - --- - - - - - - - -
    Short names:xml+erb, xml+ruby
    Filename patterns:None
    Mimetypes:application/xml+ruby
    -
    -

    XmlPhpLexer

    -
    -

Subclass of PhpLexer that highlights unhandled data with the XmlLexer.

    - --- - - - - - - - -
    Short names:xml+php
    Filename patterns:None
    Mimetypes:application/xml+php
    -
    -

    XmlSmartyLexer

    -
    -

    Subclass of the SmartyLexer that highlights unlexed data with the -XmlLexer.

    - --- - - - - - - - -
    Short names:xml+smarty
    Filename patterns:None
    Mimetypes:application/xml+smarty
    -
    -
    -
    -

    Lexers for non-source code file types

    -

    ApacheConfLexer

    -
    -

    Lexer for configuration files following the Apache config file -format.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:apacheconf, aconf, apache
    Filename patterns:.htaccess, apache.conf, apache2.conf
    Mimetypes:text/x-apacheconf
    -
    -

    BBCodeLexer

    -
    -

    A lexer that highlights BBCode(-like) syntax.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:bbcode
    Filename patterns:None
    Mimetypes:text/x-bbcode
    -
    -

    BaseMakefileLexer

    -
    -

    Lexer for simple Makefiles (no preprocessing).

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:basemake
    Filename patterns:None
    Mimetypes:None
    -
    -

    CMakeLexer

    -
    -

    Lexer for CMake files.

    -

    New in Pygments 1.2.

    - --- - - - - - - - -
    Short names:cmake
    Filename patterns:*.cmake, CMakeLists.txt
    Mimetypes:text/x-cmake
    -
    -

    DarcsPatchLexer

    -
    -

    DarcsPatchLexer is a lexer for the various versions of the darcs patch -format. Examples of this format are derived by commands such as -darcs annotate --patch and darcs send.

    -

    New in Pygments 0.10.

    - --- - - - - - - - -
    Short names:dpatch
    Filename patterns:*.dpatch, *.darcspatch
    Mimetypes:None
    -
    -

    DebianControlLexer

    -
    -

    Lexer for Debian control files and apt-cache show <pkg> outputs.

    -

    New in Pygments 0.9.

    - --- - - - - - - - -
    Short names:control
    Filename patterns:control
    Mimetypes:None
    -
    -

    DiffLexer

    -
    -

    Lexer for unified or context-style diffs or patches.

    - --- - - - - - - - -
    Short names:diff, udiff
    Filename patterns:*.diff, *.patch
    Mimetypes:text/x-diff, text/x-patch
    -
    -

    GettextLexer

    -
    -

    Lexer for Gettext catalog files.

    -

    New in Pygments 0.9.

    - --- - - - - - - - -
    Short names:pot, po
    Filename patterns:*.pot, *.po
    Mimetypes:application/x-gettext, text/x-gettext, text/gettext
    -
    -

    GroffLexer

    -
    -

    Lexer for the (g)roff typesetting language, supporting groff -extensions. Mainly useful for highlighting manpage sources.

    -

    New in Pygments 0.6.

    - --- - - - - - - - -
    Short names:groff, nroff, man
    Filename patterns:*.[1234567], *.man
    Mimetypes:application/x-troff, text/troff
    -
    -

    HttpLexer

    -
    -

    Lexer for HTTP sessions.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:http
    Filename patterns:None
    Mimetypes:None
    -
    -

    HxmlLexer

    -
    -

    Lexer for haXe build files.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:haxeml, hxml
    Filename patterns:*.hxml
    Mimetypes:None
    -
    -

    IniLexer

    -
    -

    Lexer for configuration files in INI style.

    - --- - - - - - - - -
    Short names:ini, cfg
    Filename patterns:*.ini, *.cfg
    Mimetypes:text/x-ini
    -
    -

    IrcLogsLexer

    -
    -

    Lexer for IRC logs in irssi, xchat or weechat style.

    - --- - - - - - - - -
    Short names:irc
    Filename patterns:*.weechatlog
    Mimetypes:text/x-irclog
    -
    -

    LighttpdConfLexer

    -
    -

    Lexer for Lighttpd configuration files.

    -

    New in Pygments 0.11.

    - --- - - - - - - - -
    Short names:lighty, lighttpd
    Filename patterns:None
    Mimetypes:text/x-lighttpd-conf
    -
    -

    MakefileLexer

    -
    -

    Lexer for BSD and GNU make extensions (lenient enough to handle both in -the same file even).

    -

    Rewritten in Pygments 0.10.

    - --- - - - - - - - -
    Short names:make, makefile, mf, bsdmake
    Filename patterns:*.mak, Makefile, makefile, Makefile.*, GNUmakefile
    Mimetypes:text/x-makefile
    -
    -

    MoinWikiLexer

    -
    -

    For MoinMoin (and Trac) Wiki markup.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:trac-wiki, moin
    Filename patterns:None
    Mimetypes:text/x-trac-wiki
    -
    -

    NginxConfLexer

    -
    -

    Lexer for Nginx configuration files.

    -

    New in Pygments 0.11.

    - --- - - - - - - - -
    Short names:nginx
    Filename patterns:None
    Mimetypes:text/x-nginx-conf
    -
    -

    PropertiesLexer

    -
    -

    Lexer for configuration files in Java's properties format.

    -

    New in Pygments 1.4.

    - --- - - - - - - - -
    Short names:properties
    Filename patterns:*.properties
    Mimetypes:text/x-java-properties
    -
    -

    PyPyLogLexer

    -
    -

    Lexer for PyPy log files.

    -

    New in Pygments 1.5.

    - --- - - - - - - - -
    Short names:pypylog, pypy
    Filename patterns:*.pypylog
    Mimetypes:application/x-pypylog
    -
    -

    RegeditLexer

    -
    -

Lexer for Windows Registry files produced by regedit.

    -

    New in Pygments 1.6.

    - --- - - - - - - - -
    Short names:None
    Filename patterns:*.reg
    Mimetypes:text/x-windows-registry
    -
    -

    RstLexer

    -
    -

    For reStructuredText markup.

    -

    New in Pygments 0.7.

    -

    Additional options accepted:

    -
    -
    handlecodeblocks
    -
Highlight the contents of .. sourcecode:: language and .. code:: language directives with a lexer for the given language (default: True). New in Pygments 0.8.
    -
    - --- - - - - - - - -
    Short names:rst, rest, restructuredtext
    Filename patterns:*.rst, *.rest
    Mimetypes:text/x-rst, text/prs.fallenstein.rst
    -
    -

    SourcesListLexer

    -
    -

    Lexer that highlights debian sources.list files.

    -

    New in Pygments 0.7.

    - --- - - - - - - - -
    Short names:sourceslist, sources.list
    Filename patterns:sources.list
    Mimetypes:None
    -
    -

    SquidConfLexer

    -
    -

    Lexer for squid configuration files.

    -

    New in Pygments 0.9.

    - --- - - - - - - - -
    Short names:squidconf, squid.conf, squid
    Filename patterns:squid.conf
    Mimetypes:text/x-squidconf
    -
    -

    TexLexer

    -
    -

    Lexer for the TeX and LaTeX typesetting languages.

    - --- - - - - - - - -
    Short names:tex, latex
    Filename patterns:*.tex, *.aux, *.toc
    Mimetypes:text/x-tex, text/x-latex
    -
    -

    VimLexer

    -
    -

    Lexer for VimL script files.

    -

    New in Pygments 0.8.

    - --- - - - - - - - -
    Short names:vim
    Filename patterns:*.vim, .vimrc, .exrc, .gvimrc, vimrc, exrc, gvimrc, vimrc, gvimrc
    Mimetypes:text/x-vim
    -
    -

    YamlLexer

    -
    -

Lexer for YAML, a human-friendly data serialization language.

    -

    New in Pygments 0.11.

    - --- - - - - - - - -
    Short names:yaml
    Filename patterns:*.yaml, *.yml
    Mimetypes:text/x-yaml
    -
    -
    - -
    -

    Iterating over all lexers

    -

    New in Pygments 0.6.

    -

To get all lexers (both the builtin and the plugin ones), you can use the get_all_lexers() function from the pygments.lexers module:

    -
    >>> from pygments.lexers import get_all_lexers
    ->>> i = get_all_lexers()
    ->>> i.next()
    -('Diff', ('diff',), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch'))
    ->>> i.next()
    -('Delphi', ('delphi', 'objectpascal', 'pas', 'pascal'), ('*.pas',), ('text/x-pascal',))
    ->>> i.next()
    -('XML+Ruby', ('xml+erb', 'xml+ruby'), (), ())
    -
    -

As you can see, the return value is an iterator which yields tuples in the form (name, aliases, filetypes, mimetypes).

    -
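For instance, the tuples can be turned into a quick alias-to-name lookup table; a minimal sketch (the dict name is ours):

from pygments.lexers import get_all_lexers

# Map every alias (short name) to the human-readable lexer name.
alias_to_name = {}
for name, aliases, patterns, mimetypes in get_all_lexers():
    for alias in aliases:
        alias_to_name[alias] = name

print(alias_to_name['diff'])   # -> 'Diff'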
    - -
    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/build/moinmoin.html b/vendor/pygments/docs/build/moinmoin.html deleted file mode 100644 index 4851501..0000000 --- a/vendor/pygments/docs/build/moinmoin.html +++ /dev/null @@ -1,245 +0,0 @@ - - - - Using Pygments with MoinMoin — Pygments - - - - -
    -

    Pygments

    -

    Using Pygments with MoinMoin


From Pygments 0.7, the source distribution ships a Moin parser plugin that can be used to get Pygments highlighting in Moin wiki pages.

    -

To use it, copy the file external/moin-parser.py from the Pygments distribution to the data/plugin/parser subdirectory of your Moin instance. Edit the options at the top of the file (currently ATTACHMENTS and INLINESTYLES) and rename the file to the name that the parser directive should have. For example, if you name the file code.py, you can get a highlighted Python code sample with this Wiki markup:

    -
    -{{{
    -#!code python
    -[...]
    -}}}
    -
    -

    where python is the Pygments name of the lexer to use.

    -

Additionally, if you set the ATTACHMENTS option to True, Pygments will also be called for all attachments for whose filenames there is no other parser registered.

    -

You are responsible for including CSS rules that will map the Pygments CSS classes to colors. You can output a stylesheet file with pygmentize, put it into the htdocs directory of your Moin instance and then include it in the stylesheets configuration option in the Moin config, e.g.:

    -
    -stylesheets = [('screen', '/htdocs/pygments.css')]
    -
    -
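The referenced pygments.css can be generated with pygmentize -S, or from Python with an equivalent sketch like the following (file name and style are just examples):

from pygments.formatters import HtmlFormatter

# Write CSS rules for the default style, scoped to the .highlight class.
with open('pygments.css', 'w') as f:
    f.write(HtmlFormatter(style='default').get_style_defs('.highlight'))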

If you do not want to do that and are willing to accept larger HTML output, you can set the INLINESTYLES option to True.

    - -
    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/build/plugins.html b/vendor/pygments/docs/build/plugins.html deleted file mode 100644 index 5cb2229..0000000 --- a/vendor/pygments/docs/build/plugins.html +++ /dev/null @@ -1,294 +0,0 @@ - - - - Register Plugins — Pygments - - - - -
    -

    Pygments

    -

    Register Plugins


If you want to extend Pygments without hacking the sources, but want to use the lexer/formatter/style/filter lookup functions (lexers.get_lexer_by_name et al.), you can use setuptools entrypoints to add new lexers, formatters or styles as if they were in the Pygments core.

    -

That means you can use your highlighter modules with the pygmentize script, which relies on the mentioned functions.

    -
    -

    Entrypoints

    -

    Here is a list of setuptools entrypoints that Pygments understands:

    -

    pygments.lexers

    -
    -

This entrypoint is used for adding new lexers to the Pygments core. The names of the entrypoint values don't really matter; Pygments extracts the required metadata from the class definition:

    -
    [pygments.lexers]
    -yourlexer = yourmodule:YourLexer
    -
    -

Note that you have to define the name, aliases and filenames attributes so that you can use the highlighter from the command line:

    -
    class YourLexer(...):
    -    name = 'Name Of Your Lexer'
    -    aliases = ['alias']
    -    filenames = ['*.ext']
    -
    -
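How the entrypoint is declared in your package is covered by the setuptools documentation; purely as an illustration, a minimal setup.py might look like this (package and module names are hypothetical):

from setuptools import setup

setup(
    name='yourlexer-plugin',
    version='0.1',
    py_modules=['yourmodule'],
    entry_points="""
    [pygments.lexers]
    yourlexer = yourmodule:YourLexer
    """,
)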
    -

    pygments.formatters

    -
    -

You can use this entrypoint to add new formatters to Pygments. The name of an entrypoint item is the name of the formatter. If you prefix the name with a slash it's used as a filename pattern:

    -
    [pygments.formatters]
    -yourformatter = yourmodule:YourFormatter
    -/.ext = yourmodule:YourFormatter
    -
    -
    -

    pygments.styles

    -
    -

To add a new style you can use this entrypoint. The name of the entrypoint is the name of the style:

    -
    [pygments.styles]
    -yourstyle = yourmodule:YourStyle
    -
    -
    -

    pygments.filters

    -
    -

Use this entrypoint to register a new filter. The name of the entrypoint is the name of the filter:

    -
    [pygments.filters]
    -yourfilter = yourmodule:YourFilter
    -
    -
    -
    -
    -

    How To Use Entrypoints

    -

This documentation doesn't explain how to use those entrypoints because this is covered in the setuptools documentation. That page should cover everything you need to write a plugin.

    -
    -
    -

    Extending The Core

    -

If you have written a Pygments plugin that is open source, please inform us about that. There is a high chance that we'll add it to the Pygments distribution.

    -
    - -
    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/build/quickstart.html b/vendor/pygments/docs/build/quickstart.html deleted file mode 100644 index 234aa80..0000000 --- a/vendor/pygments/docs/build/quickstart.html +++ /dev/null @@ -1,390 +0,0 @@ - - - - Introduction and Quickstart — Pygments - - - - -
    -

    Pygments

    -

    Introduction and Quickstart


Welcome to Pygments! This document explains the basic concepts and terms and gives a few examples of how to use the library.

    -
    -

    Architecture

    -

There are four types of components that work together to highlight a piece of code:

    -
      -
• A lexer splits the source into tokens, fragments of the source that have a token type that determines what the text represents semantically (e.g., keyword, string, or comment). There is a lexer for every language or markup format that Pygments supports.
• The token stream can be piped through filters, which usually modify the token types or text fragments, e.g. uppercasing all keywords.
• A formatter then takes the token stream and writes it to an output file, in a format such as HTML, LaTeX or RTF.
• While writing the output, a style determines how to highlight all the different token types. It maps them to attributes like "red and bold".
    -
    -
    -

    Example

    -

    Here is a small example for highlighting Python code:

    -
    from pygments import highlight
    -from pygments.lexers import PythonLexer
    -from pygments.formatters import HtmlFormatter
    -
    -code = 'print "Hello World"'
    -print highlight(code, PythonLexer(), HtmlFormatter())
    -
    -

    which prints something like this:

    -
    <div class="highlight">
    -<pre><span class="k">print</span> <span class="s">&quot;Hello World&quot;</span></pre>
    -</div>
    -
    -

As you can see, Pygments uses CSS classes (by default, but you can change that) instead of inline styles in order to avoid outputting redundant style information over and over. A CSS stylesheet that contains all CSS classes possibly used in the output can be produced by:

    -
    print HtmlFormatter().get_style_defs('.highlight')
    -
    -

The argument to get_style_defs is used as an additional CSS selector: the output may look like this:

    -
    .highlight .k { color: #AA22FF; font-weight: bold }
    -.highlight .s { color: #BB4444 }
    -...
    -
    -
    -
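Putting highlight() and get_style_defs() together, a small self-contained page can be assembled like this (a sketch; the output file name is arbitrary):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

formatter = HtmlFormatter()
body = highlight('print "Hello World"', PythonLexer(), formatter)
css = formatter.get_style_defs('.highlight')

# Embed the stylesheet directly in the page.
with open('hello.html', 'w') as f:
    f.write('<html><head><style>%s</style></head><body>%s</body></html>'
            % (css, body))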
    -

    Options

    -

The highlight() function supports a fourth argument called outfile; it must be a file object if given. The formatted output will then be written to this file instead of being returned as a string.

    -
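For example (a short sketch; the file name is arbitrary):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

# With outfile given, nothing is returned; the HTML is written to the file.
with open('out.html', 'w') as outfile:
    highlight('print "Hello World"', PythonLexer(), HtmlFormatter(), outfile)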

Lexers and formatters both support options. They are given to them as keyword arguments either to the class or to the lookup method:

    -
    from pygments import highlight
    -from pygments.lexers import get_lexer_by_name
    -from pygments.formatters import HtmlFormatter
    -
    -lexer = get_lexer_by_name("python", stripall=True)
    -formatter = HtmlFormatter(linenos=True, cssclass="source")
    -result = highlight(code, lexer, formatter)
    -
    -

This makes the lexer strip all leading and trailing whitespace from the input (stripall option), lets the formatter output line numbers (linenos option), and sets the wrapping <div>'s class to source (instead of highlight).

    -

    Important options include:

    -
    -
    encoding : for lexers and formatters
    -
Since Pygments uses Unicode strings internally, this determines which encoding will be used to convert to or from byte strings.
    -
    style : for formatters
    -
    The name of the style to use when writing the output.
    -
    -

For an overview of builtin lexers and formatters and their options, visit the lexer and formatter lists.

    -

For documentation on filters, see this page.

    -
    -
    -

    Lexer and formatter lookup

    -

If you want to look up a built-in lexer by its alias or a filename, you can use one of the following methods:

    -
    >>> from pygments.lexers import (get_lexer_by_name,
    -...     get_lexer_for_filename, get_lexer_for_mimetype)
    -
    ->>> get_lexer_by_name('python')
    -<pygments.lexers.PythonLexer>
    -
    ->>> get_lexer_for_filename('spam.rb')
    -<pygments.lexers.RubyLexer>
    -
    ->>> get_lexer_for_mimetype('text/x-perl')
    -<pygments.lexers.PerlLexer>
    -
    -

All these functions accept keyword arguments; they will be passed to the lexer as options.

    -

A similar API is available for formatters: use get_formatter_by_name() and get_formatter_for_filename() from the pygments.formatters module for this purpose.

    -
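For example (a sketch; the printed reprs are abbreviated like the lexer examples above):

>>> from pygments.formatters import (get_formatter_by_name,
...     get_formatter_for_filename)

>>> get_formatter_by_name('html')
<pygments.formatters.HtmlFormatter>

>>> get_formatter_for_filename('out.tex')
<pygments.formatters.LatexFormatter>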
    -
    -

    Guessing lexers

    -

If you don't know the content of the file, or you want to highlight a file whose extension is ambiguous, such as .html (which could contain plain HTML or some template tags), use these functions:

    -
    >>> from pygments.lexers import guess_lexer, guess_lexer_for_filename
    -
    ->>> guess_lexer('#!/usr/bin/python\nprint "Hello World!"')
    -<pygments.lexers.PythonLexer>
    -
    ->>> guess_lexer_for_filename('test.py', 'print "Hello World!"')
    -<pygments.lexers.PythonLexer>
    -
    -

guess_lexer() passes the given content to the lexer classes' analyse_text() method and returns the one for which it returns the highest number.

    -

All lexers have two different filename pattern lists: the primary and the secondary one. The get_lexer_for_filename() function only uses the primary list, whose entries are supposed to be unique among all lexers. guess_lexer_for_filename(), however, will first loop through all lexers and look at the primary and secondary filename patterns if the filename matches. If only one lexer matches, it is returned, else the guessing mechanism of guess_lexer() is used with the matching lexers.

    -

As usual, keyword arguments to these functions are given to the created lexer as options.

    -
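These lookup and guessing functions raise pygments.util.ClassNotFound when nothing matches; a common pattern is to fall back to the plain text lexer (a minimal sketch, with a helper name of our choosing):

from pygments.lexers import guess_lexer, get_lexer_by_name
from pygments.util import ClassNotFound

def lexer_for(text):
    # Fall back to the "null" text lexer if guessing fails.
    try:
        return guess_lexer(text)
    except ClassNotFound:
        return get_lexer_by_name('text')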
    -
    -

    Command line usage

    -

    You can use Pygments from the command line, using the pygmentize script:

    -
    -$ pygmentize test.py
    -
    -

will highlight the Python file test.py using ANSI escape sequences (a.k.a. terminal colors) and print the result to standard output.

    -

    To output HTML, use the -f option:

    -
    -$ pygmentize -f html -o test.html test.py
    -
    -

to write an HTML-highlighted version of test.py to the file test.html. Note that it will only be a snippet of HTML; if you want a full HTML document, use the "full" option:

    -
    -$ pygmentize -f html -O full -o test.html test.py
    -
    -

This will produce a full HTML document with an included stylesheet.

    -

    A style can be selected with -O style=<name>.

    -

If you need a stylesheet for an existing HTML file using Pygments CSS classes, it can be created with:

    -
    -$ pygmentize -S default -f html > style.css
    -
    -

    where default is the style name.

    -

More options and tricks can be found in the command line reference.

    -
    - -
    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/build/rstdirective.html b/vendor/pygments/docs/build/rstdirective.html deleted file mode 100644 index f7f54ec..0000000 --- a/vendor/pygments/docs/build/rstdirective.html +++ /dev/null @@ -1,229 +0,0 @@ - - - - Using Pygments in ReST documents — Pygments - - - - -
    -

    Pygments

    -

    Using Pygments in ReST documents


Many Python people use ReST for documenting their source code, programs, scripts, et cetera. This also means that documentation often includes source code samples or snippets.

    -

You can easily enable Pygments support for your ReST texts using a custom directive -- this is also how this documentation displays source code.

    -

From Pygments 0.9, the directive is shipped in the distribution as external/rst-directive.py. You can copy and adapt this code to your liking.
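A condensed sketch of such a directive, roughly along the lines of the shipped file (option handling omitted; the directive name "sourcecode" is just the conventional choice):

from docutils import nodes
from docutils.parsers.rst import Directive, directives

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter
from pygments.util import ClassNotFound

class Pygments(Directive):
    """Highlight the directive content with the lexer named as argument."""
    required_arguments = 1
    has_content = True

    def run(self):
        try:
            lexer = get_lexer_by_name(self.arguments[0])
        except ClassNotFound:
            lexer = get_lexer_by_name('text')
        html = highlight(u'\n'.join(self.content), lexer, HtmlFormatter())
        return [nodes.raw('', html, format='html')]

directives.register_directive('sourcecode', Pygments)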

    - - -
    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/build/styles.html b/vendor/pygments/docs/build/styles.html deleted file mode 100644 index 0a897de..0000000 --- a/vendor/pygments/docs/build/styles.html +++ /dev/null @@ -1,341 +0,0 @@ - - - - Styles — Pygments - - - - -
    -

    Pygments

    -

    Styles


Pygments comes with some builtin styles that work for both the HTML and LaTeX formatter.

    -

    The builtin styles can be looked up with the get_style_by_name function:

    -
    >>> from pygments.styles import get_style_by_name
    ->>> get_style_by_name('colorful')
    -<class 'pygments.styles.colorful.ColorfulStyle'>
    -
    -

You can pass an instance of a Style class to a formatter as the style option, in the form of a string:

    -
    >>> from pygments.styles import get_style_by_name
    ->>> HtmlFormatter(style='colorful').style
    -<class 'pygments.styles.colorful.ColorfulStyle'>
    -
    -

Or you can also import your own style (which must be a subclass of pygments.style.Style) and pass it to the formatter:

    -
    >>> from yourapp.yourmodule import YourStyle
    ->>> HtmlFormatter(style=YourStyle).style
    -<class 'yourapp.yourmodule.YourStyle'>
    -
    -
    -

    Creating Own Styles

    -

So, how do you create a style? All you have to do is subclass Style and define some styles:

    -
    from pygments.style import Style
    -from pygments.token import Keyword, Name, Comment, String, Error, \
    -     Number, Operator, Generic
    -
    -class YourStyle(Style):
    -    default_style = ""
    -    styles = {
    -        Comment:                'italic #888',
    -        Keyword:                'bold #005',
    -        Name:                   '#f00',
    -        Name.Function:          '#0f0',
    -        Name.Class:             'bold #0f0',
    -        String:                 'bg:#eee #111'
    -    }
    -
    -

That's it. There are just a few rules. When you define a style for Name, the style automatically also affects Name.Function and so on. If you defined 'bold' and you don't want boldface for a subtoken, use 'nobold'.

    -

(Philosophy: the styles aren't written in CSS syntax since this way they can be used for a variety of formatters.)

    -

    default_style is the style inherited by all token types.

    -
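A style class like this can be handed straight to a formatter; the following sketch (with a trimmed-down style) prints the CSS it produces:

from pygments.formatters import HtmlFormatter
from pygments.style import Style
from pygments.token import Comment, Keyword

class YourStyle(Style):
    default_style = ""
    styles = {
        Comment: 'italic #888',
        Keyword: 'bold #005',
    }

# Emit the CSS rules for this style, scoped to the .highlight class.
print(HtmlFormatter(style=YourStyle).get_style_defs('.highlight'))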

    To make the style usable for Pygments, you must

    -
      -
    • either register it as a plugin (see the plugin docs)
    • -
• or drop it into the styles subpackage of your Pygments distribution, one style class per style, where the file name is the style name and the class name is StylenameClass. For example, if your style should be called "mondrian", name the class MondrianStyle, put it into the file mondrian.py, and put this file into the pygments.styles subpackage directory.
    • -
    -
    -
    -

    Style Rules

    -

Here is a small overview of all allowed styles:

    -
    -
    bold
    -
    render text as bold
    -
    nobold
    -
    don't render text as bold (to prevent subtokens being highlighted bold)
    -
    italic
    -
    render text italic
    -
    noitalic
    -
    don't render text as italic
    -
    underline
    -
    render text underlined
    -
    nounderline
    -
    don't render text underlined
    -
    bg:
    -
    transparent background
    -
    bg:#000000
    -
    background color (black)
    -
    border:
    -
    no border
    -
    border:#ffffff
    -
    border color (white)
    -
    #ff0000
    -
    text color (red)
    -
    noinherit
    -
    don't inherit styles from supertoken
    -
    -

Note that there may not be a space between bg: and the color value since the style definition string is split at whitespace. Also, using named colors is not allowed since the supported color names vary for different formatters.

    -

    Furthermore, not all lexers might support every style.

    -
    -
    -

    Builtin Styles

    -

    Pygments ships some builtin styles which are maintained by the Pygments team.

    -

    To get a list of known styles you can use this snippet:

    -
    >>> from pygments.styles import STYLE_MAP
    ->>> STYLE_MAP.keys()
    -['default', 'emacs', 'friendly', 'colorful']
    -
    -
    -
    -

    Getting a list of available styles

    -

    New in Pygments 0.6.

    -

Because a plugin could have registered a style, there is a way to iterate over all styles:

    -
    >>> from pygments.styles import get_all_styles
    ->>> styles = list(get_all_styles())
    -
    -
    - -
    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/build/tokens.html b/vendor/pygments/docs/build/tokens.html deleted file mode 100644 index fe919ec..0000000 --- a/vendor/pygments/docs/build/tokens.html +++ /dev/null @@ -1,541 +0,0 @@ - - - - Builtin Tokens — Pygments - - - - -
    -

    Pygments

    -

    Builtin Tokens


Inside the pygments.token module, there is a special object called Token that is used to create token types.

    -

    You can create a new token type by accessing an attribute of Token:

    -
    >>> from pygments.token import Token
    ->>> Token.String
    -Token.String
    ->>> Token.String is Token.String
    -True
    -
    -

Note that tokens are singletons so you can use the is operator for comparing token types.

    -

    As of Pygments 0.7 you can also use the in operator to perform set tests:

    -
    >>> from pygments.token import Comment
    ->>> Comment.Single in Comment
    -True
    ->>> Comment in Comment.Multi
    -False
    -
    -

This can be useful in filters and if you write lexers on your own without using the base lexers.

    -

    You can also split a token type into a hierarchy, and get the parent of it:

    -
    >>> String.split()
    -[Token, Token.Literal, Token.Literal.String]
    ->>> String.parent
    -Token.Literal
    -
    -

In principle, you can create an unlimited number of token types but nobody can guarantee that a style would define style rules for a token type. Because of that, Pygments proposes some global token types defined in the pygments.token.STANDARD_TYPES dict.

    -

    For some tokens aliases are already defined:

    -
    >>> from pygments.token import String
    ->>> String
    -Token.Literal.String
    -
    -

    Inside the pygments.token module the following aliases are defined:

    - ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Text          Token.Text             for any type of text data
Whitespace    Token.Text.Whitespace  for specially highlighted whitespace
Error         Token.Error            represents lexer errors
Other         Token.Other            special token for data not matched by a parser (e.g. HTML markup in PHP code)
Keyword       Token.Keyword          any kind of keywords
Name          Token.Name             variable/function names
Literal       Token.Literal          any literals
String        Token.Literal.String   string literals
Number        Token.Literal.Number   number literals
Operator      Token.Operator         operators (+, not...)
Punctuation   Token.Punctuation      punctuation ([, (...)
Comment       Token.Comment          any kind of comments
Generic       Token.Generic          generic tokens (have a look at the explanation below)
    -

The Whitespace token type is new in Pygments 0.8. It is used only by the VisibleWhitespaceFilter currently.

    -

Normally you just create token types using the already defined aliases. For each of those token aliases, a number of subtypes exist (excluding the special tokens Token.Text, Token.Error and Token.Other).

    -

The is_token_subtype() function in the pygments.token module can be used to test if a token type is a subtype of another (such as Name.Tag and Name). (This is the same as Name.Tag in Name. The overloaded in operator was newly introduced in Pygments 0.7; the function still exists for backwards compatibility.)

    -
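For example (a short doctest-style sketch):

>>> from pygments.token import Name, is_token_subtype
>>> is_token_subtype(Name.Tag, Name)
True
>>> is_token_subtype(Name, Name.Tag)
False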

With Pygments 0.7, it's also possible to convert strings to token types (for example if you want to supply a token from the command line):

    -
    >>> from pygments.token import String, string_to_tokentype
    ->>> string_to_tokentype("String")
    -Token.Literal.String
    ->>> string_to_tokentype("Token.Literal.String")
    -Token.Literal.String
    ->>> string_to_tokentype(String)
    -Token.Literal.String
    -
    -
    -

    Keyword Tokens

    -
    -
    Keyword
    -
    For any kind of keyword (especially if it doesn't match any of the -subtypes of course).
    -
    Keyword.Constant
    -
    For keywords that are constants (e.g. None in future Python versions).
    -
    Keyword.Declaration
    -
    For keywords used for variable declaration (e.g. var in some programming -languages like JavaScript).
    -
    Keyword.Namespace
    -
    For keywords used for namespace declarations (e.g. import in Python and -Java and package in Java).
    -
    Keyword.Pseudo
    -
    For keywords that aren't really keywords (e.g. None in old Python -versions).
    -
    Keyword.Reserved
    -
    For reserved keywords.
    -
    Keyword.Type
    -
    For builtin types that can't be used as identifiers (e.g. int, -char etc. in C).
    -
    -
    -
    -

    Name Tokens

    -
    -
    Name
    -
    For any name (variable names, function names, classes).
    -
    Name.Attribute
    -
    For all attributes (e.g. in HTML tags).
    -
    Name.Builtin
    -
    Builtin names; names that are available in the global namespace.
    -
    Name.Builtin.Pseudo
    -
    Builtin names that are implicit (e.g. self in Ruby, this in Java).
    -
    Name.Class
    -
    Class names. Because no lexer can know if a name is a class or a function -or something else this token is meant for class declarations.
    -
    Name.Constant
    -
    Token type for constants. In some languages you can recognise a token by the -way it's defined (the value after a const keyword for example). In -other languages constants are uppercase by definition (Ruby).
    -
    Name.Decorator
    -
Token type for decorators. Decorators are syntactic elements in the Python language. Similar syntax elements exist in C# and Java.
    -
    Name.Entity
    -
    Token type for special entities. (e.g. &nbsp; in HTML).
    -
    Name.Exception
    -
    Token type for exception names (e.g. RuntimeError in Python). Some languages -define exceptions in the function signature (Java). You can highlight -the name of that exception using this token then.
    -
    Name.Function
    -
    Token type for function names.
    -
    Name.Label
    -
    Token type for label names (e.g. in languages that support goto).
    -
    Name.Namespace
    -
    Token type for namespaces. (e.g. import paths in Java/Python), names following -the module/namespace keyword in other languages.
    -
    Name.Other
    -
    Other names. Normally unused.
    -
    Name.Tag
    -
    Tag names (in HTML/XML markup or configuration files).
    -
    Name.Variable
    -
    Token type for variables. Some languages have prefixes for variable names -(PHP, Ruby, Perl). You can highlight them using this token.
    -
    Name.Variable.Class
    -
    same as Name.Variable but for class variables (also static variables).
    -
    Name.Variable.Global
    -
    same as Name.Variable but for global variables (used in Ruby, for -example).
    -
    Name.Variable.Instance
    -
    same as Name.Variable but for instance variables.
    -
    -
    -
    -

    Literals

    -
    -
    Literal
    -
    For any literal (if not further defined).
    -
    Literal.Date
    -
    for date literals (e.g. 42d in Boo).
    -
    String
    -
    For any string literal.
    -
    String.Backtick
    -
    Token type for strings enclosed in backticks.
    -
    String.Char
    -
    Token type for single characters (e.g. Java, C).
    -
    String.Doc
    -
    Token type for documentation strings (for example Python).
    -
    String.Double
    -
    Double quoted strings.
    -
    String.Escape
    -
    Token type for escape sequences in strings.
    -
    String.Heredoc
    -
    Token type for "heredoc" strings (e.g. in Ruby or Perl).
    -
    String.Interpol
    -
    Token type for interpolated parts in strings (e.g. #{foo} in Ruby).
    -
    String.Other
    -
    Token type for any other strings (for example %q{foo} string constructs -in Ruby).
    -
    String.Regex
    -
    Token type for regular expression literals (e.g. /foo/ in JavaScript).
    -
    String.Single
    -
    Token type for single quoted strings.
    -
    String.Symbol
    -
    Token type for symbols (e.g. :foo in LISP or Ruby).
    -
    Number
    -
    Token type for any number literal.
    -
    Number.Float
    -
    Token type for float literals (e.g. 42.0).
    -
    Number.Hex
    -
    Token type for hexadecimal number literals (e.g. 0xdeadbeef).
    -
    Number.Integer
    -
    Token type for integer literals (e.g. 42).
    -
    Number.Integer.Long
    -
    Token type for long integer literals (e.g. 42L in Python).
    -
    Number.Oct
    -
    Token type for octal literals.
    -
    -
    -
    -

    Operators

    -
    -
    Operator
    -
    For any punctuation operator (e.g. +, -).
    -
    Operator.Word
    -
    For any operator that is a word (e.g. not).
    -
    -
    -
    -

    Punctuation

    -

    New in Pygments 0.7.

    -
    -
    Punctuation
    -
    For any punctuation which is not an operator (e.g. [, (...)
    -
    -
    -
    -

    Comments

    -
    -
    Comment
    -
    Token type for any comment.
    -
    Comment.Multiline
    -
    Token type for multiline comments.
    -
    Comment.Preproc
    -
    Token type for preprocessor comments (also <?php/<% constructs).
    -
    Comment.Single
    -
    Token type for comments that end at the end of a line (e.g. # foo).
    -
    Comment.Special
    -
    Special data in comments. For example code tags, author and license -information, etc.
    -
    -
    -
    -

    Generic Tokens

    -

Generic tokens are for special lexers like the DiffLexer, which doesn't really highlight a programming language but a patch file.

    -
    -
    Generic
    -
    A generic, unstyled token. Normally you don't use this token type.
    -
    Generic.Deleted
    -
    Marks the token value as deleted.
    -
    Generic.Emph
    -
    Marks the token value as emphasized.
    -
    Generic.Error
    -
    Marks the token value as an error message.
    -
    Generic.Heading
    -
    Marks the token value as headline.
    -
    Generic.Inserted
    -
    Marks the token value as inserted.
    -
    Generic.Output
    -
    Marks the token value as program output (e.g. for python cli lexer).
    -
    Generic.Prompt
    -
    Marks the token value as command prompt (e.g. bash lexer).
    -
    Generic.Strong
    -
    Marks the token value as bold (e.g. for rst lexer).
    -
    Generic.Subheading
    -
    Marks the token value as subheadline.
    -
    Generic.Traceback
    -
    Marks the token value as a part of an error traceback.
    -
    -
    - -
    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/build/unicode.html b/vendor/pygments/docs/build/unicode.html deleted file mode 100644 index ba7784a..0000000 --- a/vendor/pygments/docs/build/unicode.html +++ /dev/null @@ -1,249 +0,0 @@ - - - - Unicode and Encodings — Pygments - - - - -
    -

    Pygments

    -

    Unicode and Encodings


Since Pygments 0.6, all lexers use unicode strings internally. Because of that you might encounter the occasional UnicodeDecodeError if you pass strings with the wrong encoding.

    -

By default, all lexers have their input encoding set to latin1. If you pass a lexer a string object (not unicode), it tries to decode the data using this encoding. You can override the encoding using the encoding lexer option. If you have the chardet library installed and set the encoding to chardet, it will analyse the text and automatically use the encoding it thinks is the right one:

    -
    from pygments.lexers import PythonLexer
    -lexer = PythonLexer(encoding='chardet')
    -
    -

The best way is to pass Pygments unicode objects. In that case you can't get unexpected output.

    -

The formatters now send Unicode objects to the stream if you don't set the output encoding. You can do so by passing the formatters an encoding option:

    -
    from pygments.formatters import HtmlFormatter
    -f = HtmlFormatter(encoding='utf-8')
    -
    -

    You will have to set this option if you have non-ASCII characters in the -source and the output stream does not accept Unicode written to it! -This is the case for all regular files and for terminals.

    -

    Note: The Terminal formatter tries to be smart: if its output stream has an -encoding attribute, and you haven't set the option, it will encode any -Unicode string with this encoding before writing it. This is the case for -sys.stdout, for example. The other formatters don't have that behavior.

    -

    Another note: If you call Pygments via the command line (pygmentize), -encoding is handled differently, see the command line docs.

    -

    New in Pygments 0.7: the formatters now also accept an outencoding option -which will override the encoding option if given. This makes it possible to -use a single options dict with lexers and formatters, and still have different -input and output encodings.
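    As a brief sketch of that input/output encoding split (the byte string below is only an illustrative sample; encoding and outencoding are the option names documented here):

        from pygments import highlight
        from pygments.lexers import PythonLexer
        from pygments.formatters import HtmlFormatter

        # One options dict for both sides: the lexer uses 'encoding' to decode
        # its input, while the formatter lets 'outencoding' override 'encoding'
        # for the bytes it writes out.
        options = {'encoding': 'latin1', 'outencoding': 'utf-8'}
        lexer = PythonLexer(**options)
        formatter = HtmlFormatter(**options)

        html_bytes = highlight(b'print("caf\xe9")', lexer, formatter)
        # html_bytes is UTF-8 encoded output, even though the input was latin1.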

    - - - \ No newline at end of file diff --git a/vendor/pygments/docs/generate.py b/vendor/pygments/docs/generate.py deleted file mode 100755 index f540507..0000000 --- a/vendor/pygments/docs/generate.py +++ /dev/null @@ -1,472 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" - Generate Pygments Documentation - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Generates a bunch of html files containing the documentation. - - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import os -import sys -from datetime import datetime -from cgi import escape - -from docutils import nodes -from docutils.parsers.rst import directives -from docutils.core import publish_parts -from docutils.writers import html4css1 - -from jinja2 import Template - -# try to use the right Pygments to build the docs -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) - -from pygments import highlight, __version__ -from pygments.lexers import get_lexer_by_name -from pygments.formatters import HtmlFormatter - - -LEXERDOC = ''' -`%s` -%s - :Short names: %s - :Filename patterns: %s - :Mimetypes: %s - -''' - -def generate_lexer_docs(): - from pygments.lexers import LEXERS - - out = [] - - modules = {} - moduledocstrings = {} - for classname, data in sorted(LEXERS.iteritems(), key=lambda x: x[0]): - module = data[0] - mod = __import__(module, None, None, [classname]) - cls = getattr(mod, classname) - if not cls.__doc__: - print "Warning: %s does not have a docstring." % classname - modules.setdefault(module, []).append(( - classname, - cls.__doc__, - ', '.join(data[2]) or 'None', - ', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None', - ', '.join(data[4]) or 'None')) - if module not in moduledocstrings: - moduledocstrings[module] = mod.__doc__ - - for module, lexers in sorted(modules.iteritems(), key=lambda x: x[0]): - heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.') - out.append('\n' + heading + '\n' + '-'*len(heading) + '\n') - for data in lexers: - out.append(LEXERDOC % data) - return ''.join(out).decode('utf-8') - -def generate_formatter_docs(): - from pygments.formatters import FORMATTERS - - out = [] - for cls, data in sorted(FORMATTERS.iteritems(), - key=lambda x: x[0].__name__): - heading = cls.__name__ - out.append('`' + heading + '`\n' + '-'*(2+len(heading)) + '\n') - out.append(cls.__doc__) - out.append(''' - :Short names: %s - :Filename patterns: %s - - -''' % (', '.join(data[1]) or 'None', ', '.join(data[2]).replace('*', '\\*') or 'None')) - return ''.join(out).decode('utf-8') - -def generate_filter_docs(): - from pygments.filters import FILTERS - - out = [] - for name, cls in FILTERS.iteritems(): - out.append(''' -`%s` -%s - :Name: %s -''' % (cls.__name__, cls.__doc__, name)) - return ''.join(out).decode('utf-8') - -def generate_changelog(): - fn = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', - 'CHANGES')) - f = file(fn) - result = [] - in_header = False - header = True - for line in f: - if header: - if not in_header and line.strip(): - in_header = True - elif in_header and not line.strip(): - header = False - else: - result.append(line.rstrip()) - f.close() - return '\n'.join(result).decode('utf-8') - -def generate_authors(): - fn = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', - 'AUTHORS')) - f = file(fn) - r = f.read().rstrip().decode('utf-8') - f.close() - return r - -LEXERDOCS = generate_lexer_docs() -FORMATTERDOCS = generate_formatter_docs() -FILTERDOCS = 
generate_filter_docs() -CHANGELOG = generate_changelog() -AUTHORS = generate_authors() - - -PYGMENTS_FORMATTER = HtmlFormatter(style='pastie', cssclass='syntax') - -USAGE = '''\ -Usage: %s [ ...] - -Generate either python or html files out of the documentation. - -Mode can either be python or html.\ -''' % sys.argv[0] - -TEMPLATE = '''\ - - - - {{ title }} — Pygments - - - - -
    -

    Pygments

    -

    {{ title }}

    - {% if file_id != "index" %} - « Back To Index - {% endif %} - {% if toc %} -
    -

    Contents

    -
      - {% for key, value in toc %} -
    • {{ value }}
    • - {% endfor %} -
    -
    - {% endif %} - {{ body }} -
    - - -\ -''' - -STYLESHEET = '''\ -body { - background-color: #f2f2f2; - margin: 0; - padding: 0; - font-family: 'Georgia', serif; - color: #111; -} - -#content { - background-color: white; - padding: 20px; - margin: 20px auto 20px auto; - max-width: 800px; - border: 4px solid #ddd; -} - -h1 { - font-weight: normal; - font-size: 40px; - color: #09839A; -} - -h2 { - font-weight: normal; - font-size: 30px; - color: #C73F00; -} - -h1.heading { - margin: 0 0 30px 0; -} - -h2.subheading { - margin: -30px 0 0 45px; -} - -h3 { - margin-top: 30px; -} - -table.docutils { - border-collapse: collapse; - border: 2px solid #aaa; - margin: 0.5em 1.5em 0.5em 1.5em; -} - -table.docutils td { - padding: 2px; - border: 1px solid #ddd; -} - -p, li, dd, dt, blockquote { - font-size: 15px; - color: #333; -} - -p { - line-height: 150%; - margin-bottom: 0; - margin-top: 10px; -} - -hr { - border-top: 1px solid #ccc; - border-bottom: 0; - border-right: 0; - border-left: 0; - margin-bottom: 10px; - margin-top: 20px; -} - -dl { - margin-left: 10px; -} - -li, dt { - margin-top: 5px; -} - -dt { - font-weight: bold; -} - -th { - text-align: left; -} - -a { - color: #990000; -} - -a:hover { - color: #c73f00; -} - -pre { - background-color: #f9f9f9; - border-top: 1px solid #ccc; - border-bottom: 1px solid #ccc; - padding: 5px; - font-size: 13px; - font-family: Bitstream Vera Sans Mono,monospace; -} - -tt { - font-size: 13px; - font-family: Bitstream Vera Sans Mono,monospace; - color: black; - padding: 1px 2px 1px 2px; - background-color: #f0f0f0; -} - -cite { - /* abusing , it's generated by ReST for `x` */ - font-size: 13px; - font-family: Bitstream Vera Sans Mono,monospace; - font-weight: bold; - font-style: normal; -} - -#backlink { - float: right; - font-size: 11px; - color: #888; -} - -div.toc { - margin: 0 0 10px 0; -} - -div.toc h2 { - font-size: 20px; -} -''' #' - - -def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - lexer = get_lexer_by_name('text') - parsed = highlight(u'\n'.join(content), lexer, PYGMENTS_FORMATTER) - return [nodes.raw('', parsed, format="html")] -pygments_directive.arguments = (1, 0, 1) -pygments_directive.content = 1 -directives.register_directive('sourcecode', pygments_directive) - - -def create_translator(link_style): - class Translator(html4css1.HTMLTranslator): - def visit_reference(self, node): - refuri = node.get('refuri') - if refuri is not None and '/' not in refuri and refuri.endswith('.txt'): - node['refuri'] = link_style(refuri[:-4]) - html4css1.HTMLTranslator.visit_reference(self, node) - return Translator - - -class DocumentationWriter(html4css1.Writer): - - def __init__(self, link_style): - html4css1.Writer.__init__(self) - self.translator_class = create_translator(link_style) - - def translate(self): - html4css1.Writer.translate(self) - # generate table of contents - contents = self.build_contents(self.document) - contents_doc = self.document.copy() - contents_doc.children = contents - contents_visitor = self.translator_class(contents_doc) - contents_doc.walkabout(contents_visitor) - self.parts['toc'] = self._generated_toc - - def build_contents(self, node, level=0): - sections = [] - i = len(node) - 1 - while i >= 0 and isinstance(node[i], nodes.section): - sections.append(node[i]) - i -= 1 - sections.reverse() - toc = [] - for section in sections: - try: - reference = nodes.reference('', '', refid=section['ids'][0], 
*section[0]) - except IndexError: - continue - ref_id = reference['refid'] - text = escape(reference.astext()) - toc.append((ref_id, text)) - - self._generated_toc = [('#%s' % href, caption) for href, caption in toc] - # no further processing - return [] - - -def generate_documentation(data, link_style): - writer = DocumentationWriter(link_style) - data = data.replace('[builtin_lexer_docs]', LEXERDOCS).\ - replace('[builtin_formatter_docs]', FORMATTERDOCS).\ - replace('[builtin_filter_docs]', FILTERDOCS).\ - replace('[changelog]', CHANGELOG).\ - replace('[authors]', AUTHORS) - parts = publish_parts( - data, - writer=writer, - settings_overrides={ - 'initial_header_level': 3, - 'field_name_limit': 50, - } - ) - return { - 'title': parts['title'], - 'body': parts['body'], - 'toc': parts['toc'] - } - - -def handle_python(filename, fp, dst): - now = datetime.now() - title = os.path.basename(filename)[:-4] - content = fp.read() - def urlize(href): - # create links for the pygments webpage - if href == 'index.txt': - return '/docs/' - else: - return '/docs/%s/' % href - parts = generate_documentation(content, urlize) - result = file(os.path.join(dst, title + '.py'), 'w') - result.write('# -*- coding: utf-8 -*-\n') - result.write('"""\n Pygments Documentation - %s\n' % title) - result.write(' %s\n\n' % ('~' * (24 + len(title)))) - result.write(' Generated on: %s\n"""\n\n' % now) - result.write('import datetime\n') - result.write('DATE = %r\n' % now) - result.write('TITLE = %r\n' % parts['title']) - result.write('TOC = %r\n' % parts['toc']) - result.write('BODY = %r\n' % parts['body']) - result.close() - - -def handle_html(filename, fp, dst): - now = datetime.now() - title = os.path.basename(filename)[:-4] - content = fp.read().decode('utf-8') - c = generate_documentation(content, (lambda x: './%s.html' % x)) - result = file(os.path.join(dst, title + '.html'), 'w') - c['style'] = STYLESHEET + PYGMENTS_FORMATTER.get_style_defs('.syntax') - c['generation_date'] = now - c['file_id'] = title - t = Template(TEMPLATE) - result.write(t.render(c).encode('utf-8')) - result.close() - - -def run(handle_file, dst, sources=()): - path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'src')) - if not sources: - sources = [os.path.join(path, fn) for fn in os.listdir(path)] - if not os.path.isdir(dst): - os.makedirs(dst) - print 'Making docs for Pygments %s in %s' % (__version__, dst) - for fn in sources: - if not os.path.isfile(fn): - continue - print 'Processing %s' % fn - f = open(fn) - try: - handle_file(fn, f, dst) - finally: - f.close() - - -def main(mode, dst='build/', *sources): - try: - handler = { - 'html': handle_html, - 'python': handle_python - }[mode] - except KeyError: - print 'Error: unknown mode "%s"' % mode - sys.exit(1) - run(handler, os.path.realpath(dst), sources) - - -if __name__ == '__main__': - if len(sys.argv) == 1: - print USAGE - else: - main(*sys.argv[1:]) diff --git a/vendor/pygments/docs/src/api.txt b/vendor/pygments/docs/src/api.txt deleted file mode 100644 index 4276eea..0000000 --- a/vendor/pygments/docs/src/api.txt +++ /dev/null @@ -1,270 +0,0 @@ -.. -*- mode: rst -*- - -===================== -The full Pygments API -===================== - -This page describes the Pygments API. - -High-level API -============== - -Functions from the `pygments` module: - -def `lex(code, lexer):` - Lex `code` with the `lexer` (must be a `Lexer` instance) - and return an iterable of tokens. Currently, this only calls - `lexer.get_tokens()`. 
- -def `format(tokens, formatter, outfile=None):` - Format a token stream (iterable of tokens) `tokens` with the - `formatter` (must be a `Formatter` instance). The result is - written to `outfile`, or if that is ``None``, returned as a - string. - -def `highlight(code, lexer, formatter, outfile=None):` - This is the most high-level highlighting function. - It combines `lex` and `format` in one function. - - -Functions from `pygments.lexers`: - -def `get_lexer_by_name(alias, **options):` - Return an instance of a `Lexer` subclass that has `alias` in its - aliases list. The lexer is given the `options` at its - instantiation. - - Will raise `pygments.util.ClassNotFound` if no lexer with that alias is - found. - -def `get_lexer_for_filename(fn, **options):` - Return a `Lexer` subclass instance that has a filename pattern - matching `fn`. The lexer is given the `options` at its - instantiation. - - Will raise `pygments.util.ClassNotFound` if no lexer for that filename is - found. - -def `get_lexer_for_mimetype(mime, **options):` - Return a `Lexer` subclass instance that has `mime` in its mimetype - list. The lexer is given the `options` at its instantiation. - - Will raise `pygments.util.ClassNotFound` if not lexer for that mimetype is - found. - -def `guess_lexer(text, **options):` - Return a `Lexer` subclass instance that's guessed from the text - in `text`. For that, the `analyse_text()` method of every known - lexer class is called with the text as argument, and the lexer - which returned the highest value will be instantiated and returned. - - `pygments.util.ClassNotFound` is raised if no lexer thinks it can handle the - content. - -def `guess_lexer_for_filename(filename, text, **options):` - As `guess_lexer()`, but only lexers which have a pattern in `filenames` - or `alias_filenames` that matches `filename` are taken into consideration. - - `pygments.util.ClassNotFound` is raised if no lexer thinks it can handle the - content. - -def `get_all_lexers():` - Return an iterable over all registered lexers, yielding tuples in the - format:: - - (longname, tuple of aliases, tuple of filename patterns, tuple of mimetypes) - - *New in Pygments 0.6.* - - -Functions from `pygments.formatters`: - -def `get_formatter_by_name(alias, **options):` - Return an instance of a `Formatter` subclass that has `alias` in its - aliases list. The formatter is given the `options` at its - instantiation. - - Will raise `pygments.util.ClassNotFound` if no formatter with that alias is - found. - -def `get_formatter_for_filename(fn, **options):` - Return a `Formatter` subclass instance that has a filename pattern - matching `fn`. The formatter is given the `options` at its - instantiation. - - Will raise `pygments.util.ClassNotFound` if no formatter for that filename - is found. - - -Functions from `pygments.styles`: - -def `get_style_by_name(name):` - Return a style class by its short name. The names of the builtin styles - are listed in `pygments.styles.STYLE_MAP`. - - Will raise `pygments.util.ClassNotFound` if no style of that name is found. - -def `get_all_styles():` - Return an iterable over all registered styles, yielding their names. - - *New in Pygments 0.6.* - - -Lexers -====== - -A lexer (derived from `pygments.lexer.Lexer`) has the following functions: - -def `__init__(self, **options):` - The constructor. Takes a \*\*keywords dictionary of options. 
- Every subclass must first process its own options and then call - the `Lexer` constructor, since it processes the `stripnl`, - `stripall` and `tabsize` options. - - An example looks like this: - - .. sourcecode:: python - - def __init__(self, **options): - self.compress = options.get('compress', '') - Lexer.__init__(self, **options) - - As these options must all be specifiable as strings (due to the - command line usage), there are various utility functions - available to help with that, see `Option processing`_. - -def `get_tokens(self, text):` - This method is the basic interface of a lexer. It is called by - the `highlight()` function. It must process the text and return an - iterable of ``(tokentype, value)`` pairs from `text`. - - Normally, you don't need to override this method. The default - implementation processes the `stripnl`, `stripall` and `tabsize` - options and then yields all tokens from `get_tokens_unprocessed()`, - with the ``index`` dropped. - -def `get_tokens_unprocessed(self, text):` - This method should process the text and return an iterable of - ``(index, tokentype, value)`` tuples where ``index`` is the starting - position of the token within the input text. - - This method must be overridden by subclasses. - -def `analyse_text(text):` - A static method which is called for lexer guessing. It should analyse - the text and return a float in the range from ``0.0`` to ``1.0``. - If it returns ``0.0``, the lexer will not be selected as the most - probable one, if it returns ``1.0``, it will be selected immediately. - -For a list of known tokens have a look at the `Tokens`_ page. - -A lexer also can have the following attributes (in fact, they are mandatory -except `alias_filenames`) that are used by the builtin lookup mechanism. - -`name` - Full name for the lexer, in human-readable form. - -`aliases` - A list of short, unique identifiers that can be used to lookup - the lexer from a list, e.g. using `get_lexer_by_name()`. - -`filenames` - A list of `fnmatch` patterns that match filenames which contain - content for this lexer. The patterns in this list should be unique among - all lexers. - -`alias_filenames` - A list of `fnmatch` patterns that match filenames which may or may not - contain content for this lexer. This list is used by the - `guess_lexer_for_filename()` function, to determine which lexers are - then included in guessing the correct one. That means that e.g. every - lexer for HTML and a template language should include ``\*.html`` in - this list. - -`mimetypes` - A list of MIME types for content that can be lexed with this - lexer. - - -.. _Tokens: tokens.txt - - -Formatters -========== - -A formatter (derived from `pygments.formatter.Formatter`) has the following -functions: - -def `__init__(self, **options):` - As with lexers, this constructor processes options and then must call - the base class `__init__`. - - The `Formatter` class recognizes the options `style`, `full` and - `title`. It is up to the formatter class whether it uses them. - -def `get_style_defs(self, arg=''):` - This method must return statements or declarations suitable to define - the current style for subsequent highlighted text (e.g. CSS classes - in the `HTMLFormatter`). - - The optional argument `arg` can be used to modify the generation and - is formatter dependent (it is standardized because it can be given on - the command line). - - This method is called by the ``-S`` `command-line option`_, the `arg` - is then given by the ``-a`` option. 
- -def `format(self, tokensource, outfile):` - This method must format the tokens from the `tokensource` iterable and - write the formatted version to the file object `outfile`. - - Formatter options can control how exactly the tokens are converted. - -.. _command-line option: cmdline.txt - -A formatter must have the following attributes that are used by the -builtin lookup mechanism. (*New in Pygments 0.7.*) - -`name` - Full name for the formatter, in human-readable form. - -`aliases` - A list of short, unique identifiers that can be used to lookup - the formatter from a list, e.g. using `get_formatter_by_name()`. - -`filenames` - A list of `fnmatch` patterns that match filenames for which this formatter - can produce output. The patterns in this list should be unique among - all formatters. - - -Option processing -================= - -The `pygments.util` module has some utility functions usable for option -processing: - -class `OptionError` - This exception will be raised by all option processing functions if - the type or value of the argument is not correct. - -def `get_bool_opt(options, optname, default=None):` - Interpret the key `optname` from the dictionary `options` - as a boolean and return it. Return `default` if `optname` - is not in `options`. - - The valid string values for ``True`` are ``1``, ``yes``, - ``true`` and ``on``, the ones for ``False`` are ``0``, - ``no``, ``false`` and ``off`` (matched case-insensitively). - -def `get_int_opt(options, optname, default=None):` - As `get_bool_opt`, but interpret the value as an integer. - -def `get_list_opt(options, optname, default=None):` - If the key `optname` from the dictionary `options` is a string, - split it at whitespace and return it. If it is already a list - or a tuple, it is returned as a list. - -def `get_choice_opt(options, optname, allowed, default=None):` - If the key `optname` from the dictionary is not in the sequence - `allowed`, raise an error, otherwise return it. *New in Pygments 0.8.* diff --git a/vendor/pygments/docs/src/authors.txt b/vendor/pygments/docs/src/authors.txt deleted file mode 100644 index c8c532a..0000000 --- a/vendor/pygments/docs/src/authors.txt +++ /dev/null @@ -1,5 +0,0 @@ -======= -Authors -======= - -[authors] diff --git a/vendor/pygments/docs/src/changelog.txt b/vendor/pygments/docs/src/changelog.txt deleted file mode 100644 index 6caf0a3..0000000 --- a/vendor/pygments/docs/src/changelog.txt +++ /dev/null @@ -1,5 +0,0 @@ -========= -Changelog -========= - -[changelog] diff --git a/vendor/pygments/docs/src/index.txt b/vendor/pygments/docs/src/index.txt deleted file mode 100644 index b1e099c..0000000 --- a/vendor/pygments/docs/src/index.txt +++ /dev/null @@ -1,69 +0,0 @@ -.. -*- mode: rst -*- - -======== -Overview -======== - -Welcome to the Pygments documentation. 
- -- Starting with Pygments - - - `Installation `_ - - - `Introduction and Quickstart `_ - - - `Command line interface `_ - -- Builtin components - - - `Lexers `_ - - - `Formatters `_ - - - `Filters `_ - - - `Styles `_ - -- Reference - - - `Unicode and encodings `_ - - - `Builtin tokens `_ - - - `API documentation `_ - -- Hacking for Pygments - - - `Write your own lexer `_ - - - `Write your own formatter `_ - - - `Write your own filter `_ - - - `Register plugins `_ - -- Hints and Tricks - - - `Using Pygments in ReST documents `_ - - - `Using Pygments with MoinMoin `_ - - - `Using Pygments in other contexts `_ - -- About Pygments - - - `Changelog `_ - - - `Authors `_ - - --------------- - -If you find bugs or have suggestions for the documentation, please -look `here`_ for info on how to contact the team. - -You can download an offline version of this documentation from the -`download page`_. - -.. _here: http://pygments.org/contribute/ -.. _download page: http://pygments.org/download/ diff --git a/vendor/pygments/docs/src/installation.txt b/vendor/pygments/docs/src/installation.txt deleted file mode 100644 index 17a9aad..0000000 --- a/vendor/pygments/docs/src/installation.txt +++ /dev/null @@ -1,71 +0,0 @@ -.. -*- mode: rst -*- - -============ -Installation -============ - -Pygments requires at least Python 2.4 to work correctly. Just to clarify: -there *won't* ever be support for Python versions below 2.4. However, there -are no other dependencies. - - -Installing a released version -============================= - -As a Python egg (via easy_install) ----------------------------------- - -You can install the most recent Pygments version using `easy_install`_:: - - sudo easy_install Pygments - -This will install a Pygments egg in your Python installation's site-packages -directory. - - -From the tarball release -------------------------- - -1. Download the most recent tarball from the `download page`_ -2. Unpack the tarball -3. ``sudo python setup.py install`` - -Note that the last command will automatically download and install -`setuptools`_ if you don't already have it installed. This requires a working -internet connection. - -This will install Pygments into your Python installation's site-packages directory. - - -Installing the development version -================================== - -If you want to play around with the code ----------------------------------------- - -1. Install `Mercurial`_ -2. ``hg clone http://bitbucket.org/birkenfeld/pygments-main pygments`` -3. ``cd pygments`` -4. ``ln -s pygments /usr/lib/python2.X/site-packages`` -5. ``ln -s pygmentize /usr/local/bin`` - -As an alternative to steps 4 and 5 you can also do ``python setup.py develop`` -which will install the package via setuptools in development mode. - -.. - If you just want the latest features and use them - ------------------------------------------------- - - :: - - sudo easy_install Pygments==dev - - This will install a Pygments egg containing the latest Subversion trunk code - in your Python installation's site-packages directory. Every time the command - is run, the sources are updated from Subversion. - - -.. _download page: http://pygments.org/download/ -.. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools -.. _easy_install: http://peak.telecommunity.com/DevCenter/EasyInstall -.. 
_Mercurial: http://selenic.com/mercurial/ diff --git a/vendor/pygments/docs/src/lexerdevelopment.txt b/vendor/pygments/docs/src/lexerdevelopment.txt deleted file mode 100644 index 6ffc4b7..0000000 --- a/vendor/pygments/docs/src/lexerdevelopment.txt +++ /dev/null @@ -1,551 +0,0 @@ -.. -*- mode: rst -*- - -==================== -Write your own lexer -==================== - -If a lexer for your favorite language is missing in the Pygments package, you can -easily write your own and extend Pygments. - -All you need can be found inside the `pygments.lexer` module. As you can read in -the `API documentation `_, a lexer is a class that is initialized with -some keyword arguments (the lexer options) and that provides a -`get_tokens_unprocessed()` method which is given a string or unicode object with -the data to parse. - -The `get_tokens_unprocessed()` method must return an iterator or iterable -containing tuples in the form ``(index, token, value)``. Normally you don't need -to do this since there are numerous base lexers you can subclass. - - -RegexLexer -========== - -A very powerful (but quite easy to use) lexer is the `RegexLexer`. This lexer -base class allows you to define lexing rules in terms of *regular expressions* -for different *states*. - -States are groups of regular expressions that are matched against the input -string at the *current position*. If one of these expressions matches, a -corresponding action is performed (normally yielding a token with a specific -type), the current position is set to where the last match ended and the -matching process continues with the first regex of the current state. - -Lexer states are kept in a state stack: each time a new state is entered, the -new state is pushed onto the stack. The most basic lexers (like the -`DiffLexer`) just need one state. - -Each state is defined as a list of tuples in the form (`regex`, `action`, -`new_state`) where the last item is optional. In the most basic form, `action` -is a token type (like `Name.Builtin`). That means: When `regex` matches, emit a -token with the match text and type `tokentype` and push `new_state` on the state -stack. If the new state is ``'#pop'``, the topmost state is popped from the -stack instead. (To pop more than one state, use ``'#pop:2'`` and so on.) -``'#push'`` is a synonym for pushing the current state on the -stack. - -The following example shows the `DiffLexer` from the builtin lexers. Note that -it contains some additional attributes `name`, `aliases` and `filenames` which -aren't required for a lexer. They are used by the builtin lexer lookup -functions. - -.. sourcecode:: python - - from pygments.lexer import RegexLexer - from pygments.token import * - - class DiffLexer(RegexLexer): - name = 'Diff' - aliases = ['diff'] - filenames = ['*.diff'] - - tokens = { - 'root': [ - (r' .*\n', Text), - (r'\+.*\n', Generic.Inserted), - (r'-.*\n', Generic.Deleted), - (r'@.*\n', Generic.Subheading), - (r'Index.*\n', Generic.Heading), - (r'=.*\n', Generic.Heading), - (r'.*\n', Text), - ] - } - -As you can see this lexer only uses one state. When the lexer starts scanning -the text, it first checks if the current character is a space. If this is true -it scans everything until newline and returns the parsed data as `Text` token. - -If this rule doesn't match, it checks if the current char is a plus sign. And -so on. - -If no rule matches at the current position, the current char is emitted as an -`Error` token that indicates a parsing error, and the position is increased by -1. 
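    As a quick hedged sketch of the state machine just described, the builtin DiffLexer can be driven directly (the sample input is made up for illustration, and the exact whitespace tokens may vary slightly between Pygments versions):

        from pygments.lexers import DiffLexer

        sample = "+added line\n-removed line\n context line\n"
        for tokentype, value in DiffLexer().get_tokens(sample):
            print(tokentype, repr(value))
        # prints roughly:
        #   Token.Generic.Inserted '+added line\n'
        #   Token.Generic.Deleted  '-removed line\n'
        #   Token.Text             ' context line\n'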
- - -Regex Flags -=========== - -You can either define regex flags in the regex (``r'(?x)foo bar'``) or by adding -a `flags` attribute to your lexer class. If no attribute is defined, it defaults -to `re.MULTILINE`. For more informations about regular expression flags see the -`regular expressions`_ help page in the python documentation. - -.. _regular expressions: http://docs.python.org/lib/re-syntax.html - - -Scanning multiple tokens at once -================================ - -Here is a more complex lexer that highlights INI files. INI files consist of -sections, comments and key = value pairs: - -.. sourcecode:: python - - from pygments.lexer import RegexLexer, bygroups - from pygments.token import * - - class IniLexer(RegexLexer): - name = 'INI' - aliases = ['ini', 'cfg'] - filenames = ['*.ini', '*.cfg'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r';.*?$', Comment), - (r'\[.*?\]$', Keyword), - (r'(.*?)(\s*)(=)(\s*)(.*?)$', - bygroups(Name.Attribute, Text, Operator, Text, String)) - ] - } - -The lexer first looks for whitespace, comments and section names. And later it -looks for a line that looks like a key, value pair, separated by an ``'='`` -sign, and optional whitespace. - -The `bygroups` helper makes sure that each group is yielded with a different -token type. First the `Name.Attribute` token, then a `Text` token for the -optional whitespace, after that a `Operator` token for the equals sign. Then a -`Text` token for the whitespace again. The rest of the line is returned as -`String`. - -Note that for this to work, every part of the match must be inside a capturing -group (a ``(...)``), and there must not be any nested capturing groups. If you -nevertheless need a group, use a non-capturing group defined using this syntax: -``r'(?:some|words|here)'`` (note the ``?:`` after the beginning parenthesis). - -If you find yourself needing a capturing group inside the regex which -shouldn't be part of the output but is used in the regular expressions for -backreferencing (eg: ``r'(<(foo|bar)>)(.*?)()'``), you can pass `None` -to the bygroups function and it will skip that group will be skipped in the -output. - - -Changing states -=============== - -Many lexers need multiple states to work as expected. For example, some -languages allow multiline comments to be nested. Since this is a recursive -pattern it's impossible to lex just using regular expressions. - -Here is the solution: - -.. sourcecode:: python - - from pygments.lexer import RegexLexer - from pygments.token import * - - class ExampleLexer(RegexLexer): - name = 'Example Lexer with states' - - tokens = { - 'root': [ - (r'[^/]+', Text), - (r'/\*', Comment.Multiline, 'comment'), - (r'//.*?$', Comment.Singleline), - (r'/', Text) - ], - 'comment': [ - (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) - ] - } - -This lexer starts lexing in the ``'root'`` state. It tries to match as much as -possible until it finds a slash (``'/'``). If the next character after the slash -is a star (``'*'``) the `RegexLexer` sends those two characters to the output -stream marked as `Comment.Multiline` and continues parsing with the rules -defined in the ``'comment'`` state. - -If there wasn't a star after the slash, the `RegexLexer` checks if it's a -singleline comment (eg: followed by a second slash). 
If this also wasn't the -case it must be a single slash (the separate regex for a single slash must also -be given, else the slash would be marked as an error token). - -Inside the ``'comment'`` state, we do the same thing again. Scan until the lexer -finds a star or slash. If it's the opening of a multiline comment, push the -``'comment'`` state on the stack and continue scanning, again in the -``'comment'`` state. Else, check if it's the end of the multiline comment. If -yes, pop one state from the stack. - -Note: If you pop from an empty stack you'll get an `IndexError`. (There is an -easy way to prevent this from happening: don't ``'#pop'`` in the root state). - -If the `RegexLexer` encounters a newline that is flagged as an error token, the -stack is emptied and the lexer continues scanning in the ``'root'`` state. This -helps producing error-tolerant highlighting for erroneous input, e.g. when a -single-line string is not closed. - - -Advanced state tricks -===================== - -There are a few more things you can do with states: - -- You can push multiple states onto the stack if you give a tuple instead of a - simple string as the third item in a rule tuple. For example, if you want to - match a comment containing a directive, something like:: - - /* rest of comment */ - - you can use this rule: - - .. sourcecode:: python - - tokens = { - 'root': [ - (r'/\* <', Comment, ('comment', 'directive')), - ... - ], - 'directive': [ - (r'[^>]*', Comment.Directive), - (r'>', Comment, '#pop'), - ], - 'comment': [ - (r'[^*]+', Comment), - (r'\*/', Comment, '#pop'), - (r'\*', Comment), - ] - } - - When this encounters the above sample, first ``'comment'`` and ``'directive'`` - are pushed onto the stack, then the lexer continues in the directive state - until it finds the closing ``>``, then it continues in the comment state until - the closing ``*/``. Then, both states are popped from the stack again and - lexing continues in the root state. - - *New in Pygments 0.9:* The tuple can contain the special ``'#push'`` and - ``'#pop'`` (but not ``'#pop:n'``) directives. - - -- You can include the rules of a state in the definition of another. This is - done by using `include` from `pygments.lexer`: - - .. sourcecode:: python - - from pygments.lexer import RegexLexer, bygroups, include - from pygments.token import * - - class ExampleLexer(RegexLexer): - tokens = { - 'comments': [ - (r'/\*.*?\*/', Comment), - (r'//.*?\n', Comment), - ], - 'root': [ - include('comments'), - (r'(function )(\w+)( {)', - bygroups(Keyword, Name, Keyword), 'function'), - (r'.', Text), - ], - 'function': [ - (r'[^}/]+', Text), - include('comments'), - (r'/', Text), - (r'}', Keyword, '#pop'), - ] - } - - This is a hypothetical lexer for a language that consist of functions and - comments. Because comments can occur at toplevel and in functions, we need - rules for comments in both states. As you can see, the `include` helper saves - repeating rules that occur more than once (in this example, the state - ``'comment'`` will never be entered by the lexer, as it's only there to be - included in ``'root'`` and ``'function'``). - - -- Sometimes, you may want to "combine" a state from existing ones. This is - possible with the `combine` helper from `pygments.lexer`. - - If you, instead of a new state, write ``combined('state1', 'state2')`` as the - third item of a rule tuple, a new anonymous state will be formed from state1 - and state2 and if the rule matches, the lexer will enter this state. 
- - This is not used very often, but can be helpful in some cases, such as the - `PythonLexer`'s string literal processing. - -- If you want your lexer to start lexing in a different state you can modify - the stack by overloading the `get_tokens_unprocessed()` method: - - .. sourcecode:: python - - from pygments.lexer import RegexLexer - - class MyLexer(RegexLexer): - tokens = {...} - - def get_tokens_unprocessed(self, text): - stack = ['root', 'otherstate'] - for item in RegexLexer.get_tokens_unprocessed(text, stack): - yield item - - Some lexers like the `PhpLexer` use this to make the leading ``', Name.Tag), - ], - 'script-content': [ - (r'(.+?)(<\s*/\s*script\s*>)', - bygroups(using(JavascriptLexer), Name.Tag), - '#pop'), - ] - } - -Here the content of a ```` end tag is processed by the `JavascriptLexer`, while the -end tag is yielded as a normal token with the `Name.Tag` type. - -As an additional goodie, if the lexer class is replaced by `this` (imported from -`pygments.lexer`), the "other" lexer will be the current one (because you cannot -refer to the current class within the code that runs at class definition time). - -Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule. -Here, two states are pushed onto the state stack, ``'script-content'`` and -``'tag'``. That means that first ``'tag'`` is processed, which will parse -attributes and the closing ``>``, then the ``'tag'`` state is popped and the -next state on top of the stack will be ``'script-content'``. - -The `using()` helper has a special keyword argument, `state`, which works as -follows: if given, the lexer to use initially is not in the ``"root"`` state, -but in the state given by this argument. This *only* works with a `RegexLexer`. - -Any other keywords arguments passed to `using()` are added to the keyword -arguments used to create the lexer. - - -Delegating Lexer -================ - -Another approach for nested lexers is the `DelegatingLexer` which is for -example used for the template engine lexers. It takes two lexers as -arguments on initialisation: a `root_lexer` and a `language_lexer`. - -The input is processed as follows: First, the whole text is lexed with the -`language_lexer`. All tokens yielded with a type of ``Other`` are then -concatenated and given to the `root_lexer`. The language tokens of the -`language_lexer` are then inserted into the `root_lexer`'s token stream -at the appropriate positions. - -.. sourcecode:: python - - from pygments.lexer import DelegatingLexer - from pygments.lexers.web import HtmlLexer, PhpLexer - - class HtmlPhpLexer(DelegatingLexer): - def __init__(self, **options): - super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options) - -This procedure ensures that e.g. HTML with template tags in it is highlighted -correctly even if the template tags are put into HTML tags or attributes. - -If you want to change the needle token ``Other`` to something else, you can -give the lexer another token type as the third parameter: - -.. sourcecode:: python - - DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options) - - -Callbacks -========= - -Sometimes the grammar of a language is so complex that a lexer would be unable -to parse it just by using regular expressions and stacks. - -For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead -of token types (`bygroups` and `using` are nothing else but preimplemented -callbacks). 
The callback must be a function taking two arguments: - -* the lexer itself -* the match object for the last matched rule - -The callback must then return an iterable of (or simply yield) ``(index, -tokentype, value)`` tuples, which are then just passed through by -`get_tokens_unprocessed()`. The ``index`` here is the position of the token in -the input string, ``tokentype`` is the normal token type (like `Name.Builtin`), -and ``value`` the associated part of the input string. - -You can see an example here: - -.. sourcecode:: python - - from pygments.lexer import RegexLexer - from pygments.token import Generic - - class HypotheticLexer(RegexLexer): - - def headline_callback(lexer, match): - equal_signs = match.group(1) - text = match.group(2) - yield match.start(), Generic.Headline, equal_signs + text + equal_signs - - tokens = { - 'root': [ - (r'(=+)(.*?)(\1)', headline_callback) - ] - } - -If the regex for the `headline_callback` matches, the function is called with the -match object. Note that after the callback is done, processing continues -normally, that is, after the end of the previous match. The callback has no -possibility to influence the position. - -There are not really any simple examples for lexer callbacks, but you can see -them in action e.g. in the `compiled.py`_ source code in the `CLexer` and -`JavaLexer` classes. - -.. _compiled.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/compiled.py - - -The ExtendedRegexLexer class -============================ - -The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for -the funky syntax rules of some languages that will go unnamed, such as Ruby. - -But fear not; even then you don't have to abandon the regular expression -approach. For Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`. -All features known from RegexLexers are available here too, and the tokens are -specified in exactly the same way, *except* for one detail: - -The `get_tokens_unprocessed()` method holds its internal state data not as local -variables, but in an instance of the `pygments.lexer.LexerContext` class, and -that instance is passed to callbacks as a third argument. This means that you -can modify the lexer state in callbacks. - -The `LexerContext` class has the following members: - -* `text` -- the input text -* `pos` -- the current starting position that is used for matching regexes -* `stack` -- a list containing the state stack -* `end` -- the maximum position to which regexes are matched, this defaults to - the length of `text` - -Additionally, the `get_tokens_unprocessed()` method can be given a -`LexerContext` instead of a string and will then process this context instead of -creating a new one for the string argument. - -Note that because you can set the current position to anything in the callback, -it won't be automatically be set by the caller after the callback is finished. -For example, this is how the hypothetical lexer above would be written with the -`ExtendedRegexLexer`: - -.. sourcecode:: python - - from pygments.lexer import ExtendedRegexLexer - from pygments.token import Generic - - class ExHypotheticLexer(ExtendedRegexLexer): - - def headline_callback(lexer, match, ctx): - equal_signs = match.group(1) - text = match.group(2) - yield match.start(), Generic.Headline, equal_signs + text + equal_signs - ctx.pos = match.end() - - tokens = { - 'root': [ - (r'(=+)(.*?)(\1)', headline_callback) - ] - } - -This might sound confusing (and it can really be). 
But it is needed, and for an -example look at the Ruby lexer in `agile.py`_. - -.. _agile.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/agile.py - - -Filtering Token Streams -======================= - -Some languages ship a lot of builtin functions (for example PHP). The total -amount of those functions differs from system to system because not everybody -has every extension installed. In the case of PHP there are over 3000 builtin -functions. That's an incredible huge amount of functions, much more than you -can put into a regular expression. - -But because only `Name` tokens can be function names it's solvable by overriding -the ``get_tokens_unprocessed()`` method. The following lexer subclasses the -`PythonLexer` so that it highlights some additional names as pseudo keywords: - -.. sourcecode:: python - - from pygments.lexers.agile import PythonLexer - from pygments.token import Name, Keyword - - class MyPythonLexer(PythonLexer): - EXTRA_KEYWORDS = ['foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs'] - - def get_tokens_unprocessed(self, text): - for index, token, value in PythonLexer.get_tokens_unprocessed(self, text): - if token is Name and value in self.EXTRA_KEYWORDS: - yield index, Keyword.Pseudo, value - else: - yield index, token, value - -The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions. - -**Note** Do not confuse this with the `filter`_ system. - -.. _filter: filters.txt diff --git a/vendor/pygments/external/autopygmentize b/vendor/pygments/external/autopygmentize index 85c8dfd..d2f969a 100755 --- a/vendor/pygments/external/autopygmentize +++ b/vendor/pygments/external/autopygmentize @@ -1,64 +1,83 @@ -#!/bin/sh +#!/bin/bash # Best effort auto-pygmentization with transparent decompression -# (c) Reuben Thomas 2012-2013 +# by Reuben Thomas 2008-2015 # This program is in the public domain. # Strategy: first see if pygmentize can find a lexer; if not, ask file; if that finds nothing, fail -# Set the environment variable PYGMENTIZE_OPTS to configure pygments. +# Set the environment variable PYGMENTIZE_OPTS or pass options before the file path to configure pygments. 
# This program can be used as a .lessfilter for the less pager to auto-color less's output -lexer=`pygmentize -N "$1"` -if [ "$lexer" = "text" ]; then - file_common_opts="--brief --dereference --uncompress" +file="${!#}" # last argument +options=${@:1:$(($#-1))} # handle others args as options to pass to pygmentize - unset lexer - case `file --mime-type $file_common_opts "$1"` in - application/xml|image/svg+xml) lexer=xml;; - text/html) lexer=html;; - text/troff) lexer=nroff;; - text/x-asm) lexer=nasm;; - text/x-awk) lexer=awk;; - text/x-c) lexer=c;; - text/x-c++) lexer=cpp;; - text/x-diff) lexer=diff;; - text/x-fortran) lexer=fortran;; - text/x-gawk) lexer=gawk;; - text/x-java) lexer=java;; - text/x-lisp) lexer=common-lisp;; - text/x-lua) lexer=lua;; - text/x-makefile) lexer=make;; - text/x-msdos-batch) lexer=bat;; - text/x-nawk) lexer=nawk;; - text/x-pascal) lexer=pascal;; - text/x-perl) lexer=perl;; - text/x-php) lexer=php;; - text/x-po) lexer=po;; - text/x-python) lexer=python;; - text/x-ruby) lexer=ruby;; - text/x-shellscript) lexer=sh;; - text/x-tcl) lexer=tcl;; - text/x-tex|text/x-texinfo) lexer=latex;; # FIXME: texinfo really needs its own lexer - - # Types that file outputs which pygmentize didn't support as of file 5.11, pygments 1.6rc1 - # text/calendar - # text/PGP - # text/rtf - # text/texmacs - # text/x-bcpl - # text/x-info - # text/x-m4 - # text/x-vcard - # text/x-xmcd - esac +file_common_opts="--brief --dereference" + +lexer=$(pygmentize -N "$file") +if [[ "$lexer" == text ]]; then + unset lexer + case $(file --mime-type --uncompress $file_common_opts "$file") in + application/xml|image/svg+xml) lexer=xml;; + application/javascript) lexer=javascript;; + text/html) lexer=html;; + text/troff) lexer=nroff;; + text/x-asm) lexer=nasm;; + text/x-awk) lexer=awk;; + text/x-c) lexer=c;; + text/x-c++) lexer=cpp;; + text/x-diff) lexer=diff;; + text/x-fortran) lexer=fortran;; + text/x-gawk) lexer=gawk;; + text/x-java) lexer=java;; + text/x-lisp) lexer=common-lisp;; + text/x-lua) lexer=lua;; + text/x-makefile) lexer=make;; + text/x-msdos-batch) lexer=bat;; + text/x-nawk) lexer=nawk;; + text/x-pascal) lexer=pascal;; + text/x-perl) lexer=perl;; + text/x-php) lexer=php;; + text/x-po) lexer=po;; + text/x-python) lexer=python;; + text/x-ruby) lexer=ruby;; + text/x-shellscript) lexer=sh;; + text/x-tcl) lexer=tcl;; + text/x-tex|text/x-texinfo) lexer=latex;; # FIXME: texinfo really needs its own lexer + + # Types that file outputs which pygmentize didn't support as of file 5.20, pygments 2.0 + # text/calendar + # text/inf + # text/PGP + # text/rtf + # text/texmacs + # text/vnd.graphviz + # text/x-bcpl + # text/x-info + # text/x-m4 + # text/x-vcard + # text/x-xmcd + + text/plain) # special filenames. 
TODO: insert more + case $(basename "$file") in + .zshrc) lexer=sh;; + esac + ;; + esac fi -encoding=`file --brief --mime-encoding $file_common_opts "$1"` +encoding=$(file --mime-encoding --uncompress $file_common_opts "$file") +if [[ $encoding == "us-asciibinarybinary" ]]; then + encoding="us-ascii" +fi -if [ -n "$lexer" ]; then - # FIXME: Specify input encoding rather than output encoding https://bitbucket.org/birkenfeld/pygments-main/issue/800 - zcat "$1" | pygmentize -O encoding=$encoding,outencoding=UTF-8 $PYGMENTIZE_OPTS -l $lexer - exit 0 +if [[ -n "$lexer" ]]; then + concat=cat + case $(file $file_common_opts --mime-type "$file") in + application/x-gzip) concat=zcat;; + application/x-bzip2) concat=bzcat;; + application/x-xz) concat=xzcat;; + esac + exec $concat "$file" | pygmentize -O inencoding=$encoding $PYGMENTIZE_OPTS $options -l $lexer fi exit 1 diff --git a/vendor/pygments/external/lasso-builtins-generator-9.lasso b/vendor/pygments/external/lasso-builtins-generator-9.lasso index bea8b2a..0156299 100755 --- a/vendor/pygments/external/lasso-builtins-generator-9.lasso +++ b/vendor/pygments/external/lasso-builtins-generator-9.lasso @@ -4,14 +4,20 @@ Builtins Generator for Lasso 9 This is the shell script that was used to extract Lasso 9's built-in keywords - and generate most of the _lassobuiltins.py file. When run, it creates a file - named "lassobuiltins-9.py" containing the types, traits, and methods of the - currently-installed version of Lasso 9. + and generate most of the _lasso_builtins.py file. When run, it creates a file + containing the types, traits, methods, and members of the currently-installed + version of Lasso 9. - A partial list of keywords in Lasso 8 can be generated with this code: + A list of tags in Lasso 8 can be generated with this code: insert(string_removeleading(#i, -pattern='_global_')); /iterate; @@ -23,98 +29,133 @@ */ output("This output statement is required for a complete list of methods.") -local(f) = file("lassobuiltins-9.py") +local(f) = file("_lasso_builtins-9.py") #f->doWithClose => { -#f->openWrite +#f->openTruncate #f->writeString('# -*- coding: utf-8 -*- """ - pygments.lexers._lassobuiltins - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + pygments.lexers._lasso_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Built-in Lasso types, traits, and methods. + Built-in Lasso types, traits, methods, and members. + + :copyright: Copyright 2006-'+date->year+' by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
""" ') -lcapi_loadModules +// Load and register contents of $LASSO9_MASTER_HOME/LassoModules/ +database_initialize // Load all of the libraries from builtins and lassoserver // This forces all possible available types and methods to be registered local(srcs = - tie( - dir(sys_masterHomePath + 'LassoLibraries/builtins/')->eachFilePath, - dir(sys_masterHomePath + 'LassoLibraries/lassoserver/')->eachFilePath - ) + (: + dir(sys_masterHomePath + '/LassoLibraries/builtins/')->eachFilePath, + dir(sys_masterHomePath + '/LassoLibraries/lassoserver/')->eachFilePath + ) ) -with topLevelDir in #srcs -where !#topLevelDir->lastComponent->beginsWith('.') +with topLevelDir in delve(#srcs) +where not #topLevelDir->lastComponent->beginsWith('.') do protect => { - handle_error => { + handle_error => { stdoutnl('Unable to load: ' + #topLevelDir + ' ' + error_msg) } library_thread_loader->loadLibrary(#topLevelDir) stdoutnl('Loaded: ' + #topLevelDir) } +email_initialize +log_initialize +session_initialize + local( - typesList = list(), - traitsList = list(), - methodsList = list() + typesList = set(), + traitsList = set(), + unboundMethodsList = set(), + memberMethodsList = set() ) -// unbound methods -with method in sys_listUnboundMethods -where !#method->methodName->asString->endsWith('=') -where #method->methodName->asString->isalpha(1) -where #methodsList !>> #method->methodName->asString -do #methodsList->insert(#method->methodName->asString) - -// traits -with trait in sys_listTraits -where !#trait->asString->beginsWith('$') -where #traitsList !>> #trait->asString -do { - #traitsList->insert(#trait->asString) - with tmethod in tie(#trait->getType->provides, #trait->getType->requires) - where !#tmethod->methodName->asString->endsWith('=') - where #tmethod->methodName->asString->isalpha(1) - where #methodsList !>> #tmethod->methodName->asString - do #methodsList->insert(#tmethod->methodName->asString) -} - // types with type in sys_listTypes -where #typesList !>> #type->asString +where not #type->asString->endsWith('$') // skip threads do { - #typesList->insert(#type->asString) - with tmethod in #type->getType->listMethods - where !#tmethod->methodName->asString->endsWith('=') - where #tmethod->methodName->asString->isalpha(1) - where #methodsList !>> #tmethod->methodName->asString - do #methodsList->insert(#tmethod->methodName->asString) + #typesList->insert(#type) } -#f->writeString("BUILTINS = { - 'Types': [ -") -with t in #typesList -do #f->writeString(" '"+string_lowercase(#t)+"',\n") +// traits +with trait in sys_listTraits +where not #trait->asString->beginsWith('$') // skip combined traits +do { + #traitsList->insert(#trait) +} -#f->writeString(" ], - 'Traits': [ -") -with t in #traitsList -do #f->writeString(" '"+string_lowercase(#t)+"',\n") +// member methods +with type in #typesList +do { + with method in #type->getType->listMethods + where #method->typeName == #type // skip inherited methods + let name = #method->methodName + where not #name->asString->endsWith('=') // skip setter methods + where #name->asString->isAlpha(1) // skip unpublished methods + do { + #memberMethodsList->insert(#name) + } +} +with trait in #traitsList +do { + with method in #trait->getType->provides + where #method->typeName == #trait // skip inherited methods + let name = #method->methodName + where not #name->asString->endsWith('=') // skip setter methods + where #name->asString->isAlpha(1) // skip unpublished methods + do { + #memberMethodsList->insert(#name) + } +} -#f->writeString(" ], - 'Methods': [ -") -with t in 
#methodsList -do #f->writeString(" '"+string_lowercase(#t)+"',\n") +// unbound methods +with method in sys_listUnboundMethods +let name = #method->methodName +where not #name->asString->endsWith('=') // skip setter methods +where #name->asString->isAlpha(1) // skip unpublished methods +where #typesList !>> #name +where #traitsList !>> #name +do { + #unboundMethodsList->insert(#name) +} -#f->writeString(" ], +// write to file +with i in (: + pair(#typesList, "BUILTINS = { + 'Types': ( +"), + pair(#traitsList, " ), + 'Traits': ( +"), + pair(#unboundMethodsList, " ), + 'Unbound Methods': ( +"), + pair(#memberMethodsList, " ) +} +MEMBERS = { + 'Member Methods': ( +") +) +do { + #f->writeString(#i->second) + with t in (#i->first) + let ts = #t->asString + order by #ts + do { + #f->writeString(" '"+#ts->lowercase&asString+"',\n") + } +} + +#f->writeString(" ) } ") diff --git a/vendor/pygments/external/markdown-processor.py b/vendor/pygments/external/markdown-processor.py index 12e6468..a3e178e 100644 --- a/vendor/pygments/external/markdown-processor.py +++ b/vendor/pygments/external/markdown-processor.py @@ -6,14 +6,9 @@ This fragment is a Markdown_ preprocessor that renders source code to HTML via Pygments. To use it, invoke Markdown like so:: - from markdown import Markdown + import markdown - md = Markdown() - md.textPreprocessors.insert(0, CodeBlockPreprocessor()) - html = md.convert(someText) - - markdown is then a callable that can be passed to the context of - a template and used in that template, for example. + html = markdown.markdown(someText, extensions=[CodeBlockExtension()]) This uses CSS classes by default, so use ``pygmentize -S -f html > pygments.css`` @@ -25,9 +20,9 @@ some code [/sourcecode] - .. _Markdown: http://www.freewisdom.org/projects/python-markdown/ + .. _Markdown: https://pypi.python.org/pypi/Markdown - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -40,17 +35,17 @@ INLINESTYLES = False import re -from markdown import TextPreprocessor +from markdown.preprocessors import Preprocessor +from markdown.extensions import Extension from pygments import highlight from pygments.formatters import HtmlFormatter from pygments.lexers import get_lexer_by_name, TextLexer -class CodeBlockPreprocessor(TextPreprocessor): +class CodeBlockPreprocessor(Preprocessor): - pattern = re.compile( - r'\[sourcecode:(.+?)\](.+?)\[/sourcecode\]', re.S) + pattern = re.compile(r'\[sourcecode:(.+?)\](.+?)\[/sourcecode\]', re.S) formatter = HtmlFormatter(noclasses=INLINESTYLES) @@ -63,5 +58,10 @@ class CodeBlockPreprocessor(TextPreprocessor): code = highlight(m.group(2), lexer, self.formatter) code = code.replace('\n\n', '\n \n').replace('\n', '
    ') return '\n\n
    %s
    \n\n' % code - return self.pattern.sub( - repl, lines) + joined_lines = "\n".join(lines) + joined_lines = self.pattern.sub(repl, joined_lines) + return joined_lines.split("\n") + +class CodeBlockExtension(Extension): + def extendMarkdown(self, md, md_globals): + md.preprocessors.add('CodeBlockPreprocessor', CodeBlockPreprocessor(), '_begin') diff --git a/vendor/pygments/external/moin-parser.py b/vendor/pygments/external/moin-parser.py index 6544da1..9cb082a 100644 --- a/vendor/pygments/external/moin-parser.py +++ b/vendor/pygments/external/moin-parser.py @@ -31,7 +31,7 @@ If you do not want to do that and are willing to accept larger HTML output, you can set the INLINESTYLES option below to True. - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/vendor/pygments/external/rst-directive-old.py b/vendor/pygments/external/rst-directive-old.py deleted file mode 100644 index a074536..0000000 --- a/vendor/pygments/external/rst-directive-old.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- -""" - The Pygments reStructuredText directive - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - This fragment is a Docutils_ 0.4 directive that renders source code - (to HTML only, currently) via Pygments. - - To use it, adjust the options below and copy the code into a module - that you import on initialization. The code then automatically - registers a ``sourcecode`` directive that you can use instead of - normal code blocks like this:: - - .. sourcecode:: python - - My code goes here. - - If you want to have different code styles, e.g. one with line numbers - and one without, add formatters with their names in the VARIANTS dict - below. You can invoke them instead of the DEFAULT one by using a - directive option:: - - .. sourcecode:: python - :linenos: - - My code goes here. - - Look at the `directive documentation`_ to get all the gory details. - - .. _Docutils: http://docutils.sf.net/ - .. _directive documentation: - http://docutils.sourceforge.net/docs/howto/rst-directives.html - - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -# Options -# ~~~~~~~ - -# Set to True if you want inline CSS styles instead of classes -INLINESTYLES = False - -from pygments.formatters import HtmlFormatter - -# The default formatter -DEFAULT = HtmlFormatter(noclasses=INLINESTYLES) - -# Add name -> formatter pairs for every variant you want to use -VARIANTS = { - # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), -} - - -from docutils import nodes -from docutils.parsers.rst import directives - -from pygments import highlight -from pygments.lexers import get_lexer_by_name, TextLexer - -def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - use the text one instead of an exception - lexer = TextLexer() - # take an arbitrary option if more than one is given - formatter = options and VARIANTS[options.keys()[0]] or DEFAULT - parsed = highlight(u'\n'.join(content), lexer, formatter) - return [nodes.raw('', parsed, format='html')] - -pygments_directive.arguments = (1, 0, 1) -pygments_directive.content = 1 -pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) - -directives.register_directive('sourcecode', pygments_directive) diff --git a/vendor/pygments/external/rst-directive.py b/vendor/pygments/external/rst-directive.py index 5c04038..f81677b 100644 --- a/vendor/pygments/external/rst-directive.py +++ b/vendor/pygments/external/rst-directive.py @@ -31,7 +31,7 @@ .. _directive documentation: http://docutils.sourceforge.net/docs/howto/rst-directives.html - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -75,9 +75,8 @@ class Pygments(Directive): # no lexer found - use the text one instead of an exception lexer = TextLexer() # take an arbitrary option if more than one is given - formatter = self.options and VARIANTS[self.options.keys()[0]] or DEFAULT + formatter = self.options and VARIANTS[list(self.options)[0]] or DEFAULT parsed = highlight(u'\n'.join(self.content), lexer, formatter) return [nodes.raw('', parsed, format='html')] directives.register_directive('sourcecode', Pygments) - diff --git a/vendor/pygments/ez_setup.py b/vendor/pygments/ez_setup.py old mode 100755 new mode 100644 index e33744b..9dc2c87 --- a/vendor/pygments/ez_setup.py +++ b/vendor/pygments/ez_setup.py @@ -13,264 +13,370 @@ the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. 
""" +import os +import shutil import sys -DEFAULT_VERSION = "0.6c9" -DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3] +import tempfile +import tarfile +import optparse +import subprocess +import platform -md5_data = { - 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', - 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', - 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', - 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', - 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', - 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', - 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', - 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', - 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', - 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', - 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', - 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', - 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', - 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', - 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', - 'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f', - 'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2', - 'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc', - 'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167', - 'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64', - 'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d', - 'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20', - 'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab', - 'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53', - 'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2', - 'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e', - 'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372', - 'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902', - 'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de', - 'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b', - 'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03', - 'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a', - 'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6', - 'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a', -} +from distutils import log -import sys, os -try: from hashlib import md5 -except ImportError: from md5 import md5 +try: + from site import USER_SITE +except ImportError: + USER_SITE = None -def _validate_md5(egg_name, data): - if egg_name in md5_data: - digest = md5(data).hexdigest() - if digest != md5_data[egg_name]: - print >>sys.stderr, ( - "md5 validation of %s failed! 
(Possible download problem?)" - % egg_name - ) - sys.exit(2) - return data +DEFAULT_VERSION = "1.4.2" +DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" -def use_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, - download_delay=15 -): - """Automatically find/download setuptools and make it available on sys.path +def _python_cmd(*args): + args = (sys.executable,) + args + return subprocess.call(args) == 0 - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end with - a '/'). `to_dir` is the directory where setuptools will be downloaded, if - it is not already available. If `download_delay` is specified, it should - be the number of seconds that will be paused before initiating a download, - should one be required. If an older version of setuptools is installed, - this routine will print a message to ``sys.stderr`` and raise SystemExit in - an attempt to abort the calling script. - """ - was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules - def do_download(): - egg = download_setuptools(version, download_base, to_dir, download_delay) - sys.path.insert(0, egg) - import setuptools; setuptools.bootstrap_install_from = egg +def _check_call_py24(cmd, *args, **kwargs): + res = subprocess.call(cmd, *args, **kwargs) + class CalledProcessError(Exception): + pass + if not res == 0: + msg = "Command '%s' return non-zero exit status %d" % (cmd, res) + raise CalledProcessError(msg) +vars(subprocess).setdefault('check_call', _check_call_py24) + +def _install(tarball, install_args=()): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # installing + log.warn('Installing Setuptools') + if not _python_cmd('setup.py', 'install', *install_args): + log.warn('Something went wrong during the installation.') + log.warn('See the error message above.') + # exitcode will be 2 + return 2 + finally: + os.chdir(old_wd) + shutil.rmtree(tmpdir) + + +def _build_egg(egg, tarball, to_dir): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # building an egg + log.warn('Building a Setuptools egg in %s', to_dir) + _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) + + finally: + os.chdir(old_wd) + shutil.rmtree(tmpdir) + # returning the result + log.warn(egg) + if not os.path.exists(egg): + raise IOError('Could not build the egg.') + + +def _do_download(version, download_base, to_dir, download_delay): + egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' + % (version, sys.version_info[0], sys.version_info[1])) + if not os.path.exists(egg): + tarball = download_setuptools(version, download_base, + to_dir, download_delay) + _build_egg(egg, tarball, to_dir) + sys.path.insert(0, egg) + + # Remove previously-imported pkg_resources if present (see + # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). 
+ if 'pkg_resources' in sys.modules: + del sys.modules['pkg_resources'] + + import setuptools + setuptools.bootstrap_install_from = egg + + +def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, download_delay=15): + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + was_imported = 'pkg_resources' in sys.modules or \ + 'setuptools' in sys.modules try: import pkg_resources except ImportError: - return do_download() + return _do_download(version, download_base, to_dir, download_delay) try: - pkg_resources.require("setuptools>="+version); return - except pkg_resources.VersionConflict, e: + pkg_resources.require("setuptools>=" + version) + return + except pkg_resources.VersionConflict: + e = sys.exc_info()[1] if was_imported: - print >>sys.stderr, ( - "The required version of setuptools (>=%s) is not available, and\n" - "can't be installed while this script is running. Please install\n" - " a more recent version first, using 'easy_install -U setuptools'." - "\n\n(Currently using %r)" - ) % (version, e.args[0]) + sys.stderr.write( + "The required version of setuptools (>=%s) is not available,\n" + "and can't be installed while this script is running. Please\n" + "install a more recent version first, using\n" + "'easy_install -U setuptools'." + "\n\n(Currently using %r)\n" % (version, e.args[0])) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok - return do_download() + return _do_download(version, download_base, to_dir, + download_delay) except pkg_resources.DistributionNotFound: - return do_download() + return _do_download(version, download_base, to_dir, + download_delay) -def download_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, - delay = 15 -): +def _clean_check(cmd, target): + """ + Run the command to download target. If the command fails, clean up before + re-raising the error. + """ + try: + subprocess.check_call(cmd) + except subprocess.CalledProcessError: + if os.access(target, os.F_OK): + os.unlink(target) + raise + +def download_file_powershell(url, target): + """ + Download the file at url to target using Powershell (which will validate + trust). Raise an exception if the command cannot complete. 
+ """ + target = os.path.abspath(target) + cmd = [ + 'powershell', + '-Command', + "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), + ] + _clean_check(cmd, target) + +def has_powershell(): + if platform.system() != 'Windows': + return False + cmd = ['powershell', '-Command', 'echo test'] + devnull = open(os.path.devnull, 'wb') + try: + try: + subprocess.check_call(cmd, stdout=devnull, stderr=devnull) + except: + return False + finally: + devnull.close() + return True + +download_file_powershell.viable = has_powershell + +def download_file_curl(url, target): + cmd = ['curl', url, '--silent', '--output', target] + _clean_check(cmd, target) + +def has_curl(): + cmd = ['curl', '--version'] + devnull = open(os.path.devnull, 'wb') + try: + try: + subprocess.check_call(cmd, stdout=devnull, stderr=devnull) + except: + return False + finally: + devnull.close() + return True + +download_file_curl.viable = has_curl + +def download_file_wget(url, target): + cmd = ['wget', url, '--quiet', '--output-document', target] + _clean_check(cmd, target) + +def has_wget(): + cmd = ['wget', '--version'] + devnull = open(os.path.devnull, 'wb') + try: + try: + subprocess.check_call(cmd, stdout=devnull, stderr=devnull) + except: + return False + finally: + devnull.close() + return True + +download_file_wget.viable = has_wget + +def download_file_insecure(url, target): + """ + Use Python to download the file, even though it cannot authenticate the + connection. + """ + try: + from urllib.request import urlopen + except ImportError: + from urllib2 import urlopen + src = dst = None + try: + src = urlopen(url) + # Read/write all in one block, so we don't create a corrupt file + # if the download is interrupted. + data = src.read() + dst = open(target, "wb") + dst.write(data) + finally: + if src: + src.close() + if dst: + dst.close() + +download_file_insecure.viable = lambda: True + +def get_best_downloader(): + downloaders = [ + download_file_powershell, + download_file_curl, + download_file_wget, + download_file_insecure, + ] + + for dl in downloaders: + if dl.viable(): + return dl + +def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, delay=15, + downloader_factory=get_best_downloader): """Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download attempt. + `delay` is the number of seconds to pause before an actual download + attempt. + + ``downloader_factory`` should be a function taking no arguments and + returning a function for downloading a URL to a target. """ - import urllib2, shutil - egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3]) - url = download_base + egg_name - saveto = os.path.join(to_dir, egg_name) - src = dst = None + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + tgz_name = "setuptools-%s.tar.gz" % version + url = download_base + tgz_name + saveto = os.path.join(to_dir, tgz_name) if not os.path.exists(saveto): # Avoid repeated downloads - try: - from distutils import log - if delay: - log.warn(""" ---------------------------------------------------------------------------- -This script requires setuptools version %s to run (even to display -help). 
I will attempt to download it for you (from -%s), but -you may need to enable firewall access for this script first. -I will start the download in %d seconds. - -(Note: if this machine does not have network access, please obtain the file - - %s - -and place it in this directory before rerunning this script.) ----------------------------------------------------------------------------""", - version, download_base, delay, url - ); from time import sleep; sleep(delay) - log.warn("Downloading %s", url) - src = urllib2.urlopen(url) - # Read/write all in one block, so we don't create a corrupt file - # if the download is interrupted. - data = _validate_md5(egg_name, src.read()) - dst = open(saveto,"wb"); dst.write(data) - finally: - if src: src.close() - if dst: dst.close() + log.warn("Downloading %s", url) + downloader = downloader_factory() + downloader(url, saveto) return os.path.realpath(saveto) +def _extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + import copy + import operator + from tarfile import ExtractError + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 448 # decimal for oct 0700 + self.extract(tarinfo, path) + + # Reverse sort directories. + if sys.version_info < (2, 4): + def sorter(dir1, dir2): + return cmp(dir1.name, dir2.name) + directories.sort(sorter) + directories.reverse() + else: + directories.sort(key=operator.attrgetter('name'), reverse=True) + + # Set correct owner, mtime and filemode on directories. 
+ for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError: + e = sys.exc_info()[1] + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) +def _build_install_args(options): + """ + Build the arguments to 'python setup.py install' on the setuptools package + """ + install_args = [] + if options.user_install: + if sys.version_info < (2, 6): + log.warn("--user requires Python 2.6 or later") + raise SystemExit(1) + install_args.append('--user') + return install_args +def _parse_args(): + """ + Parse the command line for options + """ + parser = optparse.OptionParser() + parser.add_option( + '--user', dest='user_install', action='store_true', default=False, + help='install in user site package (requires Python 2.6 or later)') + parser.add_option( + '--download-base', dest='download_base', metavar="URL", + default=DEFAULT_URL, + help='alternative URL from where to download the setuptools package') + parser.add_option( + '--insecure', dest='downloader_factory', action='store_const', + const=lambda: download_file_insecure, default=get_best_downloader, + help='Use internal, non-validating downloader' + ) + options, args = parser.parse_args() + # positional arguments are ignored + return options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -def main(argv, version=DEFAULT_VERSION): +def main(version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" - try: - import setuptools - except ImportError: - egg = None - try: - egg = download_setuptools(version, delay=0) - sys.path.insert(0,egg) - from setuptools.command.easy_install import main - return main(list(argv)+[egg]) # we're done here - finally: - if egg and os.path.exists(egg): - os.unlink(egg) - else: - if setuptools.__version__ == '0.0.1': - print >>sys.stderr, ( - "You have an obsolete version of setuptools installed. Please\n" - "remove it from your system entirely before rerunning this script." - ) - sys.exit(2) - - req = "setuptools>="+version - import pkg_resources - try: - pkg_resources.require(req) - except pkg_resources.VersionConflict: - try: - from setuptools.command.easy_install import main - except ImportError: - from easy_install import main - main(list(argv)+[download_setuptools(delay=0)]) - sys.exit(0) # try to force an exit - else: - if argv: - from setuptools.command.easy_install import main - main(argv) - else: - print "Setuptools version",version,"or greater has been installed." - print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' - -def update_md5(filenames): - """Update our built-in md5 registry""" - - import re - - for name in filenames: - base = os.path.basename(name) - f = open(name,'rb') - md5_data[base] = md5(f.read()).hexdigest() - f.close() - - data = [" %r: %r,\n" % it for it in md5_data.items()] - data.sort() - repl = "".join(data) - - import inspect - srcfile = inspect.getsourcefile(sys.modules[__name__]) - f = open(srcfile, 'rb'); src = f.read(); f.close() - - match = re.search("\nmd5_data = {\n([^}]+)}", src) - if not match: - print >>sys.stderr, "Internal error!" 
- sys.exit(2) - - src = src[:match.start(1)] + repl + src[match.end(1):] - f = open(srcfile,'w') - f.write(src) - f.close() - - -if __name__=='__main__': - if len(sys.argv)>2 and sys.argv[1]=='--md5update': - update_md5(sys.argv[2:]) - else: - main(sys.argv[1:]) - - - - - + options = _parse_args() + tarball = download_setuptools(download_base=options.download_base, + downloader_factory=options.downloader_factory) + return _install(tarball, _build_install_args(options)) +if __name__ == '__main__': + sys.exit(main()) diff --git a/vendor/pygments/pygmentize b/vendor/pygments/pygmentize index e237919..aea3872 100755 --- a/vendor/pygments/pygmentize +++ b/vendor/pygments/pygmentize @@ -1,6 +1,7 @@ -#!/usr/bin/env python +#!/usr/bin/env python2 -import sys, pygments.cmdline +import sys +import pygments.cmdline try: sys.exit(pygments.cmdline.main(sys.argv)) except KeyboardInterrupt: diff --git a/vendor/pygments/pygments/__init__.py b/vendor/pygments/pygments/__init__.py index 2bfd8ba..c623440 100644 --- a/vendor/pygments/pygments/__init__.py +++ b/vendor/pygments/pygments/__init__.py @@ -22,11 +22,11 @@ .. _Pygments tip: http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -__version__ = '1.6' +__version__ = '2.1.3' __docformat__ = 'restructuredtext' __all__ = ['lex', 'format', 'highlight'] @@ -43,15 +43,16 @@ def lex(code, lexer): """ try: return lexer.get_tokens(code) - except TypeError, err: + except TypeError as err: if isinstance(err.args[0], str) and \ - 'unbound method get_tokens' in err.args[0]: + ('unbound method get_tokens' in err.args[0] or + 'missing 1 required positional argument' in err.args[0]): raise TypeError('lex() argument must be a lexer instance, ' 'not a class') raise -def format(tokens, formatter, outfile=None): +def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin """ Format a tokenlist ``tokens`` with the formatter ``formatter``. @@ -61,15 +62,15 @@ def format(tokens, formatter, outfile=None): """ try: if not outfile: - #print formatter, 'using', formatter.encoding - realoutfile = formatter.encoding and BytesIO() or StringIO() + realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO() formatter.format(tokens, realoutfile) return realoutfile.getvalue() else: formatter.format(tokens, outfile) - except TypeError, err: + except TypeError as err: if isinstance(err.args[0], str) and \ - 'unbound method format' in err.args[0]: + ('unbound method format' in err.args[0] or + 'missing 1 required positional argument' in err.args[0]): raise TypeError('format() argument must be a formatter instance, ' 'not a class') raise @@ -86,6 +87,6 @@ def highlight(code, lexer, formatter, outfile=None): return format(lex(code, lexer), formatter, outfile) -if __name__ == '__main__': +if __name__ == '__main__': # pragma: no cover from pygments.cmdline import main sys.exit(main(sys.argv)) diff --git a/vendor/pygments/pygments/cmdline.py b/vendor/pygments/pygments/cmdline.py index c25204b..00745ed 100644 --- a/vendor/pygments/pygments/cmdline.py +++ b/vendor/pygments/pygments/cmdline.py @@ -5,27 +5,33 @@ Command line interface. - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
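 The pygments/__init__.py hunk above tightens the check that lex() and format() receive lexer/formatter *instances* rather than classes (it now also recognizes Python 3's "missing 1 required positional argument" wording). A minimal illustrative sketch of the intended call pattern, not taken from this patch:

    # Sketch only: pass instances to highlight(); passing a class such as
    # PythonLexer itself triggers the clearer TypeError added above.
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    html = highlight('print("hello")',
                     get_lexer_by_name('python'),
                     HtmlFormatter(noclasses=True))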
""" + +from __future__ import print_function + import sys import getopt from textwrap import dedent from pygments import __version__, highlight -from pygments.util import ClassNotFound, OptionError, docstring_headline -from pygments.lexers import get_all_lexers, get_lexer_by_name, get_lexer_for_filename, \ - find_lexer_class, guess_lexer, TextLexer +from pygments.util import ClassNotFound, OptionError, docstring_headline, \ + guess_decode, guess_decode_from_terminal, terminal_encoding +from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \ + get_lexer_for_filename, find_lexer_class_for_filename +from pygments.lexers.special import TextLexer +from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter from pygments.formatters import get_all_formatters, get_formatter_by_name, \ - get_formatter_for_filename, find_formatter_class, \ - TerminalFormatter # pylint:disable-msg=E0611 + get_formatter_for_filename, find_formatter_class +from pygments.formatters.terminal import TerminalFormatter from pygments.filters import get_all_filters, find_filter_class from pygments.styles import get_all_styles, get_style_by_name USAGE = """\ Usage: %s [-l | -g] [-F [:]] [-f ] - [-O ] [-P ] [-o ] [] + [-O ] [-P ] [-s] [-v] [-o ] [] %s -S - - -

    Code tags report for %s

    - - -%s -
    LineTagWhoDescription
    - - -''' - - TABLE = '\nFile: %s\n' - - TR = ('%%(lno)d' - '%%(tag)s' - '%%(who)s%%(what)s') - - f = file(output, 'w') - table = '\n'.join(TABLE % fname + - '\n'.join(TR % (no % 2,) % entry - for no, entry in enumerate(store[fname])) - for fname in sorted(store)) - f.write(HTML % (', '.join(map(abspath, args)), table)) - f.close() - - print "Report written to %s." % output - return 0 - -if __name__ == '__main__': - sys.exit(main()) diff --git a/vendor/pygments/scripts/find_error.py b/vendor/pygments/scripts/find_error.py deleted file mode 100755 index 0092356..0000000 --- a/vendor/pygments/scripts/find_error.py +++ /dev/null @@ -1,170 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -""" - Lexing error finder - ~~~~~~~~~~~~~~~~~~~ - - For the source files given on the command line, display - the text where Error tokens are being generated, along - with some context. - - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import sys, os - -# always prefer Pygments from source if exists -srcpath = os.path.join(os.path.dirname(__file__), '..') -if os.path.isdir(os.path.join(srcpath, 'pygments')): - sys.path.insert(0, srcpath) - - -from pygments.lexer import RegexLexer -from pygments.lexers import get_lexer_for_filename, get_lexer_by_name -from pygments.token import Error, Text, _TokenType -from pygments.cmdline import _parse_options - - -class DebuggingRegexLexer(RegexLexer): - """Make the state stack, position and current match instance attributes.""" - - def get_tokens_unprocessed(self, text, stack=('root',)): - """ - Split ``text`` into (tokentype, text) pairs. - - ``stack`` is the inital stack (default: ``['root']``) - """ - self.pos = 0 - tokendefs = self._tokens - self.statestack = list(stack) - statetokens = tokendefs[self.statestack[-1]] - while 1: - for rexmatch, action, new_state in statetokens: - self.m = m = rexmatch(text, self.pos) - if m: - if type(action) is _TokenType: - yield self.pos, action, m.group() - else: - for item in action(self, m): - yield item - self.pos = m.end() - if new_state is not None: - # state transition - if isinstance(new_state, tuple): - for state in new_state: - if state == '#pop': - self.statestack.pop() - elif state == '#push': - self.statestack.append(self.statestack[-1]) - else: - self.statestack.append(state) - elif isinstance(new_state, int): - # pop - del self.statestack[new_state:] - elif new_state == '#push': - self.statestack.append(self.statestack[-1]) - else: - assert False, 'wrong state def: %r' % new_state - statetokens = tokendefs[self.statestack[-1]] - break - else: - try: - if text[self.pos] == '\n': - # at EOL, reset state to 'root' - self.pos += 1 - self.statestack = ['root'] - statetokens = tokendefs['root'] - yield self.pos, Text, u'\n' - continue - yield self.pos, Error, text[self.pos] - self.pos += 1 - except IndexError: - break - - -def main(fn, lexer=None, options={}): - if lexer is not None: - lx = get_lexer_by_name(lexer) - else: - try: - lx = get_lexer_for_filename(os.path.basename(fn), **options) - except ValueError: - try: - name, rest = fn.split('_', 1) - lx = get_lexer_by_name(name, **options) - except ValueError: - raise AssertionError('no lexer found for file %r' % fn) - debug_lexer = False - # does not work for e.g. 
ExtendedRegexLexers - if lx.__class__.__bases__ == (RegexLexer,): - lx.__class__.__bases__ = (DebuggingRegexLexer,) - debug_lexer = True - elif lx.__class__.__bases__ == (DebuggingRegexLexer,): - # already debugged before - debug_lexer = True - lno = 1 - text = file(fn, 'U').read() - text = text.strip('\n') + '\n' - tokens = [] - states = [] - - def show_token(tok, state): - reprs = map(repr, tok) - print ' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0], - if debug_lexer: - print ' ' + ' ' * (29-len(reprs[0])) + repr(state), - print - - for type, val in lx.get_tokens(text): - lno += val.count('\n') - if type == Error: - print 'Error parsing', fn, 'on line', lno - print 'Previous tokens' + (debug_lexer and ' and states' or '') + ':' - if showall: - for tok, state in map(None, tokens, states): - show_token(tok, state) - else: - for i in range(max(len(tokens) - num, 0), len(tokens)): - show_token(tokens[i], states[i]) - print 'Error token:' - l = len(repr(val)) - print ' ' + repr(val), - if debug_lexer and hasattr(lx, 'statestack'): - print ' ' * (60-l) + repr(lx.statestack), - print - print - return 1 - tokens.append((type, val)) - if debug_lexer: - if hasattr(lx, 'statestack'): - states.append(lx.statestack[:]) - else: - states.append(None) - if showall: - for tok, state in map(None, tokens, states): - show_token(tok, state) - return 0 - - -num = 10 -showall = False -lexer = None -options = {} - -if __name__ == '__main__': - import getopt - opts, args = getopt.getopt(sys.argv[1:], 'n:l:aO:') - for opt, val in opts: - if opt == '-n': - num = int(val) - elif opt == '-a': - showall = True - elif opt == '-l': - lexer = val - elif opt == '-O': - options = _parse_options([val]) - ret = 0 - for f in args: - ret += main(f, lexer, options) - sys.exit(bool(ret)) diff --git a/vendor/pygments/scripts/find_error.py b/vendor/pygments/scripts/find_error.py new file mode 120000 index 0000000..ba0b76f --- /dev/null +++ b/vendor/pygments/scripts/find_error.py @@ -0,0 +1 @@ +debug_lexer.py \ No newline at end of file diff --git a/vendor/pygments/scripts/get_vimkw.py b/vendor/pygments/scripts/get_vimkw.py index 153c88c..4565274 100644 --- a/vendor/pygments/scripts/get_vimkw.py +++ b/vendor/pygments/scripts/get_vimkw.py @@ -1,13 +1,42 @@ +from __future__ import print_function + import re -from pprint import pprint + +from pygments.util import format_lines r_line = re.compile(r"^(syn keyword vimCommand contained|syn keyword vimOption " r"contained|syn keyword vimAutoEvent contained)\s+(.*)") r_item = re.compile(r"(\w+)(?:\[(\w+)\])?") +HEADER = '''\ +# -*- coding: utf-8 -*- +""" + pygments.lexers._vim_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This file is autogenerated by scripts/get_vimkw.py + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +# Split up in multiple functions so it's importable by jython, which has a +# per-method size limit. +''' + +METHOD = '''\ +def _get%(key)s(): +%(body)s + return var +%(key)s = _get%(key)s() +''' + def getkw(input, output): out = file(output, 'w') + # Copy template from an existing file. 
+ print(HEADER, file=out) + output_info = {'command': [], 'option': [], 'auto': []} for line in file(input): m = r_line.match(line) @@ -29,15 +58,17 @@ def getkw(input, output): output_info['option'].append("('inoremap','inoremap')") output_info['option'].append("('vnoremap','vnoremap')") - for a, b in output_info.items(): - b.sort() - print >>out, '%s=[%s]' % (a, ','.join(b)) + for key, keywordlist in output_info.items(): + keywordlist.sort() + body = format_lines('var', keywordlist, raw=True, indent_level=1) + print(METHOD % locals(), file=out) def is_keyword(w, keywords): for i in range(len(w), 0, -1): if w[:i] in keywords: - return signals[w[:i]][:len(w)] == w + return keywords[w[:i]][:len(w)] == w return False if __name__ == "__main__": - getkw("/usr/share/vim/vim73/syntax/vim.vim", "temp.py") + getkw("/usr/share/vim/vim74/syntax/vim.vim", + "pygments/lexers/_vim_builtins.py") diff --git a/vendor/pygments/scripts/reindent.py b/vendor/pygments/scripts/reindent.py deleted file mode 100755 index e6ee828..0000000 --- a/vendor/pygments/scripts/reindent.py +++ /dev/null @@ -1,291 +0,0 @@ -#! /usr/bin/env python - -# Released to the public domain, by Tim Peters, 03 October 2000. -# -B option added by Georg Brandl, 2006. - -"""reindent [-d][-r][-v] [ path ... ] - --d (--dryrun) Dry run. Analyze, but don't make any changes to files. --r (--recurse) Recurse. Search for all .py files in subdirectories too. --B (--no-backup) Don't write .bak backup files. --v (--verbose) Verbose. Print informative msgs; else only names of changed files. --h (--help) Help. Print this usage information and exit. - -Change Python (.py) files to use 4-space indents and no hard tab characters. -Also trim excess spaces and tabs from ends of lines, and remove empty lines -at the end of files. Also ensure the last line ends with a newline. - -If no paths are given on the command line, reindent operates as a filter, -reading a single source file from standard input and writing the transformed -source to standard output. In this case, the -d, -r and -v flags are -ignored. - -You can pass one or more file and/or directory paths. When a directory -path, all .py files within the directory will be examined, and, if the -r -option is given, likewise recursively for subdirectories. - -If output is not to standard output, reindent overwrites files in place, -renaming the originals with a .bak extension. If it finds nothing to -change, the file is left alone. If reindent does change a file, the changed -file is a fixed-point for future runs (i.e., running reindent on the -resulting .py file won't change it again). - -The hard part of reindenting is figuring out what to do with comment -lines. So long as the input files get a clean bill of health from -tabnanny.py, reindent should do a good job. 
-""" - -__version__ = "1" - -import tokenize -import os -import sys - -verbose = 0 -recurse = 0 -dryrun = 0 -no_backup = 0 - -def usage(msg=None): - if msg is not None: - print >> sys.stderr, msg - print >> sys.stderr, __doc__ - -def errprint(*args): - sep = "" - for arg in args: - sys.stderr.write(sep + str(arg)) - sep = " " - sys.stderr.write("\n") - -def main(): - import getopt - global verbose, recurse, dryrun, no_backup - - try: - opts, args = getopt.getopt(sys.argv[1:], "drvhB", - ["dryrun", "recurse", "verbose", "help", - "no-backup"]) - except getopt.error, msg: - usage(msg) - return - for o, a in opts: - if o in ('-d', '--dryrun'): - dryrun += 1 - elif o in ('-r', '--recurse'): - recurse += 1 - elif o in ('-v', '--verbose'): - verbose += 1 - elif o in ('-B', '--no-backup'): - no_backup += 1 - elif o in ('-h', '--help'): - usage() - return - if not args: - r = Reindenter(sys.stdin) - r.run() - r.write(sys.stdout) - return - for arg in args: - check(arg) - -def check(file): - if os.path.isdir(file) and not os.path.islink(file): - if verbose: - print "listing directory", file - names = os.listdir(file) - for name in names: - fullname = os.path.join(file, name) - if ((recurse and os.path.isdir(fullname) and - not os.path.islink(fullname)) - or name.lower().endswith(".py")): - check(fullname) - return - - if verbose: - print "checking", file, "...", - try: - f = open(file) - except IOError, msg: - errprint("%s: I/O Error: %s" % (file, str(msg))) - return - - r = Reindenter(f) - f.close() - if r.run(): - if verbose: - print "changed." - if dryrun: - print "But this is a dry run, so leaving it alone." - else: - print "reindented", file, (dryrun and "(dry run => not really)" or "") - if not dryrun: - if not no_backup: - bak = file + ".bak" - if os.path.exists(bak): - os.remove(bak) - os.rename(file, bak) - if verbose: - print "renamed", file, "to", bak - f = open(file, "w") - r.write(f) - f.close() - if verbose: - print "wrote new", file - else: - if verbose: - print "unchanged." - - -class Reindenter: - - def __init__(self, f): - self.find_stmt = 1 # next token begins a fresh stmt? - self.level = 0 # current indent level - - # Raw file lines. - self.raw = f.readlines() - - # File lines, rstripped & tab-expanded. Dummy at start is so - # that we can use tokenize's 1-based line numbering easily. - # Note that a line is all-blank iff it's "\n". - self.lines = [line.rstrip('\n \t').expandtabs() + "\n" - for line in self.raw] - self.lines.insert(0, None) - self.index = 1 # index into self.lines of next line - - # List of (lineno, indentlevel) pairs, one for each stmt and - # comment line. indentlevel is -1 for comment lines, as a - # signal that tokenize doesn't know what to do about them; - # indeed, they're our headache! - self.stats = [] - - def run(self): - tokenize.tokenize(self.getline, self.tokeneater) - # Remove trailing empty lines. - lines = self.lines - while lines and lines[-1] == "\n": - lines.pop() - # Sentinel. - stats = self.stats - stats.append((len(lines), 0)) - # Map count of leading spaces to # we want. - have2want = {} - # Program after transformation. - after = self.after = [] - # Copy over initial empty lines -- there's nothing to do until - # we see a line with *something* on it. - i = stats[0][0] - after.extend(lines[1:i]) - for i in range(len(stats)-1): - thisstmt, thislevel = stats[i] - nextstmt = stats[i+1][0] - have = getlspace(lines[thisstmt]) - want = thislevel * 4 - if want < 0: - # A comment line. - if have: - # An indented comment line. 
If we saw the same - # indentation before, reuse what it most recently - # mapped to. - want = have2want.get(have, -1) - if want < 0: - # Then it probably belongs to the next real stmt. - for j in xrange(i+1, len(stats)-1): - jline, jlevel = stats[j] - if jlevel >= 0: - if have == getlspace(lines[jline]): - want = jlevel * 4 - break - if want < 0: # Maybe it's a hanging - # comment like this one, - # in which case we should shift it like its base - # line got shifted. - for j in xrange(i-1, -1, -1): - jline, jlevel = stats[j] - if jlevel >= 0: - want = have + getlspace(after[jline-1]) - \ - getlspace(lines[jline]) - break - if want < 0: - # Still no luck -- leave it alone. - want = have - else: - want = 0 - assert want >= 0 - have2want[have] = want - diff = want - have - if diff == 0 or have == 0: - after.extend(lines[thisstmt:nextstmt]) - else: - for line in lines[thisstmt:nextstmt]: - if diff > 0: - if line == "\n": - after.append(line) - else: - after.append(" " * diff + line) - else: - remove = min(getlspace(line), -diff) - after.append(line[remove:]) - return self.raw != self.after - - def write(self, f): - f.writelines(self.after) - - # Line-getter for tokenize. - def getline(self): - if self.index >= len(self.lines): - line = "" - else: - line = self.lines[self.index] - self.index += 1 - return line - - # Line-eater for tokenize. - def tokeneater(self, type, token, (sline, scol), end, line, - INDENT=tokenize.INDENT, - DEDENT=tokenize.DEDENT, - NEWLINE=tokenize.NEWLINE, - COMMENT=tokenize.COMMENT, - NL=tokenize.NL): - - if type == NEWLINE: - # A program statement, or ENDMARKER, will eventually follow, - # after some (possibly empty) run of tokens of the form - # (NL | COMMENT)* (INDENT | DEDENT+)? - self.find_stmt = 1 - - elif type == INDENT: - self.find_stmt = 1 - self.level += 1 - - elif type == DEDENT: - self.find_stmt = 1 - self.level -= 1 - - elif type == COMMENT: - if self.find_stmt: - self.stats.append((sline, -1)) - # but we're still looking for a new stmt, so leave - # find_stmt alone - - elif type == NL: - pass - - elif self.find_stmt: - # This is the first "real token" following a NEWLINE, so it - # must be the first token of the next program statement, or an - # ENDMARKER. - self.find_stmt = 0 - if line: # not endmarker - self.stats.append((sline, self.level)) - -# Count number of leading blanks. -def getlspace(line): - i, n = 0, len(line) - while i < n and line[i] == " ": - i += 1 - return i - -if __name__ == '__main__': - main() diff --git a/vendor/pygments/scripts/vim2pygments.py b/vendor/pygments/scripts/vim2pygments.py old mode 100644 new mode 100755 index 80f0ada..42af0bb --- a/vendor/pygments/scripts/vim2pygments.py +++ b/vendor/pygments/scripts/vim2pygments.py @@ -11,10 +11,12 @@ :license: BSD, see LICENSE for details. """ +from __future__ import print_function + import sys import re from os import path -from cStringIO import StringIO +from io import StringIO split_re = re.compile(r'(?' 
% sys.argv[0] + print('Usage: %s ' % sys.argv[0]) return 2 if sys.argv[1] in ('-v', '--version'): - print '%s %s' % (SCRIPT_NAME, SCRIPT_VERSION) + print('%s %s' % (SCRIPT_NAME, SCRIPT_VERSION)) return filename = sys.argv[1] if not (path.exists(filename) and path.isfile(filename)): - print 'Error: %s not found' % filename + print('Error: %s not found' % filename) return 1 convert(filename, sys.stdout) sys.stdout.write('\n') diff --git a/vendor/pygments/setup.cfg b/vendor/pygments/setup.cfg index 3581741..04980ac 100644 --- a/vendor/pygments/setup.cfg +++ b/vendor/pygments/setup.cfg @@ -5,4 +5,8 @@ tag_svn_revision = 0 [aliases] release = egg_info -RDb '' +upload = upload --sign --identity=36580288 + +[bdist_wheel] +universal = 1 diff --git a/vendor/pygments/setup.py b/vendor/pygments/setup.py index 17bbf81..3949909 100755 --- a/vendor/pygments/setup.py +++ b/vendor/pygments/setup.py @@ -1,29 +1,22 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -""" - Pygments +"""Pygments ~~~~~~~~ Pygments is a syntax highlighting package written in Python. - It is a generic syntax highlighter for general use in all kinds of software - such as forum systems, wikis or other applications that need to prettify - source code. Highlights are: + It is a generic syntax highlighter suitable for use in code hosting, forums, + wikis or other applications that need to prettify source code. Highlights + are: - * a wide range of common languages and markup formats is supported + * a wide range of over 300 languages and other text formats is supported * special attention is paid to details, increasing quality by a fair amount * support for new languages and formats are added easily * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image \ formats that PIL supports and ANSI sequences * it is usable as a command-line tool and as a library - * ... and it highlights even Brainfuck! - The `Pygments tip`_ is installable with ``easy_install Pygments==dev``. - - .. _Pygments tip: - http://bitbucket.org/birkenfeld/pygments-main/get/default.zip#egg=Pygments-dev - - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -31,21 +24,22 @@ try: from setuptools import setup, find_packages have_setuptools = True except ImportError: - from distutils.core import setup - def find_packages(): - return [ - 'pygments', - 'pygments.lexers', - 'pygments.formatters', - 'pygments.styles', - 'pygments.filters', - ] - have_setuptools = False - -try: - from distutils.command.build_py import build_py_2to3 as build_py -except ImportError: - from distutils.command.build_py import build_py + try: + import ez_setup + ez_setup.use_setuptools() + from setuptools import setup, find_packages + have_setuptools = True + except ImportError: + from distutils.core import setup + def find_packages(*args, **kwargs): + return [ + 'pygments', + 'pygments.lexers', + 'pygments.formatters', + 'pygments.styles', + 'pygments.filters', + ] + have_setuptools = False if have_setuptools: add_keywords = dict( @@ -60,7 +54,7 @@ else: setup( name = 'Pygments', - version = '1.6', + version = '2.1.3', url = 'http://pygments.org/', license = 'BSD License', author = 'Georg Brandl', @@ -68,7 +62,7 @@ setup( description = 'Pygments is a syntax highlighting package written in Python.', long_description = __doc__, keywords = 'syntax highlighting', - packages = find_packages(), + packages = find_packages(exclude=['ez_setup']), platforms = 'any', zip_safe = False, include_package_data = True, @@ -85,6 +79,5 @@ setup( 'Topic :: Text Processing :: Filters', 'Topic :: Utilities', ], - cmdclass = {'build_py': build_py}, **add_keywords ) diff --git a/vendor/pygments/tests/.coverage b/vendor/pygments/tests/.coverage new file mode 100644 index 0000000000000000000000000000000000000000..965431f2de8881046a5ee20f13fe6921deb1f097 GIT binary patch literal 13152 zcmb7LX@C^f)y;hq7gRtN6;TkCWmtzDK?GC;PjF(r(r%=D)czzejaqd`+X5 zacUZpg_=62*-6*rS{pJ>wp48YN3qnJc8WE%wQkle)z*&6wdVJ<@_SYDKD-}y2diK8y$O;d}%i$pd*159T3U%_DdukK!7h%u{$8Pv@CV zVoc|UNINY~p;%)#xM@e9U~eliCGx6*m-I^Y1v}#`^9oIRc^4}&Ir8$_Tw|^_S$5MU zH(S)J_pzcJIfz?J%BwR+-q%Wu>wM2(RnjY@?QEUiZ9gkf)A_m}Kyp;QrmK}08+lE= zoh^A8y{ucDtqOJZdQEpLF{1OWQkgoPc6!17R+Ov@z9(5O>TES(?-A*vf&k|1V#zi8 z^^CM%uuR^LSEw(i^+9`Ci3y#5S#Z#jlgXv+l4DNV+e#c2d8M5yxn8!ID!93l-u8e< zi|f5Y#x9lgqo{nD2U_`q!k5{NyYv3R>(p;kzs!E}4)rS>%EROx9>ufyWOlfo8`PyIPr*IV;owNa!0dXKUK{|e2g#;WZPy7%bF2#Kej~8&!<^5Qm?ZmjrI=f`+ zpB)@0ZBhcCp3e)0#91e3EhW7{?^hjXU87xW3a)c#oOO1(!ATa(_YI45rNLv&lnc2= zb34Q1WKUzA$+!`b>R($KYbiN(u6gn!Bb_l=STAMFGZ`@|(jyF(HF!GuHCAF`grsXP zdC6qRsz%3Ra(1z`1CtvQhsik^J7utGY`lk6G|?OvXRDH(^&>Mr&Q@u+XdeCqD>1C| zaF)C#C#$1B(aKK>IfG^lm5jcRaE6e}YaSo6_@U;Rj}@34&l7l(aQUMJERQiyeug~y znLLM26u1PRYPdu*FXI+&<>kDBSMn-8msj(7d_G^u-;#R2hA-|=^Vji}e3exGYk0j> z|Lb@oZ{i#HM&8Uf@fP07H}f{Wg>U8C_;$XN@8Y|8JKw|i@_l?ie}^CB9lVow@pt)Q zev}{M@A2dO1V717@zeZ${sBMZL}9GOl%37`72%3X8=WyZ&J)VInSy61 z|CBh(>bz3cF)4O*Brs|#f5jR%W591}Bvc8&sM$4A;h0G29KP~`oihb`TBMzVRZhy_ z*Yr5}Rmp{CSoveayzW-Pg?%QfHR@r)g*E3KE~->3Qs%RTcW#o5+QIJ!9CLSq*~Jjd zZiZv-uW6JC?Pl36)D<)1#4#hHRRo(!j}zit znw>KrH!D&R2vRD!OvBLZNV+HZwAxg<1CY;&bH=P|G|Gu_p0$@;DxQklY;Iw0oU0Q> zWL_s*iIJV7a`1holFqXdM@L>!o3@KW*eiVs#JO>O%=%2p6u9|u@JG15R4aXh3*zio zaEe~JkTUT+Db9W=5hea(SQshE%2~HqDg?QFvXx&{Eo8O3RJmTFNqtq^Uwo<~#G@K4 z%6OPy!6+WhW2Lf96cRf{>e@6T17?Z@m?Jf99?$2KL^q!zwe2)sV#L7Noa8#AqnCvPbcvg#=BUlgi`jfSwX3E{(VBuahg&ds%TFPJ=F{uFpizinUuYZQ@ZI>82}aLZM_%d3GF(E#}Qt+i?!q zXtx+RNX7|$otq7j<#Y=Qj8X$BBgGl~knO$~RPtHrA&207Ijzg)}eV zB~A=)*Y)*WW2DavUO=H>YL6QyqV--&u3&W8(l~ipo3V4b0A`!4e7c&uwb_qRn*$*k z^IHU?#YYGw#zcwNQKo6oQD{`UGrr_t0cZ$BFA$JkC@{T9fI1;iy;#8dbb;$L1h6gb z-w4^W1-4TH+}Z+d_>u|g35e(0tFpX2BG1^yBLm|x_V_+|bHzrsJ|pYf~wbN&Ut#{b8!^Beq2ev{wg 
zU-8@g4*x&@nt#K;<#+i#{vE&1zvmD55Bwp2#DC;J@yGlL|CvAKzwl>m7VO`nnXtl< z_aW>Xy0H~xx!sdJprb9@*Eq9>e!V$6$oR8Ej72-txU|D}Git|Fc(oG<6A6=cGi{GC zzU_3v48pO5Jlc8vQ4V zDK~8=JN!U45-YDL7L_pIS@~QP7#J9d9R(c1D>eGEJRb=NgH?9X$ksw6=q@-7PK)Qa zV~SS3RL%PeXY6J)q~?viO{Y`C`C+EJsa+8bU14uivq$ZW=pLtrt7F?NWbKLg(6mSc zQ6x(~_=4JW+3s*P@E*7eP!KM)+IwbQVRt569V{W2+-SK3Gtq=^HtJg z(*Dwh0AjUc^oVhcI`~Bbtv?mud{e;k3*CXz&OnB3QK_u;JPL2(J^~ApDZ>CgCl@uLy4w-XZ*&@EgK!3GWi#Bm9BzA>kv!9|<25{!I9k z@HfKW3I8Dci|}v4Hxx*rN?{L$Jr(v+*ju5C!oCUzDfCe|SfQ`Np$h#J`YRl%Fi>HT z!eE6V3R4uODoj(Dt}sL4ScMZ5W-H89V1<(uPElx3a1}g-oI+lqpiopODU=nO6_zQq zD6}e^qp(6@rNSzO)e7e+oUd@9!WxB(6)sg+r*Ngh)e6@r+^DcwVXML|3b!e2SGZr{ zL4_R(pDB#5!h|YJs>0kVa24iNVSW`BRAFINq<%-h&q@W`w8>V)(I~0fRL&T^D`T18 znS4jjWR;be9-}rGwJvDV0fWwsNrIKS$(9gm zrggM_rR(L5ZedN3whI;7B~I>hLR#MA&vjFGAVHXc5CUn{P77Nj0B*;Ix$J4(dL>Q`Bq%r zR%|x5%tevNe~^&%g|b^Ry{0vBq>=9UYSg|s(iww~lS1wl98;GsiIe|HyXZ&c(nv=P zzARL4mql6>9I;$T8YkxRI9x;Kq;xE*CzPGAEgFizdUajN>lbWtkeX_3Vn?@~SvVCQHj1v+U|f%Yqn*DouMPrLT$0O2Yu@ zqppt>ogi!KoO-*QHurh0mA@|RC+INB|BjMrbuxN?#|96U70J?@xMOK0}R>Y z4RHWMd{E!wzcEr^f+M4Wv)R&e+BH4>o8oLHFGr-74#$=_&?1jjuh|;sp~}>v z^dr;3V9m{Ow)6ZUo^5gBQE-x;sRg&h*=cFBsROrKaU^ffE0!9hk==D zV!O4^4XovLGB2=Rl$ z|Ec<3B)mj;neeKSf0f#Acj5Ot!uy2Z6F%rD0{>)m;3uIJ{0reT!e0rW8(H{8s0{xZ zio^ee`cN5tD6()LqYS$lO?XI!EF7*dLZL=sw89vLu?pi9CMrxe67iT&93H1|yuwVQ z5@#8qI7i{cP%h3_Sg3Ha!Xl#^7b~2mutdR9I7`7+NGhZh93v^46tbbJEQWg0SCY#^ z8F{V|ke4Z3u5g9ITB9MaG6HhF!gUH86gDbsQn+5>1|unNQrKdIgS9n67@Ax1;s_q%)Rn2^#MAM>3qu*>p|)j+K8Pq<7)iv`&E_NoqXZ#=;_}#$CndU5LOaa5zZy7CY-O0Fel1jeUNTB e+n-^slZjY;!8_xSruuT$^ql=B_b#hk9`#>2QU6f@ literal 0 HcmV?d00001 diff --git a/vendor/pygments/tests/cover/coverage_html.js b/vendor/pygments/tests/cover/coverage_html.js new file mode 100644 index 0000000..b24006d --- /dev/null +++ b/vendor/pygments/tests/cover/coverage_html.js @@ -0,0 +1,376 @@ +// Coverage.py HTML report browser code. +/*jslint browser: true, sloppy: true, vars: true, plusplus: true, maxerr: 50, indent: 4 */ +/*global coverage: true, document, window, $ */ + +coverage = {}; + +// Find all the elements with shortkey_* class, and use them to assign a shotrtcut key. +coverage.assign_shortkeys = function () { + $("*[class*='shortkey_']").each(function (i, e) { + $.each($(e).attr("class").split(" "), function (i, c) { + if (/^shortkey_/.test(c)) { + $(document).bind('keydown', c.substr(9), function () { + $(e).click(); + }); + } + }); + }); +}; + +// Create the events for the help panel. +coverage.wire_up_help_panel = function () { + $("#keyboard_icon").click(function () { + // Show the help panel, and position it so the keyboard icon in the + // panel is in the same place as the keyboard icon in the header. 
+ $(".help_panel").show(); + var koff = $("#keyboard_icon").offset(); + var poff = $("#panel_icon").position(); + $(".help_panel").offset({ + top: koff.top-poff.top, + left: koff.left-poff.left + }); + }); + $("#panel_icon").click(function () { + $(".help_panel").hide(); + }); +}; + +// Loaded on index.html +coverage.index_ready = function ($) { + // Look for a cookie containing previous sort settings: + var sort_list = []; + var cookie_name = "COVERAGE_INDEX_SORT"; + var i; + + // This almost makes it worth installing the jQuery cookie plugin: + if (document.cookie.indexOf(cookie_name) > -1) { + var cookies = document.cookie.split(";"); + for (i = 0; i < cookies.length; i++) { + var parts = cookies[i].split("="); + + if ($.trim(parts[0]) === cookie_name && parts[1]) { + sort_list = eval("[[" + parts[1] + "]]"); + break; + } + } + } + + // Create a new widget which exists only to save and restore + // the sort order: + $.tablesorter.addWidget({ + id: "persistentSort", + + // Format is called by the widget before displaying: + format: function (table) { + if (table.config.sortList.length === 0 && sort_list.length > 0) { + // This table hasn't been sorted before - we'll use + // our stored settings: + $(table).trigger('sorton', [sort_list]); + } + else { + // This is not the first load - something has + // already defined sorting so we'll just update + // our stored value to match: + sort_list = table.config.sortList; + } + } + }); + + // Configure our tablesorter to handle the variable number of + // columns produced depending on report options: + var headers = []; + var col_count = $("table.index > thead > tr > th").length; + + headers[0] = { sorter: 'text' }; + for (i = 1; i < col_count-1; i++) { + headers[i] = { sorter: 'digit' }; + } + headers[col_count-1] = { sorter: 'percent' }; + + // Enable the table sorter: + $("table.index").tablesorter({ + widgets: ['persistentSort'], + headers: headers + }); + + coverage.assign_shortkeys(); + coverage.wire_up_help_panel(); + + // Watch for page unload events so we can save the final sort settings: + $(window).unload(function () { + document.cookie = cookie_name + "=" + sort_list.toString() + "; path=/"; + }); +}; + +// -- pyfile stuff -- + +coverage.pyfile_ready = function ($) { + // If we're directed to a particular line number, highlight the line. + var frag = location.hash; + if (frag.length > 2 && frag[1] === 'n') { + $(frag).addClass('highlight'); + coverage.set_sel(parseInt(frag.substr(2), 10)); + } + else { + coverage.set_sel(0); + } + + $(document) + .bind('keydown', 'j', coverage.to_next_chunk_nicely) + .bind('keydown', 'k', coverage.to_prev_chunk_nicely) + .bind('keydown', '0', coverage.to_top) + .bind('keydown', '1', coverage.to_first_chunk) + ; + + $(".button_toggle_run").click(function (evt) {coverage.toggle_lines(evt.target, "run");}); + $(".button_toggle_exc").click(function (evt) {coverage.toggle_lines(evt.target, "exc");}); + $(".button_toggle_mis").click(function (evt) {coverage.toggle_lines(evt.target, "mis");}); + $(".button_toggle_par").click(function (evt) {coverage.toggle_lines(evt.target, "par");}); + + coverage.assign_shortkeys(); + coverage.wire_up_help_panel(); +}; + +coverage.toggle_lines = function (btn, cls) { + btn = $(btn); + var hide = "hide_"+cls; + if (btn.hasClass(hide)) { + $("#source ."+cls).removeClass(hide); + btn.removeClass(hide); + } + else { + $("#source ."+cls).addClass(hide); + btn.addClass(hide); + } +}; + +// Return the nth line div. 
+coverage.line_elt = function (n) { + return $("#t" + n); +}; + +// Return the nth line number div. +coverage.num_elt = function (n) { + return $("#n" + n); +}; + +// Return the container of all the code. +coverage.code_container = function () { + return $(".linenos"); +}; + +// Set the selection. b and e are line numbers. +coverage.set_sel = function (b, e) { + // The first line selected. + coverage.sel_begin = b; + // The next line not selected. + coverage.sel_end = (e === undefined) ? b+1 : e; +}; + +coverage.to_top = function () { + coverage.set_sel(0, 1); + coverage.scroll_window(0); +}; + +coverage.to_first_chunk = function () { + coverage.set_sel(0, 1); + coverage.to_next_chunk(); +}; + +coverage.is_transparent = function (color) { + // Different browsers return different colors for "none". + return color === "transparent" || color === "rgba(0, 0, 0, 0)"; +}; + +coverage.to_next_chunk = function () { + var c = coverage; + + // Find the start of the next colored chunk. + var probe = c.sel_end; + while (true) { + var probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + return; + } + var color = probe_line.css("background-color"); + if (!c.is_transparent(color)) { + break; + } + probe++; + } + + // There's a next chunk, `probe` points to it. + var begin = probe; + + // Find the end of this chunk. + var next_color = color; + while (next_color === color) { + probe++; + probe_line = c.line_elt(probe); + next_color = probe_line.css("background-color"); + } + c.set_sel(begin, probe); + c.show_selection(); +}; + +coverage.to_prev_chunk = function () { + var c = coverage; + + // Find the end of the prev colored chunk. + var probe = c.sel_begin-1; + var probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + return; + } + var color = probe_line.css("background-color"); + while (probe > 0 && c.is_transparent(color)) { + probe--; + probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + return; + } + color = probe_line.css("background-color"); + } + + // There's a prev chunk, `probe` points to its last line. + var end = probe+1; + + // Find the beginning of this chunk. + var prev_color = color; + while (prev_color === color) { + probe--; + probe_line = c.line_elt(probe); + prev_color = probe_line.css("background-color"); + } + c.set_sel(probe+1, end); + c.show_selection(); +}; + +// Return the line number of the line nearest pixel position pos +coverage.line_at_pos = function (pos) { + var l1 = coverage.line_elt(1), + l2 = coverage.line_elt(2), + result; + if (l1.length && l2.length) { + var l1_top = l1.offset().top, + line_height = l2.offset().top - l1_top, + nlines = (pos - l1_top) / line_height; + if (nlines < 1) { + result = 1; + } + else { + result = Math.ceil(nlines); + } + } + else { + result = 1; + } + return result; +}; + +// Returns 0, 1, or 2: how many of the two ends of the selection are on +// the screen right now? +coverage.selection_ends_on_screen = function () { + if (coverage.sel_begin === 0) { + return 0; + } + + var top = coverage.line_elt(coverage.sel_begin); + var next = coverage.line_elt(coverage.sel_end-1); + + return ( + (top.isOnScreen() ? 1 : 0) + + (next.isOnScreen() ? 1 : 0) + ); +}; + +coverage.to_next_chunk_nicely = function () { + coverage.finish_scrolling(); + if (coverage.selection_ends_on_screen() === 0) { + // The selection is entirely off the screen: select the top line on + // the screen. 
+ var win = $(window); + coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop())); + } + coverage.to_next_chunk(); +}; + +coverage.to_prev_chunk_nicely = function () { + coverage.finish_scrolling(); + if (coverage.selection_ends_on_screen() === 0) { + var win = $(window); + coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop() + win.height())); + } + coverage.to_prev_chunk(); +}; + +// Select line number lineno, or if it is in a colored chunk, select the +// entire chunk +coverage.select_line_or_chunk = function (lineno) { + var c = coverage; + var probe_line = c.line_elt(lineno); + if (probe_line.length === 0) { + return; + } + var the_color = probe_line.css("background-color"); + if (!c.is_transparent(the_color)) { + // The line is in a highlighted chunk. + // Search backward for the first line. + var probe = lineno; + var color = the_color; + while (probe > 0 && color === the_color) { + probe--; + probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + break; + } + color = probe_line.css("background-color"); + } + var begin = probe + 1; + + // Search forward for the last line. + probe = lineno; + color = the_color; + while (color === the_color) { + probe++; + probe_line = c.line_elt(probe); + color = probe_line.css("background-color"); + } + + coverage.set_sel(begin, probe); + } + else { + coverage.set_sel(lineno); + } +}; + +coverage.show_selection = function () { + var c = coverage; + + // Highlight the lines in the chunk + c.code_container().find(".highlight").removeClass("highlight"); + for (var probe = c.sel_begin; probe > 0 && probe < c.sel_end; probe++) { + c.num_elt(probe).addClass("highlight"); + } + + c.scroll_to_selection(); +}; + +coverage.scroll_to_selection = function () { + // Scroll the page if the chunk isn't fully visible. + if (coverage.selection_ends_on_screen() < 2) { + // Need to move the page. The html,body trick makes it scroll in all + // browsers, got it from http://stackoverflow.com/questions/3042651 + var top = coverage.line_elt(coverage.sel_begin); + var top_pos = parseInt(top.offset().top, 10); + coverage.scroll_window(top_pos - 30); + } +}; + +coverage.scroll_window = function (to_pos) { + $("html,body").animate({scrollTop: to_pos}, 200); +}; + +coverage.finish_scrolling = function () { + $("html,body").stop(true, true); +}; diff --git a/vendor/pygments/tests/cover/jquery.hotkeys.js b/vendor/pygments/tests/cover/jquery.hotkeys.js new file mode 100644 index 0000000..09b21e0 --- /dev/null +++ b/vendor/pygments/tests/cover/jquery.hotkeys.js @@ -0,0 +1,99 @@ +/* + * jQuery Hotkeys Plugin + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. 
+ * + * Based upon the plugin by Tzury Bar Yochay: + * http://github.com/tzuryby/hotkeys + * + * Original idea by: + * Binny V A, http://www.openjs.com/scripts/events/keyboard_shortcuts/ +*/ + +(function(jQuery){ + + jQuery.hotkeys = { + version: "0.8", + + specialKeys: { + 8: "backspace", 9: "tab", 13: "return", 16: "shift", 17: "ctrl", 18: "alt", 19: "pause", + 20: "capslock", 27: "esc", 32: "space", 33: "pageup", 34: "pagedown", 35: "end", 36: "home", + 37: "left", 38: "up", 39: "right", 40: "down", 45: "insert", 46: "del", + 96: "0", 97: "1", 98: "2", 99: "3", 100: "4", 101: "5", 102: "6", 103: "7", + 104: "8", 105: "9", 106: "*", 107: "+", 109: "-", 110: ".", 111 : "/", + 112: "f1", 113: "f2", 114: "f3", 115: "f4", 116: "f5", 117: "f6", 118: "f7", 119: "f8", + 120: "f9", 121: "f10", 122: "f11", 123: "f12", 144: "numlock", 145: "scroll", 191: "/", 224: "meta" + }, + + shiftNums: { + "`": "~", "1": "!", "2": "@", "3": "#", "4": "$", "5": "%", "6": "^", "7": "&", + "8": "*", "9": "(", "0": ")", "-": "_", "=": "+", ";": ": ", "'": "\"", ",": "<", + ".": ">", "/": "?", "\\": "|" + } + }; + + function keyHandler( handleObj ) { + // Only care when a possible input has been specified + if ( typeof handleObj.data !== "string" ) { + return; + } + + var origHandler = handleObj.handler, + keys = handleObj.data.toLowerCase().split(" "); + + handleObj.handler = function( event ) { + // Don't fire in text-accepting inputs that we didn't directly bind to + if ( this !== event.target && (/textarea|select/i.test( event.target.nodeName ) || + event.target.type === "text") ) { + return; + } + + // Keypress represents characters, not special keys + var special = event.type !== "keypress" && jQuery.hotkeys.specialKeys[ event.which ], + character = String.fromCharCode( event.which ).toLowerCase(), + key, modif = "", possible = {}; + + // check combinations (alt|ctrl|shift+anything) + if ( event.altKey && special !== "alt" ) { + modif += "alt+"; + } + + if ( event.ctrlKey && special !== "ctrl" ) { + modif += "ctrl+"; + } + + // TODO: Need to make sure this works consistently across platforms + if ( event.metaKey && !event.ctrlKey && special !== "meta" ) { + modif += "meta+"; + } + + if ( event.shiftKey && special !== "shift" ) { + modif += "shift+"; + } + + if ( special ) { + possible[ modif + special ] = true; + + } else { + possible[ modif + character ] = true; + possible[ modif + jQuery.hotkeys.shiftNums[ character ] ] = true; + + // "$" can be triggered as "Shift+4" or "Shift+$" or just "$" + if ( modif === "shift+" ) { + possible[ jQuery.hotkeys.shiftNums[ character ] ] = true; + } + } + + for ( var i = 0, l = keys.length; i < l; i++ ) { + if ( possible[ keys[i] ] ) { + return origHandler.apply( this, arguments ); + } + } + }; + } + + jQuery.each([ "keydown", "keyup", "keypress" ], function() { + jQuery.event.special[ this ] = { add: keyHandler }; + }); + +})( jQuery ); diff --git a/vendor/pygments/tests/cover/jquery.isonscreen.js b/vendor/pygments/tests/cover/jquery.isonscreen.js new file mode 100644 index 0000000..0182ebd --- /dev/null +++ b/vendor/pygments/tests/cover/jquery.isonscreen.js @@ -0,0 +1,53 @@ +/* Copyright (c) 2010 + * @author Laurence Wheway + * Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) + * and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses. 
+ * + * @version 1.2.0 + */ +(function($) { + jQuery.extend({ + isOnScreen: function(box, container) { + //ensure numbers come in as integers (not strings) and remove 'px' if it's there + for(var i in box){box[i] = parseFloat(box[i])}; + for(var i in container){container[i] = parseFloat(container[i])}; + + if(!container){ + container = { + left: $(window).scrollLeft(), + top: $(window).scrollTop(), + width: $(window).width(), + height: $(window).height() + } + } + + if( box.left+box.width-container.left > 0 && + box.left < container.width+container.left && + box.top+box.height-container.top > 0 && + box.top < container.height+container.top + ) return true; + return false; + } + }) + + + jQuery.fn.isOnScreen = function (container) { + for(var i in container){container[i] = parseFloat(container[i])}; + + if(!container){ + container = { + left: $(window).scrollLeft(), + top: $(window).scrollTop(), + width: $(window).width(), + height: $(window).height() + } + } + + if( $(this).offset().left+$(this).width()-container.left > 0 && + $(this).offset().left < container.width+container.left && + $(this).offset().top+$(this).height()-container.top > 0 && + $(this).offset().top < container.height+container.top + ) return true; + return false; + } +})(jQuery); diff --git a/vendor/pygments/tests/cover/jquery.min.js b/vendor/pygments/tests/cover/jquery.min.js new file mode 100644 index 0000000..c941a5f --- /dev/null +++ b/vendor/pygments/tests/cover/jquery.min.js @@ -0,0 +1,166 @@ +/*! + * jQuery JavaScript Library v1.4.3 + * http://jquery.com/ + * + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * Includes Sizzle.js + * http://sizzlejs.com/ + * Copyright 2010, The Dojo Foundation + * Released under the MIT, BSD, and GPL Licenses.
+ * + * Date: Thu Oct 14 23:10:06 2010 -0400 + */ +(function(E,A){function U(){return false}function ba(){return true}function ja(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function Ga(a){var b,d,e=[],f=[],h,k,l,n,s,v,B,D;k=c.data(this,this.nodeType?"events":"__events__");if(typeof k==="function")k=k.events;if(!(a.liveFired===this||!k||!k.live||a.button&&a.type==="click")){if(a.namespace)D=RegExp("(^|\\.)"+a.namespace.split(".").join("\\.(?:.*\\.)?")+"(\\.|$)");a.liveFired=this;var H=k.live.slice(0);for(n=0;nd)break;a.currentTarget=f.elem;a.data=f.handleObj.data; +a.handleObj=f.handleObj;D=f.handleObj.origHandler.apply(f.elem,arguments);if(D===false||a.isPropagationStopped()){d=f.level;if(D===false)b=false}}return b}}function Y(a,b){return(a&&a!=="*"?a+".":"")+b.replace(Ha,"`").replace(Ia,"&")}function ka(a,b,d){if(c.isFunction(b))return c.grep(a,function(f,h){return!!b.call(f,h,f)===d});else if(b.nodeType)return c.grep(a,function(f){return f===b===d});else if(typeof b==="string"){var e=c.grep(a,function(f){return f.nodeType===1});if(Ja.test(b))return c.filter(b, +e,!d);else b=c.filter(b,e)}return c.grep(a,function(f){return c.inArray(f,b)>=0===d})}function la(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var e=c.data(a[d++]),f=c.data(this,e);if(e=e&&e.events){delete f.handle;f.events={};for(var h in e)for(var k in e[h])c.event.add(this,h,e[h][k],e[h][k].data)}}})}function Ka(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)} +function ma(a,b,d){var e=b==="width"?a.offsetWidth:a.offsetHeight;if(d==="border")return e;c.each(b==="width"?La:Ma,function(){d||(e-=parseFloat(c.css(a,"padding"+this))||0);if(d==="margin")e+=parseFloat(c.css(a,"margin"+this))||0;else e-=parseFloat(c.css(a,"border"+this+"Width"))||0});return e}function ca(a,b,d,e){if(c.isArray(b)&&b.length)c.each(b,function(f,h){d||Na.test(a)?e(a,h):ca(a+"["+(typeof h==="object"||c.isArray(h)?f:"")+"]",h,d,e)});else if(!d&&b!=null&&typeof b==="object")c.isEmptyObject(b)? +e(a,""):c.each(b,function(f,h){ca(a+"["+f+"]",h,d,e)});else e(a,b)}function S(a,b){var d={};c.each(na.concat.apply([],na.slice(0,b)),function(){d[this]=a});return d}function oa(a){if(!da[a]){var b=c("<"+a+">").appendTo("body"),d=b.css("display");b.remove();if(d==="none"||d==="")d="block";da[a]=d}return da[a]}function ea(a){return c.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var u=E.document,c=function(){function a(){if(!b.isReady){try{u.documentElement.doScroll("left")}catch(i){setTimeout(a, +1);return}b.ready()}}var b=function(i,r){return new b.fn.init(i,r)},d=E.jQuery,e=E.$,f,h=/^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]+)$)/,k=/\S/,l=/^\s+/,n=/\s+$/,s=/\W/,v=/\d/,B=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,D=/^[\],:{}\s]*$/,H=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,w=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,G=/(?:^|:|,)(?:\s*\[)+/g,M=/(webkit)[ \/]([\w.]+)/,g=/(opera)(?:.*version)?[ \/]([\w.]+)/,j=/(msie) ([\w.]+)/,o=/(mozilla)(?:.*? 
rv:([\w.]+))?/,m=navigator.userAgent,p=false, +q=[],t,x=Object.prototype.toString,C=Object.prototype.hasOwnProperty,P=Array.prototype.push,N=Array.prototype.slice,R=String.prototype.trim,Q=Array.prototype.indexOf,L={};b.fn=b.prototype={init:function(i,r){var y,z,F;if(!i)return this;if(i.nodeType){this.context=this[0]=i;this.length=1;return this}if(i==="body"&&!r&&u.body){this.context=u;this[0]=u.body;this.selector="body";this.length=1;return this}if(typeof i==="string")if((y=h.exec(i))&&(y[1]||!r))if(y[1]){F=r?r.ownerDocument||r:u;if(z=B.exec(i))if(b.isPlainObject(r)){i= +[u.createElement(z[1])];b.fn.attr.call(i,r,true)}else i=[F.createElement(z[1])];else{z=b.buildFragment([y[1]],[F]);i=(z.cacheable?z.fragment.cloneNode(true):z.fragment).childNodes}return b.merge(this,i)}else{if((z=u.getElementById(y[2]))&&z.parentNode){if(z.id!==y[2])return f.find(i);this.length=1;this[0]=z}this.context=u;this.selector=i;return this}else if(!r&&!s.test(i)){this.selector=i;this.context=u;i=u.getElementsByTagName(i);return b.merge(this,i)}else return!r||r.jquery?(r||f).find(i):b(r).find(i); +else if(b.isFunction(i))return f.ready(i);if(i.selector!==A){this.selector=i.selector;this.context=i.context}return b.makeArray(i,this)},selector:"",jquery:"1.4.3",length:0,size:function(){return this.length},toArray:function(){return N.call(this,0)},get:function(i){return i==null?this.toArray():i<0?this.slice(i)[0]:this[i]},pushStack:function(i,r,y){var z=b();b.isArray(i)?P.apply(z,i):b.merge(z,i);z.prevObject=this;z.context=this.context;if(r==="find")z.selector=this.selector+(this.selector?" ": +"")+y;else if(r)z.selector=this.selector+"."+r+"("+y+")";return z},each:function(i,r){return b.each(this,i,r)},ready:function(i){b.bindReady();if(b.isReady)i.call(u,b);else q&&q.push(i);return this},eq:function(i){return i===-1?this.slice(i):this.slice(i,+i+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(N.apply(this,arguments),"slice",N.call(arguments).join(","))},map:function(i){return this.pushStack(b.map(this,function(r,y){return i.call(r, +y,r)}))},end:function(){return this.prevObject||b(null)},push:P,sort:[].sort,splice:[].splice};b.fn.init.prototype=b.fn;b.extend=b.fn.extend=function(){var i=arguments[0]||{},r=1,y=arguments.length,z=false,F,I,K,J,fa;if(typeof i==="boolean"){z=i;i=arguments[1]||{};r=2}if(typeof i!=="object"&&!b.isFunction(i))i={};if(y===r){i=this;--r}for(;r0)){if(q){for(var r=0;i=q[r++];)i.call(u,b);q=null}b.fn.triggerHandler&&b(u).triggerHandler("ready")}}},bindReady:function(){if(!p){p=true;if(u.readyState==="complete")return setTimeout(b.ready, +1);if(u.addEventListener){u.addEventListener("DOMContentLoaded",t,false);E.addEventListener("load",b.ready,false)}else if(u.attachEvent){u.attachEvent("onreadystatechange",t);E.attachEvent("onload",b.ready);var i=false;try{i=E.frameElement==null}catch(r){}u.documentElement.doScroll&&i&&a()}}},isFunction:function(i){return b.type(i)==="function"},isArray:Array.isArray||function(i){return b.type(i)==="array"},isWindow:function(i){return i&&typeof i==="object"&&"setInterval"in i},isNaN:function(i){return i== +null||!v.test(i)||isNaN(i)},type:function(i){return i==null?String(i):L[x.call(i)]||"object"},isPlainObject:function(i){if(!i||b.type(i)!=="object"||i.nodeType||b.isWindow(i))return false;if(i.constructor&&!C.call(i,"constructor")&&!C.call(i.constructor.prototype,"isPrototypeOf"))return false;for(var r in i);return r===A||C.call(i,r)},isEmptyObject:function(i){for(var r in i)return 
false;return true},error:function(i){throw i;},parseJSON:function(i){if(typeof i!=="string"||!i)return null;i=b.trim(i); +if(D.test(i.replace(H,"@").replace(w,"]").replace(G,"")))return E.JSON&&E.JSON.parse?E.JSON.parse(i):(new Function("return "+i))();else b.error("Invalid JSON: "+i)},noop:function(){},globalEval:function(i){if(i&&k.test(i)){var r=u.getElementsByTagName("head")[0]||u.documentElement,y=u.createElement("script");y.type="text/javascript";if(b.support.scriptEval)y.appendChild(u.createTextNode(i));else y.text=i;r.insertBefore(y,r.firstChild);r.removeChild(y)}},nodeName:function(i,r){return i.nodeName&&i.nodeName.toUpperCase()=== +r.toUpperCase()},each:function(i,r,y){var z,F=0,I=i.length,K=I===A||b.isFunction(i);if(y)if(K)for(z in i){if(r.apply(i[z],y)===false)break}else for(;F";a=u.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var s=u.createElement("div"); +s.style.width=s.style.paddingLeft="1px";u.body.appendChild(s);c.boxModel=c.support.boxModel=s.offsetWidth===2;if("zoom"in s.style){s.style.display="inline";s.style.zoom=1;c.support.inlineBlockNeedsLayout=s.offsetWidth===2;s.style.display="";s.innerHTML="
    ";c.support.shrinkWrapBlocks=s.offsetWidth!==2}s.innerHTML="
    t
    ";var v=s.getElementsByTagName("td");c.support.reliableHiddenOffsets=v[0].offsetHeight=== +0;v[0].style.display="";v[1].style.display="none";c.support.reliableHiddenOffsets=c.support.reliableHiddenOffsets&&v[0].offsetHeight===0;s.innerHTML="";u.body.removeChild(s).style.display="none"});a=function(s){var v=u.createElement("div");s="on"+s;var B=s in v;if(!B){v.setAttribute(s,"return;");B=typeof v[s]==="function"}return B};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=f=h=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength", +cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var pa={},Oa=/^(?:\{.*\}|\[.*\])$/;c.extend({cache:{},uuid:0,expando:"jQuery"+c.now(),noData:{embed:true,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:true},data:function(a,b,d){if(c.acceptData(a)){a=a==E?pa:a;var e=a.nodeType,f=e?a[c.expando]:null,h=c.cache;if(!(e&&!f&&typeof b==="string"&&d===A)){if(e)f||(a[c.expando]=f=++c.uuid);else h=a;if(typeof b==="object")if(e)h[f]= +c.extend(h[f],b);else c.extend(h,b);else if(e&&!h[f])h[f]={};a=e?h[f]:h;if(d!==A)a[b]=d;return typeof b==="string"?a[b]:a}}},removeData:function(a,b){if(c.acceptData(a)){a=a==E?pa:a;var d=a.nodeType,e=d?a[c.expando]:a,f=c.cache,h=d?f[e]:e;if(b){if(h){delete h[b];d&&c.isEmptyObject(h)&&c.removeData(a)}}else if(d&&c.support.deleteExpando)delete a[c.expando];else if(a.removeAttribute)a.removeAttribute(c.expando);else if(d)delete f[e];else for(var k in a)delete a[k]}},acceptData:function(a){if(a.nodeName){var b= +c.noData[a.nodeName.toLowerCase()];if(b)return!(b===true||a.getAttribute("classid")!==b)}return true}});c.fn.extend({data:function(a,b){if(typeof a==="undefined")return this.length?c.data(this[0]):null;else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===A){var e=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(e===A&&this.length){e=c.data(this[0],a);if(e===A&&this[0].nodeType===1){e=this[0].getAttribute("data-"+a);if(typeof e=== +"string")try{e=e==="true"?true:e==="false"?false:e==="null"?null:!c.isNaN(e)?parseFloat(e):Oa.test(e)?c.parseJSON(e):e}catch(f){}else e=A}}return e===A&&d[1]?this.data(d[0]):e}else return this.each(function(){var h=c(this),k=[d[0],b];h.triggerHandler("setData"+d[1]+"!",k);c.data(this,a,b);h.triggerHandler("changeData"+d[1]+"!",k)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var e=c.data(a,b);if(!d)return e|| +[];if(!e||c.isArray(d))e=c.data(a,b,c.makeArray(d));else e.push(d);return e}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),e=d.shift();if(e==="inprogress")e=d.shift();if(e){b==="fx"&&d.unshift("inprogress");e.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===A)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this, +a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var 
qa=/[\n\t]/g,ga=/\s+/,Pa=/\r/g,Qa=/^(?:href|src|style)$/,Ra=/^(?:button|input)$/i,Sa=/^(?:button|input|object|select|textarea)$/i,Ta=/^a(?:rea)?$/i,ra=/^(?:radio|checkbox)$/i;c.fn.extend({attr:function(a,b){return c.access(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this, +a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(s){var v=c(this);v.addClass(a.call(this,s,v.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ga),d=0,e=this.length;d-1)return true;return false}, +val:function(a){if(!arguments.length){var b=this[0];if(b){if(c.nodeName(b,"option")){var d=b.attributes.value;return!d||d.specified?b.value:b.text}if(c.nodeName(b,"select")){var e=b.selectedIndex;d=[];var f=b.options;b=b.type==="select-one";if(e<0)return null;var h=b?e:0;for(e=b?e+1:f.length;h=0;else if(c.nodeName(this,"select")){var B=c.makeArray(v);c("option",this).each(function(){this.selected= +c.inArray(c(this).val(),B)>=0});if(!B.length)this.selectedIndex=-1}else this.value=v}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,e){if(!a||a.nodeType===3||a.nodeType===8)return A;if(e&&b in c.attrFn)return c(a)[b](d);e=a.nodeType!==1||!c.isXMLDoc(a);var f=d!==A;b=e&&c.props[b]||b;if(a.nodeType===1){var h=Qa.test(b);if((b in a||a[b]!==A)&&e&&!h){if(f){b==="type"&&Ra.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); +if(d===null)a.nodeType===1&&a.removeAttribute(b);else a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:Sa.test(a.nodeName)||Ta.test(a.nodeName)&&a.href?0:A;return a[b]}if(!c.support.style&&e&&b==="style"){if(f)a.style.cssText=""+d;return a.style.cssText}f&&a.setAttribute(b,""+d);if(!a.attributes[b]&&a.hasAttribute&&!a.hasAttribute(b))return A;a=!c.support.hrefNormalized&&e&& +h?a.getAttribute(b,2):a.getAttribute(b);return a===null?A:a}}});var X=/\.(.*)$/,ha=/^(?:textarea|input|select)$/i,Ha=/\./g,Ia=/ /g,Ua=/[^\w\s.|`]/g,Va=function(a){return a.replace(Ua,"\\$&")},sa={focusin:0,focusout:0};c.event={add:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(c.isWindow(a)&&a!==E&&!a.frameElement)a=E;if(d===false)d=U;var f,h;if(d.handler){f=d;d=f.handler}if(!d.guid)d.guid=c.guid++;if(h=c.data(a)){var k=a.nodeType?"events":"__events__",l=h[k],n=h.handle;if(typeof l=== +"function"){n=l.handle;l=l.events}else if(!l){a.nodeType||(h[k]=h=function(){});h.events=l={}}if(!n)h.handle=n=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(n.elem,arguments):A};n.elem=a;b=b.split(" ");for(var s=0,v;k=b[s++];){h=f?c.extend({},f):{handler:d,data:e};if(k.indexOf(".")>-1){v=k.split(".");k=v.shift();h.namespace=v.slice(0).sort().join(".")}else{v=[];h.namespace=""}h.type=k;if(!h.guid)h.guid=d.guid;var B=l[k],D=c.event.special[k]||{};if(!B){B=l[k]=[]; +if(!D.setup||D.setup.call(a,e,v,n)===false)if(a.addEventListener)a.addEventListener(k,n,false);else a.attachEvent&&a.attachEvent("on"+k,n)}if(D.add){D.add.call(a,h);if(!h.handler.guid)h.handler.guid=d.guid}B.push(h);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(d===false)d=U;var f,h,k=0,l,n,s,v,B,D,H=a.nodeType?"events":"__events__",w=c.data(a),G=w&&w[H];if(w&&G){if(typeof G==="function"){w=G;G=G.events}if(b&&b.type){d=b.handler;b=b.type}if(!b|| 
+typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(f in G)c.event.remove(a,f+b)}else{for(b=b.split(" ");f=b[k++];){v=f;l=f.indexOf(".")<0;n=[];if(!l){n=f.split(".");f=n.shift();s=RegExp("(^|\\.)"+c.map(n.slice(0).sort(),Va).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(B=G[f])if(d){v=c.event.special[f]||{};for(h=e||0;h=0){a.type= +f=f.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[f]&&c.each(c.cache,function(){this.events&&this.events[f]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return A;a.result=A;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(e=d.nodeType?c.data(d,"handle"):(c.data(d,"__events__")||{}).handle)&&e.apply(d,b);e=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+f]&&d["on"+f].apply(d,b)=== +false){a.result=false;a.preventDefault()}}catch(h){}if(!a.isPropagationStopped()&&e)c.event.trigger(a,b,e,true);else if(!a.isDefaultPrevented()){e=a.target;var k,l=f.replace(X,""),n=c.nodeName(e,"a")&&l==="click",s=c.event.special[l]||{};if((!s._default||s._default.call(d,a)===false)&&!n&&!(e&&e.nodeName&&c.noData[e.nodeName.toLowerCase()])){try{if(e[l]){if(k=e["on"+l])e["on"+l]=null;c.event.triggered=true;e[l]()}}catch(v){}if(k)e["on"+l]=k;c.event.triggered=false}}},handle:function(a){var b,d,e; +d=[];var f,h=c.makeArray(arguments);a=h[0]=c.event.fix(a||E.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;if(!b){e=a.type.split(".");a.type=e.shift();d=e.slice(0).sort();e=RegExp("(^|\\.)"+d.join("\\.(?:.*\\.)?")+"(\\.|$)")}a.namespace=a.namespace||d.join(".");f=c.data(this,this.nodeType?"events":"__events__");if(typeof f==="function")f=f.events;d=(f||{})[a.type];if(f&&d){d=d.slice(0);f=0;for(var k=d.length;f-1?c.map(a.options,function(e){return e.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},Z=function(a,b){var d=a.target,e,f;if(!(!ha.test(d.nodeName)||d.readOnly)){e=c.data(d,"_change_data");f=va(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",f);if(!(e===A||f===e))if(e!=null||f){a.type="change";a.liveFired= +A;return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:Z,beforedeactivate:Z,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return Z.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return Z.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,"_change_data",va(a))}},setup:function(){if(this.type=== +"file")return false;for(var a in V)c.event.add(this,a+".specialChange",V[a]);return ha.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return ha.test(this.nodeName)}};V=c.event.special.change.filters;V.focus=V.beforeactivate}u.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(e){e=c.event.fix(e);e.type=b;return c.event.trigger(e,null,e.target)}c.event.special[b]={setup:function(){sa[b]++===0&&u.addEventListener(a,d,true)},teardown:function(){--sa[b]=== +0&&u.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,e,f){if(typeof d==="object"){for(var h in d)this[b](h,e,d[h],f);return this}if(c.isFunction(e)||e===false){f=e;e=A}var k=b==="one"?c.proxy(f,function(n){c(this).unbind(n,k);return 
f.apply(this,arguments)}):f;if(d==="unload"&&b!=="one")this.one(d,e,f);else{h=0;for(var l=this.length;h0?this.bind(b,d,e):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});E.attachEvent&&!E.addEventListener&&c(E).bind("unload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}}); +(function(){function a(g,j,o,m,p,q){p=0;for(var t=m.length;p0){C=x;break}}x=x[g]}m[p]=C}}}var d=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,e=0,f=Object.prototype.toString,h=false,k=true;[0,0].sort(function(){k=false;return 0});var l=function(g,j,o,m){o=o||[];var p=j=j||u;if(j.nodeType!==1&&j.nodeType!==9)return[];if(!g||typeof g!=="string")return o;var q=[],t,x,C,P,N=true,R=l.isXML(j),Q=g,L;do{d.exec("");if(t=d.exec(Q)){Q=t[3];q.push(t[1]);if(t[2]){P=t[3]; +break}}}while(t);if(q.length>1&&s.exec(g))if(q.length===2&&n.relative[q[0]])x=M(q[0]+q[1],j);else for(x=n.relative[q[0]]?[j]:l(q.shift(),j);q.length;){g=q.shift();if(n.relative[g])g+=q.shift();x=M(g,x)}else{if(!m&&q.length>1&&j.nodeType===9&&!R&&n.match.ID.test(q[0])&&!n.match.ID.test(q[q.length-1])){t=l.find(q.shift(),j,R);j=t.expr?l.filter(t.expr,t.set)[0]:t.set[0]}if(j){t=m?{expr:q.pop(),set:D(m)}:l.find(q.pop(),q.length===1&&(q[0]==="~"||q[0]==="+")&&j.parentNode?j.parentNode:j,R);x=t.expr?l.filter(t.expr, +t.set):t.set;if(q.length>0)C=D(x);else N=false;for(;q.length;){t=L=q.pop();if(n.relative[L])t=q.pop();else L="";if(t==null)t=j;n.relative[L](C,t,R)}}else C=[]}C||(C=x);C||l.error(L||g);if(f.call(C)==="[object Array]")if(N)if(j&&j.nodeType===1)for(g=0;C[g]!=null;g++){if(C[g]&&(C[g]===true||C[g].nodeType===1&&l.contains(j,C[g])))o.push(x[g])}else for(g=0;C[g]!=null;g++)C[g]&&C[g].nodeType===1&&o.push(x[g]);else o.push.apply(o,C);else D(C,o);if(P){l(P,p,o,m);l.uniqueSort(o)}return o};l.uniqueSort=function(g){if(w){h= +k;g.sort(w);if(h)for(var j=1;j0};l.find=function(g,j,o){var m;if(!g)return[];for(var p=0,q=n.order.length;p":function(g,j){var o=typeof j==="string",m,p=0,q=g.length;if(o&&!/\W/.test(j))for(j=j.toLowerCase();p=0))o||m.push(t);else if(o)j[q]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},CHILD:function(g){if(g[1]==="nth"){var j=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=j[1]+(j[2]||1)-0;g[3]=j[3]-0}g[0]=e++;return g},ATTR:function(g,j,o, +m,p,q){j=g[1].replace(/\\/g,"");if(!q&&n.attrMap[j])g[1]=n.attrMap[j];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,j,o,m,p){if(g[1]==="not")if((d.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=l(g[3],null,null,j);else{g=l.filter(g[3],j,o,true^p);o||m.push.apply(m,g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled=== +true},checked:function(g){return g.checked===true},selected:function(g){return 
g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,j,o){return!!l(o[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"=== +g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},setFilters:{first:function(g,j){return j===0},last:function(g,j,o,m){return j===m.length-1},even:function(g,j){return j%2===0},odd:function(g,j){return j%2===1},lt:function(g,j,o){return jo[3]-0},nth:function(g,j,o){return o[3]- +0===j},eq:function(g,j,o){return o[3]-0===j}},filter:{PSEUDO:function(g,j,o,m){var p=j[1],q=n.filters[p];if(q)return q(g,o,j,m);else if(p==="contains")return(g.textContent||g.innerText||l.getText([g])||"").indexOf(j[3])>=0;else if(p==="not"){j=j[3];o=0;for(m=j.length;o=0}},ID:function(g,j){return g.nodeType===1&&g.getAttribute("id")===j},TAG:function(g,j){return j==="*"&&g.nodeType===1||g.nodeName.toLowerCase()=== +j},CLASS:function(g,j){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(j)>-1},ATTR:function(g,j){var o=j[1];o=n.attrHandle[o]?n.attrHandle[o](g):g[o]!=null?g[o]:g.getAttribute(o);var m=o+"",p=j[2],q=j[4];return o==null?p==="!=":p==="="?m===q:p==="*="?m.indexOf(q)>=0:p==="~="?(" "+m+" ").indexOf(q)>=0:!q?m&&o!==false:p==="!="?m!==q:p==="^="?m.indexOf(q)===0:p==="$="?m.substr(m.length-q.length)===q:p==="|="?m===q||m.substr(0,q.length+1)===q+"-":false},POS:function(g,j,o,m){var p=n.setFilters[j[2]]; +if(p)return p(g,o,j,m)}}},s=n.match.POS,v=function(g,j){return"\\"+(j-0+1)},B;for(B in n.match){n.match[B]=RegExp(n.match[B].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[B]=RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[B].source.replace(/\\(\d+)/g,v))}var D=function(g,j){g=Array.prototype.slice.call(g,0);if(j){j.push.apply(j,g);return j}return g};try{Array.prototype.slice.call(u.documentElement.childNodes,0)}catch(H){D=function(g,j){var o=j||[],m=0;if(f.call(g)==="[object Array]")Array.prototype.push.apply(o, +g);else if(typeof g.length==="number")for(var p=g.length;m";var o=u.documentElement;o.insertBefore(g,o.firstChild);if(u.getElementById(j)){n.find.ID=function(m,p,q){if(typeof p.getElementById!=="undefined"&&!q)return(p=p.getElementById(m[1]))?p.id===m[1]||typeof p.getAttributeNode!=="undefined"&&p.getAttributeNode("id").nodeValue===m[1]?[p]:A:[]};n.filter.ID=function(m,p){var q=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&q&&q.nodeValue===p}}o.removeChild(g); +o=g=null})();(function(){var g=u.createElement("div");g.appendChild(u.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(j,o){var m=o.getElementsByTagName(j[1]);if(j[1]==="*"){for(var p=[],q=0;m[q];q++)m[q].nodeType===1&&p.push(m[q]);m=p}return m};g.innerHTML="";if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(j){return j.getAttribute("href",2)};g=null})();u.querySelectorAll&& +function(){var g=l,j=u.createElement("div");j.innerHTML="

    ";if(!(j.querySelectorAll&&j.querySelectorAll(".TEST").length===0)){l=function(m,p,q,t){p=p||u;if(!t&&!l.isXML(p))if(p.nodeType===9)try{return D(p.querySelectorAll(m),q)}catch(x){}else if(p.nodeType===1&&p.nodeName.toLowerCase()!=="object"){var C=p.id,P=p.id="__sizzle__";try{return D(p.querySelectorAll("#"+P+" "+m),q)}catch(N){}finally{if(C)p.id=C;else p.removeAttribute("id")}}return g(m,p,q,t)};for(var o in g)l[o]=g[o]; +j=null}}();(function(){var g=u.documentElement,j=g.matchesSelector||g.mozMatchesSelector||g.webkitMatchesSelector||g.msMatchesSelector,o=false;try{j.call(u.documentElement,":sizzle")}catch(m){o=true}if(j)l.matchesSelector=function(p,q){try{if(o||!n.match.PSEUDO.test(q))return j.call(p,q)}catch(t){}return l(q,null,null,[p]).length>0}})();(function(){var g=u.createElement("div");g.innerHTML="
    ";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length=== +0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(j,o,m){if(typeof o.getElementsByClassName!=="undefined"&&!m)return o.getElementsByClassName(j[1])};g=null}}})();l.contains=u.documentElement.contains?function(g,j){return g!==j&&(g.contains?g.contains(j):true)}:function(g,j){return!!(g.compareDocumentPosition(j)&16)};l.isXML=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false};var M=function(g, +j){for(var o=[],m="",p,q=j.nodeType?[j]:j;p=n.match.PSEUDO.exec(g);){m+=p[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;p=0;for(var t=q.length;p0)for(var h=d;h0},closest:function(a, +b){var d=[],e,f,h=this[0];if(c.isArray(a)){var k={},l,n=1;if(h&&a.length){e=0;for(f=a.length;e-1:c(h).is(e))d.push({selector:l,elem:h,level:n})}h=h.parentNode;n++}}return d}k=$a.test(a)?c(a,b||this.context):null;e=0;for(f=this.length;e-1:c.find.matchesSelector(h,a)){d.push(h);break}else{h=h.parentNode;if(!h|| +!h.ownerDocument||h===b)break}d=d.length>1?c.unique(d):d;return this.pushStack(d,"closest",a)},index:function(a){if(!a||typeof a==="string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var d=typeof a==="string"?c(a,b||this.context):c.makeArray(a),e=c.merge(this.get(),d);return this.pushStack(!d[0]||!d[0].parentNode||d[0].parentNode.nodeType===11||!e[0]||!e[0].parentNode||e[0].parentNode.nodeType===11?e:c.unique(e))},andSelf:function(){return this.add(this.prevObject)}}); +c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling", +d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,e){var f=c.map(this,b,d);Wa.test(a)||(e=d);if(e&&typeof e==="string")f=c.filter(e,f);f=this.length>1?c.unique(f):f;if((this.length>1||Ya.test(e))&&Xa.test(a))f=f.reverse();return this.pushStack(f,a,Za.call(arguments).join(","))}}); +c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return b.length===1?c.find.matchesSelector(b[0],a)?[b[0]]:[]:c.find.matches(a,b)},dir:function(a,b,d){var e=[];for(a=a[b];a&&a.nodeType!==9&&(d===A||a.nodeType!==1||!c(a).is(d));){a.nodeType===1&&e.push(a);a=a[b]}return e},nth:function(a,b,d){b=b||1;for(var e=0;a;a=a[d])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var xa=/ jQuery\d+="(?:\d+|null)"/g, +$=/^\s+/,ya=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,za=/<([\w:]+)/,ab=/\s]+\/)>/g,O={option:[1,""],legend:[1,"
    ","
    "],thead:[1,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],col:[2,"","
    "], +area:[1,"",""],_default:[0,"",""]};O.optgroup=O.option;O.tbody=O.tfoot=O.colgroup=O.caption=O.thead;O.th=O.td;if(!c.support.htmlSerialize)O._default=[1,"div
    ","
    "];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==A)return this.empty().append((this[0]&&this[0].ownerDocument||u).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this, +d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})}, +unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a= +c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,e;(e=this[d])!=null;d++)if(!a||c.filter(a,[e]).length){if(!b&&e.nodeType===1){c.cleanData(e.getElementsByTagName("*")); +c.cleanData([e])}e.parentNode&&e.parentNode.removeChild(e)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,e=this.ownerDocument;if(!d){d=e.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(xa,"").replace(cb,'="$1">').replace($, +"")],e)[0]}else return this.cloneNode(true)});if(a===true){la(this,b);la(this.find("*"),b.find("*"))}return b},html:function(a){if(a===A)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(xa,""):null;else if(typeof a==="string"&&!Aa.test(a)&&(c.support.leadingWhitespace||!$.test(a))&&!O[(za.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ya,"<$1>");try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?l.cloneNode(true):l)}k.length&&c.each(k,Ka)}return this}});c.buildFragment=function(a,b,d){var e,f,h;b=b&&b[0]?b[0].ownerDocument||b[0]:u;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===u&&!Aa.test(a[0])&&(c.support.checkClone|| +!Ba.test(a[0]))){f=true;if(h=c.fragments[a[0]])if(h!==1)e=h}if(!e){e=b.createDocumentFragment();c.clean(a,b,e,d)}if(f)c.fragments[a[0]]=h?e:1;return{fragment:e,cacheable:f}};c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var e=[];d=c(d);var 
f=this.length===1&&this[0].parentNode;if(f&&f.nodeType===11&&f.childNodes.length===1&&d.length===1){d[b](this[0]);return this}else{f=0;for(var h= +d.length;f0?this.clone(true):this).get();c(d[f])[b](k);e=e.concat(k)}return this.pushStack(e,a,d.selector)}}});c.extend({clean:function(a,b,d,e){b=b||u;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||u;for(var f=[],h=0,k;(k=a[h])!=null;h++){if(typeof k==="number")k+="";if(k){if(typeof k==="string"&&!bb.test(k))k=b.createTextNode(k);else if(typeof k==="string"){k=k.replace(ya,"<$1>");var l=(za.exec(k)||["",""])[1].toLowerCase(),n=O[l]||O._default, +s=n[0],v=b.createElement("div");for(v.innerHTML=n[1]+k+n[2];s--;)v=v.lastChild;if(!c.support.tbody){s=ab.test(k);l=l==="table"&&!s?v.firstChild&&v.firstChild.childNodes:n[1]===""&&!s?v.childNodes:[];for(n=l.length-1;n>=0;--n)c.nodeName(l[n],"tbody")&&!l[n].childNodes.length&&l[n].parentNode.removeChild(l[n])}!c.support.leadingWhitespace&&$.test(k)&&v.insertBefore(b.createTextNode($.exec(k)[0]),v.firstChild);k=v.childNodes}if(k.nodeType)f.push(k);else f=c.merge(f,k)}}if(d)for(h=0;f[h];h++)if(e&& +c.nodeName(f[h],"script")&&(!f[h].type||f[h].type.toLowerCase()==="text/javascript"))e.push(f[h].parentNode?f[h].parentNode.removeChild(f[h]):f[h]);else{f[h].nodeType===1&&f.splice.apply(f,[h+1,0].concat(c.makeArray(f[h].getElementsByTagName("script"))));d.appendChild(f[h])}return f},cleanData:function(a){for(var b,d,e=c.cache,f=c.event.special,h=c.support.deleteExpando,k=0,l;(l=a[k])!=null;k++)if(!(l.nodeName&&c.noData[l.nodeName.toLowerCase()]))if(d=l[c.expando]){if((b=e[d])&&b.events)for(var n in b.events)f[n]? +c.event.remove(l,n):c.removeEvent(l,n,b.handle);if(h)delete l[c.expando];else l.removeAttribute&&l.removeAttribute(c.expando);delete e[d]}}});var Ca=/alpha\([^)]*\)/i,db=/opacity=([^)]*)/,eb=/-([a-z])/ig,fb=/([A-Z])/g,Da=/^-?\d+(?:px)?$/i,gb=/^-?\d/,hb={position:"absolute",visibility:"hidden",display:"block"},La=["Left","Right"],Ma=["Top","Bottom"],W,ib=u.defaultView&&u.defaultView.getComputedStyle,jb=function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){if(arguments.length===2&&b===A)return this; +return c.access(this,a,b,true,function(d,e,f){return f!==A?c.style(d,e,f):c.css(d,e)})};c.extend({cssHooks:{opacity:{get:function(a,b){if(b){var d=W(a,"opacity","opacity");return d===""?"1":d}else return a.style.opacity}}},cssNumber:{zIndex:true,fontWeight:true,opacity:true,zoom:true,lineHeight:true},cssProps:{"float":c.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,d,e){if(!(!a||a.nodeType===3||a.nodeType===8||!a.style)){var f,h=c.camelCase(b),k=a.style,l=c.cssHooks[h];b=c.cssProps[h]|| +h;if(d!==A){if(!(typeof d==="number"&&isNaN(d)||d==null)){if(typeof d==="number"&&!c.cssNumber[h])d+="px";if(!l||!("set"in l)||(d=l.set(a,d))!==A)try{k[b]=d}catch(n){}}}else{if(l&&"get"in l&&(f=l.get(a,false,e))!==A)return f;return k[b]}}},css:function(a,b,d){var e,f=c.camelCase(b),h=c.cssHooks[f];b=c.cssProps[f]||f;if(h&&"get"in h&&(e=h.get(a,true,d))!==A)return e;else if(W)return W(a,b,f)},swap:function(a,b,d){var e={},f;for(f in b){e[f]=a.style[f];a.style[f]=b[f]}d.call(a);for(f in b)a.style[f]= +e[f]},camelCase:function(a){return a.replace(eb,jb)}});c.curCSS=c.css;c.each(["height","width"],function(a,b){c.cssHooks[b]={get:function(d,e,f){var h;if(e){if(d.offsetWidth!==0)h=ma(d,b,f);else c.swap(d,hb,function(){h=ma(d,b,f)});return h+"px"}},set:function(d,e){if(Da.test(e)){e=parseFloat(e);if(e>=0)return e+"px"}else return 
e}}});if(!c.support.opacity)c.cssHooks.opacity={get:function(a,b){return db.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"": +b?"1":""},set:function(a,b){var d=a.style;d.zoom=1;var e=c.isNaN(b)?"":"alpha(opacity="+b*100+")",f=d.filter||"";d.filter=Ca.test(f)?f.replace(Ca,e):d.filter+" "+e}};if(ib)W=function(a,b,d){var e;d=d.replace(fb,"-$1").toLowerCase();if(!(b=a.ownerDocument.defaultView))return A;if(b=b.getComputedStyle(a,null)){e=b.getPropertyValue(d);if(e===""&&!c.contains(a.ownerDocument.documentElement,a))e=c.style(a,d)}return e};else if(u.documentElement.currentStyle)W=function(a,b){var d,e,f=a.currentStyle&&a.currentStyle[b], +h=a.style;if(!Da.test(f)&&gb.test(f)){d=h.left;e=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;h.left=b==="fontSize"?"1em":f||0;f=h.pixelLeft+"px";h.left=d;a.runtimeStyle.left=e}return f};if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=a.offsetHeight;return a.offsetWidth===0&&b===0||!c.support.reliableHiddenOffsets&&(a.style.display||c.css(a,"display"))==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var kb=c.now(),lb=/)<[^<]*)*<\/script>/gi, +mb=/^(?:select|textarea)/i,nb=/^(?:color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ob=/^(?:GET|HEAD|DELETE)$/,Na=/\[\]$/,T=/\=\?(&|$)/,ia=/\?/,pb=/([?&])_=[^&]*/,qb=/^(\w+:)?\/\/([^\/?#]+)/,rb=/%20/g,sb=/#.*$/,Ea=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!=="string"&&Ea)return Ea.apply(this,arguments);else if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var f=a.slice(e,a.length);a=a.slice(0,e)}e="GET";if(b)if(c.isFunction(b)){d= +b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);e="POST"}var h=this;c.ajax({url:a,type:e,dataType:"html",data:b,complete:function(k,l){if(l==="success"||l==="notmodified")h.html(f?c("
    ").append(k.responseText.replace(lb,"")).find(f):k.responseText);d&&h.each(d,[k.responseText,l,k])}});return this},serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&& +!this.disabled&&(this.checked||mb.test(this.nodeName)||nb.test(this.type))}).map(function(a,b){var d=c(this).val();return d==null?null:c.isArray(d)?c.map(d,function(e){return{name:b.name,value:e}}):{name:b.name,value:d}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:e})}, +getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:e})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:function(){return new E.XMLHttpRequest},accepts:{xml:"application/xml, text/xml",html:"text/html", +script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},ajax:function(a){var b=c.extend(true,{},c.ajaxSettings,a),d,e,f,h=b.type.toUpperCase(),k=ob.test(h);b.url=b.url.replace(sb,"");b.context=a&&a.context!=null?a.context:b;if(b.data&&b.processData&&typeof b.data!=="string")b.data=c.param(b.data,b.traditional);if(b.dataType==="jsonp"){if(h==="GET")T.test(b.url)||(b.url+=(ia.test(b.url)?"&":"?")+(b.jsonp||"callback")+"=?");else if(!b.data|| +!T.test(b.data))b.data=(b.data?b.data+"&":"")+(b.jsonp||"callback")+"=?";b.dataType="json"}if(b.dataType==="json"&&(b.data&&T.test(b.data)||T.test(b.url))){d=b.jsonpCallback||"jsonp"+kb++;if(b.data)b.data=(b.data+"").replace(T,"="+d+"$1");b.url=b.url.replace(T,"="+d+"$1");b.dataType="script";var l=E[d];E[d]=function(m){f=m;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);if(c.isFunction(l))l(m);else{E[d]=A;try{delete E[d]}catch(p){}}v&&v.removeChild(B)}}if(b.dataType==="script"&&b.cache===null)b.cache= +false;if(b.cache===false&&h==="GET"){var n=c.now(),s=b.url.replace(pb,"$1_="+n);b.url=s+(s===b.url?(ia.test(b.url)?"&":"?")+"_="+n:"")}if(b.data&&h==="GET")b.url+=(ia.test(b.url)?"&":"?")+b.data;b.global&&c.active++===0&&c.event.trigger("ajaxStart");n=(n=qb.exec(b.url))&&(n[1]&&n[1]!==location.protocol||n[2]!==location.host);if(b.dataType==="script"&&h==="GET"&&n){var v=u.getElementsByTagName("head")[0]||u.documentElement,B=u.createElement("script");if(b.scriptCharset)B.charset=b.scriptCharset;B.src= +b.url;if(!d){var D=false;B.onload=B.onreadystatechange=function(){if(!D&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){D=true;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);B.onload=B.onreadystatechange=null;v&&B.parentNode&&v.removeChild(B)}}}v.insertBefore(B,v.firstChild);return A}var H=false,w=b.xhr();if(w){b.username?w.open(h,b.url,b.async,b.username,b.password):w.open(h,b.url,b.async);try{if(b.data!=null&&!k||a&&a.contentType)w.setRequestHeader("Content-Type", 
+b.contentType);if(b.ifModified){c.lastModified[b.url]&&w.setRequestHeader("If-Modified-Since",c.lastModified[b.url]);c.etag[b.url]&&w.setRequestHeader("If-None-Match",c.etag[b.url])}n||w.setRequestHeader("X-Requested-With","XMLHttpRequest");w.setRequestHeader("Accept",b.dataType&&b.accepts[b.dataType]?b.accepts[b.dataType]+", */*; q=0.01":b.accepts._default)}catch(G){}if(b.beforeSend&&b.beforeSend.call(b.context,w,b)===false){b.global&&c.active--===1&&c.event.trigger("ajaxStop");w.abort();return false}b.global&& +c.triggerGlobal(b,"ajaxSend",[w,b]);var M=w.onreadystatechange=function(m){if(!w||w.readyState===0||m==="abort"){H||c.handleComplete(b,w,e,f);H=true;if(w)w.onreadystatechange=c.noop}else if(!H&&w&&(w.readyState===4||m==="timeout")){H=true;w.onreadystatechange=c.noop;e=m==="timeout"?"timeout":!c.httpSuccess(w)?"error":b.ifModified&&c.httpNotModified(w,b.url)?"notmodified":"success";var p;if(e==="success")try{f=c.httpData(w,b.dataType,b)}catch(q){e="parsererror";p=q}if(e==="success"||e==="notmodified")d|| +c.handleSuccess(b,w,e,f);else c.handleError(b,w,e,p);d||c.handleComplete(b,w,e,f);m==="timeout"&&w.abort();if(b.async)w=null}};try{var g=w.abort;w.abort=function(){w&&g.call&&g.call(w);M("abort")}}catch(j){}b.async&&b.timeout>0&&setTimeout(function(){w&&!H&&M("timeout")},b.timeout);try{w.send(k||b.data==null?null:b.data)}catch(o){c.handleError(b,w,null,o);c.handleComplete(b,w,e,f)}b.async||M();return w}},param:function(a,b){var d=[],e=function(h,k){k=c.isFunction(k)?k():k;d[d.length]=encodeURIComponent(h)+ +"="+encodeURIComponent(k)};if(b===A)b=c.ajaxSettings.traditional;if(c.isArray(a)||a.jquery)c.each(a,function(){e(this.name,this.value)});else for(var f in a)ca(f,a[f],b,e);return d.join("&").replace(rb,"+")}});c.extend({active:0,lastModified:{},etag:{},handleError:function(a,b,d,e){a.error&&a.error.call(a.context,b,d,e);a.global&&c.triggerGlobal(a,"ajaxError",[b,a,e])},handleSuccess:function(a,b,d,e){a.success&&a.success.call(a.context,e,d,b);a.global&&c.triggerGlobal(a,"ajaxSuccess",[b,a])},handleComplete:function(a, +b,d){a.complete&&a.complete.call(a.context,b,d);a.global&&c.triggerGlobal(a,"ajaxComplete",[b,a]);a.global&&c.active--===1&&c.event.trigger("ajaxStop")},triggerGlobal:function(a,b,d){(a.context&&a.context.url==null?c(a.context):c.event).trigger(b,d)},httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===1223}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),e=a.getResponseHeader("Etag"); +if(d)c.lastModified[b]=d;if(e)c.etag[b]=e;return a.status===304},httpData:function(a,b,d){var e=a.getResponseHeader("content-type")||"",f=b==="xml"||!b&&e.indexOf("xml")>=0;a=f?a.responseXML:a.responseText;f&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b==="json"||!b&&e.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&e.indexOf("javascript")>=0)c.globalEval(a);return a}});if(E.ActiveXObject)c.ajaxSettings.xhr= +function(){if(E.location.protocol!=="file:")try{return new E.XMLHttpRequest}catch(a){}try{return new E.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}};c.support.ajax=!!c.ajaxSettings.xhr();var 
da={},tb=/^(?:toggle|show|hide)$/,ub=/^([+\-]=)?([\d+.\-]+)(.*)$/,aa,na=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b,d){if(a||a===0)return this.animate(S("show",3),a,b,d);else{a= +0;for(b=this.length;a=0;e--)if(d[e].elem===this){b&&d[e](true);d.splice(e,1)}});b||this.dequeue();return this}});c.each({slideDown:S("show",1),slideUp:S("hide",1),slideToggle:S("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,e,f){return this.animate(b, +d,e,f)}});c.extend({speed:function(a,b,d){var e=a&&typeof a==="object"?c.extend({},a):{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};e.duration=c.fx.off?0:typeof e.duration==="number"?e.duration:e.duration in c.fx.speeds?c.fx.speeds[e.duration]:c.fx.speeds._default;e.old=e.complete;e.complete=function(){e.queue!==false&&c(this).dequeue();c.isFunction(e.old)&&e.old.call(this)};return e},easing:{linear:function(a,b,d,e){return d+e*a},swing:function(a,b,d,e){return(-Math.cos(a* +Math.PI)/2+0.5)*e+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||c.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];var a=parseFloat(c.css(this.elem,this.prop));return a&&a>-1E4?a:0},custom:function(a,b,d){function e(h){return f.step(h)} +this.startTime=c.now();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;this.pos=this.state=0;var f=this;a=c.fx;e.elem=this.elem;if(e()&&c.timers.push(e)&&!aa)aa=setInterval(a.tick,a.interval)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true; +this.custom(this.cur(),0)},step:function(a){var b=c.now(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var e in this.options.curAnim)if(this.options.curAnim[e]!==true)d=false;if(d){if(this.options.overflow!=null&&!c.support.shrinkWrapBlocks){var f=this.elem,h=this.options;c.each(["","X","Y"],function(l,n){f.style["overflow"+n]=h.overflow[l]})}this.options.hide&&c(this.elem).hide();if(this.options.hide|| +this.options.show)for(var k in this.options.curAnim)c.style(this.elem,k,this.options.orig[k]);this.options.complete.call(this.elem)}return false}else{a=b-this.startTime;this.state=a/this.options.duration;b=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||b](this.state,a,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a= +c.timers,b=0;b-1;e={};var s={};if(n)s=f.position();k=n?s.top:parseInt(k,10)||0;l=n?s.left:parseInt(l,10)||0;if(c.isFunction(b))b=b.call(a,d,h);if(b.top!=null)e.top=b.top-h.top+k;if(b.left!=null)e.left=b.left-h.left+l;"using"in b?b.using.call(a, +e):f.css(e)}};c.fn.extend({position:function(){if(!this[0])return null;var 
a=this[0],b=this.offsetParent(),d=this.offset(),e=Fa.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.css(a,"marginTop"))||0;d.left-=parseFloat(c.css(a,"marginLeft"))||0;e.top+=parseFloat(c.css(b[0],"borderTopWidth"))||0;e.left+=parseFloat(c.css(b[0],"borderLeftWidth"))||0;return{top:d.top-e.top,left:d.left-e.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||u.body;a&&!Fa.test(a.nodeName)&& +c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(e){var f=this[0],h;if(!f)return null;if(e!==A)return this.each(function(){if(h=ea(this))h.scrollTo(!a?e:c(h).scrollLeft(),a?e:c(h).scrollTop());else this[d]=e});else return(h=ea(f))?"pageXOffset"in h?h[a?"pageYOffset":"pageXOffset"]:c.support.boxModel&&h.document.documentElement[d]||h.document.body[d]:f[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase(); +c.fn["inner"+b]=function(){return this[0]?parseFloat(c.css(this[0],d,"padding")):null};c.fn["outer"+b]=function(e){return this[0]?parseFloat(c.css(this[0],d,e?"margin":"border")):null};c.fn[d]=function(e){var f=this[0];if(!f)return e==null?null:this;if(c.isFunction(e))return this.each(function(h){var k=c(this);k[d](e.call(this,h,k[d]()))});return c.isWindow(f)?f.document.compatMode==="CSS1Compat"&&f.document.documentElement["client"+b]||f.document.body["client"+b]:f.nodeType===9?Math.max(f.documentElement["client"+ +b],f.body["scroll"+b],f.documentElement["scroll"+b],f.body["offset"+b],f.documentElement["offset"+b]):e===A?parseFloat(c.css(f,d)):this.css(d,typeof e==="string"?e:e+"px")}})})(window); diff --git a/vendor/pygments/tests/cover/jquery.tablesorter.min.js b/vendor/pygments/tests/cover/jquery.tablesorter.min.js new file mode 100644 index 0000000..64c7007 --- /dev/null +++ b/vendor/pygments/tests/cover/jquery.tablesorter.min.js @@ -0,0 +1,2 @@ + +(function($){$.extend({tablesorter:new function(){var parsers=[],widgets=[];this.defaults={cssHeader:"header",cssAsc:"headerSortUp",cssDesc:"headerSortDown",sortInitialOrder:"asc",sortMultiSortKey:"shiftKey",sortForce:null,sortAppend:null,textExtraction:"simple",parsers:{},widgets:[],widgetZebra:{css:["even","odd"]},headers:{},widthFixed:false,cancelSelection:true,sortList:[],headerList:[],dateFormat:"us",decimal:'.',debug:false};function benchmark(s,d){log(s+","+(new Date().getTime()-d.getTime())+"ms");}this.benchmark=benchmark;function log(s){if(typeof console!="undefined"&&typeof console.debug!="undefined"){console.log(s);}else{alert(s);}}function buildParserCache(table,$headers){if(table.config.debug){var parsersDebug="";}var rows=table.tBodies[0].rows;if(table.tBodies[0].rows[0]){var list=[],cells=rows[0].cells,l=cells.length;for(var i=0;i1){arr=arr.concat(checkCellColSpan(table,headerArr,row++));}else{if(table.tHead.length==1||(cell.rowSpan>1||!r[row+1])){arr.push(cell);}}}return arr;};function checkHeaderMetadata(cell){if(($.metadata)&&($(cell).metadata().sorter===false)){return true;};return false;}function checkHeaderOptions(table,i){if((table.config.headers[i])&&(table.config.headers[i].sorter===false)){return true;};return false;}function applyWidget(table){var c=table.config.widgets;var l=c.length;for(var i=0;i');$("tr:first td",table.tBodies[0]).each(function(){colgroup.append($('
    ').css('width',$(this).width()));});$(table).prepend(colgroup);};}function updateHeaderSortCount(table,sortList){var c=table.config,l=sortList.length;for(var i=0;ib)?1:0));};function sortTextDesc(a,b){return((ba)?1:0));};function sortNumeric(a,b){return a-b;};function sortNumericDesc(a,b){return b-a;};function getCachedSortType(parsers,i){return parsers[i].type;};this.construct=function(settings){return this.each(function(){if(!this.tHead||!this.tBodies)return;var $this,$document,$headers,cache,config,shiftDown=0,sortOrder;this.config={};config=$.extend(this.config,$.tablesorter.defaults,settings);$this=$(this);$headers=buildHeaders(this);this.config.parsers=buildParserCache(this,$headers);cache=buildCache(this);var sortCSS=[config.cssDesc,config.cssAsc];fixColumnWidth(this);$headers.click(function(e){$this.trigger("sortStart");var totalRows=($this[0].tBodies[0]&&$this[0].tBodies[0].rows.length)||0;if(!this.sortDisabled&&totalRows>0){var $cell=$(this);var i=this.column;this.order=this.count++%2;if(!e[config.sortMultiSortKey]){config.sortList=[];if(config.sortForce!=null){var a=config.sortForce;for(var j=0;j0){$this.trigger("sorton",[config.sortList]);}applyWidget(this);});};this.addParser=function(parser){var l=parsers.length,a=true;for(var i=0;iD6{MWQjEnx?oJHr&dIz4a@dl*-CY>| zgW!U_%O?XxI14-?iy0WWg+Z8+Vb&Z8pdfpRr>`sfZ8lau9@bl*u7(4JIy_w*Lo808 zo$Afkpupp@{Fv_bobxQ#pD>iB3oNa1d9=pM`D99*FvsH{pKJfpB1-4UD;=6}F=+gKX>Gx9b=!>PY1_pdfo@{(boFyt=akR{ E04sl8JOBUy literal 0 HcmV?d00001 diff --git a/vendor/pygments/tests/cover/keybd_open.png b/vendor/pygments/tests/cover/keybd_open.png new file mode 100644 index 0000000000000000000000000000000000000000..a77961db5424cfff43a63d399972ee85fc0dfdb1 GIT binary patch literal 267 zcmeAS@N?(olHy`uVBq!ia0vp^%0SG+!3HE>D6{MWQjEnx?oJHr&dIz4a@dl*-CY>| zgW!U_%O?XxI14-?iy0WWg+Z8+Vb&Z8pdfpRr>`sfZ8lau9%kc-1xY}mZci7-5R21$ zCp+>TR^VYdE*ieC^FGV{Cyeh_21=Rotz3KNq=!VmdK II;Vst00jnQH~;_u literal 0 HcmV?d00001 diff --git a/vendor/pygments/tests/cover/status.dat b/vendor/pygments/tests/cover/status.dat new file mode 100644 index 0000000..74df6c4 --- /dev/null +++ b/vendor/pygments/tests/cover/status.dat @@ -0,0 +1,5179 @@ +(dp1 +S'files' +p2 +(dp3 +S'pygments_lexers_textedit' +p4 +(dp5 +S'index' +p6 +(dp7 +S'html_filename' +p8 +S'pygments_lexers_textedit.html' +p9 +sS'name' +p10 +S'pygments.lexers.textedit' +p11 +sS'nums' +p12 +ccopy_reg +_reconstructor +p13 +(ccoverage.results +Numbers +p14 +c__builtin__ +object +p15 +NtRp16 +(dp17 +S'n_files' +p18 +I1 +sS'n_branches' +p19 +I0 +sS'n_statements' +p20 +I44 +sS'n_excluded' +p21 +I0 +sS'n_partial_branches' +p22 +I0 +sS'n_missing' +p23 +I1 +sS'n_missing_branches' +p24 +I0 +sbssS'hash' +p25 +S'\xac\xd5/\x12\x80xh\xce\xb7\x87\xb9\xa5~g\x95&' +p26 +ssS'pygments_lexers_robotframework' +p27 +(dp28 +g6 +(dp29 +g8 +S'pygments_lexers_robotframework.html' +p30 +sg10 +S'pygments.lexers.robotframework' +p31 +sg12 +g13 +(g14 +g15 +NtRp32 +(dp33 +g18 +I1 +sg19 +I0 +sg20 +I395 +sg21 +I0 +sg22 +I0 +sg23 +I40 +sg24 +I0 +sbssg25 +S'\xb5\x90\x07_\x96m\xab\x02\xc2\xd5\xbdB\x16@\xd5\xf7' +p34 +ssS'pygments_lexers_urbi' +p35 +(dp36 +g6 +(dp37 +g8 +S'pygments_lexers_urbi.html' +p38 +sg10 +S'pygments.lexers.urbi' +p39 +sg12 +g13 +(g14 +g15 +NtRp40 +(dp41 +g18 +I1 +sg19 +I0 +sg20 +I27 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"R\xd2\xcbD\xbe\x19}-\xcb'\xb5\x18\xf0\xe5\xcb\xe1" +p42 +ssS'pygments_filters' +p43 +(dp44 +g6 +(dp45 +S'html_filename' +p46 +S'pygments_filters.html' +p47 +sS'name' +p48 +S'pygments.filters' +p49 +sS'nums' +p50 +g13 +(g14 
+g15 +NtRp51 +(dp52 +g18 +I1 +sg19 +I0 +sg20 +I159 +sg21 +I0 +sg22 +I0 +sg23 +I124 +sg24 +I0 +sbssg25 +S'\xd7\xb0\x07\xdd\x91\xb4' +p325 +ssS'pygments_lexers_haxe' +p326 +(dp327 +g6 +(dp328 +g8 +S'pygments_lexers_haxe.html' +p329 +sg10 +S'pygments.lexers.haxe' +p330 +sg12 +g13 +(g14 +g15 +NtRp331 +(dp332 +g18 +I1 +sg19 +I0 +sg20 +I42 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\x10\x1a\xfcG\nT(Z\xfb\x1bs\x1f:\xbdB\x17' +p333 +ssS'pygments_lexers_factor' +p334 +(dp335 +g6 +(dp336 +g8 +S'pygments_lexers_factor.html' +p337 +sg10 +S'pygments.lexers.factor' +p338 +sg12 +g13 +(g14 +g15 +NtRp339 +(dp340 +g18 +I1 +sg19 +I0 +sg20 +I23 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'{\xa5`lW\x02\xca\xaf\x8b\x8e\xf8\xfe\xa2@\x87G' +p341 +ssS'pygments_formatters_bbcode' +p342 +(dp343 +g6 +(dp344 +g8 +S'pygments_formatters_bbcode.html' +p345 +sg10 +S'pygments.formatters.bbcode' +p346 +sg12 +g13 +(g14 +g15 +NtRp347 +(dp348 +g18 +I1 +sg19 +I0 +sg20 +I56 +sg21 +I0 +sg22 +I0 +sg23 +I8 +sg24 +I0 +sbssg25 +S'\xdc\r\xa1\xf7-tGa\xaf\x82\xa1~\x18\xb5q\x14' +p349 +ssS'pygments_styles_fruity' +p350 +(dp351 +g6 +(dp352 +g8 +S'pygments_styles_fruity.html' +p353 +sg10 +S'pygments.styles.fruity' +p354 +sg12 +g13 +(g14 +g15 +NtRp355 +(dp356 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"\xa9'\xdf\xb5\x92\xeeI\r\xd8-Ut\xd0m\xa7\xd7" +p357 +ssS'pygments_lexers_dotnet' +p358 +(dp359 +g6 +(dp360 +g8 +S'pygments_lexers_dotnet.html' +p361 +sg10 +S'pygments.lexers.dotnet' +p362 +sg12 +g13 +(g14 +g15 +NtRp363 +(dp364 +g18 +I1 +sg19 +I0 +sg20 +I103 +sg21 +I0 +sg22 +I0 +sg23 +I5 +sg24 +I0 +sbssg25 +S"\xee\xed\x1b\x10'\x9c2W\xfd\x11*\xb4M\xf8\xacp" +p365 +ssS'pygments_lexers__stan_builtins' +p366 +(dp367 +g6 +(dp368 +g8 +S'pygments_lexers__stan_builtins.html' +p369 +sg10 +S'pygments.lexers._stan_builtins' +p370 +sg12 +g13 +(g14 +g15 +NtRp371 +(dp372 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"\xe9\xf0^3>/O\xe8\x95\xd6R'\x84\x9b%R" +p373 +ssS'pygments_lexers__scilab_builtins' +p374 +(dp375 +g6 +(dp376 +g8 +S'pygments_lexers__scilab_builtins.html' +p377 +sg10 +S'pygments.lexers._scilab_builtins' +p378 +sg12 +g13 +(g14 +g15 +NtRp379 +(dp380 +g18 +I1 +sg19 +I0 +sg20 +I5 +sg21 +I28 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"x'N\x91L\\\xb0-\x1es\xae\xa3*.\xf7\x96" +p381 +ssS'pygments_lexers_modeling' +p382 +(dp383 +g6 +(dp384 +g8 +S'pygments_lexers_modeling.html' +p385 +sg10 +S'pygments.lexers.modeling' +p386 +sg12 +g13 +(g14 +g15 +NtRp387 +(dp388 +g18 +I1 +sg19 +I0 +sg20 +I51 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\x05Rw\x8c\xbb\xe9\x01\xfe6^H\x83F\xedS\xf4' +p389 +ssS'pygments_lexers_css' +p390 +(dp391 +g6 +(dp392 +g8 +S'pygments_lexers_css.html' +p393 +sg10 +S'pygments.lexers.css' +p394 +sg12 +g13 +(g14 +g15 +NtRp395 +(dp396 +g18 +I1 +sg19 +I0 +sg20 +I55 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\xdf\xcd\xed&2\x176v\xdd\x07\xca\t\xfe)\x08\xd6' +p397 +ssS'pygments_formatter' +p398 +(dp399 +g6 +(dp400 +g46 +S'pygments_formatter.html' +p401 +sg48 +S'pygments.formatter' +p402 +sg50 +g13 +(g14 +g15 +NtRp403 +(dp404 +g18 +I1 +sg19 +I0 +sg20 +I29 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'Q\n7\xa3DZ\x80{\x8c&\xeau\xec\xa10?' 
+p405 +ssS'pygments_lexers_nimrod' +p406 +(dp407 +g6 +(dp408 +g8 +S'pygments_lexers_nimrod.html' +p409 +sg10 +S'pygments.lexers.nimrod' +p410 +sg12 +g13 +(g14 +g15 +NtRp411 +(dp412 +g18 +I1 +sg19 +I0 +sg20 +I25 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x07|\t\tcl\xe1\x14\xac\xb8a\xb4;\xea\x9bX' +p413 +ssS'pygments_lexers_rust' +p414 +(dp415 +g6 +(dp416 +g8 +S'pygments_lexers_rust.html' +p417 +sg10 +S'pygments.lexers.rust' +p418 +sg12 +g13 +(g14 +g15 +NtRp419 +(dp420 +g18 +I1 +sg19 +I0 +sg20 +I10 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xbc<^\xbb\x88\\\x18\xbfg\xcc\x9a\x11\x89I8\xc2' +p421 +ssS'pygments_styles_xcode' +p422 +(dp423 +g6 +(dp424 +g8 +S'pygments_styles_xcode.html' +p425 +sg10 +S'pygments.styles.xcode' +p426 +sg12 +g13 +(g14 +g15 +NtRp427 +(dp428 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'K\xa1l0\xaf\x05+\x16#O)u\xed\xbf\xda\x9b' +p429 +ssS'pygments_lexers_markup' +p430 +(dp431 +g6 +(dp432 +g8 +S'pygments_lexers_markup.html' +p433 +sg10 +S'pygments.lexers.markup' +p434 +sg12 +g13 +(g14 +g15 +NtRp435 +(dp436 +g18 +I1 +sg19 +I0 +sg20 +I130 +sg21 +I0 +sg22 +I0 +sg23 +I9 +sg24 +I0 +sbssg25 +S'\x93<\xadV4\x7fZ\xa4}\x99=\xe9\xd2\xfa\x950' +p437 +ssS'pygments_styles_vs' +p438 +(dp439 +g6 +(dp440 +g8 +S'pygments_styles_vs.html' +p441 +sg10 +S'pygments.styles.vs' +p442 +sg12 +g13 +(g14 +g15 +NtRp443 +(dp444 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'*\xa8\x1dS"\xf2&\x12\xe4"t\xd8!\xb56\xf9' +p445 +ssS'pygments_lexers_prolog' +p446 +(dp447 +g6 +(dp448 +g8 +S'pygments_lexers_prolog.html' +p449 +sg10 +S'pygments.lexers.prolog' +p450 +sg12 +g13 +(g14 +g15 +NtRp451 +(dp452 +g18 +I1 +sg19 +I0 +sg20 +I30 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'G\x99\x18\xdc\x1f\x9a\x1d\xee+\x8c\xbd\xc0\xa8\xcc\xfb\xec' +p453 +ssS'pygments_modeline' +p454 +(dp455 +g6 +(dp456 +g46 +S'pygments_modeline.html' +p457 +sg48 +S'pygments.modeline' +p458 +sg50 +g13 +(g14 +g15 +NtRp459 +(dp460 +g18 +I1 +sg19 +I0 +sg20 +I19 +sg21 +I0 +sg22 +I0 +sg23 +I13 +sg24 +I0 +sbssg25 +S'\t\x0c\xa0\x84Yn\xd63\xfa\xb2:\x1a\xb4\xe0\x8e8' +p461 +ssS'pygments_formatters_svg' +p462 +(dp463 +g6 +(dp464 +g8 +S'pygments_formatters_svg.html' +p465 +sg10 +S'pygments.formatters.svg' +p466 +sg12 +g13 +(g14 +g15 +NtRp467 +(dp468 +g18 +I1 +sg19 +I0 +sg20 +I70 +sg21 +I0 +sg22 +I0 +sg23 +I4 +sg24 +I0 +sbssg25 +S'\x04\xbd\xb1p"\xb3\xebeJ*\xe0\xcd<\x94\xddb' +p469 +ssS'pygments_styles_colorful' +p470 +(dp471 +g6 +(dp472 +g8 +S'pygments_styles_colorful.html' +p473 +sg10 +S'pygments.styles.colorful' +p474 +sg12 +g13 +(g14 +g15 +NtRp475 +(dp476 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xd7\xcf\x17\x94\x10]{BL\xc3\x1f_\x9b7\xf4!' 
+p477 +ssS'pygments_scanner' +p478 +(dp479 +g6 +(dp480 +g8 +S'pygments_scanner.html' +p481 +sg10 +S'pygments.scanner' +p482 +sg12 +g13 +(g14 +g15 +NtRp483 +(dp484 +g18 +I1 +sg19 +I0 +sg20 +I41 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'\x07\x83\xf0\x96\xf4\x10c\xbd_\xa4\x98\x07\xca\t\xc0k' +p485 +ssS'pygments_styles_default' +p486 +(dp487 +g6 +(dp488 +g8 +S'pygments_styles_default.html' +p489 +sg10 +S'pygments.styles.default' +p490 +sg12 +g13 +(g14 +g15 +NtRp491 +(dp492 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'&i\xcdL\xbfT\xe0Z\xc8D\x0e\x85\x83\xd8\xd46' +p493 +ssS'pygments_lexers__php_builtins' +p494 +(dp495 +g6 +(dp496 +g8 +S'pygments_lexers__php_builtins.html' +p497 +sg10 +S'pygments.lexers._php_builtins' +p498 +sg12 +g13 +(g14 +g15 +NtRp499 +(dp500 +g18 +I1 +sg19 +I0 +sg20 +I3 +sg21 +I66 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xcc\x83C\xda\xe4\xeb\x9fe\x94`\x04h\x9b\xed\xbem' +p501 +ssS'pygments_lexers_graphics' +p502 +(dp503 +g6 +(dp504 +g8 +S'pygments_lexers_graphics.html' +p505 +sg10 +S'pygments.lexers.graphics' +p506 +sg12 +g13 +(g14 +g15 +NtRp507 +(dp508 +g18 +I1 +sg19 +I0 +sg20 +I52 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'VU\xf5\x88\xbb1\xf6\xb90\r\x05\xc0\xde\xa5\x0c\xfe' +p509 +ssS'pygments_lexers_scripting' +p510 +(dp511 +g6 +(dp512 +g8 +S'pygments_lexers_scripting.html' +p513 +sg10 +S'pygments.lexers.scripting' +p514 +sg12 +g13 +(g14 +g15 +NtRp515 +(dp516 +g18 +I1 +sg19 +I0 +sg20 +I135 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x03\x9e\x82\xdc\xfa\xcfi\xb8i\x8f\xe60W\xfe\x836' +p517 +ssS'pygments_lexers_ecl' +p518 +(dp519 +g6 +(dp520 +g8 +S'pygments_lexers_ecl.html' +p521 +sg10 +S'pygments.lexers.ecl' +p522 +sg12 +g13 +(g14 +g15 +NtRp523 +(dp524 +g18 +I1 +sg19 +I0 +sg20 +I12 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x98\x14\x8d\x11\xbd\xcf=\xd3\x19\xf6K\xa2\x96@#z' +p525 +ssS'pygments_lexers_algebra' +p526 +(dp527 +g6 +(dp528 +g8 +S'pygments_lexers_algebra.html' +p529 +sg10 +S'pygments.lexers.algebra' +p530 +sg12 +g13 +(g14 +g15 +NtRp531 +(dp532 +g18 +I1 +sg19 +I0 +sg20 +I25 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\x8c\xbc\x06^m\xa9Z\xf4u\x91\xf3\x06O\xc7X\xdf' +p533 +ssS'pygments_styles_rrt' +p534 +(dp535 +g6 +(dp536 +g8 +S'pygments_styles_rrt.html' +p537 +sg10 +S'pygments.styles.rrt' +p538 +sg12 +g13 +(g14 +g15 +NtRp539 +(dp540 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xc2\x1drQ^\x0b\x8a\xb1\xb7\xa2\x81\x19\x14\x87\xebD' +p541 +ssS'pygments_styles_paraiso_dark' +p542 +(dp543 +g6 +(dp544 +g8 +S'pygments_styles_paraiso_dark.html' +p545 +sg10 +S'pygments.styles.paraiso_dark' +p546 +sg12 +g13 +(g14 +g15 +NtRp547 +(dp548 +g18 +I1 +sg19 +I0 +sg20 +I22 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xd4\xd3\x7f\xe8[\xb0\xe8\xc3\xfeF3\x8fl\xbf\xe50' +p549 +ssS'pygments_lexers_jvm' +p550 +(dp551 +g6 +(dp552 +g8 +S'pygments_lexers_jvm.html' +p553 +sg10 +S'pygments.lexers.jvm' +p554 +sg12 +g13 +(g14 +g15 +NtRp555 +(dp556 +g18 +I1 +sg19 +I0 +sg20 +I150 +sg21 +I0 +sg22 +I0 +sg23 +I4 +sg24 +I0 +sbssg25 +S'`\xa6\xe4\xc9\xe61\xd7\x94\x852\xd8\x85\xb6\xfd\xac\n' +p557 +ssS'pygments_lexer' +p558 +(dp559 +g6 +(dp560 +g46 +S'pygments_lexer.html' +p561 +sg48 +S'pygments.lexer' +p562 +sg50 +g13 +(g14 +g15 +NtRp563 +(dp564 +g18 +I1 +sg19 +I0 +sg20 +I470 +sg21 +I0 +sg22 +I0 +sg23 +I239 +sg24 +I0 +sbssg25 +S'LR\x13H\x1aj\x1cT!7PbU\x85\xa8\x02' +p565 +ssS'pygments_lexers_asm' +p566 +(dp567 +g6 +(dp568 +g8 +S'pygments_lexers_asm.html' +p569 +sg10 
+S'pygments.lexers.asm' +p570 +sg12 +g13 +(g14 +g15 +NtRp571 +(dp572 +g18 +I1 +sg19 +I0 +sg20 +I94 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'\x89\x9e\xec\xc0&\xe5\x97\x9eG\x90\x93\x1d\x91\xa3\xda(' +p573 +ssS'pygments_console' +p574 +(dp575 +S'index' +p576 +(dp577 +S'html_filename' +p578 +S'pygments_console.html' +p579 +sS'name' +p580 +S'pygments.console' +p581 +sS'nums' +p582 +g13 +(g14 +g15 +NtRp583 +(dp584 +g18 +I1 +sg19 +I0 +sg20 +I42 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssS'hash' +p585 +S'b!s\x8d\xefc \xa3\x16\xc7\x1b\xeak\xd2c\xc4' +p586 +ssS'pygments_formatters_terminal256' +p587 +(dp588 +g6 +(dp589 +g8 +S'pygments_formatters_terminal256.html' +p590 +sg10 +S'pygments.formatters.terminal256' +p591 +sg12 +g13 +(g14 +g15 +NtRp592 +(dp593 +g18 +I1 +sg19 +I0 +sg20 +I133 +sg21 +I0 +sg22 +I0 +sg23 +I12 +sg24 +I0 +sbssg25 +S'\xaf\xc6Qu\x1e\xd5\xbaz\x89w/\x12V\xb9\x99\xd7' +p594 +ssS'pygments_lexers_parsers' +p595 +(dp596 +g6 +(dp597 +g8 +S'pygments_lexers_parsers.html' +p598 +sg10 +S'pygments.lexers.parsers' +p599 +sg12 +g13 +(g14 +g15 +NtRp600 +(dp601 +g18 +I1 +sg19 +I0 +sg20 +I164 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"'%\xe3\xea\x91\xec\xf6;N\xb56?%\xf3\x07\xfd" +p602 +ssS'pygments_formatters__mapping' +p603 +(dp604 +g6 +(dp605 +g8 +S'pygments_formatters__mapping.html' +p606 +sg10 +S'pygments.formatters._mapping' +p607 +sg12 +g13 +(g14 +g15 +NtRp608 +(dp609 +g18 +I1 +sg19 +I0 +sg20 +I3 +sg21 +I26 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xd2\x8fQ\x97\xe8\x7f\xbf\x87<\xc6\x18\x03\xeb\xb5Lt' +p610 +ssS'pygments_lexers_ml' +p611 +(dp612 +g6 +(dp613 +g8 +S'pygments_lexers_ml.html' +p614 +sg10 +S'pygments.lexers.ml' +p615 +sg12 +g13 +(g14 +g15 +NtRp616 +(dp617 +g18 +I1 +sg19 +I0 +sg20 +I62 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'\xd9P`\xca\xd0\x88,\x12~e\x82\xe40\x97\x0f\x1d' +p618 +ssS'pygments_lexers_basic' +p619 +(dp620 +g6 +(dp621 +g8 +S'pygments_lexers_basic.html' +p622 +sg10 +S'pygments.lexers.basic' +p623 +sg12 +g13 +(g14 +g15 +NtRp624 +(dp625 +g18 +I1 +sg19 +I0 +sg20 +I66 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'Q\x17\ntj\x97\x9bK\xbd\xce\xff\xa2\xfck\xa8\xa8' +p626 +ssS'pygments_style' +p627 +(dp628 +g6 +(dp629 +g46 +S'pygments_style.html' +p630 +sg48 +S'pygments.style' +p631 +sg50 +g13 +(g14 +g15 +NtRp632 +(dp633 +g18 +I1 +sg19 +I0 +sg20 +I76 +sg21 +I0 +sg22 +I0 +sg23 +I14 +sg24 +I0 +sbssg25 +S'b]\x18\xde\xe2\x8b\xfd\xdd\x04\xce\xaa3\xc1\x8d\xa0\x15' +p634 +ssS'pygments_styles_bw' +p635 +(dp636 +g6 +(dp637 +g8 +S'pygments_styles_bw.html' +p638 +sg10 +S'pygments.styles.bw' +p639 +sg12 +g13 +(g14 +g15 +NtRp640 +(dp641 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xed\xf4\x113$lj\x82\xaf_\xdfC\\\xca\xc4[' +p642 +ssS'pygments' +p643 +(dp644 +g576 +(dp645 +g46 +S'pygments.html' +p646 +sg48 +S'pygments' +p647 +sg50 +g13 +(g14 +g15 +NtRp648 +(dp649 +g18 +I1 +sg19 +I0 +sg20 +I26 +sg21 +I3 +sg22 +I0 +sg23 +I17 +sg24 +I0 +sbssg585 +S'\xf7R\xe4\x12\x0b\xe4\x10\x07\xa5\x93\xe2j\xbd\x04\xde\x90' +p650 +ssS'pygments_styles_vim' +p651 +(dp652 +g6 +(dp653 +g8 +S'pygments_styles_vim.html' +p654 +sg10 +S'pygments.styles.vim' +p655 +sg12 +g13 +(g14 +g15 +NtRp656 +(dp657 +g18 +I1 +sg19 +I0 +sg20 +I8 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'=\x0f\xf69\xbc\xc6}+\xd8SQZ(\xc3\xca<' +p658 +ssS'pygments_styles_paraiso_light' +p659 +(dp660 +g6 +(dp661 +g8 +S'pygments_styles_paraiso_light.html' +p662 +sg10 +S'pygments.styles.paraiso_light' +p663 +sg12 +g13 +(g14 +g15 +NtRp664 +(dp665 +g18 +I1 +sg19 +I0 +sg20 
+I22 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'j\xeb\x14\x81z\xff\x90\x80E\xea\x83V\xb0\xb1\xe2\xf4' +p666 +ssS'pygments_lexers_c_cpp' +p667 +(dp668 +g6 +(dp669 +g8 +S'pygments_lexers_c_cpp.html' +p670 +sg10 +S'pygments.lexers.c_cpp' +p671 +sg12 +g13 +(g14 +g15 +NtRp672 +(dp673 +g18 +I1 +sg19 +I0 +sg20 +I47 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'\x98\x0c\x05F\xfa\xfa`\xe3\xe9\x107T\x16\x14x\x10' +p674 +ssS'pygments_lexers_webmisc' +p675 +(dp676 +g6 +(dp677 +g8 +S'pygments_lexers_webmisc.html' +p678 +sg10 +S'pygments.lexers.webmisc' +p679 +sg12 +g13 +(g14 +g15 +NtRp680 +(dp681 +g18 +I1 +sg19 +I0 +sg20 +I226 +sg21 +I0 +sg22 +I0 +sg23 +I66 +sg24 +I0 +sbssg25 +S'\x0fL\xaa\x1f\xe6\x15\xefMi\xe9\xd7(\xcc\xed\x9bZ' +p682 +ssS'pygments_lexers_foxpro' +p683 +(dp684 +g6 +(dp685 +g8 +S'pygments_lexers_foxpro.html' +p686 +sg10 +S'pygments.lexers.foxpro' +p687 +sg12 +g13 +(g14 +g15 +NtRp688 +(dp689 +g18 +I1 +sg19 +I0 +sg20 +I12 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xdda\xff./5xT;\xc7WK\x04\xd8v\xd2' +p690 +ssS'pygments_lexers_ruby' +p691 +(dp692 +g6 +(dp693 +g8 +S'pygments_lexers_ruby.html' +p694 +sg10 +S'pygments.lexers.ruby' +p695 +sg12 +g13 +(g14 +g15 +NtRp696 +(dp697 +g18 +I1 +sg19 +I0 +sg20 +I109 +sg21 +I0 +sg22 +I0 +sg23 +I2 +sg24 +I0 +sbssg25 +S'\xa2\xbbv\xb7\xe1&\xb7#\x86h(\xb9\t\xaf)\xd7' +p698 +ssS'pygments_styles_tango' +p699 +(dp700 +g6 +(dp701 +g8 +S'pygments_styles_tango.html' +p702 +sg10 +S'pygments.styles.tango' +p703 +sg12 +g13 +(g14 +g15 +NtRp704 +(dp705 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x860\xfd\x9fE\xdd\x1f\xe1&G\x7fO\x87^\xadw' +p706 +ssS'pygments_cmdline' +p707 +(dp708 +g6 +(dp709 +g8 +S'pygments_cmdline.html' +p710 +sg10 +S'pygments.cmdline' +p711 +sg12 +g13 +(g14 +g15 +NtRp712 +(dp713 +g18 +I1 +sg19 +I0 +sg20 +I318 +sg21 +I13 +sg22 +I0 +sg23 +I13 +sg24 +I0 +sbssg25 +S',9\x9e\xb7B\x1b\xbe\xdf\xcfE\xeb$^\x077\xf1' +p714 +ssS'pygments_lexers_iolang' +p715 +(dp716 +g6 +(dp717 +g8 +S'pygments_lexers_iolang.html' +p718 +sg10 +S'pygments.lexers.iolang' +p719 +sg12 +g13 +(g14 +g15 +NtRp720 +(dp721 +g18 +I1 +sg19 +I0 +sg20 +I10 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"\xc7\xa5zkn\xee\xd4\x9e\xd7\xe3A\x98\xf4\x9d'\x9a" +p722 +ssS'pygments_lexers_erlang' +p723 +(dp724 +g6 +(dp725 +g8 +S'pygments_lexers_erlang.html' +p726 +sg10 +S'pygments.lexers.erlang' +p727 +sg12 +g13 +(g14 +g15 +NtRp728 +(dp729 +g18 +I1 +sg19 +I0 +sg20 +I158 +sg21 +I0 +sg22 +I0 +sg23 +I2 +sg24 +I0 +sbssg25 +S'J\xd8/\xd1\x04=\x08\x0e\xe8}\xfa\xc90\x01\xa9=' +p730 +ssS'pygments_lexers__mapping' +p731 +(dp732 +g6 +(dp733 +g8 +S'pygments_lexers__mapping.html' +p734 +sg10 +S'pygments.lexers._mapping' +p735 +sg12 +g13 +(g14 +g15 +NtRp736 +(dp737 +g18 +I1 +sg19 +I0 +sg20 +I3 +sg21 +I24 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x88\x8d\x9e\x9c!\x03Us\x95\x0cJBrd\xdc\xc0' +p738 +ssS'pygments_lexers_haskell' +p739 +(dp740 +g6 +(dp741 +g8 +S'pygments_lexers_haskell.html' +p742 +sg10 +S'pygments.lexers.haskell' +p743 +sg12 +g13 +(g14 +g15 +NtRp744 +(dp745 +g18 +I1 +sg19 +I0 +sg20 +I134 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'h\xdfV\x93\x81\xd2\xe5\xae\n\xb1\x95?S\xa6\xee;' +p746 +ssS'pygments_unistring' +p747 +(dp748 +g6 +(dp749 +g46 +S'pygments_unistring.html' +p750 +sg48 +S'pygments.unistring' +p751 +sg50 +g13 +(g14 +g15 +NtRp752 +(dp753 +g18 +I1 +sg19 +I0 +sg20 +I63 +sg21 +I57 +sg22 +I0 +sg23 +I7 +sg24 +I0 +sbssg25 +S'z*\xce\xf9t\xbc\xf7Z\xfd\x02\x9e\xd0\x99\xca\xe6\xd3' +p754 +ssS'pygments_lexers_textfmts' +p755 +(dp756 
+g6 +(dp757 +g8 +S'pygments_lexers_textfmts.html' +p758 +sg10 +S'pygments.lexers.textfmts' +p759 +sg12 +g13 +(g14 +g15 +NtRp760 +(dp761 +g18 +I1 +sg19 +I0 +sg20 +I81 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"\x0et\x93\x9e\xd01N \x06\x90\x1d\xa7'\xd1\xe9\xd1" +p762 +ssS'pygments_lexers_configs' +p763 +(dp764 +g6 +(dp765 +g8 +S'pygments_lexers_configs.html' +p766 +sg10 +S'pygments.lexers.configs' +p767 +sg12 +g13 +(g14 +g15 +NtRp768 +(dp769 +g18 +I1 +sg19 +I0 +sg20 +I98 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\x9d\xf3#\xf7di\xbc\x9b?\x03}\xb3\xfd%\x14y' +p770 +ssS'pygments_lexers_dalvik' +p771 +(dp772 +g6 +(dp773 +g8 +S'pygments_lexers_dalvik.html' +p774 +sg10 +S'pygments.lexers.dalvik' +p775 +sg12 +g13 +(g14 +g15 +NtRp776 +(dp777 +g18 +I1 +sg19 +I0 +sg20 +I20 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'k\x1fq\xe8\xaa\xda\x87Oh_\x8a\xc3[\xb3\xb9\xa0' +p778 +ssS'pygments_lexers_julia' +p779 +(dp780 +g6 +(dp781 +g8 +S'pygments_lexers_julia.html' +p782 +sg10 +S'pygments.lexers.julia' +p783 +sg12 +g13 +(g14 +g15 +NtRp784 +(dp785 +g18 +I1 +sg19 +I0 +sg20 +I42 +sg21 +I0 +sg22 +I0 +sg23 +I12 +sg24 +I0 +sbssg25 +S'\x1a\x04+\x10p#\xcaO8\x8b6\xfe\xe7\x8a\xfa-' +p786 +ssS'pygments_formatters_rtf' +p787 +(dp788 +g6 +(dp789 +g8 +S'pygments_formatters_rtf.html' +p790 +sg10 +S'pygments.formatters.rtf' +p791 +sg12 +g13 +(g14 +g15 +NtRp792 +(dp793 +g18 +I1 +sg19 +I0 +sg20 +I65 +sg21 +I0 +sg22 +I0 +sg23 +I7 +sg24 +I0 +sbssg25 +S'\xa8h\xcd\x8e\xc3#R\x92\xaf|\xf9 U\xe2\xcb\x83' +p794 +ssS'pygments_lexers_installers' +p795 +(dp796 +g6 +(dp797 +g8 +S'pygments_lexers_installers.html' +p798 +sg10 +S'pygments.lexers.installers' +p799 +sg12 +g13 +(g14 +g15 +NtRp800 +(dp801 +g18 +I1 +sg19 +I0 +sg20 +I35 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"\xd2\xca\x05\n\x99'\xbf\xf3Q\x05-[\\\xdco\xf1" +p802 +ssS'pygments_styles_borland' +p803 +(dp804 +g6 +(dp805 +g8 +S'pygments_styles_borland.html' +p806 +sg10 +S'pygments.styles.borland' +p807 +sg12 +g13 +(g14 +g15 +NtRp808 +(dp809 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'H\x88%n\xbb\x93\xac\x17k"\x1f\xd0\x99\xf8\x92\xb2' +p810 +ssS'pygments_lexers_make' +p811 +(dp812 +g6 +(dp813 +g8 +S'pygments_lexers_make.html' +p814 +sg10 +S'pygments.lexers.make' +p815 +sg12 +g13 +(g14 +g15 +NtRp816 +(dp817 +g18 +I1 +sg19 +I0 +sg20 +I48 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x0fyp\xd3vK\xf2\x03F0\x10\n\x05\x8d\\h' +p818 +ssS'pygments_lexers_diff' +p819 +(dp820 +g6 +(dp821 +g8 +S'pygments_lexers_diff.html' +p822 +sg10 +S'pygments.lexers.diff' +p823 +sg12 +g13 +(g14 +g15 +NtRp824 +(dp825 +g18 +I1 +sg19 +I0 +sg20 +I23 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'\xb9\x86\xbfo\xcc\x1c\x04\xa0\xc4Y?\xc6^]\xe0W' +p826 +ssS'pygments_formatters_other' +p827 +(dp828 +g6 +(dp829 +g8 +S'pygments_formatters_other.html' +p830 +sg10 +S'pygments.formatters.other' +p831 +sg12 +g13 +(g14 +g15 +NtRp832 +(dp833 +g18 +I1 +sg19 +I0 +sg20 +I89 +sg21 +I0 +sg22 +I0 +sg23 +I25 +sg24 +I0 +sbssg25 +S'\xfa\x8d\xe484:bo\xe3\xde<\xc5\xe1*\xacr' +p834 +ssS'pygments_lexers_chapel' +p835 +(dp836 +g6 +(dp837 +g8 +S'pygments_lexers_chapel.html' +p838 +sg10 +S'pygments.lexers.chapel' +p839 +sg12 +g13 +(g14 +g15 +NtRp840 +(dp841 +g18 +I1 +sg19 +I0 +sg20 +I9 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xf2\xb1\xeb\xbd2\x98-\x0c?\x1d\x80\xe3\xa6\xe8\xef\x17' +p842 +ssS'pygments_styles_murphy' +p843 +(dp844 +g6 +(dp845 +g8 +S'pygments_styles_murphy.html' +p846 +sg10 +S'pygments.styles.murphy' +p847 +sg12 +g13 +(g14 +g15 
+NtRp848 +(dp849 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'@\x82\x16\xfc\x00\x9ex\xf2\x03P\x87\x90l\x1c\xcc\xc2' +p850 +ssS'pygments_lexers_rebol' +p851 +(dp852 +g6 +(dp853 +g8 +S'pygments_lexers_rebol.html' +p854 +sg10 +S'pygments.lexers.rebol' +p855 +sg12 +g13 +(g14 +g15 +NtRp856 +(dp857 +g18 +I1 +sg19 +I0 +sg20 +I88 +sg21 +I0 +sg22 +I0 +sg23 +I6 +sg24 +I0 +sbssg25 +S'\xdc\x82\xda,yd\xca\\f\x8c\x1b\x87;\x92\xa8\x1f' +p858 +ssS'pygments_lexers_special' +p859 +(dp860 +g6 +(dp861 +g8 +S'pygments_lexers_special.html' +p862 +sg10 +S'pygments.lexers.special' +p863 +sg12 +g13 +(g14 +g15 +NtRp864 +(dp865 +g18 +I1 +sg19 +I0 +sg20 +I56 +sg21 +I0 +sg22 +I0 +sg23 +I16 +sg24 +I0 +sbssg25 +S'fL\xbdpQ\xd2\xd9\x9c\x9d"\xbe\xd8\xe3\xef\xf7\x9f' +p866 +ssS'pygments_lexers_actionscript' +p867 +(dp868 +g6 +(dp869 +g8 +S'pygments_lexers_actionscript.html' +p870 +sg10 +S'pygments.lexers.actionscript' +p871 +sg12 +g13 +(g14 +g15 +NtRp872 +(dp873 +g18 +I1 +sg19 +I0 +sg20 +I32 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\xd5\xc0\xce\xfd\xe00\xc8\x1e\xe2\xc3h\xa8\x93\xe7\x95I' +p874 +ssS'pygments_styles_monokai' +p875 +(dp876 +g6 +(dp877 +g8 +S'pygments_styles_monokai.html' +p878 +sg10 +S'pygments.styles.monokai' +p879 +sg12 +g13 +(g14 +g15 +NtRp880 +(dp881 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x1f2\x05\x95\x8e\xa5\xe2\x88U\x92\xe6!\x1a\x9a6\xe6' +p882 +ssS'pygments_lexers__cocoa_builtins' +p883 +(dp884 +g6 +(dp885 +g8 +S'pygments_lexers__cocoa_builtins.html' +p886 +sg10 +S'pygments.lexers._cocoa_builtins' +p887 +sg12 +g13 +(g14 +g15 +NtRp888 +(dp889 +g18 +I1 +sg19 +I0 +sg20 +I5 +sg21 +I39 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xf4f:\x8b\x91\xfc\xa3\xa9\xa1[]\xc0>Z\x1a\x1d' +p890 +ssS'pygments_lexers_felix' +p891 +(dp892 +g6 +(dp893 +g8 +S'pygments_lexers_felix.html' +p894 +sg10 +S'pygments.lexers.felix' +p895 +sg12 +g13 +(g14 +g15 +NtRp896 +(dp897 +g18 +I1 +sg19 +I0 +sg20 +I20 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x9f"\xb9\x95;\xeeC\xf9\x1e\x0b\x82\xb5;z\x02\xfc' +p898 +ssS'pygments_styles_igor' +p899 +(dp900 +g6 +(dp901 +g8 +S'pygments_styles_igor.html' +p902 +sg10 +S'pygments.styles.igor' +p903 +sg12 +g13 +(g14 +g15 +NtRp904 +(dp905 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xb1\x9d\xa8\x853\xafh\xf7\xbca\x18A\xeb"\x01z' +p906 +ssS'pygments_lexers_hdl' +p907 +(dp908 +g6 +(dp909 +g8 +S'pygments_lexers_hdl.html' +p910 +sg10 +S'pygments.lexers.hdl' +p911 +sg12 +g13 +(g14 +g15 +NtRp912 +(dp913 +g18 +I1 +sg19 +I0 +sg20 +I38 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xe8\xe1}lz\xe0R\xca:\x13\x84\xa7\xd8\x8e\xde\xed' +p914 +ssS'pygments_lexers__mql_builtins' +p915 +(dp916 +g6 +(dp917 +g8 +S'pygments_lexers__mql_builtins.html' +p918 +sg10 +S'pygments.lexers._mql_builtins' +p919 +sg12 +g13 +(g14 +g15 +NtRp920 +(dp921 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xce\x1f\xf4mf\xfa\xd4\xe9\xe9a\xc0I\x007GH' +p922 +ssS'pygments_lexers_shell' +p923 +(dp924 +g6 +(dp925 +g8 +S'pygments_lexers_shell.html' +p926 +sg10 +S'pygments.lexers.shell' +p927 +sg12 +g13 +(g14 +g15 +NtRp928 +(dp929 +g18 +I1 +sg19 +I0 +sg20 +I101 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\xe9q\x9bK\x8e\x10\xb9\x8a\xe4\xf6\xc7\xad\xf1j=t' +p930 +ssS'pygments_lexers_idl' +p931 +(dp932 +g6 +(dp933 +g8 +S'pygments_lexers_idl.html' +p934 +sg10 +S'pygments.lexers.idl' +p935 +sg12 +g13 +(g14 +g15 +NtRp936 +(dp937 +g18 +I1 +sg19 +I0 +sg20 +I14 +sg21 +I0 +sg22 
+I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S',\xea\x84\xdf\x83\xb2soE{\xea\xc3kXf\xcd' +p938 +ssS'pygments_lexers_inferno' +p939 +(dp940 +g6 +(dp941 +g8 +S'pygments_lexers_inferno.html' +p942 +sg10 +S'pygments.lexers.inferno' +p943 +sg12 +g13 +(g14 +g15 +NtRp944 +(dp945 +g18 +I1 +sg19 +I0 +sg20 +I14 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x83\xd5-Av\xe0\xbb~\xe2K\xfc\x0b8\xa8\xddm' +p946 +ssS'pygments_lexers_ambient' +p947 +(dp948 +g6 +(dp949 +g8 +S'pygments_lexers_ambient.html' +p950 +sg10 +S'pygments.lexers.ambient' +p951 +sg12 +g13 +(g14 +g15 +NtRp952 +(dp953 +g18 +I1 +sg19 +I0 +sg20 +I13 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'1S\x1bp\xe7Y\xdcG\xb2\x9f\x8cH(\\]r' +p954 +ssS'pygments_formatters_img' +p955 +(dp956 +g6 +(dp957 +g8 +S'pygments_formatters_img.html' +p958 +sg10 +S'pygments.formatters.img' +p959 +sg12 +g13 +(g14 +g15 +NtRp960 +(dp961 +g18 +I1 +sg19 +I0 +sg20 +I241 +sg21 +I0 +sg22 +I0 +sg23 +I60 +sg24 +I0 +sbssg25 +S'\xd9[\xecC#\xb2<\x1e\xa7Hv\xe8W\xdc\xa7?' +p962 +ssS'pygments_lexers_fortran' +p963 +(dp964 +g6 +(dp965 +g8 +S'pygments_lexers_fortran.html' +p966 +sg10 +S'pygments.lexers.fortran' +p967 +sg12 +g13 +(g14 +g15 +NtRp968 +(dp969 +g18 +I1 +sg19 +I0 +sg20 +I12 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xd9\xeb\x7f\xd0\xe5C%p&\xb7\nt!\xa1\xb8y' +p970 +ssS'pygments_lexers_fantom' +p971 +(dp972 +g6 +(dp973 +g8 +S'pygments_lexers_fantom.html' +p974 +sg10 +S'pygments.lexers.fantom' +p975 +sg12 +g13 +(g14 +g15 +NtRp976 +(dp977 +g18 +I1 +sg19 +I0 +sg20 +I13 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xcdE;&\x94\xe4\xb1=)\xcc\x90\xbcV+_\xa2' +p978 +ssS'pygments_lexers_go' +p979 +(dp980 +g6 +(dp981 +g8 +S'pygments_lexers_go.html' +p982 +sg10 +S'pygments.lexers.go' +p983 +sg12 +g13 +(g14 +g15 +NtRp984 +(dp985 +g18 +I1 +sg19 +I0 +sg20 +I12 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x9fp\x05^\x1a\x86L\xe4\x0c\x05\x94,\x8f\x07\xa2(' +p986 +ssS'pygments_lexers_tcl' +p987 +(dp988 +g6 +(dp989 +g8 +S'pygments_lexers_tcl.html' +p990 +sg10 +S'pygments.lexers.tcl' +p991 +sg12 +g13 +(g14 +g15 +NtRp992 +(dp993 +g18 +I1 +sg19 +I0 +sg20 +I17 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"E\xb3\x7f\xc1'q\xde\xeb\x0e\xc3\xc5\xcbm0Nx" +p994 +ssS'pygments_lexers_agile' +p995 +(dp996 +g6 +(dp997 +g8 +S'pygments_lexers_agile.html' +p998 +sg10 +S'pygments.lexers.agile' +p999 +sg12 +g13 +(g14 +g15 +NtRp1000 +(dp1001 +g18 +I1 +sg19 +I0 +sg20 +I12 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x92\x0c/\xdb\xea\xfb\x9f\xcc@@\xe7\xb6\\\x07\xa7 ' +p1002 +ssS'pygments_styles_native' +p1003 +(dp1004 +g6 +(dp1005 +g8 +S'pygments_styles_native.html' +p1006 +sg10 +S'pygments.styles.native' +p1007 +sg12 +g13 +(g14 +g15 +NtRp1008 +(dp1009 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x12/\xf0\xd2\xc9"\xccZ\xab/\x04\xad\xbf\x1at\x02' +p1010 +ssS'pygments_lexers_esoteric' +p1011 +(dp1012 +g6 +(dp1013 +g8 +S'pygments_lexers_esoteric.html' +p1014 +sg10 +S'pygments.lexers.esoteric' +p1015 +sg12 +g13 +(g14 +g15 +NtRp1016 +(dp1017 +g18 +I1 +sg19 +I0 +sg20 +I23 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'H9D\xae\x9eZ\xc2\x03\x1a2\x83\x9cxK&;' +p1018 +ssS'pygments_lexers_pascal' +p1019 +(dp1020 +g6 +(dp1021 +g8 +S'pygments_lexers_pascal.html' +p1022 +sg10 +S'pygments.lexers.pascal' +p1023 +sg12 +g13 +(g14 +g15 +NtRp1024 +(dp1025 +g18 +I1 +sg19 +I0 +sg20 +I226 +sg21 +I0 +sg22 +I0 +sg23 +I23 +sg24 +I0 +sbssg25 +S'\x8fT\x10*q!\xda\x9d\xb4#\xc8^R\x82\xfb\xee' +p1026 +ssS'pygments_regexopt' +p1027 +(dp1028 +g6 +(dp1029 +g46 
+S'pygments_regexopt.html' +p1030 +sg48 +S'pygments.regexopt' +p1031 +sg50 +g13 +(g14 +g15 +NtRp1032 +(dp1033 +g18 +I1 +sg19 +I0 +sg20 +I43 +sg21 +I0 +sg22 +I0 +sg23 +I2 +sg24 +I0 +sbssg25 +S">rG5'\xf0\xa6\xa0\xbc\xef,\x94\x06@\x86a" +p1034 +ssS'pygments_lexers__lasso_builtins' +p1035 +(dp1036 +g6 +(dp1037 +g8 +S'pygments_lexers__lasso_builtins.html' +p1038 +sg10 +S'pygments.lexers._lasso_builtins' +p1039 +sg12 +g13 +(g14 +g15 +NtRp1040 +(dp1041 +g18 +I1 +sg19 +I0 +sg20 +I3 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'&\xd8\x14\xfb+\xc1\xc7\xb9\x90\xf5\xd8k\x8c\x93y\xf3' +p1042 +ssS'pygments_lexers_javascript' +p1043 +(dp1044 +g6 +(dp1045 +g8 +S'pygments_lexers_javascript.html' +p1046 +sg10 +S'pygments.lexers.javascript' +p1047 +sg12 +g13 +(g14 +g15 +NtRp1048 +(dp1049 +g18 +I1 +sg19 +I0 +sg20 +I109 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\xcf\xe5\x8du<\xa3$%\xb2\xe8\x1f\x14u\x9b\xc2\xa0' +p1050 +ssS'pygments_lexers__openedge_builtins' +p1051 +(dp1052 +g6 +(dp1053 +g8 +S'pygments_lexers__openedge_builtins.html' +p1054 +sg10 +S'pygments.lexers._openedge_builtins' +p1055 +sg12 +g13 +(g14 +g15 +NtRp1056 +(dp1057 +g18 +I1 +sg19 +I0 +sg20 +I2 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x0eF\x90{\xda!\x03"\xebX\x08\x87$#BJ' +p1058 +ssS'pygments_formatters_latex' +p1059 +(dp1060 +g6 +(dp1061 +S'html_filename' +p1062 +S'pygments_formatters_latex.html' +p1063 +sS'name' +p1064 +S'pygments.formatters.latex' +p1065 +sS'nums' +p1066 +g13 +(g14 +g15 +NtRp1067 +(dp1068 +g18 +I1 +sg19 +I0 +sg20 +I189 +sg21 +I0 +sg22 +I0 +sg23 +I72 +sg24 +I0 +sbssg25 +S'\xe2\n\x90\x01I\x1c6\xf2w)\xa4,\xe5\xc1\xbe=' +p1069 +ssS'pygments_lexers_html' +p1070 +(dp1071 +g6 +(dp1072 +g8 +S'pygments_lexers_html.html' +p1073 +sg10 +S'pygments.lexers.html' +p1074 +sg12 +g13 +(g14 +g15 +NtRp1075 +(dp1076 +g18 +I1 +sg19 +I0 +sg20 +I80 +sg21 +I0 +sg22 +I0 +sg23 +I2 +sg24 +I0 +sbssg25 +S'\x18\x97\x07#\xe4\x9c\xa5\xff\xbf\xf1\xbc\xdb\x83`2?' 
+p1077 +ssS'pygments_styles_friendly' +p1078 +(dp1079 +g6 +(dp1080 +g8 +S'pygments_styles_friendly.html' +p1081 +sg10 +S'pygments.styles.friendly' +p1082 +sg12 +g13 +(g14 +g15 +NtRp1083 +(dp1084 +g18 +I1 +sg19 +I0 +sg20 +I7 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\x8b\x8a\x97\x9c\x17\x82W\xc1V> \xc5\xae\xc1N\x0e' +p1085 +ssS'pygments_lexers_lisp' +p1086 +(dp1087 +g6 +(dp1088 +g8 +S'pygments_lexers_lisp.html' +p1089 +sg10 +S'pygments.lexers.lisp' +p1090 +sg12 +g13 +(g14 +g15 +NtRp1091 +(dp1092 +g18 +I1 +sg19 +I0 +sg20 +I109 +sg21 +I0 +sg22 +I0 +sg23 +I2 +sg24 +I0 +sbssg25 +S'\x07\xb7\xeco4\x86L\n\xa8\xe8|\xeeRh\xa7\x14' +p1093 +ssS'pygments_lexers_ooc' +p1094 +(dp1095 +g6 +(dp1096 +g8 +S'pygments_lexers_ooc.html' +p1097 +sg10 +S'pygments.lexers.ooc' +p1098 +sg12 +g13 +(g14 +g15 +NtRp1099 +(dp1100 +g18 +I1 +sg19 +I0 +sg20 +I10 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xb4\xb6\xef\x10/\x90v\x91\x19VN\x0e\xffE\xfe\xfc' +p1101 +ssS'pygments_lexers_perl' +p1102 +(dp1103 +g6 +(dp1104 +g8 +S'pygments_lexers_perl.html' +p1105 +sg10 +S'pygments.lexers.perl' +p1106 +sg12 +g13 +(g14 +g15 +NtRp1107 +(dp1108 +g18 +I1 +sg19 +I0 +sg20 +I129 +sg21 +I0 +sg22 +I0 +sg23 +I11 +sg24 +I0 +sbssg25 +S'\xfbf:\xae\x15\xbc\xa2\xedK\xc3\xda~\xebQl\xf7' +p1109 +ssS'pygments_lexers' +p1110 +(dp1111 +g6 +(dp1112 +g46 +S'pygments_lexers.html' +p1113 +sg48 +S'pygments.lexers' +p1114 +sg50 +g13 +(g14 +g15 +NtRp1115 +(dp1116 +g18 +I1 +sg19 +I0 +sg20 +I156 +sg21 +I0 +sg22 +I0 +sg23 +I115 +sg24 +I0 +sbssg25 +S'\xbdb&K\xc4y\x19=\xb1\xa7\xd4\xd0\x04\xfdjY' +p1117 +ssS'pygments_lexers_console' +p1118 +(dp1119 +g6 +(dp1120 +g8 +S'pygments_lexers_console.html' +p1121 +sg10 +S'pygments.lexers.console' +p1122 +sg12 +g13 +(g14 +g15 +NtRp1123 +(dp1124 +g18 +I1 +sg19 +I0 +sg20 +I16 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xf0\xc1\x12_E;G\xefs\xbbC\xace\x1fvM' +p1125 +ssS'pygments_lexers_nit' +p1126 +(dp1127 +g6 +(dp1128 +g8 +S'pygments_lexers_nit.html' +p1129 +sg10 +S'pygments.lexers.nit' +p1130 +sg12 +g13 +(g14 +g15 +NtRp1131 +(dp1132 +g18 +I1 +sg19 +I0 +sg20 +I9 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xe8%\xf4\xf5-\xc5\xf79\x96\xfa8m\xa4h\xb8_' +p1133 +ssS'pygments_lexers_python' +p1134 +(dp1135 +g6 +(dp1136 +g46 +S'pygments_lexers_python.html' +p1137 +sg48 +S'pygments.lexers.python' +p1138 +sg50 +g13 +(g14 +g15 +NtRp1139 +(dp1140 +g18 +I1 +sg19 +I0 +sg20 +I124 +sg21 +I0 +sg22 +I0 +sg23 +I52 +sg24 +I0 +sbssg25 +S'\x02\x11\x8c\x17W\x9c\xf2 \xf3Z\xae\xce\xcfM\xda\xf4' +p1141 +ssS'pygments_util' +p1142 +(dp1143 +g576 +(dp1144 +g46 +S'pygments_util.html' +p1145 +sg48 +S'pygments.util' +p1146 +sg50 +g13 +(g14 +g15 +NtRp1147 +(dp1148 +g18 +I1 +sg19 +I0 +sg20 +I207 +sg21 +I0 +sg22 +I0 +sg23 +I141 +sg24 +I0 +sbssg585 +S'N\x94T\xf7\xf2S8?\xbc6\xf2s\xde\x84dS' +p1149 +ssS'pygments_lexers_nix' +p1150 +(dp1151 +g6 +(dp1152 +g8 +S'pygments_lexers_nix.html' +p1153 +sg10 +S'pygments.lexers.nix' +p1154 +sg12 +g13 +(g14 +g15 +NtRp1155 +(dp1156 +g18 +I1 +sg19 +I0 +sg20 +I27 +sg21 +I0 +sg22 +I0 +sg23 +I3 +sg24 +I0 +sbssg25 +S'\x13\x97_n\xb1\x1aY\xbf\xaa/aNb\x9f\xf1\xd6' +p1157 +ssS'pygments_lexers_dsls' +p1158 +(dp1159 +g6 +(dp1160 +g8 +S'pygments_lexers_dsls.html' +p1161 +sg10 +S'pygments.lexers.dsls' +p1162 +sg12 +g13 +(g14 +g15 +NtRp1163 +(dp1164 +g18 +I1 +sg19 +I0 +sg20 +I59 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xabaRq\xac&\x12\x84\xdd\x0c3I\xca\x97\x07\xdf' +p1165 +ssS'pygments_lexers_theorem' +p1166 +(dp1167 +g6 +(dp1168 +g8 +S'pygments_lexers_theorem.html' +p1169 +sg10 
+S'pygments.lexers.theorem' +p1170 +sg12 +g13 +(g14 +g15 +NtRp1171 +(dp1172 +g18 +I1 +sg19 +I0 +sg20 +I66 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\xae\xc2,\xcf\xd0v\xc8\x1c>\xe8\xc2\xc5\x0c\xb5[\xed' +p1173 +ssS'pygments_lexers_r' +p1174 +(dp1175 +g6 +(dp1176 +g8 +S'pygments_lexers_r.html' +p1177 +sg10 +S'pygments.lexers.r' +p1178 +sg12 +g13 +(g14 +g15 +NtRp1179 +(dp1180 +g18 +I1 +sg19 +I0 +sg20 +I44 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S"l[\x81\xc4\xa5\x00Ab\xdf'u\xe1(\xe9\x95\xd5" +p1181 +ssS'pygments_lexers__postgres_builtins' +p1182 +(dp1183 +g6 +(dp1184 +g8 +S'pygments_lexers__postgres_builtins.html' +p1185 +sg10 +S'pygments.lexers._postgres_builtins' +p1186 +sg12 +g13 +(g14 +g15 +NtRp1187 +(dp1188 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I77 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xba\xc8G)\x13\xdax\x03\x00\xbfd6\xcc\xf5\xfd\xff' +p1189 +ssS'pygments_lexers_eiffel' +p1190 +(dp1191 +g6 +(dp1192 +g8 +S'pygments_lexers_eiffel.html' +p1193 +sg10 +S'pygments.lexers.eiffel' +p1194 +sg12 +g13 +(g14 +g15 +NtRp1195 +(dp1196 +g18 +I1 +sg19 +I0 +sg20 +I10 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'7\xe6\xd4\xb9\xb4S\xd8I\xd9\xae\x8a\x8c\x15\xf2R\x06' +p1197 +ssS'pygments_lexers_d' +p1198 +(dp1199 +g6 +(dp1200 +g8 +S'pygments_lexers_d.html' +p1201 +sg10 +S'pygments.lexers.d' +p1202 +sg12 +g13 +(g14 +g15 +NtRp1203 +(dp1204 +g18 +I1 +sg19 +I0 +sg20 +I21 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\xab\x13a\xfe\xf8,\xb7u\xd0\x0fL\xe0\xc91\xa7z' +p1205 +ssS'pygments_styles_manni' +p1206 +(dp1207 +g6 +(dp1208 +g8 +S'pygments_styles_manni.html' +p1209 +sg10 +S'pygments.styles.manni' +p1210 +sg12 +g13 +(g14 +g15 +NtRp1211 +(dp1212 +g18 +I1 +sg19 +I0 +sg20 +I6 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'X\xfc\x88\xafu\x0e;\xa6\x07*Ex\x0f\x13U\x86' +p1213 +ssS'pygments_lexers_smalltalk' +p1214 +(dp1215 +g6 +(dp1216 +g8 +S'pygments_lexers_smalltalk.html' +p1217 +sg10 +S'pygments.lexers.smalltalk' +p1218 +sg12 +g13 +(g14 +g15 +NtRp1219 +(dp1220 +g18 +I1 +sg19 +I0 +sg20 +I16 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S'\n\xa8\xf0t\xd2\x9fVa\xad\xeba\x18=\xa2\x96\xc8' +p1221 +ssS'pygments_lexers_php' +p1222 +(dp1223 +g6 +(dp1224 +g8 +S'pygments_lexers_php.html' +p1225 +sg10 +S'pygments.lexers.php' +p1226 +sg12 +g13 +(g14 +g15 +NtRp1227 +(dp1228 +g18 +I1 +sg19 +I0 +sg20 +I53 +sg21 +I0 +sg22 +I0 +sg23 +I1 +sg24 +I0 +sbssg25 +S'\x9c\xabG\x80\xd0\x03\xd9\xa0\x14\x84\x00~1\xc5\x8c\x0c' +p1229 +ssS'pygments_lexers_igor' +p1230 +(dp1231 +g6 +(dp1232 +g8 +S'pygments_lexers_igor.html' +p1233 +sg10 +S'pygments.lexers.igor' +p1234 +sg12 +g13 +(g14 +g15 +NtRp1235 +(dp1236 +g18 +I1 +sg19 +I0 +sg20 +I18 +sg21 +I0 +sg22 +I0 +sg23 +I0 +sg24 +I0 +sbssg25 +S' \xd0\x11)\xab7Ey\x8b\x83ybody { + font-size: 16px; + } + +/* Set base font size to 12/16 */ +p { + font-size: .75em; /* 12/16 */ + line-height: 1.33333333em; /* 16/12 */ + } + +table { + border-collapse: collapse; + } + +a.nav { + text-decoration: none; + color: inherit; + } +a.nav:hover { + text-decoration: underline; + color: inherit; + } + +/* Page structure */ +#header { + background: #f8f8f8; + width: 100%; + border-bottom: 1px solid #eee; + } + +#source { + padding: 1em; + font-family: "courier new", monospace; + } + +#indexfile #footer { + margin: 1em 3em; + } + +#pyfile #footer { + margin: 1em 1em; + } + +#footer .content { + padding: 0; + font-size: 85%; + font-family: verdana, sans-serif; + color: #666666; + font-style: italic; + } + +#index { + margin: 1em 0 0 3em; + } + +/* Header styles */ +#header 
.content { + padding: 1em 3em; + } + +h1 { + font-size: 1.25em; +} + +h2.stats { + margin-top: .5em; + font-size: 1em; +} +.stats span { + border: 1px solid; + padding: .1em .25em; + margin: 0 .1em; + cursor: pointer; + border-color: #999 #ccc #ccc #999; +} +.stats span.hide_run, .stats span.hide_exc, +.stats span.hide_mis, .stats span.hide_par, +.stats span.par.hide_run.hide_par { + border-color: #ccc #999 #999 #ccc; +} +.stats span.par.hide_run { + border-color: #999 #ccc #ccc #999; +} + +.stats span.run { + background: #ddffdd; +} +.stats span.exc { + background: #eeeeee; +} +.stats span.mis { + background: #ffdddd; +} +.stats span.hide_run { + background: #eeffee; +} +.stats span.hide_exc { + background: #f5f5f5; +} +.stats span.hide_mis { + background: #ffeeee; +} +.stats span.par { + background: #ffffaa; +} +.stats span.hide_par { + background: #ffffcc; +} + +/* Help panel */ +#keyboard_icon { + float: right; + cursor: pointer; +} + +.help_panel { + position: absolute; + background: #ffc; + padding: .5em; + border: 1px solid #883; + display: none; +} + +#indexfile .help_panel { + width: 20em; height: 4em; +} + +#pyfile .help_panel { + width: 16em; height: 8em; +} + +.help_panel .legend { + font-style: italic; + margin-bottom: 1em; +} + +#panel_icon { + float: right; + cursor: pointer; +} + +.keyhelp { + margin: .75em; +} + +.keyhelp .key { + border: 1px solid black; + border-color: #888 #333 #333 #888; + padding: .1em .35em; + font-family: monospace; + font-weight: bold; + background: #eee; +} + +/* Source file styles */ +.linenos p { + text-align: right; + margin: 0; + padding: 0 .5em; + color: #999999; + font-family: verdana, sans-serif; + font-size: .625em; /* 10/16 */ + line-height: 1.6em; /* 16/10 */ + } +.linenos p.highlight { + background: #ffdd00; + } +.linenos p a { + text-decoration: none; + color: #999999; + } +.linenos p a:hover { + text-decoration: underline; + color: #999999; + } + +td.text { + width: 100%; + } +.text p { + margin: 0; + padding: 0 0 0 .5em; + border-left: 2px solid #ffffff; + white-space: nowrap; + } + +.text p.mis { + background: #ffdddd; + border-left: 2px solid #ff0000; + } +.text p.run, .text p.run.hide_par { + background: #ddffdd; + border-left: 2px solid #00ff00; + } +.text p.exc { + background: #eeeeee; + border-left: 2px solid #808080; + } +.text p.par, .text p.par.hide_run { + background: #ffffaa; + border-left: 2px solid #eeee99; + } +.text p.hide_run, .text p.hide_exc, .text p.hide_mis, .text p.hide_par, +.text p.hide_run.hide_par { + background: inherit; + } + +.text span.annotate { + font-family: georgia; + font-style: italic; + color: #666; + float: right; + padding-right: .5em; + } +.text p.hide_par span.annotate { + display: none; + } + +/* Syntax coloring */ +.text .com { + color: green; + font-style: italic; + line-height: 1px; + } +.text .key { + font-weight: bold; + line-height: 1px; + } +.text .str { + color: #000080; + } + +/* index styles */ +#index td, #index th { + text-align: right; + width: 5em; + padding: .25em .5em; + border-bottom: 1px solid #eee; + } +#index th { + font-style: italic; + color: #333; + border-bottom: 1px solid #ccc; + cursor: pointer; + } +#index th:hover { + background: #eee; + border-bottom: 1px solid #999; + } +#index td.left, #index th.left { + padding-left: 0; + } +#index td.right, #index th.right { + padding-right: 0; + } +#index th.headerSortDown, #index th.headerSortUp { + border-bottom: 1px solid #000; + } +#index td.name, #index th.name { + text-align: left; + width: auto; + } +#index td.name a { 
+ text-decoration: none; + color: #000; + } +#index td.name a:hover { + text-decoration: underline; + color: #000; + } +#index tr.total { + } +#index tr.total td { + font-weight: bold; + border-top: 1px solid #ccc; + border-bottom: none; + } +#index tr.file:hover { + background: #eeeeee; + } diff --git a/vendor/pygments/tests/examplefiles/99_bottles_of_beer.chpl b/vendor/pygments/tests/examplefiles/99_bottles_of_beer.chpl new file mode 100644 index 0000000..cdc1e65 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/99_bottles_of_beer.chpl @@ -0,0 +1,179 @@ +/*********************************************************************** + * Chapel implementation of "99 bottles of beer" + * + * by Brad Chamberlain and Steve Deitz + * 07/13/2006 in Knoxville airport while waiting for flight home from + * HPLS workshop + * compiles and runs with chpl compiler version 1.12.0 + * for more information, contact: chapel_info@cray.com + * + * + * Notes: + * o as in all good parallel computations, boundary conditions + * constitute the vast bulk of complexity in this code (invite Brad to + * tell you about his zany boundary condition simplification scheme) + * o uses type inference for variables, arguments + * o relies on integer->string coercions + * o uses named argument passing (for documentation purposes only) + ***********************************************************************/ + +// allow executable command-line specification of number of bottles +// (e.g., ./a.out -snumBottles=999999) +config const numBottles = 99; +const numVerses = numBottles+1; + +// a domain to describe the space of lyrics +var LyricsSpace: domain(1) = {1..numVerses}; + +// array of lyrics +var Lyrics: [LyricsSpace] string; + +// parallel computation of lyrics array +[verse in LyricsSpace] Lyrics(verse) = computeLyric(verse); + +// as in any good parallel language, I/O to stdout is serialized. +// (Note that I/O to a file could be parallelized using a parallel +// prefix computation on the verse strings' lengths with file seeking) +writeln(Lyrics); + + +// HELPER FUNCTIONS: + +proc computeLyric(verseNum) { + var bottleNum = numBottles - (verseNum - 1); + var nextBottle = (bottleNum + numVerses - 1)%numVerses; + return "\n" // disguise space used to separate elements in array I/O + + describeBottles(bottleNum, startOfVerse=true) + " on the wall, " + + describeBottles(bottleNum) + ".\n" + + computeAction(bottleNum) + + describeBottles(nextBottle) + " on the wall.\n"; +} + + +proc describeBottles(bottleNum, startOfVerse:bool = false) { + // NOTE: bool should not be necessary here (^^^^); working around bug + var bottleDescription = if (bottleNum) then bottleNum:string + else (if startOfVerse then "N" + else "n") + + "o more"; + return bottleDescription + + " bottle" + (if (bottleNum == 1) then "" else "s") + + " of beer"; +} + + +proc computeAction(bottleNum) { + return if (bottleNum == 0) then "Go to the store and buy some more, " + else "Take one down and pass it around, "; +} + + +// Modules... +module M1 { + var x = 10; + + var y = 13.0; +} + +module M2 { + use M1 except y; + use M1 only y; + proc main() { + writeln("M2 -> M1 -> x " + x); + } +} + + +// Classes, records, unions... 
+const PI: real = 3.14159; + +record Point { + var x, y: real; +} +var p: Point; +writeln("Distance from origin: " + sqrt(p.x ** 2 + p.y ** 2)); +p = new Point(1.0, 2.0); +writeln("Distance from origin: " + sqrt(p.x ** 2 + p.y ** 2)); + +class Circle { + var p: Point; + var r: real; +} +var c = new Circle(r=2.0); +proc Circle.area() + return PI * r ** 2; +writeln("Area of circle: " + c.area()); + +class Oval: Circle { + var r2: real; +} +proc Oval.area() + return PI * r * r2; + +delete c; +c = nil; +c = new Oval(r=1.0, r2=2.0); +writeln("Area of oval: " + c.area()); + +// This is a valid decimal integer: +var x = 0000000000012; + +union U { + var i: int; + var r: real; +} + +// chapel ranges are awesome. +var r1 = 1..10, // 1 2 3 4 5 6 7 8 9 10 + r2 = 10..1, // no values in this range + r3 = 1..10 by -1, // 10 9 8 7 6 5 4 3 2 1 + r4 = 1..10 by 2, // 1 3 5 7 9 + r5 = 1..10 by 2 align 0, // 2 4 6 8 10 + r6 = 1..10 by 2 align 2, // 2 4 6 8 10 + r7 = 1..10 # 3, // 1 2 3 + r8 = 1..10 # -2, // 9 10 + r9 = 1..100 # 10 by 2, // 1 3 5 7 9 + ra = 1..100 by 2 # 10, // 1 3 5 7 9 11 13 15 17 19 + rb = 1.. # 100 by 10; // 1 11 21 31 41 51 61 71 81 91 + +// create a variable with default initialization +var myVarWithoutInit: real = noinit; +myVarWithoutInit = 1.0; + +// Chapel has <~> operator for read and write I/O operations. +class IntPair { + var x: int; + var y: int; + proc readWriteThis(f) { + f <~> x <~> new ioLiteral(",") <~> y <~> new ioNewline(); + } +} +var ip = new IntPair(17,2); +write(ip); + +var targetDom = {1..10}, + target: [targetDom] int; +coforall i in targetDom with (ref target) { + target[i] = i ** 3; +} + +var wideOpen = 0o777, + mememe = 0o600, + clique_y = 0O660, + zeroOct = 0o0, + minPosOct = 0O1; + +private module M3 { + private proc foo() { + + } + + private iter bar() { + for i in 1..10 { + yield i; + } + } + + private var x: int; + +} diff --git a/vendor/pygments/tests/examplefiles/Blink.ino b/vendor/pygments/tests/examplefiles/Blink.ino new file mode 100644 index 0000000..993bd74 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/Blink.ino @@ -0,0 +1,24 @@ +/* + Blink + Turns on an LED on for one second, then off for one second, repeatedly. + + This example code is in the public domain. + */ + +// Pin 13 has an LED connected on most Arduino boards. +// give it a name: +int led = 13; + +// the setup routine runs once when you press reset: +void setup() { + // initialize the digital pin as an output. 
+ pinMode(led, OUTPUT); +} + +// the loop routine runs over and over again forever: +void loop() { + digitalWrite(led, HIGH); // turn the LED on (HIGH is the voltage level) + delay(1000); // wait for a second + digitalWrite(led, LOW); // turn the LED off by making the voltage LOW + delay(1000); // wait for a second +} diff --git a/vendor/pygments/tests/examplefiles/CPDictionary.j b/vendor/pygments/tests/examplefiles/CPDictionary.j old mode 100755 new mode 100644 diff --git a/vendor/pygments/tests/examplefiles/Deflate.fs b/vendor/pygments/tests/examplefiles/Deflate.fs new file mode 100644 index 0000000..7d3680e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/Deflate.fs @@ -0,0 +1,578 @@ +// public domain + +module Deflate + +open System +open System.Collections.Generic +open System.IO +open System.Linq +open Crc + +let maxbuf = 32768 +let maxlen = 258 + +let getBit (b:byte) (bit:int) = + if b &&& (1uy <<< bit) = 0uy then 0 else 1 + +type BitReader(sin:Stream) = + let mutable bit = 8 + let mutable cur = 0uy + + member x.Skip() = + bit <- 8 + + member x.ReadBit() = + if bit = 8 then + bit <- 0 + let b = sin.ReadByte() + if b = -1 then + failwith "バッファを超過しました" + cur <- byte b + let ret = if cur &&& (1uy <<< bit) = 0uy then 0 else 1 + bit <- bit + 1 + ret + + member x.ReadLE n = + let mutable ret = 0 + for i = 0 to n - 1 do + if x.ReadBit() = 1 then ret <- ret ||| (1 <<< i) + ret + + member x.ReadBE n = + let mutable ret = 0 + for i = 0 to n - 1 do + ret <- (ret <<< 1) ||| x.ReadBit() + ret + + member x.ReadBytes len = + if bit <> 8 then bit <- 8 + let buf = Array.zeroCreate len + ignore <| sin.Read(buf, 0, len) + buf + +type WriteBuffer(sout:Stream) = + let mutable prev:byte[] = null + let mutable buf = Array.zeroCreate maxbuf + let mutable p = 0 + + let next newbuf = + prev <- buf + buf <- if newbuf then Array.zeroCreate maxbuf else null + p <- 0 + + member x.Close() = + next false + next false + + interface IDisposable with + member x.Dispose() = x.Close() + + member x.WriteByte (b:byte) = + buf.[p] <- b + sout.WriteByte b + p <- p + 1 + if p = maxbuf then next true + + member x.Write (src:byte[]) start len = + let maxlen = maxbuf - p + if len <= maxlen then + Array.Copy(src, start, buf, p, len) + sout.Write(src, start, len) + p <- p + len + if p = maxbuf then next true + else + x.Write src start maxlen + x.Write src (start + maxlen) (len - maxlen) + + member x.Copy len dist = + if dist < 1 then + failwith <| sprintf "dist too small: %d < 1" dist + elif dist > maxbuf then + failwith <| sprintf "dist too big: %d > %d" dist maxbuf + let pp = p - dist + if pp < 0 then + if prev = null then + failwith <| sprintf "dist too big: %d > %d" dist p + let pp = pp + maxbuf + let maxlen = maxbuf - pp + if len <= maxlen then + x.Write prev pp len + else + x.Write prev pp maxlen + x.Copy (len - maxlen) dist + else + let maxlen = p - pp + if len <= maxlen then + x.Write buf pp len + else + if dist = 1 then + let b = buf.[pp] + for i = 1 to len do + x.WriteByte b + else + let buf' = buf + let mutable len' = len + while len' > 0 do + let len'' = Math.Min(len', maxlen) + x.Write buf' pp len'' + len' <- len' - len'' + +type Huffman(lens:int[]) = + let vals = Array.zeroCreate lens.Length + let min = lens.Where(fun x -> x > 0).Min() + let max = lens.Max() + let counts = Array.zeroCreate (max + 1) + let firsts = Array.zeroCreate (max + 1) + let nexts = Array.zeroCreate (max + 1) + let tables = Array.zeroCreate(max + 1) + + do + for len in lens do + if len > 0 then counts.[len] <- counts.[len] + 1 + for i = 
1 to max do + firsts.[i] <- (firsts.[i - 1] + counts.[i - 1]) <<< 1 + Array.Copy(firsts, 0, nexts, 0, max + 1) + for i = 0 to vals.Length - 1 do + let len = lens.[i] + if len > 0 then + vals.[i] <- nexts.[len] + nexts.[len] <- nexts.[len] + 1 + + for i = 0 to vals.Length - 1 do + let len = lens.[i] + if len > 0 then + let start = firsts.[len] + if tables.[len] = null then + let count = nexts.[len] - start + tables.[len] <- Array.zeroCreate count + tables.[len].[vals.[i] - start] <- i + + member x.GetValue h = + let rec getv i = + if i > max then -1 else + if h < nexts.[i] then + tables.[i].[h - firsts.[i]] + else + getv (i + 1) + getv min + + member x.Read(br:BitReader) = + let rec read h i = + if h < nexts.[i] then + tables.[i].[h - firsts.[i]] + else + read ((h <<< 1) ||| br.ReadBit()) (i + 1) + read (br.ReadBE min) min + +type [<AbstractClass>] HuffmanDecoder() = + abstract GetValue: unit->int + abstract GetDistance: unit->int + +type FixedHuffman(br:BitReader) = + inherit HuffmanDecoder() + + override x.GetValue() = + let v = br.ReadBE 7 + if v < 24 then v + 256 else + let v = (v <<< 1) ||| br.ReadBit() + if v < 192 then v - 48 + elif v < 200 then v + 88 + else ((v <<< 1) ||| br.ReadBit()) - 256 + + override x.GetDistance() = br.ReadBE 5 + +type DynamicHuffman(br:BitReader) = + inherit HuffmanDecoder() + + let lit, dist = + let hlit = + let hlit = (br.ReadLE 5) + 257 + if hlit > 286 then failwith <| sprintf "hlit: %d > 286" hlit + hlit + + let hdist = + let hdist = (br.ReadLE 5) + 1 + if hdist > 32 then failwith <| sprintf "hdist: %d > 32" hdist + hdist + + let hclen = + let hclen = (br.ReadLE 4) + 4 + if hclen > 19 then failwith <| sprintf "hclen: %d > 19" hclen + hclen + + let clen = + let hclens = Array.zeroCreate 19 + let order = [| 16; 17; 18; 0; 8; 7; 9; 6; 10; 5; + 11; 4; 12; 3; 13; 2; 14; 1; 15 |] + for i = 0 to hclen - 1 do + hclens.[order.[i]] <- br.ReadLE 3 + new Huffman(hclens) + + let ld = Array.zeroCreate(hlit + hdist) + let mutable i = 0 + while i < ld.Length do + let v = clen.Read(br) + if v < 16 then + ld.[i] <- v + i <- i + 1 + else + let r, v = + match v with + | 16 -> (br.ReadLE 2) + 3, ld.[i - 1] + | 17 -> (br.ReadLE 3) + 3, 0 + | 18 -> (br.ReadLE 7) + 11, 0 + | _ -> failwith "不正な値です。" + for j = 0 to r - 1 do + ld.[i + j] <- v + i <- i + r + + new Huffman(ld.[0 .. hlit - 1]), + new Huffman(ld.[hlit ..
hlit + hdist - 1]) + + override x.GetValue() = lit.Read br + override x.GetDistance() = dist.Read br + +let getLitExLen v = if v < 265 || v = 285 then 0 else (v - 261) >>> 2 +let getDistExLen d = if d < 4 then 0 else (d - 2) >>> 1 + +let litlens = + let litlens = Array.zeroCreate 286 + let mutable v = 3 + for i = 257 to 284 do + litlens.[i] <- v + v <- v + (1 <<< (getLitExLen i)) + litlens.[285] <- maxlen + litlens.[257..285] + +let distlens = + let distlens = Array.zeroCreate 30 + let mutable v = 1 + for i = 0 to 29 do + distlens.[i] <- v + v <- v + (1 <<< (getDistExLen i)) + distlens + +type Reader(sin:Stream) = + inherit Stream() + + let br = new BitReader(sin) + let fh = new FixedHuffman(br) + + let sout = new MemoryStream() + let dbuf = new WriteBuffer(sout) + + let mutable cache:byte[] = null + let mutable canRead = true + + let rec read (h:HuffmanDecoder) = + let v = h.GetValue() + if v > 285 then failwith <| sprintf "不正な値: %d" v + if v < 256 then + dbuf.WriteByte(byte v) + elif v > 256 then + let len = + if v < 265 then v - 254 else + litlens.[v - 257] + (br.ReadLE (getLitExLen v)) + let dist = + let d = h.GetDistance() + if d > 29 then failwith <| sprintf "不正な距離: %d" d + if d < 4 then d + 1 else + distlens.[d] + (br.ReadLE (getDistExLen d)) + dbuf.Copy len dist + if v <> 256 then read h + + override x.CanRead = canRead + override x.CanWrite = false + override x.CanSeek = false + override x.Flush() = () + + override x.Close() = + dbuf.Close() + canRead <- false + + override x.Read(buffer, offset, count) = + let offset = + if cache = null then 0 else + let clen = cache.Length + let len = Math.Min(clen, count) + Array.Copy(cache, 0, buffer, offset, len) + cache <- if len = clen then null + else cache.[len .. clen - 1] + len + let req = int64 <| count - offset + while canRead && sout.Length < req do + x.readBlock() + let len = + if sout.Length = 0L then 0 else + let data = sout.ToArray() + sout.SetLength(0L) + let dlen = data.Length + let len = Math.Min(int req, dlen) + Array.Copy(data, 0, buffer, offset, len) + if dlen > len then + cache <- data.[len..] 
+ len + offset + len + + override x.Position + with get() = raise <| new NotImplementedException() + and set(v) = raise <| new NotImplementedException() + + override x.Length = raise <| new NotImplementedException() + override x.Seek(_, _) = raise <| new NotImplementedException() + override x.Write(_, _, _) = raise <| new NotImplementedException() + override x.SetLength(_) = raise <| new NotImplementedException() + + member private x.readBlock() = + let bfinal = br.ReadBit() + match br.ReadLE 2 with + | 0 -> br.Skip() + let len = br.ReadLE 16 + let nlen = br.ReadLE 16 + if len + nlen <> 0x10000 then + failwith "不正な非圧縮長" + dbuf.Write (br.ReadBytes len) 0 len + | 1 -> read fh + | 2 -> read (new DynamicHuffman(br)) + | _ -> failwith "不正なブロックタイプ" + if bfinal = 1 then + canRead <- false + x.Close() + +type BitWriter(sout:Stream) = + let mutable bit = 0 + let mutable cur = 0uy + + member x.Skip() = + if bit > 0 then + sout.WriteByte(cur) + bit <- 0 + cur <- 0uy + + interface IDisposable with + member x.Dispose() = + x.Skip() + sout.Flush() + + member x.WriteBit(b:int) = + cur <- cur ||| ((byte b) <<< bit) + bit <- bit + 1 + if bit = 8 then + sout.WriteByte(cur) + bit <- 0 + cur <- 0uy + + member x.WriteLE (len:int) (b:int) = + for i = 0 to len - 1 do + x.WriteBit <| if (b &&& (1 <<< i)) = 0 then 0 else 1 + + member x.WriteBE (len:int) (b:int) = + for i = len - 1 downto 0 do + x.WriteBit <| if (b &&& (1 <<< i)) = 0 then 0 else 1 + + member x.WriteBytes(data:byte[]) = + x.Skip() + sout.Write(data, 0, data.Length) + +type FixedHuffmanWriter(bw:BitWriter) = + member x.Write (b:int) = + if b < 144 then + bw.WriteBE 8 (b + 0b110000) + elif b < 256 then + bw.WriteBE 9 (b - 144 + 0b110010000) + elif b < 280 then + bw.WriteBE 7 (b - 256) + elif b < 288 then + bw.WriteBE 8 (b - 280 + 0b11000000) + + member x.WriteLen (len:int) = + if len < 3 || len > maxlen then + failwith <| sprintf "不正な長さ: %d" len + let mutable ll = 285 + while len < litlens.[ll - 257] do + ll <- ll - 1 + x.Write ll + bw.WriteLE (getLitExLen ll) (len - litlens.[ll - 257]) + + member x.WriteDist (d:int) = + if d < 1 || d > maxbuf then + failwith <| sprintf "不正な距離: %d" d + let mutable dl = 29 + while d < distlens.[dl] do + dl <- dl - 1 + bw.WriteBE 5 dl + bw.WriteLE (getDistExLen dl) (d - distlens.[dl]) + +let maxbuf2 = maxbuf * 2 +let buflen = maxbuf2 + maxlen + +let inline getHash (buf:byte[]) pos = + ((int buf.[pos]) <<< 4) ^^^ ((int buf.[pos + 1]) <<< 2) ^^^ (int buf.[pos + 2]) + +let inline addHash (hash:List[]) (buf:byte[]) pos = + if buf.[pos] <> buf.[pos + 1] then + hash.[getHash buf pos].Add pos + +let inline addHash2 (tables:int[,]) (counts:int[]) (buf:byte[]) pos = + if buf.[pos] <> buf.[pos + 1] then + let h = getHash buf pos + let c = counts.[h] + tables.[h, c &&& 15] <- pos + counts.[h] <- c + 1 + +type Writer(t:int, sin:Stream) = + let mutable length = buflen + let buf = Array.zeroCreate buflen + let tables, counts = + if t = 2 then Array2D.zeroCreate 4096 16, Array.create 4096 0 else null, null + let hash = if tables = null then [| for _ in 0..4095 -> new List() |] else null + let mutable crc = ~~~0u + + let read pos len = + let rlen = sin.Read(buf, pos, len) + if rlen < len then length <- pos + rlen + for i = pos to pos + rlen - 1 do + let b = int(crc ^^^ (uint32 buf.[i])) &&& 0xff + crc <- (crc >>> 8) ^^^ crc32_table.[b] + if hash <> null then + for list in hash do list.Clear() + else + Array.fill counts 0 counts.Length 0 + + do + read 0 buflen + + let search (pos:int) = + let mutable maxp = -1 + let mutable maxl = 2 + 
let mlen = Math.Min(maxlen, length - pos) + let last = Math.Max(0, pos - maxbuf) + let h = getHash buf pos + if hash <> null then + let list = hash.[h] + let mutable i = list.Count - 1 + while i >= 0 do + let p = list.[i] + if p < last then i <- 0 else + let mutable len = 0 + while len < mlen && buf.[p + len] = buf.[pos + len] do + len <- len + 1 + if len > maxl then + maxp <- p + maxl <- len + i <- i - 1 + else + let c = counts.[h] + let p1, p2 = if c < 16 then 0, c - 1 else c + 1, c + 16 + let mutable i = p2 + while i >= p1 do + let p = tables.[h, i &&& 15] + if p < last then i <- 0 else + let mutable len = 0 + while len < mlen && buf.[p + len] = buf.[pos + len] do + len <- len + 1 + if len > maxl then + maxp <- p + maxl <- len + i <- i - 1 + maxp, maxl + + member x.Crc = ~~~crc + + member x.Compress (sout:Stream) = + use bw = new BitWriter(sout) + bw.WriteBit 1 + bw.WriteLE 2 1 + let hw = new FixedHuffmanWriter(bw) + let mutable p = 0 + match t with + | 2 -> + while p < length do + let b = buf.[p] + if p < length - 4 && b = buf.[p + 1] && b = buf.[p + 2] && b = buf.[p + 3] then + let mutable len = 4 + let mlen = Math.Min(maxlen + 1, length - p) + while len < mlen && b = buf.[p + len] do + len <- len + 1 + hw.Write(int b) + hw.WriteLen(len - 1) + hw.WriteDist 1 + p <- p + len + else + let maxp, maxl = search p + if maxp < 0 then + hw.Write(int b) + addHash2 tables counts buf p + p <- p + 1 + else + hw.WriteLen maxl + hw.WriteDist (p - maxp) + for i = p to p + maxl - 1 do + addHash2 tables counts buf i + p <- p + maxl + if p > maxbuf2 then + Array.Copy(buf, maxbuf, buf, 0, maxbuf + maxlen) + if length < buflen then length <- length - maxbuf else + read (maxbuf + maxlen) maxbuf + p <- p - maxbuf + for i = 0 to p - 1 do + addHash2 tables counts buf i + | 1 -> + while p < length do + let b = buf.[p] + if p < length - 4 && b = buf.[p + 1] && b = buf.[p + 2] && b = buf.[p + 3] then + let mutable len = 4 + let mlen = Math.Min(maxlen + 1, length - p) + while len < mlen && b = buf.[p + len] do + len <- len + 1 + hw.Write(int b) + hw.WriteLen(len - 1) + hw.WriteDist 1 + p <- p + len + else + let maxp, maxl = search p + if maxp < 0 then + hw.Write(int b) + addHash hash buf p + p <- p + 1 + else + hw.WriteLen maxl + hw.WriteDist (p - maxp) + for i = p to p + maxl - 1 do + addHash hash buf i + p <- p + maxl + if p > maxbuf2 then + Array.Copy(buf, maxbuf, buf, 0, maxbuf + maxlen) + if length < buflen then length <- length - maxbuf else + read (maxbuf + maxlen) maxbuf + p <- p - maxbuf + for i = 0 to p - 1 do + addHash hash buf i + | _ -> + while p < length do + let maxp, maxl = search p + if maxp < 0 then + hw.Write(int buf.[p]) + hash.[getHash buf p].Add p + p <- p + 1 + else + hw.WriteLen maxl + hw.WriteDist (p - maxp) + for i = p to p + maxl - 1 do + hash.[getHash buf i].Add i + p <- p + maxl + if p > maxbuf2 then + Array.Copy(buf, maxbuf, buf, 0, maxbuf + maxlen) + if length < buflen then length <- length - maxbuf else + read (maxbuf + maxlen) maxbuf + p <- p - maxbuf + for i = 0 to p - 1 do + hash.[getHash buf i].Add i + hw.Write 256 + +let GetCompressBytes (sin:Stream) = + let now = DateTime.Now + let ms = new MemoryStream() + let w = new Writer(1, sin) + w.Compress ms + ms.ToArray(), w.Crc diff --git a/vendor/pygments/tests/examplefiles/Error.pmod b/vendor/pygments/tests/examplefiles/Error.pmod new file mode 100644 index 0000000..808ecb0 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/Error.pmod @@ -0,0 +1,38 @@ +#pike __REAL_VERSION__ + +constant Generic = __builtin.GenericError; + 
+constant Index = __builtin.IndexError; + +constant BadArgument = __builtin.BadArgumentError; + +constant Math = __builtin.MathError; + +constant Resource = __builtin.ResourceError; + +constant Permission = __builtin.PermissionError; + +constant Decode = __builtin.DecodeError; + +constant Cpp = __builtin.CppError; + +constant Compilation = __builtin.CompilationError; + +constant MasterLoad = __builtin.MasterLoadError; + +constant ModuleLoad = __builtin.ModuleLoadError; + +//! Returns an Error object for any argument it receives. If the +//! argument already is an Error object or is empty, it does nothing. +object mkerror(mixed error) +{ + if (error == UNDEFINED) + return error; + if (objectp(error) && error->is_generic_error) + return error; + if (arrayp(error)) + return Error.Generic(@error); + if (stringp(error)) + return Error.Generic(error); + return Error.Generic(sprintf("%O", error)); +} \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/Errors.scala b/vendor/pygments/tests/examplefiles/Errors.scala index 67198c0..7af7028 100644 --- a/vendor/pygments/tests/examplefiles/Errors.scala +++ b/vendor/pygments/tests/examplefiles/Errors.scala @@ -11,6 +11,11 @@ String val foo_+ = "foo plus" val foo_⌬⌬ = "double benzene" + // Test some interpolated strings + val mu = s"${if (true) "a:b" else "c" {with "braces"}}" + val mu2 = f"${if (true) "a:b" else "c" {with "braces"}}" + val raw = raw"a raw\nstring\"with escaped quotes" + def main(argv: Array[String]) { println(⌘.interface + " " + foo_+ + " " + foo_⌬⌬ ) } diff --git a/vendor/pygments/tests/examplefiles/FakeFile.pike b/vendor/pygments/tests/examplefiles/FakeFile.pike new file mode 100644 index 0000000..48f3ea6 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/FakeFile.pike @@ -0,0 +1,360 @@ +#pike __REAL_VERSION__ + +//! A string wrapper that pretends to be a @[Stdio.File] object +//! in addition to some features of a @[Stdio.FILE] object. + + +//! This constant can be used to distinguish a FakeFile object +//! from a real @[Stdio.File] object. +constant is_fake_file = 1; + +protected string data; +protected int ptr; +protected int(0..1) r; +protected int(0..1) w; +protected int mtime; + +protected function read_cb; +protected function read_oob_cb; +protected function write_cb; +protected function write_oob_cb; +protected function close_cb; + +//! @seealso +//! @[Stdio.File()->close()] +int close(void|string direction) { + direction = lower_case(direction||"rw"); + int cr = has_value(direction, "r"); + int cw = has_value(direction, "w"); + + if(cr) { + r = 0; + } + + if(cw) { + w = 0; + } + + // FIXME: Close callback + return 1; +} + +//! @decl void create(string data, void|string type, void|int pointer) +//! @seealso +//! @[Stdio.File()->create()] +void create(string _data, void|string type, int|void _ptr) { + if(!_data) error("No data string given to FakeFile.\n"); + data = _data; + ptr = _ptr; + mtime = time(); + if(type) { + type = lower_case(type); + if(has_value(type, "r")) + r = 1; + if(has_value(type, "w")) + w = 1; + } + else + r = w = 1; +} + +protected string make_type_str() { + string type = ""; + if(r) type += "r"; + if(w) type += "w"; + return type; +} + +//! @seealso +//! @[Stdio.File()->dup()] +this_program dup() { + return this_program(data, make_type_str(), ptr); +} + +//! Always returns 0. +//! @seealso +//! @[Stdio.File()->errno()] +int errno() { return 0; } + +//! Returns size and the creation time of the string. 
+Stdio.Stat stat() { + Stdio.Stat st = Stdio.Stat(); + st->size = sizeof(data); + st->mtime=st->ctime=mtime; + st->atime=time(); + return st; +} + +//! @seealso +//! @[Stdio.File()->line_iterator()] +String.SplitIterator line_iterator(int|void trim) { + if(trim) + return String.SplitIterator( data-"\r", '\n' ); + return String.SplitIterator( data, '\n' ); +} + +protected mixed id; + +//! @seealso +//! @[Stdio.File()->query_id()] +mixed query_id() { return id; } + +//! @seealso +//! @[Stdio.File()->set_id()] +void set_id(mixed _id) { id = _id; } + +//! @seealso +//! @[Stdio.File()->read_function()] +function(:string) read_function(int nbytes) { + return lambda() { return read(nbytes); }; +} + +//! @seealso +//! @[Stdio.File()->peek()] +int(-1..1) peek(int|float|void timeout) { + if(!r) return -1; + if(ptr >= sizeof(data)) return 0; + return 1; +} + +//! Always returns 0. +//! @seealso +//! @[Stdio.File()->query_address()] +string query_address(void|int(0..1) is_local) { return 0; } + +//! @seealso +//! @[Stdio.File()->read()] +string read(void|int(0..) len, void|int(0..1) not_all) { + if(!r) return 0; + if (len < 0) error("Cannot read negative number of characters.\n"); + int start=ptr; + ptr += len; + if(zero_type(len) || ptr>sizeof(data)) + ptr = sizeof(data); + + // FIXME: read callback + return data[start..ptr-1]; +} + +//! @seealso +//! @[Stdio.FILE()->gets()] +string gets() { + if(!r) return 0; + string ret; + sscanf(data,"%*"+(string)ptr+"s%[^\n]",ret); + if(ret) + { + ptr+=sizeof(ret)+1; + if(ptr>sizeof(data)) + { + ptr=sizeof(data); + if(!sizeof(ret)) + ret = 0; + } + } + + // FIXME: read callback + return ret; +} + +//! @seealso +//! @[Stdio.FILE()->getchar()] +int getchar() { + if(!r) return 0; + int c; + if(catch(c=data[ptr])) + c=-1; + else + ptr++; + + // FIXME: read callback + return c; +} + +//! @seealso +//! @[Stdio.FILE()->unread()] +void unread(string s) { + if(!r) return; + if(data[ptr-sizeof(s)..ptr-1]==s) + ptr-=sizeof(s); + else + { + data=s+data[ptr..]; + ptr=0; + } +} + +//! @seealso +//! @[Stdio.File()->seek()] +int seek(int pos, void|int mult, void|int add) { + if(mult) + pos = pos*mult+add; + if(pos<0) + { + pos = sizeof(data)+pos; + if( pos < 0 ) + pos = 0; + } + ptr = pos; + if( ptr > strlen( data ) ) + ptr = strlen(data); + return ptr; +} + +//! Always returns 1. +//! @seealso +//! @[Stdio.File()->sync()] +int(1..1) sync() { return 1; } + +//! @seealso +//! @[Stdio.File()->tell()] +int tell() { return ptr; } + +//! @seealso +//! @[Stdio.File()->truncate()] +int(0..1) truncate(int length) { + data = data[..length-1]; + return sizeof(data)==length; +} + +//! @seealso +//! @[Stdio.File()->write()] +int(-1..) write(string|array(string) str, mixed ... extra) { + if(!w) return -1; + if(arrayp(str)) str=str*""; + if(sizeof(extra)) str=sprintf(str, @extra); + + if(ptr==sizeof(data)) { + data += str; + ptr = sizeof(data); + } + else if(sizeof(str)==1) + data[ptr++] = str[0]; + else { + data = data[..ptr-1] + str + data[ptr+sizeof(str)..]; + ptr += sizeof(str); + } + + // FIXME: write callback + return sizeof(str); +} + +//! @seealso +//! @[Stdio.File()->set_blocking] +void set_blocking() { + close_cb = 0; + read_cb = 0; + read_oob_cb = 0; + write_cb = 0; + write_oob_cb = 0; +} + +//! @seealso +//! @[Stdio.File()->set_blocking_keep_callbacks] +void set_blocking_keep_callbacks() { } + +//! @seealso +//! 
@[Stdio.File()->set_blocking] +void set_nonblocking(function rcb, function wcb, function ccb, + function rocb, function wocb) { + read_cb = rcb; + write_cb = wcb; + close_cb = ccb; + read_oob_cb = rocb; + write_oob_cb = wocb; +} + +//! @seealso +//! @[Stdio.File()->set_blocking_keep_callbacks] +void set_nonblocking_keep_callbacks() { } + + +//! @seealso +//! @[Stdio.File()->set_close_callback] +void set_close_callback(function cb) { close_cb = cb; } + +//! @seealso +//! @[Stdio.File()->set_read_callback] +void set_read_callback(function cb) { read_cb = cb; } + +//! @seealso +//! @[Stdio.File()->set_read_oob_callback] +void set_read_oob_callback(function cb) { read_oob_cb = cb; } + +//! @seealso +//! @[Stdio.File()->set_write_callback] +void set_write_callback(function cb) { write_cb = cb; } + +//! @seealso +//! @[Stdio.File()->set_write_oob_callback] +void set_write_oob_callback(function cb) { write_oob_cb = cb; } + + +//! @seealso +//! @[Stdio.File()->query_close_callback] +function query_close_callback() { return close_cb; } + +//! @seealso +//! @[Stdio.File()->query_read_callback] +function query_read_callback() { return read_cb; } + +//! @seealso +//! @[Stdio.File()->query_read_oob_callback] +function query_read_oob_callback() { return read_oob_cb; } + +//! @seealso +//! @[Stdio.File()->query_write_callback] +function query_write_callback() { return write_cb; } + +//! @seealso +//! @[Stdio.File()->query_write_oob_callback] +function query_write_oob_callback() { return write_oob_cb; } + +string _sprintf(int t) { + return t=='O' && sprintf("%O(%d,%O)", this_program, sizeof(data), + make_type_str()); +} + + +// FakeFile specials. + +//! A FakeFile can be casted to a string. +mixed cast(string to) { + switch(to) { + case "string": return data; + case "object": return this; + } + error("Can not cast object to %O.\n", to); +} + +//! Sizeof on a FakeFile returns the size of its contents. +int(0..) _sizeof() { + return sizeof(data); +} + +//! @ignore + +#define NOPE(X) mixed X (mixed ... args) { error("This is a FakeFile. %s is not available.\n", #X); } +NOPE(assign); +NOPE(async_connect); +NOPE(connect); +NOPE(connect_unix); +NOPE(open); +NOPE(open_socket); +NOPE(pipe); +NOPE(tcgetattr); +NOPE(tcsetattr); + +// Stdio.Fd +NOPE(dup2); +NOPE(lock); // We could implement this +NOPE(mode); // We could implement this +NOPE(proxy); // We could implement this +NOPE(query_fd); +NOPE(read_oob); +NOPE(set_close_on_exec); +NOPE(set_keepalive); +NOPE(trylock); // We could implement this +NOPE(write_oob); + +//! @endignore \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/Get-CommandDefinitionHtml.ps1 b/vendor/pygments/tests/examplefiles/Get-CommandDefinitionHtml.ps1 new file mode 100644 index 0000000..b181955 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/Get-CommandDefinitionHtml.ps1 @@ -0,0 +1,66 @@ + +function Get-CommandDefinitionHtml { + + # this tells powershell to allow advanced features, + # like the [validatenotnullorempty()] attribute below. + [CmdletBinding()] + param( + [ValidateNotNullOrEmpty()] + [string]$name + ) + + $command = get-command $name + + # Look mom! I'm a cmdlet! + $PSCmdlet.WriteVerbose("Dumping HTML for " + $command) + +@" + + + $($command.name) + + +
    +$( + $command.parametersets | % { +@" + + + + + +"@ + } +) +
    $($_.name) + + + + +$( + $count = 0 + $_.parameters | % { + if (0 -eq ($count % 8)) { +@' + + +'@ + } +@" + +"@ + $count++ + } +) + +
    Parameters
    $($_.name)
    +
    + + +"@ +} + +Get-CommandDefinitionHtml get-item > out.html + +# show in browser +invoke-item out.html diff --git a/vendor/pygments/tests/examplefiles/IPDispatchC.nc b/vendor/pygments/tests/examplefiles/IPDispatchC.nc new file mode 100644 index 0000000..95a61a2 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/IPDispatchC.nc @@ -0,0 +1,104 @@ +/* + * "Copyright (c) 2008-2011 The Regents of the University of California. + * All rights reserved." + * + * Permission to use, copy, modify, and distribute this software and its + * documentation for any purpose, without fee, and without written agreement is + * hereby granted, provided that the above copyright notice, the following + * two paragraphs and the author appear in all copies of this software. + * + * IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT + * OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE UNIVERSITY OF + * CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY + * AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS." + * + */ + +/** + * + * + */ +#include "IPDispatch.h" +#include "BlipStatistics.h" + +configuration IPDispatchC { + provides { + interface SplitControl; + interface IPLower; + interface BlipStatistics; + } +} implementation { + + components MainC; + components NoLedsC as LedsC; + + /* IPDispatchP wiring -- fragment rassembly and lib6lowpan bindings */ + components IPDispatchP; + components CC2420RadioC as MessageC; + components ReadLqiC; + components new TimerMilliC(); + + SplitControl = IPDispatchP.SplitControl; + IPLower = IPDispatchP; + BlipStatistics = IPDispatchP; + + IPDispatchP.Boot -> MainC; +/* #else */ +/* components ResourceSendP; */ +/* ResourceSendP.SubSend -> MessageC; */ +/* ResourceSendP.Resource -> MessageC.SendResource[unique("RADIO_SEND_RESOURCE")]; */ +/* IPDispatchP.Ieee154Send -> ResourceSendP.Ieee154Send; */ +/* #endif */ + IPDispatchP.RadioControl -> MessageC; + + IPDispatchP.BarePacket -> MessageC.BarePacket; + IPDispatchP.Ieee154Send -> MessageC.BareSend; + IPDispatchP.Ieee154Receive -> MessageC.BareReceive; + +#ifdef LOW_POWER_LISTENING + IPDispatchP.LowPowerListening -> MessageC; +#endif + MainC.SoftwareInit -> IPDispatchP.Init; + + IPDispatchP.PacketLink -> MessageC; + IPDispatchP.ReadLqi -> ReadLqiC; + IPDispatchP.Leds -> LedsC; + IPDispatchP.ExpireTimer -> TimerMilliC; + + components new PoolC(message_t, N_FRAGMENTS) as FragPool; + components new PoolC(struct send_entry, N_FRAGMENTS) as SendEntryPool; + components new QueueC(struct send_entry *, N_FRAGMENTS); + components new PoolC(struct send_info, N_CONCURRENT_SENDS) as SendInfoPool; + + IPDispatchP.FragPool -> FragPool; + IPDispatchP.SendEntryPool -> SendEntryPool; + IPDispatchP.SendInfoPool -> SendInfoPool; + IPDispatchP.SendQueue -> QueueC; + + components IPNeighborDiscoveryP; + IPDispatchP.NeighborDiscovery -> IPNeighborDiscoveryP; + +/* components ICMPResponderC; */ +/* #ifdef BLIP_MULTICAST */ +/* components MulticastP; */ +/* components new TrickleTimerMilliC(2, 30, 2, 1); */ +/* IP = MulticastP.IP; */ + +/* MainC.SoftwareInit -> MulticastP.Init; */ +/* 
MulticastP.MulticastRx -> IPDispatchP.Multicast; */ +/* MulticastP.HopHeader -> IPExtensionP.HopByHopExt[0]; */ +/* MulticastP.TrickleTimer -> TrickleTimerMilliC.TrickleTimer[0]; */ +/* MulticastP.IPExtensions -> IPDispatchP; */ +/* #endif */ + +#ifdef DELUGE + components NWProgC; +#endif + +} diff --git a/vendor/pygments/tests/examplefiles/IPDispatchP.nc b/vendor/pygments/tests/examplefiles/IPDispatchP.nc new file mode 100644 index 0000000..628f39a --- /dev/null +++ b/vendor/pygments/tests/examplefiles/IPDispatchP.nc @@ -0,0 +1,671 @@ +/* + * "Copyright (c) 2008 The Regents of the University of California. + * All rights reserved." + * + * Permission to use, copy, modify, and distribute this software and its + * documentation for any purpose, without fee, and without written agreement is + * hereby granted, provided that the above copyright notice, the following + * two paragraphs and the author appear in all copies of this software. + * + * IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT + * OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE UNIVERSITY OF + * CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY + * AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS." + * + */ + +#include +#include +#include +#include +#include +#include + +#include "blip_printf.h" +#include "IPDispatch.h" +#include "BlipStatistics.h" +#include "table.h" + +/* + * Provides IP layer reception to applications on motes. + * + * @author Stephen Dawson-Haggerty + */ + +module IPDispatchP { + provides { + interface SplitControl; + // interface for protocols not requiring special hand-holding + interface IPLower; + + interface BlipStatistics; + + } + uses { + interface Boot; + + + /* link-layer wiring */ + interface SplitControl as RadioControl; + + interface Packet as BarePacket; + interface Send as Ieee154Send; + interface Receive as Ieee154Receive; + + /* context lookup */ + interface NeighborDiscovery; + + interface ReadLqi; + interface PacketLink; + interface LowPowerListening; + + /* buffers for outgoing fragments */ + interface Pool as FragPool; + interface Pool as SendInfoPool; + interface Pool as SendEntryPool; + interface Queue as SendQueue; + + /* expire reconstruction */ + interface Timer as ExpireTimer; + + interface Leds; + + } + provides interface Init; +} implementation { + +#define HAVE_LOWPAN_EXTERN_MATCH_CONTEXT +int lowpan_extern_read_context(struct in6_addr *addr, int context) { + return call NeighborDiscovery.getContext(context, addr); +} + +int lowpan_extern_match_context(struct in6_addr *addr, uint8_t *ctx_id) { + return call NeighborDiscovery.matchContext(addr, ctx_id); +} + + // generally including source files like this is a no-no. I'm doing + // this in the hope that the optimizer will do a better job when + // they're part of a component. 
+#include +#include +#include +#include + + enum { + S_RUNNING, + S_STOPPED, + S_STOPPING, + }; + uint8_t state = S_STOPPED; + bool radioBusy; + uint8_t current_local_label = 0; + ip_statistics_t stats; + + // this in theory could be arbitrarily large; however, it needs to + // be large enough to hold all active reconstructions, and any tags + // which we are dropping. It's important to keep dropped tags + // around for a while, or else there are pathological situations + // where you continually allocate buffers for packets which will + // never complete. + + //////////////////////////////////////// + // + // + + table_t recon_cache; + + // table of packets we are currently receiving fragments from, that + // are destined to us + struct lowpan_reconstruct recon_data[N_RECONSTRUCTIONS]; + + // + // + //////////////////////////////////////// + + // task void sendTask(); + + void reconstruct_clear(void *ent) { + struct lowpan_reconstruct *recon = (struct lowpan_reconstruct *)ent; + memclr((uint8_t *)&recon->r_meta, sizeof(struct ip6_metadata)); + recon->r_timeout = T_UNUSED; + recon->r_buf = NULL; + } + + struct send_info *getSendInfo() { + struct send_info *ret = call SendInfoPool.get(); + if (ret == NULL) return ret; + ret->_refcount = 1; + ret->upper_data = NULL; + ret->failed = FALSE; + ret->link_transmissions = 0; + ret->link_fragments = 0; + ret->link_fragment_attempts = 0; + return ret; + } +#define SENDINFO_INCR(X) ((X)->_refcount)++ +void SENDINFO_DECR(struct send_info *si) { + if (--(si->_refcount) == 0) { + call SendInfoPool.put(si); + } +} + + command error_t SplitControl.start() { + return call RadioControl.start(); + } + + command error_t SplitControl.stop() { + if (!radioBusy) { + state = S_STOPPED; + return call RadioControl.stop(); + } else { + // if there's a packet in the radio, wait for it to exit before + // stopping + state = S_STOPPING; + return SUCCESS; + } + } + + event void RadioControl.startDone(error_t error) { +#ifdef LPL_SLEEP_INTERVAL + call LowPowerListening.setLocalWakeupInterval(LPL_SLEEP_INTERVAL); +#endif + + if (error == SUCCESS) { + call Leds.led2Toggle(); + call ExpireTimer.startPeriodic(FRAG_EXPIRE_TIME); + state = S_RUNNING; + radioBusy = FALSE; + } + + signal SplitControl.startDone(error); + } + + event void RadioControl.stopDone(error_t error) { + signal SplitControl.stopDone(error); + } + + command error_t Init.init() { + // ip_malloc_init needs to be in init, not booted, because + // context for coap is initialised in init + ip_malloc_init(); + return SUCCESS; + } + + event void Boot.booted() { + call BlipStatistics.clear(); + + /* set up our reconstruction cache */ + table_init(&recon_cache, recon_data, sizeof(struct lowpan_reconstruct), N_RECONSTRUCTIONS); + table_map(&recon_cache, reconstruct_clear); + + call SplitControl.start(); + } + + /* + * Receive-side code. + */ + void deliver(struct lowpan_reconstruct *recon) { + struct ip6_hdr *iph = (struct ip6_hdr *)recon->r_buf; + + // printf("deliver [%i]: ", recon->r_bytes_rcvd); + // printf_buf(recon->r_buf, recon->r_bytes_rcvd); + + /* the payload length field is always compressed, have to put it back here */ + iph->ip6_plen = htons(recon->r_bytes_rcvd - sizeof(struct ip6_hdr)); + signal IPLower.recv(iph, (void *)(iph + 1), &recon->r_meta); + + // printf("ip_free(%p)\n", recon->r_buf); + ip_free(recon->r_buf); + recon->r_timeout = T_UNUSED; + recon->r_buf = NULL; + } + + /* + * Bulletproof recovery logic is very important to make sure we + * don't get wedged with no free buffers. 
+ * + * The table is managed as follows: + * - unused entries are marked T_UNUSED + * - entries which + * o have a buffer allocated + * o have had a fragment reception before we fired + * are marked T_ACTIVE + * - entries which have not had a fragment reception during the last timer period + * and were active are marked T_ZOMBIE + * - zombie receptions are deleted: their buffer is freed and table entry marked unused. + * - when a fragment is dropped, it is entered into the table as T_FAILED1. + * no buffer is allocated + * - when the timer fires, T_FAILED1 entries are aged to T_FAILED2. + * - T_FAILED2 entries are deleted. Incomming fragments with tags + * that are marked either FAILED1 or FAILED2 are dropped; this + * prevents us from allocating a buffer for a packet which we + * have already dropped fragments from. + * + */ + void reconstruct_age(void *elt) { + struct lowpan_reconstruct *recon = (struct lowpan_reconstruct *)elt; + if (recon->r_timeout != T_UNUSED) + printf("recon src: 0x%x tag: 0x%x buf: %p recvd: %i/%i\n", + recon->r_source_key, recon->r_tag, recon->r_buf, + recon->r_bytes_rcvd, recon->r_size); + switch (recon->r_timeout) { + case T_ACTIVE: + recon->r_timeout = T_ZOMBIE; break; // age existing receptions + case T_FAILED1: + recon->r_timeout = T_FAILED2; break; // age existing receptions + case T_ZOMBIE: + case T_FAILED2: + // deallocate the space for reconstruction + printf("timing out buffer: src: %i tag: %i\n", recon->r_source_key, recon->r_tag); + if (recon->r_buf != NULL) { + printf("ip_free(%p)\n", recon->r_buf); + ip_free(recon->r_buf); + } + recon->r_timeout = T_UNUSED; + recon->r_buf = NULL; + break; + } + } + + void ip_print_heap() { +#ifdef PRINTFUART_ENABLED + bndrt_t *cur = (bndrt_t *)heap; + while (((uint8_t *)cur) - heap < IP_MALLOC_HEAP_SIZE) { + printf ("heap region start: %p length: %u used: %u\n", + cur, (*cur & IP_MALLOC_LEN), (*cur & IP_MALLOC_INUSE) >> 15); + cur = (bndrt_t *)(((uint8_t *)cur) + ((*cur) & IP_MALLOC_LEN)); + } +#endif + } + + event void ExpireTimer.fired() { + table_map(&recon_cache, reconstruct_age); + + + printf("Frag pool size: %i\n", call FragPool.size()); + printf("SendInfo pool size: %i\n", call SendInfoPool.size()); + printf("SendEntry pool size: %i\n", call SendEntryPool.size()); + printf("Forward queue length: %i\n", call SendQueue.size()); + ip_print_heap(); + printfflush(); + } + + /* + * allocate a structure for recording information about incomming fragments. + */ + + struct lowpan_reconstruct *get_reconstruct(uint16_t key, uint16_t tag) { + struct lowpan_reconstruct *ret = NULL; + int i; + + // printf("get_reconstruct: %x %i\n", key, tag); + + for (i = 0; i < N_RECONSTRUCTIONS; i++) { + struct lowpan_reconstruct *recon = (struct lowpan_reconstruct *)&recon_data[i]; + + if (recon->r_tag == tag && + recon->r_source_key == key) { + + if (recon->r_timeout > T_UNUSED) { + recon->r_timeout = T_ACTIVE; + ret = recon; + goto done; + + } else if (recon->r_timeout < T_UNUSED) { + // if we have already tried and failed to get a buffer, we + // need to drop remaining fragments. 
+ ret = NULL; + goto done; + } + } + if (recon->r_timeout == T_UNUSED) + ret = recon; + } + done: + // printf("got%p\n", ret); + return ret; + } + + event message_t *Ieee154Receive.receive(message_t *msg, void *msg_payload, uint8_t len) { + struct packed_lowmsg lowmsg; + struct ieee154_frame_addr frame_address; + uint8_t *buf = msg_payload; + + // printf(" -- RECEIVE -- len : %i\n", len); + + BLIP_STATS_INCR(stats.rx_total); + + /* unpack the 802.15.4 address fields */ + buf = unpack_ieee154_hdr(msg_payload, &frame_address); + len -= buf - (uint8_t *)msg_payload; + + /* unpack and 6lowpan headers */ + lowmsg.data = buf; + lowmsg.len = len; + lowmsg.headers = getHeaderBitmap(&lowmsg); + if (lowmsg.headers == LOWMSG_NALP) { + goto fail; + } + + if (hasFrag1Header(&lowmsg) || hasFragNHeader(&lowmsg)) { + // start reassembly + int rv; + struct lowpan_reconstruct *recon; + uint16_t tag, source_key; + + source_key = ieee154_hashaddr(&frame_address.ieee_src); + getFragDgramTag(&lowmsg, &tag); + recon = get_reconstruct(source_key, tag); + if (!recon) { + goto fail; + } + + /* fill in metadata: on fragmented packets, it applies to the + first fragment only */ + memcpy(&recon->r_meta.sender, &frame_address.ieee_src, + sizeof(ieee154_addr_t)); + recon->r_meta.lqi = call ReadLqi.readLqi(msg); + recon->r_meta.rssi = call ReadLqi.readRssi(msg); + + if (hasFrag1Header(&lowmsg)) { + if (recon->r_buf != NULL) goto fail; + rv = lowpan_recon_start(&frame_address, recon, buf, len); + } else { + rv = lowpan_recon_add(recon, buf, len); + } + + if (rv < 0) { + recon->r_timeout = T_FAILED1; + goto fail; + } else { + // printf("start recon buf: %p\n", recon->r_buf); + recon->r_timeout = T_ACTIVE; + recon->r_source_key = source_key; + recon->r_tag = tag; + } + + if (recon->r_size == recon->r_bytes_rcvd) { + deliver(recon); + } + + } else { + /* no fragmentation, just deliver it */ + int rv; + struct lowpan_reconstruct recon; + + /* fill in metadata */ + memcpy(&recon.r_meta.sender, &frame_address.ieee_src, + sizeof(ieee154_addr_t)); + recon.r_meta.lqi = call ReadLqi.readLqi(msg); + recon.r_meta.rssi = call ReadLqi.readRssi(msg); + + buf = getLowpanPayload(&lowmsg); + if ((rv = lowpan_recon_start(&frame_address, &recon, buf, len)) < 0) { + goto fail; + } + + if (recon.r_size == recon.r_bytes_rcvd) { + deliver(&recon); + } else { + // printf("ip_free(%p)\n", recon.r_buf); + ip_free(recon.r_buf); + } + } + goto done; + fail: + BLIP_STATS_INCR(stats.rx_drop); + done: + return msg; + } + + + /* + * Send-side functionality + */ + task void sendTask() { + struct send_entry *s_entry; + + // printf("sendTask() - sending\n"); + + if (radioBusy || state != S_RUNNING) return; + if (call SendQueue.empty()) return; + // this does not dequeue + s_entry = call SendQueue.head(); + +#ifdef LPL_SLEEP_INTERVAL + call LowPowerListening.setRemoteWakeupInterval(s_entry->msg, + call LowPowerListening.getLocalWakeupInterval()); +#endif + + if (s_entry->info->failed) { + dbg("Drops", "drops: sendTask: dropping failed fragment\n"); + goto fail; + } + + if ((call Ieee154Send.send(s_entry->msg, + call BarePacket.payloadLength(s_entry->msg))) != SUCCESS) { + dbg("Drops", "drops: sendTask: send failed\n"); + goto fail; + } else { + radioBusy = TRUE; + } + + return; + fail: + printf("SEND FAIL\n"); + post sendTask(); + BLIP_STATS_INCR(stats.tx_drop); + + // deallocate the memory associated with this request. + // other fragments associated with this packet will get dropped. 
+ s_entry->info->failed = TRUE; + SENDINFO_DECR(s_entry->info); + call FragPool.put(s_entry->msg); + call SendEntryPool.put(s_entry); + call SendQueue.dequeue(); + } + + + /* + * it will pack the message into the fragment pool and enqueue + * those fragments for sending + * + * it will set + * - payload length + * - version, traffic class and flow label + * + * the source and destination IP addresses must be set by higher + * layers. + */ + command error_t IPLower.send(struct ieee154_frame_addr *frame_addr, + struct ip6_packet *msg, + void *data) { + struct lowpan_ctx ctx; + struct send_info *s_info; + struct send_entry *s_entry; + message_t *outgoing; + + int frag_len = 1; + error_t rc = SUCCESS; + + if (state != S_RUNNING) { + return EOFF; + } + + /* set version to 6 in case upper layers forgot */ + msg->ip6_hdr.ip6_vfc &= ~IPV6_VERSION_MASK; + msg->ip6_hdr.ip6_vfc |= IPV6_VERSION; + + ctx.tag = current_local_label++; + ctx.offset = 0; + + s_info = getSendInfo(); + if (s_info == NULL) { + rc = ERETRY; + goto cleanup_outer; + } + s_info->upper_data = data; + + while (frag_len > 0) { + s_entry = call SendEntryPool.get(); + outgoing = call FragPool.get(); + + if (s_entry == NULL || outgoing == NULL) { + if (s_entry != NULL) + call SendEntryPool.put(s_entry); + if (outgoing != NULL) + call FragPool.put(outgoing); + // this will cause any fragments we have already enqueued to + // be dropped by the send task. + s_info->failed = TRUE; + printf("drops: IP send: no fragments\n"); + rc = ERETRY; + goto done; + } + + call BarePacket.clear(outgoing); + frag_len = lowpan_frag_get(call Ieee154Send.getPayload(outgoing, 0), + call BarePacket.maxPayloadLength(), + msg, + frame_addr, + &ctx); + if (frag_len < 0) { + printf(" get frag error: %i\n", frag_len); + } + + printf("fragment length: %i offset: %i\n", frag_len, ctx.offset); + call BarePacket.setPayloadLength(outgoing, frag_len); + + if (frag_len <= 0) { + call FragPool.put(outgoing); + call SendEntryPool.put(s_entry); + goto done; + } + + if (call SendQueue.enqueue(s_entry) != SUCCESS) { + BLIP_STATS_INCR(stats.encfail); + s_info->failed = TRUE; + printf("drops: IP send: enqueue failed\n"); + goto done; + } + + s_info->link_fragments++; + s_entry->msg = outgoing; + s_entry->info = s_info; + + /* configure the L2 */ + if (frame_addr->ieee_dst.ieee_mode == IEEE154_ADDR_SHORT && + frame_addr->ieee_dst.i_saddr == IEEE154_BROADCAST_ADDR) { + call PacketLink.setRetries(s_entry->msg, 0); + } else { + call PacketLink.setRetries(s_entry->msg, BLIP_L2_RETRIES); + } + call PacketLink.setRetryDelay(s_entry->msg, BLIP_L2_DELAY); + + SENDINFO_INCR(s_info);} + + // printf("got %i frags\n", s_info->link_fragments); + done: + BLIP_STATS_INCR(stats.sent); + SENDINFO_DECR(s_info); + post sendTask(); + cleanup_outer: + return rc; + } + + event void Ieee154Send.sendDone(message_t *msg, error_t error) { + struct send_entry *s_entry = call SendQueue.head(); + + radioBusy = FALSE; + + // printf("sendDone: %p %i\n", msg, error); + + if (state == S_STOPPING) { + call RadioControl.stop(); + state = S_STOPPED; + goto done; + } + + s_entry->info->link_transmissions += (call PacketLink.getRetries(msg)); + s_entry->info->link_fragment_attempts++; + + if (!call PacketLink.wasDelivered(msg)) { + printf("sendDone: was not delivered! 
(%i tries)\n", + call PacketLink.getRetries(msg)); + s_entry->info->failed = TRUE; + signal IPLower.sendDone(s_entry->info); +/* if (s_entry->info->policy.dest[0] != 0xffff) */ +/* dbg("Drops", "drops: sendDone: frag was not delivered\n"); */ + // need to check for broadcast frames + // BLIP_STATS_INCR(stats.tx_drop); + } else if (s_entry->info->link_fragment_attempts == + s_entry->info->link_fragments) { + signal IPLower.sendDone(s_entry->info); + } + + done: + // kill off any pending fragments + SENDINFO_DECR(s_entry->info); + call FragPool.put(s_entry->msg); + call SendEntryPool.put(s_entry); + call SendQueue.dequeue(); + + post sendTask(); + } + +#if 0 + command struct tlv_hdr *IPExtensions.findTlv(struct ip6_ext *ext, uint8_t tlv_val) { + int len = ext->len - sizeof(struct ip6_ext); + struct tlv_hdr *tlv = (struct tlv_hdr *)(ext + 1); + while (len > 0) { + if (tlv->type == tlv_val) return tlv; + if (tlv->len == 0) return NULL; + tlv = (struct tlv_hdr *)(((uint8_t *)tlv) + tlv->len); + len -= tlv->len; + } + return NULL; + } +#endif + + + /* + * BlipStatistics interface + */ + command void BlipStatistics.get(ip_statistics_t *statistics) { +#ifdef BLIP_STATS_IP_MEM + stats.fragpool = call FragPool.size(); + stats.sendinfo = call SendInfoPool.size(); + stats.sendentry= call SendEntryPool.size(); + stats.sndqueue = call SendQueue.size(); + stats.heapfree = ip_malloc_freespace(); + printf("frag: %i sendinfo: %i sendentry: %i sendqueue: %i heap: %i\n", + stats.fragpool, + stats.sendinfo, + stats.sendentry, + stats.sndqueue, + stats.heapfree); +#endif + memcpy(statistics, &stats, sizeof(ip_statistics_t)); + + } + + command void BlipStatistics.clear() { + memclr((uint8_t *)&stats, sizeof(ip_statistics_t)); + } + +/* default event void IP.recv[uint8_t nxt_hdr](struct ip6_hdr *iph, */ +/* void *payload, */ +/* struct ip_metadata *meta) { */ +/* } */ + +/* default event void Multicast.recv[uint8_t scope](struct ip6_hdr *iph, */ +/* void *payload, */ +/* struct ip_metadata *meta) { */ +/* } */ +} diff --git a/vendor/pygments/tests/examplefiles/RoleQ.pm6 b/vendor/pygments/tests/examplefiles/RoleQ.pm6 new file mode 100644 index 0000000..9b66bde --- /dev/null +++ b/vendor/pygments/tests/examplefiles/RoleQ.pm6 @@ -0,0 +1,23 @@ +role q { + token stopper { \' } + + token escape:sym<\\> { } + + token backslash:sym { } + token backslash:sym<\\> { } + token backslash:sym { } + + token backslash:sym { {} . } + + method tweak_q($v) { self.panic("Too late for :q") } + method tweak_qq($v) { self.panic("Too late for :qq") } +} + +role qq does b1 does c1 does s1 does a1 does h1 does f1 { + token stopper { \" } + token backslash:sym { {} (\w) { self.throw_unrecog_backslash_seq: $/[0].Str } } + token backslash:sym { \W } + + method tweak_q($v) { self.panic("Too late for :q") } + method tweak_qq($v) { self.panic("Too late for :qq") } +} diff --git a/vendor/pygments/tests/examplefiles/abnf_example1.abnf b/vendor/pygments/tests/examplefiles/abnf_example1.abnf new file mode 100644 index 0000000..5cd9cd2 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/abnf_example1.abnf @@ -0,0 +1,22 @@ +; This examples from WikiPedia . + + postal-address = name-part street zip-part + + name-part = *(personal-part SP) last-name [SP suffix] CRLF + name-part =/ personal-part CRLF + + personal-part = first-name / (initial ".") + first-name = *ALPHA + initial = ALPHA + last-name = *ALPHA + suffix = ("Jr." / "Sr." 
/ 1*("I" / "V" / "X")) + + street = [apt SP] house-num SP street-name CRLF + apt = 1*4DIGIT + house-num = 1*8(DIGIT / ALPHA) + street-name = 1*VCHAR + + zip-part = town-name "," SP state 1*2SP zip-code CRLF + town-name = 1*(ALPHA / SP) + state = 2ALPHA + zip-code = 5DIGIT ["-" 4DIGIT] diff --git a/vendor/pygments/tests/examplefiles/abnf_example2.abnf b/vendor/pygments/tests/examplefiles/abnf_example2.abnf new file mode 100644 index 0000000..8781adf --- /dev/null +++ b/vendor/pygments/tests/examplefiles/abnf_example2.abnf @@ -0,0 +1,9 @@ +crlf = %d13.10 + +command = "command string" + +char-line = %x0D.0A *(%x20-7E) %x0D.0A + +without-ws-and-ctl = %d1-8 / %d11 / %d12 / %d14-31 / %d127 + +three-blank-lines = %x0D.0A.0D.0A.0D.0A diff --git a/vendor/pygments/tests/examplefiles/ahcon.f b/vendor/pygments/tests/examplefiles/ahcon.f new file mode 100644 index 0000000..48ae920 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/ahcon.f @@ -0,0 +1,340 @@ + SUBROUTINE AHCON (SIZE,N,M,A,B,OLEVR,OLEVI,CLEVR,CLEVI, TRUNCATED + & SCR1,SCR2,IPVT,JPVT,CON,WORK,ISEED,IERR) !Test inline comment +C +C FUNCTION: +CF +CF Determines whether the pair (A,B) is controllable and flags +CF the eigenvalues corresponding to uncontrollable modes. +CF this ad-hoc controllability calculation uses a random matrix F +CF and computes whether eigenvalues move from A to the controlled +CF system A+B*F. +CF +C USAGE: +CU +CU CALL AHCON (SIZE,N,M,A,B,OLEVR,OLEVI,CLEVR,CLEVI,SCR1,SCR2,IPVT, +CU JPVT,CON,WORK,ISEED,IERR) +CU +CU since AHCON generates different random F matrices for each +CU call, as long as iseed is not re-initialized by the main +CU program, and since this code has the potential to be fooled +CU by extremely ill-conditioned problems, the cautious user +CU may wish to call it multiple times and rely, perhaps, on +CU a 2-of-3 vote. We believe, but have not proved, that any +CU errors this routine may produce are conservative--i.e., that +CU it may flag a controllable mode as uncontrollable, but +CU not vice-versa. +CU +C INPUTS: +CI +CI SIZE integer - first dimension of all 2-d arrays. +CI +CI N integer - number of states. +CI +CI M integer - number of inputs. +CI +CI A double precision - SIZE by N array containing the +CI N by N system dynamics matrix A. +CI +CI B double precision - SIZE by M array containing the +CI N by M system input matrix B. +CI +CI ISEED initial seed for random number generator; if ISEED=0, +CI then AHCON will set ISEED to a legal value. +CI +C OUTPUTS: +CO +CO OLEVR double precision - N dimensional vector containing the +CO real parts of the eigenvalues of A. +CO +CO OLEVI double precision - N dimensional vector containing the +CO imaginary parts of the eigenvalues of A. +CO +CO CLEVR double precision - N dimensional vector work space +CO containing the real parts of the eigenvalues of A+B*F, +CO where F is the random matrix. +CO +CO CLEVI double precision - N dimensional vector work space +CO containing the imaginary parts of the eigenvalues of +CO A+B*F, where F is the random matrix. +CO +CO SCR1 double precision - N dimensional vector containing the +CO magnitudes of the corresponding eigenvalues of A. +CO +CO SCR2 double precision - N dimensional vector containing the +CO damping factors of the corresponding eigenvalues of A. +CO +CO IPVT integer - N dimensional vector; contains the row pivots +CO used in finding the nearest neighbor eigenvalues between +CO those of A and of A+B*F. The IPVT(1)th eigenvalue of +CO A and the JPVT(1)th eigenvalue of A+B*F are the closest +CO pair. 
+CO +CO JPVT integer - N dimensional vector; contains the column +CO pivots used in finding the nearest neighbor eigenvalues; +CO see IPVT. +CO +CO CON logical - N dimensional vector; flagging the uncontrollable +CO modes of the system. CON(I)=.TRUE. implies the +CO eigenvalue of A given by DCMPLX(OLEVR(IPVT(I)),OLEVI(IPVT(i))) +CO corresponds to a controllable mode; CON(I)=.FALSE. +CO implies an uncontrollable mode for that eigenvalue. +CO +CO WORK double precision - SIZE by N dimensional array containing +CO an N by N matrix. WORK(I,J) is the distance between +CO the open loop eigenvalue given by DCMPLX(OLEVR(I),OLEVI(I)) +CO and the closed loop eigenvalue of A+B*F given by +CO DCMPLX(CLEVR(J),CLEVI(J)). +CO +CO IERR integer - IERR=0 indicates normal return; a non-zero +CO value indicates trouble in the eigenvalue calculation. +CO see the EISPACK and EIGEN documentation for details. +CO +C ALGORITHM: +CA +CA Calculate eigenvalues of A and of A+B*F for a randomly +CA generated F, and see which ones change. Use a full pivot +CA search through a matrix of euclidean distance measures +CA between each pair of eigenvalues from (A,A+BF) to +CA determine the closest pairs. +CA +C MACHINE DEPENDENCIES: +CM +CM NONE +CM +C HISTORY: +CH +CH written by: Birdwell & Laub +CH date: May 18, 1985 +CH current version: 1.0 +CH modifications: made machine independent and modified for +CH f77:bb:8-86. +CH changed cmplx -> dcmplx: 7/27/88 jdb +CH +C ROUTINES CALLED: +CC +CC EIGEN,RAND +CC +C COMMON MEMORY USED: +CM +CM none +CM +C---------------------------------------------------------------------- +C written for: The CASCADE Project +C Oak Ridge National Laboratory +C U.S. Department of Energy +C contract number DE-AC05-840R21400 +C subcontract number 37B-7685 S13 +C organization: The University of Tennessee +C---------------------------------------------------------------------- +C THIS SOFTWARE IS IN THE PUBLIC DOMAIN +C NO RESTRICTIONS ON ITS USE ARE IMPLIED +C---------------------------------------------------------------------- +C +C--global variables: +C + INTEGER SIZE + INTEGER N + INTEGER M + INTEGER IPVT(1) + INTEGER JPVT(1) + INTEGER IERR +C + DOUBLE PRECISION A(SIZE,N) + DOUBLE PRECISION B(SIZE,M) + DOUBLE PRECISION WORK(SIZE,N) + DOUBLE PRECISION CLEVR(N) + DOUBLE PRECISION CLEVI(N) + DOUBLE PRECISION OLEVR(N) + DOUBLE PRECISION OLEVI(N) + DOUBLE PRECISION SCR1(N) + DOUBLE PRECISION SCR2(N) +C + LOGICAL CON(N) +C +C--local variables: +C + INTEGER ISEED + INTEGER ITEMP + INTEGER K1 + INTEGER K2 + INTEGER I + INTEGER J + INTEGER K + INTEGER IMAX + INTEGER JMAX +C + DOUBLE PRECISION VALUE + DOUBLE PRECISION EPS + DOUBLE PRECISION EPS1 + DOUBLE PRECISION TEMP + DOUBLE PRECISION CURR + DOUBLE PRECISION ANORM + DOUBLE PRECISION BNORM + DOUBLE PRECISION COLNRM + DOUBLE PRECISION RNDMNO +C + DOUBLE COMPLEX DCMPLX +C +C--compute machine epsilon +C + EPS = 1.D0 +100 CONTINUE + EPS = EPS / 2.D0 + EPS1 = 1.D0 + EPS + IF (EPS1 .NE. 1.D0) GO TO 100 + EPS = EPS * 2.D0 +C +C--compute the l-1 norm of a +C + ANORM = 0.0D0 + DO 120 J = 1, N + COLNRM = 0.D0 + DO 110 I = 1, N + COLNRM = COLNRM + ABS(A(I,J)) +110 CONTINUE + IF (COLNRM .GT. ANORM) ANORM = COLNRM +120 CONTINUE +C +C--compute the l-1 norm of b +C + BNORM = 0.0D0 + DO 140 J = 1, M + COLNRM = 0.D0 + DO 130 I = 1, N + COLNRM = COLNRM + ABS(B(I,J)) +130 CONTINUE + IF (COLNRM .GT. 
BNORM) BNORM = COLNRM +140 CONTINUE +C +C--compute a + b * f +C + DO 160 J = 1, N + DO 150 I = 1, N + WORK(I,J) = A(I,J) +150 CONTINUE +160 CONTINUE +C +C--the elements of f are random with uniform distribution +C--from -anorm/bnorm to +anorm/bnorm +C--note that f is not explicitly stored as a matrix +C--pathalogical floating point notes: the if (bnorm .gt. 0.d0) +C--test should actually be if (bnorm .gt. dsmall), where dsmall +C--is the smallest representable number whose reciprocal does +C--not generate an overflow or loss of precision. +C + IF (ISEED .EQ. 0) ISEED = 86345823 + IF (ANORM .EQ. 0.D0) ANORM = 1.D0 + IF (BNORM .GT. 0.D0) THEN + TEMP = 2.D0 * ANORM / BNORM + ELSE + TEMP = 2.D0 + END IF + DO 190 K = 1, M + DO 180 J = 1, N + CALL RAND(ISEED,ISEED,RNDMNO) + VALUE = (RNDMNO - 0.5D0) * TEMP + DO 170 I = 1, N + WORK(I,J) = WORK(I,J) + B(I,K)*VALUE +170 CONTINUE +180 CONTINUE +190 CONTINUE +C +C--compute the eigenvalues of a + b*f, and several other things +C + CALL EIGEN (0,SIZE,N,WORK,CLEVR,CLEVI,WORK,SCR1,SCR2,IERR) + IF (IERR .NE. 0) RETURN +C +C--copy a so it is not destroyed +C + DO 210 J = 1, N + DO 200 I = 1, N + WORK(I,J) = A(I,J) +200 CONTINUE +210 CONTINUE +C +C--compute the eigenvalues of a, and several other things +C + CALL EIGEN (0,SIZE,N,WORK,OLEVR,OLEVI,WORK,SCR1,SCR2,IERR) + IF (IERR .NE. 0) RETURN +C +C--form the matrix of distances between eigenvalues of a and +C--EIGENVALUES OF A+B*F +C + DO 230 J = 1, N + DO 220 I = 1, N + WORK(I,J) = + & ABS(DCMPLX(OLEVR(I),OLEVI(I))-DCMPLX(CLEVR(J),CLEVI(J))) +220 CONTINUE +230 CONTINUE +C +C--initialize row and column pivots +C + DO 240 I = 1, N + IPVT(I) = I + JPVT(I) = I +240 CONTINUE +C +C--a little bit messy to avoid swapping columns and +C--rows of work +C + DO 270 I = 1, N-1 +C +C--find the minimum element of each lower right square +C--submatrix of work, for submatrices of size n x n +C--through 2 x 2 +C + CURR = WORK(IPVT(I),JPVT(I)) + IMAX = I + JMAX = I + TEMP = CURR +C +C--find the minimum element +C + DO 260 K1 = I, N + DO 250 K2 = I, N + IF (WORK(IPVT(K1),JPVT(K2)) .LT. TEMP) THEN + TEMP = WORK(IPVT(K1),JPVT(K2)) + IMAX = K1 + JMAX = K2 + END IF +250 CONTINUE +260 CONTINUE +C +C--update row and column pivots for indirect addressing of work +C + ITEMP = IPVT(I) + IPVT(I) = IPVT(IMAX) + IPVT(IMAX) = ITEMP +C + ITEMP = JPVT(I) + JPVT(I) = JPVT(JMAX) + JPVT(JMAX) = ITEMP +C +C--do next submatrix +C +270 CONTINUE +C +C--this threshold for determining when an eigenvalue has +C--not moved, and is therefore uncontrollable, is critical, +C--and may require future changes with more experience. +C + EPS1 = SQRT(EPS) +C +C--for each eigenvalue pair, decide if it is controllable +C + DO 280 I = 1, N +C +C--note that we are working with the "pivoted" work matrix +C--and are looking at its diagonal elements +C + IF (WORK(IPVT(I),JPVT(I))/ANORM .LE. EPS1) THEN + CON(I) = .FALSE. + ELSE + CON(I) = .TRUE. + END IF +280 CONTINUE +C +C--finally! +C + RETURN + END diff --git a/vendor/pygments/tests/examplefiles/all.nit b/vendor/pygments/tests/examplefiles/all.nit new file mode 100644 index 0000000..d4e1ddf --- /dev/null +++ b/vendor/pygments/tests/examplefiles/all.nit @@ -0,0 +1,1986 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import gtk + +class CalculatorContext + var result : nullable Float = null + + var last_op : nullable Char = null + + var current : nullable Float = null + var after_point : nullable Int = null + + fun push_op( op : Char ) + do + apply_last_op_if_any + if op == 'C' then + self.result = 0.0 + last_op = null + else + last_op = op # store for next push_op + end + + # prepare next current + after_point = null + current = null + end + + fun push_digit( digit : Int ) + do + var current = current + if current == null then current = 0.0 + + var after_point = after_point + if after_point == null then + current = current * 10.0 + digit.to_f + else + current = current + digit.to_f * 10.0.pow(after_point.to_f) + self.after_point -= 1 + end + + self.current = current + end + + fun switch_to_decimals + do + if self.current == null then current = 0.0 + if after_point != null then return + + after_point = -1 + end + + fun apply_last_op_if_any + do + var op = last_op + + var result = result + if result == null then result = 0.0 + + var current = current + if current == null then current = 0.0 + + if op == null then + result = current + else if op == '+' then + result = result + current + else if op == '-' then + result = result - current + else if op == '/' then + result = result / current + else if op == '*' then + result = result * current + end + self.result = result + self.current = null + end +end + +class CalculatorGui + super GtkCallable + + var win : GtkWindow + var container : GtkGrid + + var lbl_disp : GtkLabel + var but_eq : GtkButton + var but_dot : GtkButton + + var context = new CalculatorContext + + redef fun signal( sender, user_data ) + do + var after_point = context.after_point + if after_point == null then + after_point = 0 + else + after_point = (after_point.abs) + end + + if user_data isa Char then # is an operation + var c = user_data + if c == '.' then + but_dot.sensitive= false + context.switch_to_decimals + lbl_disp.text = "{context.current.to_i}." 
+ else + but_dot.sensitive= true + context.push_op( c ) + + var s = context.result.to_precision_native(6) + var index : nullable Int = null + for i in s.length.times do + var chiffre = s.chars[i] + if chiffre == '0' and index == null then + index = i + else if chiffre != '0' then + index = null + end + end + if index != null then + s = s.substring(0, index) + if s.chars[s.length-1] == ',' then s = s.substring(0, s.length-1) + end + lbl_disp.text = s + end + else if user_data isa Int then # is a number + var n = user_data + context.push_digit( n ) + lbl_disp.text = context.current.to_precision_native(after_point) + end + end + + init + do + init_gtk + + win = new GtkWindow( 0 ) + + container = new GtkGrid(5,5,true) + win.add( container ) + + lbl_disp = new GtkLabel( "_" ) + container.attach( lbl_disp, 0, 0, 5, 1 ) + + # digits + for n in [0..9] do + var but = new GtkButton.with_label( n.to_s ) + but.request_size( 64, 64 ) + but.signal_connect( "clicked", self, n ) + if n == 0 then + container.attach( but, 0, 4, 1, 1 ) + else container.attach( but, (n-1)%3, 3-(n-1)/3, 1, 1 ) + end + + # operators + var r = 1 + for op in ['+', '-', '*', '/' ] do + var but = new GtkButton.with_label( op.to_s ) + but.request_size( 64, 64 ) + but.signal_connect( "clicked", self, op ) + container.attach( but, 3, r, 1, 1 ) + r+=1 + end + + # = + but_eq = new GtkButton.with_label( "=" ) + but_eq.request_size( 64, 64 ) + but_eq.signal_connect( "clicked", self, '=' ) + container.attach( but_eq, 4, 3, 1, 2 ) + + # . + but_dot = new GtkButton.with_label( "." ) + but_dot.request_size( 64, 64 ) + but_dot.signal_connect( "clicked", self, '.' ) + container.attach( but_dot, 1, 4, 1, 1 ) + + #C + var but_c = new GtkButton.with_label( "C" ) + but_c.request_size( 64, 64 ) + but_c.signal_connect("clicked", self, 'C') + container.attach( but_c, 2, 4, 1, 1 ) + + win.show_all + end +end + +# context tests +var context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 2 ) +context.push_op( '+' ) +context.push_digit( 3 ) +context.push_op( '*' ) +context.push_digit( 2 ) +context.push_op( '=' ) +var r = context.result.to_precision( 2 ) +assert r == "30.00" else print r + +context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 4 ) +context.switch_to_decimals +context.push_digit( 1 ) +context.push_op( '*' ) +context.push_digit( 3 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "42.30" else print r + +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_digit( 1 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "53.30" else print r + +context = new CalculatorContext +context.push_digit( 4 ) +context.push_digit( 2 ) +context.switch_to_decimals +context.push_digit( 3 ) +context.push_op( '/' ) +context.push_digit( 3 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "14.10" else print r + +#test multiple decimals +context = new CalculatorContext +context.push_digit( 5 ) +context.push_digit( 0 ) +context.switch_to_decimals +context.push_digit( 1 ) +context.push_digit( 2 ) +context.push_digit( 3 ) +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_op( '=' ) +r = context.result.to_precision( 3 ) +assert r == "51.123" else print r + +#test 'C' button +context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 0 ) +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_digit( 0 ) +context.push_op( '=' ) +context.push_op( 'C' ) +r = context.result.to_precision( 1 ) +assert r == 
"0.0" else print r + +# graphical application + +if "NIT_TESTING".environ != "true" then + var app = new CalculatorGui + run_gtk +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This sample has been implemented to show you how simple is it to play +# with native callbacks (C) through an high level with NIT program. + +module callback_chimpanze +import callback_monkey + +class Chimpanze + super MonkeyActionCallable + + fun create + do + var monkey = new Monkey + print "Hum, I'm sleeping ..." + # Invoking method which will take some time to compute, and + # will be back in wokeUp method with information. + # - Callback method defined in MonkeyActionCallable Interface + monkey.wokeUpAction(self, "Hey, I'm awake.") + end + + # Inherit callback method, defined by MonkeyActionCallable interface + # - Back of wokeUpAction method + redef fun wokeUp( sender:Monkey, message:Object ) + do + print message + end +end + +var m = new Chimpanze +m.create +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This sample has been implemented to show you how simple is it to play +# with native callbacks (C) through an high level with NIT program. + +module callback_monkey + +in "C header" `{ + #include + #include + + typedef struct { + int id; + int age; + } CMonkey; + + typedef struct { + MonkeyActionCallable toCall; + Object message; + } MonkeyAction; +`} + +in "C body" `{ + // Method which reproduce a callback answer + // Please note that a function pointer is only used to reproduce the callback + void cbMonkey(CMonkey *mkey, void callbackFunc(CMonkey*, MonkeyAction*), MonkeyAction *data) + { + sleep(2); + callbackFunc( mkey, data ); + } + + // Back of background treatment, will be redirected to callback function + void nit_monkey_callback_func( CMonkey *mkey, MonkeyAction *data ) + { + // To call a your method, the signature must be written like this : + // _... 
+ MonkeyActionCallable_wokeUp( data->toCall, mkey, data->message ); + } +`} + +# Implementable interface to get callback in defined methods +interface MonkeyActionCallable + fun wokeUp( sender:Monkey, message: Object) is abstract +end + +# Defining my object type Monkey, which is, in a low level, a pointer to a C struct (CMonkey) +extern class Monkey `{ CMonkey * `} + + new `{ + CMonkey *monkey = malloc( sizeof(CMonkey) ); + monkey->age = 10; + monkey->id = 1; + return monkey; + `} + + # Object method which will get a callback in wokeUp method, defined in MonkeyActionCallable interface + # Must be defined as Nit/C method because of C call inside + fun wokeUpAction( toCall: MonkeyActionCallable, message: Object ) is extern import MonkeyActionCallable.wokeUp `{ + + // Allocating memory to keep reference of received parameters : + // - Object receiver + // - Message + MonkeyAction *data = malloc( sizeof(MonkeyAction) ); + + // Incrementing reference counter to prevent from releasing + MonkeyActionCallable_incr_ref( toCall ); + Object_incr_ref( message ); + + data->toCall = toCall; + data->message = message; + + // Calling method which reproduce a callback by passing : + // - Receiver + // - Function pointer to object return method + // - Datas + cbMonkey( recv, &nit_monkey_callback_func, data ); + `} +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Implementation of circular lists +# This example shows the usage of generics and somewhat a specialisation of collections. +module circular_list + +# Sequences of elements implemented with a double-linked circular list +class CircularList[E] + # Like standard Array or LinkedList, CircularList is a Sequence. + super Sequence[E] + + # The first node of the list if any + # The special case of an empty list is handled by a null node + private var node: nullable CLNode[E] = null + + redef fun iterator do return new CircularListIterator[E](self) + + redef fun first do return self.node.item + + redef fun push(e) + do + var new_node = new CLNode[E](e) + var n = self.node + if n == null then + # the first node + self.node = new_node + else + # not the first one, so attach nodes correctly. + var old_last_node = n.prev + new_node.next = n + new_node.prev = old_last_node + old_last_node.next = new_node + n.prev = new_node + end + end + + redef fun pop + do + var n = self.node + assert n != null + var prev = n.prev + if prev == n then + # the only node + self.node = null + return n.item + end + # not the only one do detach nodes correctly. + var prev_prev = prev.prev + n.prev = prev_prev + prev_prev.next = n + return prev.item + end + + redef fun unshift(e) + do + # Circularity has benefits. + push(e) + self.node = self.node.prev + end + + redef fun shift + do + # Circularity has benefits. + self.node = self.node.next + return self.pop + end + + # Move the first at the last position, the second at the first, etc. 
+ fun rotate + do + var n = self.node + if n == null then return + self.node = n.next + end + + # Sort the list using the Josephus algorithm. + fun josephus(step: Int) + do + var res = new CircularList[E] + while not self.is_empty do + # count 'step' + for i in [1..step[ do self.rotate + # kill + var x = self.shift + res.add(x) + end + self.node = res.node + end +end + +# Nodes of a CircularList +private class CLNode[E] + # The current item + var item: E + + # The next item in the circular list. + # Because of circularity, there is always a next; + # so by default let it be self + var next: CLNode[E] = self + + # The previous item in the circular list. + # Coherence between next and previous nodes has to be maintained by the + # circular list. + var prev: CLNode[E] = self +end + +# An iterator of a CircularList. +private class CircularListIterator[E] + super IndexedIterator[E] + + redef var index: Int + + # The node currently pointed to. + # Is null if the list is empty. + var node: nullable CLNode[E] + + # The list being iterated. + var list: CircularList[E] + + redef fun is_ok + do + # Empty lists are not OK. + # Pointing at the first node again is not OK. + return self.node != null and (self.index == 0 or self.node != self.list.node) + end + + redef fun next + do + self.node = self.node.next + self.index += 1 + end + + redef fun item do return self.node.item + + init(list: CircularList[E]) + do + self.node = list.node + self.list = list + self.index = 0 + end +end + +var i = new CircularList[Int] +i.add_all([1, 2, 3, 4, 5, 6, 7]) +print i.first +print i.join(":") + +i.push(8) +print i.shift +print i.pop +i.unshift(0) +print i.join(":") + +i.josephus(3) +print i.join(":") +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module beefs up the clock module by allowing a clock to be comparable. +# It shows the usage of class refinement +module clock_more + +import clock + +redef class Clock + # Clocks are now comparable + super Comparable + + # Comparison of a clock only makes sense with another clock + redef type OTHER: Clock + + redef fun <(o) + do + # Note: < is the only abstract method of Comparable. + # All other operators and methods rely on < and ==. + return self.total_minutes < o.total_minutes + end +end + +var c1 = new Clock(8, 12) +var c2 = new Clock(8, 13) +var c3 = new Clock(9, 13) + +print "{c1}<{c2}? {c1<c2}" +print "{c1}<={c2}? {c1<=c2}" +print "{c1}>{c2}? {c1>c2}" +print "{c1}>={c2}? {c1>=c2}" +print "{c1}<=>{c2}? {c1<=>c2}" +print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" +print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" +print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}" + +print "-" + +c1.minutes += 1 + +print "{c1}<{c2}? {c1<c2}" +print "{c1}<={c2}? {c1<=c2}" +print "{c1}>{c2}? {c1>c2}" +print "{c1}>={c2}? {c1>=c2}" +print "{c1}<=>{c2}? {c1<=>c2}" +print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" +print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" +print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}" +# This file is part of NIT ( http://www.nitlanguage.org ).
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module provides a simple wall clock. +# It is an example of getters and setters. +# A beefed-up module is available in clock_more +module clock + +# A simple wall clock with 60 minutes and 12 hours. +class Clock + # total number of minutes from 0 to 719 + var total_minutes: Int + # Note: only the read access is public, the write access is private. + + # number of minutes in the current hour (from 0 to 59) + fun minutes: Int do return self.total_minutes % 60 + + # set the number of minutes in the current hour. + # if m < 0 or m >= 60, the hour will be changed accordingly + fun minutes=(m: Int) do self.total_minutes = self.hours * 60 + m + + # number of hours (from 0 to 11) + fun hours: Int do return self.total_minutes / 60 + + # set the number of hours + # the minutes will not be updated + fun hours=(h: Int) do self.total_minutes = h * 60 + minutes + + # the position of the hour arrow in the [0..60[ interval + fun hour_pos: Int do return total_minutes / 12 + + # set the position of the hour arrow (from 0 to 59). + # the hours and the minutes will be updated. + fun hour_pos=(h: Int) do self.total_minutes = h * 12 + + redef fun to_s do return "{hours}:{minutes}" + + fun reset(hours, minutes: Int) do self.total_minutes = hours*60 + minutes + + init(hours, minutes: Int) do self.reset(hours, minutes) + + redef fun ==(o) + do + # Note: o is a nullable Object, a type test is required + # Thanks to adaptive typing, there is no downcast + # i.e. the code is safe! + return o isa Clock and self.total_minutes == o.total_minutes + end +end + +var c = new Clock(10,50) +print "It's {c} o'clock." + +c.minutes += 22 +print "Now it's {c} o'clock." + +print "The short arrow is on the {c.hour_pos/5} and the long arrow is on the {c.minutes/5}." + +c.hours -= 2 +print "Now it's {c} o'clock." + +var c2 = new Clock(9, 11) +print "It's {c2} on the second clock." +print "The two clocks are synchronized: {c == c2}." +c2.minutes += 1 +print "It's now {c2} on the second clock." +print "The two clocks are synchronized: {c == c2}." +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Sample of the Curl module.
+module curl_http + +import curl + +# Small class to represent an Http Fetcher +class MyHttpFetcher + super CurlCallbacks + + var curl: Curl + var our_body: String = "" + + init(curl: Curl) do self.curl = curl + + # Release curl object + fun destroy do self.curl.destroy + + # Header callback + redef fun header_callback(line: String) do + # We keep this callback silent for testing purposes + #if not line.has_prefix("Date:") then print "Header_callback : {line}" + end + + # Body callback + redef fun body_callback(line: String) do self.our_body = "{self.our_body}{line}" + + # Stream callback - Cf : No one is registered + redef fun stream_callback(buffer: String, size: Int, count: Int) do print "Stream_callback : {buffer} - {size} - {count}" +end + + +# Program +if args.length < 2 then + print "Usage: curl_http " +else + var curl = new Curl + var url = args[1] + var request = new CurlHTTPRequest(url, curl) + + # HTTP Get Request + if args[0] == "GET" then + request.verbose = false + var getResponse = request.execute + + if getResponse isa CurlResponseSuccess then + print "Status code : {getResponse.status_code}" + print "Body : {getResponse.body_str}" + else if getResponse isa CurlResponseFailed then + print "Error code : {getResponse.error_code}" + print "Error msg : {getResponse.error_msg}" + end + + # HTTP Post Request + else if args[0] == "POST" then + var myHttpFetcher = new MyHttpFetcher(curl) + request.delegate = myHttpFetcher + + var postDatas = new HeaderMap + postDatas["Bugs Bunny"] = "Daffy Duck" + postDatas["Batman"] = "Robin likes special characters @#ùà!è§'(\"é&://,;<>∞~*" + postDatas["Batman"] = "Yes you can set multiple identical keys, but APACHE will consider only once, the last one" + request.datas = postDatas + request.verbose = false + var postResponse = request.execute + + print "Our body from the callback : {myHttpFetcher.our_body}" + + if postResponse isa CurlResponseSuccess then + print "*** Answer ***" + print "Status code : {postResponse.status_code}" + print "Body should be empty, because we decided to manage callbacks : {postResponse.body_str.length}" + else if postResponse isa CurlResponseFailed then + print "Error code : {postResponse.error_code}" + print "Error msg : {postResponse.error_msg}" + end + + # HTTP Get to file Request + else if args[0] == "GET_FILE" then + var headers = new HeaderMap + headers["Accept"] = "Moo" + request.headers = headers + request.verbose = false + var downloadResponse = request.download_to_file(null) + + if downloadResponse isa CurlFileResponseSuccess then + print "*** Answer ***" + print "Status code : {downloadResponse.status_code}" + print "Size downloaded : {downloadResponse.size_download}" + else if downloadResponse isa CurlResponseFailed then + print "Error code : {downloadResponse.error_code}" + print "Error msg : {downloadResponse.error_msg}" + end + # Program logic + else + print "Usage : Method[POST, GET, GET_FILE]" + end +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Mail sender sample using the Curl module +module curl_mail + +import curl + +var curl = new Curl +var mail_request = new CurlMailRequest(curl) + +# Networks +var response = mail_request.set_outgoing_server("smtps://smtp.example.org:465", "user@example.org", "mypassword") +if response isa CurlResponseFailed then + print "Error code : {response.error_code}" + print "Error msg : {response.error_msg}" +end + +# Headers +mail_request.from = "Billy Bob" +mail_request.to = ["user@example.org"] +mail_request.cc = ["bob@example.org"] +mail_request.bcc = null + +var headers_body = new HeaderMap +headers_body["Content-Type:"] = "text/html; charset=\"UTF-8\"" +headers_body["Content-Transfer-Encoding:"] = "quoted-printable" +mail_request.headers_body = headers_body + +# Content +mail_request.body = "

    Here you can write HTML stuff.

    " +mail_request.subject = "Hello From My Nit Program" + +# Others +mail_request.verbose = false + +# Send mail +response = mail_request.execute +if response isa CurlResponseFailed then + print "Error code : {response.error_code}" + print "Error msg : {response.error_msg}" +else if response isa CurlMailResponseSuccess then + print "Mail Sent" +else + print "Unknown Curl Response type" +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2012-2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Draws an arithmetic operation to the terminal +module draw_operation + +redef enum Int + fun n_chars: Int `{ + int c; + if ( abs(recv) >= 10 ) + c = 1+(int)log10f( (float)abs(recv) ); + else + c = 1; + if ( recv < 0 ) c ++; + return c; + `} +end + +redef enum Char + fun as_operator(a, b: Int): Int + do + if self == '+' then return a + b + if self == '-' then return a - b + if self == '*' then return a * b + if self == '/' then return a / b + if self == '%' then return a % b + abort + end + + fun override_dispc: Bool + do + return self == '+' or self == '-' or self == '*' or self == '/' or self == '%' + end + + fun lines(s: Int): Array[Line] + do + if self == '+' then + return [new Line(new P(0,s/2),1,0,s), new Line(new P(s/2,1),0,1,s-2)] + else if self == '-' then + return [new Line(new P(0,s/2),1,0,s)] + else if self == '*' then + var lines = new Array[Line] + for y in [1..s-1[ do + lines.add( new Line(new P(1,y), 1,0,s-2) ) + end + return lines + else if self == '/' then + return [new Line(new P(s-1,0), -1,1, s )] + else if self == '%' then + var q4 = s/4 + var lines = [new Line(new P(s-1,0),-1,1,s)] + for l in [0..q4[ do + lines.append([ new Line( new P(0,l), 1,0,q4), new Line( new P(s-1,s-1-l), -1,0,q4) ]) + end + return lines + else if self == '1' then + return [new Line(new P(s/2,0), 0,1,s),new Line(new P(0,s-1),1,0,s), + new Line( new P(s/2,0),-1,1,s/2)] + else if self == '2' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '3' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 1,0,s)] + else if self == '4' then + return [new Line(new P(s-1,0),0,1,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '5' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '6' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), + new Line( new P(0,s/2), 1,0,s)] + else if self == '7' then + var tl = new P(0,0) + var tr = new P(s-1,0) + return [new Line(tl, 1,0,s), new Line(tr,-1,1,s)] + else if self == '8' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new 
Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), + new Line( new P(0,s/2), 1,0,s)] + else if self == '9' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '0' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s)] + end + return new Array[Line] + end +end + +class P + var x : Int + var y : Int +end + +redef class String + # hack is to support a bug in the evaluation software + fun draw(dispc: Char, size, gap: Int, hack: Bool) + do + var w = size * length +(length-1)*gap + var h = size + var map = new Array[Array[Char]] + for x in [0..w[ do + map[x] = new Array[Char].filled_with( ' ', h ) + end + + var ci = 0 + for c in self.chars do + var local_dispc + if c.override_dispc then + local_dispc = c + else + local_dispc = dispc + end + + var lines = c.lines( size ) + for line in lines do + var x = line.o.x+ci*size + x += ci*gap + var y = line.o.y + for s in [0..line.len[ do + assert map.length > x and map[x].length > y else print "setting {x},{y} as {local_dispc}" + map[x][y] = local_dispc + x += line.step_x + y += line.step_y + end + end + + ci += 1 + end + + if hack then + for c in [0..size[ do + map[c][0] = map[map.length-size+c][0] + map[map.length-size+c][0] = ' ' + end + end + + for y in [0..h[ do + for x in [0..w[ do + printn map[x][y] + end + print "" + end + end +end + +class Line + var o : P + var step_x : Int + var step_y : Int + var len : Int +end + +var a +var b +var op_char +var disp_char +var disp_size +var disp_gap + +if "NIT_TESTING".environ == "true" then + a = 567 + b = 13 + op_char = '*' + disp_char = 'O' + disp_size = 8 + disp_gap = 1 +else + printn "Left operand: " + a = gets.to_i + + printn "Right operand: " + b = gets.to_i + + printn "Operator (+, -, *, /, %): " + op_char = gets.chars[0] + + printn "Char to display: " + disp_char = gets.chars[0] + + printn "Size of text: " + disp_size = gets.to_i + + printn "Space between digits: " + disp_gap = gets.to_i +end + +var result = op_char.as_operator( a, b ) + +var len_a = a.n_chars +var len_b = b.n_chars +var len_res = result.n_chars +var max_len = len_a.max( len_b.max( len_res ) ) + 1 + +# draw first line +var d = max_len - len_a +var line_a = "" +for i in [0..d[ do line_a += " " +line_a += a.to_s +line_a.draw( disp_char, disp_size, disp_gap, false ) + +print "" +# draw second line +d = max_len - len_b-1 +var line_b = op_char.to_s +for i in [0..d[ do line_b += " " +line_b += b.to_s +line_b.draw( disp_char, disp_size, disp_gap, false ) + +# draw ----- +print "" +for i in [0..disp_size*max_len+(max_len-1)*disp_gap] do + printn "_" +end +print "" +print "" + +# draw result +d = max_len - len_res +var line_res = "" +for i in [0..d[ do line_res += " " +line_res += result.to_s +line_res.draw( disp_char, disp_size, disp_gap, false ) +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Example using the privileges module to drop privileges from root +module drop_privileges + +import privileges + +# basic command line options +var opts = new OptionContext +var opt_ug = new OptionUserAndGroup.for_dropping_privileges +opt_ug.mandatory = true +opts.add_option(opt_ug) + +# parse and check command line options +opts.parse(args) +if not opts.errors.is_empty then + print opts.errors + print "Usage: drop_privileges [options]" + opts.usage + exit 1 +end + +# original user +print "before {sys.uid}:{sys.gid}" + +# make the switch +var user_group = opt_ug.value +assert user_group != null +user_group.drop_privileges + +# final user +print "after {sys.uid}:{sys.egid}" +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2012-2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module illustrates some uses of the FFI, specifically +# how to use extern methods. Which means to implement a Nit method in C. +module extern_methods + +redef enum Int + # Returns self'th fibonnaci number + # implemented here in C for optimization purposes + fun fib : Int import fib `{ + if ( recv < 2 ) + return recv; + else + return Int_fib( recv-1 ) + Int_fib( recv-2 ); + `} + + # System call to sleep for "self" seconds + fun sleep `{ + sleep( recv ); + `} + + # Return atan2l( self, x ) from libmath + fun atan_with( x : Int ) : Float `{ + return atan2( recv, x ); + `} + + # This method callback to Nit methods from C code + # It will use from C code: + # * the local fib method + # * the + operator, a method of Int + # * to_s, a method of all objects + # * String.to_cstring, a method of String to return an equivalent char* + fun foo import fib, +, to_s, String.to_cstring `{ + long recv_fib = Int_fib( recv ); + long recv_plus_fib = Int__plus( recv, recv_fib ); + + String nit_string = Int_to_s( recv_plus_fib ); + char *c_string = String_to_cstring( nit_string ); + + printf( "from C: self + fib(self) = %s\n", c_string ); + `} + + # Equivalent to foo but written in pure Nit + fun bar do print "from Nit: self + fib(self) = {self+self.fib}" +end + +print 12.fib + +print "sleeping 1 second..." +1.sleep + +print 100.atan_with( 200 ) +8.foo +8.bar + +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# A simple exemple of refinement where a method is added to the integer class. +module fibonacci + +redef class Int + # Calculate the self-th element of the fibonacci sequence. + fun fibonacci: Int + do + if self < 2 then + return 1 + else + return (self-2).fibonacci + (self-1).fibonacci + end + end +end + +# Print usage and exit. +fun usage +do + print "Usage: fibonnaci " + exit 0 +end + +# Main part +if args.length != 1 then + usage +end +print args.first.to_i.fibonacci +print "hello world" +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import html + +class NitHomepage + super HTMLPage + + redef fun head do + add("meta").attr("charset", "utf-8") + add("title").text("Nit") + add("link").attr("rel", "icon").attr("href", "http://nitlanguage.org/favicon.ico").attr("type", "image/x-icon") + add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/style.css").attr("type", "text/css") + add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/local.css").attr("type", "text/css") + end + + redef fun body do + open("article").add_class("page") + open("section").add_class("pageheader") + add_html("theNitProgramming Language") + open("header").add_class("header") + open("div").add_class("topsubtitle") + add("p").text("A Fun Language for Serious Programming") + close("div") + close("header") + close("section") + + open("div").attr("id", "pagebody") + open("section").attr("id", "content") + add("h1").text("# What is Nit?") + add("p").text("Nit is an object-oriented programming language. The goal of Nit is to propose a robust statically typed programming language where structure is not a pain.") + add("p").text("So, what does the famous hello world program look like, in Nit?") + add_html("
    print 'Hello, World!'
    ") + + add("h1").text("# Feature Highlights") + add("h2").text("Usability") + add("p").text("Nit's goal is to be usable by real programmers for real projects") + + open("ul") + open("li") + add("a").attr("href", "http://en.wikipedia.org/wiki/KISS_principle").text("KISS principle") + close("li") + add("li").text("Script-like language without verbosity nor cryptic statements") + add("li").text("Painless static types: static typing should help programmers") + add("li").text("Efficient development, efficient execution, efficient evolution.") + close("ul") + + add("h2").text("Robustness") + add("p").text("Nit will help you to write bug-free programs") + + open("ul") + add("li").text("Strong static typing") + add("li").text("No more NullPointerException") + close("ul") + + add("h2").text("Object-Oriented") + add("p").text("Nit's guideline is to follow the most powerful OO principles") + + open("ul") + open("li") + add("a").attr("href", "./everything_is_an_object/").text("Everything is an object") + close("li") + open("li") + add("a").attr("href", "./multiple_inheritance/").text("Multiple inheritance") + close("li") + open("li") + add("a").attr("href", "./refinement/").text("Open classes") + close("li") + open("li") + add("a").attr("href", "./virtual_types/").text("Virtual types") + close("li") + close("ul") + + + add("h1").text("# Getting Started") + add("p").text("Get Nit from its Git repository:") + + add_html("
    $ git clone http://nitlanguage.org/nit.git
    ") + add("p").text("Build the compiler (may be long):") + add_html("
    $ cd nit\n")
    +					add_html("$ make
    ") + add("p").text("Compile a program:") + add_html("
    $ bin/nitc examples/hello_world.nit
    ") + add("p").text("Execute the program:") + add_html("
    $ ./hello_world
    ") + close("section") + close("div") + close("article") + end +end + +var page = new NitHomepage +page.write_to stdout +page.write_to_file("nit.html") +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# An example that defines and uses stacks of integers. +# The implementation is done with a simple linked list. +# It features: free constructors, nullable types and some adaptive typing. +module int_stack + +# A stack of integer implemented by a simple linked list. +# Note that this is only a toy class since a real linked list will gain to use +# generics and extends interfaces, like Collection, from the standard library. +class IntStack + # The head node of the list. + # Null means that the stack is empty. + private var head: nullable ISNode = null + + # Add a new integer in the stack. + fun push(val: Int) + do + self.head = new ISNode(val, self.head) + end + + # Remove and return the last pushed integer. + # Return null if the stack is empty. + fun pop: nullable Int + do + var head = self.head + if head == null then return null + # Note: the followings are statically safe because of the + # previous 'if'. + var val = head.val + self.head = head.next + return val + end + + # Return the sum of all integers of the stack. + # Return 0 if the stack is empty. + fun sumall: Int + do + var sum = 0 + var cur = self.head + while cur != null do + # Note: the followings are statically safe because of + # the condition of the 'while'. + sum += cur.val + cur = cur.next + end + return sum + end + + # Note: Because all attributes have a default value, a free constructor + # "init()" is implicitly defined. +end + +# A node of a IntStack +private class ISNode + # The integer value stored in the node. + var val: Int + + # The next node, if any. + var next: nullable ISNode + + # Note: A free constructor "init(val: Int, next: nullable ISNode)" is + # implicitly defined. +end + +var l = new IntStack +l.push(1) +l.push(2) +l.push(3) + +print l.sumall + +# Note: the 'for' control structure cannot be used on IntStack in its current state. +# It requires a more advanced topic. +# However, why not using the 'loop' control structure? +loop + var i = l.pop + if i == null then break + # The following is statically safe because of the previous 'if'. + print i * 10 +end + +# Note: 'or else' is used to give an alternative of a null expression. +l.push(5) +print l.pop or else 0 # l.pop gives 5, so print 5 +print l.pop or else 0 # l.pop gives null, so print the alternative: 0 + + +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2014 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Basic example of OpenGL ES 2.0 usage from the book OpenGL ES 2.0 Programming Guide. +# +# Code reference: +# https://code.google.com/p/opengles-book-samples/source/browse/trunk/LinuxX11/Chapter_2/Hello_Triangle/Hello_Triangle.c +module opengles2_hello_triangle + +import glesv2 +import egl +import mnit_linux # for sdl +import x11 + +if "NIT_TESTING".environ == "true" then exit(0) + +var window_width = 800 +var window_height = 600 + +# +## SDL +# +var sdl_display = new SDLDisplay(window_width, window_height) +var sdl_wm_info = new SDLSystemWindowManagerInfo +var x11_window_handle = sdl_wm_info.x11_window_handle + +# +## X11 +# +var x_display = x_open_default_display +assert x_display != 0 else print "x11 fail" + +# +## EGL +# +var egl_display = new EGLDisplay(x_display) +assert egl_display.is_valid else print "EGL display is not valid" +egl_display.initialize + +print "EGL version: {egl_display.version}" +print "EGL vendor: {egl_display.vendor}" +print "EGL extensions: {egl_display.extensions.join(", ")}" +print "EGL client APIs: {egl_display.client_apis.join(", ")}" + +assert egl_display.is_valid else print egl_display.error + +var config_chooser = new EGLConfigChooser +#config_chooser.surface_type_egl +config_chooser.blue_size = 8 +config_chooser.green_size = 8 +config_chooser.red_size = 8 +#config_chooser.alpha_size = 8 +#config_chooser.depth_size = 8 +#config_chooser.stencil_size = 8 +#config_chooser.sample_buffers = 1 +config_chooser.close + +var configs = config_chooser.choose(egl_display) +assert configs != null else print "choosing config failed: {egl_display.error}" +assert not configs.is_empty else print "no EGL config" + +print "{configs.length} EGL configs available" +for config in configs do + var attribs = config.attribs(egl_display) + print "* caveats: {attribs.caveat}" + print " conformant to: {attribs.conformant}" + print " size of RGBA: {attribs.red_size} {attribs.green_size} {attribs.blue_size} {attribs.alpha_size}" + print " buffer, depth, stencil: {attribs.buffer_size} {attribs.depth_size} {attribs.stencil_size}" +end + +var config = configs.first + +var format = config.attribs(egl_display).native_visual_id + +# TODO android part +# Opengles1Display_midway_init(recv, format); + +var surface = egl_display.create_window_surface(config, x11_window_handle, [0]) +assert surface.is_ok else print egl_display.error + +var context = egl_display.create_context(config) +assert context.is_ok else print egl_display.error + +var make_current_res = egl_display.make_current(surface, surface, context) +assert make_current_res + +var width = surface.attribs(egl_display).width +var height = surface.attribs(egl_display).height +print "Width: {width}" +print "Height: {height}" + +assert egl_bind_opengl_es_api else print "eglBingAPI failed: {egl_display.error}" + +# +## GLESv2 +# + +print "Can compile shaders? 
{gl_shader_compiler}" +assert_no_gl_error + +assert gl_shader_compiler else print "Cannot compile shaders" + +# gl program +print gl_error.to_s +var program = new GLProgram +if not program.is_ok then + print "Program is not ok: {gl_error.to_s}\nLog:" + print program.info_log + abort +end +assert_no_gl_error + +# vertex shader +var vertex_shader = new GLVertexShader +assert vertex_shader.is_ok else print "Vertex shader is not ok: {gl_error}" +vertex_shader.source = """ +attribute vec4 vPosition; +void main() +{ + gl_Position = vPosition; +} """ +vertex_shader.compile +assert vertex_shader.is_compiled else print "Vertex shader compilation failed with: {vertex_shader.info_log} {program.info_log}" +assert_no_gl_error + +# fragment shader +var fragment_shader = new GLFragmentShader +assert fragment_shader.is_ok else print "Fragment shader is not ok: {gl_error}" +fragment_shader.source = """ +precision mediump float; +void main() +{ + gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); +} +""" +fragment_shader.compile +assert fragment_shader.is_compiled else print "Fragment shader compilation failed with: {fragment_shader.info_log}" +assert_no_gl_error + +program.attach_shader vertex_shader +program.attach_shader fragment_shader +program.bind_attrib_location(0, "vPosition") +program.link +assert program.is_linked else print "Linking failed: {program.info_log}" +assert_no_gl_error + +# draw! +var vertices = [0.0, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0] +var vertex_array = new VertexArray(0, 3, vertices) +vertex_array.attrib_pointer +gl_clear_color(0.5, 0.0, 0.5, 1.0) +for i in [0..10000[ do + printn "." + assert_no_gl_error + gl_viewport(0, 0, width, height) + gl_clear_color_buffer + program.use + vertex_array.enable + vertex_array.draw_arrays_triangles + egl_display.swap_buffers(surface) +end + +# delete +program.delete +vertex_shader.delete +fragment_shader.delete + +# +## EGL +# +# close +egl_display.make_current(new EGLSurface.none, new EGLSurface.none, new EGLContext.none) +egl_display.destroy_context(context) +egl_display.destroy_surface(surface) + +# +## SDL +# +# close +sdl_display.destroy +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# How to print arguments of the command line. +module print_arguments + +for a in args do + print a +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# A procedural program (without explicit class definition). +# This program manipulates arrays of integers. +module procedural_array + +# The sum of the elements of `a'. +# Uses a 'for' control structure. +fun array_sum(a: Array[Int]): Int +do + var sum = 0 + for i in a do + sum = sum + i + end + return sum +end + +# The sum of the elements of `a' (alternative version). +# Uses a 'while' control structure. +fun array_sum_alt(a: Array[Int]): Int +do + var sum = 0 + var i = 0 + while i < a.length do + sum = sum + a[i] + i = i + 1 + end + return sum +end + +# The main part of the program. +var a = [10, 5, 8, 9] +print(array_sum(a)) +print(array_sum_alt(a)) +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Client sample using the Socket module which connect to the server sample. +module socket_client + +import socket + +if args.length < 2 then + print "Usage : socket_client " + return +end + +var s = new Socket.client(args[0], args[1].to_i) +print "[HOST ADDRESS] : {s.address}" +print "[HOST] : {s.host}" +print "[PORT] : {s.port}" +print "Connecting ... {s.connected}" +if s.connected then + print "Writing ... Hello server !" + s.write("Hello server !") + print "[Response from server] : {s.read(100)}" + print "Closing ..." + s.close +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Server sample using the Socket module which allow client to connect +module socket_server + +import socket + +if args.is_empty then + print "Usage : socket_server " + return +end + +var socket = new Socket.server(args[0].to_i, 1) +print "[PORT] : {socket.port.to_s}" + +var clients = new Array[Socket] +var max = socket +loop + var fs = new SocketObserver(true, true, true) + fs.readset.set(socket) + + for c in clients do fs.readset.set(c) + + if fs.select(max, 4, 0) == 0 then + print "Error occured in select {sys.errno.strerror}" + break + end + + if fs.readset.is_set(socket) then + var ns = socket.accept + print "Accepting {ns.address} ... " + print "[Message from {ns.address}] : {ns.read(100)}" + ns.write("Goodbye client.") + print "Closing {ns.address} ..." + ns.close + end +end + +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import template + +### Here, definition of the specific templates + +# The root template for composers +class TmplComposers + super Template + + # Short list of composers + var composers = new Array[TmplComposer] + + # Detailled list of composers + var composer_details = new Array[TmplComposerDetail] + + # Add a composer in both lists + fun add_composer(firstname, lastname: String, birth, death: Int) + do + composers.add(new TmplComposer(lastname)) + composer_details.add(new TmplComposerDetail(firstname, lastname, birth, death)) + end + + redef fun rendering do + add """ +COMPOSERS +========= +""" + add_all composers + add """ + +DETAILS +======= +""" + add_all composer_details + end +end + +# A composer in the short list of composers +class TmplComposer + super Template + + # Short name + var name: String + + init(name: String) do self.name = name + + redef fun rendering do add "- {name}\n" +end + +# A composer in the detailled list of composers +class TmplComposerDetail + super Template + + var firstname: String + var lastname: String + var birth: Int + var death: Int + + init(firstname, lastname: String, birth, death: Int) do + self.firstname = firstname + self.lastname = lastname + self.birth = birth + self.death = death + end + + redef fun rendering do add """ + +COMPOSER: {{{firstname}}} {{{lastname}}} +BIRTH...: {{{birth}}} +DEATH...: {{{death}}} +""" + +end + +### Here a simple usage of the templates + +var f = new TmplComposers +f.add_composer("Johann Sebastian", "Bach", 1685, 1750) +f.add_composer("George Frideric", "Handel", 1685, 1759) +f.add_composer("Wolfgang Amadeus", "Mozart", 1756, 1791) +f.write_to(stdout) +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2014 Lucas Bajolet +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Sample module for a minimal chat server using Websockets on port 8088 +module websocket_server + +import websocket + +var sock = new WebSocket(8088, 1) + +var msg: String + +if sock.listener.eof then + print sys.errno.strerror +end + +sock.accept + +while not sock.listener.eof do + if not sock.connected then sock.accept + if sys.stdin.poll_in then + msg = gets + printn "Received message : {msg}" + if msg == "exit" then sock.close + if msg == "disconnect" then sock.disconnect_client + sock.write(msg) + end + if sock.can_read(10) then + msg = sock.read_line + if msg != "" then print msg + end +end + diff --git a/vendor/pygments/tests/examplefiles/ANTLRv3.g b/vendor/pygments/tests/examplefiles/antlr_ANTLRv3.g similarity index 100% rename from vendor/pygments/tests/examplefiles/ANTLRv3.g rename to vendor/pygments/tests/examplefiles/antlr_ANTLRv3.g diff --git a/vendor/pygments/tests/examplefiles/autoit_submit.au3 b/vendor/pygments/tests/examplefiles/autoit_submit.au3 index e5054de..84fb715 100644 --- a/vendor/pygments/tests/examplefiles/autoit_submit.au3 +++ b/vendor/pygments/tests/examplefiles/autoit_submit.au3 @@ -16,8 +16,10 @@ _IEFormElementOptionSelect ($oSelect, "S2", 1, "byText") ;options raido _IEFormElementRadioSelect($oForm, "2nd", "type", 1, "byValue") +#cs ConsoleWrite(@Error) Sleep(10000) +#ce _IEFormSubmit($oForm, 0) _IELoadWait($oIE) Sleep(60000) diff --git a/vendor/pygments/tests/examplefiles/automake.mk b/vendor/pygments/tests/examplefiles/automake.mk new file mode 100644 index 0000000..47a50a3 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/automake.mk @@ -0,0 +1,7 @@ +if DEBUG +DBG = debug +else +DBG = +endif +noinst_PROGRAMS = $(DBG) + diff --git a/vendor/pygments/tests/examplefiles/batchfile.bat b/vendor/pygments/tests/examplefiles/batchfile.bat deleted file mode 100644 index 5cdc625..0000000 --- a/vendor/pygments/tests/examplefiles/batchfile.bat +++ /dev/null @@ -1,49 +0,0 @@ -rem this is a demo file. -@rem -@echo off - -call c:\temp.bat somearg -call :lab somearg -rem This next one is wrong in the vim lexer! -call c:temp.bat - -echo "Hi!" -echo hi -echo on -echo off -echo. -@echo off -if exist *.log echo The log file has arrived. -rem These are all escapes, also done incorrectly by the vim lexer -echo ^^ ^> ^< ^| - -x=beginning -setlocal -x = new text -endlocal - -echo testrem x -echo test rem x - -for %%var in (*.jpg) do echo %%var -for /D %%var in (a b c) do echo %%var -for /R C:\temp %%var in (*.jpg) do iexplore.exe %%var -rem Vim has this one wrong too. -for /L %%var in (10,-1,1) do echo %%var -for /F %%var in ("hi!") do echo %%var -for /F "eol=c,skip=1,usebackq" %%var in (`command`) do echo %%var %~l %~fl %~dl %~pl %~nl %~xl %~sl %~al %~tl %~zl %~$PATH:l %~dpl %~dp$PATH:l %~ftzal - -echo some file ?! > somefile.txt - -set PATH=%PATH%;c:\windows - -goto answer%errorlevel% - :answer0 - echo Hi it's zero - :answer1 - echo New - -if exist a del a -else echo A is missing! - - diff --git a/vendor/pygments/tests/examplefiles/bnf_example1.bnf b/vendor/pygments/tests/examplefiles/bnf_example1.bnf new file mode 100644 index 0000000..fe041a6 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/bnf_example1.bnf @@ -0,0 +1,15 @@ +; This examples from WikiPedia . + + ::= + + ::= + | + + ::= "." | + + ::= + + ::= "," + + ::= "Sr." | "Jr." 
| | "" + ::= | "" diff --git a/vendor/pygments/tests/examplefiles/char.scala b/vendor/pygments/tests/examplefiles/char.scala new file mode 100644 index 0000000..0032612 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/char.scala @@ -0,0 +1,4 @@ +'symbol +'a' +'\u1234' +'\n' diff --git a/vendor/pygments/tests/examplefiles/clojure-weird-keywords.clj b/vendor/pygments/tests/examplefiles/clojure-weird-keywords.clj new file mode 100644 index 0000000..2d914c5 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/clojure-weird-keywords.clj @@ -0,0 +1,5 @@ +; Note, clojure lexer is here (and is a good deal more liberal than the language spec: +; https://github.com/clojure/clojure/blob/master/src/jvm/clojure/lang/LispReader.java#L62 + +(defn valid [#^java.lang.reflect.Method meth] + [:keyword :#initial-hash :h#sh-in-middle :hash-at-end# #js {:keyword "value"}]) diff --git a/vendor/pygments/tests/examplefiles/core.cljs b/vendor/pygments/tests/examplefiles/core.cljs new file mode 100644 index 0000000..f135b83 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/core.cljs @@ -0,0 +1,52 @@ + +(ns bounder.core + (:require [bounder.html :as html] + [domina :refer [value set-value! single-node]] + [domina.css :refer [sel]] + [lowline.functions :refer [debounce]] + [enfocus.core :refer [at]] + [cljs.reader :as reader] + [clojure.string :as s]) + (:require-macros [enfocus.macros :as em])) + +(def filter-input + (single-node + (sel ".search input"))) + +(defn project-matches [query project] + (let [words (cons (:name project) + (map name (:categories project))) + to-match (->> words + (s/join "") + (s/lower-case))] + (<= 0 (.indexOf to-match (s/lower-case query))))) + +(defn apply-filter-for [projects] + (let [query (value filter-input)] + (html/render-projects + (filter (partial project-matches query) + projects)))) + +(defn filter-category [projects evt] + (let [target (.-currentTarget evt)] + (set-value! filter-input + (.-innerHTML target)) + (apply-filter-for projects))) + +(defn init-listeners [projects] + (at js/document + ["input"] (em/listen + :keyup + (debounce + (partial apply-filter-for projects) + 500)) + [".category-links li"] (em/listen + :click + (partial filter-category projects)))) + +(defn init [projects-edn] + (let [projects (reader/read-string projects-edn)] + (init-listeners projects) + (html/render-projects projects) + (html/loaded))) + diff --git a/vendor/pygments/tests/examplefiles/demo.cfm b/vendor/pygments/tests/examplefiles/demo.cfm index d94a06a..78098c0 100644 --- a/vendor/pygments/tests/examplefiles/demo.cfm +++ b/vendor/pygments/tests/examplefiles/demo.cfm @@ -1,4 +1,11 @@ + ---> + +---> @@ -17,6 +24,9 @@ #IsDate("foo")#
    #DaysInMonth(RightNow)# + + + #x# #y# @@ -29,10 +39,12 @@ + + - + ---> comment ---> diff --git a/vendor/pygments/tests/examplefiles/demo.css.in b/vendor/pygments/tests/examplefiles/demo.css.in new file mode 100644 index 0000000..36330a9 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/demo.css.in @@ -0,0 +1,6 @@ +%if defined(__foo__) +.cls { + color: #fff; +} +%endif +%literal %foo diff --git a/vendor/pygments/tests/examplefiles/demo.hbs b/vendor/pygments/tests/examplefiles/demo.hbs new file mode 100644 index 0000000..1b9ed5a --- /dev/null +++ b/vendor/pygments/tests/examplefiles/demo.hbs @@ -0,0 +1,12 @@ + + +
    + {{intro}} +
    + +{{#if isExpanded}} +
    {{body}}
    + +{{else}} + +{{/if}} diff --git a/vendor/pygments/tests/examplefiles/demo.js.in b/vendor/pygments/tests/examplefiles/demo.js.in new file mode 100644 index 0000000..f44fc53 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/demo.js.in @@ -0,0 +1,6 @@ +window.foo = { +#if defined(__foo__) + 'key': 'value' +#endif +} +#literal #foo diff --git a/vendor/pygments/tests/examplefiles/demo.thrift b/vendor/pygments/tests/examplefiles/demo.thrift new file mode 100644 index 0000000..e50544d --- /dev/null +++ b/vendor/pygments/tests/examplefiles/demo.thrift @@ -0,0 +1,14 @@ +/* comment */ +/** doc comment */ + +namespace cpp shared // inline comment + +struct Foo1 { + 1: i32 key + 2: string value +} + +service Foo2 { + Foo1 bar(1: i32 key) +} + diff --git a/vendor/pygments/tests/examplefiles/demo.xul.in b/vendor/pygments/tests/examplefiles/demo.xul.in new file mode 100644 index 0000000..9e1f493 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/demo.xul.in @@ -0,0 +1,7 @@ + + +#if defined(__foo__) + +#endif + diff --git a/vendor/pygments/tests/examplefiles/docker.docker b/vendor/pygments/tests/examplefiles/docker.docker new file mode 100644 index 0000000..d65385b --- /dev/null +++ b/vendor/pygments/tests/examplefiles/docker.docker @@ -0,0 +1,5 @@ +maintainer First O'Last + +run echo \ + 123 $bar +# comment diff --git a/vendor/pygments/tests/examplefiles/eg_example1.eg b/vendor/pygments/tests/examplefiles/eg_example1.eg new file mode 100644 index 0000000..e23bf8c --- /dev/null +++ b/vendor/pygments/tests/examplefiles/eg_example1.eg @@ -0,0 +1,155 @@ +;; filepath tests +require: + library + ./awesome + ./variable/greatness/file + + +;; Strings +"Hello World!" +"Hello!\nWorld!" +'sasadads\ssdasdasd{ @method variable }' +var dstring = .dotted-string + +key-dash +but-can-it-do-ops + + yes-it-can: + 100 + + print "So cool!" + 100 + {that = "is awesome", you = "are hilarious"} + + jiminy-crickets: + oliver-twist each mr-bojangles + + kittens = + 12 + +check-it: + still-works: + {1, 2, 3}.reversed.reversed.awesome{}.that.sort().what.map with + x -> x * x + (1, 2, 3) + this + +if total awesomeness > great stupidity: + print "You've won!" + not-sure-what-this-does @@ but-it-wont-capture +else: + print "Keep trying!" + needs-work ++ you + +;; with tests +needs-obj('awesome') with {"greatness values"} +object.field.method with {1, 2, 3} + +;; object assignment +obj = { + key = "oh yeah" + tree = "schmoh cheah" +} +obj.field = { + that = super cool: "wowzers!" + thatFunc = {x} -> x +} + +;; match statements +match @awesome.great: + {head, *tail} -> [print head; tail] + Array[] -> convert(.arr) + String? -> convert(.str) + else -> throw E.error("This is an error!") + +unimpl-func = -> pass + +;; if / elif / else test +if coolness > 11: + add something: something-else +elif true: + add nothing: something-else +else: + add everything: the-castle + +;; nested if / elif / else test +mad-function = bananas -> + if bananas > 5: + print "You've got a lot of bananas!" + elif bananas == 5: + print "You might want to consider getting more bananas" + else: + print "Go get more bananas now!" + +;; class test +class Dog: + constructor(@name, @breed) = + this.awesomesauce = 100 + + unimpl-meth = -> pass + + bark(@, sd) + +;; error test +try: + throw E.test.my-error("This is my error.") +catch TypeError? e: + print "There was a type error." +catch E.my-error? e: + print "My error!" +catch e: + print "Some other error." +finally: + print "We are done." 
+ +;; method shorthand operator +stream-of-twos = *-> + while [true]: yield 2 ;; need to fix literals with colons after them. + +;; gen shorthand operator +full-name = @-> + '{@first-name} {@last-name}' + +name-method = @-> + @name.parents.full-name() + +;; Keyword highlight test +key x +key +x; key @x; key .x ; key "x"; key 0; .asdasd +key (x); key [x]; key {x} +nokey.x(); nokey{x} +key x + y +key key x +x + key y +x - key y +nokey + x +nokey +key: x +key nokey: y +key x > nokey: z +x + key nokey: z +x and {y, z} +x + nokey: y +x mod nokey: y +x = key: y ;; comments work after keywords! +x each key: y +x each* k ;; +a.b{c.d, e.f} +a.bad-method(c.d, e.f) +#a{} +nokey mod: y ;; do not highlight nokey +;; because mod is an operator + +;; Highlight all of these: +await; break; chain; continue; else:; expr-value +match; return; yield + +;; Edge-case highlighting test +key-word: xyz +nokey - x: yz + +;; Some keywords may contain operators as a subpart. If your regexp +;; uses \b to single out operators like each, is or in, you may +;; fail to highlight these properly: +beaches ;; Do not highlight each inside the word beaches +each-thing ;; Do not highlight each +sleep-in ;; Do not highlight in +before-each: xyz ;; Highlight before-each as a keyword +is-great: xyz ;; Highlight is-great as a keyword + +send() diff --git a/vendor/pygments/tests/examplefiles/ember.handlebars b/vendor/pygments/tests/examplefiles/ember.handlebars new file mode 100644 index 0000000..515dffb --- /dev/null +++ b/vendor/pygments/tests/examplefiles/ember.handlebars @@ -0,0 +1,33 @@ +{{#view EmberFirebaseChat.ChatView class="chat-container"}} +
    +
+ {{#each message in content}}
+
+ [{{formatTimestamp "message.timestamp" fmtString="h:mm:ss A"}}]
+ {{message.sender}}: {{message.content}}
+
+ {{/each}}
+
    +
+
+ {{! Comment }}
+ {{{unescaped value}}}
+
+ {{#view EmberFirebaseChat.InputView class="chat-input-container"}}
+
+ {{#if "auth.authed"}}
+ {{#if "auth.hasName"}}
+
+
+ {{else}}
+
+
+ {{/if}}
+ {{else}}
+
+
+ {{/if}}
+
    + {{/view}} +{{/view}} + diff --git a/vendor/pygments/tests/examplefiles/es6.js b/vendor/pygments/tests/examplefiles/es6.js new file mode 100644 index 0000000..79bfd3e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/es6.js @@ -0,0 +1,46 @@ +// Most examples from https://github.com/rse/es6-features under MIT license +const PI = 3.141593; + +let callbacks = []; + +odds = evens.map(v => v + 1); + +nums.forEach(v => { + if (v % 5 === 0) + fives.push(v); +}) + +function f (x, y, ...a) { + return (x + y) * a.length; +} + +var params = [ "hello", true, 7 ]; +var other = [ 1, 2, ...params ]; // [ 1, 2, "hello", true, 7 ] +f(1, 2, ...params) === 9; + +var str = "foo"; +var chars = [ ...str ]; // [ "f", "o", "o" ] + +var customer = { name: "Foo" }; +var card = { amount: 7, product: "Bar", unitprice: 42 }; +message = `Hello ${customer.name}, +want to buy ${card.amount} ${card.product} for +a total of ${card.amount * card.unitprice} bucks?`; + +0b111110111 === 503; +0o767 === 503; + +for (let codepoint of "𠮷") console.log(codepoint); + +function* (); +*function(); +yield; + +export class Node { +} + +isFinite(); +isNaN(); +isSafeInteger(); +x = new Promise(...a); +x = new Proxy(...a); diff --git a/vendor/pygments/tests/examplefiles/eval.rs b/vendor/pygments/tests/examplefiles/eval.rs new file mode 100644 index 0000000..17e585a --- /dev/null +++ b/vendor/pygments/tests/examplefiles/eval.rs @@ -0,0 +1,606 @@ +// ------------------------------------------------------------------------------------------------- +// Rick, a Rust intercal compiler. Save your souls! +// +// Copyright (c) 2015 Georg Brandl +// +// This program is free software; you can redistribute it and/or modify it under the terms of the +// GNU General Public License as published by the Free Software Foundation; either version 2 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without +// even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +// General Public License for more details. +// +// You should have received a copy of the GNU General Public License along with this program; +// if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. +// ------------------------------------------------------------------------------------------------- + +/// Interprets INTERCAL source. +/// +/// The evaluator is used when rick is called with `-i`, or when the compiler generates +/// the output while compiling (in the constant-output case). + +use std::fmt::{ Debug, Display }; +use std::io::Write; +use std::u16; + +use err::{ Res, IE123, IE129, IE252, IE275, IE555, IE633, IE774, IE994 }; +use ast::{ self, Program, Stmt, StmtBody, ComeFrom, Expr, Var, VType }; +use stdops::{ Bind, Array, write_number, read_number, check_chance, check_ovf, pop_jumps, + get_random_seed, mingle, select, and_16, and_32, or_16, or_32, xor_16, xor_32 }; + + +/// Represents a value (either 16-bit or 32-bit) at runtime. +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum Val { + I16(u16), + I32(u32), +} + +impl Val { + /// Cast as a 16-bit value; returns an error if 32-bit and too big. + pub fn as_u16(&self) -> Res { + match *self { + Val::I16(v) => Ok(v), + Val::I32(v) => { + if v > (u16::MAX as u32) { + return IE275.err(); + } + Ok(v as u16) + } + } + } + + /// Cast as a 32-bit value; always succeeds. 
+ pub fn as_u32(&self) -> u32 { + match *self { + Val::I16(v) => v as u32, + Val::I32(v) => v + } + } + + /// Cast as an usize value; always succeeds. + pub fn as_usize(&self) -> usize { + self.as_u32() as usize + } + + /// Create from a 32-bit value; will select the smallest possible type. + pub fn from_u32(v: u32) -> Val { + if v & 0xFFFF == v { + Val::I16(v as u16) + } else { + Val::I32(v) + } + } +} + +/// The state of the interpreter's evaluator. +pub struct Eval<'a> { + /// Program to execute. + program: &'a Program, + /// Stream to use for printing output. + stdout: &'a mut Write, + /// Whether to print debugging output during execution. + debug: bool, + /// Variable bindings for the four types of variables. + spot: Vec>, + twospot: Vec>, + tail: Vec>>, + hybrid: Vec>>, + /// The infamous NEXT stack, capable of holding 80 elements. + jumps: Vec, + /// Abstain counter for each statement. + abstain: Vec, + /// Binary I/O "tape" state. + last_in: u8, + last_out: u8, + /// Random number generator state. + rand_st: u32, + /// Counts the number of executed statements. + stmt_ctr: usize, +} + +/// Represents the control flow effect of an executed statement. +enum StmtRes { + /// normal execution, next statement + Next, + /// jump around, from DO ... NEXT + Jump(usize), + /// jump back, from RESUME + Back(usize), + /// start from the first statement, from TRY AGAIN + FromTop, + /// end the program, from GIVE UP + End, +} + +impl<'a> Eval<'a> { + /// Construct a new evaluator. + pub fn new(program: &'a Program, stdout: &'a mut Write, debug: bool, + random: bool) -> Eval<'a> { + let abs = program.stmts.iter().map(|stmt| stmt.props.disabled as u32).collect(); + let nvars = (program.var_info.0.len(), + program.var_info.1.len(), + program.var_info.2.len(), + program.var_info.3.len()); + Eval { + program: program, + stdout: stdout, + debug: debug, + spot: vec![Bind::new(0); nvars.0], + twospot: vec![Bind::new(0); nvars.1], + tail: vec![Bind::new(Array::empty()); nvars.2], + hybrid: vec![Bind::new(Array::empty()); nvars.3], + jumps: Vec::with_capacity(80), + rand_st: if random { get_random_seed() } else { 0 }, + abstain: abs, + last_in: 0, + last_out: 0, + stmt_ctr: 0, + } + } + + /// Interpret the program. Returns either the number of executed statements, + /// or an error (RtError). 
+ pub fn eval(&mut self) -> Res { + let mut pctr = 0; // index of current statement + let program = self.program.clone(); + let nstmts = program.stmts.len(); + loop { + // check for falling off the end + if pctr >= nstmts { + // if the last statement was a TRY AGAIN, falling off the end is fine + if let StmtBody::TryAgain = program.stmts[program.stmts.len() - 1].body { + break; + } + return IE633.err(); + } + self.stmt_ctr += 1; + let stmt = &program.stmts[pctr]; + // execute statement if not abstained + if self.abstain[pctr] == 0 { + // check execution chance + let (passed, rand_st) = check_chance(stmt.props.chance, self.rand_st); + self.rand_st = rand_st; + if passed { + // try to eval this statement + let res = match self.eval_stmt(stmt) { + // on error, set the correct line number and bubble up + Err(mut err) => { + err.set_line(stmt.props.onthewayto); + // special treatment for NEXT + if let StmtBody::DoNext(n) = stmt.body { + if let Some(i) = program.labels.get(&n) { + err.set_line(program.stmts[*i as usize].props.srcline); + } + } + return Err(err); + } + Ok(res) => res + }; + // handle control flow effects + match res { + StmtRes::Next => { } + StmtRes::Jump(n) => { + self.jumps.push(pctr as u16); // push the line with the NEXT + pctr = n; + continue; // do not increment or check for COME FROMs + } + StmtRes::Back(n) => { + pctr = n; // will be incremented below after COME FROM check + } + StmtRes::FromTop => { + pctr = 0; // start from the beginning, do not push any stack + continue; + } + StmtRes::End => break, + } + } + } + // if we are on the line with the compiler bug, error out + if pctr == self.program.bugline as usize { + return IE774.err_with(None, stmt.props.onthewayto); + } + // try to determine if we have to go to a COME FROM statement + // (note: in general, program.stmts[pctr] != stmt) + // + // the static COME FROM is always a possibility + let mut maybe_next = program.stmts[pctr].comefrom; + // the complicated case: evaluate all computed-come-from expressions + let my_label = program.stmts[pctr].props.label; + if program.uses_complex_comefrom && my_label > 0 { + for (i, stmt) in program.stmts.iter().enumerate() { + if let StmtBody::ComeFrom(ComeFrom::Expr(ref e)) = stmt.body { + let v = try!(try!(self.eval_expr(e)).as_u16()); + if v == my_label { + // as soon as we have multiple candidates, we can bail out + if maybe_next.is_some() { + return IE555.err(); + } + maybe_next = Some(i as u16); + } + } + } + } + // check for COME FROMs from this line + if let Some(next) = maybe_next { + let next = next as usize; + // check for abstained COME FROM + if self.abstain[next] == 0 { + // the COME FROM can also have a % chance + let (passed, rand_st) = check_chance(program.stmts[next].props.chance, + self.rand_st); + self.rand_st = rand_st; + if passed { + pctr = next; + continue; + } + } + } + // no COME FROM, normal execution + pctr += 1; + } + Ok(self.stmt_ctr) + } + + /// Interpret a single statement. + fn eval_stmt(&mut self, stmt: &Stmt) -> Res { + if self.debug { + println!("\nExecuting Stmt #{} (state before following)", self.stmt_ctr); + self.dump_state(); + println!("{}", stmt); + } + match stmt.body { + StmtBody::Calc(ref var, ref expr) => { + let val = try!(self.eval_expr(expr)); + try!(self.assign(var, val)); + Ok(StmtRes::Next) + } + StmtBody::Dim(ref var, ref exprs) => { + try!(self.array_dim(var, exprs)); + Ok(StmtRes::Next) + } + StmtBody::DoNext(n) => { + match self.program.labels.get(&n) { + // too many jumps on stack already? 
+ Some(_) if self.jumps.len() >= 80 => IE123.err(), + Some(i) => Ok(StmtRes::Jump(*i as usize)), + None => IE129.err(), + } + } + StmtBody::ComeFrom(_) => { + // nothing to do here at runtime + Ok(StmtRes::Next) + } + StmtBody::Resume(ref expr) => { + let n = try!(self.eval_expr(expr)).as_u32(); + // this expect() is safe: if the third arg is true, there will + // be no Ok(None) returns + let next = try!(pop_jumps(&mut self.jumps, n, true, 0)) + .expect("https://xkcd.com/378/ ?!"); + Ok(StmtRes::Back(next as usize)) + } + StmtBody::Forget(ref expr) => { + let n = try!(self.eval_expr(expr)).as_u32(); + try!(pop_jumps(&mut self.jumps, n, false, 0)); + Ok(StmtRes::Next) + } + StmtBody::Ignore(ref vars) => { + for var in vars { + self.set_rw(var, false); + } + Ok(StmtRes::Next) + } + StmtBody::Remember(ref vars) => { + for var in vars { + self.set_rw(var, true); + } + Ok(StmtRes::Next) + } + StmtBody::Stash(ref vars) => { + for var in vars { + self.stash(var); + } + Ok(StmtRes::Next) + } + StmtBody::Retrieve(ref vars) => { + for var in vars { + try!(self.retrieve(var)); + } + Ok(StmtRes::Next) + } + StmtBody::Abstain(ref expr, ref whats) => { + let f: Box u32> = if let Some(ref e) = *expr { + let n = try!(self.eval_expr(e)).as_u32(); + box move |v: u32| v.saturating_add(n) + } else { + box |_| 1 + }; + for what in whats { + self.abstain(what, &*f); + } + Ok(StmtRes::Next) + } + StmtBody::Reinstate(ref whats) => { + for what in whats { + self.abstain(what, &|v: u32| v.saturating_sub(1)); + } + Ok(StmtRes::Next) + } + StmtBody::ReadOut(ref vars) => { + for var in vars { + match *var { + // read out whole array + Expr::Var(ref var) if var.is_dim() => { + try!(self.array_readout(var)); + } + // read out single var or array element + Expr::Var(ref var) => { + let varval = try!(self.lookup(var)); + try!(write_number(self.stdout, varval.as_u32(), 0)); + } + // read out constant + Expr::Num(_, v) => try!(write_number(self.stdout, v, 0)), + // others will not be generated + _ => return IE994.err(), + }; + } + Ok(StmtRes::Next) + } + StmtBody::WriteIn(ref vars) => { + for var in vars { + if var.is_dim() { + // write in whole array + try!(self.array_writein(var)); + } else { + // write in single var or array element + let n = try!(read_number(0)); + try!(self.assign(var, Val::from_u32(n))); + } + } + Ok(StmtRes::Next) + } + // this one is only generated by the constant-program optimizer + StmtBody::Print(ref s) => { + if let Err(_) = self.stdout.write(&s) { + return IE252.err(); + } + Ok(StmtRes::Next) + } + StmtBody::TryAgain => Ok(StmtRes::FromTop), + StmtBody::GiveUp => Ok(StmtRes::End), + StmtBody::Error(ref e) => Err((*e).clone()), + } + } + + /// Evaluate an expression to a value. 
+ fn eval_expr(&self, expr: &Expr) -> Res { + match *expr { + Expr::Num(vtype, v) => match vtype { + VType::I16 => Ok(Val::I16(v as u16)), + VType::I32 => Ok(Val::I32(v)), + }, + Expr::Var(ref var) => self.lookup(var), + Expr::Mingle(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)).as_u32(); + let w = try!(self.eval_expr(wx)).as_u32(); + let v = try!(check_ovf(v, 0)); + let w = try!(check_ovf(w, 0)); + Ok(Val::I32(mingle(v, w))) + } + Expr::Select(vtype, ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + if vtype == VType::I16 { + Ok(Val::I16(select(v.as_u32(), try!(w.as_u16()) as u32) as u16)) + } else { + Ok(Val::I32(select(v.as_u32(), w.as_u32()))) + } + } + Expr::And(vtype, ref vx) => { + let v = try!(self.eval_expr(vx)); + match vtype { + VType::I16 => Ok(Val::I16(and_16(try!(v.as_u16()) as u32) as u16)), + VType::I32 => Ok(Val::I32(and_32(v.as_u32()))), + } + } + Expr::Or(vtype, ref vx) => { + let v = try!(self.eval_expr(vx)); + match vtype { + VType::I16 => Ok(Val::I16(or_16(try!(v.as_u16()) as u32) as u16)), + VType::I32 => Ok(Val::I32(or_32(v.as_u32()))), + } + } + Expr::Xor(vtype, ref vx) => { + let v = try!(self.eval_expr(vx)); + match vtype { + VType::I16 => Ok(Val::I16(xor_16(try!(v.as_u16()) as u32) as u16)), + VType::I32 => Ok(Val::I32(xor_32(v.as_u32()))), + } + } + Expr::RsNot(ref vx) => { + let v = try!(self.eval_expr(vx)); + Ok(Val::I32(!v.as_u32())) + } + Expr::RsAnd(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32(v.as_u32() & w.as_u32())) + } + Expr::RsOr(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32(v.as_u32() | w.as_u32())) + } + Expr::RsXor(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32(v.as_u32() ^ w.as_u32())) + } + Expr::RsRshift(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32(v.as_u32() >> w.as_u32())) + } + Expr::RsLshift(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32(v.as_u32() << w.as_u32())) + } + // Expr::RsEqual(ref vx, ref wx) => { + // let v = try!(self.eval_expr(vx)); + // let w = try!(self.eval_expr(wx)); + // Ok(Val::I32((v.as_u32() == w.as_u32()) as u32)) + // } + Expr::RsNotEqual(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32((v.as_u32() != w.as_u32()) as u32)) + } + Expr::RsPlus(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32(v.as_u32() + w.as_u32())) + } + Expr::RsMinus(ref vx, ref wx) => { + let v = try!(self.eval_expr(vx)); + let w = try!(self.eval_expr(wx)); + Ok(Val::I32(v.as_u32() - w.as_u32())) + } + } + } + + #[inline] + fn eval_subs(&self, subs: &Vec) -> Res> { + subs.iter().map(|v| self.eval_expr(v).map(|w| w.as_usize())).collect() + } + + /// Dimension an array. + fn array_dim(&mut self, var: &Var, dims: &Vec) -> Res<()> { + let dims = try!(self.eval_subs(dims)); + match *var { + Var::A16(n, _) => self.tail[n].dimension(dims, 0), + Var::A32(n, _) => self.hybrid[n].dimension(dims, 0), + _ => return IE994.err(), + } + } + + /// Assign to a variable. 
+ fn assign(&mut self, var: &Var, val: Val) -> Res<()> { + match *var { + Var::I16(n) => Ok(self.spot[n].assign(try!(val.as_u16()))), + Var::I32(n) => Ok(self.twospot[n].assign(val.as_u32())), + Var::A16(n, ref subs) => { + let subs = try!(self.eval_subs(subs)); + self.tail[n].set_md(subs, try!(val.as_u16()), 0) + } + Var::A32(n, ref subs) => { + let subs = try!(self.eval_subs(subs)); + self.hybrid[n].set_md(subs, val.as_u32(), 0) + } + } + } + + /// Look up the value of a variable. + fn lookup(&self, var: &Var) -> Res { + match *var { + Var::I16(n) => Ok(Val::I16(self.spot[n].val)), + Var::I32(n) => Ok(Val::I32(self.twospot[n].val)), + Var::A16(n, ref subs) => { + let subs = try!(self.eval_subs(subs)); + self.tail[n].get_md(subs, 0).map(Val::I16) + } + Var::A32(n, ref subs) => { + let subs = try!(self.eval_subs(subs)); + self.hybrid[n].get_md(subs, 0).map(Val::I32) + } + } + } + + /// Process a STASH statement. + fn stash(&mut self, var: &Var) { + match *var { + Var::I16(n) => self.spot[n].stash(), + Var::I32(n) => self.twospot[n].stash(), + Var::A16(n, _) => self.tail[n].stash(), + Var::A32(n, _) => self.hybrid[n].stash(), + } + } + + /// Process a RETRIEVE statement. + fn retrieve(&mut self, var: &Var) -> Res<()> { + match *var { + Var::I16(n) => self.spot[n].retrieve(0), + Var::I32(n) => self.twospot[n].retrieve(0), + Var::A16(n, _) => self.tail[n].retrieve(0), + Var::A32(n, _) => self.hybrid[n].retrieve(0), + } + } + + /// Process an IGNORE or REMEMBER statement. Cannot fail. + fn set_rw(&mut self, var: &Var, rw: bool) { + match *var { + Var::I16(n) => self.spot[n].rw = rw, + Var::I32(n) => self.twospot[n].rw = rw, + Var::A16(n, _) => self.tail[n].rw = rw, + Var::A32(n, _) => self.hybrid[n].rw = rw, + } + } + + /// P()rocess an ABSTAIN or REINSTATE statement. Cannot fail. + fn abstain(&mut self, what: &ast::Abstain, f: &Fn(u32) -> u32) { + if let &ast::Abstain::Label(lbl) = what { + let idx = self.program.labels[&lbl] as usize; + if self.program.stmts[idx].body != StmtBody::GiveUp { + self.abstain[idx] = f(self.abstain[idx]); + } + } else { + for (i, stype) in self.program.stmt_types.iter().enumerate() { + if stype == what { + self.abstain[i] = f(self.abstain[i]); + } + } + } + } + + /// Array readout helper. + fn array_readout(&mut self, var: &Var) -> Res<()> { + let state = &mut self.last_out; + match *var { + Var::A16(n, _) => self.tail[n].readout(self.stdout, state, 0), + Var::A32(n, _) => self.hybrid[n].readout(self.stdout, state, 0), + _ => return IE994.err(), + } + } + + /// Array writein helper. + fn array_writein(&mut self, var: &Var) -> Res<()> { + let state = &mut self.last_in; + match *var { + Var::A16(n, _) => self.tail[n].writein(state, 0), + Var::A32(n, _) => self.hybrid[n].writein(state, 0), + _ => return IE994.err(), + } + } + + /// Debug helpers. 
+ fn dump_state(&self) { + self.dump_state_one(&self.spot, "."); + self.dump_state_one(&self.twospot, ":"); + self.dump_state_one(&self.tail, ","); + self.dump_state_one(&self.hybrid, ";"); + if self.jumps.len() > 0 { + println!("Next stack: {:?}", self.jumps); + } + //println!("Abstained: {:?}", self.abstain); + } + + fn dump_state_one(&self, vec: &Vec>, sigil: &str) { + if vec.len() > 0 { + for (i, v) in vec.iter().enumerate() { + print!("{}{} = {}, ", sigil, i, v); + } + println!(""); + } + } +} diff --git a/vendor/pygments/tests/examplefiles/example.als b/vendor/pygments/tests/examplefiles/example.als new file mode 100644 index 0000000..3a5ab82 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.als @@ -0,0 +1,217 @@ +module examples/systems/views + +/* + * Model of views in object-oriented programming. + * + * Two object references, called the view and the backing, + * are related by a view mechanism when changes to the + * backing are automatically propagated to the view. Note + * that the state of a view need not be a projection of the + * state of the backing; the keySet method of Map, for + * example, produces two view relationships, and for the + * one in which the map is modified by changes to the key + * set, the value of the new map cannot be determined from + * the key set. Note that in the iterator view mechanism, + * the iterator is by this definition the backing object, + * since changes are propagated from iterator to collection + * and not vice versa. Oddly, a reference may be a view of + * more than one backing: there can be two iterators on the + * same collection, eg. A reference cannot be a view under + * more than one view type. + * + * A reference is made dirty when it is a backing for a view + * with which it is no longer related by the view invariant. + * This usually happens when a view is modified, either + * directly or via another backing. For example, changing a + * collection directly when it has an iterator invalidates + * it, as does changing the collection through one iterator + * when there are others. + * + * More work is needed if we want to model more closely the + * failure of an iterator when its collection is invalidated. + * + * As a terminological convention, when there are two + * complementary view relationships, we will give them types + * t and t'. For example, KeySetView propagates from map to + * set, and KeySetView' propagates from set to map. 
+ * + * author: Daniel Jackson + */ + +open util/ordering[State] as so +open util/relation as rel + +sig Ref {} +sig Object {} + +-- t->b->v in views when v is view of type t of backing b +-- dirty contains refs that have been invalidated +sig State { + refs: set Ref, + obj: refs -> one Object, + views: ViewType -> refs -> refs, + dirty: set refs +-- , anyviews: Ref -> Ref -- for visualization + } +-- {anyviews = ViewType.views} + +sig Map extends Object { + keys: set Ref, + map: keys -> one Ref + }{all s: State | keys + Ref.map in s.refs} +sig MapRef extends Ref {} +fact {State.obj[MapRef] in Map} + +sig Iterator extends Object { + left, done: set Ref, + lastRef: lone done + }{all s: State | done + left + lastRef in s.refs} +sig IteratorRef extends Ref {} +fact {State.obj[IteratorRef] in Iterator} + +sig Set extends Object { + elts: set Ref + }{all s: State | elts in s.refs} +sig SetRef extends Ref {} +fact {State.obj[SetRef] in Set} + +abstract sig ViewType {} +one sig KeySetView, KeySetView', IteratorView extends ViewType {} +fact ViewTypes { + State.views[KeySetView] in MapRef -> SetRef + State.views[KeySetView'] in SetRef -> MapRef + State.views[IteratorView] in IteratorRef -> SetRef + all s: State | s.views[KeySetView] = ~(s.views[KeySetView']) + } + +/** + * mods is refs modified directly or by view mechanism + * doesn't handle possibility of modifying an object and its view at once? + * should we limit frame conds to non-dirty refs? + */ +pred modifies [pre, post: State, rs: set Ref] { + let vr = pre.views[ViewType], mods = rs.*vr { + all r: pre.refs - mods | pre.obj[r] = post.obj[r] + all b: mods, v: pre.refs, t: ViewType | + b->v in pre.views[t] => viewFrame [t, pre.obj[v], post.obj[v], post.obj[b]] + post.dirty = pre.dirty + + {b: pre.refs | some v: Ref, t: ViewType | + b->v in pre.views[t] && !viewFrame [t, pre.obj[v], post.obj[v], post.obj[b]] + } + } + } + +pred allocates [pre, post: State, rs: set Ref] { + no rs & pre.refs + post.refs = pre.refs + rs + } + +/** + * models frame condition that limits change to view object from v to v' when backing object changes to b' + */ +pred viewFrame [t: ViewType, v, v', b': Object] { + t in KeySetView => v'.elts = dom [b'.map] + t in KeySetView' => b'.elts = dom [v'.map] + t in KeySetView' => (b'.elts) <: (v.map) = (b'.elts) <: (v'.map) + t in IteratorView => v'.elts = b'.left + b'.done + } + +pred MapRef.keySet [pre, post: State, setRefs: SetRef] { + post.obj[setRefs].elts = dom [pre.obj[this].map] + modifies [pre, post, none] + allocates [pre, post, setRefs] + post.views = pre.views + KeySetView->this->setRefs + KeySetView'->setRefs->this + } + +pred MapRef.put [pre, post: State, k, v: Ref] { + post.obj[this].map = pre.obj[this].map ++ k->v + modifies [pre, post, this] + allocates [pre, post, none] + post.views = pre.views + } + +pred SetRef.iterator [pre, post: State, iterRef: IteratorRef] { + let i = post.obj[iterRef] { + i.left = pre.obj[this].elts + no i.done + i.lastRef + } + modifies [pre,post,none] + allocates [pre, post, iterRef] + post.views = pre.views + IteratorView->iterRef->this + } + +pred IteratorRef.remove [pre, post: State] { + let i = pre.obj[this], i' = post.obj[this] { + i'.left = i.left + i'.done = i.done - i.lastRef + no i'.lastRef + } + modifies [pre,post,this] + allocates [pre, post, none] + pre.views = post.views + } + +pred IteratorRef.next [pre, post: State, ref: Ref] { + let i = pre.obj[this], i' = post.obj[this] { + ref in i.left + i'.left = i.left - ref + i'.done = i.done + ref + i'.lastRef = ref + } + 
modifies [pre, post, this] + allocates [pre, post, none] + pre.views = post.views + } + +pred IteratorRef.hasNext [s: State] { + some s.obj[this].left + } + +assert zippishOK { + all + ks, vs: SetRef, + m: MapRef, + ki, vi: IteratorRef, + k, v: Ref | + let s0=so/first, + s1=so/next[s0], + s2=so/next[s1], + s3=so/next[s2], + s4=so/next[s3], + s5=so/next[s4], + s6=so/next[s5], + s7=so/next[s6] | + ({ + precondition [s0, ks, vs, m] + no s0.dirty + ks.iterator [s0, s1, ki] + vs.iterator [s1, s2, vi] + ki.hasNext [s2] + vi.hasNext [s2] + ki.this/next [s2, s3, k] + vi.this/next [s3, s4, v] + m.put [s4, s5, k, v] + ki.remove [s5, s6] + vi.remove [s6, s7] + } => no State.dirty) + } + +pred precondition [pre: State, ks, vs, m: Ref] { + // all these conditions and other errors discovered in scope of 6 but 8,3 + // in initial state, must have view invariants hold + (all t: ViewType, b, v: pre.refs | + b->v in pre.views[t] => viewFrame [t, pre.obj[v], pre.obj[v], pre.obj[b]]) + // sets are not aliases +-- ks != vs + // sets are not views of map +-- no (ks+vs)->m & ViewType.pre.views + // no iterator currently on either set +-- no Ref->(ks+vs) & ViewType.pre.views + } + +check zippishOK for 6 but 8 State, 3 ViewType expect 1 + +/** + * experiment with controlling heap size + */ +fact {all s: State | #s.obj < 5} diff --git a/vendor/pygments/tests/examplefiles/example.bat b/vendor/pygments/tests/examplefiles/example.bat new file mode 100644 index 0000000..596f65d --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.bat @@ -0,0 +1,209 @@ +@ @@ echo off +::This is an example of the Windows batch language. + +setlocal EnableDelayedExpansion +(cls) +set/a^ +_te^ +sts^ +=0,^ +_"pa"^ +ssed=0^ +0 +set,/a title= Batch test +title=%,/a title% +echo^ %~nx0,^ the>,con comprehensive testing suite +ver +echo( + +if cmdextversion 2 goto =) +goto :fail + + :) +echo Starting tests at: +date/t & time/t +echo( + +if '%*'=='--help' ( + echo Usage: %~nx0 [--help] + echo --help: Display this help message and quit. + shift + goto :exit comment) else rem + +(call :comments) +call ::io+x +call:control:x +call::internal x + +:exit +if /i !_tests!==!_passed! ( + color 02 +) else if !*==* ( + color c + if not defined _exit^ +Code set _exit^ +Code=1 +) +set _percentage=NaN +if defined _tests ( + if !_tests! neq 0 (set/a_percentage=100*_passed/_tests) +) +echo( +if !_percentage!==NaN ( echo(There were no tests^^! & color e +) else ( echo Tests passed: %_passed%/%_tests% (%_percentage%%%^) ) +pause +color +title +endlocal +exit /b %_exitCode% + +x:fail +rem This should never happen. +echo Internal error 1>& 269105>>&2 +set /a _exitCode=0x69+(0105*1000) +break +goto :exit + +:comments +(rem )/? +) +rem "comment^ +(rem.) & set /a _tests+=1 +(rem) & goto :fail +(rem. ) & (rem. comment ) & echo Test %_tests%: Comments +rem ) +) +)|comment +)( +:: comment +goto :comments^^1:comment +:comments^^1 comment +if(1==1) goto :comments^ +^1 +rem^ /? +rem ^ +^ +goto :comments^ +2+comment +goto :fail +:comments2 +rem >^ +if 1==1 (goto :comments3) +:comments3) +goto :fail +:comments3 +rem comment^ +goto:fail +rem.comment comment^ +goto fail +rem "comment comment"^ +goto fail +rem comment comment^ +if "1==1" equ "1==1" goto comments4 +goto fail +:comments4 +rem comment"comment^ +set /a _passed+=1 +GOTO :EOF +goto :fail + +:IO +SET /A _tests+=1 & Echo Test !_tests:*!==^!: I/O +verify on +pushd . +if exist temp echo temp already exists. & goto :eof +md temp +cd temp +mkdir 2>nul temp +chdir temp +>cd echo Checking drive... 
+>>cd echo must be C or else this won't work +for /f "tokens=* usebackq" %%G in ("cd +) do (test0^ +.bat echo rem Machine-generated; do not edit +call echo set /a _passed+=1 >>test0.bat +type test0.bat >"test 1.bat +ren "test 1.bat" test2.bat +rename test2.bat test.bat +caLL ^ +C:test +del test.bat 2>nul +2>NUL erase test0.bat +popd +rd temp\temp +rmdir temp +VERIFY OFF +goto:eof + +:control +set /a _tests+=1 +echo Test %_tests%: Control statements +set "_iterations=0">nul +for %%G in (,+,,-, +) do @( + for /l %%H in (,-1;;-1 -3,) do ( + for /f tokens^=1-2^,5 %%I in ("2 %%H _ _ 10") do ( + for /f "tokens=1 usebackq" %%L in ( `echo %%G%%J ``` ` +` ` ) do ( for /f "tokens=2" %%M in ('echo ' %%L0 ' +' ' ) do ( set /a _iterations+=(%%M%%M^) + ) + ) + ) + ) +) +if exist %~nx0 if not exist %~nx0 goto :fail +if exist %~nx0 ( + if not exist %~nx0 goto :fail +) else ( + if exist %~nx0 goto :fail +) +if /i %_iterations% gtr -2 ( + if /i %_iterations% geq -1 ( + if /i %_iterations% lss 1 ( + if /i %_iterations% leq 0 ( + if /i %_iterations% equ 0 ( + if 1 equ 01 ( + if 1 neq "01" ( + if "1" neq 01 ( + set /a _passed+=1)))))))) +) comment +goto :eof + +:internal +set /a _tests+=1 +echo Test %_tests%: Internal commands +keys on +mklink 2>nul +>nul path %path% +>nul dpath %dpath% +if not defined prompt prompt $P$G +prompt !prompt:~!rem/ $H? +echo on +rem/? +@echo off +rem(/?>nul +rem )/? >nul +(rem (/?) >nul +rem /?>nul +rem^/?>nul +if/?>nul || if^/^?>nul || if /?>nul || if x/? >nul +for/?>nul && for^/^?>nul && for /?>nul && for x/? >nul && for /?x >nul +goto/?>nul && goto^/? && goto^ /? && goto /^ +? && goto /?>nul && goto:/? >nul && goto ) /? ) >nul && (goto /? )>nul +=set+;/p extension'),=.bat +for /f "tokens=2 delims==" %%G in ( 'assoc %+;/p extension'),%' + ) do ( + assoc 2>nul %+;/p extension'),:*.=.%=%%G + ftype 1>nul %%G +) &>nul ver +if errorlevel 0 if not errorlevel 1 set /a _passed+=1 +goto :eof +:/? +goto :fail diff --git a/vendor/pygments/tests/examplefiles/example.bc b/vendor/pygments/tests/examplefiles/example.bc new file mode 100644 index 0000000..6604cd3 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.bc @@ -0,0 +1,53 @@ +/* + * Calculate the Greatest Common Divisor of a and b. 
+ */ +define gcd(a, b) { + auto tmp; + + /* + * Euclidean algorithm + */ + while (b != 0) { + tmp = a % b; + a = b; + b = tmp; + } + return a; +} +"gcd(225, 150) = " ; gcd(225, 150) + +/* assign operators */ +a = 10 +a += 1 +a++ +++a +a-- +--a +a += 5 +a -= 5 +a *= 2 +a /= 3 +a ^= 2 +a %= 2 + +/* comparison */ +if (a > 2) { +} +if (a >= 2) { +} +if (a == 2) { +} +if (a != 2) { +} +if (a <= 2) { +} +if (a < 2) { +} + +a /* /*/ * 2 /* == a * 2 */ +a //* /*/ 1.5 /* == a / 1.5 */ +a /*/*/ * 3 /* == a * 3 */ +a * 3 /**/ * 4 /* == a * 3 * 4 */ +a / 3 //*//*/ .4 /* == a / 3 / 0.4 */ +a / 3 //*//*/ 1.3 /* == a / 3 / 1.4 */ +a / 3 /*//*// 1.3 /* == a / 3 / 1.4 */ diff --git a/vendor/pygments/tests/examplefiles/example.c b/vendor/pygments/tests/examplefiles/example.c index a7f546d..7bf7014 100644 --- a/vendor/pygments/tests/examplefiles/example.c +++ b/vendor/pygments/tests/examplefiles/example.c @@ -195,7 +195,7 @@ char convertType(int type) { case TYPE_INT: return 'I'; case TYPE_FLOAT: return 'F'; case TYPE_BOOLEAN: return 'Z'; - default: yyerror("compiler-intern error in convertType().\n"); + default : yyerror("compiler-intern error in convertType().\n"); } return 0; /* to avoid compiler-warning */ } diff --git a/vendor/pygments/tests/examplefiles/example.ceylon b/vendor/pygments/tests/examplefiles/example.ceylon index b136b99..04223c5 100644 --- a/vendor/pygments/tests/examplefiles/example.ceylon +++ b/vendor/pygments/tests/examplefiles/example.ceylon @@ -1,33 +1,52 @@ +import ceylon.language { parseInteger } + doc "A top-level function, with multi-line documentation." -void topLevel(String? a, Integer b=5, String... seqs) { +void topLevel(String? a, Integer b=5, String* seqs) { function nested(String s) { print(s[1..2]); return true; } - for (s in seqs.filter((String x) x.size > 2)) { + for (s in seqs.filter((String x) => x.size > 2)) { nested(s); } - value uppers = seqs.sequence[].uppercased; - String|Nothing z = a; - Sequence ints = { 1, 2, 3, 4, 5 }; + value uppers = seqs.map((String x) { + return x.uppercased; + }); + String|Null z = a; + {Integer+} ints = { 1, 2, 3, 4, 5 }; + value numbers = [ 1, #ffff, #ffff_ffff, $10101010, $1010_1010_1010_1010, + 123_456_789 ]; + value chars = ['a', '\{#ffff}' ]; } -shared class Example(name, element) satisfies Comparable> +shared class Example_1(name, element) satisfies Comparable> given Element satisfies Comparable { shared String name; shared Element element; + shared [Integer,String] tuple = [1, "2"]; + shared late String lastName; + variable Integer cnt = 0; - shared actual Comparison compare(Example other) { + shared Integer count => cnt; + assign count { + assert(count >= cnt); + cnt = count; + } + + shared actual Comparison compare(Example_1 other) { return element <=> other.element; } shared actual String string { - return "Example with " + element.string; + return "Example with ``element.string``"; } } -Example instance = Example { - name = "Named args call"; +Example_1 instance = Example_1 { element = 5; + name = "Named args call \{#0060}"; }; + +object example1 extends Example_1("object", 5) { +} \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.chai b/vendor/pygments/tests/examplefiles/example.chai new file mode 100644 index 0000000..85f53c3 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.chai @@ -0,0 +1,6 @@ +var f = fun(x) { x + 2; } +// comment +puts(someFunc(2 + 2 - 1 * 5 / 4)); +var x = "str"; +def dosomething(lhs, rhs) { print("lhs: ${lhs}, rhs: ${rhs}"); } +callfunc(`+`, 1, 4); diff 
--git a/vendor/pygments/tests/examplefiles/example.clay b/vendor/pygments/tests/examplefiles/example.clay new file mode 100644 index 0000000..784752c --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.clay @@ -0,0 +1,33 @@ + +/// @section StringLiteralRef + +record StringLiteralRef ( + sizep : Pointer[SizeT], +); + + +/// @section predicates + +overload ContiguousSequence?(#StringLiteralRef) : Bool = true; +[s when StringLiteral?(s)] +overload ContiguousSequence?(#Static[s]) : Bool = true; + + + +/// @section size, begin, end, index + +forceinline overload size(a:StringLiteralRef) = a.sizep^; + +forceinline overload begin(a:StringLiteralRef) : Pointer[Char] = Pointer[Char](a.sizep + 1); +forceinline overload end(a:StringLiteralRef) = begin(a) + size(a); + +[I when Integer?(I)] +forceinline overload index(a:StringLiteralRef, i:I) : ByRef[Char] { + assert["boundsChecks"](i >= 0 and i < size(a), "StringLiteralRef index out of bounds"); + return ref (begin(a) + i)^; +} + +foo() = """ +long\tlong +story +""" diff --git a/vendor/pygments/tests/examplefiles/example.cob b/vendor/pygments/tests/examplefiles/example.cob index 3f65e49..92d2e30 100644 --- a/vendor/pygments/tests/examplefiles/example.cob +++ b/vendor/pygments/tests/examplefiles/example.cob @@ -2617,940 +2617,4 @@ GC0710 88 Token-Is-Reserved-Word VALUE " ". ***************************************************************** ** Perform all program-wide initialization operations ** ***************************************************************** - 101-Establish-Working-Env. - MOVE TRIM(Src-Filename,Leading) TO Src-Filename - ACCEPT Env-TEMP - FROM ENVIRONMENT "TEMP" - END-ACCEPT - ACCEPT Lines-Per-Page-ENV - FROM ENVIRONMENT "OCXREF_LINES" - END-ACCEPT - INSPECT Src-Filename REPLACING ALL "\" BY "/" - INSPECT Env-TEMP REPLACING ALL "\" BY "/" - MOVE Src-Filename TO Program-Path - MOVE Program-Path TO Heading-2 - CALL "C$JUSTIFY" - USING Heading-2, "Right" - END-CALL - MOVE LENGTH(TRIM(Src-Filename,Trailing)) TO I - MOVE 0 TO J - PERFORM UNTIL Src-Filename(I:1) = '/' - OR I = 0 - SUBTRACT 1 FROM I - ADD 1 TO J - END-PERFORM - UNSTRING Src-Filename((I + 1):J) DELIMITED BY "." - INTO Filename, Dummy - END-UNSTRING - STRING TRIM(Env-TEMP,Trailing) - "/" - TRIM(Filename,Trailing) - ".i" - DELIMITED SIZE - INTO Expanded-Src-Filename - END-STRING - STRING Program-Path(1:I) - TRIM(Filename,Trailing) - ".lst" - DELIMITED SIZE - INTO Report-Filename - END-STRING - IF Lines-Per-Page-ENV NOT = SPACES - MOVE NUMVAL(Lines-Per-Page-ENV) TO Lines-Per-Page - ELSE - MOVE 60 TO Lines-Per-Page - END-IF - ACCEPT Todays-Date - FROM DATE YYYYMMDD - END-ACCEPT - MOVE Todays-Date TO H1X-Date - H1S-Date - MOVE "????????????..." TO SPI-Current-Program-ID - MOVE SPACES TO SPI-Current-Verb - Held-Reference - MOVE "Y" TO F-First-Record - . - / - 200-Execute-cobc SECTION. - 201-Build-Cmd. - STRING "cobc -E " - TRIM(Program-Path, Trailing) - " > " - TRIM(Expanded-Src-Filename,Trailing) - DELIMITED SIZE - INTO Cmd - END-STRING - CALL "SYSTEM" - USING Cmd - END-CALL - IF RETURN-CODE NOT = 0 - DISPLAY - "Cross-reference terminated by previous errors" - UPON SYSERR - END-DISPLAY - GOBACK - END-IF - . - - 209-Exit. - EXIT - . - / - 300-Tokenize-Source SECTION. - 301-Driver. - OPEN INPUT Expand-Code - MOVE SPACES TO Expand-Code-Rec - MOVE 256 TO Src-Ptr - MOVE 0 TO Num-UserNames - SPI-Current-Line-No - MOVE "?" TO SPI-Current-Division -GC0710 MOVE "N" TO F-Verb-Has-Been-Found. 
- PERFORM FOREVER - PERFORM 310-Get-Token - IF Token-Is-EOF - EXIT PERFORM - END-IF - MOVE UPPER-CASE(SPI-Current-Token) - TO SPI-Current-Token-UC - IF Token-Is-Verb - MOVE SPI-Current-Token-UC TO SPI-Current-Verb - SPI-Prior-Token - IF Held-Reference NOT = SPACES - MOVE Held-Reference TO Sort-Rec - MOVE SPACES TO Held-Reference - RELEASE Sort-Rec - END-IF - END-IF - EVALUATE TRUE - WHEN In-IDENTIFICATION-DIVISION - PERFORM 320-IDENTIFICATION-DIVISION - WHEN In-ENVIRONMENT-DIVISION - PERFORM 330-ENVIRONMENT-DIVISION - WHEN In-DATA-DIVISION - PERFORM 340-DATA-DIVISION - WHEN In-PROCEDURE-DIVISION - PERFORM 350-PROCEDURE-DIVISION - END-EVALUATE - IF Token-Is-Key-Word - MOVE SPI-Current-Token-UC TO SPI-Prior-Token - END-IF - IF F-Token-Ended-Sentence = "Y" - AND SPI-Current-Division NOT = "I" - MOVE SPACES TO SPI-Prior-Token - SPI-Current-Verb - END-IF - - END-PERFORM - CLOSE Expand-Code - EXIT SECTION - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 310-Get-Token. - *>-- Position to 1st non-blank character - MOVE F-Token-Ended-Sentence TO F-Last-Token-Ended-Sent - MOVE "N" TO F-Token-Ended-Sentence - PERFORM UNTIL Expand-Code-Rec(Src-Ptr : 1) NOT = SPACE - IF Src-Ptr > 255 - READ Expand-Code AT END - IF Held-Reference NOT = SPACES - MOVE Held-Reference TO Sort-Rec - MOVE SPACES TO Held-Reference - RELEASE Sort-Rec - END-IF - SET Token-Is-EOF TO TRUE - MOVE 0 TO SPI-Current-Line-No - EXIT PARAGRAPH - END-READ - IF ECR-1 = "#" - PERFORM 311-Control-Record - ELSE - PERFORM 312-Expand-Code-Record - END-IF - ELSE - ADD 1 TO Src-Ptr - END-IF - END-PERFORM - *>-- Extract token string - MOVE Expand-Code-Rec(Src-Ptr : 1) TO SPI-Current-Char - MOVE Expand-Code-Rec(Src-Ptr + 1: 1) TO SPI-Next-Char - IF SPI-Current-Char = "." - ADD 1 TO Src-Ptr - MOVE SPI-Current-Char TO SPI-Current-Token - MOVE SPACE TO SPI-Token-Type - MOVE "Y" TO F-Token-Ended-Sentence - EXIT PARAGRAPH - END-IF - IF Current-Char-Is-Punct - AND SPI-Current-Char = "=" - AND SPI-Current-Division = "P" - ADD 1 TO Src-Ptr - MOVE "EQUALS" TO SPI-Current-Token - MOVE "K" TO SPI-Token-Type - EXIT PARAGRAPH - END-IF - IF Current-Char-Is-Punct *> So subscripts don't get flagged w/ "*" - AND SPI-Current-Char = "(" - AND SPI-Current-Division = "P" - MOVE SPACES TO SPI-Prior-Token - END-IF - IF Current-Char-Is-Punct - ADD 1 TO Src-Ptr - MOVE SPI-Current-Char TO SPI-Current-Token - MOVE SPACE TO SPI-Token-Type - EXIT PARAGRAPH - END-IF - IF Current-Char-Is-Quote - ADD 1 TO Src-Ptr - UNSTRING Expand-Code-Rec - DELIMITED BY SPI-Current-Char - INTO SPI-Current-Token - WITH POINTER Src-Ptr - END-UNSTRING - IF Expand-Code-Rec(Src-Ptr : 1) = "." - MOVE "Y" TO F-Token-Ended-Sentence - ADD 1 TO Src-Ptr - END-IF - SET Token-Is-Literal-Alpha TO TRUE - EXIT PARAGRAPH - END-IF - IF Current-Char-Is-X AND Next-Char-Is-Quote - ADD 2 TO Src-Ptr - UNSTRING Expand-Code-Rec - DELIMITED BY SPI-Next-Char - INTO SPI-Current-Token - WITH POINTER Src-Ptr - END-UNSTRING - IF Expand-Code-Rec(Src-Ptr : 1) = "." - MOVE "Y" TO F-Token-Ended-Sentence - ADD 1 TO Src-Ptr - END-IF - SET Token-Is-Literal-Number TO TRUE - EXIT PARAGRAPH - END-IF - IF Current-Char-Is-Z AND Next-Char-Is-Quote - ADD 2 TO Src-Ptr - UNSTRING Expand-Code-Rec - DELIMITED BY SPI-Next-Char - INTO SPI-Current-Token - WITH POINTER Src-Ptr - END-UNSTRING - IF Expand-Code-Rec(Src-Ptr : 1) = "." 
- MOVE "Y" TO F-Token-Ended-Sentence - ADD 1 TO Src-Ptr - END-IF - SET Token-Is-Literal-Alpha TO TRUE - EXIT PARAGRAPH - END-IF - IF F-Processing-PICTURE = "Y" - UNSTRING Expand-Code-Rec - DELIMITED BY ". " OR " " - INTO SPI-Current-Token - DELIMITER IN Delim - WITH POINTER Src-Ptr - END-UNSTRING - IF Delim = ". " - MOVE "Y" TO F-Token-Ended-Sentence - ADD 1 TO Src-Ptr - END-IF - IF UPPER-CASE(SPI-Current-Token) = "IS" - MOVE SPACE TO SPI-Token-Type - EXIT PARAGRAPH - ELSE - MOVE "N" TO F-Processing-PICTURE - MOVE SPACE TO SPI-Token-Type - EXIT PARAGRAPH - END-IF - END-IF - UNSTRING Expand-Code-Rec - DELIMITED BY ". " OR " " OR "=" OR "(" OR ")" OR "*" - OR "/" OR "&" OR ";" OR "," OR "<" - OR ">" OR ":" - INTO SPI-Current-Token - DELIMITER IN Delim - WITH POINTER Src-Ptr - END-UNSTRING - IF Delim = ". " - MOVE "Y" TO F-Token-Ended-Sentence - END-IF - IF Delim NOT = ". " AND " " - SUBTRACT 1 FROM Src-Ptr - END-IF - *>-- Classify Token - MOVE UPPER-CASE(SPI-Current-Token) TO Search-Token - IF Search-Token = "EQUAL" OR "EQUALS" - MOVE "EQUALS" TO SPI-Current-Token - MOVE "K" TO SPI-Token-Type - EXIT PARAGRAPH - END-IF - SEARCH ALL Reserved-Word - WHEN RW-Word (RW-Idx) = Search-Token - MOVE RW-Type (RW-Idx) TO SPI-Token-Type -GC0710 IF Token-Is-Verb -GC0710 MOVE "Y" TO F-Verb-Has-Been-Found -GC0710 END-IF - EXIT PARAGRAPH - END-SEARCH - *>-- Not a reserved word, must be a user name - SET Token-Is-Identifier TO TRUE *> NEEDS EXPANSION!!!! - PERFORM 313-Check-For-Numeric-Token - IF Token-Is-Literal-Number - IF (F-Last-Token-Ended-Sent = "Y") - AND (SPI-Current-Division = "D") - MOVE "LEVEL #" TO SPI-Current-Token - MOVE "K" TO SPI-Token-Type - EXIT PARAGRAPH - ELSE - EXIT PARAGRAPH - END-IF - END-IF - EXIT PARAGRAPH - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 311-Control-Record. - UNSTRING ECR-2-256 - DELIMITED BY '"' - INTO PIC-X10, PIC-X256, Dummy - END-UNSTRING - INSPECT PIC-X10 REPLACING ALL '"' BY SPACE - COMPUTE I = NUMVAL(PIC-X10) - 1 - IF TRIM(PIC-X256,Trailing) = TRIM(Program-Path,Trailing) - MOVE I TO SPI-Current-Line-No - SET In-Main-Module TO TRUE - IF Saved-Section NOT = SPACES - MOVE Saved-Section TO SPI-Current-Section - END-IF - ELSE - SET In-Copybook TO TRUE - IF Saved-Section = SPACES - MOVE SPI-Current-Section TO Saved-Section - END-IF - MOVE LENGTH(TRIM(PIC-X256,Trailing)) TO I - MOVE 0 TO J - PERFORM UNTIL PIC-X256(I:1) = '/' - OR I = 0 - SUBTRACT 1 FROM I - ADD 1 TO J - END-PERFORM - UNSTRING PIC-X256((I + 1):J) DELIMITED BY "." - INTO Filename, Dummy - END-UNSTRING - MOVE "[" TO SPI-CS-1 - MOVE Filename TO SPI-CS-2-14 - IF SPI-CS-11-14 NOT = SPACES - MOVE "..." TO SPI-CS-11-14 - END-IF - MOVE "]" TO SPI-CS-15 - END-IF - MOVE SPACES TO Expand-Code-Rec *> Force another READ - MOVE 256 TO Src-Ptr - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 312-Expand-Code-Record. - MOVE 1 TO Src-Ptr - IF In-Main-Module - ADD 1 To SPI-Current-Line-No - END-IF - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 313-Check-For-Numeric-Token. - MOVE SPI-Current-Token TO PIC-X32 - INSPECT PIC-X32 - REPLACING TRAILING SPACES BY "0" - IF PIC-X32 IS NUMERIC *> Simple Unsigned Integer - SET Token-Is-Literal-Number TO TRUE - EXIT PARAGRAPH - END-IF - IF PIC-X32(1:1) = "+" OR "-" - MOVE "0" TO PIC-X32(1:1) - END-IF - MOVE 0 TO Tally - INSPECT PIC-X32 - TALLYING Tally FOR ALL "." - IF Tally = 1 - INSPECT PIC-X32 REPLACING ALL "." 
BY "0" - END-IF - IF PIC-X32 IS NUMERIC - SET Token-Is-Literal-Number TO TRUE - EXIT PARAGRAPH - END-IF - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 320-IDENTIFICATION-DIVISION. -GC0710 MOVE "N" TO F-Verb-Has-Been-Found - IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION" - MOVE SPI-Prior-Token TO SPI-Current-Division - EXIT PARAGRAPH - END-IF - IF SPI-Prior-Token = "PROGRAM-ID" - MOVE SPACES TO SPI-Prior-Token - MOVE SPI-Current-Token TO SPI-Current-Program-ID - IF SPI-CP-13-15 NOT = SPACES - MOVE "..." TO SPI-CP-13-15 - END-IF - EXIT PARAGRAPH - END-IF - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 330-ENVIRONMENT-DIVISION. - IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION" - MOVE SPI-Prior-Token TO SPI-Current-Division - EXIT PARAGRAPH - END-IF - IF Token-Is-Key-Word AND SPI-Current-Token = "SECTION" - MOVE SPI-Prior-Token TO SPI-Current-Section - EXIT PARAGRAPH - END-IF - IF Token-Is-Identifier - PERFORM 361-Release-Ref - END-IF - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 340-DATA-DIVISION. - IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION" - MOVE SPI-Prior-Token TO SPI-Current-Division - EXIT PARAGRAPH - END-IF - IF Token-Is-Key-Word AND SPI-Current-Token = "SECTION" - MOVE SPI-Prior-Token TO SPI-Current-Section - EXIT PARAGRAPH - END-IF - IF (SPI-Current-Token = "PIC" OR "PICTURE") - AND (Token-Is-Key-Word) - MOVE "Y" TO F-Processing-PICTURE - EXIT PARAGRAPH - END-IF -GC0710 IF Token-Is-Reserved-Word -GC0710 AND SPI-Prior-Token = "LEVEL #" -GC0710 MOVE SPACES TO SPI-Prior-Token -GC0710 EXIT PARAGRAPH -GC0710 END-IF - IF Token-Is-Identifier - EVALUATE SPI-Prior-Token - WHEN "FD" - PERFORM 360-Release-Def - MOVE SPACES TO SPI-Prior-Token - WHEN "SD" - PERFORM 360-Release-Def - MOVE SPACES TO SPI-Prior-Token - WHEN "LEVEL #" - PERFORM 360-Release-Def - MOVE SPACES TO SPI-Prior-Token - WHEN "INDEXED" - PERFORM 360-Release-Def - MOVE SPACES TO SPI-Prior-Token - WHEN "USING" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN "INTO" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - EXIT PARAGRAPH - END-IF - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 350-PROCEDURE-DIVISION. - IF SPI-Current-Section NOT = "PROCEDURE" - MOVE "PROCEDURE" TO SPI-Current-Section - END-IF -GC0710 IF SPI-Current-Token-UC = "PROGRAM" -GC0710 AND SPI-Prior-Token = "END" -GC0710 MOVE "?" 
TO SPI-Current-Division -GC0710 EXIT PARAGRAPH -GC0710 END-IF - IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION" - MOVE SPI-Prior-Token TO SPI-Current-Division - EXIT PARAGRAPH - END-IF - IF SPI-Current-Verb = SPACES -GC0710 AND F-Verb-Has-Been-Found = "Y" - IF Token-Is-Identifier - PERFORM 360-Release-Def - MOVE SPACES TO SPI-Prior-Token - END-IF - EXIT PARAGRAPH - END-IF - IF NOT Token-Is-Identifier - EXIT PARAGRAPH - END-IF - EVALUATE SPI-Current-Verb - WHEN "ACCEPT" - PERFORM 351-ACCEPT - WHEN "ADD" - PERFORM 351-ADD - WHEN "ALLOCATE" - PERFORM 351-ALLOCATE - WHEN "CALL" - PERFORM 351-CALL - WHEN "COMPUTE" - PERFORM 351-COMPUTE - WHEN "DIVIDE" - PERFORM 351-DIVIDE - WHEN "FREE" - PERFORM 351-FREE - WHEN "INITIALIZE" - PERFORM 351-INITIALIZE - WHEN "INSPECT" - PERFORM 351-INSPECT - WHEN "MOVE" - PERFORM 351-MOVE - WHEN "MULTIPLY" - PERFORM 351-MULTIPLY - WHEN "PERFORM" - PERFORM 351-PERFORM - WHEN "SET" - PERFORM 351-SET - WHEN "STRING" - PERFORM 351-STRING - WHEN "SUBTRACT" - PERFORM 351-SUBTRACT - WHEN "TRANSFORM" - PERFORM 351-TRANSFORM - WHEN "UNSTRING" - PERFORM 351-UNSTRING - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-ACCEPT. - EVALUATE SPI-Prior-Token - WHEN "ACCEPT" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-ADD. - EVALUATE SPI-Prior-Token - WHEN "GIVING" - PERFORM 362-Release-Upd - WHEN "TO" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-ALLOCATE. - EVALUATE SPI-Prior-Token - WHEN "ALLOCATE" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN "RETURNING" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-CALL. - EVALUATE SPI-Prior-Token - WHEN "RETURNING" - PERFORM 362-Release-Upd - WHEN "GIVING" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-COMPUTE. - EVALUATE SPI-Prior-Token - WHEN "COMPUTE" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-DIVIDE. - EVALUATE SPI-Prior-Token - WHEN "INTO" - PERFORM 363-Set-Upd - MOVE Sort-Rec TO Held-Reference - WHEN "GIVING" - IF Held-Reference NOT = SPACES - MOVE Held-Reference To Sort-Rec - MOVE SPACES To Held-Reference - SR-Ref-Flag - RELEASE Sort-Rec - END-IF - PERFORM 362-Release-Upd - WHEN "REMAINDER" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-FREE. - PERFORM 362-Release-Upd - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-INITIALIZE. - EVALUATE SPI-Prior-Token - WHEN "INITIALIZE" - PERFORM 362-Release-Upd - WHEN "REPLACING" - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-INSPECT. 
- EVALUATE SPI-Prior-Token - WHEN "INSPECT" - PERFORM 364-Set-Ref - MOVE SPACES TO Held-Reference - MOVE SPACES TO SPI-Prior-Token - WHEN "TALLYING" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN "REPLACING" - IF Held-Reference NOT = SPACES - MOVE Held-Reference TO Sort-Rec - MOVE SPACES TO Held-Reference - MOVE "*" TO SR-Ref-Flag - RELEASE Sort-Rec - END-IF - MOVE SPACES TO SPI-Prior-Token - WHEN "CONVERTING" - IF Held-Reference NOT = SPACES - MOVE Held-Reference TO Sort-Rec - MOVE SPACES TO Held-Reference - MOVE "*" TO SR-Ref-Flag - RELEASE Sort-Rec - END-IF - MOVE SPACES TO SPI-Prior-Token - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-MOVE. - EVALUATE SPI-Prior-Token - WHEN "TO" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-MULTIPLY. - EVALUATE SPI-Prior-Token - WHEN "BY" - PERFORM 363-Set-Upd - MOVE Sort-Rec TO Held-Reference - WHEN "GIVING" - MOVE Held-Reference TO Sort-Rec - MOVE SPACES TO Held-Reference - SR-Ref-Flag - RELEASE Sort-Rec - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-PERFORM. - EVALUATE SPI-Prior-Token - WHEN "VARYING" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN "AFTER" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-SET. - EVALUATE SPI-Prior-Token - WHEN "SET" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-STRING. - EVALUATE SPI-Prior-Token - WHEN "INTO" - PERFORM 362-Release-Upd - WHEN "POINTER" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-SUBTRACT. - EVALUATE SPI-Prior-Token - WHEN "GIVING" - PERFORM 362-Release-Upd - WHEN "FROM" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-TRANSFORM. - EVALUATE SPI-Prior-Token - WHEN "TRANSFORM" - PERFORM 362-Release-Upd - MOVE SPACES TO SPI-Prior-Token - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 351-UNSTRING. - EVALUATE SPI-Prior-Token - WHEN "INTO" - PERFORM 362-Release-Upd - WHEN "DELIMITER" - PERFORM 362-Release-Upd - WHEN "COUNT" - PERFORM 362-Release-Upd - WHEN "POINTER" - PERFORM 362-Release-Upd - WHEN "TALLYING" - PERFORM 362-Release-Upd - WHEN OTHER - PERFORM 361-Release-Ref - END-EVALUATE - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 360-Release-Def. - MOVE SPACES TO Sort-Rec - MOVE SPI-Current-Program-ID TO SR-Prog-ID - MOVE SPI-Current-Token-UC TO SR-Token-UC - MOVE SPI-Current-Token TO SR-Token - MOVE SPI-Current-Section TO SR-Section - MOVE SPI-Current-Line-No TO SR-Line-No-Def - MOVE 0 TO SR-Line-No-Ref - RELEASE Sort-Rec - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 361-Release-Ref. - PERFORM 364-Set-Ref - RELEASE Sort-Rec - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 362-Release-Upd. 
- PERFORM 363-Set-Upd - RELEASE Sort-Rec - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 363-Set-Upd. - MOVE SPACES TO Sort-Rec - MOVE SPI-Current-Program-ID TO SR-Prog-ID - MOVE SPI-Current-Token-UC TO SR-Token-UC - MOVE SPI-Current-Token TO SR-Token - MOVE SPI-Current-Section TO SR-Section - MOVE SPI-Current-Line-No TO SR-Line-No-Ref - MOVE "*" TO SR-Ref-Flag - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 364-Set-Ref. - MOVE SPACES TO Sort-Rec - MOVE SPI-Current-Program-ID TO SR-Prog-ID - MOVE SPI-Current-Token-UC TO SR-Token-UC - MOVE SPI-Current-Token TO SR-Token - MOVE SPI-Current-Section TO SR-Section - MOVE SPI-Current-Line-No TO SR-Line-No-Ref - . - / - 400-Produce-Xref-Listing SECTION. - 401-Init. - MOVE SPACES TO Detail-Line-X - Group-Indicators - MOVE 0 TO I - Lines-Left -GC0710 MOVE 'N' TO F-Duplicate - . - - 402-Process-Sorted-Recs. - PERFORM FOREVER - RETURN Sort-File AT END - EXIT PERFORM - END-RETURN - IF SR-Prog-ID NOT = GI-Prog-ID - OR SR-Token-UC NOT = GI-Token -GC0710 MOVE 'N' TO F-Duplicate - IF Detail-Line-X NOT = SPACES - PERFORM 410-Generate-Report-Line - END-IF - IF SR-Prog-ID NOT = GI-Prog-ID - MOVE 0 TO Lines-Left - END-IF - MOVE SR-Prog-ID TO GI-Prog-ID - MOVE SR-Token-UC TO GI-Token - END-IF -GC0710 IF SR-Token-UC = GI-Token -GC0710 AND SR-Line-No-Def NOT = SPACES -GC0710 AND Detail-Line-X NOT = SPACES -GC0710 MOVE 'Y' TO F-Duplicate -GC0710 PERFORM 410-Generate-Report-Line -GC0710 MOVE 0 TO I -GC0710 MOVE SR-Prog-ID TO DLX-Prog-ID -GC0710 MOVE ' (Duplicate Definition)' TO DLX-Token -GC0710 MOVE SR-Section TO DLX-Section -GC0710 MOVE SR-Line-No-Def TO DLX-Line-No-Def -GC0710 EXIT PERFORM CYCLE -GC0710 END-IF -GC0710 IF SR-Token-UC = GI-Token -GC0710 AND SR-Line-No-Def = SPACES -GC0710 AND F-Duplicate = 'Y' -GC0710 MOVE 'N' TO F-Duplicate -GC0710 PERFORM 410-Generate-Report-Line -GC0710 MOVE 0 TO I -GC0710 MOVE SR-Prog-ID TO DLX-Prog-ID -GC0710 MOVE ' (Duplicate References)' TO DLX-Token -GC0710 END-IF - IF Detail-Line-X = SPACES - MOVE SR-Prog-ID TO DLX-Prog-ID - MOVE SR-Token TO DLX-Token - MOVE SR-Section TO DLX-Section - IF SR-Line-No-Def NOT = SPACES - MOVE SR-Line-No-Def TO DLX-Line-No-Def - END-IF - END-IF - IF SR-Reference > '000000' - ADD 1 TO I - IF I > Line-Nos-Per-Rec - PERFORM 410-Generate-Report-Line - MOVE 1 TO I - END-IF - MOVE SR-Line-No-Ref TO DLX-Line-No-Ref (I) - MOVE SR-Ref-Flag TO DLX-Ref-Flag (I) - END-IF - END-PERFORM - IF Detail-Line-X NOT = SPACES - PERFORM 410-Generate-Report-Line - END-IF - EXIT SECTION - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 410-Generate-Report-Line. - IF Lines-Left < 1 - IF F-First-Record = "Y" - MOVE "N" TO F-First-Record - WRITE Report-Rec FROM Heading-1X BEFORE 1 - ELSE - MOVE SPACES TO Report-Rec - WRITE Report-Rec BEFORE PAGE - MOVE SPACES TO Report-Rec - WRITE Report-Rec BEFORE 1 - WRITE Report-Rec FROM Heading-1X BEFORE 1 - END-IF - WRITE Report-Rec FROM Heading-2 BEFORE 1 - WRITE Report-Rec FROM Heading-4X BEFORE 1 - WRITE Report-Rec FROM Heading-5X BEFORE 1 - COMPUTE - Lines-Left = Lines-Per-Page - 4 - END-COMPUTE - END-IF - WRITE Report-Rec FROM Detail-Line-X BEFORE 1 - MOVE SPACES TO Detail-Line-X - MOVE 0 TO I - SUBTRACT 1 FROM Lines-Left - . - / - 500-Produce-Source-Listing SECTION. - 501-Generate-Source-Listing. 
- OPEN INPUT Source-Code - Expand-Code - MOVE 0 TO Source-Line-No - PERFORM FOREVER - READ Expand-Code AT END - EXIT PERFORM - END-READ - IF ECR-1 = "#" - PERFORM 510-Control-Record - ELSE - PERFORM 520-Expand-Code-Record - END-IF - END-PERFORM - CLOSE Source-Code - Expand-Code - EXIT SECTION - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 510-Control-Record. - UNSTRING ECR-2-256 - DELIMITED BY '"' - INTO PIC-X10, PIC-X256, Dummy - END-UNSTRING - IF TRIM(PIC-X256,Trailing) = TRIM(Program-Path,Trailing) *> Main Pgm - SET In-Main-Module TO TRUE - IF Source-Line-No > 0 - READ Expand-Code END-READ - END-IF - ELSE *> COPY - SET In-Copybook TO TRUE - END-IF - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 520-Expand-Code-Record. - IF In-Main-Module - ADD 1 To SPI-Current-Line-No - READ Source-Code AT END NEXT SENTENCE END-READ - ADD 1 TO Source-Line-No - MOVE SPACES TO Detail-Line-S - MOVE Source-Line-No TO DLS-Line-No - MOVE SCR-1-128 TO DLS-Statement -GC0410 IF SCR-7 = "/" -GC0410 MOVE 0 TO Lines-Left -GC0410 END-IF - PERFORM 530-Generate-Source-Line - IF SCR-129-256 NOT = SPACES - MOVE SPACES TO Detail-Line-S - MOVE SCR-129-256 TO DLS-Statement - PERFORM 530-Generate-Source-Line - END-IF - ELSE - IF Expand-Code-Rec NOT = SPACES - MOVE SPACES TO Detail-Line-S - MOVE ECR-1-128 TO DLS-Statement - PERFORM 530-Generate-Source-Line - IF ECR-129-256 NOT = SPACES - MOVE SPACES TO Detail-Line-S - MOVE ECR-129-256 TO DLS-Statement - PERFORM 530-Generate-Source-Line - END-IF - END-IF - END-IF - . - *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - 530-Generate-Source-Line. - IF Lines-Left < 1 - IF F-First-Record = "Y" - MOVE "N" TO F-First-Record - WRITE Report-Rec FROM Heading-1S BEFORE 1 - ELSE - MOVE SPACES TO Report-Rec - WRITE Report-Rec BEFORE PAGE - MOVE SPACES TO Report-Rec - WRITE Report-Rec BEFORE 1 - WRITE Report-Rec FROM Heading-1S BEFORE 1 - END-IF - WRITE Report-Rec FROM Heading-2 BEFORE 1 - WRITE Report-Rec FROM Heading-4S BEFORE 1 - WRITE Report-Rec FROM Heading-5S BEFORE 1 - COMPUTE - Lines-Left = Lines-Per-Page - 4 - END-COMPUTE - END-IF - WRITE Report-Rec FROM Detail-Line-S BEFORE 1 - MOVE SPACES TO Detail-Line-S - SUBTRACT 1 FROM Lines-Left - . - END PROGRAM LISTING. diff --git a/vendor/pygments/tests/examplefiles/example.coffee b/vendor/pygments/tests/examplefiles/example.coffee new file mode 100644 index 0000000..2cbd1df --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.coffee @@ -0,0 +1,27 @@ +# function arrows + +methodA:-> 'A' +methodB:=> 'B' +methodC:()=> 'C' +methodD:()-> 'D' +methodE:(a,b)-> 'E' +methodF:(c,d)-> 'F' +-> 'G' +=> 'H' + +(-> 'I') +(=> 'J') + +# strings + +"#{wow}" +"w#{wow}w" +"#wow" +"wow#" +"w#ow" + +'#{wow}' +'w#{wow}w' +'#wow' +'wow#' +'w#ow' diff --git a/vendor/pygments/tests/examplefiles/example.e b/vendor/pygments/tests/examplefiles/example.e new file mode 100644 index 0000000..2e43954 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.e @@ -0,0 +1,124 @@ +note + description : "[ + This is use to have almost every language element." + + That way, I can correctly test the lexer. %]" + + Don't try to understand what it does. It's not even compilling. 
+ ]" + date : "August 6, 2013" + revision : "0.1" + +class + SAMPLE + +inherit + ARGUMENTS + rename + Command_line as Caller_command, + command_name as Application_name + undefine + out + end + ANY + export + {ANY} out + redefine + out + end + + + +create + make + +convert + as_boolean: {BOOLEAN} + +feature {NONE} -- Initialization + + make + -- Run application. + local + i1_:expanded INTEGER + f_1:REAL_64 + l_char:CHARACTER_8 + do + l_char:='!' + l_char:='%'' + l_char:='%%' + i1_:=80 - 0x2F0C // 0C70 \\ 0b10110 * 1; + f_1:=0.1 / .567 + f_1:=34. + f_1:=12345.67890 + inspect i1_ + when 1 then + io.output.put_integer (i1_) -- Comment + else + io.output.put_real (f_1.truncated_to_real) + end + io.output.put_string (CuRrEnt.out) -- Comment + (agent funct_1).call([1,2,"Coucou"]) + end + +feature -- Access + + funct_1(x,y:separate INTEGER;a_text:READABLE_STRING_GENERAL):detachable BOOLEAN + obsolete "This function is obsolete" + require + Is_Attached: AttAched a_text + local + l_list:LIST[like x] + do + if (NOT a_text.is_empty=TrUe or elSe ((x<0 aNd x>10) oR (y>0 and then y<10))) xor True thEn + ResuLT := FalSe + elseif (acROss l_list as la_list SoMe la_list.item<0 end) implies a_text.is_boolean then + ResuLT := FalSe + else + Result := TruE + eND + from + l_list.start + until + l_list.exhausted + loop + l_list.forth + variant + l_list.count - l_list.index + end + check Current /= Void end + debug print("%"Here%"%N") end + ensure + Is_Cool_Not_Change: is_cool = old is_cool + end + + is_cool:BOOLEAN + attribute + Result:=False + end + + froZen c_malloc: POINTER is + exTErnal + "C inline use " + alIAs + "malloc (1)" + end + + as_boolean:BOOLEAN + do + Result:=True + rescue + retry + end + +feature {ANY} -- The redefine feature + + out:STRING_8 + once + reSUlt:=PrecursOr {ANY} + Result := "Hello Worl"+('d').out + end + +invariant + Always_Cool: is_cool +end diff --git a/vendor/pygments/tests/examplefiles/example.elm b/vendor/pygments/tests/examplefiles/example.elm new file mode 100644 index 0000000..222d46e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.elm @@ -0,0 +1,58 @@ +import Math.Vector3 (..) +import Math.Matrix4 (..) +import Graphics.WebGL (..) 
+ +-- Create a mesh with two triangles + +type Vertex = { position:Vec3, color:Vec3 } + +mesh : [Triangle Vertex] +mesh = [ ( Vertex (vec3 0 0 0) (vec3 1 0 0) + , Vertex (vec3 1 1 0) (vec3 0 1 0) + , Vertex (vec3 1 -1 0) (vec3 0 0 1) + ) + ] + +-- Create the scene + +main : Signal Element +main = scene <~ foldp (+) 0 (fps 30) + +scene : Float -> Element +scene t = + webgl (400,400) + [ entity vertexShader fragmentShader mesh { view = view (t / 1000) } ] + +view : Float -> Mat4 +view t = + mul (makePerspective 45 1 0.01 100) + (makeLookAt (vec3 (4 * cos t) 0 (4 * sin t)) (vec3 0 0 0) (vec3 0 1 0)) + +-- Shaders + +vertexShader : Shader { attr | position:Vec3, color:Vec3 } { unif | view:Mat4 } { vcolor:Vec3 } +vertexShader = [glsl| + +attribute vec3 position; +attribute vec3 color; +uniform mat4 view; +varying vec3 vcolor; + +void main () { + gl_Position = view * vec4(position, 1.0); + vcolor = color; +} + +|] + +fragmentShader : Shader {} u { vcolor:Vec3 } +fragmentShader = [glsl| + +precision mediump float; +varying vec3 vcolor; + +void main () { + gl_FragColor = vec4(vcolor, 1.0); +} + +|] diff --git a/vendor/pygments/tests/examplefiles/example.ezt b/vendor/pygments/tests/examplefiles/example.ezt new file mode 100644 index 0000000..fec2aa4 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.ezt @@ -0,0 +1,32 @@ +* Easytrieve Plus example programm. + +* Environtment section. +PARM DEBUG(FLOW FLDCHK) + +* Library Section. +FILE PERSNL FB(150 1800) + NAME 17 8 A + EMP# 9 5 N * Note: '#' is a valid character for names. + DEPT 98 3 N. GROSS 94 4 P 2 + * ^ 2 field definitions in 1 line. + +* Call macro in example.mac. +FILE EXAMPLE FB(80 200) +%EXAMPLE SOMEFILE SOME + +* Activity Section. +JOB INPUT PERSNL NAME FIRST-PROGRAM START AT-START FINISH AT_FINISH + PRINT PAY-RPT +REPORT PAY-RPT LINESIZE 80 + TITLE 01 'PERSONNEL REPORT EXAMPLE-1' + LINE 01 DEPT NAME EMP# GROSS + +* Procedure declarations. +AT-START. PROC + DISPLAY 'PROCESSING...' +END-PROC + +AT-FINISH +PROC + DISPLAY 'DONE.' +END-PROC diff --git a/vendor/pygments/tests/examplefiles/example.f90 b/vendor/pygments/tests/examplefiles/example.f90 new file mode 100644 index 0000000..4046218 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.f90 @@ -0,0 +1,8 @@ +program main + integer, parameter :: mykind = selected_real_kind() + print *, 1 + print *, 1_mykind + print *, 1. 
+ print *, 1._mykind + print *, (1., 1._mykind) +end program main diff --git a/vendor/pygments/tests/examplefiles/example.feature b/vendor/pygments/tests/examplefiles/example.feature new file mode 100644 index 0000000..a26268d --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.feature @@ -0,0 +1,16 @@ +# First comment +Feature: My amazing feature + Feature description line 1 + Feature description line 2 + +#comment +Scenario Outline: My detailed scenario #string + Given That is set + When When I + Then I should get the + + # indented comment + Examples: + | x | subtract | remain#der | + | 12 | 5\|3 | #73 | + | #the | 10 | 15 | diff --git a/vendor/pygments/tests/examplefiles/example.fish b/vendor/pygments/tests/examplefiles/example.fish new file mode 100644 index 0000000..2cfd2c8 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.fish @@ -0,0 +1,580 @@ +# ----------------------------------------------------------------------------- +# Fishshell Samples +# |- Theme / bobthefish +# |- Function / funced +# |- Configuration / config.fish +# ----------------------------------------------------------------------------- + +# name: bobthefish +# +# bobthefish is a Powerline-style, Git-aware fish theme optimized for awesome. +# +# You will probably need a Powerline-patched font for this to work: +# +# https://powerline.readthedocs.org/en/latest/fontpatching.html +# +# I recommend picking one of these: +# +# https://github.com/Lokaltog/powerline-fonts +# +# You can override some default options in your config.fish: +# +# set -g theme_display_user yes +# set -g default_user your_normal_user + +set -g __bobthefish_current_bg NONE + +# Powerline glyphs +set __bobthefish_branch_glyph \uE0A0 +set __bobthefish_ln_glyph \uE0A1 +set __bobthefish_padlock_glyph \uE0A2 +set __bobthefish_right_black_arrow_glyph \uE0B0 +set __bobthefish_right_arrow_glyph \uE0B1 +set __bobthefish_left_black_arrow_glyph \uE0B2 +set __bobthefish_left_arrow_glyph \uE0B3 + +# Additional glyphs +set __bobthefish_detached_glyph \u27A6 +set __bobthefish_nonzero_exit_glyph '! 
' +set __bobthefish_superuser_glyph '$ ' +set __bobthefish_bg_job_glyph '% ' +set __bobthefish_hg_glyph \u263F + +# Python glyphs +set __bobthefish_superscript_glyph \u00B9 \u00B2 \u00B3 +set __bobthefish_virtualenv_glyph \u25F0 +set __bobthefish_pypy_glyph \u1D56 + +# Colors +set __bobthefish_lt_green addc10 +set __bobthefish_med_green 189303 +set __bobthefish_dk_green 0c4801 + +set __bobthefish_lt_red C99 +set __bobthefish_med_red ce000f +set __bobthefish_dk_red 600 + +set __bobthefish_slate_blue 255e87 + +set __bobthefish_lt_orange f6b117 +set __bobthefish_dk_orange 3a2a03 + +set __bobthefish_dk_grey 333 +set __bobthefish_med_grey 999 +set __bobthefish_lt_grey ccc + +set __bobthefish_dk_brown 4d2600 +set __bobthefish_med_brown 803F00 +set __bobthefish_lt_brown BF5E00 + +set __bobthefish_dk_blue 1E2933 +set __bobthefish_med_blue 275379 +set __bobthefish_lt_blue 326D9E + +# =========================== +# Helper methods +# =========================== + +function __bobthefish_in_git -d 'Check whether pwd is inside a git repo' + command which git > /dev/null 2>&1; and command git rev-parse --is-inside-work-tree >/dev/null 2>&1 +end + +function __bobthefish_in_hg -d 'Check whether pwd is inside a hg repo' + command which hg > /dev/null 2>&1; and command hg stat > /dev/null 2>&1 +end + +function __bobthefish_git_branch -d 'Get the current git branch (or commitish)' + set -l ref (command git symbolic-ref HEAD 2> /dev/null) + if [ $status -gt 0 ] + set -l branch (command git show-ref --head -s --abbrev |head -n1 2> /dev/null) + set ref "$__bobthefish_detached_glyph $branch" + end + echo $ref | sed "s-refs/heads/-$__bobthefish_branch_glyph -" +end + +function __bobthefish_hg_branch -d 'Get the current hg branch' + set -l branch (hg branch ^/dev/null) + set -l book " @ "(hg book | grep \* | cut -d\ -f3) + echo "$__bobthefish_branch_glyph $branch$book" +end + +function __bobthefish_pretty_parent -d 'Print a parent directory, shortened to fit the prompt' + echo -n (dirname $argv[1]) | sed -e 's|/private||' -e "s|^$HOME|~|" -e 's-/\(\.\{0,1\}[^/]\)\([^/]*\)-/\1-g' -e 's|/$||' +end + +function __bobthefish_git_project_dir -d 'Print the current git project base directory' + command git rev-parse --show-toplevel 2>/dev/null +end + +function __bobthefish_hg_project_dir -d 'Print the current hg project base directory' + command hg root 2>/dev/null +end + +function __bobthefish_project_pwd -d 'Print the working directory relative to project root' + echo "$PWD" | sed -e "s*$argv[1]**g" -e 's*^/**' +end + + +# =========================== +# Segment functions +# =========================== + +function __bobthefish_start_segment -d 'Start a prompt segment' + set_color -b $argv[1] + set_color $argv[2] + if [ "$__bobthefish_current_bg" = 'NONE' ] + # If there's no background, just start one + echo -n ' ' + else + # If there's already a background... 
+ if [ "$argv[1]" = "$__bobthefish_current_bg" ] + # and it's the same color, draw a separator + echo -n "$__bobthefish_right_arrow_glyph " + else + # otherwise, draw the end of the previous segment and the start of the next + set_color $__bobthefish_current_bg + echo -n "$__bobthefish_right_black_arrow_glyph " + set_color $argv[2] + end + end + set __bobthefish_current_bg $argv[1] +end + +function __bobthefish_path_segment -d 'Display a shortened form of a directory' + if test -w "$argv[1]" + __bobthefish_start_segment $__bobthefish_dk_grey $__bobthefish_med_grey + else + __bobthefish_start_segment $__bobthefish_dk_red $__bobthefish_lt_red + end + + set -l directory + set -l parent + + switch "$argv[1]" + case / + set directory '/' + case "$HOME" + set directory '~' + case '*' + set parent (__bobthefish_pretty_parent "$argv[1]") + set parent "$parent/" + set directory (basename "$argv[1]") + end + + test "$parent"; and echo -n -s "$parent" + set_color fff --bold + echo -n "$directory " + set_color normal +end + +function __bobthefish_finish_segments -d 'Close open prompt segments' + if [ -n $__bobthefish_current_bg -a $__bobthefish_current_bg != 'NONE' ] + set_color -b normal + set_color $__bobthefish_current_bg + echo -n "$__bobthefish_right_black_arrow_glyph " + set_color normal + end + set -g __bobthefish_current_bg NONE +end + + +# =========================== +# Theme components +# =========================== + +function __bobthefish_prompt_status -d 'Display symbols for a non zero exit status, root and background jobs' + set -l nonzero + set -l superuser + set -l bg_jobs + + # Last exit was nonzero + if [ $status -ne 0 ] + set nonzero $__bobthefish_nonzero_exit_glyph + end + + # if superuser (uid == 0) + set -l uid (id -u $USER) + if [ $uid -eq 0 ] + set superuser $__bobthefish_superuser_glyph + end + + # Jobs display + if [ (jobs -l | wc -l) -gt 0 ] + set bg_jobs $__bobthefish_bg_job_glyph + end + + set -l status_flags "$nonzero$superuser$bg_jobs" + + if test "$nonzero" -o "$superuser" -o "$bg_jobs" + __bobthefish_start_segment fff 000 + if [ "$nonzero" ] + set_color $__bobthefish_med_red --bold + echo -n $__bobthefish_nonzero_exit_glyph + end + + if [ "$superuser" ] + set_color $__bobthefish_med_green --bold + echo -n $__bobthefish_superuser_glyph + end + + if [ "$bg_jobs" ] + set_color $__bobthefish_slate_blue --bold + echo -n $__bobthefish_bg_job_glyph + end + + set_color normal + end +end + +function __bobthefish_prompt_user -d 'Display actual user if different from $default_user' + if [ "$theme_display_user" = 'yes' ] + if [ "$USER" != "$default_user" -o -n "$SSH_CLIENT" ] + __bobthefish_start_segment $__bobthefish_lt_grey $__bobthefish_slate_blue + echo -n -s (whoami) '@' (hostname | cut -d . 
-f 1) ' ' + end + end +end + +function __bobthefish_prompt_hg -d 'Display the actual hg state' + set -l dirty (command hg stat; or echo -n '*') + + set -l flags "$dirty" + test "$flags"; and set flags "" + + set -l flag_bg $__bobthefish_lt_green + set -l flag_fg $__bobthefish_dk_green + if test "$dirty" + set flag_bg $__bobthefish_med_red + set flag_fg fff + end + + __bobthefish_path_segment (__bobthefish_hg_project_dir) + + __bobthefish_start_segment $flag_bg $flag_fg + echo -n -s $__bobthefish_hg_glyph ' ' + + __bobthefish_start_segment $flag_bg $flag_fg + set_color $flag_fg --bold + echo -n -s (__bobthefish_hg_branch) $flags ' ' + set_color normal + + set -l project_pwd (__bobthefish_project_pwd (__bobthefish_hg_project_dir)) + if test "$project_pwd" + if test -w "$PWD" + __bobthefish_start_segment 333 999 + else + __bobthefish_start_segment $__bobthefish_med_red $__bobthefish_lt_red + end + + echo -n -s $project_pwd ' ' + end +end + +# TODO: clean up the fugly $ahead business +function __bobthefish_prompt_git -d 'Display the actual git state' + set -l dirty (command git diff --no-ext-diff --quiet --exit-code; or echo -n '*') + set -l staged (command git diff --cached --no-ext-diff --quiet --exit-code; or echo -n '~') + set -l stashed (command git rev-parse --verify refs/stash > /dev/null 2>&1; and echo -n '$') + set -l ahead (command git branch -v 2> /dev/null | grep -Eo '^\* [^ ]* *[^ ]* *\[[^]]*\]' | grep -Eo '\[[^]]*\]$' | awk 'ORS="";/ahead/ {print "+"} /behind/ {print "-"}' | sed -e 's/+-/±/') + + set -l new (command git ls-files --other --exclude-standard); + test "$new"; and set new '…' + + set -l flags "$dirty$staged$stashed$ahead$new" + test "$flags"; and set flags " $flags" + + set -l flag_bg $__bobthefish_lt_green + set -l flag_fg $__bobthefish_dk_green + if test "$dirty" -o "$staged" + set flag_bg $__bobthefish_med_red + set flag_fg fff + else + if test "$stashed" + set flag_bg $__bobthefish_lt_orange + set flag_fg $__bobthefish_dk_orange + end + end + + __bobthefish_path_segment (__bobthefish_git_project_dir) + + __bobthefish_start_segment $flag_bg $flag_fg + set_color $flag_fg --bold + echo -n -s (__bobthefish_git_branch) $flags ' ' + set_color normal + + set -l project_pwd (__bobthefish_project_pwd (__bobthefish_git_project_dir)) + if test "$project_pwd" + if test -w "$PWD" + __bobthefish_start_segment 333 999 + else + __bobthefish_start_segment $__bobthefish_med_red $__bobthefish_lt_red + end + + echo -n -s $project_pwd ' ' + end +end + +function __bobthefish_prompt_dir -d 'Display a shortened form of the current directory' + __bobthefish_path_segment "$PWD" +end + +function __bobthefish_in_virtualfish_virtualenv + set -q VIRTUAL_ENV +end + +function __bobthefish_virtualenv_python_version -d 'Get current python version' + switch (readlink (which python)) + case python2 + echo $__bobthefish_superscript_glyph[2] + case python3 + echo $__bobthefish_superscript_glyph[3] + case pypy + echo $__bobthefish_pypy_glyph + end +end + +function __bobthefish_virtualenv -d 'Get the current virtualenv' + echo $__bobthefish_virtualenv_glyph(__bobthefish_virtualenv_python_version) (basename "$VIRTUAL_ENV") +end + +function __bobthefish_prompt_virtualfish -d "Display activated virtual environment (only for virtualfish, virtualenv's activate.fish changes prompt by itself)" + set flag_bg $__bobthefish_lt_blue + set flag_fg $__bobthefish_dk_blue + __bobthefish_start_segment $flag_bg $flag_fg + set_color $flag_fg --bold + echo -n -s (__bobthefish_virtualenv) $flags ' ' + set_color normal +end 
+ + +# =========================== +# Apply theme +# =========================== + +function fish_prompt -d 'bobthefish, a fish theme optimized for awesome' + __bobthefish_prompt_status + __bobthefish_prompt_user + if __bobthefish_in_virtualfish_virtualenv + __bobthefish_prompt_virtualfish + end + if __bobthefish_in_git # TODO: do this right. + __bobthefish_prompt_git # if something is in both git and hg, check the length of + else if __bobthefish_in_hg # __bobthefish_git_project_dir vs __bobthefish_hg_project_dir + __bobthefish_prompt_hg # and pick the longer of the two. + else + __bobthefish_prompt_dir + end + __bobthefish_finish_segments +end + +# ----------------------------------------------------------------------------- +# funced - edit a function interactively +# +# Synopsis +# +# funced [OPTIONS] NAME +# +# Description +# +# funced provides an interface to edit the definition of the function NAME. +# ----------------------------------------------------------------------------- + +function funced --description 'Edit function definition' + set -l editor $EDITOR + set -l interactive + set -l funcname + while set -q argv[1] + switch $argv[1] + case -h --help + __fish_print_help funced + return 0 + + case -e --editor + set editor $argv[2] + set -e argv[2] + + case -i --interactive + set interactive 1 + + case -- + set funcname $funcname $argv[2] + set -e argv[2] + + case '-*' + set_color red + printf (_ "%s: Unknown option %s\n") funced $argv[1] + set_color normal + return 1 + + case '*' '.*' + set funcname $funcname $argv[1] + end + set -e argv[1] + end + + if begin; set -q funcname[2]; or not test "$funcname[1]"; end + set_color red + _ "funced: You must specify one function name +" + set_color normal + return 1 + end + + set -l init + switch $funcname + case '-*' + set init function -- $funcname\n\nend + case '*' + set init function $funcname\n\nend + end + + # Break editor up to get its first command (i.e. discard flags) + if test -n "$editor" + set -l editor_cmd + eval set editor_cmd $editor + if not type -f "$editor_cmd[1]" >/dev/null + _ "funced: The value for \$EDITOR '$editor' could not be used because the command '$editor_cmd[1]' could not be found + " + set editor fish + end + end + + # If no editor is specified, use fish + if test -z "$editor" + set editor fish + end + + if begin; set -q interactive[1]; or test "$editor" = fish; end + set -l IFS + if functions -q -- $funcname + # Shadow IFS here to avoid array splitting in command substitution + set init (functions -- $funcname | fish_indent --no-indent) + end + + set -l prompt 'printf "%s%s%s> " (set_color green) '$funcname' (set_color normal)' + # Unshadow IFS since the fish_title breaks otherwise + set -e IFS + if read -p $prompt -c "$init" -s cmd + # Shadow IFS _again_ to avoid array splitting in command substitution + set -l IFS + eval (echo -n $cmd | fish_indent) + end + return 0 + end + + set -q TMPDIR; or set -l TMPDIR /tmp + set -l tmpname (printf "$TMPDIR/fish_funced_%d_%d.fish" %self (random)) + while test -f $tmpname + set tmpname (printf "$TMPDIR/fish_funced_%d_%d.fish" %self (random)) + end + + if functions -q -- $funcname + functions -- $funcname > $tmpname + else + echo $init > $tmpname + end + if eval $editor $tmpname + . $tmpname + end + set -l stat $status + rm -f $tmpname >/dev/null + return $stat +end + +# ----------------------------------------------------------------------------- +# Main file for fish command completions. 
This file contains various +# common helper functions for the command completions. All actual +# completions are located in the completions subdirectory. +## ----------------------------------------------------------------------------- + +# +# Set default field separators +# + +set -g IFS \n\ \t + +# +# Set default search paths for completions and shellscript functions +# unless they already exist +# + +set -l configdir ~/.config + +if set -q XDG_CONFIG_HOME + set configdir $XDG_CONFIG_HOME +end + +# __fish_datadir, __fish_sysconfdir, __fish_help_dir, __fish_bin_dir +# are expected to have been set up by read_init from fish.cpp + +# Set up function and completion paths. Make sure that the fish +# default functions/completions are included in the respective path. + +if not set -q fish_function_path + set fish_function_path $configdir/fish/functions $__fish_sysconfdir/functions $__fish_datadir/functions +end + +if not contains $__fish_datadir/functions $fish_function_path + set fish_function_path[-1] $__fish_datadir/functions +end + +if not set -q fish_complete_path + set fish_complete_path $configdir/fish/completions $__fish_sysconfdir/completions $__fish_datadir/completions +end + +if not contains $__fish_datadir/completions $fish_complete_path + set fish_complete_path[-1] $__fish_datadir/completions +end + +# +# This is a Solaris-specific test to modify the PATH so that +# Posix-conformant tools are used by default. It is separate from the +# other PATH code because this directory needs to be prepended, not +# appended, since it contains POSIX-compliant replacements for various +# system utilities. +# + +if test -d /usr/xpg4/bin + if not contains /usr/xpg4/bin $PATH + set PATH /usr/xpg4/bin $PATH + end +end + +# +# Add a few common directories to path, if they exists. Note that pure +# console programs like makedep sometimes live in /usr/X11R6/bin, so we +# want this even for text-only terminals. +# + +set -l path_list /bin /usr/bin /usr/X11R6/bin /usr/local/bin $__fish_bin_dir + +# Root should also have the sbin directories in the path +switch $USER + case root + set path_list $path_list /sbin /usr/sbin /usr/local/sbin +end + +for i in $path_list + if not contains $i $PATH + if test -d $i + set PATH $PATH $i + end + end +end + +# +# Launch debugger on SIGTRAP +# +function fish_sigtrap_handler --on-signal TRAP --no-scope-shadowing --description "Signal handler for the TRAP signal. Lanches a debug prompt." + breakpoint +end + +# +# Whenever a prompt is displayed, make sure that interactive +# mode-specific initializations have been performed. +# This handler removes itself after it is first called. +# +function __fish_on_interactive --on-event fish_prompt + __fish_config_interactive + functions -e __fish_on_interactive +end diff --git a/vendor/pygments/tests/examplefiles/example.gd b/vendor/pygments/tests/examplefiles/example.gd new file mode 100644 index 0000000..c285ea3 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.gd @@ -0,0 +1,23 @@ +############################################################################# +## +#W example.gd +## +## This file contains a sample of a GAP declaration file. +## +DeclareProperty( "SomeProperty", IsLeftModule ); +DeclareGlobalFunction( "SomeGlobalFunction" ); + + +############################################################################# +## +#C IsQuuxFrobnicator() +## +## +## +## +## +## Tests whether R is a quux frobnicator. 
+## +## +## +DeclareSynonym( "IsQuuxFrobnicator", IsField and IsGroup ); diff --git a/vendor/pygments/tests/examplefiles/example.gi b/vendor/pygments/tests/examplefiles/example.gi new file mode 100644 index 0000000..c9c5e55 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.gi @@ -0,0 +1,64 @@ +############################################################################# +## +#W example.gd +## +## This file contains a sample of a GAP implementation file. +## + + +############################################################################# +## +#M SomeOperation( ) +## +## performs some operation on +## +InstallMethod( SomeProperty, + "for left modules", + [ IsLeftModule ], 0, + function( M ) + if IsFreeLeftModule( M ) and not IsTrivial( M ) then + return true; + fi; + TryNextMethod(); + end ); + + + +############################################################################# +## +#F SomeGlobalFunction( ) +## +## A global variadic funfion. +## +InstallGlobalFunction( SomeGlobalFunction, function( arg ) + if Length( arg ) = 3 then + return arg[1] + arg[2] * arg[3]; + elif Length( arg ) = 2 then + return arg[1] - arg[2] + else + Error( "usage: SomeGlobalFunction( , [, ] )" ); + fi; + end ); + + +# +# A plain function. +# +SomeFunc := function(x, y) + local z, func, tmp, j; + z := x * 1.0; + y := 17^17 - y; + func := a -> a mod 5; + tmp := List( [1..50], func ); + while y > 0 do + for j in tmp do + Print(j, "\n"); + od; + repeat + y := y - 1; + until 0 < 1; + y := y -1; + od; + return z; +end; + \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.golo b/vendor/pygments/tests/examplefiles/example.golo new file mode 100644 index 0000000..92ff78b --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.golo @@ -0,0 +1,113 @@ +# +# Comments +# + +module pygments.Example + +import some.Module + +local function foo = |a, b| -> a + b + +---- +golodoc string +---- +augment java.util.Collection { + + ---- + sub doc + ---- + function plop = |this, v| { + return this: length() + v + } +} + +function bar = |a, b| { + let msg = "a string" + var tmp = "" + tmp = tmp + a: toString() + println(tmp + b) +} + +function baz = { + foreach i in range(0, 5) { + if i % 2 == 0 and true or false { + print("e") + } else { + print("o") + } + } +} + +function userMatch = |v| -> + match { + when v % 2 == 0 then "e" + otherwise "o" + } +} + +function add = |x| -> |y| -> x + y + +let aChar = 'a' + +let multiline = +""" +foo +bar +baz +""" + +local function myObj = -> DynamicObject(): + name("foo"): + age(25): + define("meth", |this| -> this: name() + this: age() + +---- +Golo doc string +---- +function nullTest = { + let m = map[ + ["a", 1], + ["b", 2] + ] + + println(map: get("a") orIfNull 0) + println(map: get("b")?: toString() orIfNull "0") + +} + +struct Point = { x, y } + +function deco1 = |fun| { + return |args...| { + return "deco1 + " + fun: invokeWithArguments(args) + } +} + +@deco1 +function decofoo = |a| { + return "foo: " + a +} + +@deco1 +function decobar = |a| -> "bar: " + a + +function deco2 = |fun| { + return |args...| { + return "deco2 + " + fun: invokeWithArguments(args) + } +} + +@deco2 +@deco1 +function decobaz = |a| -> "baz: " + a + +let deco3 = ^deco1: andThen(^deco2) + +@deco3 +function decospam = |a| -> "spam: " + a + +@another.Module.deco +function ping = -> "pong" + +@deco("with", params) +function gnop = -> "gnip" diff --git a/vendor/pygments/tests/examplefiles/example.groovy b/vendor/pygments/tests/examplefiles/example.groovy new file mode 
100644 index 0000000..25ef2ea --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.groovy @@ -0,0 +1,2 @@ +#!/usr/bin/env groovy +println "Hello World" diff --git a/vendor/pygments/tests/examplefiles/example.hs b/vendor/pygments/tests/examplefiles/example.hs new file mode 100644 index 0000000..f5e2b55 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.hs @@ -0,0 +1,31 @@ +module ĈrazyThings where + +import "base" Data.Char +import "base" Data.Char (isControl, isSpace) +import "base" Data.Char (isControl, --isSpace) + isSpace) +import "base" Data.Char (isControl, -- isSpace) + isSpace) + +(-->) :: Num a => a -- signature +(-->) = 2 -- >implementation + +--test comment +-- test comment + +main :: IO () +main = putStrLn "hello world" + +gádd x y = x + y +ádd x y = x + y + + +data ĈrazyThings = + Ĉar | + House | + Peár + deriving (Show, Eq) + +-- some char literals: + +charl = ['"', 'a', '\ESC', '\'', ' '] diff --git a/vendor/pygments/tests/examplefiles/example.hx b/vendor/pygments/tests/examplefiles/example.hx new file mode 100644 index 0000000..7584fc8 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.hx @@ -0,0 +1,192 @@ +/** + * This is not really a valid Haxe file, but just an demo... + */ + +package; +package net.onthewings; + +import net.onthewings.Test; +import net.onthewings.*; + +using Lambda; +using net.onthewings.Test; + +#if flash8 +// Haxe code specific for flash player 8 +#elseif flash +// Haxe code specific for flash platform (any version) +#elseif js +// Haxe code specific for javascript plaform +#elseif neko +// Haxe code specific for neko plaform +#else +// do something else + #error // will display an error "Not implemented on this platform" + #error "Custom error message" // will display an error "Custom error message" +#end + +0; // Int +-134; // Int +0xFF00; // Int + +123.0; // Float +.14179; // Float +13e50; // Float +-1e-99; // Float + +"hello"; // String +"hello \"world\" !"; // String +'hello "world" !'; // String + +true; // Bool +false; // Bool + +null; // Unknown<0> + +~/[a-z]+/i; // EReg : regular expression + +var point = { "x" : 1, "y" : -5 }; + +{ + var x; + var y = 3; + var z : String; + var w : String = ""; + var a, b : Bool, c : Int = 0; +} + +//haxe3 pattern matching +switch(e.expr) { + case EConst(CString(s)) if (StringTools.startsWith(s, "foo")): + "1"; + case EConst(CString(s)) if (StringTools.startsWith(s, "bar")): + "2"; + case EConst(CInt(i)) if (switch(Std.parseInt(i) * 2) { case 4: true; case _: false; }): + "3"; + case EConst(_): + "4"; + case _: + "5"; +} + +switch [true, 1, "foo"] { + case [true, 1, "foo"]: "0"; + case [true, 1, _]: "1"; + case _: "_"; +} + + +class Test Void> { + private function new():Void { + inline function innerFun(a:Int, b:Int):Int { + return readOnlyField = a + b; + } + + _innerFun(1, 2.3); + } + + static public var instance(get,null):Test; + static function get_instance():Test { + return instance != null ? 
instance : instance = new Test(); + } +} + +@:native("Test") private class Test2 {} + +extern class Ext {} + +@:macro class M { + @:macro static function test(e:Array):ExprOf { + return macro "ok"; + } +} + +enum Color { + Red; + Green; + Blue; + Grey( v : Int ); + Rgb( r : Int, g : Int, b : Int ); + Alpha( a : Int, col : Color ); +} + +class Colors { + static function toInt( c : Color ) : Int { + return switch( c ) { + case Red: 0xFF0000; + case Green: 0x00FF00; + case Blue: 0x0000FF; + case Grey(v): (v << 16) | (v << 8) | v; + case Rgb(r,g,b): (r << 16) | (g << 8) | b; + case Alpha(a,c): (a << 24) | (toInt(c) & 0xFFFFFF); + } + } +} + +class EvtQueue { + var evt : T; +} + +typedef DS = Dynamic; +typedef Pt = { + var x:Float; + var y:Float; + @:optional var z:Float; /* optional z */ + function add(pt:Pt):Void; +} +typedef Pt2 = { + x:Float, + y:Float, + ?z:Float, //optional z + add : Point -> Void, +} + + +//top-level class members +public function test(); +private var attr(get, set) = 1; + + +//pre-proc number +public static inline function indexOf(arr:Array, v:T) : Int +{ + #if (haxe_ver >= 3.1) + return arr.indexOf(v); + #else + #if (flash || js) + return untyped arr.indexOf(v); + #else + return std.Lambda.indexOf(arr, v); + #end + #end +} + +//macro reification +var e = macro var $myVar = 0; +var e = macro ${v}.toLowerCase(); +var e = macro o.$myField; +var e = macro { $myField : 0 }; +var e = macro $i{varName}++; +var e = macro $v{myStr}; +var args = [macro "sub", macro 3]; +var e = macro "Hello".toLowerCase($a{args}); +(macro $i{tmp}.addAtom($v{name}, $atom)).finalize(op.pos); + +var c = macro class MyClass { + public function new() { } + public function $funcName() { + trace($v{funcName} + " was called"); + } +} + +var c = macro interface IClass {}; + +//macro class could have no name... +var def = macro class { + private inline function new(loader) this = loader; + private var loader(get,never) : $loaderType; + inline private function get_loader() : $loaderType return this; +}; + +//ECheckType +var f = (123:Float); \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.i6t b/vendor/pygments/tests/examplefiles/example.i6t new file mode 100644 index 0000000..0f41b42 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.i6t @@ -0,0 +1,32 @@ +B/examt: Example Template. + +@Purpose: To show the syntax of I6T, specifically the parts relating to the +inclusion of I7 and at signs in the first column. + +@------------------------------------------------------------------------------- + +@p Lines. + +@c +{-lines:type} +! This is a comment. +{-endlines} + +@-This line begins with @-, so it is ignored. + +@p Paragraph. +This is a paragraph. +@p Another paragraph. +So + +is + +this. + +@Purpose: This purpose line is ignored. + +@c At signs and (+ +). +[ Foo i; +print (+score [an I7 value]+), "^"; +@add sp 1 -> i; ! Assembly works even in the first column. +]; diff --git a/vendor/pygments/tests/examplefiles/example.i7x b/vendor/pygments/tests/examplefiles/example.i7x new file mode 100644 index 0000000..ab94ac6 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.i7x @@ -0,0 +1,45 @@ +example by David Corbett begins here. + +"Implements testable examples." + +An example is a kind of thing. An example can be tested. An example is seldom tested. + +example ends here. + +---- +[The] documentation [starts here.] +---- + +This extension adds examples, which may be tested. 
+ +Chapter: Usage + +To add an example to the story, we write: + + The foobar is an example. + +To interact with it in Inform 6, we write something like: + + To say (E - example): (- + print (object) {E}; + -). + [The IDE's documentation viewer does not display the closing -). I don't know how to fix that.] + +Section: Testing + +We can make an example be tested using: + + now the foobar is tested; + +Example: * Exempli Gratia - A simple example. + + *: "Exempli Gratia" + + Include example by David Corbett. + + The Kitchen is a room. The egg is an example, here. + + Before dropping the egg: + now the egg is tested. + + Test me with "get egg / drop egg". diff --git a/vendor/pygments/tests/examplefiles/example.j b/vendor/pygments/tests/examplefiles/example.j new file mode 100644 index 0000000..16cdde8 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.j @@ -0,0 +1,564 @@ +; Example JVM assembly +; Tested with JasminXT 2.4 + +.bytecode 49.0 +.source HelloWorld.java +.class public final enum HelloWorld +.super java/lang/Object +.implements java/io/Serializable +.signature "Ljava/lang/Object;Ljava/io/Serializable;" +.enclosing method hw/jasmin.HelloWorldRunner.run()V +.deprecated +.annotation visible HelloWorld + I I = 0 +.end annotation +.debug "Happy debugging!" + +.inner interface public InnerInterface inner 'HelloWorld$InnerInterface' outer HelloWorld +.inner class public InnerClass inner HelloWorld$InnerClass outer 'HelloWorld' + +.field public volatile transient I I +.field static protected final serialVersionUID 'J' signature "TJ;" = 2147483648 +.field annotation protected 'protected' [[[Lcom/oracle/util/Checksums; + .deprecated + .signature "[[[Lcom/oracle/util/Checksums;" + .attribute foo "foo.txt" + .attribute 'foo' "foo.txt" +.end field +.field public newline I +.field public static defaultString 'Ljava/lang/String;' + +.method public ()V + .limit stack 3 +.line 7 + .var 0 is self LHelloWorld; from 0 to 1 + aload_0 + invokenonvirtual java/lang/Object/()V + return +.end method + +.method static public main([Ljava/lang/String;)V + .limit locals 7 + .limit stack 10 + .throws java.lang/RuntimeException + .catch java/lang.ClassCastException from cast to 'extra_l' using /extra + .signature "([Ljava/lang/String;)V" + .stack + offset /Input + locals Object java/lang/String + locals Uninitialized 'End' + locals Uninitialized 0 + locals Top + locals Integer + locals Float + locals Long + locals Double + locals Null + locals UninitializedThis + stack Object java/lang/String + stack Uninitialized End + stack 'Uninitialized' 0 + stack 'Top' + stack Integer + stack Float + stack Long + stack Double + stack Null + stack UninitializedThis + .end stack + .stack use 1 locals + offset 'extra' + .end stack + .stack use locals + .end stack +.line 0xd + .var 0 is args [Ljava/lang/String; + aload_w 0 + arraylength + ifne /Input + iconst_1 + anewarray java/lang/String + checkcast [Ljava/lang/String; + astore_0 + aload_0 + iconst_0 + ldc "World" + dup + putstatic HelloWorld.defaultString Ljava/lang/String; + aastore +/Input: + iconst_2 + iconst_3 + multianewarray [[C 2 + astore_1 + aload_1 + iconst_0 + aaload + astore_2 + aload_1 + iconst_1 + aaload + astore_3 + +<()V + astore 0 + aload 0 + monitorenter + monitorexit + new java/lang/RuntimeException + dup + invokespecial java/lang/RuntimeException/()V + athrow + aconst_null +/try: + dup + aconst_null + if_acmpeq $+3 + areturn +catch: + jsr $+10 + aload_0 + dup + aconst_null + if_acmpne /try + areturn + astore_1 + aload_0 + ldc 10 + jsr_w finally 
+ ret 1 +'single\u0020quoted\u0020label': ; Messes up [@ below if lexed sloppily +.end method + +.method varargs private static int()I + .annotation invisible HelloWorld + [@ [@ WhatIsThis??? = .annotation ; name, type, exttype + I I = 1 ; name, type + another-I I = 2 + Enum e Ljava/util/logging/Level; = FINE + .end annotation + .annotation + s s = "foo" + another-s s = "bar" + Enum [e Ljava/util/logging/Level; = FINE FINE 'FINE' FINE + .end annotation + float F = 123.456 + .end annotation + .annotation visibleparam 1 LHelloWorld; + x [I = 0x01 0x02 0x03 + y I = 2 + .end annotation + .annotation invisibleparam 255 HelloWorld + a F = 1.2 + b D = 3.4 + .end annotation + .annotation default + I = 0 + .end annotation + .limit locals 4 + .limit stack 20 + iconst_1 + newarray int + dup + dup + instanceof [Z + bipush 0x9 + bipush 0xB + iand + iconst_5 + iconst_4 + dup_x1 + iconst_m1 + iadd + bipush +-111 + ineg + swap + idiv + dup_x2 + dup + ishr + ishl + imul + ior + bipush -73 + ixor + isub + dup + iconst_1 + iadd + irem + iastore + iconst_0 + iaload + istore_0 + iload_0 + istore_1 + iload_1 + istore_2 + iload_2 + istore_3 + iload_3 + dup + dup + dup2_x1 + if_icmpeq $+33 + dup + dup + if_icmpge $+28 + dup + dup + if_icmple $+23 + dup + ifle $+19 + dup + ifeq $+15 + dup + iflt $+11 + dup + ifgt $+7 + dup + ifge $+3 + ireturn +.end method + +.method static private fpstrict double()D + .limit locals 7 + .limit stack 11 + dconst_1 + dconst_0 + dcmpg + newarray double + dup + dconst_0 + dup2 + dcmpl + ldc2_w 128. + ldc2_w -240.221d + dneg + ldc2_w 158.d + dup2 + dadd + dup2_x2 + drem + ddiv + pop2 + dconst_1 + dmul + d2f + f2d + d2l + l2i + iconst_2 + iushr + i2d + dastore + iconst_0 + daload + dstore_0 + dload_0 + dstore_1 + dload_1 + dstore_2 + dload_2 + dstore_3 + dload_3 + dstore 4 + dload 4 + dstore_w 5 + dload_w 5 + dreturn +.end method + +.method static long()J + .limit locals 7 + .limit stack 11 + iconst_1 + newarray long + dup + iconst_0 + ldc2_w 5718613688 + ldc2_w 3143486100 + ldc2_w 0x3 + ldiv + lmul + ldc2_w -10000000000 + lrem + ldc_w 0x60 + i2l + lor + ldc 0x33 + i2l + land + dup2 + iconst_1 + lshl + iconst_3 + lshr + iconst_3 + lushr + ladd + l2d + d2l + l2f + f2l + lastore + iconst_0 + laload + lstore_0 + lload_0 + lstore_1 + lload_1 + lstore_2 + lload_2 + lstore_3 + lload_3 + lstore 4 + lload 4 + lstore_w 5 + lload_w 5 + lreturn +.end method + +.method private static float(F)F + .limit locals 6 + .limit stack 9 + iconst_1 + newarray float + dup + fload_0 + dup + fcmpg + fload_0 + dup + dup + dup + dup2_x2 + fadd + fsub + fneg + frem + ldc 70 + i2f + fadd + fadd + swap + pop + fastore + fload_0 + dup + fcmpl + faload + fstore_0 + fload_0 + fstore_1 + fload_1 + fstore_2 + fload_2 + fstore_3 + fload_3 + fstore 4 + fload 4 + fstore_w 5 + fload_w 5 + freturn +.end method + +.method abstract bridge synthetic 'acc1()V' + breakpoint +.end method + +.method native synchronized acc2()V +.end method diff --git a/vendor/pygments/tests/examplefiles/example.java b/vendor/pygments/tests/examplefiles/example.java new file mode 100644 index 0000000..78f9d72 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.java @@ -0,0 +1,16 @@ +class _PostUnico$deClassá +{void fo$o() {} + + void PostUnicodeFunctioná() { + láb$el: + break láb$el; + + } +} + +class áPreUnicode$Class +{ + public int $foo; + public int á$foo; + _PostUnico$deClassá áPreUnicodeFunction() { return null; } +} diff --git a/vendor/pygments/tests/examplefiles/example.jcl b/vendor/pygments/tests/examplefiles/example.jcl new file 
mode 100644 index 0000000..18d4ae3 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.jcl @@ -0,0 +1,31 @@ +//IS198CPY JOB (PYGM-TEST-001),'PYGMENTS TEST JOB', +// CLASS=L,MSGCLASS=X,TIME=(00,10) +//* Copy 'OLDFILE' to 'NEWFILE'. +//COPY01 EXEC PGM=IEBGENER +//SYSPRINT DD SYSOUT=* +//SYSUT1 DD DSN=OLDFILE,DISP=SHR +//SYSUT2 DD DSN=NEWFILE, +// DISP=(NEW,CATLG,DELETE), +// SPACE=(CYL,(40,5),RLSE), Some comment +// DCB=(LRECL=115,BLKSIZE=1150) +//SYSIN DD DUMMY +/* +//* Test line continuation in strings. +//CONT01 EXEC PGM=IEFBR14,PARM='THIS IS A LONG PARAMETER WITHIN APOST +// ROPHES, CONTINUED IN COLUMN 15 OF THE NEXT RECORD' +//* Sort a couple of lines and show the result in the job log. +//SORT01 EXEC PGM=IEFBR14 +//SORTIN DD * +spam +eggs +ham +/* +//SORTOUT DD SYSOUT=* +/* +//* Test line continuation with comment at end of line continued by a +//* character at column 72 (in this case 'X'). +//STP4 EXEC PROC=BILLING,COND.PAID=((20,LT),EVEN), +// COND.LATE=(60,GT,FIND), +// COND.BILL=((20,GE),(30,LT,CHGE)) THIS STATEMENT CALLS THE X +// BILLING PROCEDURE AND SPECIFIES RETURN CODE TESTS FOR THREEX +// PROCEDURE STEPS. diff --git a/vendor/pygments/tests/examplefiles/example.jsonld b/vendor/pygments/tests/examplefiles/example.jsonld new file mode 100644 index 0000000..48787d7 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.jsonld @@ -0,0 +1,27 @@ +{ + "@context": { + "schema": "http://schema.org/", + "name": "schema:name", + "body": "schema:articleBody", + "words": "schema:wordCount", + "post": { + "@id": "schema:blogPost", + "@container": "@index" + } + }, + "@id": "http://example.com/", + "@type": "schema:Blog", + "name": "World Financial News", + "post": { + "en": { + "@id": "http://example.com/posts/1/en", + "body": "World commodities were up today with heavy trading of crude oil...", + "words": 1539 + }, + "de": { + "@id": "http://example.com/posts/1/de", + "body": "Die Werte an Warenbörsen stiegen im Sog eines starken Handels von Rohöl...", + "words": 1204 + } + } +} diff --git a/vendor/pygments/tests/examplefiles/example.kal b/vendor/pygments/tests/examplefiles/example.kal new file mode 100644 index 0000000..c05c14c --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.kal @@ -0,0 +1,75 @@ +#!/usr/bin/env kal + +# This demo executes GET requests in parallel and in series +# using `for` loops and `wait for` statements. + +# Notice how the serial GET requests always return in order +# and take longer in total. Parallel requests come back in +# order of receipt. + +http = require 'http' + +urls = ['http://www.google.com' + 'http://www.apple.com' + 'http://www.microsoft.com' + 'http://www.nodejs.org' + 'http://www.yahoo.com'] + +# This function does a GET request for each URL in series +# It will wait for a response from each request before moving on +# to the next request. Notice the output will be in the same order as the +# urls variable every time regardless of response time. 
+# It is a task rather than a function because it is called asynchronously +# This allows us to use `return` to implicitly call back +task series_demo() + # The `series` keyword is optional here (for loops are serial by default) + total_time = 0 + + for series url in urls + timer = new Date + + # we use the `safe` keyword because get is a "nonstandard" task + # that does not call back with an error argument + safe wait for response from http.get url + + delay = new Date() - timer + total_time += delay + + print "GET #{url} - #{response.statusCode} - #{response.connection.bytesRead} bytes - #{delay} ms" + + # because we are in a task rather than a function, this actually exectutes a callback + return total_time + +# This function does a GET request for each URL in parallel +# It will NOT wait for a response from each request before moving on +# to the next request. Notice the output will be determined by the order in which +# the requests complete! +task parallel_demo() + total_time = 0 + + # The `parallel` keyword is only meaningful here because the loop contains + # a `wait for` statement (meaning callbacks are used) + for parallel url in urls + timer = new Date + + # we use the `safe` keyword because get is a "nonstandard" task + # that does not call back with an error argument + safe wait for response from http.get url + + delay = new Date() - timer + total_time += delay + + print "GET #{url} - #{response.statusCode} - #{response.connection.bytesRead} bytes - #{delay}ms" + + # because we are in a task rather than a function, this actually exectutes a callback + return total_time + +print 'Series Requests...' +wait for time1 from series_demo() +print "Total duration #{time1}ms" + +print '' + +print 'Parallel Requests...' +wait for time2 from parallel_demo() +print "Total duration #{time2}ms" diff --git a/vendor/pygments/tests/examplefiles/example.lagda b/vendor/pygments/tests/examplefiles/example.lagda new file mode 100644 index 0000000..b5476fa --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.lagda @@ -0,0 +1,19 @@ +\documentclass{article} +% this is a LaTeX comment +\usepackage{agda} + +\begin{document} + +Here's how you can define \emph{RGB} colors in Agda: + +\begin{code} +module example where + +open import Data.Fin +open import Data.Nat + +data Color : Set where + RGB : Fin 256 → Fin 256 → Fin 256 → Color +\end{code} + +\end{document} \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.liquid b/vendor/pygments/tests/examplefiles/example.liquid new file mode 100644 index 0000000..8f3ea9e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.liquid @@ -0,0 +1,42 @@ +# This is an example file. Process it with `./pygmentize -O full -f html -o /liquid-example.html example.liquid`. + +{% raw %} +some {{raw}} liquid syntax + +{% raw %} +{% endraw %} + +Just regular text - what happens? + +{% comment %}My lovely {{comment}} {% comment %}{% endcomment %} + +{% custom_tag params: true %} +{% custom_block my="abc" c = false %} + Just usual {{liquid}}. 
+{% endcustom_block %} + +{% another_tag "my string param" %} + +{{ variable | upcase }} +{{ var.field | textilize | markdownify }} +{{ var.field.property | textilize | markdownify }} +{{ 'string' | truncate: 100 param='df"g' }} + +{% cycle '1', 2, var %} +{% cycle 'group1': '1', var, 2 %} +{% cycle group2: '1', var, 2 %} + +{% if a == 'B' %} +{% elsif a == 'C%}' %} +{% else %} +{% endif %} + +{% unless not a %} +{% else %} +{% endunless %} + +{% case a %} +{% when 'B' %} +{% when 'C' %} +{% else %} +{% endcase %} \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.ma b/vendor/pygments/tests/examplefiles/example.ma new file mode 100644 index 0000000..a8119ea --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.ma @@ -0,0 +1,8 @@ +1 + 1 (* This is a comment *) +Global` +SomeNamespace`Foo +f[x_, y__, 3, z___] := tsneirsnteintie "fosrt" neisnrteiasrn +E + 3 +Plus[1,Times[2,3]] +Map[#1 + #2&, SomePairList] +Plus[1.,-1,-1.,-1.0,] \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.mac b/vendor/pygments/tests/examplefiles/example.mac new file mode 100644 index 0000000..1c3831d --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.mac @@ -0,0 +1,6 @@ +* Example Easytrieve macro declaration. For an example on calling this +* macro, see example.ezt. +MACRO FILENAME PREFIX +&FILENAME. +&PREFIX.-LINE 1 80 A +&PREFIX.-KEY 1 8 A diff --git a/vendor/pygments/tests/examplefiles/example.mq4 b/vendor/pygments/tests/examplefiles/example.mq4 new file mode 100644 index 0000000..54a5fa6 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.mq4 @@ -0,0 +1,187 @@ +//+------------------------------------------------------------------+ +//| PeriodConverter.mq4 | +//| Copyright 2006-2014, MetaQuotes Software Corp. | +//| http://www.metaquotes.net | +//+------------------------------------------------------------------+ +#property copyright "2006-2014, MetaQuotes Software Corp." 
+#property link "http://www.mql4.com" +#property description "Period Converter to updated format of history base" +#property strict +#property show_inputs +#include + +input int InpPeriodMultiplier=3; // Period multiplier factor +int ExtHandle=-1; +//+------------------------------------------------------------------+ +//| script program start function | +//+------------------------------------------------------------------+ +void OnStart() + { + datetime time0; + ulong last_fpos=0; + long last_volume=0; + int i,start_pos,periodseconds; + int hwnd=0,cnt=0; +//---- History header + int file_version=401; + string c_copyright; + string c_symbol=Symbol(); + int i_period=Period()*InpPeriodMultiplier; + int i_digits=Digits; + int i_unused[13]; + MqlRates rate; +//--- + ExtHandle=FileOpenHistory(c_symbol+(string)i_period+".hst",FILE_BIN|FILE_WRITE|FILE_SHARE_WRITE|FILE_SHARE_READ|FILE_ANSI); + if(ExtHandle<0) + return; + c_copyright="(C)opyright 2003, MetaQuotes Software Corp."; + ArrayInitialize(i_unused,0); +//--- write history file header + FileWriteInteger(ExtHandle,file_version,LONG_VALUE); + FileWriteString(ExtHandle,c_copyright,64); + FileWriteString(ExtHandle,c_symbol,12); + FileWriteInteger(ExtHandle,i_period,LONG_VALUE); + FileWriteInteger(ExtHandle,i_digits,LONG_VALUE); + FileWriteInteger(ExtHandle,0,LONG_VALUE); + FileWriteInteger(ExtHandle,0,LONG_VALUE); + FileWriteArray(ExtHandle,i_unused,0,13); +//--- write history file + periodseconds=i_period*60; + start_pos=Bars-1; + rate.open=Open[start_pos]; + rate.low=Low[start_pos]; + rate.high=High[start_pos]; + rate.tick_volume=(long)Volume[start_pos]; + rate.spread=0; + rate.real_volume=0; + //--- normalize open time + rate.time=Time[start_pos]/periodseconds; + rate.time*=periodseconds; + for(i=start_pos-1; i>=0; i--) + { + if(IsStopped()) + break; + time0=Time[i]; + //--- history may be updated + if(i==0) + { + //--- modify index if history was updated + if(RefreshRates()) + i=iBarShift(NULL,0,time0); + } + //--- + if(time0>=rate.time+periodseconds || i==0) + { + if(i==0 && time0Low[0]) + rate.low=Low[0]; + if(rate.high=rate.time+periodseconds) + { + rate.time=time0/periodseconds; + rate.time*=periodseconds; + rate.open=Open[i]; + rate.low=Low[i]; + rate.high=High[i]; + rate.close=Close[i]; + rate.tick_volume=last_volume; + } + } + else + { + rate.tick_volume+=(long)Volume[i]; + if(rate.low>Low[i]) + rate.low=Low[i]; + if(rate.highLow[0]) + rate.low=Low[0]; + if(rate.highLow[1]) + rate.low=Low[1]; + if(rate.high=2) + { + PostMessageA(hwnd,WM_COMMAND,33324,0); + last_time=cur_time; + } + } + Sleep(50); + } +//--- + } +//+------------------------------------------------------------------+ +//| | +//+------------------------------------------------------------------+ +void OnDeinit(const int reason) + { +//--- + if(ExtHandle>=0) + { + FileClose(ExtHandle); + ExtHandle=-1; + } +//--- + } +//+------------------------------------------------------------------+ \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.mqh b/vendor/pygments/tests/examplefiles/example.mqh new file mode 100644 index 0000000..ee80ed5 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.mqh @@ -0,0 +1,123 @@ +//+------------------------------------------------------------------+ +//| Array.mqh | +//| Copyright 2009-2013, MetaQuotes Software Corp. 
| +//| http://www.mql4.com | +//+------------------------------------------------------------------+ +#include +//+------------------------------------------------------------------+ +//| Class CArray | +//| Purpose: Base class of dynamic arrays. | +//| Derives from class CObject. | +//+------------------------------------------------------------------+ +class CArray : public CObject + { +protected: + int m_step_resize; // increment size of the array + int m_data_total; // number of elements + int m_data_max; // maximmum size of the array without memory reallocation + int m_sort_mode; // mode of array sorting + +public: + CArray(void); + ~CArray(void); + //--- methods of access to protected data + int Step(void) const { return(m_step_resize); } + bool Step(const int step); + int Total(void) const { return(m_data_total); } + int Available(void) const { return(m_data_max-m_data_total); } + int Max(void) const { return(m_data_max); } + bool IsSorted(const int mode=0) const { return(m_sort_mode==mode); } + int SortMode(void) const { return(m_sort_mode); } + //--- cleaning method + void Clear(void) { m_data_total=0; } + //--- methods for working with files + virtual bool Save(const int file_handle); + virtual bool Load(const int file_handle); + //--- sorting method + void Sort(const int mode=0); + +protected: + virtual void QuickSort(int beg,int end,const int mode=0) { } + }; +//+------------------------------------------------------------------+ +//| Constructor | +//+------------------------------------------------------------------+ +CArray::CArray(void) : m_step_resize(16), + m_data_total(0), + m_data_max(0), + m_sort_mode(-1) + { + } +//+------------------------------------------------------------------+ +//| Destructor | +//+------------------------------------------------------------------+ +CArray::~CArray(void) + { + } +//+------------------------------------------------------------------+ +//| Method Set for variable m_step_resize | +//+------------------------------------------------------------------+ +bool CArray::Step(const int step) + { +//--- check + if(step>0) + { + m_step_resize=step; + return(true); + } +//--- failure + return(false); + } +//+------------------------------------------------------------------+ +//| Sorting an array in ascending order | +//+------------------------------------------------------------------+ +void CArray::Sort(const int mode) + { +//--- check + if(IsSorted(mode)) + return; + m_sort_mode=mode; + if(m_data_total<=1) + return; +//--- sort + QuickSort(0,m_data_total-1,mode); + } +//+------------------------------------------------------------------+ +//| Writing header of array to file | +//+------------------------------------------------------------------+ +bool CArray::Save(const int file_handle) + { +//--- check handle + if(file_handle!=INVALID_HANDLE) + { + //--- write start marker - 0xFFFFFFFFFFFFFFFF + if(FileWriteLong(file_handle,-1)==sizeof(long)) + { + //--- write array type + if(FileWriteInteger(file_handle,Type(),INT_VALUE)==INT_VALUE) + return(true); + } + } +//--- failure + return(false); + } +//+------------------------------------------------------------------+ +//| Reading header of array from file | +//+------------------------------------------------------------------+ +bool CArray::Load(const int file_handle) + { +//--- check handle + if(file_handle!=INVALID_HANDLE) + { + //--- read and check start marker - 0xFFFFFFFFFFFFFFFF + if(FileReadLong(file_handle)==-1) + { + //--- read and check array type + 
if(FileReadInteger(file_handle,INT_VALUE)==Type()) + return(true); + } + } +//--- failure + return(false); + } +//+------------------------------------------------------------------+ diff --git a/vendor/pygments/tests/examplefiles/example.ni b/vendor/pygments/tests/examplefiles/example.ni new file mode 100644 index 0000000..32279e8 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.ni @@ -0,0 +1,57 @@ + | | | +"Informal by Nature" +[ * * * ] +by +[ * * * ] +David Corbett + +[This is a [nested] comment.] + +Section 1 - Use option translation + +Use maximum tests of at least 100 translates as (- +@c +Constant MAX_TESTS = {N}; —). | Section 2 + +A room has a number called size. + +The Kitchen is a room. "A nondescript kitchen.“ The Kitchen has size 2. + +When play begins: + say "Testing:[line break]"; + test 0. + +To test (N — number): (— + if (Test({N}) == (+size of the Kitchen [this should succeed]+)) {-open—brace} + print ”Success.^”; + {-close-brace} else { + print “Failure.^"; + } +]; ! You shouldn't end a routine within a phrase definition, but it works. +[ Unused; + #Include "\ +@p \ +"; ! At signs hold no power here. +! Of course, the file "@p .h" must exist. +-). + +Include (-!% This is not ICL. + +[ Test x; + if (x) {x++;} + {–! Single line comment.} +@inc x; +@p At signs. +... +@Purpose: ... +... +@-... +@c ... +@inc x; +@c +@c + return x; +]; +@Purpose: ... +@------------------------------------------------------------------------------- +-). diff --git a/vendor/pygments/tests/examplefiles/example.nix b/vendor/pygments/tests/examplefiles/example.nix new file mode 100644 index 0000000..515b686 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.nix @@ -0,0 +1,80 @@ +{ stdenv, fetchurl, fetchgit, openssl, zlib, pcre, libxml2, libxslt, expat +, rtmp ? false +, fullWebDAV ? false +, syslog ? false +, moreheaders ? 
false, ...}: + +let + version = "1.4.4"; + mainSrc = fetchurl { + url = "http://nginx.org/download/nginx-${version}.tar.gz"; + sha256 = "1f82845mpgmhvm151fhn2cnqjggw9w7cvsqbva9rb320wmc9m63w"; + }; + + rtmp-ext = fetchgit { + url = git://github.com/arut/nginx-rtmp-module.git; + rev = "1cfb7aeb582789f3b15a03da5b662d1811e2a3f1"; + sha256 = "03ikfd2l8mzsjwx896l07rdrw5jn7jjfdiyl572yb9jfrnk48fwi"; + }; + + dav-ext = fetchgit { + url = git://github.com/arut/nginx-dav-ext-module.git; + rev = "54cebc1f21fc13391aae692c6cce672fa7986f9d"; + sha256 = "1dvpq1fg5rslnl05z8jc39sgnvh3akam9qxfl033akpczq1bh8nq"; + }; + + syslog-ext = fetchgit { + url = https://github.com/yaoweibin/nginx_syslog_patch.git; + rev = "165affd9741f0e30c4c8225da5e487d33832aca3"; + sha256 = "14dkkafjnbapp6jnvrjg9ip46j00cr8pqc2g7374z9aj7hrvdvhs"; + }; + + moreheaders-ext = fetchgit { + url = https://github.com/agentzh/headers-more-nginx-module.git; + rev = "refs/tags/v0.23"; + sha256 = "12pbjgsxnvcf2ff2i2qdn39q4cm5czlgrng96j8ml4cgxvnbdh39"; + }; +in + +stdenv.mkDerivation rec { + name = "nginx-${version}"; + src = mainSrc; + + buildInputs = [ openssl zlib pcre libxml2 libxslt + ] ++ stdenv.lib.optional fullWebDAV expat; + + patches = if syslog then [ "${syslog-ext}/syslog_1.4.0.patch" ] else []; + + configureFlags = [ + "--with-http_ssl_module" + "--with-http_spdy_module" + "--with-http_xslt_module" + "--with-http_sub_module" + "--with-http_dav_module" + "--with-http_gzip_static_module" + "--with-http_secure_link_module" + "--with-ipv6" + # Install destination problems + # "--with-http_perl_module" + ] ++ stdenv.lib.optional rtmp "--add-module=${rtmp-ext}" + ++ stdenv.lib.optional fullWebDAV "--add-module=${dav-ext}" + ++ stdenv.lib.optional syslog "--add-module=${syslog-ext}" + ++ stdenv.lib.optional moreheaders "--add-module=${moreheaders-ext}"; + + preConfigure = '' + export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -I${libxml2 }/include/libxml2" + ''; + + # escape example + postInstall = '' + mv $out/sbin $out/bin ''' ''${ + ${ if true then ${ "" } else false } + ''; + + meta = { + description = "A reverse proxy and lightweight webserver"; + maintainers = [ stdenv.lib.maintainers.raskin]; + platforms = stdenv.lib.platforms.all; + inherit version; + }; +} diff --git a/vendor/pygments/tests/examplefiles/example.pcmk b/vendor/pygments/tests/examplefiles/example.pcmk new file mode 100644 index 0000000..22cc60e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.pcmk @@ -0,0 +1,115 @@ +node 167906355: sle12-a +node 167906357: sle12-c \ + description="The second node" \ + utilization memory=64 +node node1 \ + attributes mem=16G +node node2 utilization cpu=4 +primitive st stonith:ssh \ + params hostlist="node1 node2" \ + meta target-role="Started" \ + op start requires=nothing timeout=60s \ + op monitor interval=60m timeout=60s +primitive d1 ocf:pacemaker:Dummy \ + operations $id=d1-ops \ + op monitor interval=60m \ + op monitor interval=120m OCF_CHECK_LEVEL=10 +primitive fs1 Filesystem \ + params device="/dev/nfs-vg/fs1" directory="/srv/nfs" fstype=ext3 \ + op monitor interval=10s +primitive nfs-server nfsserver \ + params nfs_shared_infodir="/srv/nfs/state" nfs_ip=10.2.12.100 \ + op monitor interval=0 trace_ra=1 +primitive nfs-vg LVM \ + params volgrpname=nfs-vg +primitive p_drbd_nfs ocf:linbit:drbd \ + params drbd_resource=nfs \ + op monitor interval=15 role=Master \ + op monitor interval=30 role=Slave \ + op start interval=0 timeout=300 \ + op stop interval=0 timeout=120 +primitive s-libvirt stonith:external/libvirt \ + 
params hostlist="sle12-a sle12-c" hypervisor_uri="qemu+ssh://hex-10.suse.de/system?keyfile=/root/.ssh/xen" reset_method=reboot \ + op monitor interval=5m timeout=60s +primitive virtual-ip IPaddr2 \ + params ip=10.2.12.100 +primitive xen0 @vm_scheme1 xmfile=/etc/xen/vm/xen0 +primitive d7 Dummy \ + params rule inf: #uname eq node1 fake=1 \ + params rule inf: #uname eq node2 fake=2 +primitive very-primitive Dummy \ + params 3: rule #uname eq node1 interface=eth1 \ + params 2: rule #uname string:eq node2 interface=eth2 port=8888 \ + params 1: interface=eth0 port=9999 \ + operations $id-ref=those_other_ops +fencing_topology poison-pill power +fencing_topology \ + node-a: poison-pill power \ + node-b: ipmi serial +role nfs_admin \ + write meta:nfs-server:target-role \ + write meta:nfs-server:is-managed \ + write location:nfs-server \ + read ref:nfs-server +role basic-read \ + read status \ + read type:node attribute:uname \ + read type:node attribute:type \ + read property +role basic-read-basic \ + read cib +role d0-admin \ + write meta:d0:target-role \ + write meta:d0:is-managed \ + read xpath:"//nodes//attributes" \ + read ref:d0 +acl_target joe \ + nfs_admin +tag nfs: nfs-server nfs-vg +group nfs-disk nfs-vg fs1 +group nfs-srv virtual-ip nfs-server +ms ms_drbd_nfs p_drbd_nfs \ + meta notify=true clone-max=2 +location nfs-pref virtual-ip 100: sle12-a +location l1 nfs-srv 100: node1 +location l2 d1 \ + rule 100: #uname eq node1 +location l3 d1 \ + rule inf: #uname eq node1 and pingd gt 0 +location l4 d1 \ + rule -inf: not_defined pingd or pingd lte 0 +location l5 fs1 \ + rule -inf: not_defined pingd or pingd lte 0 \ + rule #uname eq node1 and pingd gt 0 \ + rule date lt 2009-05-26 and date in start=2009-05-26 end=2009-07-26 and date in start=2009-05-26 years=2009 and date spec years=2009 hours=09-17 +location l6 d1 \ + rule $id-ref=l2-rule1 +location l7 d1 \ + rule $id-ref=l2 +colocation c-nfs inf: nfs-server fs1 +colocation vg-with-drbd inf: nfs-vg ms_drbd_nfs:Master +# drbd device is the nfs-vg PV +order drbd-before-vg inf: ms_drbd_nfs:promote nfs-vg:start +# need fs1 for the NFS server +order o-nfs inf: fs1 nfs-server +rsc_ticket ticket-A_m6 ticket-A: d1 +rsc_ticket ticket-B_m6_m5 ticket-B: d1 d7 loss-policy=fence +rsc_ticket ticket-C_master ticket-C: d1 ms_drbd_nfs:Master loss-policy=fence +property cpset2: \ + maintenance-mode=true +property cib-bootstrap-options: \ + dc-version=1.1.12-ad083a8 \ + cluster-infrastructure=corosync \ + cluster-name=sle12-test3l-public \ + no-quorum-policy=ignore \ + startup-fencing=false \ + last-lrm-refresh=1415877622 \ + maintenance-mode=false +op_defaults op-options: \ + timeout=120s +rsc_defaults rsc-options: \ + failure-timeout=10m +op_defaults opsdef2: \ + rule 100: #uname eq node1 \ + record-pending=true +tag t1: d1 d7 opsdef2 diff --git a/vendor/pygments/tests/examplefiles/example.pp b/vendor/pygments/tests/examplefiles/example.pp new file mode 100644 index 0000000..ea697be --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.pp @@ -0,0 +1,8 @@ +exec { 'grep': + command => 'grep "\'" -rI *', + path => '/bin:/usr/bin', +} + +node default { + notify {"Hello World":;} +} diff --git a/vendor/pygments/tests/examplefiles/example.praat b/vendor/pygments/tests/examplefiles/example.praat new file mode 100644 index 0000000..bf2d005 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.praat @@ -0,0 +1,245 @@ +form Highlighter test + sentence Blank + sentence My_sentence This should all be a string + text My_text This should also all be a 
string + word My_word Only the first word is a string, the rest is invalid + boolean Binary 1 + boolean Text no + boolean Quoted "yes" + comment This should be a string + real left_Range -123.6 + positive right_Range_max 3.3 + integer Int 4 + natural Nat 4 +endform + +# External scripts +include /path/to/file +runScript: "/path/to/file" +execute /path/to/file + +# Predefined variables +a = praatVersion +a = e +a = pi +a$ = homeDirectory$ + tab$ + newline$ +a$ = temporaryDirectory$ +a$ = praatVersion$ +a$ = shellDirectory$ +a$ = homeDirectory$ +a$ = preferencesDirectory$ +a$ = defaultDirectory$ +nocheck selectObject: undefined + +# Arrays are not comments +a# = zero# (5, 6) +a [3], 5 = 7 +printline 'a[3,5]', 'a[3]' +a [1] = 2 +b [a [1]] = 3 +assert b [a [1]] = 3 +printline 'b[2]' + +# if-block with built-in variables +if windows + # We are on Windows +elsif unix = 1 or !macintosh + exitScript: "We are on Linux" +else macintosh == 1 + exit We are on Mac +endif + +string$ = "Strings can be 'interpolated'" +string$ = "But don't interpolate everything!" + +Text... 1 Right 0.2 Half many----hyphens +Text... 1 Right -0.4 Bottom aحبيبa +Text... 1 Right -0.6 Bottom 日本 +Draw circle (mm)... 0.5 0.5 i +x=1 + +rows = Object_'table'.nrow +value$ = Table_'table'$[25, "f0"] +fixed = Sound_10.xmin +fixed = Object_foo.xmin +fixed = Procrustes_foo.nx + +# old-style procedure call +call oldStyle "quoted" 2 unquoted string +assert oldStyle.local = 1 + +# New-style procedure call with parens +@newStyle("quoted", 2, "quoted string") +if praatVersion >= 5364 + # New-style procedure call with colon + @newStyle: "quoted", 2, "quoted string" +endif + +# inline if with inline comment +var = if macintosh = 1 then 0 else 1 fi ; This is an inline comment + +# for-loop with explicit from using local variable +# and paren-style function calls and variable interpolation +n = numberOfSelected("Sound") +for i from newStyle.local to n + name = selected$(extractWord$(selected$(), " ")) + sound'i' = selected("Sound", i) + sound[i] = sound'i' +endfor + +for i from 1 to n + # Different styles of object selection + select sound'i' + sound = selected() + sound$ = selected$("Sound") + select Sound 'sound$' + selectObject(sound[i]) + selectObject: sound + + # Pause commands + beginPause("Viewing " + sound$) + if i > 1 + button = endPause("Stop", "Previous", + ...if i = total_sounds then "Finish" else "Next" fi, + ...3, 1) + else + button = endPause("Stop", + ...if i = total_sounds then "Finish" else "Next" fi, + ...2, 1) + endif + editor_name$ = if total_textgrids then "TextGrid " else "Sound " fi + name$ + nocheck editor Sound 'editor_name$' + nocheck Close + nocheck endeditor + editor_id = editor: editor_name$ + Close + endeditor + + # New-style standalone command call + Rename: "SomeName" + + # Command call with assignment + duration = Get total duration + + # Multi-line command with modifier + pitch = noprogress To Pitch (ac): 0, 75, 15, "no", + ...0.03, 0.45, 0.01, 0.35, 0.14, 600 + + # do-style command with assignment + minimum = do("Get minimum...", 0, 0, "Hertz", "Parabolic") + + # New-style multi-line command call with broken strings + table = Create Table with column names: "table", 0, + ..."file subject speaker + ...f0 f1 f2 f3 " + + ..."duration response" + + # Function call with trailing space + removeObject: pitch, table + + # Picture window commands + selectObject: sound + # do-style command + do("Select inner viewport...", 1, 6, 0.5, 1.5) + Black + Draw... 
0 0 0 0 "no" Curve + Draw inner box + Text bottom: "yes", sound$ + Erase all + + # Demo window commands + demo Erase all + demo Select inner viewport... 0 100 0 100 + demo Axes... 0 100 0 100 + demo Paint rectangle... white 0 100 0 100 + demo Text... 50 centre 50 half Click to finish + demoWaitForInput ( ) + demo Erase all + demo Text: 50, "centre", 50, "half", "Finished" +endfor + +switch$ = if switch == 1 then "a" else + ... if switch == 2 then "b" else + ... if switch == 3 then "c" else + ... if switch == 4 then "d" else + ... "default" fi fi fi fi + +# An old-style sendpraat block +# All these lines should be a string! +sendpraat Praat + ...'newline$' Create Sound as pure tone... "tone" 1 0 0.4 44100 440 0.2 0.01 0.01 + ...'newline$' Play + ...'newline$' Remove + +# A new-style sendpraat block +beginSendPraat: "Praat" + Create Sound as pure tone: "tone", 1, 0, 0.4, 44100, 440, 0.2, 0.01, 0.01 + duration = Get total duration + Remove +endSendPraat: "duration" +appendInfoLine: "The generated sound lasted for ", duration, "seconds" + +# Number types +a = 10% +a = -10 +a = +10 +a = 10.4 +a = 294e12 +a = 2.94e12 + +# Operators +a = 2 ^ -6 +a = -(1+1)^6 +a = 4^3 ^ 2 +a = 54 div 5.1 +a = 54.3 mod 5 +a = 3 ** 8 - 7 +a = 3 / (8 + 7) +a = (7 * (3 + 5)) / ((2 + 3) - 1) + +# Logical operators +assert (a = b) and c +assert a == (b or c) +assert a <= b not c +assert a >= b !c +assert a != b & c +assert a != b && c +assert a <> b || c +assert a < b | c +assert a > b +assert "hello" = "he" + "llo" +assert "hello" == "hello world" - " world" + +stopwatch +time = stopwatch +clearinfo +echo This script took +print 'time' seconds to +printline execute. + +# Old-style procedure declaration +procedure oldStyle .str1$ .num .str2$ + .local = 1 +endproc + +# New-style procedure declaration with parentheses +procedure newStyle (.str1$, .num, .str2$) + # Command with "local" variable + .local = Get total duration + .local = Get 'some' duration + .local = Get 'some[1]' value... hello 10 p[i] + .local = Get 'some[1,3]' value: "hello", 10, 'p[i]' + .local = Get 'some$' duration + .local = Get 'some$[1]' duration +endproc + +# New-style procedure declaration with colon +procedure _new_style: .str1$, .num, .str2$ + # Command with "local" variable + # Initial underscores in variables not allowed (unless interpolated) + _new_style.local = Get total duration +endproc + +asserterror Unknown symbol:'newline$'« _ +assert '_new_style.local' + diff --git a/vendor/pygments/tests/examplefiles/example.red b/vendor/pygments/tests/examplefiles/example.red new file mode 100644 index 0000000..37c17ef --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.red @@ -0,0 +1,257 @@ +Red [ + Title: "Red console" + Author: ["Nenad Rakocevic" "Kaj de Vos"] + File: %console.red + Tabs: 4 + Rights: "Copyright (C) 2012-2013 Nenad Rakocevic. All rights reserved." + License: { + Distributed under the Boost Software License, Version 1.0. + See https://github.com/dockimbel/Red/blob/master/BSL-License.txt + } + Purpose: "Just some code for testing Pygments colorizer" + Language: http://www.red-lang.org/ +] + +#system-global [ + #either OS = 'Windows [ + #import [ + "kernel32.dll" stdcall [ + AttachConsole: "AttachConsole" [ + processID [integer!] + return: [integer!] + ] + SetConsoleTitle: "SetConsoleTitleA" [ + title [c-string!] + return: [integer!] + ] + ReadConsole: "ReadConsoleA" [ + consoleInput [integer!] + buffer [byte-ptr!] + charsToRead [integer!] + numberOfChars [int-ptr!] + inputControl [int-ptr!] + return: [integer!] 
+ ] + ] + ] + line-buffer-size: 16 * 1024 + line-buffer: allocate line-buffer-size + ][ + #switch OS [ + MacOSX [ + #define ReadLine-library "libreadline.dylib" + ] + #default [ + #define ReadLine-library "libreadline.so.6" + #define History-library "libhistory.so.6" + ] + ] + #import [ + ReadLine-library cdecl [ + read-line: "readline" [ ; Read a line from the console. + prompt [c-string!] + return: [c-string!] + ] + rl-bind-key: "rl_bind_key" [ + key [integer!] + command [integer!] + return: [integer!] + ] + rl-insert: "rl_insert" [ + count [integer!] + key [integer!] + return: [integer!] + ] + ] + #if OS <> 'MacOSX [ + History-library cdecl [ + add-history: "add_history" [ ; Add line to the history. + line [c-string!] + ] + ] + ] + ] + + rl-insert-wrapper: func [ + [cdecl] + count [integer!] + key [integer!] + return: [integer!] + ][ + rl-insert count key + ] + + ] +] + +Windows?: system/platform = 'Windows + +read-argument: routine [ + /local + args [str-array!] + str [red-string!] +][ + if system/args-count <> 2 [ + SET_RETURN(none-value) + exit + ] + args: system/args-list + 1 ;-- skip binary filename + str: simple-io/read-txt args/item + SET_RETURN(str) +] + +init-console: routine [ + str [string!] + /local + ret +][ + #either OS = 'Windows [ + ;ret: AttachConsole -1 + ;if zero? ret [print-line "ReadConsole failed!" halt] + + ret: SetConsoleTitle as c-string! string/rs-head str + if zero? ret [print-line "SetConsoleTitle failed!" halt] + ][ + rl-bind-key as-integer tab as-integer :rl-insert-wrapper + ] +] + +input: routine [ + prompt [string!] + /local + len ret str buffer line +][ + #either OS = 'Windows [ + len: 0 + print as c-string! string/rs-head prompt + ret: ReadConsole stdin line-buffer line-buffer-size :len null + if zero? ret [print-line "ReadConsole failed!" halt] + len: len + 1 + line-buffer/len: null-byte + str: string/load as c-string! line-buffer len + ][ + line: read-line as c-string! string/rs-head prompt + if line = null [halt] ; EOF + + #if OS <> 'MacOSX [add-history line] + + str: string/load line 1 + length? line +; free as byte-ptr! line + ] + SET_RETURN(str) +] + +count-delimiters: function [ + buffer [string!] + return: [block!] +][ + list: copy [0 0] + c: none + + foreach c buffer [ + case [ + escaped? [ + escaped?: no + ] + in-comment? [ + switch c [ + #"^/" [in-comment?: no] + ] + ] + 'else [ + switch c [ + #"^^" [escaped?: yes] + #";" [if zero? list/2 [in-comment?: yes]] + #"[" [list/1: list/1 + 1] + #"]" [list/1: list/1 - 1] + #"{" [list/2: list/2 + 1] + #"}" [list/2: list/2 - 1] + ] + ] + ] + ] + list +] + +do-console: function [][ + buffer: make string! 10000 + prompt: red-prompt: "red>> " + mode: 'mono + + switch-mode: [ + mode: case [ + cnt/1 > 0 ['block] + cnt/2 > 0 ['string] + 'else [ + prompt: red-prompt + do eval + 'mono + ] + ] + prompt: switch mode [ + block ["[^-"] + string ["{^-"] + mono [red-prompt] + ] + ] + + eval: [ + code: load/all buffer + + unless tail? code [ + set/any 'result do code + + unless unset? :result [ + if 67 = length? result: mold/part :result 67 [ ;-- optimized for width = 72 + clear back tail result + append result "..." + ] + print ["==" result] + ] + ] + clear buffer + ] + + while [true][ + unless tail? line: input prompt [ + append buffer line + cnt: count-delimiters buffer + + either Windows? 
[ + remove skip tail buffer -2 ;-- clear extra CR (Windows) + ][ + append buffer lf ;-- Unix + ] + + switch mode [ + block [if cnt/1 <= 0 [do switch-mode]] + string [if cnt/2 <= 0 [do switch-mode]] + mono [do either any [cnt/1 > 0 cnt/2 > 0][switch-mode][eval]] + ] + ] + ] +] + +q: :quit + +if script: read-argument [ + script: load script + either any [ + script/1 <> 'Red + not block? script/2 + ][ + print "*** Error: not a Red program!" + ][ + do skip script 2 + ] + quit +] + +init-console "Red Console" + +print { +-=== Red Console alpha version ===- +(only ASCII input supported) +} + +do-console \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.reds b/vendor/pygments/tests/examplefiles/example.reds new file mode 100644 index 0000000..eb92310 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.reds @@ -0,0 +1,150 @@ +Red/System [ + Title: "Red/System example file" + Purpose: "Just some code for testing Pygments colorizer" + Language: http://www.red-lang.org/ +] + +#include %../common/FPU-configuration.reds + +; C types + +#define time! long! +#define clock! long! + +date!: alias struct! [ + second [integer!] ; 0-61 (60?) + minute [integer!] ; 0-59 + hour [integer!] ; 0-23 + + day [integer!] ; 1-31 + month [integer!] ; 0-11 + year [integer!] ; Since 1900 + + weekday [integer!] ; 0-6 since Sunday + yearday [integer!] ; 0-365 + daylight-saving-time? [integer!] ; Negative: unknown +] + +#either OS = 'Windows [ + #define clocks-per-second 1000 +][ + ; CLOCKS_PER_SEC value for Syllable, Linux (XSI-conformant systems) + ; TODO: check for other systems + #define clocks-per-second 1000'000 +] + +#import [LIBC-file cdecl [ + + ; Error handling + + form-error: "strerror" [ ; Return error description. + code [integer!] + return: [c-string!] + ] + print-error: "perror" [ ; Print error to standard error output. + string [c-string!] + ] + + + ; Memory management + + make: "calloc" [ ; Allocate zero-filled memory. + chunks [size!] + size [size!] + return: [binary!] + ] + resize: "realloc" [ ; Resize memory allocation. + memory [binary!] + size [size!] + return: [binary!] + ] + ] + + JVM!: alias struct! [ + reserved0 [int-ptr!] + reserved1 [int-ptr!] + reserved2 [int-ptr!] + + DestroyJavaVM [function! [[JNICALL] vm [JVM-ptr!] return: [jint!]]] + AttachCurrentThread [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] args [byte-ptr!] return: [jint!]]] + DetachCurrentThread [function! [[JNICALL] vm [JVM-ptr!] return: [jint!]]] + GetEnv [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] version [integer!] return: [jint!]]] + AttachCurrentThreadAsDaemon [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] args [byte-ptr!] return: [jint!]]] +] + + ;just some datatypes for testing: + + #some-hash + 10-1-2013 + quit + + ;binary: + #{00FF0000} + #{00FF0000 FF000000} + #{00FF0000 FF000000} ;with tab instead of space + 2#{00001111} + 64#{/wAAAA==} + 64#{/wAAA A==} ;with space inside + 64#{/wAAA A==} ;with tab inside + + + ;string with char + {bla ^(ff) foo} + {bla ^(( foo} + ;some numbers: + 12 + 1'000 + 1.2 + FF00FF00h + + ;some tests of hexa number notation with not common ending + [ff00h ff00h] ff00h{} FFh"foo" 00h(1 + 2) (AEh) + +;normal words: +foo char + +;get-word +:foo + +;lit-word: +'foo 'foo + +;multiple comment tests... 
+1 + 1 +comment "aa" +2 + 2 +comment {aa} +3 + 3 +comment {a^{} +4 + 4 +comment {{}} +5 + 5 +comment { + foo: 6 +} +6 + 6 +comment [foo: 6] +7 + 7 +comment [foo: "[" ] +8 + 8 +comment [foo: {^{} ] +9 + 9 +comment [foo: {boo} ] +10 + 10 +comment 5-May-2014/11:17:34+2:00 +11 + 11 + + +to-integer foo +foo/(a + 1)/b + +call/output reform ['which interpreter] path: copy "" + + version-1.1: 00010001h + + #if type = 'exe [ + push system/stack/frame ;-- save previous frame pointer + system/stack/frame: system/stack/top ;-- @@ reposition frame pointer just after the catch flag +] +push CATCH_ALL ;-- exceptions root barrier +push 0 ;-- keep stack aligned on 64-bit \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example.rexx b/vendor/pygments/tests/examplefiles/example.rexx new file mode 100644 index 0000000..ec4da5a --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.rexx @@ -0,0 +1,50 @@ +/* REXX example. */ + +/* Some basic constructs. */ +almost_pi = 0.1415 + 3 +if almost_pi < 3 then + say 'huh?' +else do + say 'almost_pi=' almost_pi || " - ok" +end +x = '"' || "'" || '''' || """" /* quotes */ + +/* A comment + * spawning multiple + lines. /* / */ + +/* Built-in functions. */ +line = 'line containing some short text' +say WordPos(line, 'some') +say Word(line, 4) + +/* Labels and procedures. */ +some_label : + +divide: procedure + parse arg some other + return some / other + +call divide(5, 2) + +/* Loops */ +do i = 1 to 5 + do j = -3 to -9 by -3 + say i '+' j '=' i + j + end j +end i + +do forever + leave +end + +/* Print a text file on MVS. */ +ADDRESS TSO +"ALLOC F(TEXTFILE) DSN('some.text.dsn') SHR REU" +"EXECIO * DISKR TEXTFILE ( FINIS STEM LINES." +"FREE F(TEXTFILE)" +I = 1 +DO WHILE I <= LINES.0 + SAY ' LINE ' I ' : ' LINES.I + I = I + 1 +END diff --git a/vendor/pygments/tests/examplefiles/example.rkt b/vendor/pygments/tests/examplefiles/example.rkt index a3e4a29..acc0328 100644 --- a/vendor/pygments/tests/examplefiles/example.rkt +++ b/vendor/pygments/tests/examplefiles/example.rkt @@ -1,5 +1,7 @@ #lang racket +(require (only-in srfi/13 string-contains)) + ; Single-line comment style. ;; Single-line comment style. @@ -8,45 +10,259 @@ #| Multi-line comment style ... +#|### #| nested |#||| |# ... on multiple lines |# -(define (a-function x #:keyword [y 0]) +#;(s-expression comment (one line)) + +#; +(s-expression comment + (multiple lines)) + +#! shebang comment + +#!/shebang comment + +#! shebang \ +comment + +#!/shebang \ +comment + +;; Uncommented numbers after single-line comments +;; NEL…133 +;; LS
8232 +;; PS
8233 + +#reader racket +(define(a-function x #:keyword [y 0]) (define foo0 'symbol) ; () [define foo1 'symbol] ; [] {define foo2 'symbol} ; {} - (and (append (car '(1 2 3)))) + (define 100-Continue 'symbol) + (and (append (car'(1 2 3)))) (regexp-match? #rx"foobar" "foobar") - (regexp-match? #px"foobar" "foobar") - (define a 1)) - (let ([b "foo"]) - (displayln b)) + (regexp-match? #px"\"foo\\(bar\\)?\"" "foobar") + (regexp-match? #rx#"foobar" "foobar") + (regexp-match? #px#"foobar" "foobar") + (define #csa 1) + #Ci (let ([#%A|||b #true C +\|d "foo"]) + (displayln #cS #%\ab\ #true\ C\ +\\d||)) (for/list ([x (in-list (list 1 2 (list 3 4)))]) - (cond - [(pair? x) (car x)] - [else x]))) + (cond + [(pair? x) (car x)] + [else x]))) -;; Literal number examples +;; Literals (values ;; #b - #b1.1 - #b-1.1 - #b1e1 - #b0/1 - #b1/1 - #b1e-1 - #b101 - + #b1 + #b+1 + #b-1 + #b.1 + #b1. + #b0.1 + #b+0.1 + #b-0.1 + #b1/10 + #b+1/10 + #b-1/10 + #b1e11 + #b+1e11 + #b-1e11 + #b.1e11 + #b1.e11 + #b0.1e11 + #b+0.1e11 + #b-0.1e11 + #b1/10e11 + #b+1/10e11 + #b-1/10e11 + #b+i + #b1+i + #b+1+i + #b-1+i + #b.1+i + #b1.+i + #b0.1+i + #b+0.1+i + #b-0.1+i + #b1/10+i + #b+1/10+i + #b-1/10+i + #b1e11+i + #b+1e11+i + #b-1e11+i + #b1.e11+i + #b.1e11+i + #b0.1e11+i + #b+0.1e11+i + #b-0.1e11+i + #b1/10e11+i + #b+1/10e11+i + #b-1/10e11+i + #b+1i + #b1+1i + #b+1+1i + #b-1+1i + #b1.+1i + #b.1+1i + #b0.1+1i + #b+0.1+1i + #b-0.1+1i + #b1/10+1i + #b+1/10+1i + #b-1/10+1i + #b1e11+1i + #b+1e11+1i + #b-1e11+1i + #b.1e11+1i + #b0.1e11+1i + #b+0.1e11+1i + #b-0.1e11+1i + #b1/10e11+1i + #b+1/10e11+1i + #b-1/10e11+1i + #b+1/10e11i + #b1+1/10e11i + #b+1+1/10e11i + #b-1+1/10e11i + #b.1+1/10e11i + #b0.1+1/10e11i + #b+0.1+1/10e11i + #b-0.1+1/10e11i + #b1/10+1/10e11i + #b+1/10+1/10e11i + #b-1/10+1/10e11i + #b1e11+1/10e11i + #b+1e11+1/10e11i + #b-1e11+1/10e11i + #b.1e11+1/10e11i + #b0.1e11+1/10e11i + #b+0.1e11+1/10e11i + #b-0.1e11+1/10e11i + #b1/10e11+1/10e11i + #b+1/10e11+1/10e11i + #b-1/10e11+1/10e11i ;; #d - #d-1.23 - #d1.123 - #d1e3 - #d1e-22 - #d1/2 - #d-1/2 #d1 + #d+1 #d-1 - + #d.1 + #d1. + #d1.2 + #d+1.2 + #d-1.2 + #d1/2 + #d+1/2 + #d-1/2 + #d1e3 + #d+1e3 + #d-1e3 + #d.1e3 + #d1.e3 + #d1.2e3 + #d+1.2e3 + #d-1.2e3 + #d1/2e3 + #d+1/2e3 + #d-1/2e3 + #d+i + #d1+i + #d+1+i + #d-1+i + #d.1+i + #d1.+i + #d1.2+i + #d+1.2+i + #d-1.2+i + #d1/2+i + #d+1/2+i + #d-1/2+i + #d1e3+i + #d+1e3+i + #d-1e3+i + #d1.e3+i + #d.1e3+i + #d1.2e3+i + #d+1.2e3+i + #d-1.2e3+i + #d1/2e3+i + #d+1/2e3+i + #d-1/2e3+i + #d+1i + #d1+1i + #d+1+1i + #d-1+1i + #d1.+1i + #d.1+1i + #d1.2+1i + #d+1.2+1i + #d-1.2+1i + #d1/2+1i + #d+1/2+1i + #d-1/2+1i + #d1e3+1i + #d+1e3+1i + #d-1e3+1i + #d.1e3+1i + #d1.2e3+1i + #d+1.2e3+1i + #d-1.2e3+1i + #d1/2e3+1i + #d+1/2e3+1i + #d-1/2e3+1i + #d+1/2e3i + #d1+1/2e3i + #d+1+1/2e3i + #d-1+1/2e3i + #d.1+1/2e3i + #d1.2+1/2e3i + #d+1.2+1/2e3i + #d-1.2+1/2e3i + #d1/2+1/2e3i + #d+1/2+1/2e3i + #d-1/2+1/2e3i + #d1e3+1/2e3i + #d+1e3+1/2e3i + #d-1e3+1/2e3i + #d.1e3+1/2e3i + #d1.2e3+1/2e3i + #d+1.2e3+1/2e3i + #d-1.2e3+1/2e3i + #d1/2e3+1/2e3i + #d+1/2e3+1/2e3i + #d-1/2e3+1/2e3i + ;; Extflonums + +nan.t + 1t3 + +1t3 + -1t3 + .1t3 + 1.t3 + 1.2t3 + +1.2t3 + -1.2t3 + 1/2t3 + +1/2t3 + -1/2t3 + 1#t0 + 1.#t0 + .2#t0 + 1.2#t0 + 1#/2t0 + 1/2#t0 + 1#/2#t0 + 1#t3 + 1.#t3 + .2#t3 + 1.2#t3 + 1#/2t3 + 1/2#t3 + 1#/2#t3 ;; No # reader prefix -- same as #d -1.23 1.123 @@ -56,7 +272,6 @@ Multi-line comment style ... -1/2 1 -1 - ;; #e #e-1.23 #e1.123 @@ -66,7 +281,24 @@ Multi-line comment style ... 
#e-1 #e1/2 #e-1/2 - + ;; #d#e + #d#e-1.23 + #d#e1.123 + #d#e1e3 + #d#e1e-22 + #d#e1 + #d#e-1 + #d#e1/2 + #d#e-1/2 + ;; #e#d + #e#d-1.23 + #e#d1.123 + #e#d1e3 + #e#d1e-22 + #e#d1 + #e#d-1 + #e#d1/2 + #e#d-1/2 ;; #i always float #i-1.23 #i1.123 @@ -76,7 +308,126 @@ Multi-line comment style ... #i-1/2 #i1 #i-1 - + ;; Implicitly inexact numbers + +nan.0 + 1# + 1.# + .2# + 1.2# + 1#/2 + 1/2# + 1#/2# + 1#e3 + 1.#e3 + .2#e3 + 1.2#e3 + 1#/2e3 + 1/2#e3 + 1#/2#e3 + +nan.0+i + 1#+i + 1.#+i + .2#+i + 1.2#+i + 1#/2+i + 1/2#+i + 1#/2#+i + 1#e3+i + 1.#e3+i + .2#e3+i + 1.2#e3+i + 1#/2e3+i + 1/2#e3+i + 1#/2#e3+i + +nan.0i + +1#i + +1.#i + +.2#i + +1.2#i + +1#/2i + +1/2#i + +1#/2#i + +1#e3i + +1.#e3i + +.2#e3i + +1.2#e3i + +1#/2e3i + +1/2#e3i + +1#/2#e3i + 0+nan.0i + 0+1#i + 0+1.#i + 0+.2#i + 0+1.2#i + 0+1#/2i + 0+1/2#i + 0+1#/2#i + 0+1#e3i + 0+1.#e3i + 0+.2#e3i + 0+1.2#e3i + 0+1#/2e3i + 0+1/2#e3i + 0+1#/2#e3i + 1#/2#e3+nan.0i + 1#/2#e3+1#i + 1#/2#e3+1.#i + 1#/2#e3+.2#i + 1#/2#e3+1.2#i + 1#/2#e3+1#/2i + 1#/2#e3+1/2#i + 1#/2#e3+1#/2#i + 1#/2#e3+1#e3i + 1#/2#e3+1.#e3i + 1#/2#e3+.2#e3i + 1#/2#e3+1.2#e3i + 1#/2#e3+1#/2e3i + 1#/2#e3+1/2#e3i + 1#/2#e3+1#/2#e3i + +nan.0@1 + 1#@1 + 1.#@1 + .2#@1 + 1.2#@1 + 1#/2@1 + 1/2#@1 + 1#/2#@1 + 1#e3@1 + 1.#e3@1 + .2#e3@1 + 1.2#e3@1 + 1#/2e3@1 + 1/2#e3@1 + 1#/2#e3@1 + 1@+nan.0 + 1@1# + 1@1.# + 1@.2# + 1@1.2# + 1@1#/2 + 1@1/2# + 1@1#/2# + 1@1#e3 + 1@1.#e3 + 1@.2#e3 + 1@1.2#e3 + 1@1#/2e3 + 1@1/2#e3 + 1@1#/2#e3 + 1#/2#e3@1# + 1#/2#e3@1.# + 1#/2#e3@.2# + 1#/2#e3@1.2# + 1#/2#e3@1#/2 + 1#/2#e3@1/2# + 1#/2#e3@1#/2# + 1#/2#e3@1#e3 + 1#/2#e3@1.#e3 + 1#/2#e3@.2#e3 + 1#/2#e3@1.2#e3 + 1#/2#e3@1#/2e3 + 1#/2#e3@1/2#e3 + 1#/2#e3@1#/2#e3 ;; #o #o777.777 #o-777.777 @@ -86,10 +437,307 @@ Multi-line comment style ... #o-3/7 #o777 #o-777 - + #e#o777.777 + #e#o-777.777 + #e#o777e777 + #e#o777e-777 + #e#o3/7 + #e#o-3/7 + #e#o777 + #e#o-777 + #i#o777.777 + #i#o-777.777 + #i#o777e777 + #i#o777e-777 + #i#o3/7 + #i#o-3/7 + #i#o777 + #i#o-777 ;; #x #x-f.f #xf.f + #xfsf + #xfs-f + #x7/f + #x-7/f #x-f #xf + #e#x-f.f + #e#xf.f + #e#xfsf + #e#xfs-f + #e#x7/f + #e#x-7/f + #e#x-f + #e#xf + #i#x-f.f + #i#xf.f + #i#xfsf + #i#xfs-f + #i#x7/f + #i#x-7/f + #i#x-f + #i#xf + ;; Not numbers + '-1.23x + '1.123x + '1e3x + '1e-22x + '1/2x + '-1/2x + '1x + '-1x + '/ + '1/ + '/2 + '1//2 + '1e3. + '1e + 'e3 + '.i + '1.2.3 + '1..2 + '.1. + '@ + '1@ + '@2 + '1@@2 + '1@2@3 + '1@2i + '1+-2i + '1i+2 + '1i+2i + '1+2i+3i + '- + '--1 + '+ + '++1 + '1/2.3 + '1#2 + '1#.2 + '1.#2 + '.#2 + '+nan.t+nan.ti + '+nan.t@nan.t + ;; Booleans + #t + #T + #true + #f + #F + #false + ;; Characters, strings, and byte strings + #\ + #\Null9 + #\n9 + #\99 + #\0009 + #\u3BB + #\u03BB9 + #\U3BB + #\U000003BB9 + #\λ9 + "string\ + \a.\b.\t.\n.\v.\f.\r.\e.\".\'.\\.\1.\123.\1234.\x9.\x30.\x303" + "\u9.\u1234.\u12345.\U9.\U00100000.\U001000000" + #"byte-string\7\xff\t" + #< (listof string?) string?) +;; Appends all the strings together, quoting them as appropriate for Python, +;; with commas and spaces between them, wrapping at 80 characters, with an +;; indentation of 8 spaces. +(define (wrap-lines lst) + (define INDENTATION '" ") + (define WIDTH '80) + (define (wrap-lines* lst done-lines current-line) + (if (null? lst) + (string-append (foldr string-append "" done-lines) current-line) + (let* ([str (first lst)] + [wrapped-str (if (regexp-match-exact? '#px"[[:ascii:]]+" str) + (string-append "'" str "',") + (string-append "u'" str "',"))] + [new-line (string-append current-line " " wrapped-str)]) + (if ((string-length new-line) . >= . 
WIDTH) + (wrap-lines* (rest lst) + (append done-lines + `(,(string-append current-line "\n"))) + (string-append INDENTATION wrapped-str)) + (wrap-lines* (rest lst) + done-lines + new-line))))) + (wrap-lines* lst '() INDENTATION)) + +;; (-> string? boolean?) +;; Returns #t if str represents a syntax identifier in the current namespace, +;; otherwise #f. +(define (syntax-identifier? str) + (with-handlers ([exn? exn?]) + (not (eval (call-with-input-string str read))))) + +(define RACKET-NAMESPACE + (parameterize ([current-namespace (make-base-namespace)]) + (namespace-require 'racket) + (current-namespace))) + +(define BOUND-IDENTIFIERS + (parameterize ([current-namespace RACKET-NAMESPACE]) + (sort (map symbol->string (namespace-mapped-symbols)) + string<=?))) + +(define-values (KEYWORDS BUILTINS) + (parameterize ([current-namespace RACKET-NAMESPACE]) + (partition syntax-identifier? BOUND-IDENTIFIERS))) diff --git a/vendor/pygments/tests/examplefiles/example.rts b/vendor/pygments/tests/examplefiles/example.rts new file mode 100644 index 0000000..1f9cfc5 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.rts @@ -0,0 +1,118 @@ +# Example of a Riverbed TrafficScript (*.rts) file. + +http.setHeader( "Host", "secure.mysite.com" ); +$body = http.getBody( ); # get the POST data +$single = 'Hello \ +world'; +$double = "Hello \ +world"; +$pi = 3.14157; +$message = "The URL path is " . http.GetPath(); +$four = 2 + 2; +# Sets $ratio to "75%" (for example) +$ratio = ( $a / ($a + $b) * 100 ) . "%"; +$contentLength = http.getHeader( "Content-Length" ); +if( $contentLength > 1024 * 1024 ) { + log.warn( "Large request body: ".$contentLength ); +} +4 + 7.5 * $a +-$b / $c - 1 +7 % 3 # Returns 1 +"foo" && !0 # true +( 1 < 2 ) && ( 3 < 4 ) # true +$a || $b # true if $a or $b is true +0x1234 & 255 # 0x34 +1|2|4 #7 +1^3 #2 +~1 & 0xffff # 65534 +1 << 2 # 4 +2 >> 1 # 1 +$foo *= 5 # Product equals ($foo = $foo * 5) +$foo /= 2 # Quotient equals ($foo = $foo / 5) +$foo %= 2 # Modulo equals ($foo = $foo % 5) +$foo <<= 2 # Bit-shift left equals ($foo = $foo << 2) +$foo >>= 2 # Bit-shift right equals ($foo = $foo >> 2) +$foo &= 2 # Bitwise AND equals ($foo = $foo & 2) +$foo |= 2 # Bitwise OR equals ($foo = $foo | 2) +$foo ^= 2 # Bitwise XOR equals ($foo = $foo ^ 2) +$int = 10; +$double = 2.71828; +string.len( $double ); # casts to string, returns 7 +# Convert $string to a number, and add 4: +$r = $string + 4; # $r is 14 +if( string.startsWith( $path, "/secure" ) ) { + pool.use( "secure pool" ); +} else { + pool.use( "non-secure pool" ); +} + +for( $count = 0; $count < 10; $count++ ) { + log.info( "In loop, count = " . $count ); +} + +i$count = 0; +while( $count < 10 ) { + log.info( "In loop, count = " . $count ); + $count = $count + 1; +} + +$count = 0; +do { + log.info( "In loop, count = " . $count ); + $count = $count + 1; +} while( $count < 10 ); + +$mime = http.getResponseHeader( "Content-Type" ); +if( !string.startsWith( $mime, "text/html" )) break; +$array = [ "Alex", "Matt", "Oliver", "Laurence" ]; +$someone = $array[0]; +$arraylen = array.length($array); +log.info("My array has " . $arraylen . " elements.\n"); + +for ( $i = 0; $i < $arraylen; $i++ ){ + log.info ( "Element #" . $i . " " . $array[$i]); +} + +$hash = [ "orange" => "fruit", + "apple" => "fruit", + "cabbage" => "vegetable", + "pear" => "fruit" ]; + +foreach ( $key in hash.keys($hash)){ + log.info("Key: " . $key . "; Value: " . $hash[$key] . 
+";"); } + +# Declare a subroutine to calculate factorials +sub factorial( $n ) { + if( $n == 0 ) return 1; + return $n*factorial( $n-1 ); +} +# Put entries into the array +$c = 0; +while( $c <= 10 ) { + $msg = "Did you know that ". $c ."! is ". factorial( $c ) +."?" ; + data.set( "myarray".$c, $msg ); +$c++; } +# Look up several entries. Note: the 1000th entry is empty +$msg = ""; +$msg .= "Index 1000: ".data.get( "myarray1000" )."\n"; +# delete the entire array (but no other data stored by data.set) +data.reset( "myarray" ); +http.sendResponse( "200 OK", "text/plain", $msg, "" ); +sub headbug(){ + # Prints each header to the event log. + $headers = http.listHeaderNames(); + foreach ($header in $headers){ + log.info( $header . ": " . http.getheader($header)); +} } + +import foo; +foo.headbug(); +# Sets the regex string as ^192\.168\. ; the two examples +# below have the same effect +$regex = "^(192)\\.168\\."; +$regex = '^192\.168\.'; +if ( string.regexMatch( $ip, $regex ) ) { + # IP is on 192.168.* network +} diff --git a/vendor/pygments/tests/examplefiles/example.scd b/vendor/pygments/tests/examplefiles/example.scd new file mode 100644 index 0000000..a27247e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.scd @@ -0,0 +1,76 @@ +Instr("cs.fm.BasicFM", { + arg freq = 440, + amp = 0.9, + gate = 0, + carrierFreqRatio = 1.0, + modulatorFreqRatio = 1.0, + // not sure if having these defaults here actually does anything. + modEnvShape = Env.adsr( + attackTime: 0.05, + decayTime: 0.1, + sustainLevel: 0.5 * amp, + releaseTime: 0.1, + peakLevel: amp, + curve: [4, -4, -2] + ), + carrierEnvShape = Env.adsr( + attackTime: 0.05, + decayTime: 0.1, + sustainLevel: 0.5 * amp, + releaseTime: 0.1, + peakLevel: amp, + curve: [4, -4, -2] + ); + + var carrier, + modulator, + carrierEnv, + modEnv, + out; + + modEnv = EnvGen.kr( + envelope: modEnvShape, + gate: gate + ); + + modulator = modEnv * SinOsc.ar(freq * modulatorFreqRatio); + + // carrier sustains until noteoff + carrierEnvShape.releaseNode = 2; + + carrierEnv = EnvGen.kr( + envelope: carrierEnvShape, + gate: gate + ); + + carrier = carrierEnv * SinOsc.ar( + (freq * carrierFreqRatio) + (modulator * freq) + ); + + // free synth when both carrier and modulator envelopes are done + FreeSelf.kr(Done.kr(carrierEnv) + Done.kr(modEnv) - 1); + + out = amp * carrier; +}, [ + \freq.asSpec(), + \amp.asSpec(), + \nil, + ControlSpec(0.1, 10), + ControlSpec(0.1, 10), + EnvSpec(Env.adsr( + attackTime: 0.05, + decayTime: 0.1, + sustainLevel: 0.8, + releaseTime: 0.1, + peakLevel: 1.0, + curve: [4, -4, -2] + )), + EnvSpec(Env.adsr( + attackTime: 0.05, + decayTime: 0.1, + sustainLevel: 0.8, + releaseTime: 0.1, + peakLevel: 1.0, + curve: [4, -4, -2] + )) +]); diff --git a/vendor/pygments/tests/examplefiles/example.sh b/vendor/pygments/tests/examplefiles/example.sh new file mode 100644 index 0000000..2112cdd --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +printf "%d %s\n" 10 "foo" +printf "%d %s\n" $((10#1)) "bar" + +let "m = 10#${1:1:2}" +echo $m + +m=$((10#${1:4:3} + 10#${1:1:3})) +echo $m + +m=$((10#${1:4:3})) +echo $m + +m=$((10#$1)) +echo $m + +m=$((10#1)) +echo $m + +m=$((10)) +echo $m diff --git a/vendor/pygments/tests/examplefiles/example.slim b/vendor/pygments/tests/examplefiles/example.slim new file mode 100644 index 0000000..0e20920 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.slim @@ -0,0 +1,31 @@ +doctype html +html + head + title Slim Examples + meta name="keywords" 
content="template language" + meta name="author" content=author + javascript: + alert('Slim supports embedded javascript!') + + body + h1 Markup examples + + #content + p This example shows you how a basic Slim file looks like. + + == yield + + - unless items.empty? + table + - for item in items do + tr + td.name = item.name + td.price = item.price + - else + p + | No items found. Please add some inventory. + Thank you! + + div id="footer" + = render 'footer' + | Copyright (C) #{year} #{author} diff --git a/vendor/pygments/tests/examplefiles/example.sls b/vendor/pygments/tests/examplefiles/example.sls new file mode 100644 index 0000000..824700e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.sls @@ -0,0 +1,51 @@ +include: + - moosefs + +{% for mnt in salt['cmd.run']('ls /dev/data/moose*').split() %} +/mnt/moose{{ mnt[-1] }}: + mount.mounted: + - device: {{ mnt }} + - fstype: xfs + - mkmnt: True + file.directory: + - user: mfs + - group: mfs + - require: + - user: mfs + - group: mfs +{% endfor %} + +/etc/mfshdd.cfg: + file.managed: + - source: salt://moosefs/mfshdd.cfg + - user: root + - group: root + - mode: 644 + - template: jinja + - require: + - pkg: mfs-chunkserver + +/etc/mfschunkserver.cfg: + file.managed: + - source: salt://moosefs/mfschunkserver.cfg + - user: root + - group: root + - mode: 644 + - template: jinja + - require: + - pkg: mfs-chunkserver + +mfs-chunkserver: + pkg: + - installed +mfschunkserver: + service: + - running + - require: +{% for mnt in salt['cmd.run']('ls /dev/data/moose*') %} + - mount: /mnt/moose{{ mnt[-1] }} + - file: /mnt/moose{{ mnt[-1] }} +{% endfor %} + - file: /etc/mfschunkserver.cfg + - file: /etc/mfshdd.cfg + - file: /var/lib/mfs diff --git a/vendor/pygments/tests/examplefiles/example.stan b/vendor/pygments/tests/examplefiles/example.stan index 5723403..69c9ac7 100644 --- a/vendor/pygments/tests/examplefiles/example.stan +++ b/vendor/pygments/tests/examplefiles/example.stan @@ -5,93 +5,118 @@ It is not a real model and will not compile */ # also a comment // also a comment +functions { + void f1(void a, real b) { + return 1 / a; + } + real f2(int a, vector b, real c) { + return a + b + c; + } +} data { - // valid name - int abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abc; - // all types should be highlighed - int a3; - real foo[2]; - vector[3] bar; - row_vector[3] baz; - matrix[3,3] qux; - simplex[3] quux; - ordered[3] corge; - positive_ordered[3] wibble; - corr_matrix[3] grault; - cov_matrix[3] garply; - - real foo1; - real foo2; - real foo3; - - // bad names - // includes . 
- // real foo.; - // beings with number - //real 0foo; - // begins with _ - //real _foo; + // valid name + int abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abc; + // all types should be highlighed + int a3; + real foo[2]; + vector[3] bar; + row_vector[3] baz; + matrix[3,3] qux; + simplex[3] quux; + ordered[3] corge; + positive_ordered[3] wibble; + corr_matrix[3] grault; + cov_matrix[3] garply; + cholesky_factor_cov[3] waldo; + cholesky_factor_corr[3] waldo2; + + real foo1; + real foo2; + real foo3; } transformed data { - real xyzzy; - int thud; - row_vector grault2; - matrix qux2; - - // all floating point literals should be recognized - // all operators should be recognized - // paren should be recognized; - xyzzy <- 1234.5687 + .123 - (2.7e3 / 2E-5 * 135e-5); - // integer literal - thud <- -12309865; - // ./ and .* should be recognized as operators - grault2 <- grault .* garply ./ garply; - // ' and \ should be regognized as operators - qux2 <- qux' \ bar; - + real xyzzy; + int thud; + row_vector grault2; + matrix qux2; + + // all floating point literals should be recognized + // all operators should be recognized + // paren should be recognized; + xyzzy <- 1234.5687 + .123 - (2.7e3 / 2E-5 * 135e-5); + // integer literal + thud <- -12309865; + // ./ and .* should be recognized as operators + grault2 <- grault .* garply ./ garply; + // ' and \ should be regognized as operators + qux2 <- qux' \ bar; + } parameters { - real fred; - real plugh; - + real fred; + real plugh; } transformed parameters { } model { - // ~, <- are operators, - // T may be be recognized - // normal is a function - fred ~ normal(0, 1) T(-0.5, 0.5); - // interior block - { - real tmp; - // for, in should be highlighted - for (i in 1:10) { - tmp <- tmp + 0.1; - } - } - // lp__ should be highlighted - // normal_log as a function - lp__ <- lp__ + normal_log(plugh, 0, 1); + // ~, <- are operators, + // T may be be recognized + // normal is a function + fred ~ normal(0, 1) T(-0.5, 0.5); + real tmp; + // C++ reserved + real public; + + // control structures + for (i in 1:10) { + tmp <- tmp + 0.1; + } + tmp <- 0.0; + while (tmp < 5.0) { + tmp <- tmp + 1; + } + if (tmp > 0.0) { + print(tmp); + } else { + print(tmp); + } - // print statement and string literal - print("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_~@#$%^&*`'-+={}[].,;: "); - print("Hello, world!"); - print(""); + // operators + tmp || tmp; + tmp && tmp; + tmp == tmp; + tmp != tmp; + tmp < tmp; + tmp <= tmp; + tmp > tmp; + tmp >= tmp; + tmp + tmp; + tmp - tmp; + tmp * tmp; + tmp / tmp; + tmp .* tmp; + tmp ./ tmp; + tmp ^ tmp; + ! tmp; + - tmp; + + tmp; + tmp '; + // lp__ should be highlighted + // normal_log as a function + lp__ <- lp__ + normal_log(plugh, 0, 1); + increment_log_prob(normal_log(plugh, 0, 1)); + + // print statement and string literal + print("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_~@#$%^&*`'-+={}[].,;: "); + print("Hello, world!"); + print(""); + + // reject statement + reject("I just don't like it"); + } generated quantities { - real bar1; - bar1 <- foo + 1; + real bar1; + bar1 <- foo + 1; } - -## Baddness -//foo <- 2.0; -//foo ~ normal(0, 1); -//not_a_block { -//} - -/* -what happens with this? 
-*/ -// */ diff --git a/vendor/pygments/tests/examplefiles/example.tap b/vendor/pygments/tests/examplefiles/example.tap new file mode 100644 index 0000000..a70a239 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.tap @@ -0,0 +1,37 @@ +TAP version 13 +1..42 +1..13 A plan only supports directives so this text is wrong. +ok 1 A normal test line includes a number. +ok But a test line may also omit a number. + +A random line that does not look like a test or diagnostic should be ignored. + No matter how it is spaced out. + +Or if it is a totally blank line. + +not ok 3 This is a failing test line. + +# Diagnostics are any lines... +# ... beginning with a hash character. + +not ok 4 There are a couple of directives. # TODO is one of those directives. +not ok 5 # TODO: is invalid because the directive must be followed by a space. +ok 6 - Another directive line # toDO is not case sensitive. + +ok 7 A line that is a # SKIP +ok 8 Tests can be # skipped as long as the directive has the "skip" stem. +ok 9 The TODO directive must be followed by a space, but # skip: is valid. +1..0 # Skipped directives can show on a plan line too. + +Bail out! is a special phrase emitted when a TAP file aborted. + +not ok 10 Having TAP version 13 in the middle of a line is not a TAP version. +not ok 11 Having Bail out! in the middle of a line is not a bail out. + +ok 12 Here is an empty directive. # + +# The most basic valid test lines. +ok +not ok + +ok 15 Only the test number should look different. Not another 42, for example. diff --git a/vendor/pygments/tests/examplefiles/example.tf b/vendor/pygments/tests/examplefiles/example.tf new file mode 100644 index 0000000..d3f0277 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.tf @@ -0,0 +1,162 @@ +variable "key_name" { + description = "Name of the SSH keypair to use in AWS." +} + +variable "key_path" { + description = "Path to the private portion of the SSH key specified." +} + +variable "aws_region" { + description = "AWS region to launch servers." + default = "us-west-2" + somevar = true +} + +# Ubuntu Precise 12.04 LTS (x64) +variable "aws_amis" { + default = { + eu-west-1 = "ami-b1cf19c6" + us-east-1 = "ami-de7ab6b6" + us-west-1 = "ami-3f75767a" + us-west-2 = "ami-21f78e11" + } +} + + + + + + +provider "aws" { + access_key = "${myvar}" + secret_key = "your aws secret key" + region = "us-east-1" +} +/* multiline + + comment + +*/ + + +# Single line comment +resource "aws_instance" "example" { + ami = "ami-408c7f28" + instance_type = "t1.micro" + key_name = "your-aws-key-name" +} + +# Create our Heroku application. Heroku will +# automatically assign a name. +resource "heroku_app" "web" {} + +# Create our DNSimple record to point to the +# heroku application. 
+resource "dnsimple_record" "web" { + domain = "${var.dnsimple_domain}" + + + # heroku_hostname is a computed attribute on the heroku + # application we can use to determine the hostname + value = "${heroku_app.web.heroku_hostname}" + + type = "CNAME" + ttl = 3600 +} + +# The Heroku domain, which will be created and added +# to the heroku application after we have assigned the domain +# in DNSimple +resource "heroku_domain" "foobar" { + app = "${heroku_app.web.name}" + hostname = "${dnsimple_record.web.hostname}" +} + + +# Specify the provider and access details +provider "aws" { + region = "${var.aws_region}" + value = ${file("path.txt")} +} + +# Our default security group to access +# the instances over SSH and HTTP +resource "aws_security_group" "default" { + name = "terraform_example" + description = "Used in the terraform" + + # SSH access from anywhere + ingress { + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } + + # HTTP access from anywhere + ingress { + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } +} + + +resource "aws_elb" "web" { + name = "terraform-example-elb" + + # The same availability zone as our instance + availability_zones = ["${aws_instance.web.availability_zone}"] + + listener { + instance_port = 80 + instance_protocol = "http" + lb_port = 80 + lb_protocol = "http" + } + + # The instance is registered automatically + instances = ["${aws_instance.web.id}"] +} + + +resource "aws_instance" "web" { + # The connection block tells our provisioner how to + # communicate with the resource (instance) + connection { + # The default username for our AMI + user = "ubuntu" + + # The path to your keyfile + key_file = "${var.key_path}" + } + + instance_type = "m1.small" + + # Lookup the correct AMI based on the region + # we specified + ami = "${lookup(var.aws_amis, var.aws_region)}" + + # The name of our SSH keypair you've created and downloaded + # from the AWS console. + # + # https://console.aws.amazon.com/ec2/v2/home?region=us-west-2#KeyPairs: + # + key_name = "${var.key_name}" + + # Our Security group to allow HTTP and SSH access + security_groups = ["${aws_security_group.default.name}"] + + # We run a remote provisioner on the instance after creating it. + # In this case, we just install nginx and start it. By default, + # this should be on port 80 + provisioner "remote-exec" { + inline = [ + "sudo apt-get -y update", + "sudo apt-get -y install nginx", + "sudo service nginx start" + ] + } +} + diff --git a/vendor/pygments/tests/examplefiles/example.thy b/vendor/pygments/tests/examplefiles/example.thy new file mode 100644 index 0000000..abaa1af --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.thy @@ -0,0 +1,751 @@ +(* from Isabelle2013-2 src/HOL/Power.thy; BSD license *) + +(* Title: HOL/Power.thy + Author: Lawrence C Paulson, Cambridge University Computer Laboratory + Copyright 1997 University of Cambridge +*) + +header {* Exponentiation *} + +theory Power +imports Num +begin + +subsection {* Powers for Arbitrary Monoids *} + +class power = one + times +begin + +primrec power :: "'a \ nat \ 'a" (infixr "^" 80) where + power_0: "a ^ 0 = 1" + | power_Suc: "a ^ Suc n = a * a ^ n" + +notation (latex output) + power ("(_\<^bsup>_\<^esup>)" [1000] 1000) + +notation (HTML output) + power ("(_\<^bsup>_\<^esup>)" [1000] 1000) + +text {* Special syntax for squares. 
*} + +abbreviation (xsymbols) + power2 :: "'a \ 'a" ("(_\<^sup>2)" [1000] 999) where + "x\<^sup>2 \ x ^ 2" + +notation (latex output) + power2 ("(_\<^sup>2)" [1000] 999) + +notation (HTML output) + power2 ("(_\<^sup>2)" [1000] 999) + +end + +context monoid_mult +begin + +subclass power . + +lemma power_one [simp]: + "1 ^ n = 1" + by (induct n) simp_all + +lemma power_one_right [simp]: + "a ^ 1 = a" + by simp + +lemma power_commutes: + "a ^ n * a = a * a ^ n" + by (induct n) (simp_all add: mult_assoc) + +lemma power_Suc2: + "a ^ Suc n = a ^ n * a" + by (simp add: power_commutes) + +lemma power_add: + "a ^ (m + n) = a ^ m * a ^ n" + by (induct m) (simp_all add: algebra_simps) + +lemma power_mult: + "a ^ (m * n) = (a ^ m) ^ n" + by (induct n) (simp_all add: power_add) + +lemma power2_eq_square: "a\<^sup>2 = a * a" + by (simp add: numeral_2_eq_2) + +lemma power3_eq_cube: "a ^ 3 = a * a * a" + by (simp add: numeral_3_eq_3 mult_assoc) + +lemma power_even_eq: + "a ^ (2 * n) = (a ^ n)\<^sup>2" + by (subst mult_commute) (simp add: power_mult) + +lemma power_odd_eq: + "a ^ Suc (2*n) = a * (a ^ n)\<^sup>2" + by (simp add: power_even_eq) + +lemma power_numeral_even: + "z ^ numeral (Num.Bit0 w) = (let w = z ^ (numeral w) in w * w)" + unfolding numeral_Bit0 power_add Let_def .. + +lemma power_numeral_odd: + "z ^ numeral (Num.Bit1 w) = (let w = z ^ (numeral w) in z * w * w)" + unfolding numeral_Bit1 One_nat_def add_Suc_right add_0_right + unfolding power_Suc power_add Let_def mult_assoc .. + +lemma funpow_times_power: + "(times x ^^ f x) = times (x ^ f x)" +proof (induct "f x" arbitrary: f) + case 0 then show ?case by (simp add: fun_eq_iff) +next + case (Suc n) + def g \ "\x. f x - 1" + with Suc have "n = g x" by simp + with Suc have "times x ^^ g x = times (x ^ g x)" by simp + moreover from Suc g_def have "f x = g x + 1" by simp + ultimately show ?case by (simp add: power_add funpow_add fun_eq_iff mult_assoc) +qed + +end + +context comm_monoid_mult +begin + +lemma power_mult_distrib: + "(a * b) ^ n = (a ^ n) * (b ^ n)" + by (induct n) (simp_all add: mult_ac) + +end + +context semiring_numeral +begin + +lemma numeral_sqr: "numeral (Num.sqr k) = numeral k * numeral k" + by (simp only: sqr_conv_mult numeral_mult) + +lemma numeral_pow: "numeral (Num.pow k l) = numeral k ^ numeral l" + by (induct l, simp_all only: numeral_class.numeral.simps pow.simps + numeral_sqr numeral_mult power_add power_one_right) + +lemma power_numeral [simp]: "numeral k ^ numeral l = numeral (Num.pow k l)" + by (rule numeral_pow [symmetric]) + +end + +context semiring_1 +begin + +lemma of_nat_power: + "of_nat (m ^ n) = of_nat m ^ n" + by (induct n) (simp_all add: of_nat_mult) + +lemma power_zero_numeral [simp]: "(0::'a) ^ numeral k = 0" + by (simp add: numeral_eq_Suc) + +lemma zero_power2: "0\<^sup>2 = 0" (* delete? *) + by (rule power_zero_numeral) + +lemma one_power2: "1\<^sup>2 = 1" (* delete? *) + by (rule power_one) + +end + +context comm_semiring_1 +begin + +text {* The divides relation *} + +lemma le_imp_power_dvd: + assumes "m \ n" shows "a ^ m dvd a ^ n" +proof + have "a ^ n = a ^ (m + (n - m))" + using `m \ n` by simp + also have "\ = a ^ m * a ^ (n - m)" + by (rule power_add) + finally show "a ^ n = a ^ m * a ^ (n - m)" . 
+qed + +lemma power_le_dvd: + "a ^ n dvd b \ m \ n \ a ^ m dvd b" + by (rule dvd_trans [OF le_imp_power_dvd]) + +lemma dvd_power_same: + "x dvd y \ x ^ n dvd y ^ n" + by (induct n) (auto simp add: mult_dvd_mono) + +lemma dvd_power_le: + "x dvd y \ m \ n \ x ^ n dvd y ^ m" + by (rule power_le_dvd [OF dvd_power_same]) + +lemma dvd_power [simp]: + assumes "n > (0::nat) \ x = 1" + shows "x dvd (x ^ n)" +using assms proof + assume "0 < n" + then have "x ^ n = x ^ Suc (n - 1)" by simp + then show "x dvd (x ^ n)" by simp +next + assume "x = 1" + then show "x dvd (x ^ n)" by simp +qed + +end + +context ring_1 +begin + +lemma power_minus: + "(- a) ^ n = (- 1) ^ n * a ^ n" +proof (induct n) + case 0 show ?case by simp +next + case (Suc n) then show ?case + by (simp del: power_Suc add: power_Suc2 mult_assoc) +qed + +lemma power_minus_Bit0: + "(- x) ^ numeral (Num.Bit0 k) = x ^ numeral (Num.Bit0 k)" + by (induct k, simp_all only: numeral_class.numeral.simps power_add + power_one_right mult_minus_left mult_minus_right minus_minus) + +lemma power_minus_Bit1: + "(- x) ^ numeral (Num.Bit1 k) = - (x ^ numeral (Num.Bit1 k))" + by (simp only: eval_nat_numeral(3) power_Suc power_minus_Bit0 mult_minus_left) + +lemma power_neg_numeral_Bit0 [simp]: + "neg_numeral k ^ numeral (Num.Bit0 l) = numeral (Num.pow k (Num.Bit0 l))" + by (simp only: neg_numeral_def power_minus_Bit0 power_numeral) + +lemma power_neg_numeral_Bit1 [simp]: + "neg_numeral k ^ numeral (Num.Bit1 l) = neg_numeral (Num.pow k (Num.Bit1 l))" + by (simp only: neg_numeral_def power_minus_Bit1 power_numeral pow.simps) + +lemma power2_minus [simp]: + "(- a)\<^sup>2 = a\<^sup>2" + by (rule power_minus_Bit0) + +lemma power_minus1_even [simp]: + "-1 ^ (2*n) = 1" +proof (induct n) + case 0 show ?case by simp +next + case (Suc n) then show ?case by (simp add: power_add power2_eq_square) +qed + +lemma power_minus1_odd: + "-1 ^ Suc (2*n) = -1" + by simp + +lemma power_minus_even [simp]: + "(-a) ^ (2*n) = a ^ (2*n)" + by (simp add: power_minus [of a]) + +end + +context ring_1_no_zero_divisors +begin + +lemma field_power_not_zero: + "a \ 0 \ a ^ n \ 0" + by (induct n) auto + +lemma zero_eq_power2 [simp]: + "a\<^sup>2 = 0 \ a = 0" + unfolding power2_eq_square by simp + +lemma power2_eq_1_iff: + "a\<^sup>2 = 1 \ a = 1 \ a = - 1" + unfolding power2_eq_square by (rule square_eq_1_iff) + +end + +context idom +begin + +lemma power2_eq_iff: "x\<^sup>2 = y\<^sup>2 \ x = y \ x = - y" + unfolding power2_eq_square by (rule square_eq_iff) + +end + +context division_ring +begin + +text {* FIXME reorient or rename to @{text nonzero_inverse_power} *} +lemma nonzero_power_inverse: + "a \ 0 \ inverse (a ^ n) = (inverse a) ^ n" + by (induct n) + (simp_all add: nonzero_inverse_mult_distrib power_commutes field_power_not_zero) + +end + +context field +begin + +lemma nonzero_power_divide: + "b \ 0 \ (a / b) ^ n = a ^ n / b ^ n" + by (simp add: divide_inverse power_mult_distrib nonzero_power_inverse) + +end + + +subsection {* Exponentiation on ordered types *} + +context linordered_ring (* TODO: move *) +begin + +lemma sum_squares_ge_zero: + "0 \ x * x + y * y" + by (intro add_nonneg_nonneg zero_le_square) + +lemma not_sum_squares_lt_zero: + "\ x * x + y * y < 0" + by (simp add: not_less sum_squares_ge_zero) + +end + +context linordered_semidom +begin + +lemma zero_less_power [simp]: + "0 < a \ 0 < a ^ n" + by (induct n) (simp_all add: mult_pos_pos) + +lemma zero_le_power [simp]: + "0 \ a \ 0 \ a ^ n" + by (induct n) (simp_all add: mult_nonneg_nonneg) + +lemma power_mono: + "a \ b \ 
0 \ a \ a ^ n \ b ^ n" + by (induct n) (auto intro: mult_mono order_trans [of 0 a b]) + +lemma one_le_power [simp]: "1 \ a \ 1 \ a ^ n" + using power_mono [of 1 a n] by simp + +lemma power_le_one: "\0 \ a; a \ 1\ \ a ^ n \ 1" + using power_mono [of a 1 n] by simp + +lemma power_gt1_lemma: + assumes gt1: "1 < a" + shows "1 < a * a ^ n" +proof - + from gt1 have "0 \ a" + by (fact order_trans [OF zero_le_one less_imp_le]) + have "1 * 1 < a * 1" using gt1 by simp + also have "\ \ a * a ^ n" using gt1 + by (simp only: mult_mono `0 \ a` one_le_power order_less_imp_le + zero_le_one order_refl) + finally show ?thesis by simp +qed + +lemma power_gt1: + "1 < a \ 1 < a ^ Suc n" + by (simp add: power_gt1_lemma) + +lemma one_less_power [simp]: + "1 < a \ 0 < n \ 1 < a ^ n" + by (cases n) (simp_all add: power_gt1_lemma) + +lemma power_le_imp_le_exp: + assumes gt1: "1 < a" + shows "a ^ m \ a ^ n \ m \ n" +proof (induct m arbitrary: n) + case 0 + show ?case by simp +next + case (Suc m) + show ?case + proof (cases n) + case 0 + with Suc.prems Suc.hyps have "a * a ^ m \ 1" by simp + with gt1 show ?thesis + by (force simp only: power_gt1_lemma + not_less [symmetric]) + next + case (Suc n) + with Suc.prems Suc.hyps show ?thesis + by (force dest: mult_left_le_imp_le + simp add: less_trans [OF zero_less_one gt1]) + qed +qed + +text{*Surely we can strengthen this? It holds for @{text "0 a ^ m = a ^ n \ m = n" + by (force simp add: order_antisym power_le_imp_le_exp) + +text{*Can relax the first premise to @{term "0 a ^ m < a ^ n \ m < n" + by (simp add: order_less_le [of m n] less_le [of "a^m" "a^n"] + power_le_imp_le_exp) + +lemma power_strict_mono [rule_format]: + "a < b \ 0 \ a \ 0 < n \ a ^ n < b ^ n" + by (induct n) + (auto simp add: mult_strict_mono le_less_trans [of 0 a b]) + +text{*Lemma for @{text power_strict_decreasing}*} +lemma power_Suc_less: + "0 < a \ a < 1 \ a * a ^ n < a ^ n" + by (induct n) + (auto simp add: mult_strict_left_mono) + +lemma power_strict_decreasing [rule_format]: + "n < N \ 0 < a \ a < 1 \ a ^ N < a ^ n" +proof (induct N) + case 0 then show ?case by simp +next + case (Suc N) then show ?case + apply (auto simp add: power_Suc_less less_Suc_eq) + apply (subgoal_tac "a * a^N < 1 * a^n") + apply simp + apply (rule mult_strict_mono) apply auto + done +qed + +text{*Proof resembles that of @{text power_strict_decreasing}*} +lemma power_decreasing [rule_format]: + "n \ N \ 0 \ a \ a \ 1 \ a ^ N \ a ^ n" +proof (induct N) + case 0 then show ?case by simp +next + case (Suc N) then show ?case + apply (auto simp add: le_Suc_eq) + apply (subgoal_tac "a * a^N \ 1 * a^n", simp) + apply (rule mult_mono) apply auto + done +qed + +lemma power_Suc_less_one: + "0 < a \ a < 1 \ a ^ Suc n < 1" + using power_strict_decreasing [of 0 "Suc n" a] by simp + +text{*Proof again resembles that of @{text power_strict_decreasing}*} +lemma power_increasing [rule_format]: + "n \ N \ 1 \ a \ a ^ n \ a ^ N" +proof (induct N) + case 0 then show ?case by simp +next + case (Suc N) then show ?case + apply (auto simp add: le_Suc_eq) + apply (subgoal_tac "1 * a^n \ a * a^N", simp) + apply (rule mult_mono) apply (auto simp add: order_trans [OF zero_le_one]) + done +qed + +text{*Lemma for @{text power_strict_increasing}*} +lemma power_less_power_Suc: + "1 < a \ a ^ n < a * a ^ n" + by (induct n) (auto simp add: mult_strict_left_mono less_trans [OF zero_less_one]) + +lemma power_strict_increasing [rule_format]: + "n < N \ 1 < a \ a ^ n < a ^ N" +proof (induct N) + case 0 then show ?case by simp +next + case (Suc N) then show 
?case + apply (auto simp add: power_less_power_Suc less_Suc_eq) + apply (subgoal_tac "1 * a^n < a * a^N", simp) + apply (rule mult_strict_mono) apply (auto simp add: less_trans [OF zero_less_one] less_imp_le) + done +qed + +lemma power_increasing_iff [simp]: + "1 < b \ b ^ x \ b ^ y \ x \ y" + by (blast intro: power_le_imp_le_exp power_increasing less_imp_le) + +lemma power_strict_increasing_iff [simp]: + "1 < b \ b ^ x < b ^ y \ x < y" +by (blast intro: power_less_imp_less_exp power_strict_increasing) + +lemma power_le_imp_le_base: + assumes le: "a ^ Suc n \ b ^ Suc n" + and ynonneg: "0 \ b" + shows "a \ b" +proof (rule ccontr) + assume "~ a \ b" + then have "b < a" by (simp only: linorder_not_le) + then have "b ^ Suc n < a ^ Suc n" + by (simp only: assms power_strict_mono) + from le and this show False + by (simp add: linorder_not_less [symmetric]) +qed + +lemma power_less_imp_less_base: + assumes less: "a ^ n < b ^ n" + assumes nonneg: "0 \ b" + shows "a < b" +proof (rule contrapos_pp [OF less]) + assume "~ a < b" + hence "b \ a" by (simp only: linorder_not_less) + hence "b ^ n \ a ^ n" using nonneg by (rule power_mono) + thus "\ a ^ n < b ^ n" by (simp only: linorder_not_less) +qed + +lemma power_inject_base: + "a ^ Suc n = b ^ Suc n \ 0 \ a \ 0 \ b \ a = b" +by (blast intro: power_le_imp_le_base antisym eq_refl sym) + +lemma power_eq_imp_eq_base: + "a ^ n = b ^ n \ 0 \ a \ 0 \ b \ 0 < n \ a = b" + by (cases n) (simp_all del: power_Suc, rule power_inject_base) + +lemma power2_le_imp_le: + "x\<^sup>2 \ y\<^sup>2 \ 0 \ y \ x \ y" + unfolding numeral_2_eq_2 by (rule power_le_imp_le_base) + +lemma power2_less_imp_less: + "x\<^sup>2 < y\<^sup>2 \ 0 \ y \ x < y" + by (rule power_less_imp_less_base) + +lemma power2_eq_imp_eq: + "x\<^sup>2 = y\<^sup>2 \ 0 \ x \ 0 \ y \ x = y" + unfolding numeral_2_eq_2 by (erule (2) power_eq_imp_eq_base) simp + +end + +context linordered_ring_strict +begin + +lemma sum_squares_eq_zero_iff: + "x * x + y * y = 0 \ x = 0 \ y = 0" + by (simp add: add_nonneg_eq_0_iff) + +lemma sum_squares_le_zero_iff: + "x * x + y * y \ 0 \ x = 0 \ y = 0" + by (simp add: le_less not_sum_squares_lt_zero sum_squares_eq_zero_iff) + +lemma sum_squares_gt_zero_iff: + "0 < x * x + y * y \ x \ 0 \ y \ 0" + by (simp add: not_le [symmetric] sum_squares_le_zero_iff) + +end + +context linordered_idom +begin + +lemma power_abs: + "abs (a ^ n) = abs a ^ n" + by (induct n) (auto simp add: abs_mult) + +lemma abs_power_minus [simp]: + "abs ((-a) ^ n) = abs (a ^ n)" + by (simp add: power_abs) + +lemma zero_less_power_abs_iff [simp, no_atp]: + "0 < abs a ^ n \ a \ 0 \ n = 0" +proof (induct n) + case 0 show ?case by simp +next + case (Suc n) show ?case by (auto simp add: Suc zero_less_mult_iff) +qed + +lemma zero_le_power_abs [simp]: + "0 \ abs a ^ n" + by (rule zero_le_power [OF abs_ge_zero]) + +lemma zero_le_power2 [simp]: + "0 \ a\<^sup>2" + by (simp add: power2_eq_square) + +lemma zero_less_power2 [simp]: + "0 < a\<^sup>2 \ a \ 0" + by (force simp add: power2_eq_square zero_less_mult_iff linorder_neq_iff) + +lemma power2_less_0 [simp]: + "\ a\<^sup>2 < 0" + by (force simp add: power2_eq_square mult_less_0_iff) + +lemma abs_power2 [simp]: + "abs (a\<^sup>2) = a\<^sup>2" + by (simp add: power2_eq_square abs_mult abs_mult_self) + +lemma power2_abs [simp]: + "(abs a)\<^sup>2 = a\<^sup>2" + by (simp add: power2_eq_square abs_mult_self) + +lemma odd_power_less_zero: + "a < 0 \ a ^ Suc (2*n) < 0" +proof (induct n) + case 0 + then show ?case by simp +next + case (Suc n) + have "a ^ Suc (2 * Suc n) = 
(a*a) * a ^ Suc(2*n)" + by (simp add: mult_ac power_add power2_eq_square) + thus ?case + by (simp del: power_Suc add: Suc mult_less_0_iff mult_neg_neg) +qed + +lemma odd_0_le_power_imp_0_le: + "0 \ a ^ Suc (2*n) \ 0 \ a" + using odd_power_less_zero [of a n] + by (force simp add: linorder_not_less [symmetric]) + +lemma zero_le_even_power'[simp]: + "0 \ a ^ (2*n)" +proof (induct n) + case 0 + show ?case by simp +next + case (Suc n) + have "a ^ (2 * Suc n) = (a*a) * a ^ (2*n)" + by (simp add: mult_ac power_add power2_eq_square) + thus ?case + by (simp add: Suc zero_le_mult_iff) +qed + +lemma sum_power2_ge_zero: + "0 \ x\<^sup>2 + y\<^sup>2" + by (intro add_nonneg_nonneg zero_le_power2) + +lemma not_sum_power2_lt_zero: + "\ x\<^sup>2 + y\<^sup>2 < 0" + unfolding not_less by (rule sum_power2_ge_zero) + +lemma sum_power2_eq_zero_iff: + "x\<^sup>2 + y\<^sup>2 = 0 \ x = 0 \ y = 0" + unfolding power2_eq_square by (simp add: add_nonneg_eq_0_iff) + +lemma sum_power2_le_zero_iff: + "x\<^sup>2 + y\<^sup>2 \ 0 \ x = 0 \ y = 0" + by (simp add: le_less sum_power2_eq_zero_iff not_sum_power2_lt_zero) + +lemma sum_power2_gt_zero_iff: + "0 < x\<^sup>2 + y\<^sup>2 \ x \ 0 \ y \ 0" + unfolding not_le [symmetric] by (simp add: sum_power2_le_zero_iff) + +end + + +subsection {* Miscellaneous rules *} + +lemma power_eq_if: "p ^ m = (if m=0 then 1 else p * (p ^ (m - 1)))" + unfolding One_nat_def by (cases m) simp_all + +lemma power2_sum: + fixes x y :: "'a::comm_semiring_1" + shows "(x + y)\<^sup>2 = x\<^sup>2 + y\<^sup>2 + 2 * x * y" + by (simp add: algebra_simps power2_eq_square mult_2_right) + +lemma power2_diff: + fixes x y :: "'a::comm_ring_1" + shows "(x - y)\<^sup>2 = x\<^sup>2 + y\<^sup>2 - 2 * x * y" + by (simp add: ring_distribs power2_eq_square mult_2) (rule mult_commute) + +lemma power_0_Suc [simp]: + "(0::'a::{power, semiring_0}) ^ Suc n = 0" + by simp + +text{*It looks plausible as a simprule, but its effect can be strange.*} +lemma power_0_left: + "0 ^ n = (if n = 0 then 1 else (0::'a::{power, semiring_0}))" + by (induct n) simp_all + +lemma power_eq_0_iff [simp]: + "a ^ n = 0 \ + a = (0::'a::{mult_zero,zero_neq_one,no_zero_divisors,power}) \ n \ 0" + by (induct n) + (auto simp add: no_zero_divisors elim: contrapos_pp) + +lemma (in field) power_diff: + assumes nz: "a \ 0" + shows "n \ m \ a ^ (m - n) = a ^ m / a ^ n" + by (induct m n rule: diff_induct) (simp_all add: nz field_power_not_zero) + +text{*Perhaps these should be simprules.*} +lemma power_inverse: + fixes a :: "'a::division_ring_inverse_zero" + shows "inverse (a ^ n) = inverse a ^ n" +apply (cases "a = 0") +apply (simp add: power_0_left) +apply (simp add: nonzero_power_inverse) +done (* TODO: reorient or rename to inverse_power *) + +lemma power_one_over: + "1 / (a::'a::{field_inverse_zero, power}) ^ n = (1 / a) ^ n" + by (simp add: divide_inverse) (rule power_inverse) + +lemma power_divide: + "(a / b) ^ n = (a::'a::field_inverse_zero) ^ n / b ^ n" +apply (cases "b = 0") +apply (simp add: power_0_left) +apply (rule nonzero_power_divide) +apply assumption +done + +text {* Simprules for comparisons where common factors can be cancelled. 
*} + +lemmas zero_compare_simps = + add_strict_increasing add_strict_increasing2 add_increasing + zero_le_mult_iff zero_le_divide_iff + zero_less_mult_iff zero_less_divide_iff + mult_le_0_iff divide_le_0_iff + mult_less_0_iff divide_less_0_iff + zero_le_power2 power2_less_0 + + +subsection {* Exponentiation for the Natural Numbers *} + +lemma nat_one_le_power [simp]: + "Suc 0 \ i \ Suc 0 \ i ^ n" + by (rule one_le_power [of i n, unfolded One_nat_def]) + +lemma nat_zero_less_power_iff [simp]: + "x ^ n > 0 \ x > (0::nat) \ n = 0" + by (induct n) auto + +lemma nat_power_eq_Suc_0_iff [simp]: + "x ^ m = Suc 0 \ m = 0 \ x = Suc 0" + by (induct m) auto + +lemma power_Suc_0 [simp]: + "Suc 0 ^ n = Suc 0" + by simp + +text{*Valid for the naturals, but what if @{text"0nat)" + assumes less: "i ^ m < i ^ n" + shows "m < n" +proof (cases "i = 1") + case True with less power_one [where 'a = nat] show ?thesis by simp +next + case False with nonneg have "1 < i" by auto + from power_strict_increasing_iff [OF this] less show ?thesis .. +qed + +lemma power_dvd_imp_le: + "i ^ m dvd i ^ n \ (1::nat) < i \ m \ n" + apply (rule power_le_imp_le_exp, assumption) + apply (erule dvd_imp_le, simp) + done + +lemma power2_nat_le_eq_le: + fixes m n :: nat + shows "m\<^sup>2 \ n\<^sup>2 \ m \ n" + by (auto intro: power2_le_imp_le power_mono) + +lemma power2_nat_le_imp_le: + fixes m n :: nat + assumes "m\<^sup>2 \ n" + shows "m \ n" + using assms by (cases m) (simp_all add: power2_eq_square) + + + +subsection {* Code generator tweak *} + +lemma power_power_power [code]: + "power = power.power (1::'a::{power}) (op *)" + unfolding power_def power.power_def .. + +declare power.power.simps [code] + +code_identifier + code_module Power \ (SML) Arith and (OCaml) Arith and (Haskell) Arith + +end + diff --git a/vendor/pygments/tests/examplefiles/example.todotxt b/vendor/pygments/tests/examplefiles/example.todotxt new file mode 100644 index 0000000..55ee528 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.todotxt @@ -0,0 +1,9 @@ +(A) Call Mom @Phone +Family +(A) 2014-01-08 Schedule annual checkup +Health +(B) Outline chapter 5 +Novel @Computer +(C) Add cover sheets @Office +TPSReports +Plan backyard herb garden @Home +Pick up milk @GroceryStore +Research self-publishing services +Novel @Computer +x 2014-01-10 Download Todo.txt mobile app @Phone +x 2014-01-10 2014-01-07 Download Todo.txt CLI @Computer diff --git a/vendor/pygments/tests/examplefiles/example.ts b/vendor/pygments/tests/examplefiles/example.ts index 545c6cf..760e254 100644 --- a/vendor/pygments/tests/examplefiles/example.ts +++ b/vendor/pygments/tests/examplefiles/example.ts @@ -21,6 +21,17 @@ class Horse extends Animal { } } +@View({ + templateUrl: "app/components/LoginForm.html", + directives: [FORM_DIRECTIVES, NgIf] +}) +@Component({ + selector: "login-form" +}) +class LoginForm { + +} + var sam = new Snake("Sammy the Python") var tom: Animal = new Horse("Tommy the Palomino") diff --git a/vendor/pygments/tests/examplefiles/example.ttl b/vendor/pygments/tests/examplefiles/example.ttl new file mode 100644 index 0000000..e524d86 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.ttl @@ -0,0 +1,43 @@ +@base . +@prefix dcterms: . @prefix xs: . +@prefix mads: . +@prefix skos: . +@PREFIX dc: # SPARQL-like syntax is OK +@prefix : . # empty prefix is OK + + . 
+ +<#doc1> a <#document> + dc:creator "Smith", "Jones"; + :knows + dcterms:hasPart [ # A comment + dc:title "Some title", "Some other title"; + dc:creator "برشت، برتولد"@ar; + dc:date "2009"^^xs:date + ]; + dc:title "A sample title", 23.0; + dcterms:isPartOf [ + dc:title "another", "title" + ] ; + :exists true . + + a mads:Topic, + skos:Concept ; + dcterms:created "2014-08-25"^^xsd:date ; + dcterms:modified "2014-11-12"^^xsd:date ; + dcterms:identifier "REAL006839" ; + skos:prefLabel "Flerbørstemarker"@nb, + "Polychaeta"@la ; + skos:altLabel "Flerbørsteormer"@nb, + "Mangebørstemark"@nb, + "Mangebørsteormer"@nb, + "Havbørsteormer"@nb, + "Havbørstemarker"@nb, + "Polycheter"@nb. + skos:inScheme ; + skos:narrower , + , + ; + skos:exactMatch , + , + . diff --git a/vendor/pygments/tests/examplefiles/example.weechatlog b/vendor/pygments/tests/examplefiles/example.weechatlog index 9f03616..15e8130 100644 --- a/vendor/pygments/tests/examplefiles/example.weechatlog +++ b/vendor/pygments/tests/examplefiles/example.weechatlog @@ -6,4 +6,6 @@ 2007 Sep 01 00:23:55 -=- Das Topic von &bitlbee lautet: "Welcome to the control channel. Type help for help information." 2007 Sep 01 00:23:55 Welcome to the BitlBee gateway! 2007 Sep 01 00:23:55 -2007 Sep 01 00:23:55 If you've never used BitlBee before, please do read the help information using the help command. Lots of FAQ's are answered there. \ No newline at end of file +2007 Sep 01 00:23:55 If you've never used BitlBee before, please do read the help information using the help command. Lots of FAQ's are answered there. +# check for fixed pathological matching behavior +1111111111111111111111111111111 diff --git a/vendor/pygments/tests/examplefiles/example.x10 b/vendor/pygments/tests/examplefiles/example.x10 new file mode 100644 index 0000000..9cca164 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example.x10 @@ -0,0 +1,9 @@ +/** Example file for the X10 programming langauge (http://x10-lang.org). + */ +class Example { + + public static def main(Rail[String]) { + Console.OUT.println("Hello World!"); // say hello. + } + +} diff --git a/vendor/pygments/tests/examplefiles/example1.cadl b/vendor/pygments/tests/examplefiles/example1.cadl new file mode 100644 index 0000000..3350fa3 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example1.cadl @@ -0,0 +1,149 @@ + -- + -- Example fragment of an openEHR Archetype, written in cADL, a subsyntax of the Archetype Definition Language (ADL) + -- definition available here: http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf + -- Author: Thomas Beale + -- + + EVALUATION[id1] matches { -- Adverse Reaction + data matches { + ITEM_TREE[id2] matches { + items cardinality matches {1..*; unordered} matches { + ELEMENT[id3] matches { -- Substance/Agent + value matches { + DV_TEXT[id51] + } + } + ELEMENT[id5] occurrences matches {0..1} matches { -- Absolute Contraindication? 
+ value matches { + DV_BOOLEAN[id52] matches { + value matches {True} + } + } + } + ELEMENT[id50] occurrences matches {0..1} matches { -- Future Use + value matches { + DV_TEXT[id53] + } + } + ELEMENT[id7] occurrences matches {0..1} matches { -- Overall Comment + value matches { + DV_TEXT[id54] + } + } + CLUSTER[id10] matches { -- Reaction Event + items matches { + ELEMENT[id11] occurrences matches {0..1} matches { -- Specific Substance/Agent + value matches { + DV_TEXT[id55] + } + } + ELEMENT[id12] matches { -- Manifestation + value matches { + DV_TEXT[id56] + } + } + ELEMENT[id17] occurrences matches {0..1} matches { -- Reaction Type + value matches { + DV_TEXT[id57] + } + } + ELEMENT[id22] occurrences matches {0..1} matches { -- Certainty + value matches { + DV_CODED_TEXT[id58] matches { + defining_code matches {[ac1]} -- Certainty (synthesised) + } + } + } + ELEMENT[id13] occurrences matches {0..1} matches { -- Reaction Description + value matches { + DV_TEXT[id59] + } + } + ELEMENT[id28] occurrences matches {0..1} matches { -- Onset of Reaction + value matches { + DV_DATE_TIME[id60] + } + } + ELEMENT[id29] occurrences matches {0..1} matches { -- Duration of Reaction + value matches { + DV_DURATION[id61] + } + } + allow_archetype CLUSTER[id30] matches { -- Additional Reaction Detail + include + archetype_id/value matches {/openEHR-EHR-CLUSTER\.anatomical_location(-a-zA-Z0-9_]+)*\.v1/} + } + ELEMENT[id19] occurrences matches {0..1} matches { -- Exposure Description + value matches { + DV_TEXT[id62] + } + } + ELEMENT[id21] occurrences matches {0..1} matches { -- Earliest Exposure + value matches { + DV_DATE_TIME[id63] + } + } + ELEMENT[id26] occurrences matches {0..1} matches { -- Duration of Exposure + value matches { + DV_DURATION[id64] + } + } + allow_archetype CLUSTER[id20] matches { -- Additional Exposure Detail + include + archetype_id/value matches {/openEHR-EHR-CLUSTER\.amount(-a-zA-Z0-9_]+)*\.v1|openEHR-EHR-CLUSTER\.medication_admin(-a-zA-Z0-9_]+)*\.v1|openEHR-EHR-CLUSTER\.timing(-a-zA-Z0-9_]+)*\.v1/} + } + ELEMENT[id41] occurrences matches {0..1} matches { -- Clinical Management Description + value matches { + DV_TEXT[id65] + } + } + ELEMENT[id32] matches { -- Multimedia + value matches { + DV_MULTIMEDIA[id66] matches { + media_type + } + } + } + allow_archetype CLUSTER[id42] matches { -- Reporting Details + include + archetype_id/value matches {/.*/} + } + ELEMENT[id33] occurrences matches {0..1} matches { -- Reaction Comment + value matches { + DV_TEXT[id67] + } + } + } + } + } + } + } + protocol matches { + ITEM_TREE[id43] matches { + items matches { + ELEMENT[id45] occurrences matches {0..1} matches { -- Reaction Reported? 
+ value matches { + DV_BOOLEAN[id68] matches { + value matches {True, False} + } + } + } + ELEMENT[id49] occurrences matches {0..1} matches { -- Report Comment + value matches { + DV_TEXT[id69] + } + } + ELEMENT[id46] matches { -- Adverse Reaction Report + value matches { + DV_URI[id70] + } + } + ELEMENT[id48] occurrences matches {0..1} matches { -- Supporting Clinical Record Information + value matches { + DV_EHR_URI[id71] + } + } + } + } + } + } diff --git a/vendor/pygments/tests/examplefiles/exampleScript.cfc b/vendor/pygments/tests/examplefiles/exampleScript.cfc new file mode 100644 index 0000000..002acbc --- /dev/null +++ b/vendor/pygments/tests/examplefiles/exampleScript.cfc @@ -0,0 +1,241 @@ + +/** +******************************************************************************** +ContentBox - A Modular Content Platform +Copyright 2012 by Luis Majano and Ortus Solutions, Corp +www.gocontentbox.org | www.luismajano.com | www.ortussolutions.com +******************************************************************************** +Apache License, Version 2.0 + +Copyright Since [2012] [Luis Majano and Ortus Solutions,Corp] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +******************************************************************************** +* A generic content service for content objects +*/ +component extends="coldbox.system.orm.hibernate.VirtualEntityService" singleton{ + + // DI + property name="settingService" inject="id:settingService@cb"; + property name="cacheBox" inject="cachebox"; + property name="log" inject="logbox:logger:{this}"; + property name="customFieldService" inject="customFieldService@cb"; + property name="categoryService" inject="categoryService@cb"; + property name="commentService" inject="commentService@cb"; + property name="contentVersionService" inject="contentVersionService@cb"; + property name="authorService" inject="authorService@cb"; + property name="populator" inject="wirebox:populator"; + property name="systemUtil" inject="SystemUtil@cb"; + + /* + * Constructor + * @entityName.hint The content entity name to bind this service to. 
+ */ + ContentService function init(entityName="cbContent"){ + // init it + super.init(entityName=arguments.entityName, useQueryCaching=true); + + // Test scope coloring in pygments + this.colorTestVar = "Just for testing pygments!"; + cookie.colorTestVar = ""; + client.colorTestVar = "" + session.colorTestVar = ""; + application.colorTestVar = ""; + + return this; + } + + /** + * Clear all content caches + * @async.hint Run it asynchronously or not, defaults to false + */ + function clearAllCaches(boolean async=false){ + var settings = settingService.getAllSettings(asStruct=true); + // Get appropriate cache provider + var cache = cacheBox.getCache( settings.cb_content_cacheName ); + cache.clearByKeySnippet(keySnippet="cb-content",async=arguments.async); + return this; + } + + /** + * Clear all page wrapper caches + * @async.hint Run it asynchronously or not, defaults to false + */ + function clearAllPageWrapperCaches(boolean async=false){ + var settings = settingService.getAllSettings(asStruct=true); + // Get appropriate cache provider + var cache = cacheBox.getCache( settings.cb_content_cacheName ); + cache.clearByKeySnippet(keySnippet="cb-content-pagewrapper",async=arguments.async); + return this; + } + + /** + * Clear all page wrapper caches + * @slug.hint The slug partial to clean on + * @async.hint Run it asynchronously or not, defaults to false + */ + function clearPageWrapperCaches(required any slug, boolean async=false){ + var settings = settingService.getAllSettings(asStruct=true); + // Get appropriate cache provider + var cache = cacheBox.getCache( settings.cb_content_cacheName ); + cache.clearByKeySnippet(keySnippet="cb-content-pagewrapper-#arguments.slug#",async=arguments.async); + return this; + } + + /** + * Clear a page wrapper cache + * @slug.hint The slug to clean + * @async.hint Run it asynchronously or not, defaults to false + */ + function clearPageWrapper(required any slug, boolean async=false){ + var settings = settingService.getAllSettings(asStruct=true); + // Get appropriate cache provider + var cache = cacheBox.getCache( settings.cb_content_cacheName ); + cache.clear("cb-content-pagewrapper-#arguments.slug#/"); + return this; + } + + /** + * Searches published content with cool paramters, remember published content only + * @searchTerm.hint The search term to search + * @max.hint The maximum number of records to paginate + * @offset.hint The offset in the pagination + * @asQuery.hint Return as query or array of objects, defaults to array of objects + * @sortOrder.hint The sorting of the search results, defaults to publishedDate DESC + * @isPublished.hint Search for published, non-published or both content objects [true, false, 'all'] + * @searchActiveContent.hint Search only content titles or both title and active content. Defaults to both. 
+ */ + function searchContent( + any searchTerm="", + numeric max=0, + numeric offset=0, + boolean asQuery=false, + any sortOrder="publishedDate DESC", + any isPublished=true, + boolean searchActiveContent=true){ + + var results = {}; + var c = newCriteria(); + + // only published content + if( isBoolean( arguments.isPublished ) ){ + // Published bit + c.isEq( "isPublished", javaCast( "Boolean", arguments.isPublished ) ); + // Published eq true evaluate other params + if( arguments.isPublished ){ + c.isLt("publishedDate", now() ) + .$or( c.restrictions.isNull("expireDate"), c.restrictions.isGT("expireDate", now() ) ) + .isEq("passwordProtection",""); + } + } + + // Search Criteria + if( len( arguments.searchTerm ) ){ + // like disjunctions + c.createAlias("activeContent","ac"); + // Do we search title and active content or just title? + if( arguments.searchActiveContent ){ + c.$or( c.restrictions.like("title","%#arguments.searchTerm#%"), + c.restrictions.like("ac.content", "%#arguments.searchTerm#%") ); + } + else{ + c.like( "title", "%#arguments.searchTerm#%" ); + } + } + + // run criteria query and projections count + results.count = c.count( "contentID" ); + results.content = c.resultTransformer( c.DISTINCT_ROOT_ENTITY ) + .list(offset=arguments.offset, max=arguments.max, sortOrder=arguments.sortOrder, asQuery=arguments.asQuery); + + return results; + } + +/********************************************* PRIVATE *********************************************/ + + + /** + * Update the content hits + * @contentID.hint The content id to update + */ + private function syncUpdateHits(required contentID){ + var q = new Query(sql="UPDATE cb_content SET hits = hits + 1 WHERE contentID = #arguments.contentID#").execute(); + return this; + } + + + private function closureTest(){ + methodCall( + param1, + function( arg1, required arg2 ){ + var settings = settingService.getAllSettings(asStruct=true); + // Get appropriate cache provider + var cache = cacheBox.getCache( settings.cb_content_cacheName ); + cache.clear("cb-content-pagewrapper-#arguments.slug#/"); + return this; + }, + param1 + ); + } + + private function StructliteralTest(){ + return { + foo = bar, + brad = 'Wood', + func = function( arg1, required arg2 ){ + var settings = settingService.getAllSettings(asStruct=true); + // Get appropriate cache provider + var cache = cacheBox.getCache( settings.cb_content_cacheName ); + cache.clear("cb-content-pagewrapper-#arguments.slug#/"); + return this; + }, + array = [ + 1, + 2, + 3, + 4, + 5, + 'test', + 'testing', + 'testerton', + { + foo = true, + brad = false, + wood = null + } + ], + last = "final" + }; + } + + private function arrayliteralTest(){ + return [ + 1, + 2, + 3, + 4, + 5, + 'test', + 'testing', + 'testerton', + { + foo = true, + brad = false, + wood = null + }, + 'testy-von-testavich' + ]; + } + +} + \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/exampleTag.cfc b/vendor/pygments/tests/examplefiles/exampleTag.cfc new file mode 100644 index 0000000..753bb82 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/exampleTag.cfc @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/example_coq.v b/vendor/pygments/tests/examplefiles/example_coq.v new file mode 100644 index 0000000..fd1a7bc --- /dev/null +++ b/vendor/pygments/tests/examplefiles/example_coq.v @@ -0,0 +1,4 @@ +Lemma FalseLemma : False <-> False. +tauto. +Qed. +Check FalseLemma. 
diff --git a/vendor/pygments/tests/examplefiles/example_elixir.ex b/vendor/pygments/tests/examplefiles/example_elixir.ex index 2e92163..ddca7f6 100644 --- a/vendor/pygments/tests/examplefiles/example_elixir.ex +++ b/vendor/pygments/tests/examplefiles/example_elixir.ex @@ -1,363 +1,233 @@ -# We cannot use to_char_list because it depends on inspect, -# which depends on protocol, which depends on this module. -import Elixir::Builtin, except: [to_char_list: 1] +# Numbers +0b0101011 +1234 ; 0x1A ; 0xbeef ; 0763 ; 0o123 +3.14 ; 5.0e21 ; 0.5e-12 +100_000_000 -defmodule Module do - require Erlang.ets, as: ETS +# these are not valid numbers +0b012 ; 0xboar ; 0o888 +0B01 ; 0XAF ; 0O123 - @moduledoc """ - This module provides many functions to deal with modules during - compilation time. It allows a developer to dynamically attach - documentation, merge data, register attributes and so forth. +# Characters +?a ; ?1 ; ?\n ; ?\s ; ?\c ; ? ; ?, +?\x{12} ; ?\x{abcd} +?\x34 ; ?\xF - After the module is compiled, using many of the functions in - this module will raise errors, since it is out of their purpose - to inspect runtime data. Most of the runtime data can be inspected - via the `__info__(attr)` function attached to each compiled module. - """ +# these show that only the first digit is part of the character +?\123 ; ?\12 ; ?\7 + +# Atoms +:this ; :that +:'complex atom' +:"with' \"\" 'quotes" +:" multi + line ' \s \123 \xff +atom" +:... ; :<<>> ; :%{} ; :% ; :{} +:++; :--; :*; :~~~; ::: +:% ; :. ; :<- + +# Strings +"Hello world" +"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\\s\z\+ \\ escapes" +"Quotes ' inside \" \123 the \"\" \xF \\xF string \\\" end" +"Multiline + string" + +# Char lists +'this is a list' +'escapes \' \t \\\'' +'Multiline + char + list +' + +# Binaries +<<1, 2, 3>> +<<"hello"::binary, c :: utf8, x::[4, unit(2)]>> = "hello™1" + +# Sigils +~r/this + i\s "a" regex/ +~R'this + i\s "a" regex too' +~w(hello #{ ["has" <> "123", '\c\d', "\123 interpol" | []] } world)s +~W(hello #{no "123" \c\d \123 interpol} world)s + +~s{Escapes terminators \{ and \}, but no {balancing} # outside of sigil here } + +~S"No escapes \s\t\n and no #{interpolation}" + +:"atoms work #{"to" <> "o"}" + +# Operators +x = 1 + 2.0 * 3 +y = true and false; z = false or true +... = 144 +... == !x && y || z +"hello" |> String.upcase |> String.downcase() +{^z, a} = {true, x} + +# Free operators (added in 1.0.0) +p ~>> f = bind(p, f) +p1 ~> p2 = pair_right(p1, p2) +p1 <~ p2 = pair_left(p1, p2) +p1 <~> p2 = pair_both(p1, p2) +p |~> f = map(p, f) +p1 <|> p2 = either(p1, p2) + +# Lists, tuples, maps, keywords +[1, :a, 'hello'] ++ [2, 3] +[:head | [?t, ?a, ?i, ?l]] + +{:one, 2.0, "three"} + +[...: "this", <<>>: "is", %{}: "a keyword", %: "list", {}: "too"] +["this is an atom too": 1, "so is this": 2] +[option: "value", key: :word] +[++: "operator", ~~~: :&&&] + +map = %{shortcut: "syntax"} +%{map | "update" => "me"} +%{ 12 => 13, :weird => ['thing'] } + +# Comprehensions +for x <- 1..10, x < 5, do: {x, x} +pixels = "12345678" +for << <> <- pixels >> do + [r, {g, %{"b" => a}}] +end + +# String interpolation +"String #{inspect "interpolation"} is quite #{1+4+7} difficult" + +# Identifiers +abc_123 = 1 +_018OP = 2 +A__0 == 3 + +# Modules +defmodule Long.Module.Name do + @moduledoc "Simple module docstring" @doc """ - Evalutes the quotes contents in the given module context. - Raises an error if the module was already compiled. 
- - ## Examples - - defmodule Foo do - contents = quote do: (def sum(a, b), do: a + b) - Module.eval_quoted __MODULE__, contents, [], __FILE__, __LINE__ - end - - Foo.sum(1, 2) #=> 3 + Multiline docstring + "with quotes" + and #{ inspect %{"interpolation" => "in" <> "action"} } + now with #{ {:a, 'tuple'} } + and #{ inspect { + :tuple, + %{ with: "nested #{ inspect %{ :interpolation => %{} } }" } + } } """ - def eval_quoted(module, quoted, binding, filename, line) do - assert_not_compiled!(:eval_quoted, module) - { binding, scope } = Erlang.elixir_module.binding_and_scope_for_eval(line, to_char_list(filename), module, binding) - Erlang.elixir_def.reset_last(module) - Erlang.elixir.eval_quoted([quoted], binding, line, scope) - end + defstruct [:a, :name, :height] - @doc """ - Checks if the module is compiled or not. + @doc ~S''' + No #{interpolation} of any kind. + \000 \x{ff} - ## Examples - - defmodule Foo do - Module.compiled?(__MODULE__) #=> false - end - - Module.compiled?(Foo) #=> true - - """ - def compiled?(module) do - table = data_table_for(module) - table == ETS.info(table, :name) - end - - @doc """ - Reads the data for the given module. This is used - to read data of uncompiled modules. If the module - was already compiled, you shoul access the data - directly by invoking `__info__(:data)` in that module. - - ## Examples - - defmodule Foo do - Module.merge_data __MODULE__, value: 1 - Module.read_data __MODULE__ #=> [value: 1] - end - - """ - def read_data(module) do - assert_not_compiled!(:read_data, module) - ETS.lookup_element(data_table_for(module), :data, 2) - end - - @doc """ - Reads the data from `module` at the given key `at`. - - ## Examples - - defmodule Foo do - Module.merge_data __MODULE__, value: 1 - Module.read_data __MODULE__, :value #=> 1 - end - - """ - def read_data(module, at) do - Orddict.get read_data(module), at - end - - @doc """ - Merge the given data into the module, overriding any - previous one. - - If any of the given data is a registered attribute, it is - automatically added to the attribute set, instead of marking - it as data. See register_attribute/2 and add_attribute/3 for - more info. - - ## Examples - - defmodule Foo do - Module.merge_data __MODULE__, value: 1 - end - - Foo.__info__(:data) #=> [value: 1] - - """ - def merge_data(module, data) do - assert_not_compiled!(:merge_data, module) - - table = data_table_for(module) - old = ETS.lookup_element(table, :data, 2) - registered = ETS.lookup_element(table, :registered_attributes, 2) - - { attrs, new } = Enum.partition data, fn({k,_}) -> List.member?(registered, k) end - Enum.each attrs, fn({k,v}) -> add_attribute(module, k, v) end - ETS.insert(table, { :data, Orddict.merge(old, new) }) - end - - @doc """ - Attaches documentation to a given function. It expects - the module the function belongs to, the line (a non negative - integer), the kind (def or defmacro), a tuple representing - the function and its arity and the documentation, which should - be either a binary or a boolean. 
- - ## Examples - - defmodule MyModule do - Module.add_doc(__MODULE__, __LINE__ + 1, :def, { :version, 0 }, "Manually added docs") - def version, do: 1 - end - - """ - def add_doc(module, line, kind, tuple, doc) when - is_binary(doc) or is_boolean(doc) do - assert_not_compiled!(:add_doc, module) - case kind do - match: :defp - :warn - else: - table = docs_table_for(module) - ETS.insert(table, { tuple, line, kind, doc }) - :ok - end - end - - @doc """ - Checks if a function was defined, regardless if it is - a macro or a private function. Use function_defined?/3 - to assert for an specific type. - - ## Examples - - defmodule Example do - Module.function_defined? __MODULE__, { :version, 0 } #=> false - def version, do: 1 - Module.function_defined? __MODULE__, { :version, 0 } #=> true - end - - """ - def function_defined?(module, tuple) when is_tuple(tuple) do - assert_not_compiled!(:function_defined?, module) - table = function_table_for(module) - ETS.lookup(table, tuple) != [] - end - - @doc """ - Checks if a function was defined and also for its `kind`. - `kind` can be either :def, :defp or :defmacro. - - ## Examples - - defmodule Example do - Module.function_defined? __MODULE__, { :version, 0 }, :defp #=> false - def version, do: 1 - Module.function_defined? __MODULE__, { :version, 0 }, :defp #=> false - end - - """ - def function_defined?(module, tuple, kind) do - List.member? defined_functions(module, kind), tuple - end - - @doc """ - Return all functions defined in the given module. - - ## Examples - - defmodule Example do - def version, do: 1 - Module.defined_functions __MODULE__ #=> [{:version,1}] - end - - """ - def defined_functions(module) do - assert_not_compiled!(:defined_functions, module) - table = function_table_for(module) - lc { tuple, _, _ } in ETS.tab2list(table), do: tuple - end - - @doc """ - Returns all functions defined in te given module according - to its kind. - - ## Examples - - defmodule Example do - def version, do: 1 - Module.defined_functions __MODULE__, :def #=> [{:version,1}] - Module.defined_functions __MODULE__, :defp #=> [] - end - - """ - def defined_functions(module, kind) do - assert_not_compiled!(:defined_functions, module) - table = function_table_for(module) - entry = kind_to_entry(kind) - ETS.lookup_element(table, entry, 2) - end - - @doc """ - Adds a compilation callback hook that is invoked - exactly before the module is compiled. - - This callback is useful when used with `use` as a mechanism - to clean up any internal data in the module before it is compiled. - - ## Examples - - Imagine you are creating a module/library that is meant for - external usage called `MyLib`. It could be defined as: - - defmodule MyLib do - def __using__(target) do - Module.merge_data target, some_data: true - Module.add_compile_callback(target, __MODULE__, :__callback__) - end - - defmacro __callback__(target) do - value = Orddict.get(Module.read_data(target), :some_data, []) - quote do: (def my_lib_value, do: unquote(value)) - end - end - - And a module could use `MyLib` with: - - defmodule App do - use ModuleTest::ToBeUsed - end - - In the example above, `MyLib` defines a data to the target. This data - can be updated throughout the module definition and therefore, the final - value of the data can only be compiled using a compiation callback, - which will read the final value of :some_data and compile to a function. 
- """ - def add_compile_callback(module, target, fun // :__compiling__) do - assert_not_compiled!(:add_compile_callback, module) - new = { target, fun } - table = data_table_for(module) - old = ETS.lookup_element(table, :compile_callbacks, 2) - ETS.insert(table, { :compile_callbacks, [new|old] }) - end - - @doc """ - Adds an Erlang attribute to the given module with the given - key and value. The same attribute can be added more than once. - - ## Examples - - defmodule MyModule do - Module.add_attribute __MODULE__, :custom_threshold_for_lib, 10 - end - - """ - def add_attribute(module, key, value) when is_atom(key) do - assert_not_compiled!(:add_attribute, module) - table = data_table_for(module) - attrs = ETS.lookup_element(table, :attributes, 2) - ETS.insert(table, { :attributes, [{key, value}|attrs] }) - end - - @doc """ - Deletes all attributes that matches the given key. - - ## Examples - - defmodule MyModule do - Module.add_attribute __MODULE__, :custom_threshold_for_lib, 10 - Module.delete_attribute __MODULE__, :custom_threshold_for_lib - end - - """ - def delete_attribute(module, key) when is_atom(key) do - assert_not_compiled!(:delete_attribute, module) - table = data_table_for(module) - attrs = ETS.lookup_element(table, :attributes, 2) - final = lc {k,v} in attrs, k != key, do: {k,v} - ETS.insert(table, { :attributes, final }) - end - - @doc """ - Registers an attribute. This allows a developer to use the data API - but Elixir will register the data as an attribute automatically. - By default, `vsn`, `behavior` and other Erlang attributes are - automatically registered. - - ## Examples - - defmodule MyModule do - Module.register_attribute __MODULE__, :custom_threshold_for_lib - @custom_threshold_for_lib 10 - end - - """ - def register_attribute(module, new) do - assert_not_compiled!(:register_attribute, module) - table = data_table_for(module) - old = ETS.lookup_element(table, :registered_attributes, 2) - ETS.insert(table, { :registered_attributes, [new|old] }) - end + \n #{\x{ff}} + ''' + def func(a, b \\ []), do: :ok @doc false - # Used internally to compile documentation. This function - # is private and must be used only internally. - def compile_doc(module, line, kind, pair) do - case read_data(module, :doc) do - match: nil - # We simply discard nil - match: doc - result = add_doc(module, line, kind, pair, doc) - merge_data(module, doc: nil) - result - end + def __before_compile__(_) do + :ok + end +end + +# Structs +defmodule Second.Module do + s = %Long.Module.Name{name: "Silly"} + %Long.Module.Name{s | height: {192, :cm}} + ".. #{%Long.Module.Name{s | height: {192, :cm}}} .." 
+end + +# Types, pseudo-vars, attributes +defmodule M do + @custom_attr :some_constant + + @before_compile Long.Module.Name + + @typedoc "This is a type" + @type typ :: integer + + @typedoc """ + Another type + """ + @opaque typtyp :: 1..10 + + @spec func(typ, typtyp) :: :ok | :fail + def func(a, b) do + a || b || :ok || :fail + Path.expand("..", __DIR__) + IO.inspect __ENV__ + __NOTAPSEUDOVAR__ = 11 + __MODULE__.func(b, a) end - ## Helpers - - defp kind_to_entry(:def), do: :public - defp kind_to_entry(:defp), do: :private - defp kind_to_entry(:defmacro), do: :macros - - defp to_char_list(list) when is_list(list), do: list - defp to_char_list(bin) when is_binary(bin), do: binary_to_list(bin) - - defp data_table_for(module) do - list_to_atom Erlang.lists.concat([:d, module]) + defmacro m() do + __CALLER__ end +end - defp function_table_for(module) do - list_to_atom Erlang.lists.concat([:f, module]) +# Functions +anon = fn x, y, z -> + fn(a, b, c) -> + &(x + y - z * a / &1 + b + div(&2, c)) end +end - defp docs_table_for(module) do - list_to_atom Erlang.lists.concat([:o, module]) - end +&Set.put(&1, &2) ; & Set.put(&1, &2) ; &( Set.put(&1, &1) ) - defp assert_not_compiled!(fun, module) do - compiled?(module) || - raise ArgumentError, message: - "could not call #{fun} on module #{module} because it was already compiled" - end -end \ No newline at end of file +# Function calls +anon.(1, 2, 3); self; hd([1,2,3]) +Kernel.spawn(fn -> :ok end) +IO.ANSI.black + +# Control flow +if :this do + :that +else + :otherwise +end + +pid = self +receive do + {:EXIT, _} -> :done + {^pid, :_} -> nil + after 100 -> :no_luck +end + +case __ENV__.line do + x when is_integer(x) -> x + x when x in 1..12 -> -x +end + +cond do + false -> "too bad" + 4 > 5 -> "oops" + true -> nil +end + +# Lexical scope modifiers +import Kernel, except: [spawn: 1, +: 2, /: 2, Unless: 2] +alias Long.Module.Name, as: N0men123_and4 +use Bitwise + +4 &&& 5 +2 <<< 3 + +# Protocols +defprotocol Useless do + def func1(this) + def func2(that) +end + +defimpl Useless, for: Atom do +end + +# Exceptions +defmodule NotAnError do + defexception [:message] +end + +raise NotAnError, message: "This is not an error" diff --git a/vendor/pygments/tests/examplefiles/ezhil_primefactors.n b/vendor/pygments/tests/examplefiles/ezhil_primefactors.n new file mode 100644 index 0000000..1339061 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/ezhil_primefactors.n @@ -0,0 +1,152 @@ +# (C) முத்தையா அண்ணாமலை 2013 +# (A) என். 
சொக்கன் +# எழில் தமிழ் நிரலாக்க மொழி உதாரணம் +# Muthu A granted permission for this to be included under the BSD license +# https://bitbucket.org/birkenfeld/pygments-main/pull-requests/443/ezhil-language-lexer-for-pygments/diff + +## Prime Factors Example +## பகா எண் கூறுகளைக் கண்டறியும் உதாரணம் + +## இது நிரல் தரப்பட்ட எண்ணின் பகாஎண் கூறுகளைக் கண்டறியும் + +நிரல்பாகம் பகாஎண்ணா(எண்1) + + ## இது நிரல்பாகம் தரப்பட்ட எண் பகு எண்ணா அல்லது பகா எண்ணா என்று கண்டறிந்து சொல்லும் + ## பகுஎண் என்றால் 0 திரும்பத் தரப்படும் + ## பகாஎண் என்றால் 1 திரும்பத் தரப்படும் + + @(எண்1 < 0) ஆனால் + + ## எதிர்மறை எண்களை நேராக்குதல் + + எண்1 = எண்1 * (-1) + + முடி + + @(எண்1 < 2) ஆனால் + + ## பூஜ்ஜியம், ஒன்று ஆகியவை பகா எண்கள் அல்ல + + பின்கொடு 0 + + முடி + + @(எண்1 == 2) ஆனால் + + ## இரண்டு என்ற எண் ஒரு பகா எண் + + பின்கொடு 1 + + முடி + + மீதம் = எண்1%2 + + @(மீதம் == 0) ஆனால் + + ## இரட்டைப்படை எண், ஆகவே, இது பகா எண் அல்ல + + பின்கொடு 0 + + முடி + + எண்1வர்க்கமூலம் = எண்1^0.5 + + @(எண்2 = 3, எண்2 <= எண்1வர்க்கமூலம், எண்2 = எண்2 + 2) ஆக + + மீதம்1 = எண்1%எண்2 + + @(மீதம்1 == 0) ஆனால் + + ## ஏதேனும் ஓர் எண்ணால் முழுமையாக வகுபட்டுவிட்டது, ஆகவே அது பகா எண் அல்ல + + பின்கொடு 0 + + முடி + + முடி + + பின்கொடு 1 + +முடி + +நிரல்பாகம் பகுத்தெடு(எண்1) + + ## இது எண் தரப்பட்ட எண்ணின் பகா எண் கூறுகளைக் கண்டறிந்து பட்டியல் இடும் + + கூறுகள் = பட்டியல்() + + @(எண்1 < 0) ஆனால் + + ## எதிர்மறை எண்களை நேராக்குதல் + + எண்1 = எண்1 * (-1) + + முடி + + @(எண்1 <= 1) ஆனால் + + ## ஒன்று அல்லது அதற்குக் குறைவான எண்களுக்குப் பகா எண் விகிதம் கண்டறியமுடியாது + + பின்கொடு கூறுகள் + + முடி + + @(பகாஎண்ணா(எண்1) == 1) ஆனால் + + ## தரப்பட்ட எண்ணே பகா எண்ணாக அமைந்துவிட்டால், அதற்கு அதுவே பகாஎண் கூறு ஆகும் + + பின்இணை(கூறுகள், எண்1) + பின்கொடு கூறுகள் + + முடி + + தாற்காலிகஎண் = எண்1 + + எண்2 = 2 + + @(எண்2 <= தாற்காலிகஎண்) வரை + + விடை1 = பகாஎண்ணா(எண்2) + மீண்டும்தொடங்கு = 0 + + @(விடை1 == 1) ஆனால் + + விடை2 = தாற்காலிகஎண்%எண்2 + + @(விடை2 == 0) ஆனால் + + ## பகா எண்ணால் முழுமையாக வகுபட்டுள்ளது, அதனைப் பட்டியலில் இணைக்கிறோம் + + பின்இணை(கூறுகள், எண்2) + தாற்காலிகஎண் = தாற்காலிகஎண்/எண்2 + + ## மீண்டும் இரண்டில் தொடங்கி இதே கணக்கிடுதலைத் தொடரவேண்டும் + + எண்2 = 2 + மீண்டும்தொடங்கு = 1 + + முடி + + முடி + + @(மீண்டும்தொடங்கு == 0) ஆனால் + + ## அடுத்த எண்ணைத் தேர்ந்தெடுத்துக் கணக்கிடுதலைத் தொடரவேண்டும் + + எண்2 = எண்2 + 1 + + முடி + + முடி + + பின்கொடு கூறுகள் + +முடி + +அ = int(உள்ளீடு("உங்களுக்குப் பிடித்த ஓர் எண்ணைத் தாருங்கள்: ")) + +பகாஎண்கூறுகள் = பட்டியல்() + +பகாஎண்கூறுகள் = பகுத்தெடு(அ) + +பதிப்பி "நீங்கள் தந்த எண்ணின் பகா எண் கூறுகள் இவை: ", பகாஎண்கூறுகள் diff --git a/vendor/pygments/tests/examplefiles/garcia-wachs.kk b/vendor/pygments/tests/examplefiles/garcia-wachs.kk index f766e05..91a01fb 100644 --- a/vendor/pygments/tests/examplefiles/garcia-wachs.kk +++ b/vendor/pygments/tests/examplefiles/garcia-wachs.kk @@ -1,9 +1,25 @@ -/* This is an example in the Koka Language of the Garcia-Wachs algorithm */ -module garcia-wachs +// Koka language test module -public fun main() -{ - test().print +// This module implements the GarsiaWachs algorithm. +// It is an adaptation of the algorithm in ML as described by JeanChristophe Filli�tre: +// in ''A functional implementation of the GarsiaWachs algorithm. (functional pearl). ML workshop 2008, pages 91--96''. +// See: http://www.lri.fr/~filliatr/publis/gwWml08.pdf +// +// The algorithm is interesting since it uses mutable references shared between a list and tree but the +// side effects are not observable from outside. Koka automatically infers that the final algorithm is pure. 
+// Note: due to a current limitation in the divergence analysis, koka cannot yet infer that mutually recursive +// definitions in "insert" and "extract" are terminating and the final algorithm still has a divergence effect. +// However, koka does infer that no other effect (i.e. an exception due to a partial match) can occur. +module garcsiaWachs + +import test = qualified std/flags + +# pre processor test + +public function main() { + wlist = Cons1(('a',3), [('b',2),('c',1),('d',4),('e',5)]) + tree = wlist.garsiaWachs() + tree.show.println() } //---------------------------------------------------- @@ -14,10 +30,9 @@ public type tree { con Node(left :tree, right :tree) } -fun show( t : tree ) : string -{ +function show( t : tree ) : string { match(t) { - Leaf(c) -> Core.show(c) + Leaf(c) -> core/show(c) Node(l,r) -> "Node(" + show(l) + "," + show(r) + ")" } } @@ -30,23 +45,21 @@ public type list1 { Cons1( head : a, tail : list ) } - -fun map( xs, f ) { +function map( xs, f ) { val Cons1(y,ys) = xs - return Cons1(f(y), Core.map(ys,f)) + return Cons1(f(y), core/map(ys,f)) } -fun zip( xs :list1, ys :list1 ) : list1<(a,b)> { +function zip( xs :list1, ys :list1 ) : list1<(a,b)> { Cons1( (xs.head, ys.head), zip(xs.tail, ys.tail)) } - //---------------------------------------------------- // Phase 1 //---------------------------------------------------- -fun insert( after : list<(tree,int)>, t : (tree,int), before : list<(tree,int)> ) : div tree +function insert( after : list<(tree,int)>, t : (tree,int), before : list<(tree,int)> ) : div tree { match(before) { Nil -> extract( [], Cons1(t,after) ) @@ -60,7 +73,7 @@ fun insert( after : list<(tree,int)>, t : (tree,int), before : list<(tree< } } -fun extract( before : list<(tree,int)>, after : list1<(tree,int)> ) : div tree +function extract( before : list<(tree,int)>, after : list1<(tree,int)> ) : div tree { val Cons1((t1,w1) as x, xs ) = after match(xs) { @@ -75,25 +88,24 @@ fun extract( before : list<(tree,int)>, after : list1<(tree,int)> ) : div } } - - -fun balance( xs : list1<(tree,int)> ) : div tree -{ +function balance( xs : list1<(tree,int)> ) : div tree { extract( [], xs ) } -fun mark( depth :int, t :tree<(a,ref)> ) : > () -{ +//---------------------------------------------------- +// Phase 2 +//---------------------------------------------------- + +function mark( depth :int, t :tree<(a,ref)> ) : > () { match(t) { Leaf((_,d)) -> d := depth Node(l,r) -> { mark(depth+1,l); mark(depth+1,r) } } } - -fun build( depth :int, xs :list1<(a,ref)> ) : ,div> (tree,list<(a,ref)>) +function build( depth :int, xs :list1<(a,ref)> ) : ,div> (tree,list<(a,ref)>) { - if (!xs.head.snd == depth) return (Leaf(xs.head.fst), xs.tail) + if (!(xs.head.snd) == depth) return (Leaf(xs.head.fst), xs.tail) l = build(depth+1, xs) match(l.snd) { @@ -105,13 +117,11 @@ fun build( depth :int, xs :list1<(a,ref)> ) : ,div> (tree,list } } -public fun test() { - wlist = Cons1(('a',3), [('b',2),('c',1),('d',4),('e',5)]) - tree = wlist.garciawachs() - tree.show() -} +//---------------------------------------------------- +// Main +//---------------------------------------------------- -public fun garciawachs( xs : list1<(a,int)> ) : div tree +public function garsiaWachs( xs : list1<(a,int)> ) : div tree { refs = xs.map(fst).map( fun(x) { (x, ref(0)) } ) wleafs = zip( refs.map(Leaf), xs.map(snd) ) diff --git a/vendor/pygments/tests/examplefiles/grammar-test.p6 b/vendor/pygments/tests/examplefiles/grammar-test.p6 new file mode 100644 index 0000000..28107f3 --- /dev/null +++ 
b/vendor/pygments/tests/examplefiles/grammar-test.p6 @@ -0,0 +1,22 @@ +token pod_formatting_code { + $=<[A..Z]> + '<' { $*POD_IN_FORMATTINGCODE := 1 } + $=[ '> ]+ + '>' { $*POD_IN_FORMATTINGCODE := 0 } +} + +token pod_string { + + +} + +token something:sym«<» { + +} + +token name { + +} + +token comment:sym<#> { + '#' {} \N* +} diff --git a/vendor/pygments/tests/examplefiles/hash_syntax.rb b/vendor/pygments/tests/examplefiles/hash_syntax.rb new file mode 100644 index 0000000..35b2772 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hash_syntax.rb @@ -0,0 +1,5 @@ +{ :old_syntax => 'ok' } +{ 'stings as key' => 'should be ok' } +{ new_syntax: 'broken until now' } +{ withoutunderscore: 'should be ok' } +{ _underscoreinfront: 'might be ok, if I understand the pygments code correct' } diff --git a/vendor/pygments/tests/examplefiles/hello.at b/vendor/pygments/tests/examplefiles/hello.at new file mode 100644 index 0000000..23af2f2 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hello.at @@ -0,0 +1,6 @@ +def me := object: { + def name := "Kevin"; + def sayHello(peerName) { + system.println(peerName + " says hello!"); + }; +}; diff --git a/vendor/pygments/tests/examplefiles/hello.golo b/vendor/pygments/tests/examplefiles/hello.golo new file mode 100644 index 0000000..7e8ca21 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hello.golo @@ -0,0 +1,5 @@ +module hello.World + +function main = |args| { + println("Hello world!") +} diff --git a/vendor/pygments/tests/examplefiles/hello.lsl b/vendor/pygments/tests/examplefiles/hello.lsl new file mode 100644 index 0000000..61697e7 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hello.lsl @@ -0,0 +1,12 @@ +default +{ + state_entry() + { + llSay(0, "Hello, Avatar!"); + } + + touch_start(integer total_number) + { + llSay(0, "Touched."); + } +} diff --git a/vendor/pygments/tests/examplefiles/hexdump_debugexe b/vendor/pygments/tests/examplefiles/hexdump_debugexe new file mode 100644 index 0000000..31fefdb --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hexdump_debugexe @@ -0,0 +1,309 @@ +0000:0000 2F 2F 20 43 72 65 61 74-65 64 20 62 79 20 4C 69 // Created by Li +0000:0010 6F 6E 65 6C 6C 6F 20 4C-75 6E 65 73 75 20 61 6E onello Lunesu an +0000:0020 64 20 70 6C 61 63 65 64-20 69 6E 20 74 68 65 20 d placed in the +0000:0030 70 75 62 6C 69 63 20 64-6F 6D 61 69 6E 2E 0A 2F public domain.◙/ +0000:0040 2F 20 54 68 69 73 20 66-69 6C 65 20 68 61 73 20 / This file has +0000:0050 62 65 65 6E 20 6D 6F 64-69 66 69 65 64 20 66 72 been modified fr +0000:0060 6F 6D 20 69 74 73 20 6F-72 69 67 69 6E 61 6C 20 om its original +0000:0070 76 65 72 73 69 6F 6E 2E-0A 2F 2F 20 49 74 20 68 version.◙// It h +0000:0080 61 73 20 62 65 65 6E 20-66 6F 72 6D 61 74 74 65 as been formatte +0000:0090 64 20 74 6F 20 66 69 74-20 79 6F 75 72 20 73 63 d to fit your sc +0000:00A0 72 65 65 6E 2E 0A 6D 6F-64 75 6C 65 20 70 68 6F reen.◙module pho +0000:00B0 6E 65 6E 6F 3B 20 20 20-20 20 2F 2F 20 6F 70 74 neno; // opt +0000:00C0 69 6F 6E 61 6C 0A 69 6D-70 6F 72 74 20 73 74 64 ional◙import std +0000:00D0 2E 73 74 64 69 6F 3B 20-20 20 2F 2F 20 77 72 69 .stdio; // wri +0000:00E0 74 65 66 6C 6E 20 20 20-20 20 0A 69 6D 70 6F 72 tefln ◙impor +0000:00F0 74 20 73 74 64 2E 63 74-79 70 65 3B 20 20 20 2F t std.ctype; / +0000:0100 2F 20 69 73 64 69 67 69-74 20 20 20 20 20 0A 69 / isdigit ◙i +0000:0110 6D 70 6F 72 74 20 73 74-64 2E 73 74 72 65 61 6D mport std.stream +0000:0120 3B 20 20 2F 2F 20 42 75-66 66 65 72 65 64 46 69 ; // BufferedFi +0000:0130 6C 65 0A 0A 2F 2F 20 4A-75 73 74 20 66 
6F 72 20 le◙◙// Just for +0000:0140 72 65 61 64 61 62 69 6C-69 74 79 20 28 69 6D 61 readability (ima +0000:0150 67 69 6E 65 20 63 68 61-72 5B 5D 5B 5D 5B 63 68 gine char[][][ch +0000:0160 61 72 5B 5D 5D 29 20 20-20 20 0A 61 6C 69 61 73 ar[]]) ◙alias +0000:0170 20 63 68 61 72 5B 5D 20-73 74 72 69 6E 67 3B 0A char[] string;◙ +0000:0180 61 6C 69 61 73 20 73 74-72 69 6E 67 5B 5D 20 73 alias string[] s +0000:0190 74 72 69 6E 67 61 72 72-61 79 3B 0A 0A 2F 2F 2F tringarray;◙◙/// +0000:01A0 20 53 74 72 69 70 73 20-6E 6F 6E 2D 64 69 67 69 Strips non-digi +0000:01B0 74 20 63 68 61 72 61 63-74 65 72 73 20 66 72 6F t characters fro +0000:01C0 6D 20 74 68 65 20 73 74-72 69 6E 67 20 28 43 4F m the string (CO +0000:01D0 57 29 0A 73 74 72 69 6E-67 20 73 74 72 69 70 4E W)◙string stripN +0000:01E0 6F 6E 44 69 67 69 74 28-20 69 6E 20 73 74 72 69 onDigit( in stri +0000:01F0 6E 67 20 6C 69 6E 65 20-29 20 0A 7B 0A 20 20 20 ng line ) ◙{◙ +0000:0200 20 73 74 72 69 6E 67 20-72 65 74 3B 0A 20 20 20 string ret;◙ +0000:0210 20 66 6F 72 65 61 63 68-28 75 69 6E 74 20 69 2C foreach(uint i, +0000:0220 20 63 3B 20 6C 69 6E 65-29 20 7B 0A 20 20 20 20 c; line) {◙ +0000:0230 20 20 20 20 2F 2F 20 45-72 72 6F 72 3A 20 73 74 // Error: st +0000:0240 64 2E 63 74 79 70 65 2E-69 73 64 69 67 69 74 20 d.ctype.isdigit +0000:0250 61 74 20 43 3A 5C 64 6D-64 5C 73 72 63 5C 70 68 at C:\dmd\src\ph +0000:0260 6F 62 6F 73 5C 73 74 64-5C 63 74 79 70 65 2E 64 obos\std\ctype.d +0000:0270 28 33 37 29 20 0A 20 20-20 20 20 20 20 20 2F 2F (37) ◙ // +0000:0280 20 63 6F 6E 66 6C 69 63-74 73 20 77 69 74 68 20 conflicts with +0000:0290 73 74 64 2E 73 74 72 65-61 6D 2E 69 73 64 69 67 std.stream.isdig +0000:02A0 69 74 20 61 74 20 43 3A-5C 64 6D 64 5C 73 72 63 it at C:\dmd\src +0000:02B0 5C 70 68 6F 62 6F 73 5C-73 74 64 5C 73 74 72 65 \phobos\std\stre +0000:02C0 61 6D 2E 64 28 32 39 32-34 29 0A 20 20 20 20 20 am.d(2924)◙ +0000:02D0 20 20 20 69 66 20 28 21-73 74 64 2E 63 74 79 70 if (!std.ctyp +0000:02E0 65 2E 69 73 64 69 67 69-74 28 63 29 29 20 7B 0A e.isdigit(c)) {◙ +0000:02F0 20 20 20 20 20 20 20 20-20 20 20 20 69 66 20 28 if ( +0000:0300 21 72 65 74 29 0A 20 20-20 20 20 20 20 20 20 20 !ret)◙ +0000:0310 20 20 20 20 20 20 72 65-74 20 3D 20 6C 69 6E 65 ret = line +0000:0320 5B 30 2E 2E 69 5D 3B 20-20 20 20 0A 20 20 20 20 [0..i]; ◙ +0000:0330 20 20 20 20 7D 20 20 20-20 0A 20 20 20 20 20 20 } ◙ +0000:0340 20 20 65 6C 73 65 20 69-66 20 28 72 65 74 29 0A else if (ret)◙ +0000:0350 20 20 20 20 20 20 20 20-20 20 20 20 72 65 74 20 ret +0000:0360 7E 3D 20 63 3B 20 20 20-20 0A 20 20 20 20 7D 20 ~= c; ◙ } +0000:0370 20 20 20 0A 20 20 20 20-72 65 74 75 72 6E 20 72 ◙ return r +0000:0380 65 74 3F 72 65 74 3A 6C-69 6E 65 3B 0A 7D 0A 0A et?ret:line;◙}◙◙ +0000:0390 75 6E 69 74 74 65 73 74-20 7B 0A 20 20 20 20 61 unittest {◙ a +0000:03A0 73 73 65 72 74 28 20 73-74 72 69 70 4E 6F 6E 44 ssert( stripNonD +0000:03B0 69 67 69 74 28 22 61 73-64 66 22 29 20 3D 3D 20 igit("asdf") == +0000:03C0 22 22 20 20 29 3B 0A 20-20 20 20 61 73 73 65 72 "" );◙ asser +0000:03D0 74 28 20 73 74 72 69 70-4E 6F 6E 44 69 67 69 74 t( stripNonDigit +0000:03E0 28 22 5C 27 31 33 2D 3D-32 20 34 6B 6F 70 22 29 ("\'13-=2 4kop") +0000:03F0 20 3D 3D 20 20 22 31 33-32 34 22 20 20 29 3B 0A == "1324" );◙ +0000:0400 7D 0A 0A 2F 2F 2F 20 43-6F 6E 76 65 72 74 73 20 }◙◙/// Converts +0000:0410 61 20 77 6F 72 64 20 69-6E 74 6F 20 61 20 6E 75 a word into a nu +0000:0420 6D 62 65 72 2C 20 69 67-6E 6F 72 69 6E 67 20 61 mber, ignoring a +0000:0430 6C 6C 20 6E 6F 6E 20 61-6C 70 68 61 20 63 68 61 ll non alpha cha 
+0000:0440 72 61 63 74 65 72 73 20-20 0A 73 74 72 69 6E 67 racters ◙string +0000:0450 20 77 6F 72 64 54 6F 4E-75 6D 28 20 69 6E 20 73 wordToNum( in s +0000:0460 74 72 69 6E 67 20 77 6F-72 64 20 29 0A 7B 0A 2F tring word )◙{◙/ +0000:0470 2F 20 74 72 61 6E 73 6C-61 74 69 6F 6E 20 74 61 / translation ta +0000:0480 62 6C 65 20 66 6F 72 20-74 68 65 20 74 61 73 6B ble for the task +0000:0490 20 61 74 20 68 61 6E 64-0A 63 6F 6E 73 74 20 63 at hand◙const c +0000:04A0 68 61 72 5B 32 35 36 5D-20 54 52 41 4E 53 4C 41 har[256] TRANSLA +0000:04B0 54 45 20 3D 20 20 20 20-0A 20 20 20 20 22 20 20 TE = ◙ " +0000:04C0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:04D0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 22 20 " +0000:04E0 20 2F 2F 20 30 20 20 20-0A 20 20 20 20 22 20 20 // 0 ◙ " +0000:04F0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 30 31 01 +0000:0500 32 33 34 35 36 37 38 39-20 20 20 20 20 20 22 20 23456789 " +0000:0510 20 2F 2F 20 33 32 20 20-20 20 20 0A 20 20 20 20 // 32 ◙ +0000:0520 22 20 35 37 36 33 30 34-39 39 36 31 37 38 35 31 " 57630499617851 +0000:0530 38 38 31 32 33 34 37 36-32 32 33 39 20 20 20 20 881234762239 +0000:0540 20 22 20 20 2F 2F 20 36-34 20 20 20 0A 20 20 20 " // 64 ◙ +0000:0550 20 22 20 35 37 36 33 30-34 39 39 36 31 37 38 35 " 5763049961785 +0000:0560 31 38 38 31 32 33 34 37-36 32 32 33 39 20 20 20 1881234762239 +0000:0570 20 20 22 0A 20 20 20 20-22 20 20 20 20 20 20 20 "◙ " +0000:0580 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0590 20 20 20 20 20 20 20 20-20 22 0A 20 20 20 20 22 "◙ " +0000:05A0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:05B0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:05C0 22 0A 20 20 20 20 22 20-20 20 20 20 20 20 20 20 "◙ " +0000:05D0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:05E0 20 20 20 20 20 20 20 22-20 20 20 20 0A 20 20 20 " ◙ +0000:05F0 20 22 20 20 20 20 20 20-20 20 20 20 20 20 20 20 " +0000:0600 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0610 20 20 22 3B 0A 20 20 20-20 73 74 72 69 6E 67 20 ";◙ string +0000:0620 72 65 74 3B 0A 20 20 20-20 66 6F 72 65 61 63 68 ret;◙ foreach +0000:0630 28 63 3B 20 63 61 73 74-28 75 62 79 74 65 5B 5D (c; cast(ubyte[] +0000:0640 29 77 6F 72 64 29 0A 20-20 20 20 20 20 20 20 69 )word)◙ i +0000:0650 66 20 28 54 52 41 4E 53-4C 41 54 45 5B 63 5D 20 f (TRANSLATE[c] +0000:0660 21 3D 20 27 20 27 29 0A-20 20 20 20 20 20 20 20 != ' ')◙ +0000:0670 20 20 20 20 72 65 74 20-7E 3D 20 54 52 41 4E 53 ret ~= TRANS +0000:0680 4C 41 54 45 5B 63 5D 3B-0A 20 20 20 20 72 65 74 LATE[c];◙ ret +0000:0690 75 72 6E 20 72 65 74 3B-0A 7D 0A 0A 75 6E 69 74 urn ret;◙}◙◙unit +0000:06A0 74 65 73 74 20 7B 0A 20-2F 2F 20 54 65 73 74 20 test {◙ // Test +0000:06B0 77 6F 72 64 54 6F 4E 75-6D 20 75 73 69 6E 67 20 wordToNum using +0000:06C0 74 68 65 20 74 61 62 6C-65 20 66 72 6F 6D 20 74 the table from t +0000:06D0 68 65 20 74 61 73 6B 20-64 65 73 63 72 69 70 74 he task descript +0000:06E0 69 6F 6E 2E 0A 20 61 73-73 65 72 74 28 20 22 30 ion.◙ assert( "0 +0000:06F0 31 31 31 32 32 32 33 33-33 34 34 35 35 36 36 36 1112223334455666 +0000:0700 37 37 37 38 38 38 39 39-39 22 20 3D 3D 0A 20 20 777888999" ==◙ +0000:0710 20 77 6F 72 64 54 6F 4E-75 6D 28 22 45 20 7C 20 wordToNum("E | +0000:0720 4A 20 4E 20 51 20 7C 20-52 20 57 20 58 20 7C 20 J N Q | R W X | +0000:0730 44 20 53 20 59 20 7C 20-46 20 54 20 7C 20 41 20 D S Y | F T | A +0000:0740 4D 20 7C 20 43 20 49 20-56 20 7C 20 42 20 4B 20 M | C I V | B K +0000:0750 55 20 7C 20 4C 20 4F 20-50 20 7C 20 47 20 48 20 U | L O P | G H +0000:0760 5A 22 29 29 3B 0A 20 61-73 73 
65 72 74 28 20 22 Z"));◙ assert( " +0000:0770 30 31 31 31 32 32 32 33-33 33 34 34 35 35 36 36 0111222333445566 +0000:0780 36 37 37 37 38 38 38 39-39 39 22 20 3D 3D 20 0A 6777888999" == ◙ +0000:0790 20 20 20 77 6F 72 64 54-6F 4E 75 6D 28 22 65 20 wordToNum("e +0000:07A0 7C 20 6A 20 6E 20 71 20-7C 20 72 20 77 20 78 20 | j n q | r w x +0000:07B0 7C 20 64 20 73 20 79 20-7C 20 66 20 74 20 7C 20 | d s y | f t | +0000:07C0 61 20 6D 20 7C 20 63 20-69 20 76 20 7C 20 62 20 a m | c i v | b +0000:07D0 6B 20 75 20 7C 20 6C 20-6F 20 70 20 7C 20 67 20 k u | l o p | g +0000:07E0 68 20 7A 22 29 29 3B 0A-20 61 73 73 65 72 74 28 h z"));◙ assert( +0000:07F0 20 22 30 31 32 33 34 35-36 37 38 39 22 20 3D 3D "0123456789" == +0000:0800 20 0A 20 20 20 77 6F 72-64 54 6F 4E 75 6D 28 22 ◙ wordToNum(" +0000:0810 30 20 7C 20 20 20 31 20-20 20 7C 20 20 20 32 20 0 | 1 | 2 +0000:0820 20 20 7C 20 20 20 33 20-20 20 7C 20 20 34 20 20 | 3 | 4 +0000:0830 7C 20 20 35 20 20 7C 20-20 20 36 20 20 20 7C 20 | 5 | 6 | +0000:0840 20 20 37 20 20 20 7C 20-20 20 38 20 20 20 7C 20 7 | 8 | +0000:0850 20 20 39 22 29 29 3B 0A-7D 0A 0A 76 6F 69 64 20 9"));◙}◙◙void +0000:0860 6D 61 69 6E 28 20 73 74-72 69 6E 67 5B 5D 20 61 main( string[] a +0000:0870 72 67 73 20 29 0A 7B 0A-20 20 20 20 2F 2F 20 54 rgs )◙{◙ // T +0000:0880 68 69 73 20 61 73 73 6F-63 69 61 74 69 76 65 20 his associative +0000:0890 61 72 72 61 79 20 6D 61-70 73 20 61 20 6E 75 6D array maps a num +0000:08A0 62 65 72 20 74 6F 20 61-6E 20 61 72 72 61 79 20 ber to an array +0000:08B0 6F 66 20 77 6F 72 64 73-2E 20 20 20 20 0A 20 20 of words. ◙ +0000:08C0 20 20 73 74 72 69 6E 67-61 72 72 61 79 5B 73 74 stringarray[st +0000:08D0 72 69 6E 67 5D 20 20 20-20 6E 75 6D 32 77 6F 72 ring] num2wor +0000:08E0 64 73 3B 0A 0A 20 20 20-20 66 6F 72 65 61 63 68 ds;◙◙ foreach +0000:08F0 28 73 74 72 69 6E 67 20-77 6F 72 64 3B 20 6E 65 (string word; ne +0000:0900 77 20 42 75 66 66 65 72-65 64 46 69 6C 65 28 22 w BufferedFile(" +0000:0910 64 69 63 74 69 6F 6E 61-72 79 2E 74 78 74 22 20 dictionary.txt" +0000:0920 29 20 29 0A 20 20 20 20-20 20 20 20 6E 75 6D 32 ) )◙ num2 +0000:0930 77 6F 72 64 73 5B 20 77-6F 72 64 54 6F 4E 75 6D words[ wordToNum +0000:0940 28 77 6F 72 64 29 20 5D-20 7E 3D 20 77 6F 72 64 (word) ] ~= word +0000:0950 2E 64 75 70 3B 20 20 20-20 20 20 20 20 2F 2F 20 .dup; // +0000:0960 6D 75 73 74 20 64 75 70-0A 0A 20 20 20 20 2F 2F must dup◙◙ // +0000:0970 2F 20 46 69 6E 64 73 20-61 6C 6C 20 61 6C 74 65 / Finds all alte +0000:0980 72 6E 61 74 69 76 65 73-20 66 6F 72 20 74 68 65 rnatives for the +0000:0990 20 67 69 76 65 6E 20 6E-75 6D 62 65 72 0A 20 20 given number◙ +0000:09A0 20 20 2F 2F 2F 20 28 73-68 6F 75 6C 64 20 68 61 /// (should ha +0000:09B0 76 65 20 62 65 65 6E 20-73 74 72 69 70 70 65 64 ve been stripped +0000:09C0 20 66 72 6F 6D 20 6E 6F-6E 2D 64 69 67 69 74 20 from non-digit +0000:09D0 63 68 61 72 61 63 74 65-72 73 29 0A 20 20 20 20 characters)◙ +0000:09E0 73 74 72 69 6E 67 61 72-72 61 79 20 5F 46 69 6E stringarray _Fin +0000:09F0 64 57 6F 72 64 73 28 20-73 74 72 69 6E 67 20 6E dWords( string n +0000:0A00 75 6D 62 65 72 73 2C 20-62 6F 6F 6C 20 64 69 67 umbers, bool dig +0000:0A10 69 74 6F 6B 20 29 0A 20-20 20 20 69 6E 20 7B 0A itok )◙ in {◙ +0000:0A20 20 20 20 20 20 20 20 20-61 73 73 65 72 74 28 6E assert(n +0000:0A30 75 6D 62 65 72 73 2E 6C-65 6E 67 74 68 20 3E 20 umbers.length > +0000:0A40 20 30 29 3B 20 20 20 20-0A 20 20 20 20 7D 20 20 0); ◙ } +0000:0A50 20 20 0A 20 20 20 20 6F-75 74 28 72 65 73 75 6C ◙ out(resul +0000:0A60 74 29 20 7B 0A 20 20 20-20 20 20 20 20 66 6F 72 t) {◙ for 
+0000:0A70 65 61 63 68 20 28 61 3B-20 72 65 73 75 6C 74 29 each (a; result) +0000:0A80 0A 20 20 20 20 20 20 20-20 20 20 20 20 61 73 73 ◙ ass +0000:0A90 65 72 74 28 20 77 6F 72-64 54 6F 4E 75 6D 28 61 ert( wordToNum(a +0000:0AA0 29 20 3D 3D 20 6E 75 6D-62 65 72 73 20 29 3B 0A ) == numbers );◙ +0000:0AB0 20 20 20 20 7D 20 20 20-20 0A 20 20 20 20 62 6F } ◙ bo +0000:0AC0 64 79 20 7B 0A 20 20 20-20 20 20 20 20 73 74 72 dy {◙ str +0000:0AD0 69 6E 67 61 72 72 61 79-20 72 65 74 3B 0A 20 20 ingarray ret;◙ +0000:0AE0 20 20 20 20 20 20 62 6F-6F 6C 20 66 6F 75 6E 64 bool found +0000:0AF0 77 6F 72 64 20 3D 20 66-61 6C 73 65 3B 0A 20 20 word = false;◙ +0000:0B00 20 20 20 20 20 20 66 6F-72 20 28 75 69 6E 74 20 for (uint +0000:0B10 74 3D 31 3B 20 74 3C 3D-6E 75 6D 62 65 72 73 2E t=1; t<=numbers. +0000:0B20 6C 65 6E 67 74 68 3B 20-2B 2B 74 29 20 7B 0A 20 length; ++t) {◙ +0000:0B30 20 20 20 20 20 20 20 20-20 20 20 61 75 74 6F 20 auto +0000:0B40 61 6C 74 65 72 6E 61 74-69 76 65 73 20 3D 20 6E alternatives = n +0000:0B50 75 6D 62 65 72 73 5B 30-2E 2E 74 5D 20 69 6E 20 umbers[0..t] in +0000:0B60 6E 75 6D 32 77 6F 72 64-73 3B 0A 20 20 20 20 20 num2words;◙ +0000:0B70 20 20 20 20 20 20 20 69-66 20 28 21 61 6C 74 65 if (!alte +0000:0B80 72 6E 61 74 69 76 65 73-29 0A 20 20 20 20 20 20 rnatives)◙ +0000:0B90 20 20 20 20 20 20 20 20-20 20 63 6F 6E 74 69 6E contin +0000:0BA0 75 65 3B 0A 20 20 20 20-20 20 20 20 20 20 20 20 ue;◙ +0000:0BB0 66 6F 75 6E 64 77 6F 72-64 20 3D 20 74 72 75 65 foundword = true +0000:0BC0 3B 0A 20 20 20 20 20 20-20 20 20 20 20 20 69 66 ;◙ if +0000:0BD0 20 28 6E 75 6D 62 65 72-73 2E 6C 65 6E 67 74 68 (numbers.length +0000:0BE0 20 3E 20 20 74 29 20 7B-0A 20 20 20 20 20 20 20 > t) {◙ +0000:0BF0 20 20 20 20 20 20 20 20-20 2F 2F 20 43 6F 6D 62 // Comb +0000:0C00 69 6E 65 20 61 6C 6C 20-63 75 72 72 65 6E 74 20 ine all current +0000:0C10 61 6C 74 65 72 6E 61 74-69 76 65 73 20 77 69 74 alternatives wit +0000:0C20 68 20 61 6C 6C 20 61 6C-74 65 72 6E 61 74 69 76 h all alternativ +0000:0C30 65 73 20 20 20 20 20 0A-20 20 20 20 20 20 20 20 es ◙ +0000:0C40 20 20 20 20 20 20 20 20-2F 2F 20 6F 66 20 74 68 // of th +0000:0C50 65 20 72 65 73 74 20 28-6E 65 78 74 20 70 69 65 e rest (next pie +0000:0C60 63 65 20 63 61 6E 20 73-74 61 72 74 20 77 69 74 ce can start wit +0000:0C70 68 20 61 20 64 69 67 69-74 29 20 20 20 20 20 20 h a digit) +0000:0C80 20 20 20 20 20 20 20 20-0A 20 20 20 20 20 20 20 ◙ +0000:0C90 20 20 20 20 20 20 20 20-20 66 6F 72 65 61 63 68 foreach +0000:0CA0 20 28 61 32 3B 20 5F 46-69 6E 64 57 6F 72 64 73 (a2; _FindWords +0000:0CB0 28 20 6E 75 6D 62 65 72-73 5B 74 2E 2E 24 5D 2C ( numbers[t..$], +0000:0CC0 20 74 72 75 65 20 20 20-20 20 29 20 29 0A 20 20 true ) )◙ +0000:0CD0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0CE0 20 20 66 6F 72 65 61 63-68 28 61 31 3B 20 2A 61 foreach(a1; *a +0000:0CF0 6C 74 65 72 6E 61 74 69-76 65 73 29 0A 20 20 20 lternatives)◙ +0000:0D00 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0D10 20 20 20 20 72 65 74 20-7E 3D 20 61 31 20 7E 20 ret ~= a1 ~ +0000:0D20 22 20 22 20 7E 20 61 32-3B 0A 20 20 20 20 20 20 " " ~ a2;◙ +0000:0D30 20 20 20 20 20 20 7D 0A-20 20 20 20 20 20 20 20 }◙ +0000:0D40 20 20 20 20 65 6C 73 65-20 20 20 20 0A 20 20 20 else ◙ +0000:0D50 20 20 20 20 20 20 20 20-20 20 20 20 20 72 65 74 ret +0000:0D60 20 7E 3D 20 2A 61 6C 74-65 72 6E 61 74 69 76 65 ~= *alternative +0000:0D70 73 3B 20 20 20 20 2F 2F-20 61 70 70 65 6E 64 20 s; // append +0000:0D80 74 68 65 73 65 20 61 6C-74 65 72 6E 61 74 69 76 these alternativ +0000:0D90 65 73 0A 20 20 20 20 
20-20 20 20 7D 0A 20 20 20 es◙ }◙ +0000:0DA0 20 20 20 20 20 2F 2F 20-54 72 79 20 74 6F 20 6B // Try to k +0000:0DB0 65 65 70 20 31 20 64 69-67 69 74 2C 20 6F 6E 6C eep 1 digit, onl +0000:0DC0 79 20 69 66 20 77 65 27-72 65 20 61 6C 6C 6F 77 y if we're allow +0000:0DD0 65 64 20 61 6E 64 20 6E-6F 20 6F 74 68 65 72 0A ed and no other◙ +0000:0DE0 20 20 20 20 20 20 20 20-2F 2F 20 61 6C 74 65 72 // alter +0000:0DF0 6E 61 74 69 76 65 73 20-77 65 72 65 20 66 6F 75 natives were fou +0000:0E00 6E 64 0A 20 20 20 20 20-20 20 20 2F 2F 20 54 65 nd◙ // Te +0000:0E10 73 74 69 6E 67 20 22 72-65 74 2E 6C 65 6E 67 74 sting "ret.lengt +0000:0E20 68 22 20 6D 61 6B 65 73-20 6D 6F 72 65 20 73 65 h" makes more se +0000:0E30 6E 73 65 20 74 68 61 6E-20 74 65 73 74 69 6E 67 nse than testing +0000:0E40 20 22 66 6F 75 6E 64 77-6F 72 64 22 2C 0A 20 20 "foundword",◙ +0000:0E50 20 20 20 20 20 20 2F 2F-20 62 75 74 20 74 68 65 // but the +0000:0E60 20 6F 74 68 65 72 20 69-6D 70 6C 65 6D 65 6E 74 other implement +0000:0E70 61 74 69 6F 6E 73 20 73-65 65 6D 20 74 6F 20 64 ations seem to d +0000:0E80 6F 20 6A 75 73 74 20 74-68 69 73 2E 0A 20 20 20 o just this.◙ +0000:0E90 20 20 20 20 20 69 66 20-28 64 69 67 69 74 6F 6B if (digitok +0000:0EA0 20 26 26 20 21 66 6F 75-6E 64 77 6F 72 64 29 20 && !foundword) +0000:0EB0 7B 20 2F 2F 72 65 74 2E-6C 65 6E 67 74 68 20 3D { //ret.length = +0000:0EC0 3D 20 30 20 20 0A 20 20-20 20 20 20 20 20 20 20 = 0 ◙ +0000:0ED0 20 20 69 66 28 6E 75 6D-62 65 72 73 2E 6C 65 6E if(numbers.len +0000:0EE0 67 74 68 20 3E 20 20 31-29 20 7B 0A 20 20 20 20 gth > 1) {◙ +0000:0EF0 20 20 20 20 20 20 20 20-20 20 20 20 2F 2F 20 43 // C +0000:0F00 6F 6D 62 69 6E 65 20 31-20 64 69 67 69 74 20 77 ombine 1 digit w +0000:0F10 69 74 68 20 61 6C 6C 20-61 6C 74 65 6E 61 74 69 ith all altenati +0000:0F20 76 65 73 20 66 72 6F 6D-20 74 68 65 20 72 65 73 ves from the res +0000:0F30 74 20 20 20 20 0A 20 20-20 20 20 20 20 20 20 20 t ◙ +0000:0F40 20 20 20 20 20 20 2F 2F-20 28 6E 65 78 74 20 70 // (next p +0000:0F50 69 65 63 65 20 63 61 6E-20 6E 6F 74 20 73 74 61 iece can not sta +0000:0F60 72 74 20 77 69 74 68 20-61 20 64 69 67 69 74 29 rt with a digit) +0000:0F70 20 20 20 20 20 20 20 20-20 20 0A 20 20 20 20 20 ◙ +0000:0F80 20 20 20 20 20 20 20 20-20 20 20 66 6F 72 65 61 forea +0000:0F90 63 68 20 28 61 3B 20 5F-46 69 6E 64 57 6F 72 64 ch (a; _FindWord +0000:0FA0 73 28 20 6E 75 6D 62 65-72 73 5B 31 2E 2E 24 5D s( numbers[1..$] +0000:0FB0 2C 20 66 61 6C 73 65 20-29 20 29 0A 20 20 20 20 , false ) )◙ +0000:0FC0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 +0000:0FD0 72 65 74 20 7E 3D 20 6E-75 6D 62 65 72 73 5B 30 ret ~= numbers[0 +0000:0FE0 2E 2E 31 5D 20 7E 20 22-20 22 20 7E 20 61 3B 0A ..1] ~ " " ~ a;◙ +0000:0FF0 20 20 20 20 20 20 20 20-20 20 20 20 7D 20 20 20 } +0000:1000 20 0A 20 20 20 20 20 20-20 20 20 20 20 20 65 6C ◙ el +0000:1010 73 65 20 20 20 20 0A 20-20 20 20 20 20 20 20 20 se ◙ +0000:1020 20 20 20 20 20 20 20 72-65 74 20 7E 3D 20 6E 75 ret ~= nu +0000:1030 6D 62 65 72 73 5B 30 2E-2E 31 5D 3B 20 20 20 20 mbers[0..1]; +0000:1040 2F 2F 20 6A 75 73 74 20-61 70 70 65 6E 64 20 74 // just append t +0000:1050 68 69 73 20 64 69 67 69-74 20 20 20 20 20 20 20 his digit +0000:1060 20 20 20 20 20 20 0A 20-20 20 20 20 20 20 20 7D ◙ } +0000:1070 20 20 20 20 0A 20 20 20-20 20 20 20 20 72 65 74 ◙ ret +0000:1080 75 72 6E 20 72 65 74 3B-0A 20 20 20 20 7D 0A 0A urn ret;◙ }◙◙ +0000:1090 20 20 20 20 2F 2F 2F 20-28 54 68 69 73 20 66 75 /// (This fu +0000:10A0 6E 63 74 69 6F 6E 20 77-61 73 20 69 6E 6C 69 6E nction was inlin +0000:10B0 65 64 20 
69 6E 20 74 68-65 20 6F 72 69 67 69 6E ed in the origin +0000:10C0 61 6C 20 70 72 6F 67 72-61 6D 29 20 0A 20 20 20 al program) ◙ +0000:10D0 20 2F 2F 2F 20 46 69 6E-64 73 20 61 6C 6C 20 61 /// Finds all a +0000:10E0 6C 74 65 72 6E 61 74 69-76 65 73 20 66 6F 72 20 lternatives for +0000:10F0 74 68 65 20 67 69 76 65-6E 20 70 68 6F 6E 65 20 the given phone +0000:1100 6E 75 6D 62 65 72 20 0A-20 20 20 20 2F 2F 2F 20 number ◙ /// +0000:1110 52 65 74 75 72 6E 73 3A-20 61 72 72 61 79 20 6F Returns: array o +0000:1120 66 20 73 74 72 69 6E 67-73 20 0A 20 20 20 20 73 f strings ◙ s +0000:1130 74 72 69 6E 67 61 72 72-61 79 20 46 69 6E 64 57 tringarray FindW +0000:1140 6F 72 64 73 28 20 73 74-72 69 6E 67 20 70 68 6F ords( string pho +0000:1150 6E 65 5F 6E 75 6D 62 65-72 20 29 0A 20 20 20 20 ne_number )◙ +0000:1160 7B 0A 20 20 20 20 20 20-20 20 69 66 20 28 21 70 {◙ if (!p +0000:1170 68 6F 6E 65 5F 6E 75 6D-62 65 72 2E 6C 65 6E 67 hone_number.leng +0000:1180 74 68 29 0A 20 20 20 20-20 20 20 20 20 20 20 20 th)◙ +0000:1190 72 65 74 75 72 6E 20 6E-75 6C 6C 3B 0A 20 20 20 return null;◙ +0000:11A0 20 20 20 20 20 2F 2F 20-53 74 72 69 70 20 74 68 // Strip th +0000:11B0 65 20 6E 6F 6E 2D 64 69-67 69 74 20 63 68 61 72 e non-digit char +0000:11C0 61 63 74 65 72 73 20 66-72 6F 6D 20 74 68 65 20 acters from the +0000:11D0 70 68 6F 6E 65 20 6E 75-6D 62 65 72 2C 20 61 6E phone number, an +0000:11E0 64 0A 20 20 20 20 20 20-20 20 2F 2F 20 70 61 73 d◙ // pas +0000:11F0 73 20 69 74 20 74 6F 20-74 68 65 20 72 65 63 75 s it to the recu +0000:1200 72 73 69 76 65 20 66 75-6E 63 74 69 6F 6E 20 28 rsive function ( +0000:1210 6C 65 61 64 69 6E 67 20-64 69 67 69 74 20 69 73 leading digit is +0000:1220 20 61 6C 6C 6F 77 65 64-29 0A 20 20 20 20 20 20 allowed)◙ +0000:1230 20 20 72 65 74 75 72 6E-20 5F 46 69 6E 64 57 6F return _FindWo +0000:1240 72 64 73 28 20 73 74 72-69 70 4E 6F 6E 44 69 67 rds( stripNonDig +0000:1250 69 74 28 70 68 6F 6E 65-5F 6E 75 6D 62 65 72 29 it(phone_number) +0000:1260 2C 20 74 72 75 65 20 29-3B 20 20 20 20 0A 20 20 , true ); ◙ +0000:1270 20 20 7D 20 20 20 20 0A-20 20 20 20 0A 20 20 20 } ◙ ◙ +0000:1280 20 2F 2F 20 52 65 61 64-20 74 68 65 20 70 68 6F // Read the pho +0000:1290 6E 65 20 6E 75 6D 62 65-72 73 20 20 20 20 20 0A ne numbers ◙ +0000:12A0 20 20 20 20 66 6F 72 65-61 63 68 28 73 74 72 69 foreach(stri +0000:12B0 6E 67 20 70 68 6F 6E 65-3B 20 6E 65 77 20 42 75 ng phone; new Bu +0000:12C0 66 66 65 72 65 64 46 69-6C 65 28 22 69 6E 70 75 fferedFile("inpu +0000:12D0 74 2E 74 78 74 22 20 20-20 29 20 29 0A 20 20 20 t.txt" ) )◙ +0000:12E0 20 20 20 20 20 66 6F 72-65 61 63 68 28 61 6C 74 foreach(alt +0000:12F0 65 72 6E 61 74 69 76 65-3B 20 46 69 6E 64 57 6F ernative; FindWo +0000:1300 72 64 73 28 20 70 68 6F-6E 65 20 29 20 29 0A 20 rds( phone ) )◙ +0000:1310 20 20 20 20 20 20 20 20-20 20 20 77 72 69 74 65 write +0000:1320 66 6C 6E 28 70 68 6F 6E-65 2C 20 22 3A 20 22 2C fln(phone, ": ", +0000:1330 20 61 6C 74 65 72 6E 61-74 69 76 65 20 29 3B 0A alternative );◙ +0000:1340 7D 0A 0A }◙◙ diff --git a/vendor/pygments/tests/examplefiles/hexdump_hd b/vendor/pygments/tests/examplefiles/hexdump_hd new file mode 100644 index 0000000..4af46fc --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hexdump_hd @@ -0,0 +1,310 @@ +00000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 |// Created by Li| +00000010 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e |onello Lunesu an| +00000020 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 |d placed in the | +00000030 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f |public domain../| 
+00000040 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 |/ This file has | +00000050 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 |been modified fr| +00000060 6f 6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 |om its original | +00000070 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 |version..// It h| +00000080 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 |as been formatte| +00000090 64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 |d to fit your sc| +000000a0 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f |reen..module pho| +000000b0 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 |neno; // opt| +000000c0 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 |ional.import std| +000000d0 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 |.stdio; // wri| +000000e0 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 |tefln .impor| +000000f0 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f |t std.ctype; /| +00000100 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 |/ isdigit .i| +00000110 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d |mport std.stream| +00000120 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 |; // BufferedFi| +00000130 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 |le..// Just for | +00000140 72 65 61 64 61 62 69 6c 69 74 79 20 28 69 6d 61 |readability (ima| +00000150 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 |gine char[][][ch| +00000160 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 |ar[]]) .alias| +00000170 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a | char[] string;.| +00000180 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 |alias string[] s| +00000190 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f |tringarray;..///| +000001a0 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 | Strips non-digi| +000001b0 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f |t characters fro| +000001c0 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f |m the string (CO| +000001d0 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e |W).string stripN| +000001e0 6f 6e 44 69 67 69 74 28 20 69 6e 20 73 74 72 69 |onDigit( in stri| +000001f0 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 |ng line ) .{. | +00000200 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 | string ret;. | +00000210 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c | foreach(uint i,| +00000220 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 | c; line) {. | +00000230 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 | // Error: st| +00000240 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 |d.ctype.isdigit | +00000250 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 |at C:\dmd\src\ph| +00000260 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 |obos\std\ctype.d| +00000270 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f |(37) . //| +00000280 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 | conflicts with | +00000290 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 |std.stream.isdig| +000002a0 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 |it at C:\dmd\src| +000002b0 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 |\phobos\std\stre| +000002c0 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 |am.d(2924). | +000002d0 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 | if (!std.ctyp| +000002e0 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a |e.isdigit(c)) {.| +000002f0 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 | if (| +00000300 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 |!ret). | +00000310 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 | ret = line| +00000320 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 |[0..i]; . 
| +00000330 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 | } . | +00000340 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a | else if (ret).| +00000350 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 | ret | +00000360 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 |~= c; . } | +00000370 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 | . return r| +00000380 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a |et?ret:line;.}..| +00000390 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 |unittest {. a| +000003a0 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 |ssert( stripNonD| +000003b0 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 |igit("asdf") == | +000003c0 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 |"" );. asser| +000003d0 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 |t( stripNonDigit| +000003e0 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 |("\'13-=2 4kop")| +000003f0 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a | == "1324" );.| +00000400 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 |}../// Converts | +00000410 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 |a word into a nu| +00000420 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 |mber, ignoring a| +00000430 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 |ll non alpha cha| +00000440 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 |racters .string| +00000450 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 | wordToNum( in s| +00000460 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f |tring word ).{./| +00000470 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 |/ translation ta| +00000480 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b |ble for the task| +00000490 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 | at hand.const c| +000004a0 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 |har[256] TRANSLA| +000004b0 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 |TE = . " | +000004c0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +000004d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 | " | +000004e0 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 | // 0 . " | +000004f0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 | 01| +00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 |23456789 " | +00000510 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 | // 32 . | +00000520 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 |" 57630499617851| +00000530 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 |881234762239 | +00000540 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 | " // 64 . | +00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 | " 5763049961785| +00000560 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 |1881234762239 | +00000570 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 | ". " | +00000580 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000590 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 | ". "| +000005a0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +* +000005c0 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 |". " | +000005d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +000005e0 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 | " . | +000005f0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | " | +00000600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000610 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 | ";. string | +00000620 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 |ret;. foreach| +00000630 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d |(c; cast(ubyte[]| +00000640 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 |)word). 
i| +00000650 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 |f (TRANSLATE[c] | +00000660 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 |!= ' '). | +00000670 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 | ret ~= TRANS| +00000680 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 |LATE[c];. ret| +00000690 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 |urn ret;.}..unit| +000006a0 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 |test {. // Test | +000006b0 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 |wordToNum using | +000006c0 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 |the table from t| +000006d0 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 |he task descript| +000006e0 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 |ion.. assert( "0| +000006f0 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 |1112223334455666| +00000700 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 |777888999" ==. | +00000710 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 | wordToNum("E | | +00000720 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 |J N Q | R W X | | +00000730 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 |D S Y | F T | A | +00000740 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 |M | C I V | B K | +00000750 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 |U | L O P | G H | +00000760 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 |Z"));. assert( "| +00000770 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 |0111222333445566| +00000780 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a |6777888999" == .| +00000790 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 | wordToNum("e | +000007a0 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 || j n q | r w x | +000007b0 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 || d s y | f t | | +000007c0 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 |a m | c i v | b | +000007d0 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 |k u | l o p | g | +000007e0 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 |h z"));. assert(| +000007f0 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d | "0123456789" ==| +00000800 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 | . wordToNum("| +00000810 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 |0 | 1 | 2 | +00000820 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 | | 3 | 4 | +00000830 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 || 5 | 6 | | +00000840 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 | 7 | 8 | | +00000850 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 | 9"));.}..void | +00000860 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 |main( string[] a| +00000870 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 |rgs ).{. // T| +00000880 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 |his associative | +00000890 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d |array maps a num| +000008a0 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 |ber to an array | +000008b0 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 |of words. . | +000008c0 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 | stringarray[st| +000008d0 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 |ring] num2wor| +000008e0 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 |ds;.. foreach| +000008f0 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 |(string word; ne| +00000900 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 |w BufferedFile("| +00000910 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 |dictionary.txt" | +00000920 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 |) ). 
num2| +00000930 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d |words[ wordToNum| +00000940 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 |(word) ] ~= word| +00000950 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 |.dup; // | +00000960 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f |must dup.. //| +00000970 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 |/ Finds all alte| +00000980 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 |rnatives for the| +00000990 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 | given number. | +000009a0 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 | /// (should ha| +000009b0 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 |ve been stripped| +000009c0 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 | from non-digit | +000009d0 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 |characters). | +000009e0 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e |stringarray _Fin| +000009f0 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e |dWords( string n| +00000a00 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 |umbers, bool dig| +00000a10 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a |itok ). in {.| +00000a20 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e | assert(n| +00000a30 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 |umbers.length > | +00000a40 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 | 0); . } | +00000a50 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c | . out(resul| +00000a60 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 |t) {. for| +00000a70 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 |each (a; result)| +00000a80 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 |. ass| +00000a90 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 |ert( wordToNum(a| +00000aa0 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a |) == numbers );.| +00000ab0 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f | } . bo| +00000ac0 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 |dy {. str| +00000ad0 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 |ingarray ret;. | +00000ae0 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 | bool found| +00000af0 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 |word = false;. | +00000b00 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 | for (uint | +00000b10 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e |t=1; t<=numbers.| +00000b20 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 |length; ++t) {. | +00000b30 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 | auto | +00000b40 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e |alternatives = n| +00000b50 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 |umbers[0..t] in | +00000b60 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 |num2words;. | +00000b70 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 | if (!alte| +00000b80 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 |rnatives). | +00000b90 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e | contin| +00000ba0 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 |ue;. | +00000bb0 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 |foundword = true| +00000bc0 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 |;. if| +00000bd0 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 | (numbers.length| +00000be0 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 | > t) {. 
| +00000bf0 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 | // Comb| +00000c00 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 |ine all current | +00000c10 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 |alternatives wit| +00000c20 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 |h all alternativ| +00000c30 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 |es . | +00000c40 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 | // of th| +00000c50 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 |e rest (next pie| +00000c60 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 |ce can start wit| +00000c70 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 |h a digit) | +00000c80 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 | . | +00000c90 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 | foreach| +00000ca0 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 | (a2; _FindWords| +00000cb0 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c |( numbers[t..$],| +00000cc0 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 | true ) ). | +00000cd0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000ce0 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 | foreach(a1; *a| +00000cf0 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 |lternatives). | +00000d00 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000d10 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 | ret ~= a1 ~ | +00000d20 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 |" " ~ a2;. | +00000d30 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 | }. | +00000d40 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 | else . | +00000d50 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 | ret| +00000d60 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 | ~= *alternative| +00000d70 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 |s; // append | +00000d80 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 |these alternativ| +00000d90 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 |es. }. | +00000da0 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b | // Try to k| +00000db0 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c |eep 1 digit, onl| +00000dc0 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 |y if we're allow| +00000dd0 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a |ed and no other.| +00000de0 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 | // alter| +00000df0 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 |natives were fou| +00000e00 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 |nd. // Te| +00000e10 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 |sting "ret.lengt| +00000e20 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 |h" makes more se| +00000e30 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 |nse than testing| +00000e40 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 | "foundword",. | +00000e50 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 | // but the| +00000e60 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 | other implement| +00000e70 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 |ations seem to d| +00000e80 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 |o just this.. | +00000e90 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b | if (digitok| +00000ea0 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 | && !foundword) | +00000eb0 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d |{ //ret.length =| +00000ec0 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 |= 0 . | +00000ed0 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e | if(numbers.len| +00000ee0 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 |gth > 1) {. 
| +00000ef0 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 | // C| +00000f00 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 |ombine 1 digit w| +00000f10 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 |ith all altenati| +00000f20 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 |ves from the res| +00000f30 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 |t . | +00000f40 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 | // (next p| +00000f50 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 |iece can not sta| +00000f60 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 |rt with a digit)| +00000f70 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 | . | +00000f80 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 | forea| +00000f90 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 |ch (a; _FindWord| +00000fa0 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d |s( numbers[1..$]| +00000fb0 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 |, false ) ). | +00000fc0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | +00000fd0 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 |ret ~= numbers[0| +00000fe0 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a |..1] ~ " " ~ a;.| +00000ff0 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 | } | +00001000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c | . el| +00001010 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 |se . | +00001020 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 | ret ~= nu| +00001030 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 |mbers[0..1]; | +00001040 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 |// just append t| +00001050 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 |his digit | +00001060 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d | . }| +00001070 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 | . ret| +00001080 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a |urn ret;. }..| +00001090 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 | /// (This fu| +000010a0 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e |nction was inlin| +000010b0 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e |ed in the origin| +000010c0 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 |al program) . | +000010d0 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 | /// Finds all a| +000010e0 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 |lternatives for | +000010f0 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 |the given phone | +00001100 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 |number . /// | +00001110 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f |Returns: array o| +00001120 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 |f strings . s| +00001130 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 |tringarray FindW| +00001140 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f |ords( string pho| +00001150 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 |ne_number ). | +00001160 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 |{. if (!p| +00001170 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 |hone_number.leng| +00001180 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 |th). | +00001190 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 |return null;. | +000011a0 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 | // Strip th| +000011b0 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 |e non-digit char| +000011c0 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 |acters from the | +000011d0 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e |phone number, an| +000011e0 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 |d. 
// pas| +000011f0 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 |s it to the recu| +00001200 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 |rsive function (| +00001210 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 |leading digit is| +00001220 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 | allowed). | +00001230 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f | return _FindWo| +00001240 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 |rds( stripNonDig| +00001250 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 |it(phone_number)| +00001260 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 |, true ); . | +00001270 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 | } . . | +00001280 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f | // Read the pho| +00001290 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a |ne numbers .| +000012a0 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 | foreach(stri| +000012b0 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 |ng phone; new Bu| +000012c0 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 |fferedFile("inpu| +000012d0 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 |t.txt" ) ). | +000012e0 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 | foreach(alt| +000012f0 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f |ernative; FindWo| +00001300 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 |rds( phone ) ). | +00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 | write| +00001320 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c |fln(phone, ": ",| +00001330 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a | alternative );.| +00001340 7d 0a 0a |}..| +00001343 diff --git a/vendor/pygments/tests/examplefiles/hexdump_hexcat b/vendor/pygments/tests/examplefiles/hexdump_hexcat new file mode 100644 index 0000000..522074c --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hexdump_hexcat @@ -0,0 +1,247 @@ +00000000 2F 2F 20 43 72 65 61 74 65 64 20 62 79 20 4C 69 6F 6E 65 6C //.Created.by.Lionel +00000014 6C 6F 20 4C 75 6E 65 73 75 20 61 6E 64 20 70 6C 61 63 65 64 lo.Lunesu.and.placed +00000028 20 69 6E 20 74 68 65 20 70 75 62 6C 69 63 20 64 6F 6D 61 69 .in.the.public.domai +0000003C 6E 2E 0A 2F 2F 20 54 68 69 73 20 66 69 6C 65 20 68 61 73 20 n..//.This.file.has. +00000050 62 65 65 6E 20 6D 6F 64 69 66 69 65 64 20 66 72 6F 6D 20 69 been.modified.from.i +00000064 74 73 20 6F 72 69 67 69 6E 61 6C 20 76 65 72 73 69 6F 6E 2E ts.original.version. +00000078 0A 2F 2F 20 49 74 20 68 61 73 20 62 65 65 6E 20 66 6F 72 6D .//.It.has.been.form +0000008C 61 74 74 65 64 20 74 6F 20 66 69 74 20 79 6F 75 72 20 73 63 atted.to.fit.your.sc +000000A0 72 65 65 6E 2E 0A 6D 6F 64 75 6C 65 20 70 68 6F 6E 65 6E 6F reen..module.phoneno +000000B4 3B 20 20 20 20 20 2F 2F 20 6F 70 74 69 6F 6E 61 6C 0A 69 6D ;.....//.optional.im +000000C8 70 6F 72 74 20 73 74 64 2E 73 74 64 69 6F 3B 20 20 20 2F 2F port.std.stdio;...// +000000DC 20 77 72 69 74 65 66 6C 6E 20 20 20 20 20 0A 69 6D 70 6F 72 .writefln......impor +000000F0 74 20 73 74 64 2E 63 74 79 70 65 3B 20 20 20 2F 2F 20 69 73 t.std.ctype;...//.is +00000104 64 69 67 69 74 20 20 20 20 20 0A 69 6D 70 6F 72 74 20 73 74 digit......import.st +00000118 64 2E 73 74 72 65 61 6D 3B 20 20 2F 2F 20 42 75 66 66 65 72 d.stream;..//.Buffer +0000012C 65 64 46 69 6C 65 0A 0A 2F 2F 20 4A 75 73 74 20 66 6F 72 20 edFile..//.Just.for. +00000140 72 65 61 64 61 62 69 6C 69 74 79 20 28 69 6D 61 67 69 6E 65 readability.(imagine +00000154 20 63 68 61 72 5B 5D 5B 5D 5B 63 68 61 72 5B 5D 5D 29 20 20 .char[][][char[]]).. 
+00000168 20 20 0A 61 6C 69 61 73 20 63 68 61 72 5B 5D 20 73 74 72 69 ...alias.char[].stri +0000017C 6E 67 3B 0A 61 6C 69 61 73 20 73 74 72 69 6E 67 5B 5D 20 73 ng;.alias.string[].s +00000190 74 72 69 6E 67 61 72 72 61 79 3B 0A 0A 2F 2F 2F 20 53 74 72 tringarray;..///.Str +000001A4 69 70 73 20 6E 6F 6E 2D 64 69 67 69 74 20 63 68 61 72 61 63 ips.non-digit.charac +000001B8 74 65 72 73 20 66 72 6F 6D 20 74 68 65 20 73 74 72 69 6E 67 ters.from.the.string +000001CC 20 28 43 4F 57 29 0A 73 74 72 69 6E 67 20 73 74 72 69 70 4E .(COW).string.stripN +000001E0 6F 6E 44 69 67 69 74 28 20 69 6E 20 73 74 72 69 6E 67 20 6C onDigit(.in.string.l +000001F4 69 6E 65 20 29 20 0A 7B 0A 20 20 20 20 73 74 72 69 6E 67 20 ine.)..{.....string. +00000208 72 65 74 3B 0A 20 20 20 20 66 6F 72 65 61 63 68 28 75 69 6E ret;.....foreach(uin +0000021C 74 20 69 2C 20 63 3B 20 6C 69 6E 65 29 20 7B 0A 20 20 20 20 t.i,.c;.line).{..... +00000230 20 20 20 20 2F 2F 20 45 72 72 6F 72 3A 20 73 74 64 2E 63 74 ....//.Error:.std.ct +00000244 79 70 65 2E 69 73 64 69 67 69 74 20 61 74 20 43 3A 5C 64 6D ype.isdigit.at.C:\dm +00000258 64 5C 73 72 63 5C 70 68 6F 62 6F 73 5C 73 74 64 5C 63 74 79 d\src\phobos\std\cty +0000026C 70 65 2E 64 28 33 37 29 20 0A 20 20 20 20 20 20 20 20 2F 2F pe.d(37)..........// +00000280 20 63 6F 6E 66 6C 69 63 74 73 20 77 69 74 68 20 73 74 64 2E .conflicts.with.std. +00000294 73 74 72 65 61 6D 2E 69 73 64 69 67 69 74 20 61 74 20 43 3A stream.isdigit.at.C: +000002A8 5C 64 6D 64 5C 73 72 63 5C 70 68 6F 62 6F 73 5C 73 74 64 5C \dmd\src\phobos\std\ +000002BC 73 74 72 65 61 6D 2E 64 28 32 39 32 34 29 0A 20 20 20 20 20 stream.d(2924)...... +000002D0 20 20 20 69 66 20 28 21 73 74 64 2E 63 74 79 70 65 2E 69 73 ...if.(!std.ctype.is +000002E4 64 69 67 69 74 28 63 29 29 20 7B 0A 20 20 20 20 20 20 20 20 digit(c)).{......... +000002F8 20 20 20 20 69 66 20 28 21 72 65 74 29 0A 20 20 20 20 20 20 ....if.(!ret)....... +0000030C 20 20 20 20 20 20 20 20 20 20 72 65 74 20 3D 20 6C 69 6E 65 ..........ret.=.line +00000320 5B 30 2E 2E 69 5D 3B 20 20 20 20 0A 20 20 20 20 20 20 20 20 [0..i];............. +00000334 7D 20 20 20 20 0A 20 20 20 20 20 20 20 20 65 6C 73 65 20 69 }.............else.i +00000348 66 20 28 72 65 74 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 f.(ret)............. +0000035C 72 65 74 20 7E 3D 20 63 3B 20 20 20 20 0A 20 20 20 20 7D 20 ret.~=.c;.........}. +00000370 20 20 20 0A 20 20 20 20 72 65 74 75 72 6E 20 72 65 74 3F 72 ........return.ret?r +00000384 65 74 3A 6C 69 6E 65 3B 0A 7D 0A 0A 75 6E 69 74 74 65 73 74 et:line;.}..unittest +00000398 20 7B 0A 20 20 20 20 61 73 73 65 72 74 28 20 73 74 72 69 70 .{.....assert(.strip +000003AC 4E 6F 6E 44 69 67 69 74 28 22 61 73 64 66 22 29 20 3D 3D 20 NonDigit("asdf").==. +000003C0 22 22 20 20 29 3B 0A 20 20 20 20 61 73 73 65 72 74 28 20 73 ""..);.....assert(.s +000003D4 74 72 69 70 4E 6F 6E 44 69 67 69 74 28 22 5C 27 31 33 2D 3D tripNonDigit("\'13-= +000003E8 32 20 34 6B 6F 70 22 29 20 3D 3D 20 20 22 31 33 32 34 22 20 2.4kop").==.."1324". +000003FC 20 29 3B 0A 7D 0A 0A 2F 2F 2F 20 43 6F 6E 76 65 72 74 73 20 .);.}..///.Converts. 
+00000410 61 20 77 6F 72 64 20 69 6E 74 6F 20 61 20 6E 75 6D 62 65 72 a.word.into.a.number +00000424 2C 20 69 67 6E 6F 72 69 6E 67 20 61 6C 6C 20 6E 6F 6E 20 61 ,.ignoring.all.non.a +00000438 6C 70 68 61 20 63 68 61 72 61 63 74 65 72 73 20 20 0A 73 74 lpha.characters...st +0000044C 72 69 6E 67 20 77 6F 72 64 54 6F 4E 75 6D 28 20 69 6E 20 73 ring.wordToNum(.in.s +00000460 74 72 69 6E 67 20 77 6F 72 64 20 29 0A 7B 0A 2F 2F 20 74 72 tring.word.).{.//.tr +00000474 61 6E 73 6C 61 74 69 6F 6E 20 74 61 62 6C 65 20 66 6F 72 20 anslation.table.for. +00000488 74 68 65 20 74 61 73 6B 20 61 74 20 68 61 6E 64 0A 63 6F 6E the.task.at.hand.con +0000049C 73 74 20 63 68 61 72 5B 32 35 36 5D 20 54 52 41 4E 53 4C 41 st.char[256].TRANSLA +000004B0 54 45 20 3D 20 20 20 20 0A 20 20 20 20 22 20 20 20 20 20 20 TE.=........."...... +000004C4 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +000004D8 20 20 20 20 20 20 22 20 20 2F 2F 20 30 20 20 20 0A 20 20 20 ......"..//.0....... +000004EC 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 ."................01 +00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 20 2F 2F 20 23456789......"..//. +00000514 33 32 20 20 20 20 20 0A 20 20 20 20 22 20 35 37 36 33 30 34 32..........".576304 +00000528 39 39 36 31 37 38 35 31 38 38 31 32 33 34 37 36 32 32 33 39 99617851881234762239 +0000053C 20 20 20 20 20 22 20 20 2F 2F 20 36 34 20 20 20 0A 20 20 20 ....."..//.64....... +00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 38 38 31 .".57630499617851881 +00000564 32 33 34 37 36 32 32 33 39 20 20 20 20 20 22 0A 20 20 20 20 234762239....."..... +00000578 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 "................... +0000058C 20 20 20 20 20 20 20 20 20 20 20 20 20 22 0A 20 20 20 20 22 ............."....." +000005A0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +000005B4 20 20 20 20 20 20 20 20 20 20 20 20 22 0A 20 20 20 20 22 20 ............".....". +000005C8 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +000005DC 20 20 20 20 20 20 20 20 20 20 20 22 20 20 20 20 0A 20 20 20 ..........."........ +000005F0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .".................. +00000604 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 3B 0A 20 20 20 ..............";.... +00000618 20 73 74 72 69 6E 67 20 72 65 74 3B 0A 20 20 20 20 66 6F 72 .string.ret;.....for +0000062C 65 61 63 68 28 63 3B 20 63 61 73 74 28 75 62 79 74 65 5B 5D each(c;.cast(ubyte[] +00000640 29 77 6F 72 64 29 0A 20 20 20 20 20 20 20 20 69 66 20 28 54 )word).........if.(T +00000654 52 41 4E 53 4C 41 54 45 5B 63 5D 20 21 3D 20 27 20 27 29 0A RANSLATE[c].!=.'.'). 
+00000668 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 54 ............ret.~=.T +0000067C 52 41 4E 53 4C 41 54 45 5B 63 5D 3B 0A 20 20 20 20 72 65 74 RANSLATE[c];.....ret +00000690 75 72 6E 20 72 65 74 3B 0A 7D 0A 0A 75 6E 69 74 74 65 73 74 urn.ret;.}..unittest +000006A4 20 7B 0A 20 2F 2F 20 54 65 73 74 20 77 6F 72 64 54 6F 4E 75 .{..//.Test.wordToNu +000006B8 6D 20 75 73 69 6E 67 20 74 68 65 20 74 61 62 6C 65 20 66 72 m.using.the.table.fr +000006CC 6F 6D 20 74 68 65 20 74 61 73 6B 20 64 65 73 63 72 69 70 74 om.the.task.descript +000006E0 69 6F 6E 2E 0A 20 61 73 73 65 72 74 28 20 22 30 31 31 31 32 ion...assert(."01112 +000006F4 32 32 33 33 33 34 34 35 35 36 36 36 37 37 37 38 38 38 39 39 22333445566677788899 +00000708 39 22 20 3D 3D 0A 20 20 20 77 6F 72 64 54 6F 4E 75 6D 28 22 9".==....wordToNum(" +0000071C 45 20 7C 20 4A 20 4E 20 51 20 7C 20 52 20 57 20 58 20 7C 20 E.|.J.N.Q.|.R.W.X.|. +00000730 44 20 53 20 59 20 7C 20 46 20 54 20 7C 20 41 20 4D 20 7C 20 D.S.Y.|.F.T.|.A.M.|. +00000744 43 20 49 20 56 20 7C 20 42 20 4B 20 55 20 7C 20 4C 20 4F 20 C.I.V.|.B.K.U.|.L.O. +00000758 50 20 7C 20 47 20 48 20 5A 22 29 29 3B 0A 20 61 73 73 65 72 P.|.G.H.Z"));..asser +0000076C 74 28 20 22 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 t(."0111222333445566 +00000780 36 37 37 37 38 38 38 39 39 39 22 20 3D 3D 20 0A 20 20 20 77 6777888999".==.....w +00000794 6F 72 64 54 6F 4E 75 6D 28 22 65 20 7C 20 6A 20 6E 20 71 20 ordToNum("e.|.j.n.q. +000007A8 7C 20 72 20 77 20 78 20 7C 20 64 20 73 20 79 20 7C 20 66 20 |.r.w.x.|.d.s.y.|.f. +000007BC 74 20 7C 20 61 20 6D 20 7C 20 63 20 69 20 76 20 7C 20 62 20 t.|.a.m.|.c.i.v.|.b. +000007D0 6B 20 75 20 7C 20 6C 20 6F 20 70 20 7C 20 67 20 68 20 7A 22 k.u.|.l.o.p.|.g.h.z" +000007E4 29 29 3B 0A 20 61 73 73 65 72 74 28 20 22 30 31 32 33 34 35 ));..assert(."012345 +000007F8 36 37 38 39 22 20 3D 3D 20 0A 20 20 20 77 6F 72 64 54 6F 4E 6789".==.....wordToN +0000080C 75 6D 28 22 30 20 7C 20 20 20 31 20 20 20 7C 20 20 20 32 20 um("0.|...1...|...2. +00000820 20 20 7C 20 20 20 33 20 20 20 7C 20 20 34 20 20 7C 20 20 35 ..|...3...|..4..|..5 +00000834 20 20 7C 20 20 20 36 20 20 20 7C 20 20 20 37 20 20 20 7C 20 ..|...6...|...7...|. +00000848 20 20 38 20 20 20 7C 20 20 20 39 22 29 29 3B 0A 7D 0A 0A 76 ..8...|...9"));.}..v +0000085C 6F 69 64 20 6D 61 69 6E 28 20 73 74 72 69 6E 67 5B 5D 20 61 oid.main(.string[].a +00000870 72 67 73 20 29 0A 7B 0A 20 20 20 20 2F 2F 20 54 68 69 73 20 rgs.).{.....//.This. +00000884 61 73 73 6F 63 69 61 74 69 76 65 20 61 72 72 61 79 20 6D 61 associative.array.ma +00000898 70 73 20 61 20 6E 75 6D 62 65 72 20 74 6F 20 61 6E 20 61 72 ps.a.number.to.an.ar +000008AC 72 61 79 20 6F 66 20 77 6F 72 64 73 2E 20 20 20 20 0A 20 20 ray.of.words........ +000008C0 20 20 73 74 72 69 6E 67 61 72 72 61 79 5B 73 74 72 69 6E 67 ..stringarray[string +000008D4 5D 20 20 20 20 6E 75 6D 32 77 6F 72 64 73 3B 0A 0A 20 20 20 ]....num2words;..... +000008E8 20 66 6F 72 65 61 63 68 28 73 74 72 69 6E 67 20 77 6F 72 64 .foreach(string.word +000008FC 3B 20 6E 65 77 20 42 75 66 66 65 72 65 64 46 69 6C 65 28 22 ;.new.BufferedFile(" +00000910 64 69 63 74 69 6F 6E 61 72 79 2E 74 78 74 22 20 29 20 29 0A dictionary.txt".).). +00000924 20 20 20 20 20 20 20 20 6E 75 6D 32 77 6F 72 64 73 5B 20 77 ........num2words[.w +00000938 6F 72 64 54 6F 4E 75 6D 28 77 6F 72 64 29 20 5D 20 7E 3D 20 ordToNum(word).].~=. +0000094C 77 6F 72 64 2E 64 75 70 3B 20 20 20 20 20 20 20 20 2F 2F 20 word.dup;........//. 
+00000960 6D 75 73 74 20 64 75 70 0A 0A 20 20 20 20 2F 2F 2F 20 46 69 must.dup......///.Fi +00000974 6E 64 73 20 61 6C 6C 20 61 6C 74 65 72 6E 61 74 69 76 65 73 nds.all.alternatives +00000988 20 66 6F 72 20 74 68 65 20 67 69 76 65 6E 20 6E 75 6D 62 65 .for.the.given.numbe +0000099C 72 0A 20 20 20 20 2F 2F 2F 20 28 73 68 6F 75 6C 64 20 68 61 r.....///.(should.ha +000009B0 76 65 20 62 65 65 6E 20 73 74 72 69 70 70 65 64 20 66 72 6F ve.been.stripped.fro +000009C4 6D 20 6E 6F 6E 2D 64 69 67 69 74 20 63 68 61 72 61 63 74 65 m.non-digit.characte +000009D8 72 73 29 0A 20 20 20 20 73 74 72 69 6E 67 61 72 72 61 79 20 rs).....stringarray. +000009EC 5F 46 69 6E 64 57 6F 72 64 73 28 20 73 74 72 69 6E 67 20 6E _FindWords(.string.n +00000A00 75 6D 62 65 72 73 2C 20 62 6F 6F 6C 20 64 69 67 69 74 6F 6B umbers,.bool.digitok +00000A14 20 29 0A 20 20 20 20 69 6E 20 7B 0A 20 20 20 20 20 20 20 20 .).....in.{......... +00000A28 61 73 73 65 72 74 28 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 assert(numbers.lengt +00000A3C 68 20 3E 20 20 30 29 3B 20 20 20 20 0A 20 20 20 20 7D 20 20 h.>..0);.........}.. +00000A50 20 20 0A 20 20 20 20 6F 75 74 28 72 65 73 75 6C 74 29 20 7B .......out(result).{ +00000A64 0A 20 20 20 20 20 20 20 20 66 6F 72 65 61 63 68 20 28 61 3B .........foreach.(a; +00000A78 20 72 65 73 75 6C 74 29 0A 20 20 20 20 20 20 20 20 20 20 20 .result)............ +00000A8C 20 61 73 73 65 72 74 28 20 77 6F 72 64 54 6F 4E 75 6D 28 61 .assert(.wordToNum(a +00000AA0 29 20 3D 3D 20 6E 75 6D 62 65 72 73 20 29 3B 0A 20 20 20 20 ).==.numbers.);..... +00000AB4 7D 20 20 20 20 0A 20 20 20 20 62 6F 64 79 20 7B 0A 20 20 20 }.........body.{.... +00000AC8 20 20 20 20 20 73 74 72 69 6E 67 61 72 72 61 79 20 72 65 74 .....stringarray.ret +00000ADC 3B 0A 20 20 20 20 20 20 20 20 62 6F 6F 6C 20 66 6F 75 6E 64 ;.........bool.found +00000AF0 77 6F 72 64 20 3D 20 66 61 6C 73 65 3B 0A 20 20 20 20 20 20 word.=.false;....... +00000B04 20 20 66 6F 72 20 28 75 69 6E 74 20 74 3D 31 3B 20 74 3C 3D ..for.(uint.t=1;.t<= +00000B18 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 68 3B 20 2B 2B 74 29 numbers.length;.++t) +00000B2C 20 7B 0A 20 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6F 20 .{.............auto. +00000B40 61 6C 74 65 72 6E 61 74 69 76 65 73 20 3D 20 6E 75 6D 62 65 alternatives.=.numbe +00000B54 72 73 5B 30 2E 2E 74 5D 20 69 6E 20 6E 75 6D 32 77 6F 72 64 rs[0..t].in.num2word +00000B68 73 3B 0A 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 21 s;.............if.(! +00000B7C 61 6C 74 65 72 6E 61 74 69 76 65 73 29 0A 20 20 20 20 20 20 alternatives)....... +00000B90 20 20 20 20 20 20 20 20 20 20 63 6F 6E 74 69 6E 75 65 3B 0A ..........continue;. +00000BA4 20 20 20 20 20 20 20 20 20 20 20 20 66 6F 75 6E 64 77 6F 72 ............foundwor +00000BB8 64 20 3D 20 74 72 75 65 3B 0A 20 20 20 20 20 20 20 20 20 20 d.=.true;........... +00000BCC 20 20 69 66 20 28 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 68 ..if.(numbers.length +00000BE0 20 3E 20 20 74 29 20 7B 0A 20 20 20 20 20 20 20 20 20 20 20 .>..t).{............ +00000BF4 20 20 20 20 20 2F 2F 20 43 6F 6D 62 69 6E 65 20 61 6C 6C 20 .....//.Combine.all. +00000C08 63 75 72 72 65 6E 74 20 61 6C 74 65 72 6E 61 74 69 76 65 73 current.alternatives +00000C1C 20 77 69 74 68 20 61 6C 6C 20 61 6C 74 65 72 6E 61 74 69 76 .with.all.alternativ +00000C30 65 73 20 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 es.................. 
+00000C44 20 20 20 20 2F 2F 20 6F 66 20 74 68 65 20 72 65 73 74 20 28 ....//.of.the.rest.( +00000C58 6E 65 78 74 20 70 69 65 63 65 20 63 61 6E 20 73 74 61 72 74 next.piece.can.start +00000C6C 20 77 69 74 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 .with.a.digit)...... +00000C80 20 20 20 20 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 .................... +00000C94 20 20 20 20 20 66 6F 72 65 61 63 68 20 28 61 32 3B 20 5F 46 .....foreach.(a2;._F +00000CA8 69 6E 64 57 6F 72 64 73 28 20 6E 75 6D 62 65 72 73 5B 74 2E indWords(.numbers[t. +00000CBC 2E 24 5D 2C 20 74 72 75 65 20 20 20 20 20 29 20 29 0A 20 20 .$],.true.....).)... +00000CD0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 66 6F ..................fo +00000CE4 72 65 61 63 68 28 61 31 3B 20 2A 61 6C 74 65 72 6E 61 74 69 reach(a1;.*alternati +00000CF8 76 65 73 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 ves)................ +00000D0C 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 61 31 20 7E 20 ........ret.~=.a1.~. +00000D20 22 20 22 20 7E 20 61 32 3B 0A 20 20 20 20 20 20 20 20 20 20 ".".~.a2;........... +00000D34 20 20 7D 0A 20 20 20 20 20 20 20 20 20 20 20 20 65 6C 73 65 ..}.............else +00000D48 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... +00000D5C 20 72 65 74 20 7E 3D 20 2A 61 6C 74 65 72 6E 61 74 69 76 65 .ret.~=.*alternative +00000D70 73 3B 20 20 20 20 2F 2F 20 61 70 70 65 6E 64 20 74 68 65 73 s;....//.append.thes +00000D84 65 20 61 6C 74 65 72 6E 61 74 69 76 65 73 0A 20 20 20 20 20 e.alternatives...... +00000D98 20 20 20 7D 0A 20 20 20 20 20 20 20 20 2F 2F 20 54 72 79 20 ...}.........//.Try. +00000DAC 74 6F 20 6B 65 65 70 20 31 20 64 69 67 69 74 2C 20 6F 6E 6C to.keep.1.digit,.onl +00000DC0 79 20 69 66 20 77 65 27 72 65 20 61 6C 6C 6F 77 65 64 20 61 y.if.we're.allowed.a +00000DD4 6E 64 20 6E 6F 20 6F 74 68 65 72 0A 20 20 20 20 20 20 20 20 nd.no.other......... +00000DE8 2F 2F 20 61 6C 74 65 72 6E 61 74 69 76 65 73 20 77 65 72 65 //.alternatives.were +00000DFC 20 66 6F 75 6E 64 0A 20 20 20 20 20 20 20 20 2F 2F 20 54 65 .found.........//.Te +00000E10 73 74 69 6E 67 20 22 72 65 74 2E 6C 65 6E 67 74 68 22 20 6D sting."ret.length".m +00000E24 61 6B 65 73 20 6D 6F 72 65 20 73 65 6E 73 65 20 74 68 61 6E akes.more.sense.than +00000E38 20 74 65 73 74 69 6E 67 20 22 66 6F 75 6E 64 77 6F 72 64 22 .testing."foundword" +00000E4C 2C 0A 20 20 20 20 20 20 20 20 2F 2F 20 62 75 74 20 74 68 65 ,.........//.but.the +00000E60 20 6F 74 68 65 72 20 69 6D 70 6C 65 6D 65 6E 74 61 74 69 6F .other.implementatio +00000E74 6E 73 20 73 65 65 6D 20 74 6F 20 64 6F 20 6A 75 73 74 20 74 ns.seem.to.do.just.t +00000E88 68 69 73 2E 0A 20 20 20 20 20 20 20 20 69 66 20 28 64 69 67 his..........if.(dig +00000E9C 69 74 6F 6B 20 26 26 20 21 66 6F 75 6E 64 77 6F 72 64 29 20 itok.&&.!foundword). +00000EB0 7B 20 2F 2F 72 65 74 2E 6C 65 6E 67 74 68 20 3D 3D 20 30 20 {.//ret.length.==.0. +00000EC4 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 69 66 28 6E 75 6D ..............if(num +00000ED8 62 65 72 73 2E 6C 65 6E 67 74 68 20 3E 20 20 31 29 20 7B 0A bers.length.>..1).{. +00000EEC 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 2F 2F 20 43 ................//.C +00000F00 6F 6D 62 69 6E 65 20 31 20 64 69 67 69 74 20 77 69 74 68 20 ombine.1.digit.with. +00000F14 61 6C 6C 20 61 6C 74 65 6E 61 74 69 76 65 73 20 66 72 6F 6D all.altenatives.from +00000F28 20 74 68 65 20 72 65 73 74 20 20 20 20 0A 20 20 20 20 20 20 .the.rest........... 
+00000F3C 20 20 20 20 20 20 20 20 20 20 2F 2F 20 28 6E 65 78 74 20 70 ..........//.(next.p +00000F50 69 65 63 65 20 63 61 6E 20 6E 6F 74 20 73 74 61 72 74 20 77 iece.can.not.start.w +00000F64 69 74 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 20 20 ith.a.digit)........ +00000F78 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 66 ...................f +00000F8C 6F 72 65 61 63 68 20 28 61 3B 20 5F 46 69 6E 64 57 6F 72 64 oreach.(a;._FindWord +00000FA0 73 28 20 6E 75 6D 62 65 72 73 5B 31 2E 2E 24 5D 2C 20 66 61 s(.numbers[1..$],.fa +00000FB4 6C 73 65 20 29 20 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 lse.).)............. +00000FC8 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 6E 75 6D 62 65 ........ret.~=.numbe +00000FDC 72 73 5B 30 2E 2E 31 5D 20 7E 20 22 20 22 20 7E 20 61 3B 0A rs[0..1].~.".".~.a;. +00000FF0 20 20 20 20 20 20 20 20 20 20 20 20 7D 20 20 20 20 0A 20 20 ............}....... +00001004 20 20 20 20 20 20 20 20 20 20 65 6C 73 65 20 20 20 20 0A 20 ..........else...... +00001018 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 7E ...............ret.~ +0000102C 3D 20 6E 75 6D 62 65 72 73 5B 30 2E 2E 31 5D 3B 20 20 20 20 =.numbers[0..1];.... +00001040 2F 2F 20 6A 75 73 74 20 61 70 70 65 6E 64 20 74 68 69 73 20 //.just.append.this. +00001054 64 69 67 69 74 20 20 20 20 20 20 20 20 20 20 20 20 20 0A 20 digit............... +00001068 20 20 20 20 20 20 20 7D 20 20 20 20 0A 20 20 20 20 20 20 20 .......}............ +0000107C 20 72 65 74 75 72 6E 20 72 65 74 3B 0A 20 20 20 20 7D 0A 0A .return.ret;.....}.. +00001090 20 20 20 20 2F 2F 2F 20 28 54 68 69 73 20 66 75 6E 63 74 69 ....///.(This.functi +000010A4 6F 6E 20 77 61 73 20 69 6E 6C 69 6E 65 64 20 69 6E 20 74 68 on.was.inlined.in.th +000010B8 65 20 6F 72 69 67 69 6E 61 6C 20 70 72 6F 67 72 61 6D 29 20 e.original.program). +000010CC 0A 20 20 20 20 2F 2F 2F 20 46 69 6E 64 73 20 61 6C 6C 20 61 .....///.Finds.all.a +000010E0 6C 74 65 72 6E 61 74 69 76 65 73 20 66 6F 72 20 74 68 65 20 lternatives.for.the. +000010F4 67 69 76 65 6E 20 70 68 6F 6E 65 20 6E 75 6D 62 65 72 20 0A given.phone.number.. +00001108 20 20 20 20 2F 2F 2F 20 52 65 74 75 72 6E 73 3A 20 61 72 72 ....///.Returns:.arr +0000111C 61 79 20 6F 66 20 73 74 72 69 6E 67 73 20 0A 20 20 20 20 73 ay.of.strings......s +00001130 74 72 69 6E 67 61 72 72 61 79 20 46 69 6E 64 57 6F 72 64 73 tringarray.FindWords +00001144 28 20 73 74 72 69 6E 67 20 70 68 6F 6E 65 5F 6E 75 6D 62 65 (.string.phone_numbe +00001158 72 20 29 0A 20 20 20 20 7B 0A 20 20 20 20 20 20 20 20 69 66 r.).....{.........if +0000116C 20 28 21 70 68 6F 6E 65 5F 6E 75 6D 62 65 72 2E 6C 65 6E 67 .(!phone_number.leng +00001180 74 68 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 75 th).............retu +00001194 72 6E 20 6E 75 6C 6C 3B 0A 20 20 20 20 20 20 20 20 2F 2F 20 rn.null;.........//. +000011A8 53 74 72 69 70 20 74 68 65 20 6E 6F 6E 2D 64 69 67 69 74 20 Strip.the.non-digit. +000011BC 63 68 61 72 61 63 74 65 72 73 20 66 72 6F 6D 20 74 68 65 20 characters.from.the. +000011D0 70 68 6F 6E 65 20 6E 75 6D 62 65 72 2C 20 61 6E 64 0A 20 20 phone.number,.and... +000011E4 20 20 20 20 20 20 2F 2F 20 70 61 73 73 20 69 74 20 74 6F 20 ......//.pass.it.to. 
+000011F8 74 68 65 20 72 65 63 75 72 73 69 76 65 20 66 75 6E 63 74 69 the.recursive.functi +0000120C 6F 6E 20 28 6C 65 61 64 69 6E 67 20 64 69 67 69 74 20 69 73 on.(leading.digit.is +00001220 20 61 6C 6C 6F 77 65 64 29 0A 20 20 20 20 20 20 20 20 72 65 .allowed).........re +00001234 74 75 72 6E 20 5F 46 69 6E 64 57 6F 72 64 73 28 20 73 74 72 turn._FindWords(.str +00001248 69 70 4E 6F 6E 44 69 67 69 74 28 70 68 6F 6E 65 5F 6E 75 6D ipNonDigit(phone_num +0000125C 62 65 72 29 2C 20 74 72 75 65 20 29 3B 20 20 20 20 0A 20 20 ber),.true.);....... +00001270 20 20 7D 20 20 20 20 0A 20 20 20 20 0A 20 20 20 20 2F 2F 20 ..}..............//. +00001284 52 65 61 64 20 74 68 65 20 70 68 6F 6E 65 20 6E 75 6D 62 65 Read.the.phone.numbe +00001298 72 73 20 20 20 20 20 0A 20 20 20 20 66 6F 72 65 61 63 68 28 rs..........foreach( +000012AC 73 74 72 69 6E 67 20 70 68 6F 6E 65 3B 20 6E 65 77 20 42 75 string.phone;.new.Bu +000012C0 66 66 65 72 65 64 46 69 6C 65 28 22 69 6E 70 75 74 2E 74 78 fferedFile("input.tx +000012D4 74 22 20 20 20 29 20 29 0A 20 20 20 20 20 20 20 20 66 6F 72 t"...).).........for +000012E8 65 61 63 68 28 61 6C 74 65 72 6E 61 74 69 76 65 3B 20 46 69 each(alternative;.Fi +000012FC 6E 64 57 6F 72 64 73 28 20 70 68 6F 6E 65 20 29 20 29 0A 20 ndWords(.phone.).).. +00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 66 6C 6E 28 ...........writefln( +00001324 70 68 6F 6E 65 2C 20 22 3A 20 22 2C 20 61 6C 74 65 72 6E 61 phone,.":.",.alterna +00001338 74 69 76 65 20 29 3B 0A 7D 0A 0A tive.);.}.. diff --git a/vendor/pygments/tests/examplefiles/hexdump_hexdump b/vendor/pygments/tests/examplefiles/hexdump_hexdump new file mode 100644 index 0000000..06c2e86 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hexdump_hexdump @@ -0,0 +1,310 @@ +0000000 2f2f 4320 6572 7461 6465 6220 2079 694c +0000010 6e6f 6c65 6f6c 4c20 6e75 7365 2075 6e61 +0000020 2064 6c70 6361 6465 6920 206e 6874 2065 +0000030 7570 6c62 6369 6420 6d6f 6961 2e6e 2f0a +0000040 202f 6854 7369 6620 6c69 2065 6168 2073 +0000050 6562 6e65 6d20 646f 6669 6569 2064 7266 +0000060 6d6f 6920 7374 6f20 6972 6967 616e 206c +0000070 6576 7372 6f69 2e6e 2f0a 202f 7449 6820 +0000080 7361 6220 6565 206e 6f66 6d72 7461 6574 +0000090 2064 6f74 6620 7469 7920 756f 2072 6373 +00000a0 6572 6e65 0a2e 6f6d 7564 656c 7020 6f68 +00000b0 656e 6f6e 203b 2020 2020 2f2f 6f20 7470 +00000c0 6f69 616e 0a6c 6d69 6f70 7472 7320 6474 +00000d0 732e 6474 6f69 203b 2020 2f2f 7720 6972 +00000e0 6574 6c66 206e 2020 2020 690a 706d 726f +00000f0 2074 7473 2e64 7463 7079 3b65 2020 2f20 +0000100 202f 7369 6964 6967 2074 2020 2020 690a +0000110 706d 726f 2074 7473 2e64 7473 6572 6d61 +0000120 203b 2f20 202f 7542 6666 7265 6465 6946 +0000130 656c 0a0a 2f2f 4a20 7375 2074 6f66 2072 +0000140 6572 6461 6261 6c69 7469 2079 6928 616d +0000150 6967 656e 6320 6168 5b72 5b5d 5b5d 6863 +0000160 7261 5d5b 295d 2020 2020 610a 696c 7361 +0000170 6320 6168 5b72 205d 7473 6972 676e 0a3b +0000180 6c61 6169 2073 7473 6972 676e 5d5b 7320 +0000190 7274 6e69 6167 7272 7961 0a3b 2f0a 2f2f +00001a0 5320 7274 7069 2073 6f6e 2d6e 6964 6967 +00001b0 2074 6863 7261 6361 6574 7372 6620 6f72 +00001c0 206d 6874 2065 7473 6972 676e 2820 4f43 +00001d0 2957 730a 7274 6e69 2067 7473 6972 4e70 +00001e0 6e6f 6944 6967 2874 6920 206e 7473 6972 +00001f0 676e 6c20 6e69 2065 2029 7b0a 200a 2020 +0000200 7320 7274 6e69 2067 6572 3b74 200a 2020 +0000210 6620 726f 6165 6863 7528 6e69 2074 2c69 +0000220 6320 203b 696c 656e 2029 0a7b 2020 2020 +0000230 2020 2020 2f2f 4520 7272 726f 203a 7473 +0000240 2e64 7463 7079 2e65 
7369 6964 6967 2074 +0000250 7461 4320 5c3a 6d64 5c64 7273 5c63 6870 +0000260 626f 736f 735c 6474 635c 7974 6570 642e +0000270 3328 2937 0a20 2020 2020 2020 2020 2f2f +0000280 6320 6e6f 6c66 6369 7374 7720 7469 2068 +0000290 7473 2e64 7473 6572 6d61 692e 6473 6769 +00002a0 7469 6120 2074 3a43 645c 646d 735c 6372 +00002b0 705c 6f68 6f62 5c73 7473 5c64 7473 6572 +00002c0 6d61 642e 3228 3239 2934 200a 2020 2020 +00002d0 2020 6920 2066 2128 7473 2e64 7463 7079 +00002e0 2e65 7369 6964 6967 2874 2963 2029 0a7b +00002f0 2020 2020 2020 2020 2020 2020 6669 2820 +0000300 7221 7465 0a29 2020 2020 2020 2020 2020 +0000310 2020 2020 2020 6572 2074 203d 696c 656e +0000320 305b 2e2e 5d69 203b 2020 0a20 2020 2020 +0000330 2020 2020 207d 2020 0a20 2020 2020 2020 +0000340 2020 6c65 6573 6920 2066 7228 7465 0a29 +0000350 2020 2020 2020 2020 2020 2020 6572 2074 +0000360 3d7e 6320 203b 2020 0a20 2020 2020 207d +0000370 2020 0a20 2020 2020 6572 7574 6e72 7220 +0000380 7465 723f 7465 6c3a 6e69 3b65 7d0a 0a0a +0000390 6e75 7469 6574 7473 7b20 200a 2020 6120 +00003a0 7373 7265 2874 7320 7274 7069 6f4e 446e +00003b0 6769 7469 2228 7361 6664 2922 3d20 203d +00003c0 2222 2020 3b29 200a 2020 6120 7373 7265 +00003d0 2874 7320 7274 7069 6f4e 446e 6769 7469 +00003e0 2228 275c 3331 3d2d 2032 6b34 706f 2922 +00003f0 3d20 203d 2220 3331 3432 2022 2920 0a3b +0000400 0a7d 2f0a 2f2f 4320 6e6f 6576 7472 2073 +0000410 2061 6f77 6472 6920 746e 206f 2061 756e +0000420 626d 7265 202c 6769 6f6e 6972 676e 6120 +0000430 6c6c 6e20 6e6f 6120 706c 6168 6320 6168 +0000440 6172 7463 7265 2073 0a20 7473 6972 676e +0000450 7720 726f 5464 4e6f 6d75 2028 6e69 7320 +0000460 7274 6e69 2067 6f77 6472 2920 7b0a 2f0a +0000470 202f 7274 6e61 6c73 7461 6f69 206e 6174 +0000480 6c62 2065 6f66 2072 6874 2065 6174 6b73 +0000490 6120 2074 6168 646e 630a 6e6f 7473 6320 +00004a0 6168 5b72 3532 5d36 5420 4152 534e 414c +00004b0 4554 3d20 2020 2020 200a 2020 2220 2020 +00004c0 2020 2020 2020 2020 2020 2020 2020 2020 +00004d0 2020 2020 2020 2020 2020 2020 2020 2022 +00004e0 2f20 202f 2030 2020 200a 2020 2220 2020 +00004f0 2020 2020 2020 2020 2020 2020 2020 3130 +0000500 3332 3534 3736 3938 2020 2020 2020 2022 +0000510 2f20 202f 3233 2020 2020 0a20 2020 2020 +0000520 2022 3735 3336 3430 3939 3136 3837 3135 +0000530 3838 3231 3433 3637 3232 3933 2020 2020 +0000540 2220 2020 2f2f 3620 2034 2020 200a 2020 +0000550 2220 3520 3637 3033 3934 3639 3731 3538 +0000560 3831 3138 3332 3734 3236 3332 2039 2020 +0000570 2020 0a22 2020 2020 2022 2020 2020 2020 +0000580 2020 2020 2020 2020 2020 2020 2020 2020 +0000590 2020 2020 2020 2020 2220 200a 2020 2220 +00005a0 2020 2020 2020 2020 2020 2020 2020 2020 +* +00005c0 0a22 2020 2020 2022 2020 2020 2020 2020 +00005d0 2020 2020 2020 2020 2020 2020 2020 2020 +00005e0 2020 2020 2020 2220 2020 2020 200a 2020 +00005f0 2220 2020 2020 2020 2020 2020 2020 2020 +0000600 2020 2020 2020 2020 2020 2020 2020 2020 +0000610 2020 3b22 200a 2020 7320 7274 6e69 2067 +0000620 6572 3b74 200a 2020 6620 726f 6165 6863 +0000630 6328 203b 6163 7473 7528 7962 6574 5d5b +0000640 7729 726f 2964 200a 2020 2020 2020 6920 +0000650 2066 5428 4152 534e 414c 4554 635b 205d +0000660 3d21 2720 2720 0a29 2020 2020 2020 2020 +0000670 2020 2020 6572 2074 3d7e 5420 4152 534e +0000680 414c 4554 635b 3b5d 200a 2020 7220 7465 +0000690 7275 206e 6572 3b74 7d0a 0a0a 6e75 7469 +00006a0 6574 7473 7b20 200a 2f2f 5420 7365 2074 +00006b0 6f77 6472 6f54 754e 206d 7375 6e69 2067 +00006c0 6874 2065 6174 6c62 2065 7266 6d6f 7420 +00006d0 6568 7420 7361 206b 6564 6373 6972 7470 
+00006e0 6f69 2e6e 200a 7361 6573 7472 2028 3022 +00006f0 3131 3231 3232 3333 3433 3534 3635 3636 +0000700 3737 3837 3838 3939 2239 3d20 0a3d 2020 +0000710 7720 726f 5464 4e6f 6d75 2228 2045 207c +0000720 204a 204e 2051 207c 2052 2057 2058 207c +0000730 2044 2053 2059 207c 2046 2054 207c 2041 +0000740 204d 207c 2043 2049 2056 207c 2042 204b +0000750 2055 207c 204c 204f 2050 207c 2047 2048 +0000760 225a 2929 0a3b 6120 7373 7265 2874 2220 +0000770 3130 3131 3232 3332 3333 3434 3535 3636 +0000780 3736 3737 3838 3938 3939 2022 3d3d 0a20 +0000790 2020 7720 726f 5464 4e6f 6d75 2228 2065 +00007a0 207c 206a 206e 2071 207c 2072 2077 2078 +00007b0 207c 2064 2073 2079 207c 2066 2074 207c +00007c0 2061 206d 207c 2063 2069 2076 207c 2062 +00007d0 206b 2075 207c 206c 206f 2070 207c 2067 +00007e0 2068 227a 2929 0a3b 6120 7373 7265 2874 +00007f0 2220 3130 3332 3534 3736 3938 2022 3d3d +0000800 0a20 2020 7720 726f 5464 4e6f 6d75 2228 +0000810 2030 207c 2020 2031 2020 207c 2020 2032 +0000820 2020 207c 2020 2033 2020 207c 3420 2020 +0000830 207c 3520 2020 207c 2020 2036 2020 207c +0000840 2020 2037 2020 207c 2020 2038 2020 207c +0000850 2020 2239 2929 0a3b 0a7d 760a 696f 2064 +0000860 616d 6e69 2028 7473 6972 676e 5d5b 6120 +0000870 6772 2073 0a29 0a7b 2020 2020 2f2f 5420 +0000880 6968 2073 7361 6f73 6963 7461 7669 2065 +0000890 7261 6172 2079 616d 7370 6120 6e20 6d75 +00008a0 6562 2072 6f74 6120 206e 7261 6172 2079 +00008b0 666f 7720 726f 7364 202e 2020 0a20 2020 +00008c0 2020 7473 6972 676e 7261 6172 5b79 7473 +00008d0 6972 676e 205d 2020 6e20 6d75 7732 726f +00008e0 7364 0a3b 200a 2020 6620 726f 6165 6863 +00008f0 7328 7274 6e69 2067 6f77 6472 203b 656e +0000900 2077 7542 6666 7265 6465 6946 656c 2228 +0000910 6964 7463 6f69 616e 7972 742e 7478 2022 +0000920 2029 0a29 2020 2020 2020 2020 756e 326d +0000930 6f77 6472 5b73 7720 726f 5464 4e6f 6d75 +0000940 7728 726f 2964 5d20 7e20 203d 6f77 6472 +0000950 642e 7075 203b 2020 2020 2020 2f20 202f +0000960 756d 7473 6420 7075 0a0a 2020 2020 2f2f +0000970 202f 6946 646e 2073 6c61 206c 6c61 6574 +0000980 6e72 7461 7669 7365 6620 726f 7420 6568 +0000990 6720 7669 6e65 6e20 6d75 6562 0a72 2020 +00009a0 2020 2f2f 202f 7328 6f68 6c75 2064 6168 +00009b0 6576 6220 6565 206e 7473 6972 7070 6465 +00009c0 6620 6f72 206d 6f6e 2d6e 6964 6967 2074 +00009d0 6863 7261 6361 6574 7372 0a29 2020 2020 +00009e0 7473 6972 676e 7261 6172 2079 465f 6e69 +00009f0 5764 726f 7364 2028 7473 6972 676e 6e20 +0000a00 6d75 6562 7372 202c 6f62 6c6f 6420 6769 +0000a10 7469 6b6f 2920 200a 2020 6920 206e 0a7b +0000a20 2020 2020 2020 2020 7361 6573 7472 6e28 +0000a30 6d75 6562 7372 6c2e 6e65 7467 2068 203e +0000a40 3020 3b29 2020 2020 200a 2020 7d20 2020 +0000a50 2020 200a 2020 6f20 7475 7228 7365 6c75 +0000a60 2974 7b20 200a 2020 2020 2020 6620 726f +0000a70 6165 6863 2820 3b61 7220 7365 6c75 2974 +0000a80 200a 2020 2020 2020 2020 2020 6120 7373 +0000a90 7265 2874 7720 726f 5464 4e6f 6d75 6128 +0000aa0 2029 3d3d 6e20 6d75 6562 7372 2920 0a3b +0000ab0 2020 2020 207d 2020 0a20 2020 2020 6f62 +0000ac0 7964 7b20 200a 2020 2020 2020 7320 7274 +0000ad0 6e69 6167 7272 7961 7220 7465 0a3b 2020 +0000ae0 2020 2020 2020 6f62 6c6f 6620 756f 646e +0000af0 6f77 6472 3d20 6620 6c61 6573 0a3b 2020 +0000b00 2020 2020 2020 6f66 2072 7528 6e69 2074 +0000b10 3d74 3b31 7420 3d3c 756e 626d 7265 2e73 +0000b20 656c 676e 6874 203b 2b2b 2974 7b20 200a +0000b30 2020 2020 2020 2020 2020 6120 7475 206f +0000b40 6c61 6574 6e72 7461 7669 7365 3d20 6e20 +0000b50 6d75 6562 7372 305b 2e2e 5d74 6920 206e +0000b60 756e 326d 6f77 
6472 3b73 200a 2020 2020 +0000b70 2020 2020 2020 6920 2066 2128 6c61 6574 +0000b80 6e72 7461 7669 7365 0a29 2020 2020 2020 +0000b90 2020 2020 2020 2020 2020 6f63 746e 6e69 +0000ba0 6575 0a3b 2020 2020 2020 2020 2020 2020 +0000bb0 6f66 6e75 7764 726f 2064 203d 7274 6575 +0000bc0 0a3b 2020 2020 2020 2020 2020 2020 6669 +0000bd0 2820 756e 626d 7265 2e73 656c 676e 6874 +0000be0 3e20 2020 2974 7b20 200a 2020 2020 2020 +0000bf0 2020 2020 2020 2020 2f20 202f 6f43 626d +0000c00 6e69 2065 6c61 206c 7563 7272 6e65 2074 +0000c10 6c61 6574 6e72 7461 7669 7365 7720 7469 +0000c20 2068 6c61 206c 6c61 6574 6e72 7461 7669 +0000c30 7365 2020 2020 0a20 2020 2020 2020 2020 +0000c40 2020 2020 2020 2020 2f2f 6f20 2066 6874 +0000c50 2065 6572 7473 2820 656e 7478 7020 6569 +0000c60 6563 6320 6e61 7320 6174 7472 7720 7469 +0000c70 2068 2061 6964 6967 2974 2020 2020 2020 +0000c80 2020 2020 2020 2020 200a 2020 2020 2020 +0000c90 2020 2020 2020 2020 6620 726f 6165 6863 +0000ca0 2820 3261 203b 465f 6e69 5764 726f 7364 +0000cb0 2028 756e 626d 7265 5b73 2e74 242e 2c5d +0000cc0 7420 7572 2065 2020 2020 2029 0a29 2020 +0000cd0 2020 2020 2020 2020 2020 2020 2020 2020 +0000ce0 2020 6f66 6572 6361 2868 3161 203b 612a +0000cf0 746c 7265 616e 6974 6576 2973 200a 2020 +0000d00 2020 2020 2020 2020 2020 2020 2020 2020 +0000d10 2020 2020 6572 2074 3d7e 6120 2031 207e +0000d20 2022 2022 207e 3261 0a3b 2020 2020 2020 +0000d30 2020 2020 2020 0a7d 2020 2020 2020 2020 +0000d40 2020 2020 6c65 6573 2020 2020 200a 2020 +0000d50 2020 2020 2020 2020 2020 2020 7220 7465 +0000d60 7e20 203d 612a 746c 7265 616e 6974 6576 +0000d70 3b73 2020 2020 2f2f 6120 7070 6e65 2064 +0000d80 6874 7365 2065 6c61 6574 6e72 7461 7669 +0000d90 7365 200a 2020 2020 2020 7d20 200a 2020 +0000da0 2020 2020 2f20 202f 7254 2079 6f74 6b20 +0000db0 6565 2070 2031 6964 6967 2c74 6f20 6c6e +0000dc0 2079 6669 7720 2765 6572 6120 6c6c 776f +0000dd0 6465 6120 646e 6e20 206f 746f 6568 0a72 +0000de0 2020 2020 2020 2020 2f2f 6120 746c 7265 +0000df0 616e 6974 6576 2073 6577 6572 6620 756f +0000e00 646e 200a 2020 2020 2020 2f20 202f 6554 +0000e10 7473 6e69 2067 7222 7465 6c2e 6e65 7467 +0000e20 2268 6d20 6b61 7365 6d20 726f 2065 6573 +0000e30 736e 2065 6874 6e61 7420 7365 6974 676e +0000e40 2220 6f66 6e75 7764 726f 2264 0a2c 2020 +0000e50 2020 2020 2020 2f2f 6220 7475 7420 6568 +0000e60 6f20 6874 7265 6920 706d 656c 656d 746e +0000e70 7461 6f69 736e 7320 6565 206d 6f74 6420 +0000e80 206f 756a 7473 7420 6968 2e73 200a 2020 +0000e90 2020 2020 6920 2066 6428 6769 7469 6b6f +0000ea0 2620 2026 6621 756f 646e 6f77 6472 2029 +0000eb0 207b 2f2f 6572 2e74 656c 676e 6874 3d20 +0000ec0 203d 2030 0a20 2020 2020 2020 2020 2020 +0000ed0 2020 6669 6e28 6d75 6562 7372 6c2e 6e65 +0000ee0 7467 2068 203e 3120 2029 0a7b 2020 2020 +0000ef0 2020 2020 2020 2020 2020 2020 2f2f 4320 +0000f00 6d6f 6962 656e 3120 6420 6769 7469 7720 +0000f10 7469 2068 6c61 206c 6c61 6574 616e 6974 +0000f20 6576 2073 7266 6d6f 7420 6568 7220 7365 +0000f30 2074 2020 0a20 2020 2020 2020 2020 2020 +0000f40 2020 2020 2020 2f2f 2820 656e 7478 7020 +0000f50 6569 6563 6320 6e61 6e20 746f 7320 6174 +0000f60 7472 7720 7469 2068 2061 6964 6967 2974 +0000f70 2020 2020 2020 2020 2020 200a 2020 2020 +0000f80 2020 2020 2020 2020 2020 6620 726f 6165 +0000f90 6863 2820 3b61 5f20 6946 646e 6f57 6472 +0000fa0 2873 6e20 6d75 6562 7372 315b 2e2e 5d24 +0000fb0 202c 6166 736c 2065 2029 0a29 2020 2020 +0000fc0 2020 2020 2020 2020 2020 2020 2020 2020 +0000fd0 6572 2074 3d7e 6e20 6d75 6562 7372 305b +0000fe0 2e2e 5d31 7e20 2220 2220 7e20 6120 0a3b 
+0000ff0 2020 2020 2020 2020 2020 2020 207d 2020 +0001000 0a20 2020 2020 2020 2020 2020 2020 6c65 +0001010 6573 2020 2020 200a 2020 2020 2020 2020 +0001020 2020 2020 2020 7220 7465 7e20 203d 756e +0001030 626d 7265 5b73 2e30 312e 3b5d 2020 2020 +0001040 2f2f 6a20 7375 2074 7061 6570 646e 7420 +0001050 6968 2073 6964 6967 2074 2020 2020 2020 +0001060 2020 2020 2020 200a 2020 2020 2020 7d20 +0001070 2020 2020 200a 2020 2020 2020 7220 7465 +0001080 7275 206e 6572 3b74 200a 2020 7d20 0a0a +0001090 2020 2020 2f2f 202f 5428 6968 2073 7566 +00010a0 636e 6974 6e6f 7720 7361 6920 6c6e 6e69 +00010b0 6465 6920 206e 6874 2065 726f 6769 6e69 +00010c0 6c61 7020 6f72 7267 6d61 2029 200a 2020 +00010d0 2f20 2f2f 4620 6e69 7364 6120 6c6c 6120 +00010e0 746c 7265 616e 6974 6576 2073 6f66 2072 +00010f0 6874 2065 6967 6576 206e 6870 6e6f 2065 +0001100 756e 626d 7265 0a20 2020 2020 2f2f 202f +0001110 6552 7574 6e72 3a73 6120 7272 7961 6f20 +0001120 2066 7473 6972 676e 2073 200a 2020 7320 +0001130 7274 6e69 6167 7272 7961 4620 6e69 5764 +0001140 726f 7364 2028 7473 6972 676e 7020 6f68 +0001150 656e 6e5f 6d75 6562 2072 0a29 2020 2020 +0001160 0a7b 2020 2020 2020 2020 6669 2820 7021 +0001170 6f68 656e 6e5f 6d75 6562 2e72 656c 676e +0001180 6874 0a29 2020 2020 2020 2020 2020 2020 +0001190 6572 7574 6e72 6e20 6c75 3b6c 200a 2020 +00011a0 2020 2020 2f20 202f 7453 6972 2070 6874 +00011b0 2065 6f6e 2d6e 6964 6967 2074 6863 7261 +00011c0 6361 6574 7372 6620 6f72 206d 6874 2065 +00011d0 6870 6e6f 2065 756e 626d 7265 202c 6e61 +00011e0 0a64 2020 2020 2020 2020 2f2f 7020 7361 +00011f0 2073 7469 7420 206f 6874 2065 6572 7563 +0001200 7372 7669 2065 7566 636e 6974 6e6f 2820 +0001210 656c 6461 6e69 2067 6964 6967 2074 7369 +0001220 6120 6c6c 776f 6465 0a29 2020 2020 2020 +0001230 2020 6572 7574 6e72 5f20 6946 646e 6f57 +0001240 6472 2873 7320 7274 7069 6f4e 446e 6769 +0001250 7469 7028 6f68 656e 6e5f 6d75 6562 2972 +0001260 202c 7274 6575 2920 203b 2020 0a20 2020 +0001270 2020 207d 2020 0a20 2020 2020 200a 2020 +0001280 2f20 202f 6552 6461 7420 6568 7020 6f68 +0001290 656e 6e20 6d75 6562 7372 2020 2020 0a20 +00012a0 2020 2020 6f66 6572 6361 2868 7473 6972 +00012b0 676e 7020 6f68 656e 203b 656e 2077 7542 +00012c0 6666 7265 6465 6946 656c 2228 6e69 7570 +00012d0 2e74 7874 2274 2020 2920 2920 200a 2020 +00012e0 2020 2020 6620 726f 6165 6863 6128 746c +00012f0 7265 616e 6974 6576 203b 6946 646e 6f57 +0001300 6472 2873 7020 6f68 656e 2920 2920 200a +0001310 2020 2020 2020 2020 2020 7720 6972 6574 +0001320 6c66 286e 6870 6e6f 2c65 2220 203a 2c22 +0001330 6120 746c 7265 616e 6974 6576 2920 0a3b +0001340 0a7d 000a +0001343 diff --git a/vendor/pygments/tests/examplefiles/hexdump_od b/vendor/pygments/tests/examplefiles/hexdump_od new file mode 100644 index 0000000..a407aef --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hexdump_od @@ -0,0 +1,310 @@ +0000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 >// Created by Li< +0000020 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e >onello Lunesu an< +0000040 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 >d placed in the < +0000060 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f >public domain../< +0000100 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 >/ This file has < +0000120 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 >been modified fr< +0000140 6f 6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 >om its original < +0000160 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 >version..// It h< +0000200 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 >as been formatte< +0000220 
64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 >d to fit your sc< +0000240 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f >reen..module pho< +0000260 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 >neno; // opt< +0000300 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 >ional.import std< +0000320 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 >.stdio; // wri< +0000340 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 >tefln .impor< +0000360 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f >t std.ctype; /< +0000400 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 >/ isdigit .i< +0000420 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d >mport std.stream< +0000440 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 >; // BufferedFi< +0000460 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 >le..// Just for < +0000500 72 65 61 64 61 62 69 6c 69 74 79 20 28 69 6d 61 >readability (ima< +0000520 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 >gine char[][][ch< +0000540 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 >ar[]]) .alias< +0000560 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a > char[] string;.< +0000600 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 >alias string[] s< +0000620 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f >tringarray;..///< +0000640 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 > Strips non-digi< +0000660 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f >t characters fro< +0000700 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f >m the string (CO< +0000720 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e >W).string stripN< +0000740 6f 6e 44 69 67 69 74 28 20 69 6e 20 73 74 72 69 >onDigit( in stri< +0000760 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 >ng line ) .{. < +0001000 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 > string ret;. < +0001020 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c > foreach(uint i,< +0001040 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 > c; line) {. < +0001060 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 > // Error: st< +0001100 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 >d.ctype.isdigit < +0001120 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 >at C:\dmd\src\ph< +0001140 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 >obos\std\ctype.d< +0001160 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f >(37) . //< +0001200 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 > conflicts with < +0001220 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 >std.stream.isdig< +0001240 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 >it at C:\dmd\src< +0001260 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 >\phobos\std\stre< +0001300 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 >am.d(2924). < +0001320 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 > if (!std.ctyp< +0001340 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a >e.isdigit(c)) {.< +0001360 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 > if (< +0001400 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 >!ret). < +0001420 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 > ret = line< +0001440 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 >[0..i]; . < +0001460 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 > } . < +0001500 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a > else if (ret).< +0001520 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 > ret < +0001540 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 >~= c; . } < +0001560 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 > . 
return r< +0001600 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a >et?ret:line;.}..< +0001620 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 >unittest {. a< +0001640 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 >ssert( stripNonD< +0001660 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 >igit("asdf") == < +0001700 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 >"" );. asser< +0001720 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 >t( stripNonDigit< +0001740 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 >("\'13-=2 4kop")< +0001760 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a > == "1324" );.< +0002000 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 >}../// Converts < +0002020 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 >a word into a nu< +0002040 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 >mber, ignoring a< +0002060 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 >ll non alpha cha< +0002100 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 >racters .string< +0002120 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 > wordToNum( in s< +0002140 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f >tring word ).{./< +0002160 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 >/ translation ta< +0002200 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b >ble for the task< +0002220 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 > at hand.const c< +0002240 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 >har[256] TRANSLA< +0002260 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 >TE = . " < +0002300 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0002320 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 > " < +0002340 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 > // 0 . " < +0002360 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 > 01< +0002400 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 >23456789 " < +0002420 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 > // 32 . < +0002440 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 >" 57630499617851< +0002460 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 >881234762239 < +0002500 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 > " // 64 . < +0002520 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 > " 5763049961785< +0002540 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 >1881234762239 < +0002560 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 > ". " < +0002600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0002620 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 > ". "< +0002640 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +* +0002700 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 >". " < +0002720 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0002740 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 > " . < +0002760 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > " < +0003000 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0003020 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 > ";. string < +0003040 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 >ret;. foreach< +0003060 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d >(c; cast(ubyte[]< +0003100 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 >)word). i< +0003120 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 >f (TRANSLATE[c] < +0003140 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 >!= ' '). < +0003160 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 > ret ~= TRANS< +0003200 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 >LATE[c];. 
ret< +0003220 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 >urn ret;.}..unit< +0003240 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 >test {. // Test < +0003260 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 >wordToNum using < +0003300 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 >the table from t< +0003320 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 >he task descript< +0003340 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 >ion.. assert( "0< +0003360 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 >1112223334455666< +0003400 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 >777888999" ==. < +0003420 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 > wordToNum("E | < +0003440 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 >J N Q | R W X | < +0003460 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 >D S Y | F T | A < +0003500 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 >M | C I V | B K < +0003520 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 >U | L O P | G H < +0003540 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 >Z"));. assert( "< +0003560 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 >0111222333445566< +0003600 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a >6777888999" == .< +0003620 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 > wordToNum("e < +0003640 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 >| j n q | r w x < +0003660 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 >| d s y | f t | < +0003700 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 >a m | c i v | b < +0003720 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 >k u | l o p | g < +0003740 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 >h z"));. assert(< +0003760 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d > "0123456789" ==< +0004000 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 > . wordToNum("< +0004020 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 >0 | 1 | 2 < +0004040 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 > | 3 | 4 < +0004060 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 >| 5 | 6 | < +0004100 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 > 7 | 8 | < +0004120 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 > 9"));.}..void < +0004140 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 >main( string[] a< +0004160 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 >rgs ).{. // T< +0004200 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 >his associative < +0004220 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d >array maps a num< +0004240 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 >ber to an array < +0004260 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 >of words. . < +0004300 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 > stringarray[st< +0004320 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 >ring] num2wor< +0004340 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 >ds;.. foreach< +0004360 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 >(string word; ne< +0004400 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 >w BufferedFile("< +0004420 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 >dictionary.txt" < +0004440 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 >) ). num2< +0004460 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d >words[ wordToNum< +0004500 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 >(word) ] ~= word< +0004520 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 >.dup; // < +0004540 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f >must dup.. 
//< +0004560 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 >/ Finds all alte< +0004600 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 >rnatives for the< +0004620 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 > given number. < +0004640 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 > /// (should ha< +0004660 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 >ve been stripped< +0004700 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 > from non-digit < +0004720 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 >characters). < +0004740 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e >stringarray _Fin< +0004760 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e >dWords( string n< +0005000 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 >umbers, bool dig< +0005020 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a >itok ). in {.< +0005040 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e > assert(n< +0005060 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 >umbers.length > < +0005100 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 > 0); . } < +0005120 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c > . out(resul< +0005140 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 >t) {. for< +0005160 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 >each (a; result)< +0005200 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 >. ass< +0005220 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 >ert( wordToNum(a< +0005240 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a >) == numbers );.< +0005260 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f > } . bo< +0005300 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 >dy {. str< +0005320 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 >ingarray ret;. < +0005340 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 > bool found< +0005360 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 >word = false;. < +0005400 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 > for (uint < +0005420 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e >t=1; t<=numbers.< +0005440 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 >length; ++t) {. < +0005460 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 > auto < +0005500 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e >alternatives = n< +0005520 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 >umbers[0..t] in < +0005540 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 >num2words;. < +0005560 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 > if (!alte< +0005600 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 >rnatives). < +0005620 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e > contin< +0005640 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 >ue;. < +0005660 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 >foundword = true< +0005700 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 >;. if< +0005720 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 > (numbers.length< +0005740 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 > > t) {. < +0005760 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 > // Comb< +0006000 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 >ine all current < +0006020 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 >alternatives wit< +0006040 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 >h all alternativ< +0006060 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 >es . 
< +0006100 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 > // of th< +0006120 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 >e rest (next pie< +0006140 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 >ce can start wit< +0006160 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 >h a digit) < +0006200 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 > . < +0006220 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 > foreach< +0006240 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 > (a2; _FindWords< +0006260 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c >( numbers[t..$],< +0006300 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 > true ) ). < +0006320 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0006340 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 > foreach(a1; *a< +0006360 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 >lternatives). < +0006400 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0006420 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 > ret ~= a1 ~ < +0006440 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 >" " ~ a2;. < +0006460 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 > }. < +0006500 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 > else . < +0006520 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 > ret< +0006540 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 > ~= *alternative< +0006560 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 >s; // append < +0006600 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 >these alternativ< +0006620 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 >es. }. < +0006640 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b > // Try to k< +0006660 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c >eep 1 digit, onl< +0006700 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 >y if we're allow< +0006720 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a >ed and no other.< +0006740 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 > // alter< +0006760 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 >natives were fou< +0007000 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 >nd. // Te< +0007020 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 >sting "ret.lengt< +0007040 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 >h" makes more se< +0007060 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 >nse than testing< +0007100 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 > "foundword",. < +0007120 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 > // but the< +0007140 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 > other implement< +0007160 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 >ations seem to d< +0007200 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 >o just this.. < +0007220 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b > if (digitok< +0007240 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 > && !foundword) < +0007260 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d >{ //ret.length =< +0007300 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 >= 0 . < +0007320 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e > if(numbers.len< +0007340 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 >gth > 1) {. < +0007360 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 > // C< +0007400 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 >ombine 1 digit w< +0007420 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 >ith all altenati< +0007440 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 >ves from the res< +0007460 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 >t . 
< +0007500 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 > // (next p< +0007520 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 >iece can not sta< +0007540 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 >rt with a digit)< +0007560 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 > . < +0007600 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 > forea< +0007620 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 >ch (a; _FindWord< +0007640 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d >s( numbers[1..$]< +0007660 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 >, false ) ). < +0007700 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < +0007720 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 >ret ~= numbers[0< +0007740 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a >..1] ~ " " ~ a;.< +0007760 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 > } < +0010000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c > . el< +0010020 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 >se . < +0010040 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 > ret ~= nu< +0010060 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 >mbers[0..1]; < +0010100 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 >// just append t< +0010120 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 >his digit < +0010140 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d > . }< +0010160 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 > . ret< +0010200 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a >urn ret;. }..< +0010220 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 > /// (This fu< +0010240 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e >nction was inlin< +0010260 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e >ed in the origin< +0010300 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 >al program) . < +0010320 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 > /// Finds all a< +0010340 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 >lternatives for < +0010360 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 >the given phone < +0010400 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 >number . /// < +0010420 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f >Returns: array o< +0010440 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 >f strings . s< +0010460 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 >tringarray FindW< +0010500 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f >ords( string pho< +0010520 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 >ne_number ). < +0010540 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 >{. if (!p< +0010560 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 >hone_number.leng< +0010600 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 >th). < +0010620 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 >return null;. < +0010640 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 > // Strip th< +0010660 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 >e non-digit char< +0010700 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 >acters from the < +0010720 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e >phone number, an< +0010740 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 >d. // pas< +0010760 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 >s it to the recu< +0011000 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 >rsive function (< +0011020 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 >leading digit is< +0011040 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 > allowed). 
< +0011060 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f > return _FindWo< +0011100 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 >rds( stripNonDig< +0011120 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 >it(phone_number)< +0011140 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 >, true ); . < +0011160 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 > } . . < +0011200 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f > // Read the pho< +0011220 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a >ne numbers .< +0011240 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 > foreach(stri< +0011260 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 >ng phone; new Bu< +0011300 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 >fferedFile("inpu< +0011320 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 >t.txt" ) ). < +0011340 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 > foreach(alt< +0011360 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f >ernative; FindWo< +0011400 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 >rds( phone ) ). < +0011420 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 > write< +0011440 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c >fln(phone, ": ",< +0011460 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a > alternative );.< +0011500 7d 0a 0a >}..< +0011503 diff --git a/vendor/pygments/tests/examplefiles/hexdump_xxd b/vendor/pygments/tests/examplefiles/hexdump_xxd new file mode 100644 index 0000000..33a8a6e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/hexdump_xxd @@ -0,0 +1,309 @@ +0000000: 2f2f 2043 7265 6174 6564 2062 7920 4c69 // Created by Li +0000010: 6f6e 656c 6c6f 204c 756e 6573 7520 616e onello Lunesu an +0000020: 6420 706c 6163 6564 2069 6e20 7468 6520 d placed in the +0000030: 7075 626c 6963 2064 6f6d 6169 6e2e 0a2f public domain../ +0000040: 2f20 5468 6973 2066 696c 6520 6861 7320 / This file has +0000050: 6265 656e 206d 6f64 6966 6965 6420 6672 been modified fr +0000060: 6f6d 2069 7473 206f 7269 6769 6e61 6c20 om its original +0000070: 7665 7273 696f 6e2e 0a2f 2f20 4974 2068 version..// It h +0000080: 6173 2062 6565 6e20 666f 726d 6174 7465 as been formatte +0000090: 6420 746f 2066 6974 2079 6f75 7220 7363 d to fit your sc +00000a0: 7265 656e 2e0a 6d6f 6475 6c65 2070 686f reen..module pho +00000b0: 6e65 6e6f 3b20 2020 2020 2f2f 206f 7074 neno; // opt +00000c0: 696f 6e61 6c0a 696d 706f 7274 2073 7464 ional.import std +00000d0: 2e73 7464 696f 3b20 2020 2f2f 2077 7269 .stdio; // wri +00000e0: 7465 666c 6e20 2020 2020 0a69 6d70 6f72 tefln .impor +00000f0: 7420 7374 642e 6374 7970 653b 2020 202f t std.ctype; / +0000100: 2f20 6973 6469 6769 7420 2020 2020 0a69 / isdigit .i +0000110: 6d70 6f72 7420 7374 642e 7374 7265 616d mport std.stream +0000120: 3b20 202f 2f20 4275 6666 6572 6564 4669 ; // BufferedFi +0000130: 6c65 0a0a 2f2f 204a 7573 7420 666f 7220 le..// Just for +0000140: 7265 6164 6162 696c 6974 7920 2869 6d61 readability (ima +0000150: 6769 6e65 2063 6861 725b 5d5b 5d5b 6368 gine char[][][ch +0000160: 6172 5b5d 5d29 2020 2020 0a61 6c69 6173 ar[]]) .alias +0000170: 2063 6861 725b 5d20 7374 7269 6e67 3b0a char[] string;. 
+0000180: 616c 6961 7320 7374 7269 6e67 5b5d 2073 alias string[] s +0000190: 7472 696e 6761 7272 6179 3b0a 0a2f 2f2f tringarray;../// +00001a0: 2053 7472 6970 7320 6e6f 6e2d 6469 6769 Strips non-digi +00001b0: 7420 6368 6172 6163 7465 7273 2066 726f t characters fro +00001c0: 6d20 7468 6520 7374 7269 6e67 2028 434f m the string (CO +00001d0: 5729 0a73 7472 696e 6720 7374 7269 704e W).string stripN +00001e0: 6f6e 4469 6769 7428 2069 6e20 7374 7269 onDigit( in stri +00001f0: 6e67 206c 696e 6520 2920 0a7b 0a20 2020 ng line ) .{. +0000200: 2073 7472 696e 6720 7265 743b 0a20 2020 string ret;. +0000210: 2066 6f72 6561 6368 2875 696e 7420 692c foreach(uint i, +0000220: 2063 3b20 6c69 6e65 2920 7b0a 2020 2020 c; line) {. +0000230: 2020 2020 2f2f 2045 7272 6f72 3a20 7374 // Error: st +0000240: 642e 6374 7970 652e 6973 6469 6769 7420 d.ctype.isdigit +0000250: 6174 2043 3a5c 646d 645c 7372 635c 7068 at C:\dmd\src\ph +0000260: 6f62 6f73 5c73 7464 5c63 7479 7065 2e64 obos\std\ctype.d +0000270: 2833 3729 200a 2020 2020 2020 2020 2f2f (37) . // +0000280: 2063 6f6e 666c 6963 7473 2077 6974 6820 conflicts with +0000290: 7374 642e 7374 7265 616d 2e69 7364 6967 std.stream.isdig +00002a0: 6974 2061 7420 433a 5c64 6d64 5c73 7263 it at C:\dmd\src +00002b0: 5c70 686f 626f 735c 7374 645c 7374 7265 \phobos\std\stre +00002c0: 616d 2e64 2832 3932 3429 0a20 2020 2020 am.d(2924). +00002d0: 2020 2069 6620 2821 7374 642e 6374 7970 if (!std.ctyp +00002e0: 652e 6973 6469 6769 7428 6329 2920 7b0a e.isdigit(c)) {. +00002f0: 2020 2020 2020 2020 2020 2020 6966 2028 if ( +0000300: 2172 6574 290a 2020 2020 2020 2020 2020 !ret). +0000310: 2020 2020 2020 7265 7420 3d20 6c69 6e65 ret = line +0000320: 5b30 2e2e 695d 3b20 2020 200a 2020 2020 [0..i]; . +0000330: 2020 2020 7d20 2020 200a 2020 2020 2020 } . +0000340: 2020 656c 7365 2069 6620 2872 6574 290a else if (ret). +0000350: 2020 2020 2020 2020 2020 2020 7265 7420 ret +0000360: 7e3d 2063 3b20 2020 200a 2020 2020 7d20 ~= c; . } +0000370: 2020 200a 2020 2020 7265 7475 726e 2072 . return r +0000380: 6574 3f72 6574 3a6c 696e 653b 0a7d 0a0a et?ret:line;.}.. +0000390: 756e 6974 7465 7374 207b 0a20 2020 2061 unittest {. a +00003a0: 7373 6572 7428 2073 7472 6970 4e6f 6e44 ssert( stripNonD +00003b0: 6967 6974 2822 6173 6466 2229 203d 3d20 igit("asdf") == +00003c0: 2222 2020 293b 0a20 2020 2061 7373 6572 "" );. asser +00003d0: 7428 2073 7472 6970 4e6f 6e44 6967 6974 t( stripNonDigit +00003e0: 2822 5c27 3133 2d3d 3220 346b 6f70 2229 ("\'13-=2 4kop") +00003f0: 203d 3d20 2022 3133 3234 2220 2029 3b0a == "1324" );. +0000400: 7d0a 0a2f 2f2f 2043 6f6e 7665 7274 7320 }../// Converts +0000410: 6120 776f 7264 2069 6e74 6f20 6120 6e75 a word into a nu +0000420: 6d62 6572 2c20 6967 6e6f 7269 6e67 2061 mber, ignoring a +0000430: 6c6c 206e 6f6e 2061 6c70 6861 2063 6861 ll non alpha cha +0000440: 7261 6374 6572 7320 200a 7374 7269 6e67 racters .string +0000450: 2077 6f72 6454 6f4e 756d 2820 696e 2073 wordToNum( in s +0000460: 7472 696e 6720 776f 7264 2029 0a7b 0a2f tring word ).{./ +0000470: 2f20 7472 616e 736c 6174 696f 6e20 7461 / translation ta +0000480: 626c 6520 666f 7220 7468 6520 7461 736b ble for the task +0000490: 2061 7420 6861 6e64 0a63 6f6e 7374 2063 at hand.const c +00004a0: 6861 725b 3235 365d 2054 5241 4e53 4c41 har[256] TRANSLA +00004b0: 5445 203d 2020 2020 0a20 2020 2022 2020 TE = . " +00004c0: 2020 2020 2020 2020 2020 2020 2020 2020 +00004d0: 2020 2020 2020 2020 2020 2020 2020 2220 " +00004e0: 202f 2f20 3020 2020 0a20 2020 2022 2020 // 0 . 
" +00004f0: 2020 2020 2020 2020 2020 2020 2020 3031 01 +0000500: 3233 3435 3637 3839 2020 2020 2020 2220 23456789 " +0000510: 202f 2f20 3332 2020 2020 200a 2020 2020 // 32 . +0000520: 2220 3537 3633 3034 3939 3631 3738 3531 " 57630499617851 +0000530: 3838 3132 3334 3736 3232 3339 2020 2020 881234762239 +0000540: 2022 2020 2f2f 2036 3420 2020 0a20 2020 " // 64 . +0000550: 2022 2035 3736 3330 3439 3936 3137 3835 " 5763049961785 +0000560: 3138 3831 3233 3437 3632 3233 3920 2020 1881234762239 +0000570: 2020 220a 2020 2020 2220 2020 2020 2020 ". " +0000580: 2020 2020 2020 2020 2020 2020 2020 2020 +0000590: 2020 2020 2020 2020 2022 0a20 2020 2022 ". " +00005a0: 2020 2020 2020 2020 2020 2020 2020 2020 +00005b0: 2020 2020 2020 2020 2020 2020 2020 2020 +00005c0: 220a 2020 2020 2220 2020 2020 2020 2020 ". " +00005d0: 2020 2020 2020 2020 2020 2020 2020 2020 +00005e0: 2020 2020 2020 2022 2020 2020 0a20 2020 " . +00005f0: 2022 2020 2020 2020 2020 2020 2020 2020 " +0000600: 2020 2020 2020 2020 2020 2020 2020 2020 +0000610: 2020 223b 0a20 2020 2073 7472 696e 6720 ";. string +0000620: 7265 743b 0a20 2020 2066 6f72 6561 6368 ret;. foreach +0000630: 2863 3b20 6361 7374 2875 6279 7465 5b5d (c; cast(ubyte[] +0000640: 2977 6f72 6429 0a20 2020 2020 2020 2069 )word). i +0000650: 6620 2854 5241 4e53 4c41 5445 5b63 5d20 f (TRANSLATE[c] +0000660: 213d 2027 2027 290a 2020 2020 2020 2020 != ' '). +0000670: 2020 2020 7265 7420 7e3d 2054 5241 4e53 ret ~= TRANS +0000680: 4c41 5445 5b63 5d3b 0a20 2020 2072 6574 LATE[c];. ret +0000690: 7572 6e20 7265 743b 0a7d 0a0a 756e 6974 urn ret;.}..unit +00006a0: 7465 7374 207b 0a20 2f2f 2054 6573 7420 test {. // Test +00006b0: 776f 7264 546f 4e75 6d20 7573 696e 6720 wordToNum using +00006c0: 7468 6520 7461 626c 6520 6672 6f6d 2074 the table from t +00006d0: 6865 2074 6173 6b20 6465 7363 7269 7074 he task descript +00006e0: 696f 6e2e 0a20 6173 7365 7274 2820 2230 ion.. assert( "0 +00006f0: 3131 3132 3232 3333 3334 3435 3536 3636 1112223334455666 +0000700: 3737 3738 3838 3939 3922 203d 3d0a 2020 777888999" ==. +0000710: 2077 6f72 6454 6f4e 756d 2822 4520 7c20 wordToNum("E | +0000720: 4a20 4e20 5120 7c20 5220 5720 5820 7c20 J N Q | R W X | +0000730: 4420 5320 5920 7c20 4620 5420 7c20 4120 D S Y | F T | A +0000740: 4d20 7c20 4320 4920 5620 7c20 4220 4b20 M | C I V | B K +0000750: 5520 7c20 4c20 4f20 5020 7c20 4720 4820 U | L O P | G H +0000760: 5a22 2929 3b0a 2061 7373 6572 7428 2022 Z"));. assert( " +0000770: 3031 3131 3232 3233 3333 3434 3535 3636 0111222333445566 +0000780: 3637 3737 3838 3839 3939 2220 3d3d 200a 6777888999" == . +0000790: 2020 2077 6f72 6454 6f4e 756d 2822 6520 wordToNum("e +00007a0: 7c20 6a20 6e20 7120 7c20 7220 7720 7820 | j n q | r w x +00007b0: 7c20 6420 7320 7920 7c20 6620 7420 7c20 | d s y | f t | +00007c0: 6120 6d20 7c20 6320 6920 7620 7c20 6220 a m | c i v | b +00007d0: 6b20 7520 7c20 6c20 6f20 7020 7c20 6720 k u | l o p | g +00007e0: 6820 7a22 2929 3b0a 2061 7373 6572 7428 h z"));. assert( +00007f0: 2022 3031 3233 3435 3637 3839 2220 3d3d "0123456789" == +0000800: 200a 2020 2077 6f72 6454 6f4e 756d 2822 . wordToNum(" +0000810: 3020 7c20 2020 3120 2020 7c20 2020 3220 0 | 1 | 2 +0000820: 2020 7c20 2020 3320 2020 7c20 2034 2020 | 3 | 4 +0000830: 7c20 2035 2020 7c20 2020 3620 2020 7c20 | 5 | 6 | +0000840: 2020 3720 2020 7c20 2020 3820 2020 7c20 7 | 8 | +0000850: 2020 3922 2929 3b0a 7d0a 0a76 6f69 6420 9"));.}..void +0000860: 6d61 696e 2820 7374 7269 6e67 5b5d 2061 main( string[] a +0000870: 7267 7320 290a 7b0a 2020 2020 2f2f 2054 rgs ).{. 
// T +0000880: 6869 7320 6173 736f 6369 6174 6976 6520 his associative +0000890: 6172 7261 7920 6d61 7073 2061 206e 756d array maps a num +00008a0: 6265 7220 746f 2061 6e20 6172 7261 7920 ber to an array +00008b0: 6f66 2077 6f72 6473 2e20 2020 200a 2020 of words. . +00008c0: 2020 7374 7269 6e67 6172 7261 795b 7374 stringarray[st +00008d0: 7269 6e67 5d20 2020 206e 756d 3277 6f72 ring] num2wor +00008e0: 6473 3b0a 0a20 2020 2066 6f72 6561 6368 ds;.. foreach +00008f0: 2873 7472 696e 6720 776f 7264 3b20 6e65 (string word; ne +0000900: 7720 4275 6666 6572 6564 4669 6c65 2822 w BufferedFile(" +0000910: 6469 6374 696f 6e61 7279 2e74 7874 2220 dictionary.txt" +0000920: 2920 290a 2020 2020 2020 2020 6e75 6d32 ) ). num2 +0000930: 776f 7264 735b 2077 6f72 6454 6f4e 756d words[ wordToNum +0000940: 2877 6f72 6429 205d 207e 3d20 776f 7264 (word) ] ~= word +0000950: 2e64 7570 3b20 2020 2020 2020 202f 2f20 .dup; // +0000960: 6d75 7374 2064 7570 0a0a 2020 2020 2f2f must dup.. // +0000970: 2f20 4669 6e64 7320 616c 6c20 616c 7465 / Finds all alte +0000980: 726e 6174 6976 6573 2066 6f72 2074 6865 rnatives for the +0000990: 2067 6976 656e 206e 756d 6265 720a 2020 given number. +00009a0: 2020 2f2f 2f20 2873 686f 756c 6420 6861 /// (should ha +00009b0: 7665 2062 6565 6e20 7374 7269 7070 6564 ve been stripped +00009c0: 2066 726f 6d20 6e6f 6e2d 6469 6769 7420 from non-digit +00009d0: 6368 6172 6163 7465 7273 290a 2020 2020 characters). +00009e0: 7374 7269 6e67 6172 7261 7920 5f46 696e stringarray _Fin +00009f0: 6457 6f72 6473 2820 7374 7269 6e67 206e dWords( string n +0000a00: 756d 6265 7273 2c20 626f 6f6c 2064 6967 umbers, bool dig +0000a10: 6974 6f6b 2029 0a20 2020 2069 6e20 7b0a itok ). in {. +0000a20: 2020 2020 2020 2020 6173 7365 7274 286e assert(n +0000a30: 756d 6265 7273 2e6c 656e 6774 6820 3e20 umbers.length > +0000a40: 2030 293b 2020 2020 0a20 2020 207d 2020 0); . } +0000a50: 2020 0a20 2020 206f 7574 2872 6573 756c . out(resul +0000a60: 7429 207b 0a20 2020 2020 2020 2066 6f72 t) {. for +0000a70: 6561 6368 2028 613b 2072 6573 756c 7429 each (a; result) +0000a80: 0a20 2020 2020 2020 2020 2020 2061 7373 . ass +0000a90: 6572 7428 2077 6f72 6454 6f4e 756d 2861 ert( wordToNum(a +0000aa0: 2920 3d3d 206e 756d 6265 7273 2029 3b0a ) == numbers );. +0000ab0: 2020 2020 7d20 2020 200a 2020 2020 626f } . bo +0000ac0: 6479 207b 0a20 2020 2020 2020 2073 7472 dy {. str +0000ad0: 696e 6761 7272 6179 2072 6574 3b0a 2020 ingarray ret;. +0000ae0: 2020 2020 2020 626f 6f6c 2066 6f75 6e64 bool found +0000af0: 776f 7264 203d 2066 616c 7365 3b0a 2020 word = false;. +0000b00: 2020 2020 2020 666f 7220 2875 696e 7420 for (uint +0000b10: 743d 313b 2074 3c3d 6e75 6d62 6572 732e t=1; t<=numbers. +0000b20: 6c65 6e67 7468 3b20 2b2b 7429 207b 0a20 length; ++t) {. +0000b30: 2020 2020 2020 2020 2020 2061 7574 6f20 auto +0000b40: 616c 7465 726e 6174 6976 6573 203d 206e alternatives = n +0000b50: 756d 6265 7273 5b30 2e2e 745d 2069 6e20 umbers[0..t] in +0000b60: 6e75 6d32 776f 7264 733b 0a20 2020 2020 num2words;. +0000b70: 2020 2020 2020 2069 6620 2821 616c 7465 if (!alte +0000b80: 726e 6174 6976 6573 290a 2020 2020 2020 rnatives). +0000b90: 2020 2020 2020 2020 2020 636f 6e74 696e contin +0000ba0: 7565 3b0a 2020 2020 2020 2020 2020 2020 ue;. +0000bb0: 666f 756e 6477 6f72 6420 3d20 7472 7565 foundword = true +0000bc0: 3b0a 2020 2020 2020 2020 2020 2020 6966 ;. if +0000bd0: 2028 6e75 6d62 6572 732e 6c65 6e67 7468 (numbers.length +0000be0: 203e 2020 7429 207b 0a20 2020 2020 2020 > t) {. 
+0000bf0: 2020 2020 2020 2020 202f 2f20 436f 6d62 // Comb +0000c00: 696e 6520 616c 6c20 6375 7272 656e 7420 ine all current +0000c10: 616c 7465 726e 6174 6976 6573 2077 6974 alternatives wit +0000c20: 6820 616c 6c20 616c 7465 726e 6174 6976 h all alternativ +0000c30: 6573 2020 2020 200a 2020 2020 2020 2020 es . +0000c40: 2020 2020 2020 2020 2f2f 206f 6620 7468 // of th +0000c50: 6520 7265 7374 2028 6e65 7874 2070 6965 e rest (next pie +0000c60: 6365 2063 616e 2073 7461 7274 2077 6974 ce can start wit +0000c70: 6820 6120 6469 6769 7429 2020 2020 2020 h a digit) +0000c80: 2020 2020 2020 2020 0a20 2020 2020 2020 . +0000c90: 2020 2020 2020 2020 2066 6f72 6561 6368 foreach +0000ca0: 2028 6132 3b20 5f46 696e 6457 6f72 6473 (a2; _FindWords +0000cb0: 2820 6e75 6d62 6572 735b 742e 2e24 5d2c ( numbers[t..$], +0000cc0: 2074 7275 6520 2020 2020 2920 290a 2020 true ) ). +0000cd0: 2020 2020 2020 2020 2020 2020 2020 2020 +0000ce0: 2020 666f 7265 6163 6828 6131 3b20 2a61 foreach(a1; *a +0000cf0: 6c74 6572 6e61 7469 7665 7329 0a20 2020 lternatives). +0000d00: 2020 2020 2020 2020 2020 2020 2020 2020 +0000d10: 2020 2020 7265 7420 7e3d 2061 3120 7e20 ret ~= a1 ~ +0000d20: 2220 2220 7e20 6132 3b0a 2020 2020 2020 " " ~ a2;. +0000d30: 2020 2020 2020 7d0a 2020 2020 2020 2020 }. +0000d40: 2020 2020 656c 7365 2020 2020 0a20 2020 else . +0000d50: 2020 2020 2020 2020 2020 2020 2072 6574 ret +0000d60: 207e 3d20 2a61 6c74 6572 6e61 7469 7665 ~= *alternative +0000d70: 733b 2020 2020 2f2f 2061 7070 656e 6420 s; // append +0000d80: 7468 6573 6520 616c 7465 726e 6174 6976 these alternativ +0000d90: 6573 0a20 2020 2020 2020 207d 0a20 2020 es. }. +0000da0: 2020 2020 202f 2f20 5472 7920 746f 206b // Try to k +0000db0: 6565 7020 3120 6469 6769 742c 206f 6e6c eep 1 digit, onl +0000dc0: 7920 6966 2077 6527 7265 2061 6c6c 6f77 y if we're allow +0000dd0: 6564 2061 6e64 206e 6f20 6f74 6865 720a ed and no other. +0000de0: 2020 2020 2020 2020 2f2f 2061 6c74 6572 // alter +0000df0: 6e61 7469 7665 7320 7765 7265 2066 6f75 natives were fou +0000e00: 6e64 0a20 2020 2020 2020 202f 2f20 5465 nd. // Te +0000e10: 7374 696e 6720 2272 6574 2e6c 656e 6774 sting "ret.lengt +0000e20: 6822 206d 616b 6573 206d 6f72 6520 7365 h" makes more se +0000e30: 6e73 6520 7468 616e 2074 6573 7469 6e67 nse than testing +0000e40: 2022 666f 756e 6477 6f72 6422 2c0a 2020 "foundword",. +0000e50: 2020 2020 2020 2f2f 2062 7574 2074 6865 // but the +0000e60: 206f 7468 6572 2069 6d70 6c65 6d65 6e74 other implement +0000e70: 6174 696f 6e73 2073 6565 6d20 746f 2064 ations seem to d +0000e80: 6f20 6a75 7374 2074 6869 732e 0a20 2020 o just this.. +0000e90: 2020 2020 2069 6620 2864 6967 6974 6f6b if (digitok +0000ea0: 2026 2620 2166 6f75 6e64 776f 7264 2920 && !foundword) +0000eb0: 7b20 2f2f 7265 742e 6c65 6e67 7468 203d { //ret.length = +0000ec0: 3d20 3020 200a 2020 2020 2020 2020 2020 = 0 . +0000ed0: 2020 6966 286e 756d 6265 7273 2e6c 656e if(numbers.len +0000ee0: 6774 6820 3e20 2031 2920 7b0a 2020 2020 gth > 1) {. +0000ef0: 2020 2020 2020 2020 2020 2020 2f2f 2043 // C +0000f00: 6f6d 6269 6e65 2031 2064 6967 6974 2077 ombine 1 digit w +0000f10: 6974 6820 616c 6c20 616c 7465 6e61 7469 ith all altenati +0000f20: 7665 7320 6672 6f6d 2074 6865 2072 6573 ves from the res +0000f30: 7420 2020 200a 2020 2020 2020 2020 2020 t . +0000f40: 2020 2020 2020 2f2f 2028 6e65 7874 2070 // (next p +0000f50: 6965 6365 2063 616e 206e 6f74 2073 7461 iece can not sta +0000f60: 7274 2077 6974 6820 6120 6469 6769 7429 rt with a digit) +0000f70: 2020 2020 2020 2020 2020 0a20 2020 2020 . 
+0000f80: 2020 2020 2020 2020 2020 2066 6f72 6561 forea +0000f90: 6368 2028 613b 205f 4669 6e64 576f 7264 ch (a; _FindWord +0000fa0: 7328 206e 756d 6265 7273 5b31 2e2e 245d s( numbers[1..$] +0000fb0: 2c20 6661 6c73 6520 2920 290a 2020 2020 , false ) ). +0000fc0: 2020 2020 2020 2020 2020 2020 2020 2020 +0000fd0: 7265 7420 7e3d 206e 756d 6265 7273 5b30 ret ~= numbers[0 +0000fe0: 2e2e 315d 207e 2022 2022 207e 2061 3b0a ..1] ~ " " ~ a;. +0000ff0: 2020 2020 2020 2020 2020 2020 7d20 2020 } +0001000: 200a 2020 2020 2020 2020 2020 2020 656c . el +0001010: 7365 2020 2020 0a20 2020 2020 2020 2020 se . +0001020: 2020 2020 2020 2072 6574 207e 3d20 6e75 ret ~= nu +0001030: 6d62 6572 735b 302e 2e31 5d3b 2020 2020 mbers[0..1]; +0001040: 2f2f 206a 7573 7420 6170 7065 6e64 2074 // just append t +0001050: 6869 7320 6469 6769 7420 2020 2020 2020 his digit +0001060: 2020 2020 2020 0a20 2020 2020 2020 207d . } +0001070: 2020 2020 0a20 2020 2020 2020 2072 6574 . ret +0001080: 7572 6e20 7265 743b 0a20 2020 207d 0a0a urn ret;. }.. +0001090: 2020 2020 2f2f 2f20 2854 6869 7320 6675 /// (This fu +00010a0: 6e63 7469 6f6e 2077 6173 2069 6e6c 696e nction was inlin +00010b0: 6564 2069 6e20 7468 6520 6f72 6967 696e ed in the origin +00010c0: 616c 2070 726f 6772 616d 2920 0a20 2020 al program) . +00010d0: 202f 2f2f 2046 696e 6473 2061 6c6c 2061 /// Finds all a +00010e0: 6c74 6572 6e61 7469 7665 7320 666f 7220 lternatives for +00010f0: 7468 6520 6769 7665 6e20 7068 6f6e 6520 the given phone +0001100: 6e75 6d62 6572 200a 2020 2020 2f2f 2f20 number . /// +0001110: 5265 7475 726e 733a 2061 7272 6179 206f Returns: array o +0001120: 6620 7374 7269 6e67 7320 0a20 2020 2073 f strings . s +0001130: 7472 696e 6761 7272 6179 2046 696e 6457 tringarray FindW +0001140: 6f72 6473 2820 7374 7269 6e67 2070 686f ords( string pho +0001150: 6e65 5f6e 756d 6265 7220 290a 2020 2020 ne_number ). +0001160: 7b0a 2020 2020 2020 2020 6966 2028 2170 {. if (!p +0001170: 686f 6e65 5f6e 756d 6265 722e 6c65 6e67 hone_number.leng +0001180: 7468 290a 2020 2020 2020 2020 2020 2020 th). +0001190: 7265 7475 726e 206e 756c 6c3b 0a20 2020 return null;. +00011a0: 2020 2020 202f 2f20 5374 7269 7020 7468 // Strip th +00011b0: 6520 6e6f 6e2d 6469 6769 7420 6368 6172 e non-digit char +00011c0: 6163 7465 7273 2066 726f 6d20 7468 6520 acters from the +00011d0: 7068 6f6e 6520 6e75 6d62 6572 2c20 616e phone number, an +00011e0: 640a 2020 2020 2020 2020 2f2f 2070 6173 d. // pas +00011f0: 7320 6974 2074 6f20 7468 6520 7265 6375 s it to the recu +0001200: 7273 6976 6520 6675 6e63 7469 6f6e 2028 rsive function ( +0001210: 6c65 6164 696e 6720 6469 6769 7420 6973 leading digit is +0001220: 2061 6c6c 6f77 6564 290a 2020 2020 2020 allowed). +0001230: 2020 7265 7475 726e 205f 4669 6e64 576f return _FindWo +0001240: 7264 7328 2073 7472 6970 4e6f 6e44 6967 rds( stripNonDig +0001250: 6974 2870 686f 6e65 5f6e 756d 6265 7229 it(phone_number) +0001260: 2c20 7472 7565 2029 3b20 2020 200a 2020 , true ); . +0001270: 2020 7d20 2020 200a 2020 2020 0a20 2020 } . . +0001280: 202f 2f20 5265 6164 2074 6865 2070 686f // Read the pho +0001290: 6e65 206e 756d 6265 7273 2020 2020 200a ne numbers . +00012a0: 2020 2020 666f 7265 6163 6828 7374 7269 foreach(stri +00012b0: 6e67 2070 686f 6e65 3b20 6e65 7720 4275 ng phone; new Bu +00012c0: 6666 6572 6564 4669 6c65 2822 696e 7075 fferedFile("inpu +00012d0: 742e 7478 7422 2020 2029 2029 0a20 2020 t.txt" ) ). 
+00012e0: 2020 2020 2066 6f72 6561 6368 2861 6c74 foreach(alt +00012f0: 6572 6e61 7469 7665 3b20 4669 6e64 576f ernative; FindWo +0001300: 7264 7328 2070 686f 6e65 2029 2029 0a20 rds( phone ) ). +0001310: 2020 2020 2020 2020 2020 2077 7269 7465 write +0001320: 666c 6e28 7068 6f6e 652c 2022 3a20 222c fln(phone, ": ", +0001330: 2061 6c74 6572 6e61 7469 7665 2029 3b0a alternative );. +0001340: 7d0a 0a }.. diff --git a/vendor/pygments/tests/examplefiles/File.hy b/vendor/pygments/tests/examplefiles/hybris_File.hy similarity index 100% rename from vendor/pygments/tests/examplefiles/File.hy rename to vendor/pygments/tests/examplefiles/hybris_File.hy diff --git a/vendor/pygments/tests/examplefiles/mg_sample.pro b/vendor/pygments/tests/examplefiles/idl_sample.pro similarity index 100% rename from vendor/pygments/tests/examplefiles/mg_sample.pro rename to vendor/pygments/tests/examplefiles/idl_sample.pro diff --git a/vendor/pygments/tests/examplefiles/iex_example b/vendor/pygments/tests/examplefiles/iex_example new file mode 100644 index 0000000..22407e4 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/iex_example @@ -0,0 +1,23 @@ +iex> :" multi +...> line ' \s \123 \x20 +...> atom" +:" multi\n line ' S \natom" + +iex(1)> <<"hello"::binary, c :: utf8, x::[4, unit(2)]>> = "hello™1" +"hello™1" + +iex(2)> c +8482 + +iex> 1 + :atom +** (ArithmeticError) bad argument in arithmetic expression + :erlang.+(1, :atom) + +iex(3)> 1 + +...(3)> 2 + +...(3)> 3 +6 + +iex> IO.puts "Hello world" +Hello world +:ok diff --git a/vendor/pygments/tests/examplefiles/import.hs b/vendor/pygments/tests/examplefiles/import.hs deleted file mode 100644 index 09058ae..0000000 --- a/vendor/pygments/tests/examplefiles/import.hs +++ /dev/null @@ -1,4 +0,0 @@ -import "mtl" Control.Monad.Trans - -main :: IO () -main = putStrLn "hello world" diff --git a/vendor/pygments/tests/examplefiles/inet_pton6.dg b/vendor/pygments/tests/examplefiles/inet_pton6.dg index 4104b3e..3813d5b 100644 --- a/vendor/pygments/tests/examplefiles/inet_pton6.dg +++ b/vendor/pygments/tests/examplefiles/inet_pton6.dg @@ -1,5 +1,5 @@ -re = import! -sys = import! +import '/re' +import '/sys' # IPv6address = hexpart [ ":" IPv4address ] @@ -20,7 +20,7 @@ addrv6 = re.compile $ r'(?i)(?:{})(?::{})?$'.format hexpart addrv4 # # :return: a decimal integer # -base_n = (q digits) -> foldl (x y) -> (x * q + y) 0 digits +base_n = q digits -> foldl (x y -> x * q + y) 0 digits # Parse a sequence of hexadecimal numbers @@ -29,7 +29,7 @@ base_n = (q digits) -> foldl (x y) -> (x * q + y) 0 digits # # :return: an iterable of Python ints # -unhex = q -> q and map p -> (int p 16) (q.split ':') +unhex = q -> q and map (p -> int p 16) (q.split ':') # Parse an IPv6 address as specified in RFC 4291. @@ -39,33 +39,33 @@ unhex = q -> q and map p -> (int p 16) (q.split ':') # :return: an integer which, written in binary form, points to the same node. # inet_pton6 = address -> - raise $ ValueError 'not a valid IPv6 address' if not (match = addrv6.match address) + not (match = addrv6.match address) => raise $ ValueError 'not a valid IPv6 address' start, end, *ipv4 = match.groups! 
is_ipv4 = not $ None in ipv4 shift = (7 - start.count ':' - 2 * is_ipv4) * 16 - raise $ ValueError 'not a valid IPv6 address' if (end is None and shift) or shift < 0 + (end is None and shift) or shift < 0 => raise $ ValueError 'not a valid IPv6 address' hexaddr = (base_n 0x10000 (unhex start) << shift) + base_n 0x10000 (unhex $ end or '') - (hexaddr << 32) + base_n 0x100 (map int ipv4) if is_ipv4 else hexaddr + if (is_ipv4 => (hexaddr << 32) + base_n 0x100 (map int ipv4)) (otherwise => hexaddr) -inet6_type = q -> switch - not q = 'unspecified' - q == 1 = 'loopback' - (q >> 32) == 0x000000000000ffff = 'IPv4-mapped' - (q >> 64) == 0xfe80000000000000 = 'link-local' - (q >> 120) != 0x00000000000000ff = 'general unicast' - (q >> 112) % (1 << 4) == 0x0000000000000000 = 'multicast w/ reserved scope value' - (q >> 112) % (1 << 4) == 0x000000000000000f = 'multicast w/ reserved scope value' - (q >> 112) % (1 << 4) == 0x0000000000000001 = 'interface-local multicast' - (q >> 112) % (1 << 4) == 0x0000000000000004 = 'admin-local multicast' - (q >> 112) % (1 << 4) == 0x0000000000000005 = 'site-local multicast' - (q >> 112) % (1 << 4) == 0x0000000000000008 = 'organization-local multicast' - (q >> 112) % (1 << 4) == 0x000000000000000e = 'global multicast' - (q >> 112) % (1 << 4) != 0x0000000000000002 = 'multicast w/ unknown scope value' - (q >> 24) % (1 << 112) == 0x00000000000001ff = 'solicited-node multicast' - True = 'link-local multicast' +inet6_type = q -> if + q == 0 => 'unspecified' + q == 1 => 'loopback' + (q >> 32) == 0x000000000000ffff => 'IPv4-mapped' + (q >> 64) == 0xfe80000000000000 => 'link-local' + (q >> 120) != 0x00000000000000ff => 'general unicast' + (q >> 112) % (1 << 4) == 0x0000000000000000 => 'multicast w/ reserved scope value' + (q >> 112) % (1 << 4) == 0x000000000000000f => 'multicast w/ reserved scope value' + (q >> 112) % (1 << 4) == 0x0000000000000001 => 'interface-local multicast' + (q >> 112) % (1 << 4) == 0x0000000000000004 => 'admin-local multicast' + (q >> 112) % (1 << 4) == 0x0000000000000005 => 'site-local multicast' + (q >> 112) % (1 << 4) == 0x0000000000000008 => 'organization-local multicast' + (q >> 112) % (1 << 4) == 0x000000000000000e => 'global multicast' + (q >> 112) % (1 << 4) != 0x0000000000000002 => 'multicast w/ unknown scope value' + (q >> 24) % (1 << 112) == 0x00000000000001ff => 'solicited-node multicast' + otherwise => 'link-local multicast' -print $ (x -> (inet6_type x, hex x)) $ inet_pton6 $ sys.stdin.read!.strip! +print $ (x -> inet6_type x, hex x) $ inet_pton6 $ sys.stdin.read!.strip! diff --git a/vendor/pygments/tests/examplefiles/inform6_example b/vendor/pygments/tests/examplefiles/inform6_example new file mode 100644 index 0000000..6fa1fe5 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/inform6_example @@ -0,0 +1,375 @@ +!% $SMALL ! This is ICL, not a comment. +!% -w + +!% A comprehensive test of Inform6Lexer. + +Switches d2SDq; + +Constant Story "Informal Testing"; +Constant Headline "^Not a game.^";!% This is a comment, not ICL. + +Release 3; +Serial "151213"; +Version 5; + +Ifndef TARGET_ZCODE; +Ifndef TARGET_GLULX; +Ifndef WORDSIZE; +Default WORDSIZE 2; +Constant TARGET_ZCODE; +Endif; +Endif; +Endif; + +Ifv3; Message "Compiling to version 3"; Endif; +Ifv5; Message "Not compiling to version 3"; endif; +ifdef TARGET_ZCODE; +#IFTRUE (#version_number == 5); +Message "Compiling to version 5"; +#ENDIF; +endif ; + +Replace CreatureTest; + +Include "Parser"; +Include "VerbLib"; + +# ! A hash is optional at the top level. 
+Object kitchen "Kitchen" + with description "You are in a kitchen.", + arr 1 2 3 4, + has light; + +#[ Initialise; + location = kitchen; + print "v"; inversion; "^"; +]; + +Ifdef VN_1633; +Replace IsSeeThrough IsSeeThroughOrig; +[ IsSeeThrough * o; + return o hasnt opaque || IsSeeThroughOrig(o); +]; +Endif; + +Abbreviate "test"; + +Array table buffer 260; + +Attribute reversed; +Attribute opaque alias locked; +Constant to reversed; + +Property long additive additive long alias; +Property long long long wingspan alias alias; + +Class Flier with wingspan 5; +Class Bird(10) has animate class Flier with wingspan 2; + +Constant Constant1; +Constant Constant2 Constant1; +Constant Constant3 = Constant2; +Ifdef VN_1633; Undef Constant; Endif; + +Ifdef VN_1633; +Dictionary 'word' 1 2; +Ifnot; +Dictionary dict_word "word"; +Endif; + +Fake_action NotReal; + +Global global1; +Global global2 = 69105; + +Lowstring low_string "low string"; + +Iftrue false; +Message error "Uh-oh!^~false~ shouldn't be ~true~."; +Endif; +Iffalse true; +Message fatalerror "Uh-oh!^~true~ shouldn't be ~false~."; +Endif; + +Nearby person "person" + with name 'person', + description "This person is barely implemented.", + life [ * x y z; + Ask: print_ret (The) self, " says nothing."; + Answer: print (The) self, " didn't say anything.^"; rfalse; + ] + has has animate transparent; + +Object -> -> test_tube "test tube" + with name 'test' "tube" 'testtube', + has ~openable ~opaque container; + +Bird -> pigeon + with name 'pigeon', + description [; + "The pigeon has a wingspan of ", self.&wingspan-->0, " wing units."; + ]; + +Object -> "thimble" with name 'thimble'; + +Object -> pebble "pebble" with name 'pebble'; + +Ifdef TARGET_ZCODE; Trace objects; Endif; + +Statusline score; + +Stub StubR 3; + +Ifdef TARGET_ZCODE; +Zcharacter "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "123456789.,!?_#'0/@{005C}-:()"; +Zcharacter table '@!!' '@<<' '@'A'; +Zcharacter table + '@AE' '@{dc}' '@et' '@:y'; +Ifnot; +Ifdef TARGET_GLULX; +Message "Glulx doesn't use ~Zcharacter~.^Oh well."; ! '~' and '^' work here. +Ifnot; +Message warning "Uh-oh! ^~^"; ! They don't work in other Messages. +Endif; +Endif; + +Include "Grammar"; + +Verb"acquire"'collect'='take'; + +[ NounFilter; return noun ofclass Bird; ]; + +[ ScopeFilter obj; + switch (scope_stage) { + 1: rtrue; + 2: objectloop (obj in compass) PlaceInScope(obj); + 3: "Nothing is in scope."; + } +]; + +Verb meta "t" 'test' + * 'held' held -> TestHeld + * number -> TestNumber + * reversed -> TestAttribute + * 'creature' creature -> TestCreature + * 'multiheld' multiheld -> TestMultiheld + * 'm' multiexcept 'into'/"in" noun -> TestMultiexcept + * 'm' multiinside 'from' noun -> TestMultiinside + * multi -> TestMulti + * 'filter'/'f' noun=NounFilter -> TestNounFilter + * 'filter'/'f' scope=ScopeFilter -> TestScopeFilter + * 'special' special -> TestSpecial + * topic -> TestTopic; + +Verb 'reverse' 'swap' 'exchange' + * held 'for' noun -> reverse + * noun 'with' noun -> reverse reverse; + +Extend "t" last * noun -> TestNoun; + +Extend 't' first * -> Test; + +Extend 'wave' replace * -> NewWave; + +Extend only 'feel' 'touch' replace * noun -> Feel; + +[ TestSub "a\ + " b o "@@98"; ! Not an escape sequence. + string 25 low_string; + print "Test what?> "; + table->0 = 260; + parse->0 = 61; + #Ifdef TARGET_ZCODE; + read buffer parse; + #Ifnot; ! TARGET_GLULX + KeyboardPrimitive(buffer, parse); + #Endif; ! 
TARGET_ + switch (parse-->1) { + 'save': + #Ifdef TARGET_ZCODE; + #Ifv3; + @save ?saved; + #Ifnot; + save saved; + #Endif; + #Endif; + print "Saving failed.^"; + 'restore': + #Ifdef TARGET_ZCODE; + restore saved; + #Endif; + print "Restoring failed.^"; + 'restart': + @restart; + 'quit', 'q//': + quit; + return 2; rtrue; rfalse; return; + 'print', 'p//': + print "Print:^", + " (string): ", (string) "xyzzy^", + " (number): ", (number) 123, "^", + " (char): ", (char) 'x', "^", + " (address): ", (address) 'plugh//p', "^", + " (The): ", (The) person, "^", + " (the): ", (the) person, "^", + " (A): ", (A) person, "^", + " (a): ", (a) person, "^", + " (an): ", (an) person, "^", + " (name): ", (name) person, "^", + " (object): ", (object) person, "^", + " (property): ", (property) alias, "^", + " (): ", (LanguageNumber) 123, "^", + " : ", a * 2 - 1, "^", + " (): ", (a + person), "^"; + print "Escapes:^", + " by mnemonic: @!! @<< @'A @AE @et @:y^", + " by decimal value: @@64 @@126^", + " by Unicode value: @{DC}@{002b}^", + " by string variable: @25^"; + 'font', 'style': + font off; print "font off^"; + font on; print "font on^"; + style reverse; print "style reverse^"; style roman; + style bold; print "style bold^"; + style underline; print "style underline^"; + style fixed; print "style fixed^"; + style roman; print "style roman^"; + 'statements': + spaces 8; + objectloop (o) { + print "objectloop (o): ", (the) o, "^"; + } + objectloop (o in compass) { ! 'in' is a keyword + print "objectloop (o in compass): ", (the) o, "^"; + } + objectloop (o in compass && true) { ! 'in' is an operator + print "objectloop (o in compass && true): ", (the) o, "^"; + } + objectloop (o from se_obj) { + print "objectloop (o from se_obj): ", (the) o, "^"; + } + objectloop (o near person) { + print "objectloop (o near person): ", (the) o, "^"; + } + #Ifdef TARGET_ZCODE; + #Trace assembly on; +@ ! This is assembly. + add -4 ($$1+$3)*2 -> b; + @get_sibling test_tube -> b ?saved; + @inc [b]; + @je sp (1+3*0) ? equal; + @je 1 ((sp)) ?~ different; + .! This is a label: + equal; + print "sp == 1^"; + jump label; + .different; + print "sp @@126= 1^"; + .label; + #Trace off; #Endif; ! TARGET_ZCODE + a = random(10); + switch (a) { + 1, 9: + box "Testing oneself is best when done alone." + " -- Jimmy Carter"; + 2, 6, to, 3 to 5, to to to: + ; + #Ifdef VN_1633; + ; + #Endif; + a = ##Drop; + < ! The angle brackets may be separated by whitespace. + < (a) pigeon > >; + default: + do { + give person general ~general; + } until (person provides life && ~~false); + if (a == 7) a = 4; + else a = 5; + } + 'expressions': + a = 1+1-1*1/1%1&1|1&&1||1==(1~=(1>(1<(1>=(1<=1))))); + a++; ++a; a--; --a; + a = person.life; + a = kitchen.&arr; + a = kitchen.#arr; + a = Bird::wingspan; + a = kitchen has general; + a = kitchen hasnt general; + a = kitchen provides arr; + a = person in kitchen; + a = person notin kitchen; + a = person ofclass Bird; + a = a == 0 or 1; + a = StubR(); + a = StubR(a); + a = StubR(, a); + a = "string"; + a = 'word'; + a = '''; ! 
character + a = $09afAF; + a = $$01; + a = ##Eat; a = #a$Eat; + a = #g$self; + a = #n$!word; + a = #r$StubR; + a = #dict_par1; + default: + for (a = 2, b = a; (a < buffer->1 + 2) && (Bird::wingspan): ++a, b--) { + print (char) buffer->a; + } + new_line; + for (::) break; + } + .saved;; +]; + +[ TestNumberSub; + print_ret parsed_number, " is ", (number) parsed_number, "."; +]; + +[ TestAttributeSub; print_ret (The) noun, " has been reversed."; ]; + +[ CreatureTest obj; return obj has animate; ]; + +[ TestCreatureSub; print_ret (The) noun, " is a creature."; ]; + +[ TestMultiheldSub; print_ret "You are holding ", (the) noun, "."; ]; + +[ TestMultiexceptSub; "You test ", (the) noun, " with ", (the) second, "."; ]; + +[ TestMultiinsideSub; "You test ", (the) noun, " from ", (the) second, "."; ]; + +[ TestMultiSub; print_ret (The) noun, " is a thing."; ]; + +[ TestNounFilterSub; print_ret (The) noun, " is a bird."; ]; + +[ TestScopeFilterSub; print_ret (The) noun, " is a direction."; ]; + +[ TestSpecialSub; "Your lucky number is ", parsed_number, "."; ]; + +[ TestTopicSub; "You discuss a topic."; ]; + +[ TestNounSub; "That is ", (a) noun, "."; ]; + +[ TestHeldSub; "You are holding ", (a) noun, "."; ]; + +[ NewWaveSub; "That would be foolish."; ]; + +[ FeelSub; print_ret (The) noun, " feels normal."; ]; + +[ ReverseSub from; + from = parent(noun); + move noun to parent(second); + if (from == to) + move second to to; + else + move second to from; + give noun to; + from = to; + give second from; + "You swap ", (the) noun, " and ", (the) second, "."; +]; + +End: The End directive ends the source code. diff --git a/vendor/pygments/tests/examplefiles/interp.scala b/vendor/pygments/tests/examplefiles/interp.scala new file mode 100644 index 0000000..4131b75 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/interp.scala @@ -0,0 +1,10 @@ +val n = 123; +val a = s"n=$n"; +val a2 = s"n=$n''"; +val b = s"""n=$n"""; +val c = f"n=$n%f"; +val d = f"""n=$n%f"""; +val d2 = s"""a""""; +val e = s"abc\u00e9"; +val f = s"a${n}b"; +val g = s"a${n + 1}b"; diff --git a/vendor/pygments/tests/examplefiles/irc.lsp b/vendor/pygments/tests/examplefiles/irc.lsp old mode 100755 new mode 100644 diff --git a/vendor/pygments/tests/examplefiles/language.hy b/vendor/pygments/tests/examplefiles/language.hy new file mode 100644 index 0000000..9768c39 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/language.hy @@ -0,0 +1,165 @@ +;;;; This contains some of the core Hy functions used +;;;; to make functional programming slightly easier. +;;;; + + +(defn _numeric-check [x] + (if (not (numeric? x)) + (raise (TypeError (.format "{0!r} is not a number" x))))) + +(defn cycle [coll] + "Yield an infinite repetition of the items in coll" + (setv seen []) + (for [x coll] + (yield x) + (.append seen x)) + (while seen + (for [x seen] + (yield x)))) + +(defn dec [n] + "Decrement n by 1" + (_numeric-check n) + (- n 1)) + +(defn distinct [coll] + "Return a generator from the original collection with duplicates + removed" + (let [[seen []] [citer (iter coll)]] + (for [val citer] + (if (not_in val seen) + (do + (yield val) + (.append seen val)))))) + +(defn drop [count coll] + "Drop `count` elements from `coll` and yield back the rest" + (let [[citer (iter coll)]] + (try (for [i (range count)] + (next citer)) + (catch [StopIteration])) + citer)) + +(defn even? 
[n] + "Return true if n is an even number" + (_numeric-check n) + (= (% n 2) 0)) + +(defn filter [pred coll] + "Return all elements from `coll` that pass `pred`" + (let [[citer (iter coll)]] + (for [val citer] + (if (pred val) + (yield val))))) + +(defn inc [n] + "Increment n by 1" + (_numeric-check n) + (+ n 1)) + +(defn instance? [klass x] + (isinstance x klass)) + +(defn iterable? [x] + "Return true if x is iterable" + (try (do (iter x) true) + (catch [Exception] false))) + +(defn iterate [f x] + (setv val x) + (while true + (yield val) + (setv val (f val)))) + +(defn iterator? [x] + "Return true if x is an iterator" + (try (= x (iter x)) + (catch [TypeError] false))) + +(defn neg? [n] + "Return true if n is < 0" + (_numeric-check n) + (< n 0)) + +(defn none? [x] + "Return true if x is None" + (is x None)) + +(defn numeric? [x] + (import numbers) + (instance? numbers.Number x)) + +(defn nth [coll index] + "Return nth item in collection or sequence, counting from 0" + (if (not (neg? index)) + (if (iterable? coll) + (try (first (list (take 1 (drop index coll)))) + (catch [IndexError] None)) + (try (get coll index) + (catch [IndexError] None))) + None)) + +(defn odd? [n] + "Return true if n is an odd number" + (_numeric-check n) + (= (% n 2) 1)) + +(defn pos? [n] + "Return true if n is > 0" + (_numeric_check n) + (> n 0)) + +(defn remove [pred coll] + "Return coll with elements removed that pass `pred`" + (let [[citer (iter coll)]] + (for [val citer] + (if (not (pred val)) + (yield val))))) + +(defn repeat [x &optional n] + "Yield x forever or optionally n times" + (if (none? n) + (setv dispatch (fn [] (while true (yield x)))) + (setv dispatch (fn [] (for [_ (range n)] (yield x))))) + (dispatch)) + +(defn repeatedly [func] + "Yield result of running func repeatedly" + (while true + (yield (func)))) + +(defn take [count coll] + "Take `count` elements from `coll`, or the whole set if the total + number of entries in `coll` is less than `count`." + (let [[citer (iter coll)]] + (for [_ (range count)] + (yield (next citer))))) + +(defn take-nth [n coll] + "Return every nth member of coll + raises ValueError for (not (pos? n))" + (if (pos? n) + (let [[citer (iter coll)] [skip (dec n)]] + (for [val citer] + (yield val) + (for [_ (range skip)] + (next citer)))) + (raise (ValueError "n must be positive")))) + +(defn take-while [pred coll] + "Take all elements while `pred` is true" + (let [[citer (iter coll)]] + (for [val citer] + (if (pred val) + (yield val) + (break))))) + +(defn zero? [n] + "Return true if n is 0" + (_numeric_check n) + (= n 0)) + +(def *exports* ["cycle" "dec" "distinct" "drop" "even?" "filter" "inc" + "instance?" "iterable?" "iterate" "iterator?" "neg?" + "none?" "nth" "numeric?" "odd?" "pos?" 
"remove" "repeat" + "repeatedly" "take" "take_nth" "take_while" "zero?"]) diff --git a/vendor/pygments/tests/examplefiles/limbo.b b/vendor/pygments/tests/examplefiles/limbo.b new file mode 100644 index 0000000..e55a0a6 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/limbo.b @@ -0,0 +1,456 @@ +implement Ninewin; +include "sys.m"; + sys: Sys; +include "draw.m"; + draw: Draw; + Image, Display, Pointer: import draw; +include "arg.m"; +include "keyboard.m"; +include "tk.m"; +include "wmclient.m"; + wmclient: Wmclient; + Window: import wmclient; +include "sh.m"; + sh: Sh; + +# run a p9 graphics program (default rio) under inferno wm, +# making available to it: +# /dev/winname - naming the current inferno window (changing on resize) +# /dev/mouse - pointer file + resize events; write to change position +# /dev/cursor - change appearance of cursor. +# /dev/draw - inferno draw device +# /dev/cons - read keyboard events, write to 9win stdout. + +Ninewin: module { + init: fn(ctxt: ref Draw->Context, argv: list of string); +}; +winname: string; + +init(ctxt: ref Draw->Context, argv: list of string) +{ + size := Draw->Point(500, 500); + sys = load Sys Sys->PATH; + draw = load Draw Draw->PATH; + wmclient = load Wmclient Wmclient->PATH; + wmclient->init(); + sh = load Sh Sh->PATH; + + buts := Wmclient->Resize; + if(ctxt == nil){ + ctxt = wmclient->makedrawcontext(); + buts = Wmclient->Plain; + } + arg := load Arg Arg->PATH; + arg->init(argv); + arg->setusage("9win [-s] [-x width] [-y height]"); + exportonly := 0; + while(((opt := arg->opt())) != 0){ + case opt { + 's' => + exportonly = 1; + 'x' => + size.x = int arg->earg(); + 'y' => + size.y = int arg->earg(); + * => + arg->usage(); + } + } + if(size.x < 1 || size.y < 1) + arg->usage(); + argv = arg->argv(); + if(argv != nil && hd argv == "-s"){ + exportonly = 1; + argv = tl argv; + } + if(argv == nil && !exportonly) + argv = "rio" :: nil; + if(argv != nil && exportonly){ + sys->fprint(sys->fildes(2), "9win: no command allowed with -s flag\n"); + raise "fail:usage"; + } + title := "9win"; + if(!exportonly) + title += " " + hd argv; + w := wmclient->window(ctxt, title, buts); + w.reshape(((0, 0), size)); + w.onscreen(nil); + if(w.image == nil){ + sys->fprint(sys->fildes(2), "9win: cannot get image to draw on\n"); + raise "fail:no window"; + } + + sys->pctl(Sys->FORKNS|Sys->NEWPGRP, nil); + ld := "/n/9win"; + if(sys->bind("#s", ld, Sys->MREPL) == -1 && + sys->bind("#s", ld = "/n/local", Sys->MREPL) == -1){ + sys->fprint(sys->fildes(2), "9win: cannot bind files: %r\n"); + raise "fail:error"; + } + w.startinput("kbd" :: "ptr" :: nil); + spawn ptrproc(rq := chan of Sys->Rread, ptr := chan[10] of ref Pointer, reshape := chan[1] of int); + + + fwinname := sys->file2chan(ld, "winname"); + fconsctl := sys->file2chan(ld, "consctl"); + fcons := sys->file2chan(ld, "cons"); + fmouse := sys->file2chan(ld, "mouse"); + fcursor := sys->file2chan(ld, "cursor"); + if(!exportonly){ + spawn run(sync := chan of string, w.ctl, ld, argv); + if((e := <-sync) != nil){ + sys->fprint(sys->fildes(2), "9win: %s", e); + raise "fail:error"; + } + } + spawn serveproc(w, rq, fwinname, fconsctl, fcons, fmouse, fcursor); + if(!exportonly){ + # handle events synchronously so that we don't get a "killed" message + # from the shell. 
+ handleevents(w, ptr, reshape); + }else{ + spawn handleevents(w, ptr, reshape); + sys->bind(ld, "/dev", Sys->MBEFORE); + export(sys->fildes(0), w.ctl); + } +} + +handleevents(w: ref Window, ptr: chan of ref Pointer, reshape: chan of int) +{ + for(;;)alt{ + c := <-w.ctxt.ctl or + c = <-w.ctl => + e := w.wmctl(c); + if(e != nil) + sys->fprint(sys->fildes(2), "9win: ctl error: %s\n", e); + if(e == nil && c != nil && c[0] == '!'){ + alt{ + reshape <-= 1 => + ; + * => + ; + } + winname = nil; + } + p := <-w.ctxt.ptr => + if(w.pointer(*p) == 0){ + # XXX would block here if client isn't reading mouse... but we do want to + # extert back-pressure, which conflicts. + alt{ + ptr <-= p => + ; + * => + ; # sys->fprint(sys->fildes(2), "9win: discarding mouse event\n"); + } + } + } +} + +serveproc(w: ref Window, mouserq: chan of Sys->Rread, fwinname, fconsctl, fcons, fmouse, fcursor: ref Sys->FileIO) +{ + winid := 0; + krc: list of Sys->Rread; + ks: string; + + for(;;)alt { + c := <-w.ctxt.kbd => + ks[len ks] = inf2p9key(c); + if(krc != nil){ + hd krc <-= (array of byte ks, nil); + ks = nil; + krc = tl krc; + } + (nil, d, nil, wc) := <-fcons.write => + if(wc != nil){ + sys->write(sys->fildes(1), d, len d); + wc <-= (len d, nil); + } + (nil, nil, nil, rc) := <-fcons.read => + if(rc != nil){ + if(ks != nil){ + rc <-= (array of byte ks, nil); + ks = nil; + }else + krc = rc :: krc; + } + (offset, nil, nil, rc) := <-fwinname.read => + if(rc != nil){ + if(winname == nil){ + winname = sys->sprint("noborder.9win.%d", winid++); + if(w.image.name(winname, 1) == -1){ + sys->fprint(sys->fildes(2), "9win: namewin %q failed: %r", winname); + rc <-= (nil, "namewin failure"); + break; + } + } + d := array of byte winname; + if(offset < len d) + d = d[offset:]; + else + d = nil; + rc <-= (d, nil); + } + (nil, nil, nil, wc) := <-fwinname.write => + if(wc != nil) + wc <-= (-1, "permission denied"); + (nil, nil, nil, rc) := <-fconsctl.read => + if(rc != nil) + rc <-= (nil, "permission denied"); + (nil, d, nil, wc) := <-fconsctl.write => + if(wc != nil){ + if(string d != "rawon") + wc <-= (-1, "cannot change console mode"); + else + wc <-= (len d, nil); + } + (nil, nil, nil, rc) := <-fmouse.read => + if(rc != nil) + mouserq <-= rc; + (nil, d, nil, wc) := <-fmouse.write => + if(wc != nil){ + e := cursorset(w, string d); + if(e == nil) + wc <-= (len d, nil); + else + wc <-= (-1, e); + } + (nil, nil, nil, rc) := <-fcursor.read => + if(rc != nil) + rc <-= (nil, "permission denied"); + (nil, d, nil, wc) := <-fcursor.write => + if(wc != nil){ + e := cursorswitch(w, d); + if(e == nil) + wc <-= (len d, nil); + else + wc <-= (-1, e); + } + } +} + +ptrproc(rq: chan of Sys->Rread, ptr: chan of ref Pointer, reshape: chan of int) +{ + rl: list of Sys->Rread; + c := ref Pointer(0, (0, 0), 0); + for(;;){ + ch: int; + alt{ + p := <-ptr => + ch = 'm'; + c = p; + <-reshape => + ch = 'r'; + rc := <-rq => + rl = rc :: rl; + continue; + } + if(rl == nil) + rl = <-rq :: rl; + hd rl <-= (sys->aprint("%c%11d %11d %11d %11d ", ch, c.xy.x, c.xy.y, c.buttons, c.msec), nil); + rl = tl rl; + } +} + +cursorset(w: ref Window, m: string): string +{ + if(m == nil || m[0] != 'm') + return "invalid mouse message"; + x := int m[1:]; + for(i := 1; i < len m; i++) + if(m[i] == ' '){ + while(m[i] == ' ') + i++; + break; + } + if(i == len m) + return "invalid mouse message"; + y := int m[i:]; + return w.wmctl(sys->sprint("ptr %d %d", x, y)); +} + +cursorswitch(w: ref Window, d: array of byte): string +{ + Hex: con "0123456789abcdef"; + if(len d != 2*4+64) + 
return w.wmctl("cursor"); + hot := Draw->Point(bglong(d, 0*4), bglong(d, 1*4)); + s := sys->sprint("cursor %d %d 16 32 ", hot.x, hot.y); + for(i := 2*4; i < len d; i++){ + c := int d[i]; + s[len s] = Hex[c >> 4]; + s[len s] = Hex[c & 16rf]; + } + return w.wmctl(s); +} + +run(sync, ctl: chan of string, ld: string, argv: list of string) +{ + Rcmeta: con "|<>&^*[]?();"; + sys->pctl(Sys->FORKNS, nil); + if(sys->bind("#₪", "/srv", Sys->MCREATE) == -1){ + sync <-= sys->sprint("cannot bind srv device: %r"); + exit; + } + srvname := "/srv/9win."+string sys->pctl(0, nil); # XXX do better. + fd := sys->create(srvname, Sys->ORDWR, 8r600); + if(fd == nil){ + sync <-= sys->sprint("cannot create %s: %r", srvname); + exit; + } + sync <-= nil; + spawn export(fd, ctl); + sh->run(nil, "os" :: + "rc" :: "-c" :: + "mount "+srvname+" /mnt/term;"+ + "rm "+srvname+";"+ + "bind -b /mnt/term"+ld+" /dev;"+ + "bind /mnt/term/dev/draw /dev/draw ||"+ + "bind -a /mnt/term/dev /dev;"+ + quotedc("cd"::"/mnt/term"+cwd()::nil, Rcmeta)+";"+ + quotedc(argv, Rcmeta)+";":: + nil + ); +} + +export(fd: ref Sys->FD, ctl: chan of string) +{ + sys->export(fd, "/", Sys->EXPWAIT); + ctl <-= "exit"; +} + +inf2p9key(c: int): int +{ + KF: import Keyboard; + + P9KF: con 16rF000; + Spec: con 16rF800; + Khome: con P9KF|16r0D; + Kup: con P9KF|16r0E; + Kpgup: con P9KF|16r0F; + Kprint: con P9KF|16r10; + Kleft: con P9KF|16r11; + Kright: con P9KF|16r12; + Kdown: con Spec|16r00; + Kview: con Spec|16r00; + Kpgdown: con P9KF|16r13; + Kins: con P9KF|16r14; + Kend: con P9KF|16r18; + Kalt: con P9KF|16r15; + Kshift: con P9KF|16r16; + Kctl: con P9KF|16r17; + + case c { + Keyboard->LShift => + return Kshift; + Keyboard->LCtrl => + return Kctl; + Keyboard->LAlt => + return Kalt; + Keyboard->Home => + return Khome; + Keyboard->End => + return Kend; + Keyboard->Up => + return Kup; + Keyboard->Down => + return Kdown; + Keyboard->Left => + return Kleft; + Keyboard->Right => + return Kright; + Keyboard->Pgup => + return Kpgup; + Keyboard->Pgdown => + return Kpgdown; + Keyboard->Ins => + return Kins; + + # function keys + KF|1 or + KF|2 or + KF|3 or + KF|4 or + KF|5 or + KF|6 or + KF|7 or + KF|8 or + KF|9 or + KF|10 or + KF|11 or + KF|12 => + return (c - KF) + P9KF; + } + return c; +} + +cwd(): string +{ + return sys->fd2path(sys->open(".", Sys->OREAD)); +} + +# from string.b, waiting for declaration to be uncommented. 
+quotedc(argv: list of string, cl: string): string +{ + s := ""; + while (argv != nil) { + arg := hd argv; + for (i := 0; i < len arg; i++) { + c := arg[i]; + if (c == ' ' || c == '\t' || c == '\n' || c == '\'' || in(c, cl)) + break; + } + if (i < len arg || arg == nil) { + s += "'" + arg[0:i]; + for (; i < len arg; i++) { + if (arg[i] == '\'') + s[len s] = '\''; + s[len s] = arg[i]; + } + s[len s] = '\''; + } else + s += arg; + if (tl argv != nil) + s[len s] = ' '; + argv = tl argv; + } + return s; +} + +in(c: int, s: string): int +{ + n := len s; + if(n == 0) + return 0; + ans := 0; + negate := 0; + if(s[0] == '^') { + negate = 1; + s = s[1:]; + n--; + } + for(i := 0; i < n; i++) { + if(s[i] == '-' && i > 0 && i < n-1) { + if(c >= s[i-1] && c <= s[i+1]) { + ans = 1; + break; + } + i++; + } + else + if(c == s[i]) { + ans = 1; + break; + } + } + if(negate) + ans = !ans; + + # just to showcase labels +skip: + return ans; +} + +bglong(d: array of byte, i: int): int +{ + return int d[i] | (int d[i+1]<<8) | (int d[i+2]<<16) | (int d[i+3]<<24); +} diff --git a/vendor/pygments/tests/examplefiles/livescript-demo.ls b/vendor/pygments/tests/examplefiles/livescript-demo.ls index 2ff68c6..03cbcc9 100644 --- a/vendor/pygments/tests/examplefiles/livescript-demo.ls +++ b/vendor/pygments/tests/examplefiles/livescript-demo.ls @@ -7,7 +7,9 @@ dashes-identifiers = -> underscores_i$d = -> /regexp1/ //regexp2//g - 'strings' and "strings" and \strings + 'strings' and "strings" and \strings and \#$-"\'strings + +another-word-list = <[ more words ]> [2 til 10] |> map (* 2) diff --git a/vendor/pygments/tests/examplefiles/main.cmake b/vendor/pygments/tests/examplefiles/main.cmake index dac3da4..6dfcab1 100644 --- a/vendor/pygments/tests/examplefiles/main.cmake +++ b/vendor/pygments/tests/examplefiles/main.cmake @@ -1,4 +1,7 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.6 FATAL_ERROR) + SET( SOURCES back.c io.c main.c ) +SET( PATH $ENV{PATH} ) MESSAGE( ${SOURCES} ) # three arguments, prints "back.cio.cmain.c" MESSAGE( "${SOURCES}" ) # one argument, prints "back.c;io.c;main.c" MESSAGE( "" ) # one argument, prints "" an empty line diff --git a/vendor/pygments/tests/examplefiles/markdown.lsp b/vendor/pygments/tests/examplefiles/markdown.lsp old mode 100755 new mode 100644 diff --git a/vendor/pygments/tests/examplefiles/matlab_sample b/vendor/pygments/tests/examplefiles/matlab_sample index 4f61afe..bb00b51 100644 --- a/vendor/pygments/tests/examplefiles/matlab_sample +++ b/vendor/pygments/tests/examplefiles/matlab_sample @@ -28,3 +28,7 @@ y = exp(x); {% a block comment %} + +function no_arg_func +fprintf('%s\n', 'function with no args') +end diff --git a/vendor/pygments/tests/examplefiles/modula2_test_cases.def b/vendor/pygments/tests/examplefiles/modula2_test_cases.def new file mode 100644 index 0000000..ce86a55 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/modula2_test_cases.def @@ -0,0 +1,354 @@ +(* Test Cases for Modula-2 Lexer *) + +(* Notes: + (1) Without dialect option nor embedded dialect tag, the lexer operates in + fallback mode, recognising the *combined* literals, punctuation symbols + and operators of all supported dialects, and the *combined* reserved + words and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10. + (1) If multiple embedded dialect tags are present, the lexer will use the + first valid tag and ignore any subsequent dialect tags in the file. + (2) An embedded dialect tag overrides any command line dialect option. 
*) + + +(* Testing command line dialect option *) + +(* for PIM Modula-2 : pygmentize -O full,dialect=m2pim ... + for ISO Modula-2 : pygmentize -O full,dialect=m2iso ... + for Modula-2 R10 : pygmentize -O full,dialect=m2r10 ... + for Objective Modula-2 : pygmentize -O full,dialect=objm2 ... *) + +(* for Aglet extensions : pygmentize -O full,dialect=m2iso+aglet ... + for GNU extensions : pygmentize -O full,dialect=m2pim+gm2 ... + for p1 extensions : pygmentize -O full,dialect=m2iso+p1 ... + for XDS extensions : pygmentize -O full,dialect=m2iso+xds ... + + +(* Testing embedded dialect tags *) + +(* !m2pim*) (* <-- remove whitespace before ! for PIM Modula-2 *) +(* !m2iso*) (* <-- remove whitespace before ! for ISO Modula-2 *) +(* !m2r10*) (* <-- remove whitespace before ! for Modula-2 R10 *) +(* !objm2*) (* <-- remove whitespace before ! for Objective Modula-2 *) + +(* !m2iso+aglet*) (* <-- remove whitespace before ! for Aglet extensions *) +(* !m2pim+gm2*) (* <-- remove whitespace before ! for GNU extensions *) +(* !m2iso+p1*) (* <-- remove whitespace before ! for p1 extensions *) +(* !m2iso+xds*) (* <-- remove whitespace before ! for XDS extensions *) + + +(* Dialect Indicating Names *) + +(* recognised names should be highlighted *) + +QUALIFIED (* PIM and ISO *) + +PACKEDSET (* ISO only *) + +ARGLIST (* M2 R10 and ObjM2 *) + +BYCOPY (* ObjM2 only *) + +BITSET8 (* Aglet, GNU and M2 R10 *) + +__FILE__ (* GNU only *) + +BCD (* p1 and M2 R10 *) + +SEQ (* XDS only *) + + +(* Literal Tests *) + +(* recognised literals should be rendered as one unit + unrecognised literals should be rendered as error *) + +ch := 'a'; ch := "a"; (* all dialects *) +ch := 0u20; unich := 0u2038 (* M2 R10 *) + +s := 'The cat said "meow!".'; +s := "It is eight O'clock."; + + +n := 123; n = 1000000; (* all dialects *) +n := 123; n = 1'000'000; (* M2 R10 *) + +n := 0b0110; n:= 0b0110'1100'0111; (* M2 R10 *) +n := 0xFF00; n:= 0xDEAD'BEEF'0F00; (* M2 R10 *) + +r := 1.23; r := 1000000.000001; (* all dialects *) +r := 1.23; r := 1'000'000.000'001; (* M2 R10 *) + +r := 1.234E6; r:= 1.234E-6; r := 1.234567E1000; (* PIM + ISO *) +r := 1.234e6; r:= 1.234e-6; r := 1.234'567e1'000; (* M2 R10 *) + +ch := 0377C; n := 0377B; n := 07FF0H; (* ISO + PIM *) + + +(* Non-Alphabetic Operator Tests *) + +(* supported operators should be rendered as one unit + unsupported operators should be rendered as errors *) + +a := b + c - d * e / f; (* all dialects *) + +SetDiff := A \ B; (* M2 R10 *) + +dotProduct := v1 *. v2; catArray := array1 +> array2; (* M2 R10 *) + +bool := a = b; bool := a > b; bool := a < b; +bool := a # b; bool := a >= b; bool := a <= b; + +bool := a <> b; (* PIM + ISO *) + +bool := a == b; (* M2 R10 *) + +(*&*) IF a & b THEN ... END; (* PIM + ISO *) + +(*~*) IF ~ b THEN ... END; (* PIM + ISO *) + +(*::*) int := real :: INTEGER; (* M2 R10 *) + +(*++*) FOR i++ IN range DO ... END; (* M2 R10 *) +(*--*) FOR i-- IN range DO ... END; (* M2 R10 *) + +(*^*) next := this^.next; (* all dialects *) +(*@*) next := this@.next; (* ISO *) + +(*`*) str := `NSString alloc init; (* ObjM2 *) + + +(* Punctuation Tests *) + +(* supported punctuation should be rendered as one unit + unsupported punctuation should be rendered as an error *) + +(*.*) Foo.Bar.Baz; (*..*) TYPE Sign = [-1..1] OF INTEGER; + +(*|:*) CASE foo OF | 1 : bar | 2 : bam | 3 : boo END; +(*!:*) CASE foo OF 1 : bar ! 2 : bam ! 
3 : boo END; (* ISO *) + +(*[]()*) array[n] := foo(); + +(*{}*) CONST Bar = { 1, 2, 3 }; + +(*?*) TPROPERTIES = isCollection, isIndexed | isRigid?; (* M2 R10 *) + +(*~*) CONST ~ isFoobar = Foo AND Bar; (* M2 R10 *) +(*->*) isFoobar -> PROCEDURE [ABS]; (* M2 R10 *) + +(*<<>>*) GENLIB Foo FROM Template FOR Bar = <> END; (* M2 R10 *) + + +(* Single Line Comment Test *) + +(* should be rendered as comment if supported, as error if unsupported *) + +// This is a single line comment (M2 R10 + ObjM2) + + +(* Pragma Delimiter Tests *) + +(* PIM style pragma should be rendered as pragma in PIM dialects, + as multiline comment in all other dialects. *) + +(*$INLINE*) (* PIM *) + +(* ISO style pragma should be rendered as error in PIM dialects, + as pragma in all other dialects. *) + +<*INLINE*> (* all other dialects *) + + +(* Operator Substitution Test When in Algol mode *) + +IF foo # bar THEN ... END; (* # should be rendered as not equal symbol *) + +IF foo >= bar THEN ... END; (* >= should be rendered as not less symbol *) + +IF foo <= bar THEN ... END; (* <= should be rendered as not greater symbol *) + +IF foo == bar THEN ... END; (* == should be rendered as identity symbol *) + +dotProduct := v1 *. v2; (* *. should be rendered as dot product symbol *) + + +(* Reserved Words and Builtins Test *) + +(* supported reserved words and builtins should be highlighted *) + +(* reserved words common to all dialects *) + +AND ARRAY BEGIN BY CASE CONST DEFINITION DIV DO ELSE ELSIF END EXIT FOR FROM +IF IMPLEMENTATION IMPORT IN LOOP MOD MODULE NOT OF OR POINTER PROCEDURE +RECORD REPEAT RETURN SET THEN TO TYPE UNTIL VAR WHILE + +(* builtins common to all dialects *) + +ABS BOOLEAN CARDINAL CHAR CHR FALSE INTEGER LONGINT LONGREAL +MAX MIN NIL ODD ORD REAL TRUE + +(* pseudo builtins common to all dialects *) + +ADDRESS BYTE WORD ADR + + +(* additional reserved words for PIM *) + +EXPORT QUALIFIED WITH + +(* additional builtins for PIM *) + +BITSET CAP DEC DISPOSE EXCL FLOAT HALT HIGH INC INCL NEW NIL PROC SIZE TRUNC VAL + +(* additional pseudo-builtins for PIM *) + +SYSTEM PROCESS TSIZE NEWPROCESS TRANSFER + + +(* additional reserved words for ISO 10514-1 *) + +EXCEPT EXPORT FINALLY FORWARD PACKEDSET QUALIFIED REM RETRY WITH + +(* additional reserved words for ISO 10514-2 & ISO 10514-3 *) + +ABSTRACT AS CLASS GUARD INHERIT OVERRIDE READONLY REVEAL TRACED UNSAFEGUARDED + +(* additional builtins for ISO 10514-1 *) + +BITSET CAP CMPLX COMPLEX DEC DISPOSE EXCL FLOAT HALT HIGH IM INC INCL INT +INTERRUPTIBLE LENGTH LFLOAT LONGCOMPLEX NEW PROC PROTECTION RE SIZE TRUNC +UNINTERRUBTIBLE VAL + +(* additional builtins for ISO 10514-2 & ISO 10514-3 *) + +CREATE DESTROY EMPTY ISMEMBER SELF + + +(* additional pseudo-builtins for ISO *) + +(* SYSTEM *) +SYSTEM BITSPERLOC LOCSPERBYTE LOCSPERWORD LOC ADDADR SUBADR DIFADR MAKEADR +ADR ROTATE SHIFT CAST TSIZE + +(* COROUTINES *) +COROUTINES ATTACH COROUTINE CURRENT DETACH HANDLER INTERRUPTSOURCE IOTRANSFER +IsATTACHED LISTEN NEWCOROUTINE PROT TRANSFER + +(* EXCEPTIONS *) +EXCEPTIONS AllocateSource CurrentNumber ExceptionNumber ExceptionSource +GetMessage IsCurrentSource IsExceptionalExecution RAISE + +(* TERMINATION *) +TERMINATION IsTerminating HasHalted + +(* M2EXCEPTION *) +M2EXCEPTION M2Exceptions M2Exception IsM2Exception indexException rangeException +caseSelectException invalidLocation functionException wholeValueException +wholeDivException realValueException realDivException complexValueException +complexDivException protException sysException coException exException 
+ + +(* additional reserved words for M2 R10 *) + +ALIAS ARGLIST BLUEPRINT COPY GENLIB INDETERMINATE NEW NONE OPAQUE REFERENTIAL +RELEASE RETAIN + +(* with symbolic assembler language extension *) +ASM REG + +(* additional builtins for M2 R10 *) + +CARDINAL COUNT EMPTY EXISTS INSERT LENGTH LONGCARD OCTET PTR PRED READ READNEW +REMOVE RETRIEVE SORT STORE SUBSET SUCC TLIMIT TMAX TMIN TRUE TSIZE UNICHAR +WRITE WRITEF + +(* additional pseudo-builtins for M2 R10 *) + +(* TPROPERTIES *) +TPROPERTIES PROPERTY LITERAL TPROPERTY TLITERAL TBUILTIN TDYN TREFC TNIL +TBASE TPRECISION TMAXEXP TMINEXP + +(* CONVERSION *) +CONVERSION TSXFSIZE SXF VAL + +(* UNSAFE *) +UNSAFE CAST INTRINSIC AVAIL ADD SUB ADDC SUBC FETCHADD FETCHSUB SHL SHR ASHR +ROTL ROTR ROTLC ROTRC BWNOT BWAND BWOR BWXOR BWNAND BWNOR SETBIT TESTBIT +LSBIT MSBIT CSBITS BAIL HALT TODO FFI ADDR VARGLIST VARGC + +(* ATOMIC *) +ATOMIC INTRINSIC AVAIL SWAP CAS INC DEC BWAND BWNAND BWOR BWXOR + +(* COMPILER *) +COMPILER DEBUG MODNAME PROCNAME LINENUM DEFAULT HASH + +(* ASSEMBLER *) +ASSEMBLER REGISTER SETREG GETREG CODE + + +(* standard library ADT identifiers for M2 R10 *) + +(* rendered as builtins when dialect is set to Modula-2 R10, + this can be turned off by option treat_stdlib_adts_as_builtins=off *) +BCD LONGBCD BITSET SHORTBITSET LONGBITSET LONGLONGBITSET COMPLEX LONGCOMPLEX +SHORTCARD LONGLONGCARD SHORTINT LONGLONGINT POSINT SHORTPOSINT LONGPOSINT +LONGLONGPOSINT BITSET8 BITSET16 BITSET32 BITSET64 BITSET128 BS8 BS16 BS32 +BS64 BS128 CARDINAL8 CARDINAL16 CARDINAL32 CARDINAL64 CARDINAL128 CARD8 +CARD16 CARD32 CARD64 CARD128 INTEGER8 INTEGER16 INTEGER32 INTEGER64 +INTEGER128 INT8 INT16 INT32 INT64 INT128 STRING UNISTRING + + +(* additional reserved words for ObjM2 *) + +(* Note: ObjM2 is a superset of M2 R10 *) + +BYCOPY BYREF CLASS CONTINUE CRITICAL INOUT METHOD ON OPTIONAL OUT PRIVATE +PROTECTED PROTOCOL PUBLIC SUPER TRY + +(* additional builtins for ObjM2 *) + +OBJECT NO YES + + +(* additional builtins for Aglet Extensions to ISO *) + +BITSET8 BITSET16 BITSET32 CARDINAL8 CARDINAL16 CARDINAL32 INTEGER8 INTEGER16 +INTEGER32 + + +(* additional reserved words for GNU Extensions to PIM *) + +ASM __ATTRIBUTE__ __BUILTIN__ __COLUMN__ __DATE__ __FILE__ __FUNCTION__ +__LINE__ __MODULE__ VOLATILE + +(* additional builtins for GNU Extensions to PIM *) + +BITSET8 BITSET16 BITSET32 CARDINAL8 CARDINAL16 CARDINAL32 CARDINAL64 COMPLEX32 +COMPLEX64 COMPLEX96 COMPLEX128 INTEGER8 INTEGER16 INTEGER32 INTEGER64 REAL8 +REAL16 REAL32 REAL96 REAL128 THROW + + +(* additional pseudo-builtins for p1 Extensions to ISO *) + +BCD + + +(* additional reserved words for XDS Extensions to ISO *) + +SEQ + +(* additional builtins for XDS Extensions to ISO *) + +ASH ASSERT DIFFADR_TYPE ENTIER INDEX LEN LONGCARD SHORTCARD SHORTINT + +(* additional pseudo-builtins for XDS Extensions to ISO *) + +(* SYSTEM *) +PROCESS NEWPROCESS BOOL8 BOOL16 BOOL32 CARD8 CARD16 CARD32 INT8 INT16 INT32 +REF MOVE FILL GET PUT CC int unsigned size_t void + +(* COMPILER *) +COMPILER OPTION EQUATION + + +(* end of file *) \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/noexcept.cpp b/vendor/pygments/tests/examplefiles/noexcept.cpp new file mode 100644 index 0000000..f83e50d --- /dev/null +++ b/vendor/pygments/tests/examplefiles/noexcept.cpp @@ -0,0 +1,8 @@ +void* operator new (std::size_t size); +void* operator new (std::size_t size, const std::nothrow_t& nothrow_value) noexcept; +void* operator new (std::size_t size, const std::nothrow_t& nothrow_value)noexcept; 
+void* operator new (std::size_t size, const std::nothrow_t& nothrow_value); +void* operator new (std::size_t size); +void* operator new (std::size_t size) noexcept; +void* operator new (std::size_t size)noexcept; + diff --git a/vendor/pygments/tests/examplefiles/objc_example.m b/vendor/pygments/tests/examplefiles/objc_example.m index cb5c097..f3f85f6 100644 --- a/vendor/pygments/tests/examplefiles/objc_example.m +++ b/vendor/pygments/tests/examplefiles/objc_example.m @@ -1,25 +1,179 @@ -#import "Somefile.h" +// Test various types of includes +#import +# import +#import "stdio.h" +#\ + import \ + "stdlib.h" +# /*line1*/ \ +import /* line 2 */ \ +"stdlib.h" // line 3 -@implementation ABC +// Commented out code with preprocessor +#if 0 +#define MY_NUMBER 3 +#endif + + #\ + if 1 +#define TEST_NUMBER 3 +#endif + +// Empty preprocessor +# + +// Class forward declaration +@class MyClass; + +// Empty classes +@interface EmptyClass +@end +@interface EmptyClass2 +{ +} +@end +@interface EmptyClass3 : EmptyClass2 +{ +} +@end + +// Custom class inheriting from built-in +@interface MyClass : NSObject +{ +@public + NSString *myString; + __weak NSString *_weakString; +@protected + NSTextField *_textField; +@private + NSDate *privateDate; +} + +// Various property aatributes +@property(copy, readwrite, nonatomic) NSString *myString; +@property(weak) NSString *weakString; +@property(retain, strong, atomic) IBOutlet NSTextField *textField; + +// Class methods ++ (void)classMethod1:(NSString *)arg; ++ (void)classMethod2:(NSString *) arg; // Test space before arg + +@end + +typedef id B; + +#pragma mark MyMarker + +// MyClass.m +// Class extension to declare private property +@interface MyClass () +@property(retain) NSDate *privateDate; +- (void)hiddenMethod; +@end + +// Special category +@interface MyClass (Special) +@property(retain) NSDate *specialDate; +@end + +@implementation MyClass +@synthesize myString; +@synthesize privateDate; - (id)a:(B)b { - return 1; + /** + * C-style comment + */ + + // Selector keywords/types + SEL someMethod = @selector(hiddenMethod); + + // Boolean types + Boolean b1 = FALSE; + BOOL b2 = NO; + bool b3 = true; + + /** + * Number literals + */ + // Int Literal + NSNumber *n1 = @( 1 ); + // Method call + NSNumber *n2 = @( [b length] ); + // Define variable + NSNumber *n3 = @( TEST_NUMBER ); + // Arthimetic expression + NSNumber *n4 = @(1 + 2); + // From variable + int myInt = 5; + NSNumber *n5 = @(myInt); + // Nest expression + NSNumber *n6 = @(1 + (2 + 6.0)); + // Bool literal + NSNumber *n7 = @NO; + // Bool expression + NSNumber *n8 = @(YES); + // Character + NSNumber *n9 = @'a'; + // int + NSNumber *n10 = @123; + // unsigned + NSNumber *n11 = @1234U; + // long + NSNumber *n12 = @1234567890L; + // float + NSNumber *n13 = @3.14F; + // double + NSNumber *n14 = @3.14F; + + // Array literals + NSArray *arr = @[ @"1", @"2" ]; + arr = @[ @[ @"1", @"2" ], [arr lastObject] ]; + [arr lastObject]; + [@[ @"1", @"2" ] lastObject]; + + // Dictionary literals + NSDictionary *d = @{ @"key": @"value" }; + [[d allKeys] lastObject]; + [[@{ @"key": @"value" } allKeys] lastObject]; + d = @{ @"key": @{ @"key": @"value" } }; + + [self hiddenMethod]; + [b length]; + [privateDate class]; + + NSDictionary *dictionary = [NSDictionary dictionaryWithObjectsAndKeys: + @"1", @"one", @"2", @"two", @"3", @"three", nil]; + + NSString *key; + for (key in dictionary) { + NSLog(@"Number: %@, Word: %@", key, [dictionary valueForKey:key]); + } + + // Blocks + int (^myBlock)(int arg1, int arg2); + NSString 
*(^myName)(NSString *) = ^(NSString *value) { + return value; + }; + + return nil; +} + +- (void)hiddenMethod { + // Synchronized block + @synchronized(self) { + [myString retain]; + [myString release]; + } +} + ++ (void)classMethod1:(NSString *)arg {} ++ (void)classMethod2:(NSString *) arg +{ + // Autorelease pool block + @autoreleasepool { + NSLog(@"Hello, World!"); + } } @end - -@implementation ABC - -- (void)xyz; - -@end - -NSDictionary *dictionary = [NSDictionary dictionaryWithObjectsAndKeys: - @"quattuor", @"four", @"quinque", @"five", @"sex", @"six", nil]; - - -NSString *key; -for (key in dictionary) { - NSLog(@"English: %@, Latin: %@", key, [dictionary valueForKey:key]); -} - diff --git a/vendor/pygments/tests/examplefiles/objc_example2.m b/vendor/pygments/tests/examplefiles/objc_example2.m deleted file mode 100644 index 8cd9b06..0000000 --- a/vendor/pygments/tests/examplefiles/objc_example2.m +++ /dev/null @@ -1,24 +0,0 @@ -// MyClass.h -@interface MyClass : NSObject -{ - NSString *value; - NSTextField *textField; -@private - NSDate *lastModifiedDate; -} -@property(copy, readwrite) NSString *value; -@property(retain) IBOutlet NSTextField *textField; -@end - -// MyClass.m -// Class extension to declare private property -@interface MyClass () -@property(retain) NSDate *lastModifiedDate; -@end - -@implementation MyClass -@synthesize value; -@synthesize textField; -@synthesize lastModifiedDate; -// implementation continues -@end diff --git a/vendor/pygments/tests/examplefiles/example.p b/vendor/pygments/tests/examplefiles/openedge_example similarity index 100% rename from vendor/pygments/tests/examplefiles/example.p rename to vendor/pygments/tests/examplefiles/openedge_example diff --git a/vendor/pygments/tests/examplefiles/pacman.conf b/vendor/pygments/tests/examplefiles/pacman.conf new file mode 100644 index 0000000..78dbf5e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/pacman.conf @@ -0,0 +1,49 @@ +# +# /etc/pacman.conf +# +# This example file has no relation to `pacman.ijs` +# but is of configuration of Arch Linux's package manager `pacman`. 
+# + +# +# GENERAL OPTIONS +# +[options] +RootDir = /opt/local/site-private +#DBPath = /var/lib/pacman/ +#CacheDir = /var/cache/pacman/pkg/ +LogFile = /opt/local/site-private/var/log/pacman.log +#GPGDir = /etc/pacman.d/gnupg/ +HoldPkg = pacman +#XferCommand = /usr/bin/curl -C - -f %u > %o +XferCommand = /usr/local/bin/wget --passive-ftp -c -O %o %u +#CleanMethod = KeepInstalled +#UseDelta = 0.7 +Architecture = auto + +#IgnorePkg = +#IgnoreGroup = + +NoUpgrade = etc/passwd etc/group etc/shadow +NoUpgrade = etc/fstab +#NoExtract = + +#UseSyslog +Color +#TotalDownload +CheckSpace +#VerbosePkgLists + +#SigLevel = Never +SigLevel = Required DatabaseOptional +LocalFileSigLevel = Optional +RemoteFileSigLevel = Required + +Server = ftp://ftp9.yaphatchpotchgen.net/$repo/os/$arch + +[fubar32] +Include = /etc/pacman.d/mirrorlist.fubar32 # comment is allowed here + +#[custom] +#SigLevel = Optional TrustAll +#Server = file:///home/custompkgs diff --git a/vendor/pygments/tests/examplefiles/pacman.ijs b/vendor/pygments/tests/examplefiles/pacman.ijs new file mode 100644 index 0000000..f067b6e --- /dev/null +++ b/vendor/pygments/tests/examplefiles/pacman.ijs @@ -0,0 +1,1107 @@ +cocurrent 'jpacman' +coinsert 'j' + +BASELIB=: 'base library' +DATAMASK=: 0 +HWNDP=: '' +ISGUI=: 0 +ONLINE=: 0 +PKGDATA=: 0 7$a: +SECTION=: ,<'All' +SYSNAME=: 'Package Manager' +TIMEOUT=: 60 +WWWREV=: REV=: _1 + +IgnoreIOS=: 0 : 0 +api/jni +data/dbman +data/ddmysql +data/odbc +demos/isigraph +demos/wd +demos/wdplot +games/minesweeper +games/nurikabe +games/pousse +games/solitaire +general/pcall +general/sfl +graphics/d3 +graphics/fvj3 +graphics/gl2 +graphics/gnuplot +graphics/graph +graphics/graphviz +graphics/jturtle +graphics/print +graphics/tgsj +graphics/treemap +graphics/viewmat +gui/monthview +gui/util +ide/qt +math/tabula +media/animate +media/gdiplus +media/image3 +media/ming +media/paint +media/wav +) + +Ignore=: 3 : 0'' +if. IFIOS do. + <;._2 IgnoreIOS +else. + <'ide/ios' +end. +) +3 : 0'' +nc=. '--no-cache' +if. IFUNIX do. + if. UNAME-:'Darwin' do. + HTTPCMD=: 'curl -o %O --stderr %L -f -s -S %U' + elseif. do. + if. 'Android'-:UNAME do. nc=. '' + else. try. nc=. nc #~ 1 e. nc E. shell 'wget --help' catch. nc=. '' end. end. + HTTPCMD=: 'wget ',nc,' -O %O -o %L -t %t %U' + end. +else. + if. fexist exe=. jpath '~tools/ftp/wget.exe' do. exe=. '"',exe,'"' else. exe=. 'wget.exe' end. + try. nc=. nc #~ 1 e. nc E. shell exe,' --help' catch. nc=. '' end. + HTTPCMD=: exe,' ',nc,' -O %O -o %L -t %t -T %T %U' + if. fexist UNZIP=: jpath '~tools/zip/unzip.exe' do. UNZIP=: '"',UNZIP,'" -o -C ' else. UNZIP=: 'unzip.exe -o -C ' end. +end. +) +setfiles=: 3 : 0 +ADDCFG=: jpath '~addons/config/' +makedir ADDCFG +ADDCFGIJS=: ADDCFG,'config.ijs' +JRELEASE=: ({.~i.&'/') 9!:14'' +JRELEASE=: 'j802' +LIBTREE=: readtree'' +if. IFIOS do. + WWW=: '/jal/',JRELEASE,'/' +else. + WWW=: 'http://www.jsoftware.com/jal/',JRELEASE,'/' +end. +LIBVER=: jpath '~system/config/version.txt' +) +destroy=: codestroy +CFGFILES=: <;._2 (0 : 0) +addons.txt +library.txt +release.txt +revision.txt +zips.txt +) +LIBDESC=: 0 : 0 +This is the base library of scripts and labs included in the J system. + +Reinstalling or upgrading this library will overwrite files in the system subdirectory. Restart J afterwards. + +Files outside the system subdirectory, such as profile.ijs, are not changed. +) +cutjal=: ([: (* 4 > +/\) ' ' = ]) <;._1 ] +cutjsp=: ([: (* 5 > +/\) ' ' = ]) <;._1 ] +dquote=: '"'&, @ (,&'"') +fname=: #~ ([: *./\. 
~:&'/') +hostcmd=: [: 2!:0 '(' , ] , ' || true)'"_ +ischar=: 2 = 3!:0 +rnd=: [ * [: <. 0.5 + %~ +sep2under=: '/' & (I.@('_' = ])}) +termLF=: , (0 < #) # LF -. {: +todel=: ; @: (DEL&, @ (,&(DEL,' ')) each) +tolist=: }. @ ; @: (LF&,@,@":each) +isjpkgout=: ((4 = {:) *. 2 = #)@$ *. 1 = L. +getintro=: ('...' ,~ -&3@[ {. ])^:(<#) +info=: smoutput +getnames=: 3 : 0 +select. L.y +case. 0 do. + if. +/ BASELIB E. y do. + y=. ({:"1 y +y=. (45&getintro &.> idx{y) idx}y +) +deltree=: 3 : 0 +try. + res=. 0< ferase {."1 dirtree y + *./ res,0 #y do. i.0 5 return. end. +m=. _2 |. (LF,')',LF) E. y +r=. _2 }. each m <;._2 y +x=. r i.&> LF +d=. (x+1) }.each r +r=. x {.each r +r=. 3 {."1 cutjal &> ' ' ,each r +x=. d i.&> LF +c=. x {.each d +d=. (x+1) }.each d +r,.c,.d +) +fixjal2=: 3 : 0 +if. 2 > #y do. i.0 2 return. end. +cutjal &> ' ' ,each <;._2 y +) +fixjsp=: 3 : 0 +if. 2 > #y do. i.0 5 return. end. +m=. _2 |. (LF,')',LF) E. y +r=. _2 }. each m <;._2 y +x=. r i.&> LF +d=. (x+1) }.each r +r=. x {.each r +r=. ' ' ,each r +(cutjsp &> r),.d +) +fixlib=: 3 : 0 +msk=. ( #y do. + i.0 6 return. +end. +fls=. <;._2 y +ndx=. fls i.&> ' ' +siz=. <&> 0 ". (ndx+1) }.&> fls +fls=. ndx {.each fls +zps=. <;._2 &> fls ,each '_' +pfm=. 3 {"1 zps +uname=. tolower UNAME +msk=. (uname -: ({.~ i.&'.')) &> pfm +if. 1 ~: +/msk do. msk=. 1,~ }:0*.msk end. +msk # zps,.fls,.siz +) +fixrev=: 3 : 0 +{. _1 ". :: _1: y -. CRLF +) +fixupd=: 3 : 0 +_1 ". :: _1: y -. CRLF +) +fixver=: 3 : 0 +if. ischar y do. + y=. y -. CRLF + y=. 0 ". ' ' (I. y='.') } y +end. +3 {. y +) +fixvers=: 3 : 0 +s=. $y +y=. ,y +3 {."1 [ 0 ". s $ ' ' (I. y e. './') } y +) +fmtjal=: 3 : 0 +if. 0 = #y do. '' return. end. +r=. (4 {."1 y) ,each "1 ' ',LF2 +r=. <@; "1 r +; r ,each ({:"1 y) ,each <')',LF +) +fmtjal2=: 3 : 0 +if. 0 = #y do. '' return. end. +; (2 {."1 y) ,each "1 ' ',LF +) +fmtdep=: 3 : 0 +}. ; ',' ,each a: -.~ <;._2 y +) +fmtjsp=: 3 : 0 +if. 0 = #y do. '' return. end. +r=. (4 {."1 y) ,each "1 ' ',LF +r=. <@; "1 r +; r ,each ({:"1 y) ,each <')',LF +) +fmtlib=: 3 : 0 +, 'q<.>,q<.>r<0>3.0,r<0>3.0' 8!:2 y +) +fmtver=: 3 : 0 +if. 0=#y do. '' return. end. +if. ischar y do. y return. end. +}. ; '.' ,each ": each y +) +fmtverlib=: 3 : 0 +fmtver y +) +fixzips=: 3 : 0 +if. 2 > #y do. i.0 5 return. end. +fls=. <;._2 y +ndx=. fls i.&> ' ' +siz=. 0 ". (ndx+1) }.&> fls +fls=. ndx {.each fls +zps=. <;._2 &> fls ,each '_' +zps=. zps,.fls,.<&>siz +pfm=. 3 {"1 zps +and=. (1 e. 'android'&E.) &> pfm +lnx=. (1 e. 'linux'&E.) &> pfm +mac=. (1 e. 'darwin'&E.) &> pfm +win=. mac < (1 e. 'win'&E.) &> pfm + +select. UNAME +case. 'Win' do. + zps=. win # zps +case. 'Linux' do. + zps=. lnx # zps +case. 'Android' do. + zps=. and # zps +case. 'Darwin' do. + zps=. mac # zps + zps=. zps /: 3 {"1 zps + zps=. (~: 3 {."1 zps) # zps +end. + +bit=. IF64 pick '64';'32' +pfm=. 3 {"1 zps +exc=. (1 e. bit&E.) &> pfm +zps=. zps \: exc +zps=. (~: 3 {."1 zps) # zps +fnm=. 0 {"1 zps +lnm=. 1 {"1 zps +ver=. 2 {"1 zps +pfm=. 3 {"1 zps +fls=. 4 {"1 zps +siz=. 5 {"1 zps +nms=. fnm ,each '/' ,each lnm +pfm=. (pfm i.&> '.') {.each pfm +ndx=. \: # &> pfm +sort ndx { nms,.pfm,.ver,.fls,.siz +) +fwritenew=: 4 : 0 +if. x -: fread y do. + 0 +else. + x fwrite y +end. +) +platformparent=: 3 : 0 +((< _2 {. y) e. '32';'64') # _2 }. y +) +makedir=: 1!:5 :: 0: @ < +plural=: 4 : 0 +y,(1=x)#'s' +) +sizefmt=: 3 : 0 +select. +/ y >: 1e3 1e4 1e6 1e7 1e9 +case. 0 do. + (": y), ' byte',(y~:1)#'s' +case. 1 do. + (": 0.1 rnd y%1e3),' KB' +case. 2 do. + (": 1 rnd y%1e3),' KB' +case. 3 do. + (": 0.1 rnd y%1e6),' MB' +case. 4 do. 
+ (": 1 rnd y%1e6),' MB' +case. do. + (": 0.1 rnd y%1e9),' GB' +end. +) +shellcmd=: 3 : 0 +if. IFUNIX do. + hostcmd y +else. + spawn_jtask_ y +end. +) +subdir=: 3 : 0 +if. 0=#y do. '' return. end. +a=. 1!:0 y,'*' +if. 0=#a do. '' return. end. +a=. a #~ '-d' -:"1 [ 1 4 {"1 > 4 {"1 a +( '/mnt/sdcard'-:2!:5'EXTERNAL_STORAGE' do. notarcmd=. 1 end. + end. + if. notarcmd do. + require 'tar' + 'file dir'=. y + if. (i.0 0) -: tar 'x';file;dir do. e=. '' end. + else. + e=. shellcmd 'tar ',((IFIOS+:UNAME-:'Android')#(('Darwin'-:UNAME){::'--no-same-owner --no-same-permissions';'-o -p')),' -xzf ',file,' -C ',dir + end. + if. (0~:FHS) *. ('root'-:2!:5'USER') +. (<2!:5'HOME') e. 0;'/var/root';'/root';'';,'/' do. + shellcmd ::0: 'find ',dir,' -type d -exec chmod a+rx {} \+' + shellcmd ::0: 'find ',dir,' -type f -exec chmod a+r {} \+' + end. +else. + dir=. (_2&}. , '/' -.~ _2&{.) dir + e=. shellcmd UNZIP,' ',file,' -d ',dir +end. +e +) +zipext=: 3 : 0 +y, IFUNIX pick '.zip';'.tar.gz' +) +CHECKADDONSDIR=: 0 : 0 +The addons directory does not exist and cannot be created. + +It is set to: XX. + +You can either create the directory manually, or set a new addons directory in your profile script. +) +CHECKASK=: 0 : 0 +Read catalog from the server using Internet connection now? + +Otherwise the local catalog is used offline. +) +CHECKONLINE=: 0 : 0 +An active Internet connection is needed to install packages. + +Continue only if you have an active Internet connection. + +OK to continue? +) +CHECKREADSVR=: 0 : 0 +An active Internet connection is needed to read the server repository catalog. + +Continue only if you have an active Internet connection. + +OK to continue? +) +CHECKSTARTUP=: 0 : 0 +Setup repository using Internet connection now? + +Select No if not connected, to complete setup later. After Setup is done, repository can be used offline with more options in Tools menu and Preferences dialog. +) +checkaccess=: 3 : 0 +if. testaccess'' do. 1 return. end. +msg=. 'Unable to run Package Manager, as you do not have access to the installation folder.' +if. IFWIN do. + msg=. msg,LF2,'To run as Administrator, right-click the J icon, select Run as... and ' + msg=. msg,'then select Adminstrator.' +end. +info msg +0 +) +checkaddonsdir=: 3 : 0 +d=. jpath '~addons' +if. # 1!:0 d do. 1 return. end. +if. 1!:5 :: 0: : 0 do. + ONLINE=: 0 + log 'Using local copy of catalog. See Preferences to change the setting.' + 1 return. + end. + if. 0 = getonline 'Read Catalog from Server';CHECKREADSVR do. 0 return. end. +case. 1 do. + ONLINE=: 1 +case. 2 do. + if. REV >: 0 do. + if. 0 = getonline 'Read Catalog from Server';CHECKASK do. + log 'Using local copy of catalog. See Preferences to change the setting.' + 1 return. + end. + else. + if. 0 = getonline 'Setup Repository';CHECKSTARTUP do. 0 return. end. + end. +end. +log 'Updating server catalog...' +if. 0 = getserver'' do. + ONLINE=: 0 + log 'Working offline using local copy of catalog.' +else. + log 'Done.' +end. +1 +) +checkstatus=: 3 : 0 +if. 0 e. #LIBS do. '' return. end. +msk=. masklib PKGDATA +ups=. pkgups'' +libupm=. 1 e. msk *. ups +msk=. -. msk +addnim=. +/msk *. pkgnew'' +addupm=. +/msk *. pkgups'' +tot=. +/addnim,addupm,libupm +if. 0 = tot do. + 'All available packages are installed and up to date.' return. +end. +select. 0 < addnim,addupm +case. 0 0 do. + msg=. 'Addons are up to date.' +case. 0 1 do. + msg=. 'All addons are installed, ',(":addupm), ' can be upgraded.' +case. 1 0 do. + if. addnim = <:#PKGDATA do. + msg=. 'No addons are installed.' + else. + j=. 
' addon',('s'#~1: fsize p do. + if. _1-:msg=. freads q do. + if. 0=#msg=. e do. msg=. 'Unexpected error' end. end. + log 'Connection failed: ',msg + info 'Connection failed:',LF2,msg + r=. 1;msg + ferase p;q +else. + r=. 0;p + ferase q +end. +r +) +httpgetr=: 3 : 0 +res=. httpget y +if. 0 = 0 pick res do. + f=. 1 pick res + txt=. freads f + ferase f + 0;txt +end. +) +install=: 3 : 0 +dat=. getdepend y +'num siz'=. pmview_applycounts dat +many=. 1 < num +msg=. 'Installing ',(":num),' package',many#'s' +msg=. msg,' of ',(many#'total '),'size ',sizefmt siz +log msg +installdo 1 {"1 dat +log 'Done.' +readlocal'' +pacman_init 0 +) +install_console=: 3 : 0 + if. -. init_console 'server' do. '' return. end. + pkgs=. getnames y + if. pkgs -: ,<'all' do. pkgs=. 1 {"1 PKGDATA end. + pkgs=. pkgs (e. # [) ~. (<'base library'), ((pkgnew +. pkgups) # 1&{"1@]) PKGDATA + pkgs=. pkgs -. Ignore + pkgs=. getdepend_console pkgs + if. 0 = num=. #pkgs do. '' return. end. + many=. 1 < num + msg=. 'Installing ',(":num),' package',many#'s' + log msg + installdo pkgs + log 'Done.' + readlocal'' + pacman_init '' + checkstatus'' +) +upgrade_console=: 3 : 0 + if. -. init_console 'read' do. '' return. end. + pkgs=. getnames y + if. (0=#pkgs) +. pkgs -: ,<'all' do. pkgs=. 1{"1 PKGDATA end. + pkgs=. pkgs (e. # [) (pkgups # 1&{"1@])PKGDATA + install_console pkgs +) +installdo=: 3 : 0 +msk=. -. y e. :fsize jpath'~addons/',y,'/manifest.ijs' do. + log 'Extraction failed: ',msg + info 'Extraction failed:',LF2,msg + return. +end. +install_addins y +install_config y +) +install_addins=: 3 :0 +fl=. ADDCFG,'addins.txt' +ins=. fixjal2 freads fl +ins=. ins #~ ( txt +msk=. fexist &> ( msk # 1 {"1 PKGDATA) ,. res + res=. (2#LF) joinstring (70&foldtext)&.> res + end. + case. 'showinstalled' do. + dat=. (isjpkgout y) {:: (1 2 3 4 {"1 PKGDATA); (pkgs) ,&.> <'/',x,(x-:'history'){::'.ijs';'.txt' + res=. res #~ msk=. (<_1) ~: res=. fread@jpath &.> fn + if. #res do. + res=. ,((<'== '), &.> msk#pkgs) ,. res + res=. (2#LF) joinstring res + end. +) +remove_console=: 3 : 0 + if. -. init_console 'edit' do. '' return. end. + pkgs=. getnames y + if. pkgs -: ,<'all' do. pkgs=. 1 {"1 PKGDATA end. + pkgs=. pkgs (e. # [) (-.@pkgnew # 1&{"1@]) PKGDATA + pkgs=. pkgs -. . fixver freads LIBVER +) +readlocal=: 3 : 0 +readlin'' +ADDONS=: fixjal freads ADDCFG,'addons.txt' +ADDINS=: fixjal2 freads ADDCFG,'addins.txt' +REV=: fixrev freads ADDCFG,'revision.txt' +LASTUPD=: fixupd freads ADDCFG,'lastupdate.txt' +LIBS=: fixlibs freads ADDCFG,'library.txt' +LIB=: fixlib LIBS +ZIPS=: fixzips freads ADDCFG,'zips.txt' +EMPTY +) +readtree=: 3 : 0 +f=. ADDCFG,'tree.txt' +tree=. LF -.~ freads f +if. -. (d),'manifest.ijs' + if. mft -: _1 do. continue. end. + VERSION=: '' + 0!:100 mft + ver=. fmtver fixver VERSION + n=. }: (#p) }. >d + n=. '/' (I.n='\') } n + r=. r,n,' ',ver,LF + s=. s,d +end. +r fwritenew f +s=. (#p) }.each }: each s +install_labs each s +write_config'' +) +refreshjal=: 3 : 0 +'rc p'=. httpget WWW,zipext 'jal' +if. rc do. 0 return. end. +unzip p;ADDCFG +ferase p +if. *./ CFGFILES e. {."1 [ 1!:0 ADDCFG,'*' do. 1 return. end. +msg=. 'Could not install the local repository catalog.' +log msg +info msg +0 +) +updatejal=: 3 : 0 + log 'Updating server catalog...' + if. -. init_console 'server' do. '' return. end. + refreshaddins'' + readlocal'' + pacman_init'' + res=. checklastupdate'' + res,LF,checkstatus'' +) +RELIBMSG=: 0 : 0 +You are now using the XX base library, and can switch to the YY base library. 
+ +This will download the YY version of the base library and overwrite existing files. Addons are not affected. + +OK to switch to the YY library? +) +prelib=: 3 : 0 +old=. LIBTREE +new=. (('stable';'current') i. (2-s) {"1 dat +srv=. fixvers > (3-s) {"1 dat +{."1 /:"2 srv ,:"1 loc +) +pkgnew=: 3 : 0 +dat=. (s=.isjpkgout y){:: PKGDATA; (2-s) {"1 dat +) +pkgups=: pkgnew < pkglater +pkgsearch=: 3 : 0 + +./"1 +./ y E."1&>"(0 _) 1{"1 PKGDATA +) +pkgshow=: 3 : 0 + y e.~ 1{"1 PKGDATA +) +setshowall=: 3 : 0 +PKGDATA=: ( '/') {.each nms +SECTION=: 'All';nms +DATAMASK=: (#PKGDATA) $ 1 +EMPTY +) +init_console=: 3 : 0 + if. 0=#y do. y=. 'read' end. + select. y + fcase. 'edit';'server' do. + if. -. checkaccess'' do. 0 return. end. + case. 'read' do. + if. -. checkaddonsdir'' do. 0 return. end. + setfiles'' + readlocal'' + pacman_init '' + res=. 1 + case. do. res=. 0 + end. + if. y -: 'server' do. res=. getserver'' end. + res +) +jpkg=: 4 : 0 + select. x + case. 'history';'manifest' do. + x showfiles_console y + case. 'install' do. + install_console y + case. 'reinstall' do. + remove_console y + install_console y + case. 'remove' do. + remove_console y + case. ;:'show search showinstalled shownotinstalled showupgrade status' do. + x show_console y + case. 'update' do. + updatejal '' + case. 'upgrade' do. + upgrade_console y + case. do. + msg=. 'Valid options are:',LF + msg=. msg,' history, install, manifest, remove, reinstall, show, search,',LF + msg=. msg,' showinstalled, shownotinstalled, showupgrade, status,',LF + msg,' update, upgrade' + end. +) +do_install=: 3 : 0 +if. -. checkaccess_jpacman_ '' do. return. end. +'update' jpkg '' +select. y +case. 'qtide';'angle' do. + 'install' jpkg 'base library ide/qt' + getqtbin (y-:'angle'){::0;'angle' + msg=. (+/ 2 1 * IFWIN,'Darwin'-:UNAME) pick 'jqt.sh';'the jqt icon';'jqt.cmd' + smoutput 'exit and restart J using ',msg +case. 'all' do. + 'install' jpkg 'all' + getqtbin 0 +end. +) +do_getqtbin=: 3 : 0 +smoutput 'Installing JQt binaries...' +if. 'Linux'-:UNAME do. + if. IFRASPI do. + z=. 'jqt-raspi-32.tar.gz' + else. + z=. 'jqt-',((y-:'slim') pick 'linux';'slim'),'-',(IF64 pick 'x86';'x64'),'.tar.gz' + end. + z1=. 'libjqt.so' +elseif. IFWIN do. + z=. 'jqt-win',((y-:'slim')#'slim'),'-',(IF64 pick 'x86';'x64'),'.zip' + z1=. 'jqt.dll' +elseif. do. + z=. 'jqt-mac',((y-:'slim')#'slim'),'-',(IF64 pick 'x86';'x64'),'.zip' + z1=. 'libjqt.dylib' +end. +'rc p'=. httpget_jpacman_ 'http://www.jsoftware.com/download/j802/qtide/',z +if. rc do. + smoutput 'unable to download: ',z return. +end. +d=. jpath '~bin' +if. IFWIN do. + unzip_jpacman_ p;d +else. + if. 'Linux'-:UNAME do. + if. (0~:FHS) do. + if. IFRASPI do. + d1=. '/usr/lib/arm-linux-gnueabihf/.' + elseif. IF64 do. + d1=. '/usr/lib/x86_64-linux-gnu/.' + elseif. do. + d1=. '/usr/lib/i386-linux-gnu/.' + end. + hostcmd_jpacman_ 'cd /usr/bin && tar --no-same-owner --no-same-permissions -xzf ',(dquote p), ' && chmod 755 jqt && chmod 644 libjqt.so && mv libjqt.so ',d1 + else. + hostcmd_jpacman_ 'cd ',(dquote d),' && tar xzf ',(dquote p) + end. + else. + hostcmd_jpacman_ 'unzip -o ',(dquote p),' -d ',dquote d + end. +end. +ferase p +if. #1!:0 ((0~:FHS)*.'Linux'-:UNAME){::(jpath '~bin/',z1);'/usr/bin/jqt' do. + m=. 'Finished install of JQt binaries.' +else. + m=. 'Unable to install JQt binaries.',LF + m=. m,'check that you have write permission for: ',LF,((0~:FHS)*.'Linux'-:UNAME){::(jpath '~bin');'/usr/bin' +end. +smoutput m +if. 'Linux'-:UNAME do. return. end. + +tgt=. jpath IFWIN{::'~install/Qt';'~bin/Qt5Core.dll' +y=. 
(*#y){::0;y +smoutput 'Installing Qt library...' +if. IFWIN do. + z=. 'qt53-',((y-:'angle') pick 'win';'angle'),'-',((y-:'slim')#'slim-'),(IF64 pick 'x86';'x64'),'.zip' +else. + z=. 'qt53-mac-',((y-:'slim')#'slim-'),(IF64 pick 'x86';'x64'),'.zip' +end. +'rc p'=. httpget_jpacman_ 'http://www.jsoftware.com/download/j802/qtlib/',z +if. rc do. + smoutput 'unable to download: ',z return. +end. +d=. jpath IFWIN{::'~install';'~bin' +if. IFWIN do. + unzip_jpacman_ p;d +else. + hostcmd_jpacman_ 'unzip -o ',(dquote p),' -d ',dquote d +end. +ferase p +if. #1!:0 tgt do. + m=. 'Finished install of Qt binaries.' +else. + m=. 'Unable to install Qt binaries.',LF + m=. m,'check that you have write permission for: ',LF,IFWIN{::tgt;jpath'~bin' +end. +smoutput m + +) +jpkg_z_=: 3 : 0 + 'help' jpkg y + : + a=. conew 'jpacman' + res=. x jpkg__a y + destroy__a'' + res +) +jpkgv_z_=: (<@:>"1@|:^:(0 ~: #))@jpkg \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/pawn_example b/vendor/pygments/tests/examplefiles/pawn_example new file mode 100644 index 0000000..ee2ecca --- /dev/null +++ b/vendor/pygments/tests/examplefiles/pawn_example @@ -0,0 +1,25 @@ +{include.i} +{nested.i {include.i}} + +&SCOPED-DEFINE MY_NAME "Abe" + +DEF VAR i AS INT NO-UNDO. +i = 0xABE + 1337 / (1 * 1.00) + +def var clowercasetest as char no-undo. +DEF VAR vardashtest AS DATETIME-TZ NO-UNDO. + +DEFINE TEMP-TABLE ttNames NO-UNDO + FIELD cName AS CHAR + INDEX IXPK_ttNames IS PRIMARY UNIQUE cName. + +/* One-line comment */ +/* Two-line + Comment */ + +CREATE ttNames. +ASSIGN ttNames.cName = {&MY_NAME}. + +FOR EACH ttNames: + MESSAGE "Hello, " + ttNames.cName + '!' VIEW-AS ALERT-BOX. +END. diff --git a/vendor/pygments/tests/examplefiles/pkgconfig_example.pc b/vendor/pygments/tests/examplefiles/pkgconfig_example.pc new file mode 100644 index 0000000..2a59204 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/pkgconfig_example.pc @@ -0,0 +1,18 @@ +# This is for a fictional package `yet another portable hatchpotch generator'. +prefix=/usr/local/opt/site/private # define variable `prefix` +exec_prefix=${prefix} # using variable reference +libdir=${exec_prefix}/lib +includedir=${prefix}/include +just_for_test=$${this is not a part of variable reference} # escape with `$$` + +Name: YAPHatchPotchGen +Description: Yet Another Portable HatchPotch GENerator. +Version: 352.9.3 +URL: http://www9.yaphatchpotchgen.net # Don't access. +Requires: piyohogelib-9.0 = 9.5.3 +Requires.private: nyorolib-3.0 = 3.0.9 +Conflicts: apiyohoge <= 8.3 +Libs: -L${libdir} -lyaphatchpotchgen-352.9 # using variable reference +Libs.private: -ll -ly +Cflags: -I${includedir}/piyohogelib-9.0 -I${libdir}/yaphatchpotchgen/include + diff --git a/vendor/pygments/tests/examplefiles/py3tb_test.py3tb b/vendor/pygments/tests/examplefiles/py3tb_test.py3tb new file mode 100644 index 0000000..706a540 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/py3tb_test.py3tb @@ -0,0 +1,4 @@ + File "", line 1 + 1+ + ^ +SyntaxError: invalid syntax diff --git a/vendor/pygments/tests/examplefiles/pycon_ctrlc_traceback b/vendor/pygments/tests/examplefiles/pycon_ctrlc_traceback new file mode 100644 index 0000000..4998fd9 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/pycon_ctrlc_traceback @@ -0,0 +1,118 @@ +x = r""" +>>> import os +>>> print os + +>>> for x in range(10): +... y = x + 2 +... print(x) +... if x > 5: +... raise Exception +... +0 +1 +2 +3 +4 +5 +6 +Traceback (most recent call last): + File "", line 5, in +Exception +>>> +>>> while True: +... pass +... 
+^CTraceback (most recent call last): + File "", line 1, in +KeyboardInterrupt + +>>> class A(Exception):pass +... +>>> class B(Exception):pass +... +>>> try: +... try: +... raise A('first') +... finally: +... raise B('second') +... except A as c: +... print(c) +... +Traceback (most recent call last): + File "", line 3, in +__main__.A: first + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "", line 5, in +__main__.B: second + +>>> x = + File "", line 1 + x = + ^ +SyntaxError: invalid syntax +>>> + +>>> x = 3 +>>> with 5 as y: +... print(x + y) +... +8 + +# TODO +#>>> raise ValueError('multi\n line\ndetail') +#Traceback (most recent call last): +#........ +#ValueError: multi +# line +#detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + .123 +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + ... +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + .... +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + .... +ValueError: multi + line +detail + +>>> raise ValueError('multi\n line\ndetail') +Traceback (most recent call last): + ... +ValueError: multi + line +detail + +>>> raise Exception +Traceback (most recent call last): + File "", line 1, in +Exception +>>> import somemodule +>>> somemodule.blah() +Traceback (most recent call last): + File "", line 1, in + File "/path/to/stuff/somemodule/blah.py", line 658, in blah + raise Exception('Hi.') +Exception: Hi. + diff --git a/vendor/pygments/tests/examplefiles/pycon_test.pycon b/vendor/pygments/tests/examplefiles/pycon_test.pycon index ff70286..9c4fc3d 100644 --- a/vendor/pygments/tests/examplefiles/pycon_test.pycon +++ b/vendor/pygments/tests/examplefiles/pycon_test.pycon @@ -9,6 +9,9 @@ KeyboardInterrupt >>> 1/0 Traceback (most recent call last): -... + ... ZeroDivisionError +>>> 1/0 # this used to swallow the traceback +Traceback (most recent call last): + ... diff --git a/vendor/pygments/tests/examplefiles/qbasic_example b/vendor/pygments/tests/examplefiles/qbasic_example new file mode 100644 index 0000000..27041af --- /dev/null +++ b/vendor/pygments/tests/examplefiles/qbasic_example @@ -0,0 +1,2 @@ +10 print RIGHT$("hi there", 5) +20 goto 10 diff --git a/vendor/pygments/tests/examplefiles/r6rs-comments.scm b/vendor/pygments/tests/examplefiles/r6rs-comments.scm new file mode 100644 index 0000000..cd5c363 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/r6rs-comments.scm @@ -0,0 +1,23 @@ +#!r6rs + +#| + + The FACT procedure computes the factorial + + of a non-negative integer. 
+ +|# + +(define fact + + (lambda (n) + + ;; base case + + (if (= n 0) + + #;(= n 1) + + 1 ; identity of * + + (* n (fact (- n 1)))))) diff --git a/vendor/pygments/tests/examplefiles/resourcebundle_demo b/vendor/pygments/tests/examplefiles/resourcebundle_demo new file mode 100644 index 0000000..e1daa56 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/resourcebundle_demo @@ -0,0 +1,9 @@ +root:table { + usage:string { "Usage: genrb [Options] files" } + version:int { 122 } + errorcodes:array { + :string { "Invalid argument" } + :string { "File not found" } + :string { "\x00 \r \t \n \u1234" } + } +} diff --git a/vendor/pygments/tests/examplefiles/roboconf.graph b/vendor/pygments/tests/examplefiles/roboconf.graph new file mode 100644 index 0000000..e5fdedf --- /dev/null +++ b/vendor/pygments/tests/examplefiles/roboconf.graph @@ -0,0 +1,40 @@ +################## +# A sample graph +################## + +import some-definition.graph; +import another-definition.graph; + +VM { + installer : target; + children: deployable; +} + +facet deployable { + # nothing +} + +# Sample deployables +mysql { + insTaller: puppet; + facets: deployable; + exports: ip, port = 3306; +} + +tomcat { + installer: bash; + facets: deployable; + exports: ip; + children: web-application; +} + +facet web-application { + exports: full-path = undefined; +} + +my-war-1 { + facets: web-application; + installer: file; + exports: full-path = apps/my-war-1; # the relative path + imports: mysql.*; +} diff --git a/vendor/pygments/tests/examplefiles/roboconf.instances b/vendor/pygments/tests/examplefiles/roboconf.instances new file mode 100644 index 0000000..c69a2ab --- /dev/null +++ b/vendor/pygments/tests/examplefiles/roboconf.instances @@ -0,0 +1,24 @@ + +# Deal with imports +import others.instances; + +instance of VM { + name: VM-mysql; + instance of mysql { + name: MySQL; + } +} + +instance of VM { + name: VM ; + count: 5; + + INSTANCE of tomcat { + name: Tomcat; + + instance of my-war-1 { + name: my-war-1; + full-path: apps/my-war; + } + } +} diff --git a/vendor/pygments/tests/examplefiles/robotframework.txt b/vendor/pygments/tests/examplefiles/robotframework_test.txt similarity index 95% rename from vendor/pygments/tests/examplefiles/robotframework.txt rename to vendor/pygments/tests/examplefiles/robotframework_test.txt index 63ba63e..0d8179c 100644 --- a/vendor/pygments/tests/examplefiles/robotframework.txt +++ b/vendor/pygments/tests/examplefiles/robotframework_test.txt @@ -6,6 +6,7 @@ Test Setup Keyword argument argument with ${VARIABLE} *** Variables *** ${VARIABLE} Variable value @{LIST} List variable here +&{DICT} Key1=Value1 Key2=Value2 *** Test Cases *** Keyword-driven example diff --git a/vendor/pygments/tests/examplefiles/rql-queries.rql b/vendor/pygments/tests/examplefiles/rql-queries.rql new file mode 100644 index 0000000..1d86df3 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/rql-queries.rql @@ -0,0 +1,34 @@ +Any N, N2 where N is Note, N2 is Note, N a_faire_par P1, P1 nom 'john', N2 a_faire_par P2, P2 nom 'jane' ; +DISTINCT Any N, D, C, T, A ORDERBY D DESC LIMIT 40 where N is Note, N diem D, W is Workcase, W concerned_by N, N cost C, N text T, N author A, N diem <= today +Bookmark B WHERE B owned_by G, G eid 5; +Any X WHERE E eid 22762, NOT E is_in X, X modification_date D ORDERBY D DESC LIMIT 41; +Any A, R, SUB ORDERBY R WHERE A is "Workcase", S is Division, S concerned_by A, A subject SUB, S eid 85, A ref R; +Any D, T, L WHERE D is Document, A concerned_by D,A eid 14533, D title T, D location L; +Any 
N,A,B,C,D ORDERBY A DESC WHERE N is Note, W concerned_by N, W eid 14533, N diem A,N author B,N text C,N cost D; +Any X ORDERBY D DESC LIMIT 41 WHERE E eid 18134, NOT E concerned_by X, X modification_date D +DISTINCT Any N, D, C, T, A ORDERBY D ASC LIMIT 40 WHERE N is Note, N diem D, P is Person, N to_be_contacted_by G, N cost C, N text T, N author A, G login "john"; +INSERT Person X: X surname "Doe", X firstname "John"; +Workcase W where W ref "ABCD12"; +Workcase W where W ref LIKE "AB%"; +Any X WHERE X X eid 53 +Any X WHERE X Document X occurence_of F, F class C, C name 'Comics' X owned_by U, U login 'syt' X available true +Person P WHERE P work_for P, S name 'Acme', P interested_by T, T name 'training' +Note N WHERE N written_on D, D day> (today -10), N written_by P, P name 'joe' or P name 'jack' +Person P WHERE (P interested_by T, T name 'training') or (P city 'Paris') +Any N, P WHERE X is Person, X name N, X first_name P +String N, P WHERE X is Person, X name N, X first_name P +INSERT Person X: X name 'widget' +INSERT Person X, Person Y: X name 'foo', Y name 'nice', X friend Y +INSERT Person X: X name 'foo', X friend Y WHERE name 'nice' +SET X name 'bar', X first_name 'original' where X is Person X name 'foo' +SET X know Y WHERE X friend Y +DELETE Person X WHERE X name 'foo' +DELETE X friend Y WHERE X is Person, X name 'foo' +Any X WHERE X name LIKE '%lt' +Any X WHERE X name IN ( 'joe', 'jack', 'william', 'averell') +Any X, V WHERE X concerns P, P eid 42, X corrected_in V? +Any C, P WHERE C is Card, P? documented_by C +Point P where P abs X, P ord Y, P value X+Y +Document X where X class C, C name 'Cartoon', X owned_by U, U login 'joe', X available true +(Any X WHERE X is Document) UNION (Any X WHERE X is File) +Any A,B WHERE A creation_date B WITH A BEING (Any X WHERE X is Document) UNION (Any X WHERE X is File) diff --git a/vendor/pygments/tests/examplefiles/rust_example.rs b/vendor/pygments/tests/examplefiles/rust_example.rs deleted file mode 100644 index 1c0a70c..0000000 --- a/vendor/pygments/tests/examplefiles/rust_example.rs +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// based on: -// http://shootout.alioth.debian.org/u32/benchmark.php?test=nbody&lang=java - -extern mod std; - -use core::os; - -// Using sqrt from the standard library is way slower than using libc -// directly even though std just calls libc, I guess it must be -// because the the indirection through another dynamic linker -// stub. Kind of shocking. Might be able to make it faster still with -// an llvm intrinsic. 
-#[nolink] -extern mod libc { - #[legacy_exports]; - fn sqrt(n: float) -> float; -} - -fn main() { - let args = os::args(); - let args = if os::getenv(~"RUST_BENCH").is_some() { - ~[~"", ~"4000000"] - } else if args.len() <= 1u { - ~[~"", ~"100000"] - } else { - args - }; - let n = int::from_str(args[1]).get(); - let mut bodies: ~[Body::props] = NBodySystem::make(); - io::println(fmt!("%f", NBodySystem::energy(bodies))); - let mut i = 0; - while i < n { - NBodySystem::advance(bodies, 0.01); - i += 1; - } - io::println(fmt!("%f", NBodySystem::energy(bodies))); -} - -mod NBodySystem { - use Body; - - pub fn make() -> ~[Body::props] { - let mut bodies: ~[Body::props] = - ~[Body::sun(), - Body::jupiter(), - Body::saturn(), - Body::uranus(), - Body::neptune()]; - - let mut px = 0.0; - let mut py = 0.0; - let mut pz = 0.0; - - let mut i = 0; - while i < 5 { - px += bodies[i].vx * bodies[i].mass; - py += bodies[i].vy * bodies[i].mass; - pz += bodies[i].vz * bodies[i].mass; - - i += 1; - } - - // side-effecting - Body::offset_momentum(&mut bodies[0], px, py, pz); - - return bodies; - } - - pub fn advance(bodies: &mut [Body::props], dt: float) { - let mut i = 0; - while i < 5 { - let mut j = i + 1; - while j < 5 { - advance_one(&mut bodies[i], - &mut bodies[j], dt); - j += 1; - } - - i += 1; - } - - i = 0; - while i < 5 { - move_(&mut bodies[i], dt); - i += 1; - } - } - - pub fn advance_one(bi: &mut Body::props, - bj: &mut Body::props, - dt: float) unsafe { - let dx = bi.x - bj.x; - let dy = bi.y - bj.y; - let dz = bi.z - bj.z; - - let dSquared = dx * dx + dy * dy + dz * dz; - - let distance = ::libc::sqrt(dSquared); - let mag = dt / (dSquared * distance); - - bi.vx -= dx * bj.mass * mag; - bi.vy -= dy * bj.mass * mag; - bi.vz -= dz * bj.mass * mag; - - bj.vx += dx * bi.mass * mag; - bj.vy += dy * bi.mass * mag; - bj.vz += dz * bi.mass * mag; - } - - pub fn move_(b: &mut Body::props, dt: float) { - b.x += dt * b.vx; - b.y += dt * b.vy; - b.z += dt * b.vz; - } - - pub fn energy(bodies: &[Body::props]) -> float unsafe { - let mut dx; - let mut dy; - let mut dz; - let mut distance; - let mut e = 0.0; - - let mut i = 0; - while i < 5 { - e += - 0.5 * bodies[i].mass * - (bodies[i].vx * bodies[i].vx + bodies[i].vy * bodies[i].vy - + bodies[i].vz * bodies[i].vz); - - let mut j = i + 1; - while j < 5 { - dx = bodies[i].x - bodies[j].x; - dy = bodies[i].y - bodies[j].y; - dz = bodies[i].z - bodies[j].z; - - distance = ::libc::sqrt(dx * dx + dy * dy + dz * dz); - e -= bodies[i].mass * bodies[j].mass / distance; - - j += 1; - } - - i += 1; - } - return e; - - } -} - -mod Body { - use Body; - - pub const PI: float = 3.141592653589793; - pub const SOLAR_MASS: float = 39.478417604357432; - // was 4 * PI * PI originally - pub const DAYS_PER_YEAR: float = 365.24; - - pub type props = - {mut x: float, - mut y: float, - mut z: float, - mut vx: float, - mut vy: float, - mut vz: float, - mass: float}; - - pub fn jupiter() -> Body::props { - return {mut x: 4.84143144246472090e+00, - mut y: -1.16032004402742839e+00, - mut z: -1.03622044471123109e-01, - mut vx: 1.66007664274403694e-03 * DAYS_PER_YEAR, - mut vy: 7.69901118419740425e-03 * DAYS_PER_YEAR, - mut vz: -6.90460016972063023e-05 * DAYS_PER_YEAR, - mass: 9.54791938424326609e-04 * SOLAR_MASS}; - } - - pub fn saturn() -> Body::props { - return {mut x: 8.34336671824457987e+00, - mut y: 4.12479856412430479e+00, - mut z: -4.03523417114321381e-01, - mut vx: -2.76742510726862411e-03 * DAYS_PER_YEAR, - mut vy: 4.99852801234917238e-03 * DAYS_PER_YEAR, - mut vz: 
2.30417297573763929e-05 * DAYS_PER_YEAR, - mass: 2.85885980666130812e-04 * SOLAR_MASS}; - } - - pub fn uranus() -> Body::props { - return {mut x: 1.28943695621391310e+01, - mut y: -1.51111514016986312e+01, - mut z: -2.23307578892655734e-01, - mut vx: 2.96460137564761618e-03 * DAYS_PER_YEAR, - mut vy: 2.37847173959480950e-03 * DAYS_PER_YEAR, - mut vz: -2.96589568540237556e-05 * DAYS_PER_YEAR, - mass: 4.36624404335156298e-05 * SOLAR_MASS}; - } - - pub fn neptune() -> Body::props { - return {mut x: 1.53796971148509165e+01, - mut y: -2.59193146099879641e+01, - mut z: 1.79258772950371181e-01, - mut vx: 2.68067772490389322e-03 * DAYS_PER_YEAR, - mut vy: 1.62824170038242295e-03 * DAYS_PER_YEAR, - mut vz: -9.51592254519715870e-05 * DAYS_PER_YEAR, - mass: 5.15138902046611451e-05 * SOLAR_MASS}; - } - - pub fn sun() -> Body::props { - return {mut x: 0.0, - mut y: 0.0, - mut z: 0.0, - mut vx: 0.0, - mut vy: 0.0, - mut vz: 0.0, - mass: SOLAR_MASS}; - } - - pub fn offset_momentum(props: &mut Body::props, - px: float, py: float, pz: float) { - props.vx = -px / SOLAR_MASS; - props.vy = -py / SOLAR_MASS; - props.vz = -pz / SOLAR_MASS; - } - -} diff --git a/vendor/pygments/tests/examplefiles/sample.qvto b/vendor/pygments/tests/examplefiles/sample.qvto new file mode 100644 index 0000000..6241ee2 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/sample.qvto @@ -0,0 +1,4 @@ +transformation Foo(uml: SimpleUML, + rdbms : SimpleRDBMS) { +} +/* comment */ diff --git a/vendor/pygments/tests/examplefiles/scope.cirru b/vendor/pygments/tests/examplefiles/scope.cirru new file mode 100644 index 0000000..c3d1a2c --- /dev/null +++ b/vendor/pygments/tests/examplefiles/scope.cirru @@ -0,0 +1,237 @@ + +-- demo + +define a (read cd) $ if (> a cd) + print demo + print "not demo" + +say $ print a $ save $ b $ x $ c 8 + +print fun + +-- test on folding + +a $ + +b $ c + +d $ e $ f + +g $ h $ i j $ k $ + +-- test on comma + +print (, a) + a + , b + , c (, d) + +-- test on HTML + +doctype + +html + head + title $ = Cirru + script (:defer) $ :src build/build.js + link (:rel stylesheet) $ :href css/page.css + link (:rel icon) + :href http://logo.cirru.org/cirru-32x32.png?v=3 + body + textarea.demo.source $ :placeholder "Source Code" + textarea.demo.target $ :placeholder "Compiled Data" + @insert ../html/ga.html + +-- test on indentation + +a $ b $ c + +e f + (g) + h + +-- test on parentheses + +3 4 (1) 4 + +((((1)))) + +x + +-- test on quotes + +a b c d + +"a b c d" + +"a b \" c d" + +"a b" "c d" + +-- test on unfolding + +set + add 1 $ + , x y + add 5 $ + add 2 + +-- test on HTML attributes + +div + div + :class a + div + :class a b c d + + div + :class a (@ b) (@ c) d + + div + :class a + @if (@ b) + div b + div c + div + :class a + @if (@ b) b c + +-- test on helpers + +@if (@call a b) (div) (span) + +@each members + div (@ name) + +@each a + div (@ b) + @each c + div (@ d) + +-- test on HTML structure + +@rich more + #demo-more-box + #demo-more + :data-lang-text demo-more + #demo-more-list + @each room + .demo-more-room + span.demo-name + @ topic + span.demo-join + :data-lang-text demo-join + :data-id (@ id) + +-- text on bool + +print #true +print #false +print #yes +print #no +print #t +print #f + +-- test on Cirru js + +set a 1 +set a (= "This is a string") +set b #t + +-- this is comment + +number 1.4 +string x +regex ^\s$ +regex "^\\s-\"$" +sentence this is a string + +array 1 2 3 (= nothing) #t (= #t) + +set c (array 1 (= nothing)) + +set d $ object (a (= google)) + b (= reader) + c 1 + d $ array 1 2 (= string) + +1 c 
+-1 c + +:b d +.log console a 2 +.log console + +set demo $ object + call $ \ x (.log console x) (. this call) +. demo (.call 1) (.call 4) + +=.x d 3 + +set d null + +new Array 1 2 3 + +set x (:length c) +set str (= str) +set c (.toUpperCase str) + +\ x (+ x 1) +\ (x y) (+ x y) +\ x (set aa 1) (+ aa x) + +set f (\ x (+ x 1)) + ++ a 1 2 ++= a 1 + +> 1 2 3 + +if (> 2 1) (+ a 1) +else 2 + +if (> a 2) + .log console (= "large") +elseif (> a 1) + .log console (= "still good") +else + .log console (= "so so") + +set a $ if (> 2 1) #t #f + +switch a + 1 (.log console 1) + 2 (.log console 2) + else (.log console (= "something else")) + +set a $ array 2 +3 -4 +for (a x i) (.log console x i) + +set a 0 +while (< a 10) (+= a 1) (.log console a) + +-- WebAssembly variable names + +-- ":(c) 2015 Andreas Rossberg" + +module + export :even $even + export "odd" $odd + + func $even (param $n i32) (result i32) + if (i32.eq (get_local $n) (i32.const 0)) + i32.const 1 + call $odd (i32.sub (get_local $n) (i32.const 1)) + + func $odd (param $n i32) (result i32) + store_global $scratch (get_local $n) + if (i32.eq (get_local $n) (i32.const 0) + i32.const 0 + call $even (i32.sub (get_local $n) (i32.const 1)) + + global $scratch i32 + +assert_eq (invoke :even (i32.const 13)) (i32.const 0) +assert_eq (invoke :even (i32.const 20)) (i32.const 1) +assert_eq (invoke :odd (i32.const 13)) (i32.const 1) +assert_eq (invoke :odd (i32.const 20)) (i32.const 0) diff --git a/vendor/pygments/tests/examplefiles/simple.camkes b/vendor/pygments/tests/examplefiles/simple.camkes new file mode 100644 index 0000000..43e1173 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/simple.camkes @@ -0,0 +1,38 @@ +/* + * Example input for CAmkES lexer. + */ + +import ; + +// A single-line comment. + +import "components/Client/Client.camkes"; +import "components/Echo/Echo.camkes"; + +component Foo { + include "proc_defn.h"; + control; + dataport Buf my_port; +} + +#ifdef BAR_AVAILABLE + component Bar { + provides CharAccess ca; + } +#endif + + #define HASH_DEF_WITH_LEADING_SPACE + +assembly { /* Another multiline comment. */ + composition { + component Echo echo; + component Client client; + + connection seL4RPC simple(from client.s, to echo.s); + } + + configuration { + echo.dma_pool = 4096; + } +} + diff --git a/vendor/pygments/tests/examplefiles/simple.md b/vendor/pygments/tests/examplefiles/simple.croc similarity index 100% rename from vendor/pygments/tests/examplefiles/simple.md rename to vendor/pygments/tests/examplefiles/simple.croc diff --git a/vendor/pygments/tests/examplefiles/sparql.rq b/vendor/pygments/tests/examplefiles/sparql.rq new file mode 100644 index 0000000..d979d20 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/sparql.rq @@ -0,0 +1,48 @@ +# This is a test SPARQL query + +BASE + +PREFIX foaf: +PREFIX ex: +PREFIX xsd: +PREFIX dcterms: + +SELECT ?person (COUNT(?nick) AS ?nickCount) { + <#jonny> foaf:knows ?person . + ?person a foaf:Person . + ?person foaf:firstName "Freddy" . + ?person foaf:lastName "Smith" . + # predicate-object list + ?person foaf:nick ?nick ; + foaf:age "21"^^xsd:int ; # typed literal + ex:title 'Mr' ; # single-quoted string + ex:width 2 ; # integer + ex:height 1.80 ; # float + ex:distanceToSun 1.4e8 ; # float with exponent + ex:ownsACat true ; + ex:catName "Kitty", "Kitty_" ; # object list + # some other float values + ex:float1 .125 ; + ex:float2 +2.5e10 ; + ex:float3 2.5e+10 ; + ex:float4 -1.e-10 ; + ex:float5 .0e1 ; + ex:float6 5e11 ; + ex:float7 1. 
; + ex:aUnicodeÀExample "somestring" ; + ex:catName "Kitty", "Kitty_" ; # object list + ex:escape "\n\u00c0\U00010000"; + ex:catAge ?catage ; + dcterms:description "Someone with a cat called \"cat\"."@en . # language tag + ?person foaf:knows _:b0 . + _:b0 foaf:knows [ _:b1 a foaf:Person; foaf:name "Jonny" . ] . + OPTIONAL { ?person foaf:isPrimaryTopicOf ?page } + OPTIONAL { ?person foaf:name ?name + { ?person foaf:depiction ?img } + UNION + { ?person foaf:firstName ?firstN } } + FILTER ( bound(?page) || bound(?img) || bound(?firstN) ) + FILTER ( ?catage < 101 && ?catage > 9 && ?catage >= 10 && ?catage <= 100 && ?catage != 20 ) +} +GROUP BY ?person +ORDER BY ?img ASC(?firstN) DESC(?page) diff --git a/vendor/pygments/tests/examplefiles/subr.el b/vendor/pygments/tests/examplefiles/subr.el new file mode 100644 index 0000000..deadca6 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/subr.el @@ -0,0 +1,4868 @@ +;;; subr.el --- basic lisp subroutines for Emacs -*- coding: utf-8; lexical-binding:t -*- + +;; Copyright (C) 1985-1986, 1992, 1994-1995, 1999-2015 Free Software +;; Foundation, Inc. + +;; Maintainer: emacs-devel@gnu.org +;; Keywords: internal +;; Package: emacs + +;; This file is part of GNU Emacs. + +;; GNU Emacs is free software: you can redistribute it and/or modify +;; it under the terms of the GNU General Public License as published by +;; the Free Software Foundation, either version 3 of the License, or +;; (at your option) any later version. + +;; GNU Emacs is distributed in the hope that it will be useful, +;; but WITHOUT ANY WARRANTY; without even the implied warranty of +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +;; GNU General Public License for more details. + +;; You should have received a copy of the GNU General Public License +;; along with GNU Emacs. If not, see . + +;;; Commentary: + +;;; Code: + +;; Beware: while this file has tag `utf-8', before it's compiled, it gets +;; loaded as "raw-text", so non-ASCII chars won't work right during bootstrap. + +(defmacro declare-function (_fn _file &optional _arglist _fileonly) + "Tell the byte-compiler that function FN is defined, in FILE. +Optional ARGLIST is the argument list used by the function. +The FILE argument is not used by the byte-compiler, but by the +`check-declare' package, which checks that FILE contains a +definition for FN. ARGLIST is used by both the byte-compiler +and `check-declare' to check for consistency. + +FILE can be either a Lisp file (in which case the \".el\" +extension is optional), or a C file. C files are expanded +relative to the Emacs \"src/\" directory. Lisp files are +searched for using `locate-library', and if that fails they are +expanded relative to the location of the file containing the +declaration. A FILE with an \"ext:\" prefix is an external file. +`check-declare' will check such files if they are found, and skip +them without error if they are not. + +FILEONLY non-nil means that `check-declare' will only check that +FILE exists, not that it defines FN. This is intended for +function-definitions that `check-declare' does not recognize, e.g. +`defstruct'. + +To specify a value for FILEONLY without passing an argument list, +set ARGLIST to t. This is necessary because nil means an +empty argument list, rather than an unspecified one. + +Note that for the purposes of `check-declare', this statement +must be the first non-whitespace on a line. + +For more information, see Info node `(elisp)Declaring Functions'." 
+ ;; Does nothing - byte-compile-declare-function does the work. + nil) + + +;;;; Basic Lisp macros. + +(defalias 'not 'null) + +(defmacro noreturn (form) + "Evaluate FORM, expecting it not to return. +If FORM does return, signal an error." + (declare (debug t)) + `(prog1 ,form + (error "Form marked with `noreturn' did return"))) + +(defmacro 1value (form) + "Evaluate FORM, expecting a constant return value. +This is the global do-nothing version. There is also `testcover-1value' +that complains if FORM ever does return differing values." + (declare (debug t)) + form) + +(defmacro def-edebug-spec (symbol spec) + "Set the `edebug-form-spec' property of SYMBOL according to SPEC. +Both SYMBOL and SPEC are unevaluated. The SPEC can be: +0 (instrument no arguments); t (instrument all arguments); +a symbol (naming a function with an Edebug specification); or a list. +The elements of the list describe the argument types; see +Info node `(elisp)Specification List' for details." + `(put (quote ,symbol) 'edebug-form-spec (quote ,spec))) + +(defmacro lambda (&rest cdr) + "Return a lambda expression. +A call of the form (lambda ARGS DOCSTRING INTERACTIVE BODY) is +self-quoting; the result of evaluating the lambda expression is the +expression itself. The lambda expression may then be treated as a +function, i.e., stored as the function value of a symbol, passed to +`funcall' or `mapcar', etc. + +ARGS should take the same form as an argument list for a `defun'. +DOCSTRING is an optional documentation string. + If present, it should describe how to call the function. + But documentation strings are usually not useful in nameless functions. +INTERACTIVE should be a call to the function `interactive', which see. +It may also be omitted. +BODY should be a list of Lisp expressions. + +\(fn ARGS [DOCSTRING] [INTERACTIVE] BODY)" + (declare (doc-string 2) (indent defun) + (debug (&define lambda-list + [&optional stringp] + [&optional ("interactive" interactive)] + def-body))) + ;; Note that this definition should not use backquotes; subr.el should not + ;; depend on backquote.el. + (list 'function (cons 'lambda cdr))) + +(defmacro setq-local (var val) + "Set variable VAR to value VAL in current buffer." + ;; Can't use backquote here, it's too early in the bootstrap. + (list 'set (list 'make-local-variable (list 'quote var)) val)) + +(defmacro defvar-local (var val &optional docstring) + "Define VAR as a buffer-local variable with default value VAL. +Like `defvar' but additionally marks the variable as being automatically +buffer-local wherever it is set." + (declare (debug defvar) (doc-string 3)) + ;; Can't use backquote here, it's too early in the bootstrap. + (list 'progn (list 'defvar var val docstring) + (list 'make-variable-buffer-local (list 'quote var)))) + +(defun apply-partially (fun &rest args) + "Return a function that is a partial application of FUN to ARGS. +ARGS is a list of the first N arguments to pass to FUN. +The result is a new function which does the same as FUN, except that +the first N arguments are fixed at the values with which this function +was called." + (lambda (&rest args2) + (apply fun (append args args2)))) + +(defmacro push (newelt place) + "Add NEWELT to the list stored in the generalized variable PLACE. +This is morally equivalent to (setf PLACE (cons NEWELT PLACE)), +except that PLACE is only evaluated once (after NEWELT)." + (declare (debug (form gv-place))) + (if (symbolp place) + ;; Important special case, to avoid triggering GV too early in + ;; the bootstrap. 
+ (list 'setq place + (list 'cons newelt place)) + (require 'macroexp) + (macroexp-let2 macroexp-copyable-p v newelt + (gv-letplace (getter setter) place + (funcall setter `(cons ,v ,getter)))))) + +(defmacro pop (place) + "Return the first element of PLACE's value, and remove it from the list. +PLACE must be a generalized variable whose value is a list. +If the value is nil, `pop' returns nil but does not actually +change the list." + (declare (debug (gv-place))) + ;; We use `car-safe' here instead of `car' because the behavior is the same + ;; (if it's not a cons cell, the `cdr' would have signaled an error already), + ;; but `car-safe' is total, so the byte-compiler can safely remove it if the + ;; result is not used. + `(car-safe + ,(if (symbolp place) + ;; So we can use `pop' in the bootstrap before `gv' can be used. + (list 'prog1 place (list 'setq place (list 'cdr place))) + (gv-letplace (getter setter) place + (macroexp-let2 macroexp-copyable-p x getter + `(prog1 ,x ,(funcall setter `(cdr ,x)))))))) + +(defmacro when (cond &rest body) + "If COND yields non-nil, do BODY, else return nil. +When COND yields non-nil, eval BODY forms sequentially and return +value of last one, or nil if there are none. + +\(fn COND BODY...)" + (declare (indent 1) (debug t)) + (list 'if cond (cons 'progn body))) + +(defmacro unless (cond &rest body) + "If COND yields nil, do BODY, else return nil. +When COND yields nil, eval BODY forms sequentially and return +value of last one, or nil if there are none. + +\(fn COND BODY...)" + (declare (indent 1) (debug t)) + (cons 'if (cons cond (cons nil body)))) + +(defmacro dolist (spec &rest body) + "Loop over a list. +Evaluate BODY with VAR bound to each car from LIST, in turn. +Then evaluate RESULT to get return value, default nil. + +\(fn (VAR LIST [RESULT]) BODY...)" + (declare (indent 1) (debug ((symbolp form &optional form) body))) + ;; It would be cleaner to create an uninterned symbol, + ;; but that uses a lot more space when many functions in many files + ;; use dolist. + ;; FIXME: This cost disappears in byte-compiled lexical-binding files. + (let ((temp '--dolist-tail--)) + ;; This is not a reliable test, but it does not matter because both + ;; semantics are acceptable, tho one is slightly faster with dynamic + ;; scoping and the other is slightly faster (and has cleaner semantics) + ;; with lexical scoping. + (if lexical-binding + `(let ((,temp ,(nth 1 spec))) + (while ,temp + (let ((,(car spec) (car ,temp))) + ,@body + (setq ,temp (cdr ,temp)))) + ,@(cdr (cdr spec))) + `(let ((,temp ,(nth 1 spec)) + ,(car spec)) + (while ,temp + (setq ,(car spec) (car ,temp)) + ,@body + (setq ,temp (cdr ,temp))) + ,@(if (cdr (cdr spec)) + `((setq ,(car spec) nil) ,@(cdr (cdr spec)))))))) + +(defmacro dotimes (spec &rest body) + "Loop a certain number of times. +Evaluate BODY with VAR bound to successive integers running from 0, +inclusive, to COUNT, exclusive. Then evaluate RESULT to get +the return value (nil if RESULT is omitted). + +\(fn (VAR COUNT [RESULT]) BODY...)" + (declare (indent 1) (debug dolist)) + ;; It would be cleaner to create an uninterned symbol, + ;; but that uses a lot more space when many functions in many files + ;; use dotimes. + ;; FIXME: This cost disappears in byte-compiled lexical-binding files. 
+ (let ((temp '--dotimes-limit--) + (start 0) + (end (nth 1 spec))) + ;; This is not a reliable test, but it does not matter because both + ;; semantics are acceptable, tho one is slightly faster with dynamic + ;; scoping and the other has cleaner semantics. + (if lexical-binding + (let ((counter '--dotimes-counter--)) + `(let ((,temp ,end) + (,counter ,start)) + (while (< ,counter ,temp) + (let ((,(car spec) ,counter)) + ,@body) + (setq ,counter (1+ ,counter))) + ,@(if (cddr spec) + ;; FIXME: This let often leads to "unused var" warnings. + `((let ((,(car spec) ,counter)) ,@(cddr spec)))))) + `(let ((,temp ,end) + (,(car spec) ,start)) + (while (< ,(car spec) ,temp) + ,@body + (setq ,(car spec) (1+ ,(car spec)))) + ,@(cdr (cdr spec)))))) + +(defmacro declare (&rest _specs) + "Do not evaluate any arguments, and return nil. +If a `declare' form appears as the first form in the body of a +`defun' or `defmacro' form, SPECS specifies various additional +information about the function or macro; these go into effect +during the evaluation of the `defun' or `defmacro' form. + +The possible values of SPECS are specified by +`defun-declarations-alist' and `macro-declarations-alist'. + +For more information, see info node `(elisp)Declare Form'." + ;; FIXME: edebug spec should pay attention to defun-declarations-alist. + nil) + +(defmacro ignore-errors (&rest body) + "Execute BODY; if an error occurs, return nil. +Otherwise, return result of last form in BODY. +See also `with-demoted-errors' that does something similar +without silencing all errors." + (declare (debug t) (indent 0)) + `(condition-case nil (progn ,@body) (error nil))) + +;;;; Basic Lisp functions. + +(defun ignore (&rest _ignore) + "Do nothing and return nil. +This function accepts any number of arguments, but ignores them." + (interactive) + nil) + +;; Signal a compile-error if the first arg is missing. +(defun error (&rest args) + "Signal an error, making error message by passing all args to `format'. +In Emacs, the convention is that error messages start with a capital +letter but *do not* end with a period. Please follow this convention +for the sake of consistency." + (declare (advertised-calling-convention (string &rest args) "23.1")) + (signal 'error (list (apply 'format args)))) + +(defun user-error (format &rest args) + "Signal a pilot error, making error message by passing all args to `format'. +In Emacs, the convention is that error messages start with a capital +letter but *do not* end with a period. Please follow this convention +for the sake of consistency. +This is just like `error' except that `user-error's are expected to be the +result of an incorrect manipulation on the part of the user, rather than the +result of an actual problem." + (signal 'user-error (list (apply #'format format args)))) + +(defun define-error (name message &optional parent) + "Define NAME as a new error signal. +MESSAGE is a string that will be output to the echo area if such an error +is signaled without being caught by a `condition-case'. +PARENT is either a signal or a list of signals from which it inherits. +Defaults to `error'." 
+ (unless parent (setq parent 'error)) + (let ((conditions + (if (consp parent) + (apply #'append + (mapcar (lambda (parent) + (cons parent + (or (get parent 'error-conditions) + (error "Unknown signal `%s'" parent)))) + parent)) + (cons parent (get parent 'error-conditions))))) + (put name 'error-conditions + (delete-dups (copy-sequence (cons name conditions)))) + (when message (put name 'error-message message)))) + +;; We put this here instead of in frame.el so that it's defined even on +;; systems where frame.el isn't loaded. +(defun frame-configuration-p (object) + "Return non-nil if OBJECT seems to be a frame configuration. +Any list whose car is `frame-configuration' is assumed to be a frame +configuration." + (and (consp object) + (eq (car object) 'frame-configuration))) + + +;;;; List functions. + +(defsubst caar (x) + "Return the car of the car of X." + (car (car x))) + +(defsubst cadr (x) + "Return the car of the cdr of X." + (car (cdr x))) + +(defsubst cdar (x) + "Return the cdr of the car of X." + (cdr (car x))) + +(defsubst cddr (x) + "Return the cdr of the cdr of X." + (cdr (cdr x))) + +(defun last (list &optional n) + "Return the last link of LIST. Its car is the last element. +If LIST is nil, return nil. +If N is non-nil, return the Nth-to-last link of LIST. +If N is bigger than the length of LIST, return LIST." + (if n + (and (>= n 0) + (let ((m (safe-length list))) + (if (< n m) (nthcdr (- m n) list) list))) + (and list + (nthcdr (1- (safe-length list)) list)))) + +(defun butlast (list &optional n) + "Return a copy of LIST with the last N elements removed. +If N is omitted or nil, the last element is removed from the +copy." + (if (and n (<= n 0)) list + (nbutlast (copy-sequence list) n))) + +(defun nbutlast (list &optional n) + "Modifies LIST to remove the last N elements. +If N is omitted or nil, remove the last element." + (let ((m (length list))) + (or n (setq n 1)) + (and (< n m) + (progn + (if (> n 0) (setcdr (nthcdr (- (1- m) n) list) nil)) + list)))) + +(defun zerop (number) + "Return t if NUMBER is zero." + ;; Used to be in C, but it's pointless since (= 0 n) is faster anyway because + ;; = has a byte-code. + (declare (compiler-macro (lambda (_) `(= 0 ,number)))) + (= 0 number)) + +(defun delete-dups (list) + "Destructively remove `equal' duplicates from LIST. +Store the result in LIST and return it. LIST must be a proper list. +Of several `equal' occurrences of an element in LIST, the first +one is kept." + (let ((tail list)) + (while tail + (setcdr tail (delete (car tail) (cdr tail))) + (setq tail (cdr tail)))) + list) + +;; See http://lists.gnu.org/archive/html/emacs-devel/2013-05/msg00204.html +(defun delete-consecutive-dups (list &optional circular) + "Destructively remove `equal' consecutive duplicates from LIST. +First and last elements are considered consecutive if CIRCULAR is +non-nil." + (let ((tail list) last) + (while (consp tail) + (if (equal (car tail) (cadr tail)) + (setcdr tail (cddr tail)) + (setq last (car tail) + tail (cdr tail)))) + (if (and circular + (cdr list) + (equal last (car list))) + (nbutlast list) + list))) + +(defun number-sequence (from &optional to inc) + "Return a sequence of numbers from FROM to TO (both inclusive) as a list. +INC is the increment used between numbers in the sequence and defaults to 1. +So, the Nth element of the list is (+ FROM (* N INC)) where N counts from +zero. TO is only included if there is an N for which TO = FROM + N * INC. +If TO is nil or numerically equal to FROM, return (FROM). 
+If INC is positive and TO is less than FROM, or INC is negative +and TO is larger than FROM, return nil. +If INC is zero and TO is neither nil nor numerically equal to +FROM, signal an error. + +This function is primarily designed for integer arguments. +Nevertheless, FROM, TO and INC can be integer or float. However, +floating point arithmetic is inexact. For instance, depending on +the machine, it may quite well happen that +\(number-sequence 0.4 0.6 0.2) returns the one element list (0.4), +whereas (number-sequence 0.4 0.8 0.2) returns a list with three +elements. Thus, if some of the arguments are floats and one wants +to make sure that TO is included, one may have to explicitly write +TO as (+ FROM (* N INC)) or use a variable whose value was +computed with this exact expression. Alternatively, you can, +of course, also replace TO with a slightly larger value +\(or a slightly more negative value if INC is negative)." + (if (or (not to) (= from to)) + (list from) + (or inc (setq inc 1)) + (when (zerop inc) (error "The increment can not be zero")) + (let (seq (n 0) (next from)) + (if (> inc 0) + (while (<= next to) + (setq seq (cons next seq) + n (1+ n) + next (+ from (* n inc)))) + (while (>= next to) + (setq seq (cons next seq) + n (1+ n) + next (+ from (* n inc))))) + (nreverse seq)))) + +(defun copy-tree (tree &optional vecp) + "Make a copy of TREE. +If TREE is a cons cell, this recursively copies both its car and its cdr. +Contrast to `copy-sequence', which copies only along the cdrs. With second +argument VECP, this copies vectors as well as conses." + (if (consp tree) + (let (result) + (while (consp tree) + (let ((newcar (car tree))) + (if (or (consp (car tree)) (and vecp (vectorp (car tree)))) + (setq newcar (copy-tree (car tree) vecp))) + (push newcar result)) + (setq tree (cdr tree))) + (nconc (nreverse result) tree)) + (if (and vecp (vectorp tree)) + (let ((i (length (setq tree (copy-sequence tree))))) + (while (>= (setq i (1- i)) 0) + (aset tree i (copy-tree (aref tree i) vecp))) + tree) + tree))) + +;;;; Various list-search functions. + +(defun assoc-default (key alist &optional test default) + "Find object KEY in a pseudo-alist ALIST. +ALIST is a list of conses or objects. Each element + (or the element's car, if it is a cons) is compared with KEY by + calling TEST, with two arguments: (i) the element or its car, + and (ii) KEY. +If that is non-nil, the element matches; then `assoc-default' + returns the element's cdr, if it is a cons, or DEFAULT if the + element is not a cons. + +If no element matches, the value is nil. +If TEST is omitted or nil, `equal' is used." + (let (found (tail alist) value) + (while (and tail (not found)) + (let ((elt (car tail))) + (when (funcall (or test 'equal) (if (consp elt) (car elt) elt) key) + (setq found t value (if (consp elt) (cdr elt) default)))) + (setq tail (cdr tail))) + value)) + +(defun assoc-ignore-case (key alist) + "Like `assoc', but ignores differences in case and text representation. +KEY must be a string. Upper-case and lower-case letters are treated as equal. +Unibyte strings are converted to multibyte for comparison." + (declare (obsolete assoc-string "22.1")) + (assoc-string key alist t)) + +(defun assoc-ignore-representation (key alist) + "Like `assoc', but ignores differences in text representation. +KEY must be a string. +Unibyte strings are converted to multibyte for comparison." 
+ (declare (obsolete assoc-string "22.1")) + (assoc-string key alist nil)) + +(defun member-ignore-case (elt list) + "Like `member', but ignore differences in case and text representation. +ELT must be a string. Upper-case and lower-case letters are treated as equal. +Unibyte strings are converted to multibyte for comparison. +Non-strings in LIST are ignored." + (while (and list + (not (and (stringp (car list)) + (eq t (compare-strings elt 0 nil (car list) 0 nil t))))) + (setq list (cdr list))) + list) + +(defun assq-delete-all (key alist) + "Delete from ALIST all elements whose car is `eq' to KEY. +Return the modified alist. +Elements of ALIST that are not conses are ignored." + (while (and (consp (car alist)) + (eq (car (car alist)) key)) + (setq alist (cdr alist))) + (let ((tail alist) tail-cdr) + (while (setq tail-cdr (cdr tail)) + (if (and (consp (car tail-cdr)) + (eq (car (car tail-cdr)) key)) + (setcdr tail (cdr tail-cdr)) + (setq tail tail-cdr)))) + alist) + +(defun rassq-delete-all (value alist) + "Delete from ALIST all elements whose cdr is `eq' to VALUE. +Return the modified alist. +Elements of ALIST that are not conses are ignored." + (while (and (consp (car alist)) + (eq (cdr (car alist)) value)) + (setq alist (cdr alist))) + (let ((tail alist) tail-cdr) + (while (setq tail-cdr (cdr tail)) + (if (and (consp (car tail-cdr)) + (eq (cdr (car tail-cdr)) value)) + (setcdr tail (cdr tail-cdr)) + (setq tail tail-cdr)))) + alist) + +(defun alist-get (key alist &optional default remove) + "Get the value associated to KEY in ALIST. +DEFAULT is the value to return if KEY is not found in ALIST. +REMOVE, if non-nil, means that when setting this element, we should +remove the entry if the new value is `eql' to DEFAULT." + (ignore remove) ;;Silence byte-compiler. + (let ((x (assq key alist))) + (if x (cdr x) default))) + +(defun remove (elt seq) + "Return a copy of SEQ with all occurrences of ELT removed. +SEQ must be a list, vector, or string. The comparison is done with `equal'." + (if (nlistp seq) + ;; If SEQ isn't a list, there's no need to copy SEQ because + ;; `delete' will return a new object. + (delete elt seq) + (delete elt (copy-sequence seq)))) + +(defun remq (elt list) + "Return LIST with all occurrences of ELT removed. +The comparison is done with `eq'. Contrary to `delq', this does not use +side-effects, and the argument LIST is not modified." + (while (and (eq elt (car list)) (setq list (cdr list)))) + (if (memq elt list) + (delq elt (copy-sequence list)) + list)) + +;;;; Keymap support. + +(defun kbd (keys) + "Convert KEYS to the internal Emacs key representation. +KEYS should be a string constant in the format used for +saving keyboard macros (see `edmacro-mode')." + ;; Don't use a defalias, since the `pure' property is only true for + ;; the calling convention of `kbd'. + (read-kbd-macro keys)) +(put 'kbd 'pure t) + +(defun undefined () + "Beep to tell the user this binding is undefined." + (interactive) + (ding) + (message "%s is undefined" (key-description (this-single-command-keys))) + (setq defining-kbd-macro nil) + (force-mode-line-update) + ;; If this is a down-mouse event, don't reset prefix-arg; + ;; pass it to the command run by the up event. + (setq prefix-arg + (when (memq 'down (event-modifiers last-command-event)) + current-prefix-arg))) + +;; Prevent the \{...} documentation construct +;; from mentioning keys that run this command. 
+(put 'undefined 'suppress-keymap t) + +(defun suppress-keymap (map &optional nodigits) + "Make MAP override all normally self-inserting keys to be undefined. +Normally, as an exception, digits and minus-sign are set to make prefix args, +but optional second arg NODIGITS non-nil treats them like other chars." + (define-key map [remap self-insert-command] 'undefined) + (or nodigits + (let (loop) + (define-key map "-" 'negative-argument) + ;; Make plain numbers do numeric args. + (setq loop ?0) + (while (<= loop ?9) + (define-key map (char-to-string loop) 'digit-argument) + (setq loop (1+ loop)))))) + +(defun make-composed-keymap (maps &optional parent) + "Construct a new keymap composed of MAPS and inheriting from PARENT. +When looking up a key in the returned map, the key is looked in each +keymap of MAPS in turn until a binding is found. +If no binding is found in MAPS, the lookup continues in PARENT, if non-nil. +As always with keymap inheritance, a nil binding in MAPS overrides +any corresponding binding in PARENT, but it does not override corresponding +bindings in other keymaps of MAPS. +MAPS can be a list of keymaps or a single keymap. +PARENT if non-nil should be a keymap." + `(keymap + ,@(if (keymapp maps) (list maps) maps) + ,@parent)) + +(defun define-key-after (keymap key definition &optional after) + "Add binding in KEYMAP for KEY => DEFINITION, right after AFTER's binding. +This is like `define-key' except that the binding for KEY is placed +just after the binding for the event AFTER, instead of at the beginning +of the map. Note that AFTER must be an event type (like KEY), NOT a command +\(like DEFINITION). + +If AFTER is t or omitted, the new binding goes at the end of the keymap. +AFTER should be a single event type--a symbol or a character, not a sequence. + +Bindings are always added before any inherited map. + +The order of bindings in a keymap only matters when it is used as +a menu, so this function is not useful for non-menu keymaps." + (unless after (setq after t)) + (or (keymapp keymap) + (signal 'wrong-type-argument (list 'keymapp keymap))) + (setq key + (if (<= (length key) 1) (aref key 0) + (setq keymap (lookup-key keymap + (apply 'vector + (butlast (mapcar 'identity key))))) + (aref key (1- (length key))))) + (let ((tail keymap) done inserted) + (while (and (not done) tail) + ;; Delete any earlier bindings for the same key. + (if (eq (car-safe (car (cdr tail))) key) + (setcdr tail (cdr (cdr tail)))) + ;; If we hit an included map, go down that one. + (if (keymapp (car tail)) (setq tail (car tail))) + ;; When we reach AFTER's binding, insert the new binding after. + ;; If we reach an inherited keymap, insert just before that. + ;; If we reach the end of this keymap, insert at the end. + (if (or (and (eq (car-safe (car tail)) after) + (not (eq after t))) + (eq (car (cdr tail)) 'keymap) + (null (cdr tail))) + (progn + ;; Stop the scan only if we find a parent keymap. + ;; Keep going past the inserted element + ;; so we can delete any duplications that come later. + (if (eq (car (cdr tail)) 'keymap) + (setq done t)) + ;; Don't insert more than once. + (or inserted + (setcdr tail (cons (cons key definition) (cdr tail)))) + (setq inserted t))) + (setq tail (cdr tail))))) + +(defun map-keymap-sorted (function keymap) + "Implement `map-keymap' with sorting. +Don't call this function; it is for internal use only." 
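+  ;; Callers should go through `map-keymap' instead; an illustrative
+  ;; sketch of that public interface on a small, sparse keymap:
+  ;;   (map-keymap (lambda (event binding)
+  ;;                 (message "%s -> %S"
+  ;;                          (key-description (vector event)) binding))
+  ;;               ctl-x-4-map)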
+ (let (list) + (map-keymap (lambda (a b) (push (cons a b) list)) + keymap) + (setq list (sort list + (lambda (a b) + (setq a (car a) b (car b)) + (if (integerp a) + (if (integerp b) (< a b) + t) + (if (integerp b) t + ;; string< also accepts symbols. + (string< a b)))))) + (dolist (p list) + (funcall function (car p) (cdr p))))) + +(defun keymap--menu-item-binding (val) + "Return the binding part of a menu-item." + (cond + ((not (consp val)) val) ;Not a menu-item. + ((eq 'menu-item (car val)) + (let* ((binding (nth 2 val)) + (plist (nthcdr 3 val)) + (filter (plist-get plist :filter))) + (if filter (funcall filter binding) + binding))) + ((and (consp (cdr val)) (stringp (cadr val))) + (cddr val)) + ((stringp (car val)) + (cdr val)) + (t val))) ;Not a menu-item either. + +(defun keymap--menu-item-with-binding (item binding) + "Build a menu-item like ITEM but with its binding changed to BINDING." + (cond + ((not (consp item)) binding) ;Not a menu-item. + ((eq 'menu-item (car item)) + (setq item (copy-sequence item)) + (let ((tail (nthcdr 2 item))) + (setcar tail binding) + ;; Remove any potential filter. + (if (plist-get (cdr tail) :filter) + (setcdr tail (plist-put (cdr tail) :filter nil)))) + item) + ((and (consp (cdr item)) (stringp (cadr item))) + (cons (car item) (cons (cadr item) binding))) + (t (cons (car item) binding)))) + +(defun keymap--merge-bindings (val1 val2) + "Merge bindings VAL1 and VAL2." + (let ((map1 (keymap--menu-item-binding val1)) + (map2 (keymap--menu-item-binding val2))) + (if (not (and (keymapp map1) (keymapp map2))) + ;; There's nothing to merge: val1 takes precedence. + val1 + (let ((map (list 'keymap map1 map2)) + (item (if (keymapp val1) (if (keymapp val2) nil val2) val1))) + (keymap--menu-item-with-binding item map))))) + +(defun keymap-canonicalize (map) + "Return a simpler equivalent keymap. +This resolves inheritance and redefinitions. The returned keymap +should behave identically to a copy of KEYMAP w.r.t `lookup-key' +and use in active keymaps and menus. +Subkeymaps may be modified but are not canonicalized." + ;; FIXME: Problem with the difference between a nil binding + ;; that hides a binding in an inherited map and a nil binding that's ignored + ;; to let some further binding visible. Currently a nil binding hides all. + ;; FIXME: we may want to carefully (re)order elements in case they're + ;; menu-entries. + (let ((bindings ()) + (ranges ()) + (prompt (keymap-prompt map))) + (while (keymapp map) + (setq map (map-keymap ;; -internal + (lambda (key item) + (if (consp key) + ;; Treat char-ranges specially. + (push (cons key item) ranges) + (push (cons key item) bindings))) + map))) + ;; Create the new map. + (setq map (funcall (if ranges 'make-keymap 'make-sparse-keymap) prompt)) + (dolist (binding ranges) + ;; Treat char-ranges specially. FIXME: need to merge as well. + (define-key map (vector (car binding)) (cdr binding))) + ;; Process the bindings starting from the end. + (dolist (binding (prog1 bindings (setq bindings ()))) + (let* ((key (car binding)) + (oldbind (assq key bindings))) + (push (if (not oldbind) + ;; The normal case: no duplicate bindings. + binding + ;; This is the second binding for this key. + (setq bindings (delq oldbind bindings)) + (cons key (keymap--merge-bindings (cdr binding) + (cdr oldbind)))) + bindings))) + (nconc map bindings))) + +(put 'keyboard-translate-table 'char-table-extra-slots 0) + +(defun keyboard-translate (from to) + "Translate character FROM to TO on the current terminal. 
+This function creates a `keyboard-translate-table' if necessary +and then modifies one entry in it." + (or (char-table-p keyboard-translate-table) + (setq keyboard-translate-table + (make-char-table 'keyboard-translate-table nil))) + (aset keyboard-translate-table from to)) + +;;;; Key binding commands. + +(defun global-set-key (key command) + "Give KEY a global binding as COMMAND. +COMMAND is the command definition to use; usually it is +a symbol naming an interactively-callable function. +KEY is a key sequence; noninteractively, it is a string or vector +of characters or event types, and non-ASCII characters with codes +above 127 (such as ISO Latin-1) can be included if you use a vector. + +Note that if KEY has a local binding in the current buffer, +that local binding will continue to shadow any global binding +that you make with this function." + (interactive "KSet key globally: \nCSet key %s to command: ") + (or (vectorp key) (stringp key) + (signal 'wrong-type-argument (list 'arrayp key))) + (define-key (current-global-map) key command)) + +(defun local-set-key (key command) + "Give KEY a local binding as COMMAND. +COMMAND is the command definition to use; usually it is +a symbol naming an interactively-callable function. +KEY is a key sequence; noninteractively, it is a string or vector +of characters or event types, and non-ASCII characters with codes +above 127 (such as ISO Latin-1) can be included if you use a vector. + +The binding goes in the current buffer's local map, which in most +cases is shared with all other buffers in the same major mode." + (interactive "KSet key locally: \nCSet key %s locally to command: ") + (let ((map (current-local-map))) + (or map + (use-local-map (setq map (make-sparse-keymap)))) + (or (vectorp key) (stringp key) + (signal 'wrong-type-argument (list 'arrayp key))) + (define-key map key command))) + +(defun global-unset-key (key) + "Remove global binding of KEY. +KEY is a string or vector representing a sequence of keystrokes." + (interactive "kUnset key globally: ") + (global-set-key key nil)) + +(defun local-unset-key (key) + "Remove local binding of KEY. +KEY is a string or vector representing a sequence of keystrokes." + (interactive "kUnset key locally: ") + (if (current-local-map) + (local-set-key key nil)) + nil) + +;;;; substitute-key-definition and its subroutines. + +(defvar key-substitution-in-progress nil + "Used internally by `substitute-key-definition'.") + +(defun substitute-key-definition (olddef newdef keymap &optional oldmap prefix) + "Replace OLDDEF with NEWDEF for any keys in KEYMAP now defined as OLDDEF. +In other words, OLDDEF is replaced with NEWDEF where ever it appears. +Alternatively, if optional fourth argument OLDMAP is specified, we redefine +in KEYMAP as NEWDEF those keys which are defined as OLDDEF in OLDMAP. + +If you don't specify OLDMAP, you can usually get the same results +in a cleaner way with command remapping, like this: + (define-key KEYMAP [remap OLDDEF] NEWDEF) +\n(fn OLDDEF NEWDEF KEYMAP &optional OLDMAP)" + ;; Don't document PREFIX in the doc string because we don't want to + ;; advertise it. It's meant for recursive calls only. Here's its + ;; meaning + + ;; If optional argument PREFIX is specified, it should be a key + ;; prefix, a string. Redefined bindings will then be bound to the + ;; original key, with PREFIX added at the front. 
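+  ;; Illustrative calls (`my-mode-map' and `my-kill-line' are hypothetical):
+  ;;   (substitute-key-definition 'kill-line 'my-kill-line
+  ;;                              my-mode-map (current-global-map))
+  ;; or, as suggested above, the usually simpler remapping form:
+  ;;   (define-key my-mode-map [remap kill-line] 'my-kill-line)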
+ (or prefix (setq prefix "")) + (let* ((scan (or oldmap keymap)) + (prefix1 (vconcat prefix [nil])) + (key-substitution-in-progress + (cons scan key-substitution-in-progress))) + ;; Scan OLDMAP, finding each char or event-symbol that + ;; has any definition, and act on it with hack-key. + (map-keymap + (lambda (char defn) + (aset prefix1 (length prefix) char) + (substitute-key-definition-key defn olddef newdef prefix1 keymap)) + scan))) + +(defun substitute-key-definition-key (defn olddef newdef prefix keymap) + (let (inner-def skipped menu-item) + ;; Find the actual command name within the binding. + (if (eq (car-safe defn) 'menu-item) + (setq menu-item defn defn (nth 2 defn)) + ;; Skip past menu-prompt. + (while (stringp (car-safe defn)) + (push (pop defn) skipped)) + ;; Skip past cached key-equivalence data for menu items. + (if (consp (car-safe defn)) + (setq defn (cdr defn)))) + (if (or (eq defn olddef) + ;; Compare with equal if definition is a key sequence. + ;; That is useful for operating on function-key-map. + (and (or (stringp defn) (vectorp defn)) + (equal defn olddef))) + (define-key keymap prefix + (if menu-item + (let ((copy (copy-sequence menu-item))) + (setcar (nthcdr 2 copy) newdef) + copy) + (nconc (nreverse skipped) newdef))) + ;; Look past a symbol that names a keymap. + (setq inner-def + (or (indirect-function defn t) defn)) + ;; For nested keymaps, we use `inner-def' rather than `defn' so as to + ;; avoid autoloading a keymap. This is mostly done to preserve the + ;; original non-autoloading behavior of pre-map-keymap times. + (if (and (keymapp inner-def) + ;; Avoid recursively scanning + ;; where KEYMAP does not have a submap. + (let ((elt (lookup-key keymap prefix))) + (or (null elt) (natnump elt) (keymapp elt))) + ;; Avoid recursively rescanning keymap being scanned. + (not (memq inner-def key-substitution-in-progress))) + ;; If this one isn't being scanned already, scan it now. + (substitute-key-definition olddef newdef keymap inner-def prefix))))) + + +;;;; The global keymap tree. + +;; global-map, esc-map, and ctl-x-map have their values set up in +;; keymap.c; we just give them docstrings here. + +(defvar global-map nil + "Default global keymap mapping Emacs keyboard input into commands. +The value is a keymap which is usually (but not necessarily) Emacs's +global map.") + +(defvar esc-map nil + "Default keymap for ESC (meta) commands. +The normal global definition of the character ESC indirects to this keymap.") + +(defvar ctl-x-map nil + "Default keymap for C-x commands. +The normal global definition of the character C-x indirects to this keymap.") + +(defvar ctl-x-4-map (make-sparse-keymap) + "Keymap for subcommands of C-x 4.") +(defalias 'ctl-x-4-prefix ctl-x-4-map) +(define-key ctl-x-map "4" 'ctl-x-4-prefix) + +(defvar ctl-x-5-map (make-sparse-keymap) + "Keymap for frame commands.") +(defalias 'ctl-x-5-prefix ctl-x-5-map) +(define-key ctl-x-map "5" 'ctl-x-5-prefix) + + +;;;; Event manipulation functions. + +(defconst listify-key-sequence-1 (logior 128 ?\M-\C-@)) + +(defun listify-key-sequence (key) + "Convert a key sequence to a list of events." + (if (vectorp key) + (append key nil) + (mapcar (function (lambda (c) + (if (> c 127) + (logxor c listify-key-sequence-1) + c))) + key))) + +(defun eventp (obj) + "True if the argument is an event object." 
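+  ;; Sample values (illustrative):
+  ;;   (eventp ?a)             ; => t, a plain character
+  ;;   (eventp 'f1)            ; => t, a function-key symbol
+  ;;   (eventp '(mouse-1 nil)) ; => t, a mouse-style event (symbol in the car)
+  ;;   (eventp "a")            ; => nil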
+ (when obj + (or (integerp obj) + (and (symbolp obj) obj (not (keywordp obj))) + (and (consp obj) (symbolp (car obj)))))) + +(defun event-modifiers (event) + "Return a list of symbols representing the modifier keys in event EVENT. +The elements of the list may include `meta', `control', +`shift', `hyper', `super', `alt', `click', `double', `triple', `drag', +and `down'. +EVENT may be an event or an event type. If EVENT is a symbol +that has never been used in an event that has been read as input +in the current Emacs session, then this function may fail to include +the `click' modifier." + (let ((type event)) + (if (listp type) + (setq type (car type))) + (if (symbolp type) + ;; Don't read event-symbol-elements directly since we're not + ;; sure the symbol has already been parsed. + (cdr (internal-event-symbol-parse-modifiers type)) + (let ((list nil) + (char (logand type (lognot (logior ?\M-\^@ ?\C-\^@ ?\S-\^@ + ?\H-\^@ ?\s-\^@ ?\A-\^@))))) + (if (not (zerop (logand type ?\M-\^@))) + (push 'meta list)) + (if (or (not (zerop (logand type ?\C-\^@))) + (< char 32)) + (push 'control list)) + (if (or (not (zerop (logand type ?\S-\^@))) + (/= char (downcase char))) + (push 'shift list)) + (or (zerop (logand type ?\H-\^@)) + (push 'hyper list)) + (or (zerop (logand type ?\s-\^@)) + (push 'super list)) + (or (zerop (logand type ?\A-\^@)) + (push 'alt list)) + list)))) + +(defun event-basic-type (event) + "Return the basic type of the given event (all modifiers removed). +The value is a printing character (not upper case) or a symbol. +EVENT may be an event or an event type. If EVENT is a symbol +that has never been used in an event that has been read as input +in the current Emacs session, then this function may return nil." + (if (consp event) + (setq event (car event))) + (if (symbolp event) + (car (get event 'event-symbol-elements)) + (let* ((base (logand event (1- ?\A-\^@))) + (uncontrolled (if (< base 32) (logior base 64) base))) + ;; There are some numbers that are invalid characters and + ;; cause `downcase' to get an error. + (condition-case () + (downcase uncontrolled) + (error uncontrolled))))) + +(defsubst mouse-movement-p (object) + "Return non-nil if OBJECT is a mouse movement event." + (eq (car-safe object) 'mouse-movement)) + +(defun mouse-event-p (object) + "Return non-nil if OBJECT is a mouse click event." + ;; is this really correct? maybe remove mouse-movement? + (memq (event-basic-type object) '(mouse-1 mouse-2 mouse-3 mouse-movement))) + +(defun event-start (event) + "Return the starting position of EVENT. +EVENT should be a mouse click, drag, or key press event. If +EVENT is nil, the value of `posn-at-point' is used instead. + +The following accessor functions are used to access the elements +of the position: + +`posn-window': The window the event is in. +`posn-area': A symbol identifying the area the event occurred in, +or nil if the event occurred in the text area. +`posn-point': The buffer position of the event. +`posn-x-y': The pixel-based coordinates of the event. +`posn-col-row': The estimated column and row corresponding to the +position of the event. +`posn-actual-col-row': The actual column and row corresponding to the +position of the event. +`posn-string': The string object of the event, which is either +nil or (STRING . POSITION)'. +`posn-image': The image object of the event, if any. +`posn-object': The image or string object of the event, if any. +`posn-timestamp': The time the event occurred, in milliseconds. 
+ +For more information, see Info node `(elisp)Click Events'." + (if (consp event) (nth 1 event) + (or (posn-at-point) + (list (selected-window) (point) '(0 . 0) 0)))) + +(defun event-end (event) + "Return the ending position of EVENT. +EVENT should be a click, drag, or key press event. + +See `event-start' for a description of the value returned." + (if (consp event) (nth (if (consp (nth 2 event)) 2 1) event) + (or (posn-at-point) + (list (selected-window) (point) '(0 . 0) 0)))) + +(defsubst event-click-count (event) + "Return the multi-click count of EVENT, a click or drag event. +The return value is a positive integer." + (if (and (consp event) (integerp (nth 2 event))) (nth 2 event) 1)) + +;;;; Extracting fields of the positions in an event. + +(defun posnp (obj) + "Return non-nil if OBJ appears to be a valid `posn' object specifying a window. +If OBJ is a valid `posn' object, but specifies a frame rather +than a window, return nil." + ;; FIXME: Correct the behavior of this function so that all valid + ;; `posn' objects are recognized, after updating other code that + ;; depends on its present behavior. + (and (windowp (car-safe obj)) + (atom (car-safe (setq obj (cdr obj)))) ;AREA-OR-POS. + (integerp (car-safe (car-safe (setq obj (cdr obj))))) ;XOFFSET. + (integerp (car-safe (cdr obj))))) ;TIMESTAMP. + +(defsubst posn-window (position) + "Return the window in POSITION. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions." + (nth 0 position)) + +(defsubst posn-area (position) + "Return the window area recorded in POSITION, or nil for the text area. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions." + (let ((area (if (consp (nth 1 position)) + (car (nth 1 position)) + (nth 1 position)))) + (and (symbolp area) area))) + +(defun posn-point (position) + "Return the buffer location in POSITION. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions. +Returns nil if POSITION does not correspond to any buffer location (e.g. +a click on a scroll bar)." + (or (nth 5 position) + (let ((pt (nth 1 position))) + (or (car-safe pt) + ;; Apparently this can also be `vertical-scroll-bar' (bug#13979). + (if (integerp pt) pt))))) + +(defun posn-set-point (position) + "Move point to POSITION. +Select the corresponding window as well." + (if (not (windowp (posn-window position))) + (error "Position not in text area of window")) + (select-window (posn-window position)) + (if (numberp (posn-point position)) + (goto-char (posn-point position)))) + +(defsubst posn-x-y (position) + "Return the x and y coordinates in POSITION. +The return value has the form (X . Y), where X and Y are given in +pixels. POSITION should be a list of the form returned by +`event-start' and `event-end'." + (nth 2 position)) + +(declare-function scroll-bar-scale "scroll-bar" (num-denom whole)) + +(defun posn-col-row (position) + "Return the nominal column and row in POSITION, measured in characters. +The column and row values are approximations calculated from the x +and y coordinates in POSITION and the frame's default character width +and default line height, including spacing. +For a scroll-bar event, the result column is 0, and the row +corresponds to the vertical position of the click in the scroll bar. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions." 
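+  ;; Typical use from a mouse command (sketch):
+  ;;   (let ((col-row (posn-col-row (event-start last-input-event))))
+  ;;     (message "Clicked near column %d, row %d"
+  ;;              (car col-row) (cdr col-row)))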
+ (let* ((pair (posn-x-y position)) + (frame-or-window (posn-window position)) + (frame (if (framep frame-or-window) + frame-or-window + (window-frame frame-or-window))) + (window (when (windowp frame-or-window) frame-or-window)) + (area (posn-area position))) + (cond + ((null frame-or-window) + '(0 . 0)) + ((eq area 'vertical-scroll-bar) + (cons 0 (scroll-bar-scale pair (1- (window-height window))))) + ((eq area 'horizontal-scroll-bar) + (cons (scroll-bar-scale pair (window-width window)) 0)) + (t + ;; FIXME: This should take line-spacing properties on + ;; newlines into account. + (let* ((spacing (when (display-graphic-p frame) + (or (with-current-buffer + (window-buffer (frame-selected-window frame)) + line-spacing) + (frame-parameter frame 'line-spacing))))) + (cond ((floatp spacing) + (setq spacing (truncate (* spacing + (frame-char-height frame))))) + ((null spacing) + (setq spacing 0))) + (cons (/ (car pair) (frame-char-width frame)) + (/ (cdr pair) (+ (frame-char-height frame) spacing)))))))) + +(defun posn-actual-col-row (position) + "Return the window row number in POSITION and character number in that row. + +Return nil if POSITION does not contain the actual position; in that case +\`posn-col-row' can be used to get approximate values. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions. + +This function does not account for the width on display, like the +number of visual columns taken by a TAB or image. If you need +the coordinates of POSITION in character units, you should use +\`posn-col-row', not this function." + (nth 6 position)) + +(defsubst posn-timestamp (position) + "Return the timestamp of POSITION. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions." + (nth 3 position)) + +(defun posn-string (position) + "Return the string object of POSITION. +Value is a cons (STRING . STRING-POS), or nil if not a string. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions." + (let ((x (nth 4 position))) + ;; Apparently this can also be `handle' or `below-handle' (bug#13979). + (when (consp x) x))) + +(defsubst posn-image (position) + "Return the image object of POSITION. +Value is a list (image ...), or nil if not an image. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions." + (nth 7 position)) + +(defsubst posn-object (position) + "Return the object (image or string) of POSITION. +Value is a list (image ...) for an image object, a cons cell +\(STRING . STRING-POS) for a string object, and nil for a buffer position. +POSITION should be a list of the form returned by the `event-start' +and `event-end' functions." + (or (posn-image position) (posn-string position))) + +(defsubst posn-object-x-y (position) + "Return the x and y coordinates relative to the object of POSITION. +The return value has the form (DX . DY), where DX and DY are +given in pixels. POSITION should be a list of the form returned +by `event-start' and `event-end'." + (nth 8 position)) + +(defsubst posn-object-width-height (position) + "Return the pixel width and height of the object of POSITION. +The return value has the form (WIDTH . HEIGHT). POSITION should +be a list of the form returned by `event-start' and `event-end'." + (nth 9 position)) + + +;;;; Obsolescent names for functions. 
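+;; The aliases below still work, but the byte compiler flags them as
+;; obsolete; for instance (illustrative):
+;;   (string-to-int "42")     ; => 42, compiles with an obsolescence warning
+;;   (string-to-number "42")  ; => 42, the current spelling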
+ +(define-obsolete-function-alias 'window-dot 'window-point "22.1") +(define-obsolete-function-alias 'set-window-dot 'set-window-point "22.1") +(define-obsolete-function-alias 'read-input 'read-string "22.1") +(define-obsolete-function-alias 'show-buffer 'set-window-buffer "22.1") +(define-obsolete-function-alias 'eval-current-buffer 'eval-buffer "22.1") +(define-obsolete-function-alias 'string-to-int 'string-to-number "22.1") + +(make-obsolete 'forward-point "use (+ (point) N) instead." "23.1") +(make-obsolete 'buffer-has-markers-at nil "24.3") + +(defun insert-string (&rest args) + "Mocklisp-compatibility insert function. +Like the function `insert' except that any argument that is a number +is converted into a string by expressing it in decimal." + (declare (obsolete insert "22.1")) + (dolist (el args) + (insert (if (integerp el) (number-to-string el) el)))) + +(defun makehash (&optional test) + (declare (obsolete make-hash-table "22.1")) + (make-hash-table :test (or test 'eql))) + +(defun log10 (x) + "Return (log X 10), the log base 10 of X." + (declare (obsolete log "24.4")) + (log x 10)) + +;; These are used by VM and some old programs +(defalias 'focus-frame 'ignore "") +(make-obsolete 'focus-frame "it does nothing." "22.1") +(defalias 'unfocus-frame 'ignore "") +(make-obsolete 'unfocus-frame "it does nothing." "22.1") +(make-obsolete 'make-variable-frame-local + "explicitly check for a frame-parameter instead." "22.2") +(set-advertised-calling-convention + 'all-completions '(string collection &optional predicate) "23.1") +(set-advertised-calling-convention 'unintern '(name obarray) "23.3") +(set-advertised-calling-convention 'indirect-function '(object) "25.1") +(set-advertised-calling-convention 'redirect-frame-focus '(frame focus-frame) "24.3") +(set-advertised-calling-convention 'decode-char '(ch charset) "21.4") +(set-advertised-calling-convention 'encode-char '(ch charset) "21.4") + +;;;; Obsolescence declarations for variables, and aliases. + +;; Special "default-FOO" variables which contain the default value of +;; the "FOO" variable are nasty. Their implementation is brittle, and +;; slows down several unrelated variable operations; furthermore, they +;; can lead to really odd behavior if you decide to make them +;; buffer-local. 
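+;; The supported idiom is the plain variable plus `setq-default', e.g.
+;; (illustrative):
+;;   (setq-default tab-width 8)   ; rather than (setq default-tab-width 8)
+;;   (default-value 'tab-width)   ; => 8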
+ +;; Not used at all in Emacs, last time I checked: +(make-obsolete-variable 'default-mode-line-format 'mode-line-format "23.2") +(make-obsolete-variable 'default-header-line-format 'header-line-format "23.2") +(make-obsolete-variable 'default-line-spacing 'line-spacing "23.2") +(make-obsolete-variable 'default-abbrev-mode 'abbrev-mode "23.2") +(make-obsolete-variable 'default-ctl-arrow 'ctl-arrow "23.2") +(make-obsolete-variable 'default-truncate-lines 'truncate-lines "23.2") +(make-obsolete-variable 'default-left-margin 'left-margin "23.2") +(make-obsolete-variable 'default-tab-width 'tab-width "23.2") +(make-obsolete-variable 'default-case-fold-search 'case-fold-search "23.2") +(make-obsolete-variable 'default-left-margin-width 'left-margin-width "23.2") +(make-obsolete-variable 'default-right-margin-width 'right-margin-width "23.2") +(make-obsolete-variable 'default-left-fringe-width 'left-fringe-width "23.2") +(make-obsolete-variable 'default-right-fringe-width 'right-fringe-width "23.2") +(make-obsolete-variable 'default-fringes-outside-margins 'fringes-outside-margins "23.2") +(make-obsolete-variable 'default-scroll-bar-width 'scroll-bar-width "23.2") +(make-obsolete-variable 'default-vertical-scroll-bar 'vertical-scroll-bar "23.2") +(make-obsolete-variable 'default-indicate-empty-lines 'indicate-empty-lines "23.2") +(make-obsolete-variable 'default-indicate-buffer-boundaries 'indicate-buffer-boundaries "23.2") +(make-obsolete-variable 'default-fringe-indicator-alist 'fringe-indicator-alist "23.2") +(make-obsolete-variable 'default-fringe-cursor-alist 'fringe-cursor-alist "23.2") +(make-obsolete-variable 'default-scroll-up-aggressively 'scroll-up-aggressively "23.2") +(make-obsolete-variable 'default-scroll-down-aggressively 'scroll-down-aggressively "23.2") +(make-obsolete-variable 'default-fill-column 'fill-column "23.2") +(make-obsolete-variable 'default-cursor-type 'cursor-type "23.2") +(make-obsolete-variable 'default-cursor-in-non-selected-windows 'cursor-in-non-selected-windows "23.2") +(make-obsolete-variable 'default-buffer-file-coding-system 'buffer-file-coding-system "23.2") +(make-obsolete-variable 'default-major-mode 'major-mode "23.2") +(make-obsolete-variable 'default-enable-multibyte-characters + "use enable-multibyte-characters or set-buffer-multibyte instead" "23.2") + +(make-obsolete-variable 'define-key-rebound-commands nil "23.2") +(make-obsolete-variable 'redisplay-end-trigger-functions 'jit-lock-register "23.1") +(make-obsolete-variable 'deferred-action-list 'post-command-hook "24.1") +(make-obsolete-variable 'deferred-action-function 'post-command-hook "24.1") +(make-obsolete-variable 'redisplay-dont-pause nil "24.5") +(make-obsolete 'window-redisplay-end-trigger nil "23.1") +(make-obsolete 'set-window-redisplay-end-trigger nil "23.1") + +(make-obsolete 'process-filter-multibyte-p nil "23.1") +(make-obsolete 'set-process-filter-multibyte nil "23.1") + +;; Lisp manual only updated in 22.1. +(define-obsolete-variable-alias 'executing-macro 'executing-kbd-macro + "before 19.34") + +(define-obsolete-variable-alias 'x-lost-selection-hooks + 'x-lost-selection-functions "22.1") +(define-obsolete-variable-alias 'x-sent-selection-hooks + 'x-sent-selection-functions "22.1") + +;; This was introduced in 21.4 for pre-unicode unification. That +;; usage was rendered obsolete in 23.1 which uses Unicode internally. +;; Other uses are possible, so this variable is not _really_ obsolete, +;; but Stefan insists to mark it so. 
+(make-obsolete-variable 'translation-table-for-input nil "23.1") + +(defvaralias 'messages-buffer-max-lines 'message-log-max) + +;;;; Alternate names for functions - these are not being phased out. + +(defalias 'send-string 'process-send-string) +(defalias 'send-region 'process-send-region) +(defalias 'string= 'string-equal) +(defalias 'string< 'string-lessp) +(defalias 'move-marker 'set-marker) +(defalias 'rplaca 'setcar) +(defalias 'rplacd 'setcdr) +(defalias 'beep 'ding) ;preserve lingual purity +(defalias 'indent-to-column 'indent-to) +(defalias 'backward-delete-char 'delete-backward-char) +(defalias 'search-forward-regexp (symbol-function 're-search-forward)) +(defalias 'search-backward-regexp (symbol-function 're-search-backward)) +(defalias 'int-to-string 'number-to-string) +(defalias 'store-match-data 'set-match-data) +(defalias 'chmod 'set-file-modes) +(defalias 'mkdir 'make-directory) +;; These are the XEmacs names: +(defalias 'point-at-eol 'line-end-position) +(defalias 'point-at-bol 'line-beginning-position) + +(defalias 'user-original-login-name 'user-login-name) + + +;;;; Hook manipulation functions. + +(defun add-hook (hook function &optional append local) + "Add to the value of HOOK the function FUNCTION. +FUNCTION is not added if already present. +FUNCTION is added (if necessary) at the beginning of the hook list +unless the optional argument APPEND is non-nil, in which case +FUNCTION is added at the end. + +The optional fourth argument, LOCAL, if non-nil, says to modify +the hook's buffer-local value rather than its global value. +This makes the hook buffer-local, and it makes t a member of the +buffer-local value. That acts as a flag to run the hook +functions of the global value as well as in the local value. + +HOOK should be a symbol, and FUNCTION may be any valid function. If +HOOK is void, it is first set to nil. If HOOK's value is a single +function, it is changed to a list of functions." + (or (boundp hook) (set hook nil)) + (or (default-boundp hook) (set-default hook nil)) + (if local (unless (local-variable-if-set-p hook) + (set (make-local-variable hook) (list t))) + ;; Detect the case where make-local-variable was used on a hook + ;; and do what we used to do. + (unless (and (consp (symbol-value hook)) (memq t (symbol-value hook))) + (setq local t))) + (let ((hook-value (if local (symbol-value hook) (default-value hook)))) + ;; If the hook value is a single function, turn it into a list. + (when (or (not (listp hook-value)) (functionp hook-value)) + (setq hook-value (list hook-value))) + ;; Do the actual addition if necessary + (unless (member function hook-value) + (when (stringp function) + (setq function (purecopy function))) + (setq hook-value + (if append + (append hook-value (list function)) + (cons function hook-value)))) + ;; Set the actual variable + (if local + (progn + ;; If HOOK isn't a permanent local, + ;; but FUNCTION wants to survive a change of modes, + ;; mark HOOK as partially permanent. + (and (symbolp function) + (get function 'permanent-local-hook) + (not (get hook 'permanent-local)) + (put hook 'permanent-local 'permanent-local-hook)) + (set hook hook-value)) + (set-default hook hook-value)))) + +(defun remove-hook (hook function &optional local) + "Remove from the value of HOOK the function FUNCTION. +HOOK should be a symbol, and FUNCTION may be any valid function. If +FUNCTION isn't the value of HOOK, or, if FUNCTION doesn't appear in the +list of hooks to run in HOOK, then nothing is done. See `add-hook'. 
+ +The optional third argument, LOCAL, if non-nil, says to modify +the hook's buffer-local value rather than its default value." + (or (boundp hook) (set hook nil)) + (or (default-boundp hook) (set-default hook nil)) + ;; Do nothing if LOCAL is t but this hook has no local binding. + (unless (and local (not (local-variable-p hook))) + ;; Detect the case where make-local-variable was used on a hook + ;; and do what we used to do. + (when (and (local-variable-p hook) + (not (and (consp (symbol-value hook)) + (memq t (symbol-value hook))))) + (setq local t)) + (let ((hook-value (if local (symbol-value hook) (default-value hook)))) + ;; Remove the function, for both the list and the non-list cases. + (if (or (not (listp hook-value)) (eq (car hook-value) 'lambda)) + (if (equal hook-value function) (setq hook-value nil)) + (setq hook-value (delete function (copy-sequence hook-value)))) + ;; If the function is on the global hook, we need to shadow it locally + ;;(when (and local (member function (default-value hook)) + ;; (not (member (cons 'not function) hook-value))) + ;; (push (cons 'not function) hook-value)) + ;; Set the actual variable + (if (not local) + (set-default hook hook-value) + (if (equal hook-value '(t)) + (kill-local-variable hook) + (set hook hook-value)))))) + +(defmacro letrec (binders &rest body) + "Bind variables according to BINDERS then eval BODY. +The value of the last form in BODY is returned. +Each element of BINDERS is a list (SYMBOL VALUEFORM) which binds +SYMBOL to the value of VALUEFORM. +All symbols are bound before the VALUEFORMs are evalled." + ;; Only useful in lexical-binding mode. + ;; As a special-form, we could implement it more efficiently (and cleanly, + ;; making the vars actually unbound during evaluation of the binders). + (declare (debug let) (indent 1)) + `(let ,(mapcar #'car binders) + ,@(mapcar (lambda (binder) `(setq ,@binder)) binders) + ,@body)) + +(defmacro with-wrapper-hook (hook args &rest body) + "Run BODY, using wrapper functions from HOOK with additional ARGS. +HOOK is an abnormal hook. Each hook function in HOOK \"wraps\" +around the preceding ones, like a set of nested `around' advices. + +Each hook function should accept an argument list consisting of a +function FUN, followed by the additional arguments in ARGS. + +The first hook function in HOOK is passed a FUN that, if it is called +with arguments ARGS, performs BODY (i.e., the default operation). +The FUN passed to each successive hook function is defined based +on the preceding hook functions; if called with arguments ARGS, +it does what the `with-wrapper-hook' call would do if the +preceding hook functions were the only ones present in HOOK. + +Each hook function may call its FUN argument as many times as it wishes, +including never. In that case, such a hook function acts to replace +the default definition altogether, and any preceding hook functions. +Of course, a subsequent hook function may do the same thing. + +Each hook function definition is used to construct the FUN passed +to the next hook function, if any. The last (or \"outermost\") +FUN is then called once." + (declare (indent 2) (debug (form sexp body)) + (obsolete "use a -function variable modified by `add-function'." 
+ "24.4")) + ;; We need those two gensyms because CL's lexical scoping is not available + ;; for function arguments :-( + (let ((funs (make-symbol "funs")) + (global (make-symbol "global")) + (argssym (make-symbol "args")) + (runrestofhook (make-symbol "runrestofhook"))) + ;; Since the hook is a wrapper, the loop has to be done via + ;; recursion: a given hook function will call its parameter in order to + ;; continue looping. + `(letrec ((,runrestofhook + (lambda (,funs ,global ,argssym) + ;; `funs' holds the functions left on the hook and `global' + ;; holds the functions left on the global part of the hook + ;; (in case the hook is local). + (if (consp ,funs) + (if (eq t (car ,funs)) + (funcall ,runrestofhook + (append ,global (cdr ,funs)) nil ,argssym) + (apply (car ,funs) + (apply-partially + (lambda (,funs ,global &rest ,argssym) + (funcall ,runrestofhook ,funs ,global ,argssym)) + (cdr ,funs) ,global) + ,argssym)) + ;; Once there are no more functions on the hook, run + ;; the original body. + (apply (lambda ,args ,@body) ,argssym))))) + (funcall ,runrestofhook ,hook + ;; The global part of the hook, if any. + ,(if (symbolp hook) + `(if (local-variable-p ',hook) + (default-value ',hook))) + (list ,@args))))) + +(defun add-to-list (list-var element &optional append compare-fn) + "Add ELEMENT to the value of LIST-VAR if it isn't there yet. +The test for presence of ELEMENT is done with `equal', or with +COMPARE-FN if that's non-nil. +If ELEMENT is added, it is added at the beginning of the list, +unless the optional argument APPEND is non-nil, in which case +ELEMENT is added at the end. + +The return value is the new value of LIST-VAR. + +This is handy to add some elements to configuration variables, +but please do not abuse it in Elisp code, where you are usually +better off using `push' or `cl-pushnew'. + +If you want to use `add-to-list' on a variable that is not +defined until a certain package is loaded, you should put the +call to `add-to-list' into a hook function that will be run only +after loading the package. `eval-after-load' provides one way to +do this. In some cases other hooks, such as major mode hooks, +can do the job." + (declare + (compiler-macro + (lambda (exp) + ;; FIXME: Something like this could be used for `set' as well. + (if (or (not (eq 'quote (car-safe list-var))) + (special-variable-p (cadr list-var)) + (not (macroexp-const-p append))) + exp + (let* ((sym (cadr list-var)) + (append (eval append)) + (msg (format "`add-to-list' can't use lexical var `%s'; use `push' or `cl-pushnew'" + sym)) + ;; Big ugly hack so we only output a warning during + ;; byte-compilation, and so we can use + ;; byte-compile-not-lexical-var-p to silence the warning + ;; when a defvar has been seen but not yet executed. + (warnfun (lambda () + ;; FIXME: We should also emit a warning for let-bound + ;; variables with dynamic binding. + (when (assq sym byte-compile--lexical-environment) + (byte-compile-log-warning msg t :error)))) + (code + (macroexp-let2 macroexp-copyable-p x element + `(if ,(if compare-fn + (progn + (require 'cl-lib) + `(cl-member ,x ,sym :test ,compare-fn)) + ;; For bootstrapping reasons, don't rely on + ;; cl--compiler-macro-member for the base case. 
+ `(member ,x ,sym)) + ,sym + ,(if append + `(setq ,sym (append ,sym (list ,x))) + `(push ,x ,sym)))))) + (if (not (macroexp--compiling-p)) + code + `(progn + (macroexp--funcall-if-compiled ',warnfun) + ,code))))))) + (if (cond + ((null compare-fn) + (member element (symbol-value list-var))) + ((eq compare-fn 'eq) + (memq element (symbol-value list-var))) + ((eq compare-fn 'eql) + (memql element (symbol-value list-var))) + (t + (let ((lst (symbol-value list-var))) + (while (and lst + (not (funcall compare-fn element (car lst)))) + (setq lst (cdr lst))) + lst))) + (symbol-value list-var) + (set list-var + (if append + (append (symbol-value list-var) (list element)) + (cons element (symbol-value list-var)))))) + + +(defun add-to-ordered-list (list-var element &optional order) + "Add ELEMENT to the value of LIST-VAR if it isn't there yet. +The test for presence of ELEMENT is done with `eq'. + +The resulting list is reordered so that the elements are in the +order given by each element's numeric list order. Elements +without a numeric list order are placed at the end of the list. + +If the third optional argument ORDER is a number (integer or +float), set the element's list order to the given value. If +ORDER is nil or omitted, do not change the numeric order of +ELEMENT. If ORDER has any other value, remove the numeric order +of ELEMENT if it has one. + +The list order for each element is stored in LIST-VAR's +`list-order' property. + +The return value is the new value of LIST-VAR." + (let ((ordering (get list-var 'list-order))) + (unless ordering + (put list-var 'list-order + (setq ordering (make-hash-table :weakness 'key :test 'eq)))) + (when order + (puthash element (and (numberp order) order) ordering)) + (unless (memq element (symbol-value list-var)) + (set list-var (cons element (symbol-value list-var)))) + (set list-var (sort (symbol-value list-var) + (lambda (a b) + (let ((oa (gethash a ordering)) + (ob (gethash b ordering))) + (if (and oa ob) + (< oa ob) + oa))))))) + +(defun add-to-history (history-var newelt &optional maxelt keep-all) + "Add NEWELT to the history list stored in the variable HISTORY-VAR. +Return the new history list. +If MAXELT is non-nil, it specifies the maximum length of the history. +Otherwise, the maximum history length is the value of the `history-length' +property on symbol HISTORY-VAR, if set, or the value of the `history-length' +variable. +Remove duplicates of NEWELT if `history-delete-duplicates' is non-nil. +If optional fourth arg KEEP-ALL is non-nil, add NEWELT to history even +if it is empty or a duplicate." + (unless maxelt + (setq maxelt (or (get history-var 'history-length) + history-length))) + (let ((history (symbol-value history-var)) + tail) + (when (and (listp history) + (or keep-all + (not (stringp newelt)) + (> (length newelt) 0)) + (or keep-all + (not (equal (car history) newelt)))) + (if history-delete-duplicates + (setq history (delete newelt history))) + (setq history (cons newelt history)) + (when (integerp maxelt) + (if (= 0 maxelt) + (setq history nil) + (setq tail (nthcdr (1- maxelt) history)) + (when (consp tail) + (setcdr tail nil))))) + (set history-var history))) + + +;;;; Mode hooks. 
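+;; This is the machinery behind `define-derived-mode': the generated mode
+;; command wraps its body in `delay-mode-hooks' and then calls
+;; `run-mode-hooks' exactly once, roughly like this sketch (`my-mode' and
+;; `my-mode-hook' are hypothetical):
+;;   (defun my-mode ()
+;;     (interactive)
+;;     (delay-mode-hooks
+;;       (text-mode)                      ; parent mode; its hook is delayed
+;;       (setq major-mode 'my-mode
+;;             mode-name "My"))
+;;     (run-mode-hooks 'my-mode-hook))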
+ +(defvar delay-mode-hooks nil + "If non-nil, `run-mode-hooks' should delay running the hooks.") +(defvar delayed-mode-hooks nil + "List of delayed mode hooks waiting to be run.") +(make-variable-buffer-local 'delayed-mode-hooks) +(put 'delay-mode-hooks 'permanent-local t) + +(defvar change-major-mode-after-body-hook nil + "Normal hook run in major mode functions, before the mode hooks.") + +(defvar after-change-major-mode-hook nil + "Normal hook run at the very end of major mode functions.") + +(defun run-mode-hooks (&rest hooks) + "Run mode hooks `delayed-mode-hooks' and HOOKS, or delay HOOKS. +If the variable `delay-mode-hooks' is non-nil, does not run any hooks, +just adds the HOOKS to the list `delayed-mode-hooks'. +Otherwise, runs hooks in the sequence: `change-major-mode-after-body-hook', +`delayed-mode-hooks' (in reverse order), HOOKS, and finally +`after-change-major-mode-hook'. Major mode functions should use +this instead of `run-hooks' when running their FOO-mode-hook." + (if delay-mode-hooks + ;; Delaying case. + (dolist (hook hooks) + (push hook delayed-mode-hooks)) + ;; Normal case, just run the hook as before plus any delayed hooks. + (setq hooks (nconc (nreverse delayed-mode-hooks) hooks)) + (setq delayed-mode-hooks nil) + (apply 'run-hooks (cons 'change-major-mode-after-body-hook hooks)) + (run-hooks 'after-change-major-mode-hook))) + +(defmacro delay-mode-hooks (&rest body) + "Execute BODY, but delay any `run-mode-hooks'. +These hooks will be executed by the first following call to +`run-mode-hooks' that occurs outside any `delayed-mode-hooks' form. +Only affects hooks run in the current buffer." + (declare (debug t) (indent 0)) + `(progn + (make-local-variable 'delay-mode-hooks) + (let ((delay-mode-hooks t)) + ,@body))) + +;; PUBLIC: find if the current mode derives from another. + +(defun derived-mode-p (&rest modes) + "Non-nil if the current major mode is derived from one of MODES. +Uses the `derived-mode-parent' property of the symbol to trace backwards." + (let ((parent major-mode)) + (while (and (not (memq parent modes)) + (setq parent (get parent 'derived-mode-parent)))) + parent)) + +;;;; Minor modes. + +;; If a minor mode is not defined with define-minor-mode, +;; add it here explicitly. +;; isearch-mode is deliberately excluded, since you should +;; not call it yourself. +(defvar minor-mode-list '(auto-save-mode auto-fill-mode abbrev-mode + overwrite-mode view-mode + hs-minor-mode) + "List of all minor mode functions.") + +(defun add-minor-mode (toggle name &optional keymap after toggle-fun) + "Register a new minor mode. + +This is an XEmacs-compatibility function. Use `define-minor-mode' instead. + +TOGGLE is a symbol which is the name of a buffer-local variable that +is toggled on or off to say whether the minor mode is active or not. + +NAME specifies what will appear in the mode line when the minor mode +is active. NAME should be either a string starting with a space, or a +symbol whose value is such a string. + +Optional KEYMAP is the keymap for the minor mode that will be added +to `minor-mode-map-alist'. + +Optional AFTER specifies that TOGGLE should be added after AFTER +in `minor-mode-alist'. + +Optional TOGGLE-FUN is an interactive function to toggle the mode. +It defaults to (and should by convention be) TOGGLE. + +If TOGGLE has a non-nil `:included' property, an entry for the mode is +included in the mode-line minor mode menu. +If TOGGLE has a `:menu-tag', that is used for the menu item's label." 
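+  ;; XEmacs-style registration, for illustration only (`my-minor-mode' and
+  ;; `my-minor-mode-map' are hypothetical):
+  ;;   (add-minor-mode 'my-minor-mode " My" my-minor-mode-map)
+  ;; Native Emacs code should reach for `define-minor-mode' instead, which
+  ;; performs this bookkeeping itself.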
+ (unless (memq toggle minor-mode-list) + (push toggle minor-mode-list)) + + (unless toggle-fun (setq toggle-fun toggle)) + (unless (eq toggle-fun toggle) + (put toggle :minor-mode-function toggle-fun)) + ;; Add the name to the minor-mode-alist. + (when name + (let ((existing (assq toggle minor-mode-alist))) + (if existing + (setcdr existing (list name)) + (let ((tail minor-mode-alist) found) + (while (and tail (not found)) + (if (eq after (caar tail)) + (setq found tail) + (setq tail (cdr tail)))) + (if found + (let ((rest (cdr found))) + (setcdr found nil) + (nconc found (list (list toggle name)) rest)) + (push (list toggle name) minor-mode-alist)))))) + ;; Add the toggle to the minor-modes menu if requested. + (when (get toggle :included) + (define-key mode-line-mode-menu + (vector toggle) + (list 'menu-item + (concat + (or (get toggle :menu-tag) + (if (stringp name) name (symbol-name toggle))) + (let ((mode-name (if (symbolp name) (symbol-value name)))) + (if (and (stringp mode-name) (string-match "[^ ]+" mode-name)) + (concat " (" (match-string 0 mode-name) ")")))) + toggle-fun + :button (cons :toggle toggle)))) + + ;; Add the map to the minor-mode-map-alist. + (when keymap + (let ((existing (assq toggle minor-mode-map-alist))) + (if existing + (setcdr existing keymap) + (let ((tail minor-mode-map-alist) found) + (while (and tail (not found)) + (if (eq after (caar tail)) + (setq found tail) + (setq tail (cdr tail)))) + (if found + (let ((rest (cdr found))) + (setcdr found nil) + (nconc found (list (cons toggle keymap)) rest)) + (push (cons toggle keymap) minor-mode-map-alist))))))) + +;;;; Load history + +(defsubst autoloadp (object) + "Non-nil if OBJECT is an autoload." + (eq 'autoload (car-safe object))) + +;; (defun autoload-type (object) +;; "Returns the type of OBJECT or `function' or `command' if the type is nil. +;; OBJECT should be an autoload object." +;; (when (autoloadp object) +;; (let ((type (nth 3 object))) +;; (cond ((null type) (if (nth 2 object) 'command 'function)) +;; ((eq 'keymap t) 'macro) +;; (type))))) + +;; (defalias 'autoload-file #'cadr +;; "Return the name of the file from which AUTOLOAD will be loaded. +;; \n\(fn AUTOLOAD)") + +(defun symbol-file (symbol &optional type) + "Return the name of the file that defined SYMBOL. +The value is normally an absolute file name. It can also be nil, +if the definition is not associated with any file. If SYMBOL +specifies an autoloaded function, the value can be a relative +file name without extension. + +If TYPE is nil, then any kind of definition is acceptable. If +TYPE is `defun', `defvar', or `defface', that specifies function +definition, variable definition, or face definition only." + (if (and (or (null type) (eq type 'defun)) + (symbolp symbol) + (autoloadp (symbol-function symbol))) + (nth 1 (symbol-function symbol)) + (let ((files load-history) + file) + (while files + (if (if type + (if (eq type 'defvar) + ;; Variables are present just as their names. + (member symbol (cdr (car files))) + ;; Other types are represented as (TYPE . NAME). + (member (cons type symbol) (cdr (car files)))) + ;; We accept all types, so look for variable def + ;; and then for any other kind. + (or (member symbol (cdr (car files))) + (rassq symbol (cdr (car files))))) + (setq file (car (car files)) files nil)) + (setq files (cdr files))) + file))) + +(defun locate-library (library &optional nosuffix path interactive-call) + "Show the precise file name of Emacs library LIBRARY. 
+LIBRARY should be a relative file name of the library, a string. +It can omit the suffix (a.k.a. file-name extension) if NOSUFFIX is +nil (which is the default, see below). +This command searches the directories in `load-path' like `\\[load-library]' +to find the file that `\\[load-library] RET LIBRARY RET' would load. +Optional second arg NOSUFFIX non-nil means don't add suffixes `load-suffixes' +to the specified name LIBRARY. + +If the optional third arg PATH is specified, that list of directories +is used instead of `load-path'. + +When called from a program, the file name is normally returned as a +string. When run interactively, the argument INTERACTIVE-CALL is t, +and the file name is displayed in the echo area." + (interactive (list (completing-read "Locate library: " + (apply-partially + 'locate-file-completion-table + load-path (get-load-suffixes))) + nil nil + t)) + (let ((file (locate-file library + (or path load-path) + (append (unless nosuffix (get-load-suffixes)) + load-file-rep-suffixes)))) + (if interactive-call + (if file + (message "Library is file %s" (abbreviate-file-name file)) + (message "No library %s in search path" library))) + file)) + + +;;;; Process stuff. + +(defun process-lines (program &rest args) + "Execute PROGRAM with ARGS, returning its output as a list of lines. +Signal an error if the program returns with a non-zero exit status." + (with-temp-buffer + (let ((status (apply 'call-process program nil (current-buffer) nil args))) + (unless (eq status 0) + (error "%s exited with status %s" program status)) + (goto-char (point-min)) + (let (lines) + (while (not (eobp)) + (setq lines (cons (buffer-substring-no-properties + (line-beginning-position) + (line-end-position)) + lines)) + (forward-line 1)) + (nreverse lines))))) + +(defun process-live-p (process) + "Returns non-nil if PROCESS is alive. +A process is considered alive if its status is `run', `open', +`listen', `connect' or `stop'. Value is nil if PROCESS is not a +process." + (and (processp process) + (memq (process-status process) + '(run open listen connect stop)))) + +;; compatibility + +(make-obsolete + 'process-kill-without-query + "use `process-query-on-exit-flag' or `set-process-query-on-exit-flag'." + "22.1") +(defun process-kill-without-query (process &optional _flag) + "Say no query needed if PROCESS is running when Emacs is exited. +Optional second argument if non-nil says to require a query. +Value is t if a query was formerly required." + (let ((old (process-query-on-exit-flag process))) + (set-process-query-on-exit-flag process nil) + old)) + +(defun process-kill-buffer-query-function () + "Ask before killing a buffer that has a running process." + (let ((process (get-buffer-process (current-buffer)))) + (or (not process) + (not (memq (process-status process) '(run stop open listen))) + (not (process-query-on-exit-flag process)) + (yes-or-no-p + (format "Buffer %S has a running process; kill it? " + (buffer-name (current-buffer))))))) + +(add-hook 'kill-buffer-query-functions 'process-kill-buffer-query-function) + +;; process plist management + +(defun process-get (process propname) + "Return the value of PROCESS' PROPNAME property. +This is the last value stored with `(process-put PROCESS PROPNAME VALUE)'." + (plist-get (process-plist process) propname)) + +(defun process-put (process propname value) + "Change PROCESS' PROPNAME property to VALUE. +It can be retrieved with `(process-get PROCESS PROPNAME)'." 
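+  ;; Plist round trip (sketch; `proc' stands for some live process object):
+  ;;   (process-put proc 'owner "compile")
+  ;;   (process-get proc 'owner)   ; => "compile"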
+ (set-process-plist process + (plist-put (process-plist process) propname value))) + + +;;;; Input and display facilities. + +(defconst read-key-empty-map (make-sparse-keymap)) + +(defvar read-key-delay 0.01) ;Fast enough for 100Hz repeat rate, hopefully. + +(defun read-key (&optional prompt) + "Read a key from the keyboard. +Contrary to `read-event' this will not return a raw event but instead will +obey the input decoding and translations usually done by `read-key-sequence'. +So escape sequences and keyboard encoding are taken into account. +When there's an ambiguity because the key looks like the prefix of +some sort of escape sequence, the ambiguity is resolved via `read-key-delay'." + ;; This overriding-terminal-local-map binding also happens to + ;; disable quail's input methods, so although read-key-sequence + ;; always inherits the input method, in practice read-key does not + ;; inherit the input method (at least not if it's based on quail). + (let ((overriding-terminal-local-map nil) + (overriding-local-map read-key-empty-map) + (echo-keystrokes 0) + (old-global-map (current-global-map)) + (timer (run-with-idle-timer + ;; Wait long enough that Emacs has the time to receive and + ;; process all the raw events associated with the single-key. + ;; But don't wait too long, or the user may find the delay + ;; annoying (or keep hitting more keys which may then get + ;; lost or misinterpreted). + ;; This is only relevant for keys which Emacs perceives as + ;; "prefixes", such as C-x (because of the C-x 8 map in + ;; key-translate-table and the C-x @ map in function-key-map) + ;; or ESC (because of terminal escape sequences in + ;; input-decode-map). + read-key-delay t + (lambda () + (let ((keys (this-command-keys-vector))) + (unless (zerop (length keys)) + ;; `keys' is non-empty, so the user has hit at least + ;; one key; there's no point waiting any longer, even + ;; though read-key-sequence thinks we should wait + ;; for more input to decide how to interpret the + ;; current input. + (throw 'read-key keys))))))) + (unwind-protect + (progn + (use-global-map + (let ((map (make-sparse-keymap))) + ;; Don't hide the menu-bar and tool-bar entries. + (define-key map [menu-bar] (lookup-key global-map [menu-bar])) + (define-key map [tool-bar] + ;; This hack avoids evaluating the :filter (Bug#9922). + (or (cdr (assq 'tool-bar global-map)) + (lookup-key global-map [tool-bar]))) + map)) + (let* ((keys + (catch 'read-key (read-key-sequence-vector prompt nil t))) + (key (aref keys 0))) + (if (and (> (length keys) 1) + (memq key '(mode-line header-line + left-fringe right-fringe))) + (aref keys 1) + key))) + (cancel-timer timer) + (use-global-map old-global-map)))) + +(defvar read-passwd-map + ;; BEWARE: `defconst' would purecopy it, breaking the sharing with + ;; minibuffer-local-map along the way! + (let ((map (make-sparse-keymap))) + (set-keymap-parent map minibuffer-local-map) + (define-key map "\C-u" #'delete-minibuffer-contents) ;bug#12570 + map) + "Keymap used while reading passwords.") + +(defun read-passwd (prompt &optional confirm default) + "Read a password, prompting with PROMPT, and return it. +If optional CONFIRM is non-nil, read the password twice to make sure. +Optional DEFAULT is a default password to use instead of empty input. + +This function echoes `.' for each character that the user types. +You could let-bind `read-hide-char' to another hiding character, though. + +Once the caller uses the password, it can erase the password +by doing (clear-string STRING)." 
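+  ;; Typical call (sketch; `my-use-passphrase' is a hypothetical consumer):
+  ;;   (let ((pw (read-passwd "Passphrase: " t)))
+  ;;     (unwind-protect
+  ;;         (my-use-passphrase pw)
+  ;;       (clear-string pw)))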
+ (if confirm + (let (success) + (while (not success) + (let ((first (read-passwd prompt nil default)) + (second (read-passwd "Confirm password: " nil default))) + (if (equal first second) + (progn + (and (arrayp second) (clear-string second)) + (setq success first)) + (and (arrayp first) (clear-string first)) + (and (arrayp second) (clear-string second)) + (message "Password not repeated accurately; please start over") + (sit-for 1)))) + success) + (let ((hide-chars-fun + (lambda (beg end _len) + (clear-this-command-keys) + (setq beg (min end (max (minibuffer-prompt-end) + beg))) + (dotimes (i (- end beg)) + (put-text-property (+ i beg) (+ 1 i beg) + 'display (string (or read-hide-char ?.)))))) + minibuf) + (minibuffer-with-setup-hook + (lambda () + (setq minibuf (current-buffer)) + ;; Turn off electricity. + (setq-local post-self-insert-hook nil) + (setq-local buffer-undo-list t) + (setq-local select-active-regions nil) + (use-local-map read-passwd-map) + (setq-local inhibit-modification-hooks nil) ;bug#15501. + (setq-local show-paren-mode nil) ;bug#16091. + (add-hook 'after-change-functions hide-chars-fun nil 'local)) + (unwind-protect + (let ((enable-recursive-minibuffers t) + (read-hide-char (or read-hide-char ?.))) + (read-string prompt nil t default)) ; t = "no history" + (when (buffer-live-p minibuf) + (with-current-buffer minibuf + ;; Not sure why but it seems that there might be cases where the + ;; minibuffer is not always properly reset later on, so undo + ;; whatever we've done here (bug#11392). + (remove-hook 'after-change-functions hide-chars-fun 'local) + (kill-local-variable 'post-self-insert-hook) + ;; And of course, don't keep the sensitive data around. + (erase-buffer)))))))) + +(defun read-number (prompt &optional default) + "Read a numeric value in the minibuffer, prompting with PROMPT. +DEFAULT specifies a default value to return if the user just types RET. +The value of DEFAULT is inserted into PROMPT. +This function is used by the `interactive' code letter `n'." + (let ((n nil) + (default1 (if (consp default) (car default) default))) + (when default1 + (setq prompt + (if (string-match "\\(\\):[ \t]*\\'" prompt) + (replace-match (format " (default %s)" default1) t t prompt 1) + (replace-regexp-in-string "[ \t]*\\'" + (format " (default %s) " default1) + prompt t t)))) + (while + (progn + (let ((str (read-from-minibuffer + prompt nil nil nil nil + (when default + (if (consp default) + (mapcar 'number-to-string (delq nil default)) + (number-to-string default)))))) + (condition-case nil + (setq n (cond + ((zerop (length str)) default1) + ((stringp str) (read str)))) + (error nil))) + (unless (numberp n) + (message "Please enter a number.") + (sit-for 1) + t))) + n)) + +(defun read-char-choice (prompt chars &optional inhibit-keyboard-quit) + "Read and return one of CHARS, prompting for PROMPT. +Any input that is not one of CHARS is ignored. + +If optional argument INHIBIT-KEYBOARD-QUIT is non-nil, ignore +keyboard-quit events while waiting for a valid input." 
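+  ;; Illustrative usage, not from the original source: keep prompting
+  ;; until one of the listed characters is typed:
+  ;;   (read-char-choice "Save, discard, or quit? (s, d, q) " '(?s ?d ?q))
+  ;;   ;; => ?s, ?d or ?q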
+ (unless (consp chars) + (error "Called `read-char-choice' without valid char choices")) + (let (char done show-help (helpbuf " *Char Help*")) + (let ((cursor-in-echo-area t) + (executing-kbd-macro executing-kbd-macro) + (esc-flag nil)) + (save-window-excursion ; in case we call help-form-show + (while (not done) + (unless (get-text-property 0 'face prompt) + (setq prompt (propertize prompt 'face 'minibuffer-prompt))) + (setq char (let ((inhibit-quit inhibit-keyboard-quit)) + (read-key prompt))) + (and show-help (buffer-live-p (get-buffer helpbuf)) + (kill-buffer helpbuf)) + (cond + ((not (numberp char))) + ;; If caller has set help-form, that's enough. + ;; They don't explicitly have to add help-char to chars. + ((and help-form + (eq char help-char) + (setq show-help t) + (help-form-show))) + ((memq char chars) + (setq done t)) + ((and executing-kbd-macro (= char -1)) + ;; read-event returns -1 if we are in a kbd macro and + ;; there are no more events in the macro. Attempt to + ;; get an event interactively. + (setq executing-kbd-macro nil)) + ((not inhibit-keyboard-quit) + (cond + ((and (null esc-flag) (eq char ?\e)) + (setq esc-flag t)) + ((memq char '(?\C-g ?\e)) + (keyboard-quit)))))))) + ;; Display the question with the answer. But without cursor-in-echo-area. + (message "%s%s" prompt (char-to-string char)) + char)) + +(defun sit-for (seconds &optional nodisp obsolete) + "Redisplay, then wait for SECONDS seconds. Stop when input is available. +SECONDS may be a floating-point value. +\(On operating systems that do not support waiting for fractions of a +second, floating-point values are rounded down to the nearest integer.) + +If optional arg NODISP is t, don't redisplay, just wait for input. +Redisplay does not happen if input is available before it starts. + +Value is t if waited the full time with no input arriving, and nil otherwise. + +An obsolete, but still supported form is +\(sit-for SECONDS &optional MILLISECONDS NODISP) +where the optional arg MILLISECONDS specifies an additional wait period, +in milliseconds; this was useful when Emacs was built without +floating point support." + (declare (advertised-calling-convention (seconds &optional nodisp) "22.1")) + ;; This used to be implemented in C until the following discussion: + ;; http://lists.gnu.org/archive/html/emacs-devel/2006-07/msg00401.html + ;; Then it was moved here using an implementation based on an idle timer, + ;; which was then replaced by the use of read-event. + (if (numberp nodisp) + (setq seconds (+ seconds (* 1e-3 nodisp)) + nodisp obsolete) + (if obsolete (setq nodisp obsolete))) + (cond + (noninteractive + (sleep-for seconds) + t) + ((input-pending-p t) + nil) + ((<= seconds 0) + (or nodisp (redisplay))) + (t + (or nodisp (redisplay)) + ;; FIXME: we should not read-event here at all, because it's much too + ;; difficult to reliably "undo" a read-event by pushing it onto + ;; unread-command-events. + ;; For bug#14782, we need read-event to do the keyboard-coding-system + ;; decoding (hence non-nil as second arg under POSIX ttys). + ;; For bug#15614, we need read-event not to inherit-input-method. + ;; So we temporarily suspend input-method-function. + (let ((read (let ((input-method-function nil)) + (read-event nil t seconds)))) + (or (null read) + (progn + ;; https://lists.gnu.org/archive/html/emacs-devel/2006-10/msg00394.html + ;; We want `read' appear in the next command's this-command-event + ;; but not in the current one. 
+ ;; By pushing (cons t read), we indicate that `read' has not + ;; yet been recorded in this-command-keys, so it will be recorded + ;; next time it's read. + ;; And indeed the `seconds' argument to read-event correctly + ;; prevented recording this event in the current command's + ;; this-command-keys. + (push (cons t read) unread-command-events) + nil)))))) + +;; Behind display-popup-menus-p test. +(declare-function x-popup-dialog "menu.c" (position contents &optional header)) + +(defun y-or-n-p (prompt) + "Ask user a \"y or n\" question. Return t if answer is \"y\". +PROMPT is the string to display to ask the question. It should +end in a space; `y-or-n-p' adds \"(y or n) \" to it. + +No confirmation of the answer is requested; a single character is +enough. SPC also means yes, and DEL means no. + +To be precise, this function translates user input into responses +by consulting the bindings in `query-replace-map'; see the +documentation of that variable for more information. In this +case, the useful bindings are `act', `skip', `recenter', +`scroll-up', `scroll-down', and `quit'. +An `act' response means yes, and a `skip' response means no. +A `quit' response means to invoke `keyboard-quit'. +If the user enters `recenter', `scroll-up', or `scroll-down' +responses, perform the requested window recentering or scrolling +and ask again. + +Under a windowing system a dialog box will be used if `last-nonmenu-event' +is nil and `use-dialog-box' is non-nil." + ;; ¡Beware! when I tried to edebug this code, Emacs got into a weird state + ;; where all the keys were unbound (i.e. it somehow got triggered + ;; within read-key, apparently). I had to kill it. + (let ((answer 'recenter) + (padded (lambda (prompt &optional dialog) + (let ((l (length prompt))) + (concat prompt + (if (or (zerop l) (eq ?\s (aref prompt (1- l)))) + "" " ") + (if dialog "" "(y or n) ")))))) + (cond + (noninteractive + (setq prompt (funcall padded prompt)) + (let ((temp-prompt prompt)) + (while (not (memq answer '(act skip))) + (let ((str (read-string temp-prompt))) + (cond ((member str '("y" "Y")) (setq answer 'act)) + ((member str '("n" "N")) (setq answer 'skip)) + (t (setq temp-prompt (concat "Please answer y or n. " + prompt)))))))) + ((and (display-popup-menus-p) + (listp last-nonmenu-event) + use-dialog-box) + (setq prompt (funcall padded prompt t) + answer (x-popup-dialog t `(,prompt ("Yes" . act) ("No" . skip))))) + (t + (setq prompt (funcall padded prompt)) + (while + (let* ((scroll-actions '(recenter scroll-up scroll-down + scroll-other-window scroll-other-window-down)) + (key + (let ((cursor-in-echo-area t)) + (when minibuffer-auto-raise + (raise-frame (window-frame (minibuffer-window)))) + (read-key (propertize (if (memq answer scroll-actions) + prompt + (concat "Please answer y or n. 
" + prompt)) + 'face 'minibuffer-prompt))))) + (setq answer (lookup-key query-replace-map (vector key) t)) + (cond + ((memq answer '(skip act)) nil) + ((eq answer 'recenter) + (recenter) t) + ((eq answer 'scroll-up) + (ignore-errors (scroll-up-command)) t) + ((eq answer 'scroll-down) + (ignore-errors (scroll-down-command)) t) + ((eq answer 'scroll-other-window) + (ignore-errors (scroll-other-window)) t) + ((eq answer 'scroll-other-window-down) + (ignore-errors (scroll-other-window-down)) t) + ((or (memq answer '(exit-prefix quit)) (eq key ?\e)) + (signal 'quit nil) t) + (t t))) + (ding) + (discard-input)))) + (let ((ret (eq answer 'act))) + (unless noninteractive + (message "%s%c" prompt (if ret ?y ?n))) + ret))) + + +;;; Atomic change groups. + +(defmacro atomic-change-group (&rest body) + "Perform BODY as an atomic change group. +This means that if BODY exits abnormally, +all of its changes to the current buffer are undone. +This works regardless of whether undo is enabled in the buffer. + +This mechanism is transparent to ordinary use of undo; +if undo is enabled in the buffer and BODY succeeds, the +user can undo the change normally." + (declare (indent 0) (debug t)) + (let ((handle (make-symbol "--change-group-handle--")) + (success (make-symbol "--change-group-success--"))) + `(let ((,handle (prepare-change-group)) + ;; Don't truncate any undo data in the middle of this. + (undo-outer-limit nil) + (undo-limit most-positive-fixnum) + (undo-strong-limit most-positive-fixnum) + (,success nil)) + (unwind-protect + (progn + ;; This is inside the unwind-protect because + ;; it enables undo if that was disabled; we need + ;; to make sure that it gets disabled again. + (activate-change-group ,handle) + ,@body + (setq ,success t)) + ;; Either of these functions will disable undo + ;; if it was disabled before. + (if ,success + (accept-change-group ,handle) + (cancel-change-group ,handle)))))) + +(defun prepare-change-group (&optional buffer) + "Return a handle for the current buffer's state, for a change group. +If you specify BUFFER, make a handle for BUFFER's state instead. + +Pass the handle to `activate-change-group' afterward to initiate +the actual changes of the change group. + +To finish the change group, call either `accept-change-group' or +`cancel-change-group' passing the same handle as argument. Call +`accept-change-group' to accept the changes in the group as final; +call `cancel-change-group' to undo them all. You should use +`unwind-protect' to make sure the group is always finished. The call +to `activate-change-group' should be inside the `unwind-protect'. +Once you finish the group, don't use the handle again--don't try to +finish the same group twice. For a simple example of correct use, see +the source code of `atomic-change-group'. + +The handle records only the specified buffer. To make a multibuffer +change group, call this function once for each buffer you want to +cover, then use `nconc' to combine the returned values, like this: + + (nconc (prepare-change-group buffer-1) + (prepare-change-group buffer-2)) + +You can then activate that multibuffer change group with a single +call to `activate-change-group' and finish it with a single call +to `accept-change-group' or `cancel-change-group'." + + (if buffer + (list (cons buffer (with-current-buffer buffer buffer-undo-list))) + (list (cons (current-buffer) buffer-undo-list)))) + +(defun activate-change-group (handle) + "Activate a change group made with `prepare-change-group' (which see)." 
+ (dolist (elt handle) + (with-current-buffer (car elt) + (if (eq buffer-undo-list t) + (setq buffer-undo-list nil))))) + +(defun accept-change-group (handle) + "Finish a change group made with `prepare-change-group' (which see). +This finishes the change group by accepting its changes as final." + (dolist (elt handle) + (with-current-buffer (car elt) + (if (eq (cdr elt) t) + (setq buffer-undo-list t))))) + +(defun cancel-change-group (handle) + "Finish a change group made with `prepare-change-group' (which see). +This finishes the change group by reverting all of its changes." + (dolist (elt handle) + (with-current-buffer (car elt) + (setq elt (cdr elt)) + (save-restriction + ;; Widen buffer temporarily so if the buffer was narrowed within + ;; the body of `atomic-change-group' all changes can be undone. + (widen) + (let ((old-car + (if (consp elt) (car elt))) + (old-cdr + (if (consp elt) (cdr elt)))) + ;; Temporarily truncate the undo log at ELT. + (when (consp elt) + (setcar elt nil) (setcdr elt nil)) + (unless (eq last-command 'undo) (undo-start)) + ;; Make sure there's no confusion. + (when (and (consp elt) (not (eq elt (last pending-undo-list)))) + (error "Undoing to some unrelated state")) + ;; Undo it all. + (save-excursion + (while (listp pending-undo-list) (undo-more 1))) + ;; Reset the modified cons cell ELT to its original content. + (when (consp elt) + (setcar elt old-car) + (setcdr elt old-cdr)) + ;; Revert the undo info to what it was when we grabbed the state. + (setq buffer-undo-list elt)))))) + +;;;; Display-related functions. + +;; For compatibility. +(define-obsolete-function-alias 'redraw-modeline + 'force-mode-line-update "24.3") + +(defun momentary-string-display (string pos &optional exit-char message) + "Momentarily display STRING in the buffer at POS. +Display remains until next event is input. +If POS is a marker, only its position is used; its buffer is ignored. +Optional third arg EXIT-CHAR can be a character, event or event +description list. EXIT-CHAR defaults to SPC. If the input is +EXIT-CHAR it is swallowed; otherwise it is then available as +input (as a command if nothing else). +Display MESSAGE (optional fourth arg) in the echo area. +If MESSAGE is nil, instructions to type EXIT-CHAR are displayed there." + (or exit-char (setq exit-char ?\s)) + (let ((ol (make-overlay pos pos)) + (str (copy-sequence string))) + (unwind-protect + (progn + (save-excursion + (overlay-put ol 'after-string str) + (goto-char pos) + ;; To avoid trouble with out-of-bounds position + (setq pos (point)) + ;; If the string end is off screen, recenter now. + (if (<= (window-end nil t) pos) + (recenter (/ (window-height) 2)))) + (message (or message "Type %s to continue editing.") + (single-key-description exit-char)) + (let ((event (read-key))) + ;; `exit-char' can be an event, or an event description list. + (or (eq event exit-char) + (eq event (event-convert-list exit-char)) + (setq unread-command-events + (append (this-single-command-raw-keys)))))) + (delete-overlay ol)))) + + +;;;; Overlay operations + +(defun copy-overlay (o) + "Return a copy of overlay O." + (let ((o1 (if (overlay-buffer o) + (make-overlay (overlay-start o) (overlay-end o) + ;; FIXME: there's no easy way to find the + ;; insertion-type of the two markers. 
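+                     ;; As written, the copy therefore gets
+                     ;; `make-overlay's default (nil) FRONT-ADVANCE and
+                     ;; REAR-ADVANCE, whatever the original overlay used.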
+ (overlay-buffer o)) + (let ((o1 (make-overlay (point-min) (point-min)))) + (delete-overlay o1) + o1))) + (props (overlay-properties o))) + (while props + (overlay-put o1 (pop props) (pop props))) + o1)) + +(defun remove-overlays (&optional beg end name val) + "Clear BEG and END of overlays whose property NAME has value VAL. +Overlays might be moved and/or split. +BEG and END default respectively to the beginning and end of buffer." + ;; This speeds up the loops over overlays. + (unless beg (setq beg (point-min))) + (unless end (setq end (point-max))) + (overlay-recenter end) + (if (< end beg) + (setq beg (prog1 end (setq end beg)))) + (save-excursion + (dolist (o (overlays-in beg end)) + (when (eq (overlay-get o name) val) + ;; Either push this overlay outside beg...end + ;; or split it to exclude beg...end + ;; or delete it entirely (if it is contained in beg...end). + (if (< (overlay-start o) beg) + (if (> (overlay-end o) end) + (progn + (move-overlay (copy-overlay o) + (overlay-start o) beg) + (move-overlay o end (overlay-end o))) + (move-overlay o (overlay-start o) beg)) + (if (> (overlay-end o) end) + (move-overlay o end (overlay-end o)) + (delete-overlay o))))))) + +;;;; Miscellanea. + +(defvar suspend-hook nil + "Normal hook run by `suspend-emacs', before suspending.") + +(defvar suspend-resume-hook nil + "Normal hook run by `suspend-emacs', after Emacs is continued.") + +(defvar temp-buffer-show-hook nil + "Normal hook run by `with-output-to-temp-buffer' after displaying the buffer. +When the hook runs, the temporary buffer is current, and the window it +was displayed in is selected.") + +(defvar temp-buffer-setup-hook nil + "Normal hook run by `with-output-to-temp-buffer' at the start. +When the hook runs, the temporary buffer is current. +This hook is normally set up with a function to put the buffer in Help +mode.") + +(defconst user-emacs-directory + (if (eq system-type 'ms-dos) + ;; MS-DOS cannot have initial dot. + "~/_emacs.d/" + "~/.emacs.d/") + "Directory beneath which additional per-user Emacs-specific files are placed. +Various programs in Emacs store information in this directory. +Note that this should end with a directory separator. +See also `locate-user-emacs-file'.") + +;;;; Misc. useful functions. + +(defsubst buffer-narrowed-p () + "Return non-nil if the current buffer is narrowed." + (/= (- (point-max) (point-min)) (buffer-size))) + +(defun find-tag-default-bounds () + "Determine the boundaries of the default tag, based on text at point. +Return a cons cell with the beginning and end of the found tag. +If there is no plausible default, return nil." + (let (from to bound) + (when (or (progn + ;; Look at text around `point'. + (save-excursion + (skip-syntax-backward "w_") (setq from (point))) + (save-excursion + (skip-syntax-forward "w_") (setq to (point))) + (> to from)) + ;; Look between `line-beginning-position' and `point'. + (save-excursion + (and (setq bound (line-beginning-position)) + (skip-syntax-backward "^w_" bound) + (> (setq to (point)) bound) + (skip-syntax-backward "w_") + (setq from (point)))) + ;; Look between `point' and `line-end-position'. + (save-excursion + (and (setq bound (line-end-position)) + (skip-syntax-forward "^w_" bound) + (< (setq from (point)) bound) + (skip-syntax-forward "w_") + (setq to (point))))) + (cons from to)))) + +(defun find-tag-default () + "Determine default tag to search for, based on text at point. +If there is no plausible default, return nil." 
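+  ;; Illustrative example, not from the original source: with point on
+  ;; the symbol `buffer-size' in a Lisp buffer,
+  ;;   (find-tag-default)         ; => "buffer-size"
+  ;;   (find-tag-default-bounds)  ; => (START . END) positions of the symbol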
+ (let ((bounds (find-tag-default-bounds))) + (when bounds + (buffer-substring-no-properties (car bounds) (cdr bounds))))) + +(defun find-tag-default-as-regexp () + "Return regexp that matches the default tag at point. +If there is no tag at point, return nil. + +When in a major mode that does not provide its own +`find-tag-default-function', return a regexp that matches the +symbol at point exactly." + (let ((tag (funcall (or find-tag-default-function + (get major-mode 'find-tag-default-function) + 'find-tag-default)))) + (if tag (regexp-quote tag)))) + +(defun find-tag-default-as-symbol-regexp () + "Return regexp that matches the default tag at point as symbol. +If there is no tag at point, return nil. + +When in a major mode that does not provide its own +`find-tag-default-function', return a regexp that matches the +symbol at point exactly." + (let ((tag-regexp (find-tag-default-as-regexp))) + (if (and tag-regexp + (eq (or find-tag-default-function + (get major-mode 'find-tag-default-function) + 'find-tag-default) + 'find-tag-default)) + (format "\\_<%s\\_>" tag-regexp) + tag-regexp))) + +(defun play-sound (sound) + "SOUND is a list of the form `(sound KEYWORD VALUE...)'. +The following keywords are recognized: + + :file FILE - read sound data from FILE. If FILE isn't an +absolute file name, it is searched in `data-directory'. + + :data DATA - read sound data from string DATA. + +Exactly one of :file or :data must be present. + + :volume VOL - set volume to VOL. VOL must an integer in the +range 0..100 or a float in the range 0..1.0. If not specified, +don't change the volume setting of the sound device. + + :device DEVICE - play sound on DEVICE. If not specified, +a system-dependent default device name is used. + +Note: :data and :device are currently not supported on Windows." + (if (fboundp 'play-sound-internal) + (play-sound-internal sound) + (error "This Emacs binary lacks sound support"))) + +(declare-function w32-shell-dos-semantics "w32-fns" nil) + +(defun shell-quote-argument (argument) + "Quote ARGUMENT for passing as argument to an inferior shell." + (cond + ((eq system-type 'ms-dos) + ;; Quote using double quotes, but escape any existing quotes in + ;; the argument with backslashes. + (let ((result "") + (start 0) + end) + (if (or (null (string-match "[^\"]" argument)) + (< (match-end 0) (length argument))) + (while (string-match "[\"]" argument start) + (setq end (match-beginning 0) + result (concat result (substring argument start end) + "\\" (substring argument end (1+ end))) + start (1+ end)))) + (concat "\"" result (substring argument start) "\""))) + + ((and (eq system-type 'windows-nt) (w32-shell-dos-semantics)) + + ;; First, quote argument so that CommandLineToArgvW will + ;; understand it. See + ;; http://msdn.microsoft.com/en-us/library/17w5ykft%28v=vs.85%29.aspx + ;; After we perform that level of quoting, escape shell + ;; metacharacters so that cmd won't mangle our argument. If the + ;; argument contains no double quote characters, we can just + ;; surround it with double quotes. Otherwise, we need to prefix + ;; each shell metacharacter with a caret. 
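+    ;; Illustrative trace, not from the original source: under these
+    ;; cmd semantics the three-character argument a"b is first rewritten
+    ;; to a\"b for CommandLineToArgvW, and, since it contains a quote,
+    ;; it is then caret-escaped and wrapped, yielding ^"a\^"b^".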
+ + (setq argument + ;; escape backslashes at end of string + (replace-regexp-in-string + "\\(\\\\*\\)$" + "\\1\\1" + ;; escape backslashes and quotes in string body + (replace-regexp-in-string + "\\(\\\\*\\)\"" + "\\1\\1\\\\\"" + argument))) + + (if (string-match "[%!\"]" argument) + (concat + "^\"" + (replace-regexp-in-string + "\\([%!()\"<>&|^]\\)" + "^\\1" + argument) + "^\"") + (concat "\"" argument "\""))) + + (t + (if (equal argument "") + "''" + ;; Quote everything except POSIX filename characters. + ;; This should be safe enough even for really weird shells. + (replace-regexp-in-string + "\n" "'\n'" + (replace-regexp-in-string "[^-0-9a-zA-Z_./\n]" "\\\\\\&" argument)))) + )) + +(defun string-or-null-p (object) + "Return t if OBJECT is a string or nil. +Otherwise, return nil." + (or (stringp object) (null object))) + +(defun booleanp (object) + "Return t if OBJECT is one of the two canonical boolean values: t or nil. +Otherwise, return nil." + (and (memq object '(nil t)) t)) + +(defun special-form-p (object) + "Non-nil if and only if OBJECT is a special form." + (if (and (symbolp object) (fboundp object)) + (setq object (indirect-function object t))) + (and (subrp object) (eq (cdr (subr-arity object)) 'unevalled))) + +(defun macrop (object) + "Non-nil if and only if OBJECT is a macro." + (let ((def (indirect-function object t))) + (when (consp def) + (or (eq 'macro (car def)) + (and (autoloadp def) (memq (nth 4 def) '(macro t))))))) + +(defun field-at-pos (pos) + "Return the field at position POS, taking stickiness etc into account." + (let ((raw-field (get-char-property (field-beginning pos) 'field))) + (if (eq raw-field 'boundary) + (get-char-property (1- (field-end pos)) 'field) + raw-field))) + +(defun sha1 (object &optional start end binary) + "Return the SHA1 (Secure Hash Algorithm) of an OBJECT. +OBJECT is either a string or a buffer. Optional arguments START and +END are character positions specifying which portion of OBJECT for +computing the hash. If BINARY is non-nil, return a string in binary +form." + (secure-hash 'sha1 object start end binary)) + +(defun function-get (f prop &optional autoload) + "Return the value of property PROP of function F. +If AUTOLOAD is non-nil and F is autoloaded, try to autoload it +in the hope that it will set PROP. If AUTOLOAD is `macro', only do it +if it's an autoloaded macro." + (let ((val nil)) + (while (and (symbolp f) + (null (setq val (get f prop))) + (fboundp f)) + (let ((fundef (symbol-function f))) + (if (and autoload (autoloadp fundef) + (not (equal fundef + (autoload-do-load fundef f + (if (eq autoload 'macro) + 'macro))))) + nil ;Re-try `get' on the same `f'. + (setq f fundef)))) + val)) + +;;;; Support for yanking and text properties. +;; Why here in subr.el rather than in simple.el? --Stef + +(defvar yank-handled-properties) +(defvar yank-excluded-properties) + +(defun remove-yank-excluded-properties (start end) + "Process text properties between START and END, inserted for a `yank'. +Perform the handling specified by `yank-handled-properties', then +remove properties specified by `yank-excluded-properties'." 
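+  ;; For reference, not from the original source: each element of
+  ;; `yank-handled-properties' is a (PROP . FUNCTION) pair; a typical
+  ;; entry pairs a text property with one of the handlers defined
+  ;; further below, e.g.
+  ;;   (font-lock-face . yank-handle-font-lock-face-property)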
+ (let ((inhibit-read-only t)) + (dolist (handler yank-handled-properties) + (let ((prop (car handler)) + (fun (cdr handler)) + (run-start start)) + (while (< run-start end) + (let ((value (get-text-property run-start prop)) + (run-end (next-single-property-change + run-start prop nil end))) + (funcall fun value run-start run-end) + (setq run-start run-end))))) + (if (eq yank-excluded-properties t) + (set-text-properties start end nil) + (remove-list-of-text-properties start end yank-excluded-properties)))) + +(defvar yank-undo-function) + +(defun insert-for-yank (string) + "Call `insert-for-yank-1' repetitively for each `yank-handler' segment. + +See `insert-for-yank-1' for more details." + (let (to) + (while (setq to (next-single-property-change 0 'yank-handler string)) + (insert-for-yank-1 (substring string 0 to)) + (setq string (substring string to)))) + (insert-for-yank-1 string)) + +(defun insert-for-yank-1 (string) + "Insert STRING at point for the `yank' command. +This function is like `insert', except it honors the variables +`yank-handled-properties' and `yank-excluded-properties', and the +`yank-handler' text property. + +Properties listed in `yank-handled-properties' are processed, +then those listed in `yank-excluded-properties' are discarded. + +If STRING has a non-nil `yank-handler' property on its first +character, the normal insert behavior is altered. The value of +the `yank-handler' property must be a list of one to four +elements, of the form (FUNCTION PARAM NOEXCLUDE UNDO). +FUNCTION, if non-nil, should be a function of one argument, an + object to insert; it is called instead of `insert'. +PARAM, if present and non-nil, replaces STRING as the argument to + FUNCTION or `insert'; e.g. if FUNCTION is `yank-rectangle', PARAM + may be a list of strings to insert as a rectangle. +If NOEXCLUDE is present and non-nil, the normal removal of + `yank-excluded-properties' is not performed; instead FUNCTION is + responsible for the removal. This may be necessary if FUNCTION + adjusts point before or after inserting the object. +UNDO, if present and non-nil, should be a function to be called + by `yank-pop' to undo the insertion of the current object. It is + given two arguments, the start and end of the region. FUNCTION + may set `yank-undo-function' to override UNDO." + (let* ((handler (and (stringp string) + (get-text-property 0 'yank-handler string))) + (param (or (nth 1 handler) string)) + (opoint (point)) + (inhibit-read-only inhibit-read-only) + end) + + (setq yank-undo-function t) + (if (nth 0 handler) ; FUNCTION + (funcall (car handler) param) + (insert param)) + (setq end (point)) + + ;; Prevent read-only properties from interfering with the + ;; following text property changes. + (setq inhibit-read-only t) + + (unless (nth 2 handler) ; NOEXCLUDE + (remove-yank-excluded-properties opoint end)) + + ;; If last inserted char has properties, mark them as rear-nonsticky. + (if (and (> end opoint) + (text-properties-at (1- end))) + (put-text-property (1- end) end 'rear-nonsticky t)) + + (if (eq yank-undo-function t) ; not set by FUNCTION + (setq yank-undo-function (nth 3 handler))) ; UNDO + (if (nth 4 handler) ; COMMAND + (setq this-command (nth 4 handler))))) + +(defun insert-buffer-substring-no-properties (buffer &optional start end) + "Insert before point a substring of BUFFER, without text properties. +BUFFER may be a buffer or a buffer name. +Arguments START and END are character positions specifying the substring. 
+They default to the values of (point-min) and (point-max) in BUFFER." + (let ((opoint (point))) + (insert-buffer-substring buffer start end) + (let ((inhibit-read-only t)) + (set-text-properties opoint (point) nil)))) + +(defun insert-buffer-substring-as-yank (buffer &optional start end) + "Insert before point a part of BUFFER, stripping some text properties. +BUFFER may be a buffer or a buffer name. +Arguments START and END are character positions specifying the substring. +They default to the values of (point-min) and (point-max) in BUFFER. +Before insertion, process text properties according to +`yank-handled-properties' and `yank-excluded-properties'." + ;; Since the buffer text should not normally have yank-handler properties, + ;; there is no need to handle them here. + (let ((opoint (point))) + (insert-buffer-substring buffer start end) + (remove-yank-excluded-properties opoint (point)))) + +(defun yank-handle-font-lock-face-property (face start end) + "If `font-lock-defaults' is nil, apply FACE as a `face' property. +START and END denote the start and end of the text to act on. +Do nothing if FACE is nil." + (and face + (null font-lock-defaults) + (put-text-property start end 'face face))) + +;; This removes `mouse-face' properties in *Help* buffer buttons: +;; http://lists.gnu.org/archive/html/emacs-devel/2002-04/msg00648.html +(defun yank-handle-category-property (category start end) + "Apply property category CATEGORY's properties between START and END." + (when category + (let ((start2 start)) + (while (< start2 end) + (let ((end2 (next-property-change start2 nil end)) + (original (text-properties-at start2))) + (set-text-properties start2 end2 (symbol-plist category)) + (add-text-properties start2 end2 original) + (setq start2 end2)))))) + + +;;;; Synchronous shell commands. + +(defun start-process-shell-command (name buffer &rest args) + "Start a program in a subprocess. Return the process object for it. +NAME is name for process. It is modified if necessary to make it unique. +BUFFER is the buffer (or buffer name) to associate with the process. + Process output goes at end of that buffer, unless you specify + an output stream or filter function to handle the output. + BUFFER may be also nil, meaning that this process is not associated + with any buffer +COMMAND is the shell command to run. + +An old calling convention accepted any number of arguments after COMMAND, +which were just concatenated to COMMAND. This is still supported but strongly +discouraged." + (declare (advertised-calling-convention (name buffer command) "23.1")) + ;; We used to use `exec' to replace the shell with the command, + ;; but that failed to handle (...) and semicolon, etc. + (start-process name buffer shell-file-name shell-command-switch + (mapconcat 'identity args " "))) + +(defun start-file-process-shell-command (name buffer &rest args) + "Start a program in a subprocess. Return the process object for it. +Similar to `start-process-shell-command', but calls `start-file-process'." + (declare (advertised-calling-convention (name buffer command) "23.1")) + (start-file-process + name buffer + (if (file-remote-p default-directory) "/bin/sh" shell-file-name) + (if (file-remote-p default-directory) "-c" shell-command-switch) + (mapconcat 'identity args " "))) + +(defun call-process-shell-command (command &optional infile buffer display + &rest args) + "Execute the shell command COMMAND synchronously in separate process. +The remaining arguments are optional. 
+The program's input comes from file INFILE (nil means `/dev/null'). +Insert output in BUFFER before point; t means current buffer; + nil for BUFFER means discard it; 0 means discard and don't wait. +BUFFER can also have the form (REAL-BUFFER STDERR-FILE); in that case, +REAL-BUFFER says what to do with standard output, as above, +while STDERR-FILE says what to do with standard error in the child. +STDERR-FILE may be nil (discard standard error output), +t (mix it with ordinary output), or a file name string. + +Fourth arg DISPLAY non-nil means redisplay buffer as output is inserted. +Wildcards and redirection are handled as usual in the shell. + +If BUFFER is 0, `call-process-shell-command' returns immediately with value nil. +Otherwise it waits for COMMAND to terminate and returns a numeric exit +status or a signal description string. +If you quit, the process is killed with SIGINT, or SIGKILL if you quit again. + +An old calling convention accepted any number of arguments after DISPLAY, +which were just concatenated to COMMAND. This is still supported but strongly +discouraged." + (declare (advertised-calling-convention + (command &optional infile buffer display) "24.5")) + ;; We used to use `exec' to replace the shell with the command, + ;; but that failed to handle (...) and semicolon, etc. + (call-process shell-file-name + infile buffer display + shell-command-switch + (mapconcat 'identity (cons command args) " "))) + +(defun process-file-shell-command (command &optional infile buffer display + &rest args) + "Process files synchronously in a separate process. +Similar to `call-process-shell-command', but calls `process-file'." + (declare (advertised-calling-convention + (command &optional infile buffer display) "24.5")) + (process-file + (if (file-remote-p default-directory) "/bin/sh" shell-file-name) + infile buffer display + (if (file-remote-p default-directory) "-c" shell-command-switch) + (mapconcat 'identity (cons command args) " "))) + +;;;; Lisp macros to do various things temporarily. + +(defmacro track-mouse (&rest body) + "Evaluate BODY with mouse movement events enabled. +Within a `track-mouse' form, mouse motion generates input events that + you can read with `read-event'. +Normally, mouse motion is ignored." + (declare (debug t) (indent 0)) + `(internal--track-mouse (lambda () ,@body))) + +(defmacro with-current-buffer (buffer-or-name &rest body) + "Execute the forms in BODY with BUFFER-OR-NAME temporarily current. +BUFFER-OR-NAME must be a buffer or the name of an existing buffer. +The value returned is the value of the last form in BODY. See +also `with-temp-buffer'." + (declare (indent 1) (debug t)) + `(save-current-buffer + (set-buffer ,buffer-or-name) + ,@body)) + +(defun internal--before-with-selected-window (window) + (let ((other-frame (window-frame window))) + (list window (selected-window) + ;; Selecting a window on another frame also changes that + ;; frame's frame-selected-window. We must save&restore it. + (unless (eq (selected-frame) other-frame) + (frame-selected-window other-frame)) + ;; Also remember the top-frame if on ttys. + (unless (eq (selected-frame) other-frame) + (tty-top-frame other-frame))))) + +(defun internal--after-with-selected-window (state) + ;; First reset frame-selected-window. + (when (window-live-p (nth 2 state)) + ;; We don't use set-frame-selected-window because it does not + ;; pass the `norecord' argument to Fselect_window. 
+ (select-window (nth 2 state) 'norecord) + (and (frame-live-p (nth 3 state)) + (not (eq (tty-top-frame) (nth 3 state))) + (select-frame (nth 3 state) 'norecord))) + ;; Then reset the actual selected-window. + (when (window-live-p (nth 1 state)) + (select-window (nth 1 state) 'norecord))) + +(defmacro with-selected-window (window &rest body) + "Execute the forms in BODY with WINDOW as the selected window. +The value returned is the value of the last form in BODY. + +This macro saves and restores the selected window, as well as the +selected window of each frame. It does not change the order of +recently selected windows. If the previously selected window of +some frame is no longer live at the end of BODY, that frame's +selected window is left alone. If the selected window is no +longer live, then whatever window is selected at the end of BODY +remains selected. + +This macro uses `save-current-buffer' to save and restore the +current buffer, since otherwise its normal operation could +potentially make a different buffer current. It does not alter +the buffer list ordering." + (declare (indent 1) (debug t)) + `(let ((save-selected-window--state + (internal--before-with-selected-window ,window))) + (save-current-buffer + (unwind-protect + (progn (select-window (car save-selected-window--state) 'norecord) + ,@body) + (internal--after-with-selected-window save-selected-window--state))))) + +(defmacro with-selected-frame (frame &rest body) + "Execute the forms in BODY with FRAME as the selected frame. +The value returned is the value of the last form in BODY. + +This macro saves and restores the selected frame, and changes the +order of neither the recently selected windows nor the buffers in +the buffer list." + (declare (indent 1) (debug t)) + (let ((old-frame (make-symbol "old-frame")) + (old-buffer (make-symbol "old-buffer"))) + `(let ((,old-frame (selected-frame)) + (,old-buffer (current-buffer))) + (unwind-protect + (progn (select-frame ,frame 'norecord) + ,@body) + (when (frame-live-p ,old-frame) + (select-frame ,old-frame 'norecord)) + (when (buffer-live-p ,old-buffer) + (set-buffer ,old-buffer)))))) + +(defmacro save-window-excursion (&rest body) + "Execute BODY, then restore previous window configuration. +This macro saves the window configuration on the selected frame, +executes BODY, then calls `set-window-configuration' to restore +the saved window configuration. The return value is the last +form in BODY. The window configuration is also restored if BODY +exits nonlocally. + +BEWARE: Most uses of this macro introduce bugs. +E.g. it should not be used to try and prevent some code from opening +a new window, since that window may sometimes appear in another frame, +in which case `save-window-excursion' cannot help." + (declare (indent 0) (debug t)) + (let ((c (make-symbol "wconfig"))) + `(let ((,c (current-window-configuration))) + (unwind-protect (progn ,@body) + (set-window-configuration ,c))))) + +(defun internal-temp-output-buffer-show (buffer) + "Internal function for `with-output-to-temp-buffer'." + (with-current-buffer buffer + (set-buffer-modified-p nil) + (goto-char (point-min))) + + (if temp-buffer-show-function + (funcall temp-buffer-show-function buffer) + (with-current-buffer buffer + (let* ((window + (let ((window-combination-limit + ;; When `window-combination-limit' equals + ;; `temp-buffer' or `temp-buffer-resize' and + ;; `temp-buffer-resize-mode' is enabled in this + ;; buffer bind it to t so resizing steals space + ;; preferably from the window that was split. 
+ (if (or (eq window-combination-limit 'temp-buffer) + (and (eq window-combination-limit + 'temp-buffer-resize) + temp-buffer-resize-mode)) + t + window-combination-limit))) + (display-buffer buffer))) + (frame (and window (window-frame window)))) + (when window + (unless (eq frame (selected-frame)) + (make-frame-visible frame)) + (setq minibuffer-scroll-window window) + (set-window-hscroll window 0) + ;; Don't try this with NOFORCE non-nil! + (set-window-start window (point-min) t) + ;; This should not be necessary. + (set-window-point window (point-min)) + ;; Run `temp-buffer-show-hook', with the chosen window selected. + (with-selected-window window + (run-hooks 'temp-buffer-show-hook)))))) + ;; Return nil. + nil) + +;; Doc is very similar to with-temp-buffer-window. +(defmacro with-output-to-temp-buffer (bufname &rest body) + "Bind `standard-output' to buffer BUFNAME, eval BODY, then show that buffer. + +This construct makes buffer BUFNAME empty before running BODY. +It does not make the buffer current for BODY. +Instead it binds `standard-output' to that buffer, so that output +generated with `prin1' and similar functions in BODY goes into +the buffer. + +At the end of BODY, this marks buffer BUFNAME unmodified and displays +it in a window, but does not select it. The normal way to do this is +by calling `display-buffer', then running `temp-buffer-show-hook'. +However, if `temp-buffer-show-function' is non-nil, it calls that +function instead (and does not run `temp-buffer-show-hook'). The +function gets one argument, the buffer to display. + +The return value of `with-output-to-temp-buffer' is the value of the +last form in BODY. If BODY does not finish normally, the buffer +BUFNAME is not displayed. + +This runs the hook `temp-buffer-setup-hook' before BODY, +with the buffer BUFNAME temporarily current. It runs the hook +`temp-buffer-show-hook' after displaying buffer BUFNAME, with that +buffer temporarily current, and the window that was used to display it +temporarily selected. But it doesn't run `temp-buffer-show-hook' +if it uses `temp-buffer-show-function'. + +By default, the setup hook puts the buffer into Help mode before running BODY. +If BODY does not change the major mode, the show hook makes the buffer +read-only, and scans it for function and variable names to make them into +clickable cross-references. + +See the related form `with-temp-buffer-window'." + (declare (debug t)) + (let ((old-dir (make-symbol "old-dir")) + (buf (make-symbol "buf"))) + `(let* ((,old-dir default-directory) + (,buf + (with-current-buffer (get-buffer-create ,bufname) + (prog1 (current-buffer) + (kill-all-local-variables) + ;; FIXME: delete_all_overlays + (setq default-directory ,old-dir) + (setq buffer-read-only nil) + (setq buffer-file-name nil) + (setq buffer-undo-list t) + (let ((inhibit-read-only t) + (inhibit-modification-hooks t)) + (erase-buffer) + (run-hooks 'temp-buffer-setup-hook))))) + (standard-output ,buf)) + (prog1 (progn ,@body) + (internal-temp-output-buffer-show ,buf))))) + +(defmacro with-temp-file (file &rest body) + "Create a new buffer, evaluate BODY there, and write the buffer to FILE. +The value returned is the value of the last form in BODY. +See also `with-temp-buffer'." 
+ (declare (indent 1) (debug t)) + (let ((temp-file (make-symbol "temp-file")) + (temp-buffer (make-symbol "temp-buffer"))) + `(let ((,temp-file ,file) + (,temp-buffer + (get-buffer-create (generate-new-buffer-name " *temp file*")))) + (unwind-protect + (prog1 + (with-current-buffer ,temp-buffer + ,@body) + (with-current-buffer ,temp-buffer + (write-region nil nil ,temp-file nil 0))) + (and (buffer-name ,temp-buffer) + (kill-buffer ,temp-buffer)))))) + +(defmacro with-temp-message (message &rest body) + "Display MESSAGE temporarily if non-nil while BODY is evaluated. +The original message is restored to the echo area after BODY has finished. +The value returned is the value of the last form in BODY. +MESSAGE is written to the message log buffer if `message-log-max' is non-nil. +If MESSAGE is nil, the echo area and message log buffer are unchanged. +Use a MESSAGE of \"\" to temporarily clear the echo area." + (declare (debug t) (indent 1)) + (let ((current-message (make-symbol "current-message")) + (temp-message (make-symbol "with-temp-message"))) + `(let ((,temp-message ,message) + (,current-message)) + (unwind-protect + (progn + (when ,temp-message + (setq ,current-message (current-message)) + (message "%s" ,temp-message)) + ,@body) + (and ,temp-message + (if ,current-message + (message "%s" ,current-message) + (message nil))))))) + +(defmacro with-temp-buffer (&rest body) + "Create a temporary buffer, and evaluate BODY there like `progn'. +See also `with-temp-file' and `with-output-to-string'." + (declare (indent 0) (debug t)) + (let ((temp-buffer (make-symbol "temp-buffer"))) + `(let ((,temp-buffer (generate-new-buffer " *temp*"))) + ;; FIXME: kill-buffer can change current-buffer in some odd cases. + (with-current-buffer ,temp-buffer + (unwind-protect + (progn ,@body) + (and (buffer-name ,temp-buffer) + (kill-buffer ,temp-buffer))))))) + +(defmacro with-silent-modifications (&rest body) + "Execute BODY, pretending it does not modify the buffer. +If BODY performs real modifications to the buffer's text, other +than cosmetic ones, undo data may become corrupted. + +This macro will run BODY normally, but doesn't count its buffer +modifications as being buffer modifications. This affects things +like `buffer-modified-p', checking whether the file is locked by +someone else, running buffer modification hooks, and other things +of that nature. + +Typically used around modifications of text-properties which do +not really affect the buffer's content." + (declare (debug t) (indent 0)) + (let ((modified (make-symbol "modified"))) + `(let* ((,modified (buffer-modified-p)) + (buffer-undo-list t) + (inhibit-read-only t) + (inhibit-modification-hooks t)) + (unwind-protect + (progn + ,@body) + (unless ,modified + (restore-buffer-modified-p nil)))))) + +(defmacro with-output-to-string (&rest body) + "Execute BODY, return the text it sent to `standard-output', as a string." + (declare (indent 0) (debug t)) + `(let ((standard-output + (get-buffer-create (generate-new-buffer-name " *string-output*")))) + (unwind-protect + (progn + (let ((standard-output standard-output)) + ,@body) + (with-current-buffer standard-output + (buffer-string))) + (kill-buffer standard-output)))) + +(defmacro with-local-quit (&rest body) + "Execute BODY, allowing quits to terminate BODY but not escape further. +When a quit terminates BODY, `with-local-quit' returns nil but +requests another quit. That quit will be processed as soon as quitting +is allowed once again. 
(Immediately, if `inhibit-quit' is nil.)" + (declare (debug t) (indent 0)) + `(condition-case nil + (let ((inhibit-quit nil)) + ,@body) + (quit (setq quit-flag t) + ;; This call is to give a chance to handle quit-flag + ;; in case inhibit-quit is nil. + ;; Without this, it will not be handled until the next function + ;; call, and that might allow it to exit thru a condition-case + ;; that intends to handle the quit signal next time. + (eval '(ignore nil))))) + +(defmacro while-no-input (&rest body) + "Execute BODY only as long as there's no pending input. +If input arrives, that ends the execution of BODY, +and `while-no-input' returns t. Quitting makes it return nil. +If BODY finishes, `while-no-input' returns whatever value BODY produced." + (declare (debug t) (indent 0)) + (let ((catch-sym (make-symbol "input"))) + `(with-local-quit + (catch ',catch-sym + (let ((throw-on-input ',catch-sym)) + (or (input-pending-p) + (progn ,@body))))))) + +(defmacro condition-case-unless-debug (var bodyform &rest handlers) + "Like `condition-case' except that it does not prevent debugging. +More specifically if `debug-on-error' is set then the debugger will be invoked +even if this catches the signal." + (declare (debug condition-case) (indent 2)) + `(condition-case ,var + ,bodyform + ,@(mapcar (lambda (handler) + `((debug ,@(if (listp (car handler)) (car handler) + (list (car handler)))) + ,@(cdr handler))) + handlers))) + +(define-obsolete-function-alias 'condition-case-no-debug + 'condition-case-unless-debug "24.1") + +(defmacro with-demoted-errors (format &rest body) + "Run BODY and demote any errors to simple messages. +FORMAT is a string passed to `message' to format any error message. +It should contain a single %-sequence; e.g., \"Error: %S\". + +If `debug-on-error' is non-nil, run BODY without catching its errors. +This is to be used around code which is not expected to signal an error +but which should be robust in the unexpected case that an error is signaled. + +For backward compatibility, if FORMAT is not a constant string, it +is assumed to be part of BODY, in which case the message format +used is \"Error: %S\"." + (declare (debug t) (indent 1)) + (let ((err (make-symbol "err")) + (format (if (and (stringp format) body) format + (prog1 "Error: %S" + (if format (push format body)))))) + `(condition-case-unless-debug ,err + ,(macroexp-progn body) + (error (message ,format ,err) nil)))) + +(defmacro combine-after-change-calls (&rest body) + "Execute BODY, but don't call the after-change functions till the end. +If BODY makes changes in the buffer, they are recorded +and the functions on `after-change-functions' are called several times +when BODY is finished. +The return value is the value of the last form in BODY. + +If `before-change-functions' is non-nil, then calls to the after-change +functions can't be deferred, so in that case this macro has no effect. + +Do not alter `after-change-functions' or `before-change-functions' +in BODY." + (declare (indent 0) (debug t)) + `(unwind-protect + (let ((combine-after-change-calls t)) + . ,body) + (combine-after-change-execute))) + +(defmacro with-case-table (table &rest body) + "Execute the forms in BODY with TABLE as the current case table. +The value returned is the value of the last form in BODY." 
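+  ;; Illustrative usage, not from the original source: upcase a region
+  ;; under the standard case table, restoring the previous table
+  ;; afterwards even on non-local exit:
+  ;;   (with-case-table (standard-case-table)
+  ;;     (upcase-region (point-min) (point-max)))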
+ (declare (indent 1) (debug t)) + (let ((old-case-table (make-symbol "table")) + (old-buffer (make-symbol "buffer"))) + `(let ((,old-case-table (current-case-table)) + (,old-buffer (current-buffer))) + (unwind-protect + (progn (set-case-table ,table) + ,@body) + (with-current-buffer ,old-buffer + (set-case-table ,old-case-table)))))) + +(defmacro with-file-modes (modes &rest body) + "Execute BODY with default file permissions temporarily set to MODES. +MODES is as for `set-default-file-modes'." + (declare (indent 1) (debug t)) + (let ((umask (make-symbol "umask"))) + `(let ((,umask (default-file-modes))) + (unwind-protect + (progn + (set-default-file-modes ,modes) + ,@body) + (set-default-file-modes ,umask))))) + + +;;; Matching and match data. + +(defvar save-match-data-internal) + +;; We use save-match-data-internal as the local variable because +;; that works ok in practice (people should not use that variable elsewhere). +;; We used to use an uninterned symbol; the compiler handles that properly +;; now, but it generates slower code. +(defmacro save-match-data (&rest body) + "Execute the BODY forms, restoring the global value of the match data. +The value returned is the value of the last form in BODY." + ;; It is better not to use backquote here, + ;; because that makes a bootstrapping problem + ;; if you need to recompile all the Lisp files using interpreted code. + (declare (indent 0) (debug t)) + (list 'let + '((save-match-data-internal (match-data))) + (list 'unwind-protect + (cons 'progn body) + ;; It is safe to free (evaporate) markers immediately here, + ;; as Lisp programs should not copy from save-match-data-internal. + '(set-match-data save-match-data-internal 'evaporate)))) + +(defun match-string (num &optional string) + "Return string of text matched by last search. +NUM specifies which parenthesized expression in the last regexp. + Value is nil if NUMth pair didn't match, or there were less than NUM pairs. +Zero means the entire text matched by the whole regexp or whole string. +STRING should be given if the last search was by `string-match' on STRING. +If STRING is nil, the current buffer should be the same buffer +the search/match was performed in." + (if (match-beginning num) + (if string + (substring string (match-beginning num) (match-end num)) + (buffer-substring (match-beginning num) (match-end num))))) + +(defun match-string-no-properties (num &optional string) + "Return string of text matched by last search, without text properties. +NUM specifies which parenthesized expression in the last regexp. + Value is nil if NUMth pair didn't match, or there were less than NUM pairs. +Zero means the entire text matched by the whole regexp or whole string. +STRING should be given if the last search was by `string-match' on STRING. +If STRING is nil, the current buffer should be the same buffer +the search/match was performed in." + (if (match-beginning num) + (if string + (substring-no-properties string (match-beginning num) + (match-end num)) + (buffer-substring-no-properties (match-beginning num) + (match-end num))))) + + +(defun match-substitute-replacement (replacement + &optional fixedcase literal string subexp) + "Return REPLACEMENT as it will be inserted by `replace-match'. +In other words, all back-references in the form `\\&' and `\\N' +are substituted with actual strings matched by the last search. +Optional FIXEDCASE, LITERAL, STRING and SUBEXP have the same +meaning as for `replace-match'." 
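+  ;; Illustrative example, not from the original source:
+  ;;   (string-match "c\\(a+\\)t" "a caat")
+  ;;   (match-substitute-replacement "b\\1d" nil nil "a caat")  ; => "baad"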
+ (let ((match (match-string 0 string))) + (save-match-data + (set-match-data (mapcar (lambda (x) + (if (numberp x) + (- x (match-beginning 0)) + x)) + (match-data t))) + (replace-match replacement fixedcase literal match subexp)))) + + +(defun looking-back (regexp &optional limit greedy) + "Return non-nil if text before point matches regular expression REGEXP. +Like `looking-at' except matches before point, and is slower. +LIMIT if non-nil speeds up the search by specifying a minimum +starting position, to avoid checking matches that would start +before LIMIT. + +If GREEDY is non-nil, extend the match backwards as far as +possible, stopping when a single additional previous character +cannot be part of a match for REGEXP. When the match is +extended, its starting position is allowed to occur before +LIMIT. + +As a general recommendation, try to avoid using `looking-back' +wherever possible, since it is slow." + (let ((start (point)) + (pos + (save-excursion + (and (re-search-backward (concat "\\(?:" regexp "\\)\\=") limit t) + (point))))) + (if (and greedy pos) + (save-restriction + (narrow-to-region (point-min) start) + (while (and (> pos (point-min)) + (save-excursion + (goto-char pos) + (backward-char 1) + (looking-at (concat "\\(?:" regexp "\\)\\'")))) + (setq pos (1- pos))) + (save-excursion + (goto-char pos) + (looking-at (concat "\\(?:" regexp "\\)\\'"))))) + (not (null pos)))) + +(defsubst looking-at-p (regexp) + "\ +Same as `looking-at' except this function does not change the match data." + (let ((inhibit-changing-match-data t)) + (looking-at regexp))) + +(defsubst string-match-p (regexp string &optional start) + "\ +Same as `string-match' except this function does not change the match data." + (let ((inhibit-changing-match-data t)) + (string-match regexp string start))) + +(defun subregexp-context-p (regexp pos &optional start) + "Return non-nil if POS is in a normal subregexp context in REGEXP. +A subregexp context is one where a sub-regexp can appear. +A non-subregexp context is for example within brackets, or within a +repetition bounds operator `\\=\\{...\\}', or right after a `\\'. +If START is non-nil, it should be a position in REGEXP, smaller +than POS, and known to be in a subregexp context." + ;; Here's one possible implementation, with the great benefit that it + ;; reuses the regexp-matcher's own parser, so it understands all the + ;; details of the syntax. A disadvantage is that it needs to match the + ;; error string. + (condition-case err + (progn + (string-match (substring regexp (or start 0) pos) "") + t) + (invalid-regexp + (not (member (cadr err) '("Unmatched [ or [^" + "Unmatched \\{" + "Trailing backslash"))))) + ;; An alternative implementation: + ;; (defconst re-context-re + ;; (let* ((harmless-ch "[^\\[]") + ;; (harmless-esc "\\\\[^{]") + ;; (class-harmless-ch "[^][]") + ;; (class-lb-harmless "[^]:]") + ;; (class-lb-colon-maybe-charclass ":\\([a-z]+:]\\)?") + ;; (class-lb (concat "\\[\\(" class-lb-harmless + ;; "\\|" class-lb-colon-maybe-charclass "\\)")) + ;; (class + ;; (concat "\\[^?]?" 
+ ;; "\\(" class-harmless-ch + ;; "\\|" class-lb "\\)*" + ;; "\\[?]")) ; special handling for bare [ at end of re + ;; (braces "\\\\{[0-9,]+\\\\}")) + ;; (concat "\\`\\(" harmless-ch "\\|" harmless-esc + ;; "\\|" class "\\|" braces "\\)*\\'")) + ;; "Matches any prefix that corresponds to a normal subregexp context.") + ;; (string-match re-context-re (substring regexp (or start 0) pos)) + ) + +;;;; split-string + +(defconst split-string-default-separators "[ \f\t\n\r\v]+" + "The default value of separators for `split-string'. + +A regexp matching strings of whitespace. May be locale-dependent +\(as yet unimplemented). Should not match non-breaking spaces. + +Warning: binding this to a different value and using it as default is +likely to have undesired semantics.") + +;; The specification says that if both SEPARATORS and OMIT-NULLS are +;; defaulted, OMIT-NULLS should be treated as t. Simplifying the logical +;; expression leads to the equivalent implementation that if SEPARATORS +;; is defaulted, OMIT-NULLS is treated as t. +(defun split-string (string &optional separators omit-nulls trim) + "Split STRING into substrings bounded by matches for SEPARATORS. + +The beginning and end of STRING, and each match for SEPARATORS, are +splitting points. The substrings matching SEPARATORS are removed, and +the substrings between the splitting points are collected as a list, +which is returned. + +If SEPARATORS is non-nil, it should be a regular expression matching text +which separates, but is not part of, the substrings. If nil it defaults to +`split-string-default-separators', normally \"[ \\f\\t\\n\\r\\v]+\", and +OMIT-NULLS is forced to t. + +If OMIT-NULLS is t, zero-length substrings are omitted from the list (so +that for the default value of SEPARATORS leading and trailing whitespace +are effectively trimmed). If nil, all zero-length substrings are retained, +which correctly parses CSV format, for example. + +If TRIM is non-nil, it should be a regular expression to match +text to trim from the beginning and end of each substring. If trimming +makes the substring empty, it is treated as null. + +If you want to trim whitespace from the substrings, the reliably correct +way is using TRIM. Making SEPARATORS match that whitespace gives incorrect +results when there is whitespace at the start or end of STRING. If you +see such calls to `split-string', please fix them. + +Note that the effect of `(split-string STRING)' is the same as +`(split-string STRING split-string-default-separators t)'. In the rare +case that you wish to retain zero-length substrings when splitting on +whitespace, use `(split-string STRING split-string-default-separators)'. + +Modifies the match data; use `save-match-data' if necessary." + (let* ((keep-nulls (not (if separators omit-nulls t))) + (rexp (or separators split-string-default-separators)) + (start 0) + this-start this-end + notfirst + (list nil) + (push-one + ;; Push the substring in range THIS-START to THIS-END + ;; onto LIST, trimming it and perhaps discarding it. + (lambda () + (when trim + ;; Discard the trim from start of this substring. + (let ((tem (string-match trim string this-start))) + (and (eq tem this-start) + (setq this-start (match-end 0))))) + + (when (or keep-nulls (< this-start this-end)) + (let ((this (substring string this-start this-end))) + + ;; Discard the trim from end of this substring. 
+ (when trim + (let ((tem (string-match (concat trim "\\'") this 0))) + (and tem (< tem (length this)) + (setq this (substring this 0 tem))))) + + ;; Trimming could make it empty; check again. + (when (or keep-nulls (> (length this) 0)) + (push this list))))))) + + (while (and (string-match rexp string + (if (and notfirst + (= start (match-beginning 0)) + (< start (length string))) + (1+ start) start)) + (< start (length string))) + (setq notfirst t) + (setq this-start start this-end (match-beginning 0) + start (match-end 0)) + + (funcall push-one)) + + ;; Handle the substring at the end of STRING. + (setq this-start start this-end (length string)) + (funcall push-one) + + (nreverse list))) + +(defun combine-and-quote-strings (strings &optional separator) + "Concatenate the STRINGS, adding the SEPARATOR (default \" \"). +This tries to quote the strings to avoid ambiguity such that + (split-string-and-unquote (combine-and-quote-strings strs)) == strs +Only some SEPARATORs will work properly." + (let* ((sep (or separator " ")) + (re (concat "[\\\"]" "\\|" (regexp-quote sep)))) + (mapconcat + (lambda (str) + (if (string-match re str) + (concat "\"" (replace-regexp-in-string "[\\\"]" "\\\\\\&" str) "\"") + str)) + strings sep))) + +(defun split-string-and-unquote (string &optional separator) + "Split the STRING into a list of strings. +It understands Emacs Lisp quoting within STRING, such that + (split-string-and-unquote (combine-and-quote-strings strs)) == strs +The SEPARATOR regexp defaults to \"\\s-+\"." + (let ((sep (or separator "\\s-+")) + (i (string-match "\"" string))) + (if (null i) + (split-string string sep t) ; no quoting: easy + (append (unless (eq i 0) (split-string (substring string 0 i) sep t)) + (let ((rfs (read-from-string string i))) + (cons (car rfs) + (split-string-and-unquote (substring string (cdr rfs)) + sep))))))) + + +;;;; Replacement in strings. + +(defun subst-char-in-string (fromchar tochar string &optional inplace) + "Replace FROMCHAR with TOCHAR in STRING each time it occurs. +Unless optional argument INPLACE is non-nil, return a new string." + (let ((i (length string)) + (newstr (if inplace string (copy-sequence string)))) + (while (> i 0) + (setq i (1- i)) + (if (eq (aref newstr i) fromchar) + (aset newstr i tochar))) + newstr)) + +(defun replace-regexp-in-string (regexp rep string &optional + fixedcase literal subexp start) + "Replace all matches for REGEXP with REP in STRING. + +Return a new string containing the replacements. + +Optional arguments FIXEDCASE, LITERAL and SUBEXP are like the +arguments with the same names of function `replace-match'. If START +is non-nil, start replacements at that index in STRING. + +REP is either a string used as the NEWTEXT arg of `replace-match' or a +function. If it is a function, it is called with the actual text of each +match, and its value is used as the replacement text. When REP is called, +the match data are the result of matching REGEXP against a substring +of STRING. + +To replace only the first match (if any), make REGEXP match up to \\' +and replace a sub-expression, e.g. + (replace-regexp-in-string \"\\\\(foo\\\\).*\\\\'\" \"bar\" \" foo foo\" nil nil 1) + => \" bar foo\"" + + ;; To avoid excessive consing from multiple matches in long strings, + ;; don't just call `replace-match' continually. Walk down the + ;; string looking for matches of REGEXP and building up a (reversed) + ;; list MATCHES. This comprises segments of STRING which weren't + ;; matched interspersed with replacements for segments that were. 
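+  ;; [Editor's illustration, not in the upstream file] For
+  ;; (replace-regexp-in-string "o" "0" "foo bar") the loop collects
+  ;; MATCHES as ("0" "" "0" "f"), the leftover " bar" is consed on, and
+  ;; the final (apply #'concat (nreverse ...)) yields "f00 bar".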
+ ;; [For a `large' number of replacements it's more efficient to + ;; operate in a temporary buffer; we can't tell from the function's + ;; args whether to choose the buffer-based implementation, though it + ;; might be reasonable to do so for long enough STRING.] + (let ((l (length string)) + (start (or start 0)) + matches str mb me) + (save-match-data + (while (and (< start l) (string-match regexp string start)) + (setq mb (match-beginning 0) + me (match-end 0)) + ;; If we matched the empty string, make sure we advance by one char + (when (= me mb) (setq me (min l (1+ mb)))) + ;; Generate a replacement for the matched substring. + ;; Operate only on the substring to minimize string consing. + ;; Set up match data for the substring for replacement; + ;; presumably this is likely to be faster than munging the + ;; match data directly in Lisp. + (string-match regexp (setq str (substring string mb me))) + (setq matches + (cons (replace-match (if (stringp rep) + rep + (funcall rep (match-string 0 str))) + fixedcase literal str subexp) + (cons (substring string start mb) ; unmatched prefix + matches))) + (setq start me)) + ;; Reconstruct a string from the pieces. + (setq matches (cons (substring string start l) matches)) ; leftover + (apply #'concat (nreverse matches))))) + +(defun string-prefix-p (prefix string &optional ignore-case) + "Return non-nil if PREFIX is a prefix of STRING. +If IGNORE-CASE is non-nil, the comparison is done without paying attention +to case differences." + (let ((prefix-length (length prefix))) + (if (> prefix-length (length string)) nil + (eq t (compare-strings prefix 0 prefix-length string + 0 prefix-length ignore-case))))) + +(defun string-suffix-p (suffix string &optional ignore-case) + "Return non-nil if SUFFIX is a suffix of STRING. +If IGNORE-CASE is non-nil, the comparison is done without paying +attention to case differences." + (let ((start-pos (- (length string) (length suffix)))) + (and (>= start-pos 0) + (eq t (compare-strings suffix nil nil + string start-pos nil ignore-case))))) + +(defun bidi-string-mark-left-to-right (str) + "Return a string that can be safely inserted in left-to-right text. + +Normally, inserting a string with right-to-left (RTL) script into +a buffer may cause some subsequent text to be displayed as part +of the RTL segment (usually this affects punctuation characters). +This function returns a string which displays as STR but forces +subsequent text to be displayed as left-to-right. + +If STR contains any RTL character, this function returns a string +consisting of STR followed by an invisible left-to-right mark +\(LRM) character. Otherwise, it returns STR." + (unless (stringp str) + (signal 'wrong-type-argument (list 'stringp str))) + (if (string-match "\\cR" str) + (concat str (propertize (string ?\x200e) 'invisible t)) + str)) + +;;;; Specifying things to do later. + +(defun load-history-regexp (file) + "Form a regexp to find FILE in `load-history'. +FILE, a string, is described in the function `eval-after-load'." + (if (file-name-absolute-p file) + (setq file (file-truename file))) + (concat (if (file-name-absolute-p file) "\\`" "\\(\\`\\|/\\)") + (regexp-quote file) + (if (file-name-extension file) + "" + ;; Note: regexp-opt can't be used here, since we need to call + ;; this before Emacs has been fully started. 
2006-05-21 + (concat "\\(" (mapconcat 'regexp-quote load-suffixes "\\|") "\\)?")) + "\\(" (mapconcat 'regexp-quote jka-compr-load-suffixes "\\|") + "\\)?\\'")) + +(defun load-history-filename-element (file-regexp) + "Get the first elt of `load-history' whose car matches FILE-REGEXP. +Return nil if there isn't one." + (let* ((loads load-history) + (load-elt (and loads (car loads)))) + (save-match-data + (while (and loads + (or (null (car load-elt)) + (not (string-match file-regexp (car load-elt))))) + (setq loads (cdr loads) + load-elt (and loads (car loads))))) + load-elt)) + +(put 'eval-after-load 'lisp-indent-function 1) +(defun eval-after-load (file form) + "Arrange that if FILE is loaded, FORM will be run immediately afterwards. +If FILE is already loaded, evaluate FORM right now. +FORM can be an Elisp expression (in which case it's passed to `eval'), +or a function (in which case it's passed to `funcall' with no argument). + +If a matching file is loaded again, FORM will be evaluated again. + +If FILE is a string, it may be either an absolute or a relative file +name, and may have an extension (e.g. \".el\") or may lack one, and +additionally may or may not have an extension denoting a compressed +format (e.g. \".gz\"). + +When FILE is absolute, this first converts it to a true name by chasing +symbolic links. Only a file of this name (see next paragraph regarding +extensions) will trigger the evaluation of FORM. When FILE is relative, +a file whose absolute true name ends in FILE will trigger evaluation. + +When FILE lacks an extension, a file name with any extension will trigger +evaluation. Otherwise, its extension must match FILE's. A further +extension for a compressed format (e.g. \".gz\") on FILE will not affect +this name matching. + +Alternatively, FILE can be a feature (i.e. a symbol), in which case FORM +is evaluated at the end of any file that `provide's this feature. +If the feature is provided when evaluating code not associated with a +file, FORM is evaluated immediately after the provide statement. + +Usually FILE is just a library name like \"font-lock\" or a feature name +like 'font-lock. + +This function makes or adds to an entry on `after-load-alist'." + (declare (compiler-macro + (lambda (whole) + (if (eq 'quote (car-safe form)) + ;; Quote with lambda so the compiler can look inside. + `(eval-after-load ,file (lambda () ,(nth 1 form))) + whole)))) + ;; Add this FORM into after-load-alist (regardless of whether we'll be + ;; evaluating it now). + (let* ((regexp-or-feature + (if (stringp file) + (setq file (purecopy (load-history-regexp file))) + file)) + (elt (assoc regexp-or-feature after-load-alist)) + (func + (if (functionp form) form + ;; Try to use the "current" lexical/dynamic mode for `form'. + (eval `(lambda () ,form) lexical-binding)))) + (unless elt + (setq elt (list regexp-or-feature)) + (push elt after-load-alist)) + ;; Is there an already loaded file whose name (or `provide' name) + ;; matches FILE? + (prog1 (if (if (stringp file) + (load-history-filename-element regexp-or-feature) + (featurep file)) + (funcall func)) + (let ((delayed-func + (if (not (symbolp regexp-or-feature)) func + ;; For features, the after-load-alist elements get run when + ;; `provide' is called rather than at the end of the file. + ;; So add an indirection to make sure that `func' is really run + ;; "after-load" in case the provide call happens early. + (lambda () + (if (not load-file-name) + ;; Not being provided from a file, run func right now. 
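+                    ;; [Editor's note, not in the upstream file] When
+                    ;; `load-file-name' is nil the `provide' happened
+                    ;; outside of a `load', so FUNC runs immediately;
+                    ;; otherwise the code below registers a one-shot
+                    ;; member of `after-load-functions' that calls FUNC
+                    ;; once the file currently being loaded has finished.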
+ (funcall func) + (let ((lfn load-file-name) + ;; Don't use letrec, because equal (in + ;; add/remove-hook) would get trapped in a cycle. + (fun (make-symbol "eval-after-load-helper"))) + (fset fun (lambda (file) + (when (equal file lfn) + (remove-hook 'after-load-functions fun) + (funcall func)))) + (add-hook 'after-load-functions fun 'append))))))) + ;; Add FORM to the element unless it's already there. + (unless (member delayed-func (cdr elt)) + (nconc elt (list delayed-func))))))) + +(defmacro with-eval-after-load (file &rest body) + "Execute BODY after FILE is loaded. +FILE is normally a feature name, but it can also be a file name, +in case that file does not provide any feature." + (declare (indent 1) (debug t)) + `(eval-after-load ,file (lambda () ,@body))) + +(defvar after-load-functions nil + "Special hook run after loading a file. +Each function there is called with a single argument, the absolute +name of the file just loaded.") + +(defun do-after-load-evaluation (abs-file) + "Evaluate all `eval-after-load' forms, if any, for ABS-FILE. +ABS-FILE, a string, should be the absolute true name of a file just loaded. +This function is called directly from the C code." + ;; Run the relevant eval-after-load forms. + (dolist (a-l-element after-load-alist) + (when (and (stringp (car a-l-element)) + (string-match-p (car a-l-element) abs-file)) + ;; discard the file name regexp + (mapc #'funcall (cdr a-l-element)))) + ;; Complain when the user uses obsolete files. + (when (save-match-data + (and (string-match "/obsolete/\\([^/]*\\)\\'" abs-file) + (not (equal "loaddefs.el" (match-string 1 abs-file))))) + ;; Maybe we should just use display-warning? This seems yucky... + (let* ((file (file-name-nondirectory abs-file)) + (msg (format "Package %s is obsolete!" + (substring file 0 + (string-match "\\.elc?\\>" file))))) + ;; Cribbed from cl--compiling-file. + (if (and (boundp 'byte-compile--outbuffer) + (bufferp (symbol-value 'byte-compile--outbuffer)) + (equal (buffer-name (symbol-value 'byte-compile--outbuffer)) + " *Compiler Output*")) + ;; Don't warn about obsolete files using other obsolete files. + (unless (and (stringp byte-compile-current-file) + (string-match-p "/obsolete/[^/]*\\'" + (expand-file-name + byte-compile-current-file + byte-compile-root-dir))) + (byte-compile-log-warning msg)) + (run-with-timer 0 nil + (lambda (msg) + (message "%s" msg)) + msg)))) + + ;; Finally, run any other hook. + (run-hook-with-args 'after-load-functions abs-file)) + +(defun eval-next-after-load (file) + "Read the following input sexp, and run it whenever FILE is loaded. +This makes or adds to an entry on `after-load-alist'. +FILE should be the name of a library, with no directory name." + (declare (obsolete eval-after-load "23.2")) + (eval-after-load file (read))) + + +(defun display-delayed-warnings () + "Display delayed warnings from `delayed-warnings-list'. +Used from `delayed-warnings-hook' (which see)." + (dolist (warning (nreverse delayed-warnings-list)) + (apply 'display-warning warning)) + (setq delayed-warnings-list nil)) + +(defun collapse-delayed-warnings () + "Remove duplicates from `delayed-warnings-list'. +Collapse identical adjacent warnings into one (plus count). +Used from `delayed-warnings-hook' (which see)." 
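+  ;; [Editor's illustration, not in the upstream file] Given a
+  ;; `delayed-warnings-list' of ((foo "a") (foo "a") (bar "b")), the
+  ;; result is ((foo "a [2 times]") (bar "b")).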
+ (let ((count 1) + collapsed warning) + (while delayed-warnings-list + (setq warning (pop delayed-warnings-list)) + (if (equal warning (car delayed-warnings-list)) + (setq count (1+ count)) + (when (> count 1) + (setcdr warning (cons (format "%s [%d times]" (cadr warning) count) + (cddr warning))) + (setq count 1)) + (push warning collapsed))) + (setq delayed-warnings-list (nreverse collapsed)))) + +;; At present this is only used for Emacs internals. +;; Ref http://lists.gnu.org/archive/html/emacs-devel/2012-02/msg00085.html +(defvar delayed-warnings-hook '(collapse-delayed-warnings + display-delayed-warnings) + "Normal hook run to process and display delayed warnings. +By default, this hook contains functions to consolidate the +warnings listed in `delayed-warnings-list', display them, and set +`delayed-warnings-list' back to nil.") + +(defun delay-warning (type message &optional level buffer-name) + "Display a delayed warning. +Aside from going through `delayed-warnings-list', this is equivalent +to `display-warning'." + (push (list type message level buffer-name) delayed-warnings-list)) + + +;;;; invisibility specs + +(defun add-to-invisibility-spec (element) + "Add ELEMENT to `buffer-invisibility-spec'. +See documentation for `buffer-invisibility-spec' for the kind of elements +that can be added." + (if (eq buffer-invisibility-spec t) + (setq buffer-invisibility-spec (list t))) + (setq buffer-invisibility-spec + (cons element buffer-invisibility-spec))) + +(defun remove-from-invisibility-spec (element) + "Remove ELEMENT from `buffer-invisibility-spec'." + (if (consp buffer-invisibility-spec) + (setq buffer-invisibility-spec + (delete element buffer-invisibility-spec)))) + +;;;; Syntax tables. + +(defmacro with-syntax-table (table &rest body) + "Evaluate BODY with syntax table of current buffer set to TABLE. +The syntax table of the current buffer is saved, BODY is evaluated, and the +saved table is restored, even in case of an abnormal exit. +Value is what BODY returns." + (declare (debug t) (indent 1)) + (let ((old-table (make-symbol "table")) + (old-buffer (make-symbol "buffer"))) + `(let ((,old-table (syntax-table)) + (,old-buffer (current-buffer))) + (unwind-protect + (progn + (set-syntax-table ,table) + ,@body) + (save-current-buffer + (set-buffer ,old-buffer) + (set-syntax-table ,old-table)))))) + +(defun make-syntax-table (&optional oldtable) + "Return a new syntax table. +Create a syntax table which inherits from OLDTABLE (if non-nil) or +from `standard-syntax-table' otherwise." + (let ((table (make-char-table 'syntax-table nil))) + (set-char-table-parent table (or oldtable (standard-syntax-table))) + table)) + +(defun syntax-after (pos) + "Return the raw syntax descriptor for the char after POS. +If POS is outside the buffer's accessible portion, return nil." + (unless (or (< pos (point-min)) (>= pos (point-max))) + (let ((st (if parse-sexp-lookup-properties + (get-char-property pos 'syntax-table)))) + (if (consp st) st + (aref (or st (syntax-table)) (char-after pos)))))) + +(defun syntax-class (syntax) + "Return the code for the syntax class described by SYNTAX. + +SYNTAX should be a raw syntax descriptor; the return value is a +integer which encodes the corresponding syntax class. See Info +node `(elisp)Syntax Table Internals' for a list of codes. + +If SYNTAX is nil, return nil." + (and syntax (logand (car syntax) 65535))) + +;; Utility motion commands + +;; Whitespace + +(defun forward-whitespace (arg) + "Move point to the end of the next sequence of whitespace chars. 
+Each such sequence may be a single newline, or a sequence of +consecutive space and/or tab characters. +With prefix argument ARG, do it ARG times if positive, or move +backwards ARG times if negative." + (interactive "^p") + (if (natnump arg) + (re-search-forward "[ \t]+\\|\n" nil 'move arg) + (while (< arg 0) + (if (re-search-backward "[ \t]+\\|\n" nil 'move) + (or (eq (char-after (match-beginning 0)) ?\n) + (skip-chars-backward " \t"))) + (setq arg (1+ arg))))) + +;; Symbols + +(defun forward-symbol (arg) + "Move point to the next position that is the end of a symbol. +A symbol is any sequence of characters that are in either the +word constituent or symbol constituent syntax class. +With prefix argument ARG, do it ARG times if positive, or move +backwards ARG times if negative." + (interactive "^p") + (if (natnump arg) + (re-search-forward "\\(\\sw\\|\\s_\\)+" nil 'move arg) + (while (< arg 0) + (if (re-search-backward "\\(\\sw\\|\\s_\\)+" nil 'move) + (skip-syntax-backward "w_")) + (setq arg (1+ arg))))) + +;; Syntax blocks + +(defun forward-same-syntax (&optional arg) + "Move point past all characters with the same syntax class. +With prefix argument ARG, do it ARG times if positive, or move +backwards ARG times if negative." + (interactive "^p") + (or arg (setq arg 1)) + (while (< arg 0) + (skip-syntax-backward + (char-to-string (char-syntax (char-before)))) + (setq arg (1+ arg))) + (while (> arg 0) + (skip-syntax-forward (char-to-string (char-syntax (char-after)))) + (setq arg (1- arg)))) + + +;;;; Text clones + +(defvar text-clone--maintaining nil) + +(defun text-clone--maintain (ol1 after beg end &optional _len) + "Propagate the changes made under the overlay OL1 to the other clones. +This is used on the `modification-hooks' property of text clones." + (when (and after (not undo-in-progress) + (not text-clone--maintaining) + (overlay-start ol1)) + (let ((margin (if (overlay-get ol1 'text-clone-spreadp) 1 0))) + (setq beg (max beg (+ (overlay-start ol1) margin))) + (setq end (min end (- (overlay-end ol1) margin))) + (when (<= beg end) + (save-excursion + (when (overlay-get ol1 'text-clone-syntax) + ;; Check content of the clone's text. + (let ((cbeg (+ (overlay-start ol1) margin)) + (cend (- (overlay-end ol1) margin))) + (goto-char cbeg) + (save-match-data + (if (not (re-search-forward + (overlay-get ol1 'text-clone-syntax) cend t)) + ;; Mark the overlay for deletion. + (setq end cbeg) + (when (< (match-end 0) cend) + ;; Shrink the clone at its end. + (setq end (min end (match-end 0))) + (move-overlay ol1 (overlay-start ol1) + (+ (match-end 0) margin))) + (when (> (match-beginning 0) cbeg) + ;; Shrink the clone at its beginning. + (setq beg (max (match-beginning 0) beg)) + (move-overlay ol1 (- (match-beginning 0) margin) + (overlay-end ol1))))))) + ;; Now go ahead and update the clones. 
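+          ;; [Editor's note, not in the upstream file] HEAD and TAIL below
+          ;; are the unchanged character counts before and after the edited
+          ;; region inside OL1; the same relative region of every other
+          ;; clone is then replaced by STR, the new text taken from OL1.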
+ (let ((head (- beg (overlay-start ol1))) + (tail (- (overlay-end ol1) end)) + (str (buffer-substring beg end)) + (nothing-left t) + (text-clone--maintaining t)) + (dolist (ol2 (overlay-get ol1 'text-clones)) + (let ((oe (overlay-end ol2))) + (unless (or (eq ol1 ol2) (null oe)) + (setq nothing-left nil) + (let ((mod-beg (+ (overlay-start ol2) head))) + ;;(overlay-put ol2 'modification-hooks nil) + (goto-char (- (overlay-end ol2) tail)) + (unless (> mod-beg (point)) + (save-excursion (insert str)) + (delete-region mod-beg (point))) + ;;(overlay-put ol2 'modification-hooks '(text-clone--maintain)) + )))) + (if nothing-left (delete-overlay ol1)))))))) + +(defun text-clone-create (start end &optional spreadp syntax) + "Create a text clone of START...END at point. +Text clones are chunks of text that are automatically kept identical: +changes done to one of the clones will be immediately propagated to the other. + +The buffer's content at point is assumed to be already identical to +the one between START and END. +If SYNTAX is provided it's a regexp that describes the possible text of +the clones; the clone will be shrunk or killed if necessary to ensure that +its text matches the regexp. +If SPREADP is non-nil it indicates that text inserted before/after the +clone should be incorporated in the clone." + ;; To deal with SPREADP we can either use an overlay with `nil t' along + ;; with insert-(behind|in-front-of)-hooks or use a slightly larger overlay + ;; (with a one-char margin at each end) with `t nil'. + ;; We opted for a larger overlay because it behaves better in the case + ;; where the clone is reduced to the empty string (we want the overlay to + ;; stay when the clone's content is the empty string and we want to use + ;; `evaporate' to make sure those overlays get deleted when needed). + ;; + (let* ((pt-end (+ (point) (- end start))) + (start-margin (if (or (not spreadp) (bobp) (<= start (point-min))) + 0 1)) + (end-margin (if (or (not spreadp) + (>= pt-end (point-max)) + (>= start (point-max))) + 0 1)) + ;; FIXME: Reuse overlays at point to extend dups! + (ol1 (make-overlay (- start start-margin) (+ end end-margin) nil t)) + (ol2 (make-overlay (- (point) start-margin) (+ pt-end end-margin) nil t)) + (dups (list ol1 ol2))) + (overlay-put ol1 'modification-hooks '(text-clone--maintain)) + (when spreadp (overlay-put ol1 'text-clone-spreadp t)) + (when syntax (overlay-put ol1 'text-clone-syntax syntax)) + ;;(overlay-put ol1 'face 'underline) + (overlay-put ol1 'evaporate t) + (overlay-put ol1 'text-clones dups) + ;; + (overlay-put ol2 'modification-hooks '(text-clone--maintain)) + (when spreadp (overlay-put ol2 'text-clone-spreadp t)) + (when syntax (overlay-put ol2 'text-clone-syntax syntax)) + ;;(overlay-put ol2 'face 'underline) + (overlay-put ol2 'evaporate t) + (overlay-put ol2 'text-clones dups))) + +;;;; Mail user agents. + +;; Here we include just enough for other packages to be able +;; to define them. + +(defun define-mail-user-agent (symbol composefunc sendfunc + &optional abortfunc hookvar) + "Define a symbol to identify a mail-sending package for `mail-user-agent'. + +SYMBOL can be any Lisp symbol. Its function definition and/or +value as a variable do not matter for this usage; we use only certain +properties on its property list, to encode the rest of the arguments. + +COMPOSEFUNC is program callable function that composes an outgoing +mail message buffer. This function should set up the basics of the +buffer without requiring user interaction. 
It should populate the +standard mail headers, leaving the `to:' and `subject:' headers blank +by default. + +COMPOSEFUNC should accept several optional arguments--the same +arguments that `compose-mail' takes. See that function's documentation. + +SENDFUNC is the command a user would run to send the message. + +Optional ABORTFUNC is the command a user would run to abort the +message. For mail packages that don't have a separate abort function, +this can be `kill-buffer' (the equivalent of omitting this argument). + +Optional HOOKVAR is a hook variable that gets run before the message +is actually sent. Callers that use the `mail-user-agent' may +install a hook function temporarily on this hook variable. +If HOOKVAR is nil, `mail-send-hook' is used. + +The properties used on SYMBOL are `composefunc', `sendfunc', +`abortfunc', and `hookvar'." + (put symbol 'composefunc composefunc) + (put symbol 'sendfunc sendfunc) + (put symbol 'abortfunc (or abortfunc 'kill-buffer)) + (put symbol 'hookvar (or hookvar 'mail-send-hook))) + +(defvar called-interactively-p-functions nil + "Special hook called to skip special frames in `called-interactively-p'. +The functions are called with 3 arguments: (I FRAME1 FRAME2), +where FRAME1 is a \"current frame\", FRAME2 is the next frame, +I is the index of the frame after FRAME2. It should return nil +if those frames don't seem special and otherwise, it should return +the number of frames to skip (minus 1).") + +(defconst internal--funcall-interactively + (symbol-function 'funcall-interactively)) + +(defun called-interactively-p (&optional kind) + "Return t if the containing function was called by `call-interactively'. +If KIND is `interactive', then only return t if the call was made +interactively by the user, i.e. not in `noninteractive' mode nor +when `executing-kbd-macro'. +If KIND is `any', on the other hand, it will return t for any kind of +interactive call, including being called as the binding of a key or +from a keyboard macro, even in `noninteractive' mode. + +This function is very brittle, it may fail to return the intended result when +the code is debugged, advised, or instrumented in some form. Some macros and +special forms (such as `condition-case') may also sometimes wrap their bodies +in a `lambda', so any call to `called-interactively-p' from those bodies will +indicate whether that lambda (rather than the surrounding function) was called +interactively. + +Instead of using this function, it is cleaner and more reliable to give your +function an extra optional argument whose `interactive' spec specifies +non-nil unconditionally (\"p\" is a good way to do this), or via +\(not (or executing-kbd-macro noninteractive)). + +The only known proper use of `interactive' for KIND is in deciding +whether to display a helpful message, or how to display it. If you're +thinking of using it for any other purpose, it is quite likely that +you're making a mistake. Think: what do you want to do when the +command is called from a keyboard macro?" + (declare (advertised-calling-convention (kind) "23.1")) + (when (not (and (eq kind 'interactive) + (or executing-kbd-macro noninteractive))) + (let* ((i 1) ;; 0 is the called-interactively-p frame. + frame nextframe + (get-next-frame + (lambda () + (setq frame nextframe) + (setq nextframe (backtrace-frame i 'called-interactively-p)) + ;; (message "Frame %d = %S" i nextframe) + (setq i (1+ i))))) + (funcall get-next-frame) ;; Get the first frame. 
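+      ;; [Editor's note, not in the upstream file] The loop below walks up
+      ;; the backtrace, skipping special forms, `interactive-p'/`byte-code'
+      ;; frames, and any frames that `called-interactively-p-functions'
+      ;; says to skip, until FRAME is the function that called
+      ;; `called-interactively-p' and NEXTFRAME shows how it was invoked.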
+ (while + ;; FIXME: The edebug and advice handling should be made modular and + ;; provided directly by edebug.el and nadvice.el. + (progn + ;; frame =(backtrace-frame i-2) + ;; nextframe=(backtrace-frame i-1) + (funcall get-next-frame) + ;; `pcase' would be a fairly good fit here, but it sometimes moves + ;; branches within local functions, which then messes up the + ;; `backtrace-frame' data we get, + (or + ;; Skip special forms (from non-compiled code). + (and frame (null (car frame))) + ;; Skip also `interactive-p' (because we don't want to know if + ;; interactive-p was called interactively but if it's caller was) + ;; and `byte-code' (idem; this appears in subexpressions of things + ;; like condition-case, which are wrapped in a separate bytecode + ;; chunk). + ;; FIXME: For lexical-binding code, this is much worse, + ;; because the frames look like "byte-code -> funcall -> #[...]", + ;; which is not a reliable signature. + (memq (nth 1 frame) '(interactive-p 'byte-code)) + ;; Skip package-specific stack-frames. + (let ((skip (run-hook-with-args-until-success + 'called-interactively-p-functions + i frame nextframe))) + (pcase skip + (`nil nil) + (`0 t) + (_ (setq i (+ i skip -1)) (funcall get-next-frame))))))) + ;; Now `frame' should be "the function from which we were called". + (pcase (cons frame nextframe) + ;; No subr calls `interactive-p', so we can rule that out. + (`((,_ ,(pred (lambda (f) (subrp (indirect-function f)))) . ,_) . ,_) nil) + ;; In case # without going through the + ;; `funcall-interactively' symbol (bug#3984). + (`(,_ . (t ,(pred (lambda (f) + (eq internal--funcall-interactively + (indirect-function f)))) + . ,_)) + t))))) + +(defun interactive-p () + "Return t if the containing function was run directly by user input. +This means that the function was called with `call-interactively' +\(which includes being called as the binding of a key) +and input is currently coming from the keyboard (not a keyboard macro), +and Emacs is not running in batch mode (`noninteractive' is nil). + +The only known proper use of `interactive-p' is in deciding whether to +display a helpful message, or how to display it. If you're thinking +of using it for any other purpose, it is quite likely that you're +making a mistake. Think: what do you want to do when the command is +called from a keyboard macro or in batch mode? + +To test whether your function was called with `call-interactively', +either (i) add an extra optional argument and give it an `interactive' +spec that specifies non-nil unconditionally (such as \"p\"); or (ii) +use `called-interactively-p'." + (declare (obsolete called-interactively-p "23.2")) + (called-interactively-p 'interactive)) + +(defun internal-push-keymap (keymap symbol) + (let ((map (symbol-value symbol))) + (unless (memq keymap map) + (unless (memq 'add-keymap-witness (symbol-value symbol)) + (setq map (make-composed-keymap nil (symbol-value symbol))) + (push 'add-keymap-witness (cdr map)) + (set symbol map)) + (push keymap (cdr map))))) + +(defun internal-pop-keymap (keymap symbol) + (let ((map (symbol-value symbol))) + (when (memq keymap map) + (setf (cdr map) (delq keymap (cdr map)))) + (let ((tail (cddr map))) + (and (or (null tail) (keymapp tail)) + (eq 'add-keymap-witness (nth 1 map)) + (set symbol tail))))) + +(define-obsolete-function-alias + 'set-temporary-overlay-map 'set-transient-map "24.4") + +(defun set-transient-map (map &optional keep-pred on-exit) + "Set MAP as a temporary keymap taking precedence over other keymaps. 
+Normally, MAP is used only once, to look up the very next key. +However, if the optional argument KEEP-PRED is t, MAP stays +active if a key from MAP is used. KEEP-PRED can also be a +function of no arguments: it is called from `pre-command-hook' and +if it returns non-nil, then MAP stays active. + +Optional arg ON-EXIT, if non-nil, specifies a function that is +called, with no arguments, after MAP is deactivated. + +This uses `overriding-terminal-local-map' which takes precedence over all other +keymaps. As usual, if no match for a key is found in MAP, the normal key +lookup sequence then continues. + +This returns an \"exit function\", which can be called with no argument +to deactivate this transient map, regardless of KEEP-PRED." + (let* ((clearfun (make-symbol "clear-transient-map")) + (exitfun + (lambda () + (internal-pop-keymap map 'overriding-terminal-local-map) + (remove-hook 'pre-command-hook clearfun) + (when on-exit (funcall on-exit))))) + ;; Don't use letrec, because equal (in add/remove-hook) would get trapped + ;; in a cycle. + (fset clearfun + (lambda () + (with-demoted-errors "set-transient-map PCH: %S" + (unless (cond + ((null keep-pred) nil) + ((not (eq map (cadr overriding-terminal-local-map))) + ;; There's presumably some other transient-map in + ;; effect. Wait for that one to terminate before we + ;; remove ourselves. + ;; For example, if isearch and C-u both use transient + ;; maps, then the lifetime of the C-u should be nested + ;; within isearch's, so the pre-command-hook of + ;; isearch should be suspended during the C-u one so + ;; we don't exit isearch just because we hit 1 after + ;; C-u and that 1 exits isearch whereas it doesn't + ;; exit C-u. + t) + ((eq t keep-pred) + (eq this-command + (lookup-key map (this-command-keys-vector)))) + (t (funcall keep-pred))) + (funcall exitfun))))) + (add-hook 'pre-command-hook clearfun) + (internal-push-keymap map 'overriding-terminal-local-map) + exitfun)) + +;;;; Progress reporters. + +;; Progress reporter has the following structure: +;; +;; (NEXT-UPDATE-VALUE . [NEXT-UPDATE-TIME +;; MIN-VALUE +;; MAX-VALUE +;; MESSAGE +;; MIN-CHANGE +;; MIN-TIME]) +;; +;; This weirdness is for optimization reasons: we want +;; `progress-reporter-update' to be as fast as possible, so +;; `(car reporter)' is better than `(aref reporter 0)'. +;; +;; NEXT-UPDATE-TIME is a float. While `float-time' loses a couple +;; digits of precision, it doesn't really matter here. On the other +;; hand, it greatly simplifies the code. + +(defsubst progress-reporter-update (reporter &optional value) + "Report progress of an operation in the echo area. +REPORTER should be the result of a call to `make-progress-reporter'. + +If REPORTER is a numerical progress reporter---i.e. if it was + made using non-nil MIN-VALUE and MAX-VALUE arguments to + `make-progress-reporter'---then VALUE should be a number between + MIN-VALUE and MAX-VALUE. + +If REPORTER is a non-numerical reporter, VALUE should be nil. + +This function is relatively inexpensive. If the change since +last update is too small or insufficient time has passed, it does +nothing." + (when (or (not (numberp value)) ; For pulsing reporter + (>= value (car reporter))) ; For numerical reporter + (progress-reporter-do-update reporter value))) + +(defun make-progress-reporter (message &optional min-value max-value + current-value min-change min-time) + "Return progress reporter object for use with `progress-reporter-update'. 
+ +MESSAGE is shown in the echo area, with a status indicator +appended to the end. When you call `progress-reporter-done', the +word \"done\" is printed after the MESSAGE. You can change the +MESSAGE of an existing progress reporter by calling +`progress-reporter-force-update'. + +MIN-VALUE and MAX-VALUE, if non-nil, are starting (0% complete) +and final (100% complete) states of operation; the latter should +be larger. In this case, the status message shows the percentage +progress. + +If MIN-VALUE and/or MAX-VALUE is omitted or nil, the status +message shows a \"spinning\", non-numeric indicator. + +Optional CURRENT-VALUE is the initial progress; the default is +MIN-VALUE. +Optional MIN-CHANGE is the minimal change in percents to report; +the default is 1%. +CURRENT-VALUE and MIN-CHANGE do not have any effect if MIN-VALUE +and/or MAX-VALUE are nil. + +Optional MIN-TIME specifies the minimum interval time between +echo area updates (default is 0.2 seconds.) If the function +`float-time' is not present, time is not tracked at all. If the +OS is not capable of measuring fractions of seconds, this +parameter is effectively rounded up." + (when (string-match "[[:alnum:]]\\'" message) + (setq message (concat message "..."))) + (unless min-time + (setq min-time 0.2)) + (let ((reporter + ;; Force a call to `message' now + (cons (or min-value 0) + (vector (if (and (fboundp 'float-time) + (>= min-time 0.02)) + (float-time) nil) + min-value + max-value + message + (if min-change (max (min min-change 50) 1) 1) + min-time)))) + (progress-reporter-update reporter (or current-value min-value)) + reporter)) + +(defun progress-reporter-force-update (reporter &optional value new-message) + "Report progress of an operation in the echo area unconditionally. + +The first two arguments are the same as in `progress-reporter-update'. +NEW-MESSAGE, if non-nil, sets a new message for the reporter." + (let ((parameters (cdr reporter))) + (when new-message + (aset parameters 3 new-message)) + (when (aref parameters 0) + (aset parameters 0 (float-time))) + (progress-reporter-do-update reporter value))) + +(defvar progress-reporter--pulse-characters ["-" "\\" "|" "/"] + "Characters to use for pulsing progress reporters.") + +(defun progress-reporter-do-update (reporter value) + (let* ((parameters (cdr reporter)) + (update-time (aref parameters 0)) + (min-value (aref parameters 1)) + (max-value (aref parameters 2)) + (text (aref parameters 3)) + (enough-time-passed + ;; See if enough time has passed since the last update. + (or (not update-time) + (when (>= (float-time) update-time) + ;; Calculate time for the next update + (aset parameters 0 (+ update-time (aref parameters 5))))))) + (cond ((and min-value max-value) + ;; Numerical indicator + (let* ((one-percent (/ (- max-value min-value) 100.0)) + (percentage (if (= max-value min-value) + 0 + (truncate (/ (- value min-value) + one-percent))))) + ;; Calculate NEXT-UPDATE-VALUE. If we are not printing + ;; message because not enough time has passed, use 1 + ;; instead of MIN-CHANGE. This makes delays between echo + ;; area updates closer to MIN-TIME. 
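+             ;; [Editor's illustration, not in the upstream file] With
+             ;; MIN-VALUE 0, MAX-VALUE 200 and MIN-CHANGE 1, ONE-PERCENT is
+             ;; 2.0; for VALUE 50 the percentage is 25 and the stored
+             ;; next-update value becomes (min (+ 0 (* (+ 25 1) 2.0)) 200)
+             ;; => 52.0, rounded up to 52 when VALUE is an integer.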
+ (setcar reporter + (min (+ min-value (* (+ percentage + (if enough-time-passed + ;; MIN-CHANGE + (aref parameters 4) + 1)) + one-percent)) + max-value)) + (when (integerp value) + (setcar reporter (ceiling (car reporter)))) + ;; Only print message if enough time has passed + (when enough-time-passed + (if (> percentage 0) + (message "%s%d%%" text percentage) + (message "%s" text))))) + ;; Pulsing indicator + (enough-time-passed + (let ((index (mod (1+ (car reporter)) 4)) + (message-log-max nil)) + (setcar reporter index) + (message "%s %s" + text + (aref progress-reporter--pulse-characters + index))))))) + +(defun progress-reporter-done (reporter) + "Print reporter's message followed by word \"done\" in echo area." + (message "%sdone" (aref (cdr reporter) 3))) + +(defmacro dotimes-with-progress-reporter (spec message &rest body) + "Loop a certain number of times and report progress in the echo area. +Evaluate BODY with VAR bound to successive integers running from +0, inclusive, to COUNT, exclusive. Then evaluate RESULT to get +the return value (nil if RESULT is omitted). + +At each iteration MESSAGE followed by progress percentage is +printed in the echo area. After the loop is finished, MESSAGE +followed by word \"done\" is printed. This macro is a +convenience wrapper around `make-progress-reporter' and friends. + +\(fn (VAR COUNT [RESULT]) MESSAGE BODY...)" + (declare (indent 2) (debug ((symbolp form &optional form) form body))) + (let ((temp (make-symbol "--dotimes-temp--")) + (temp2 (make-symbol "--dotimes-temp2--")) + (start 0) + (end (nth 1 spec))) + `(let ((,temp ,end) + (,(car spec) ,start) + (,temp2 (make-progress-reporter ,message ,start ,end))) + (while (< ,(car spec) ,temp) + ,@body + (progress-reporter-update ,temp2 + (setq ,(car spec) (1+ ,(car spec))))) + (progress-reporter-done ,temp2) + nil ,@(cdr (cdr spec))))) + + +;;;; Comparing version strings. + +(defconst version-separator "." + "Specify the string used to separate the version elements. + +Usually the separator is \".\", but it can be any other string.") + + +(defconst version-regexp-alist + '(("^[-_+ ]?snapshot$" . -4) + ;; treat "1.2.3-20050920" and "1.2-3" as snapshot releases + ("^[-_+]$" . -4) + ;; treat "1.2.3-CVS" as snapshot release + ("^[-_+ ]?\\(cvs\\|git\\|bzr\\|svn\\|hg\\|darcs\\)$" . -4) + ("^[-_+ ]?alpha$" . -3) + ("^[-_+ ]?beta$" . -2) + ("^[-_+ ]?\\(pre\\|rc\\)$" . -1)) + "Specify association between non-numeric version and its priority. + +This association is used to handle version string like \"1.0pre2\", +\"0.9alpha1\", etc. It's used by `version-to-list' (which see) to convert the +non-numeric part of a version string to an integer. For example: + + String Version Integer List Version + \"0.9snapshot\" (0 9 -4) + \"1.0-git\" (1 0 -4) + \"1.0pre2\" (1 0 -1 2) + \"1.0PRE2\" (1 0 -1 2) + \"22.8beta3\" (22 8 -2 3) + \"22.8 Beta3\" (22 8 -2 3) + \"0.9alpha1\" (0 9 -3 1) + \"0.9AlphA1\" (0 9 -3 1) + \"0.9 alpha\" (0 9 -3) + +Each element has the following form: + + (REGEXP . PRIORITY) + +Where: + +REGEXP regexp used to match non-numeric part of a version string. + It should begin with the `^' anchor and end with a `$' to + prevent false hits. Letter-case is ignored while matching + REGEXP. + +PRIORITY a negative integer specifying non-numeric priority of REGEXP.") + + +(defun version-to-list (ver) + "Convert version string VER into a list of integers. + +The version syntax is given by the following EBNF: + + VERSION ::= NUMBER ( SEPARATOR NUMBER )*. + + NUMBER ::= (0|1|2|3|4|5|6|7|8|9)+. 
+ + SEPARATOR ::= `version-separator' (which see) + | `version-regexp-alist' (which see). + +The NUMBER part is optional if SEPARATOR is a match for an element +in `version-regexp-alist'. + +Examples of valid version syntax: + + 1.0pre2 1.0.7.5 22.8beta3 0.9alpha1 6.9.30Beta + +Examples of invalid version syntax: + + 1.0prepre2 1.0..7.5 22.8X3 alpha3.2 .5 + +Examples of version conversion: + + Version String Version as a List of Integers + \"1.0.7.5\" (1 0 7 5) + \"1.0pre2\" (1 0 -1 2) + \"1.0PRE2\" (1 0 -1 2) + \"22.8beta3\" (22 8 -2 3) + \"22.8Beta3\" (22 8 -2 3) + \"0.9alpha1\" (0 9 -3 1) + \"0.9AlphA1\" (0 9 -3 1) + \"0.9alpha\" (0 9 -3) + \"0.9snapshot\" (0 9 -4) + \"1.0-git\" (1 0 -4) + +See documentation for `version-separator' and `version-regexp-alist'." + (or (and (stringp ver) (> (length ver) 0)) + (error "Invalid version string: '%s'" ver)) + ;; Change .x.y to 0.x.y + (if (and (>= (length ver) (length version-separator)) + (string-equal (substring ver 0 (length version-separator)) + version-separator)) + (setq ver (concat "0" ver))) + (save-match-data + (let ((i 0) + (case-fold-search t) ; ignore case in matching + lst s al) + (while (and (setq s (string-match "[0-9]+" ver i)) + (= s i)) + ;; handle numeric part + (setq lst (cons (string-to-number (substring ver i (match-end 0))) + lst) + i (match-end 0)) + ;; handle non-numeric part + (when (and (setq s (string-match "[^0-9]+" ver i)) + (= s i)) + (setq s (substring ver i (match-end 0)) + i (match-end 0)) + ;; handle alpha, beta, pre, etc. separator + (unless (string= s version-separator) + (setq al version-regexp-alist) + (while (and al (not (string-match (caar al) s))) + (setq al (cdr al))) + (cond (al + (push (cdar al) lst)) + ;; Convert 22.3a to 22.3.1, 22.3b to 22.3.2, etc. + ((string-match "^[-_+ ]?\\([a-zA-Z]\\)$" s) + (push (- (aref (downcase (match-string 1 s)) 0) ?a -1) + lst)) + (t (error "Invalid version syntax: '%s'" ver)))))) + (if (null lst) + (error "Invalid version syntax: '%s'" ver) + (nreverse lst))))) + + +(defun version-list-< (l1 l2) + "Return t if L1, a list specification of a version, is lower than L2. + +Note that a version specified by the list (1) is equal to (1 0), +\(1 0 0), (1 0 0 0), etc. That is, the trailing zeros are insignificant. +Also, a version given by the list (1) is higher than (1 -1), which in +turn is higher than (1 -2), which is higher than (1 -3)." + (while (and l1 l2 (= (car l1) (car l2))) + (setq l1 (cdr l1) + l2 (cdr l2))) + (cond + ;; l1 not null and l2 not null + ((and l1 l2) (< (car l1) (car l2))) + ;; l1 null and l2 null ==> l1 length = l2 length + ((and (null l1) (null l2)) nil) + ;; l1 not null and l2 null ==> l1 length > l2 length + (l1 (< (version-list-not-zero l1) 0)) + ;; l1 null and l2 not null ==> l2 length > l1 length + (t (< 0 (version-list-not-zero l2))))) + + +(defun version-list-= (l1 l2) + "Return t if L1, a list specification of a version, is equal to L2. + +Note that a version specified by the list (1) is equal to (1 0), +\(1 0 0), (1 0 0 0), etc. That is, the trailing zeros are insignificant. +Also, a version given by the list (1) is higher than (1 -1), which in +turn is higher than (1 -2), which is higher than (1 -3)." 
+ (while (and l1 l2 (= (car l1) (car l2))) + (setq l1 (cdr l1) + l2 (cdr l2))) + (cond + ;; l1 not null and l2 not null + ((and l1 l2) nil) + ;; l1 null and l2 null ==> l1 length = l2 length + ((and (null l1) (null l2))) + ;; l1 not null and l2 null ==> l1 length > l2 length + (l1 (zerop (version-list-not-zero l1))) + ;; l1 null and l2 not null ==> l2 length > l1 length + (t (zerop (version-list-not-zero l2))))) + + +(defun version-list-<= (l1 l2) + "Return t if L1, a list specification of a version, is lower or equal to L2. + +Note that integer list (1) is equal to (1 0), (1 0 0), (1 0 0 0), +etc. That is, the trailing zeroes are insignificant. Also, integer +list (1) is greater than (1 -1) which is greater than (1 -2) +which is greater than (1 -3)." + (while (and l1 l2 (= (car l1) (car l2))) + (setq l1 (cdr l1) + l2 (cdr l2))) + (cond + ;; l1 not null and l2 not null + ((and l1 l2) (< (car l1) (car l2))) + ;; l1 null and l2 null ==> l1 length = l2 length + ((and (null l1) (null l2))) + ;; l1 not null and l2 null ==> l1 length > l2 length + (l1 (<= (version-list-not-zero l1) 0)) + ;; l1 null and l2 not null ==> l2 length > l1 length + (t (<= 0 (version-list-not-zero l2))))) + +(defun version-list-not-zero (lst) + "Return the first non-zero element of LST, which is a list of integers. + +If all LST elements are zeros or LST is nil, return zero." + (while (and lst (zerop (car lst))) + (setq lst (cdr lst))) + (if lst + (car lst) + ;; there is no element different of zero + 0)) + + +(defun version< (v1 v2) + "Return t if version V1 is lower (older) than V2. + +Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\", +etc. That is, the trailing \".0\"s are insignificant. Also, version +string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\", +which is higher than \"1alpha\", which is higher than \"1snapshot\". +Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions." + (version-list-< (version-to-list v1) (version-to-list v2))) + +(defun version<= (v1 v2) + "Return t if version V1 is lower (older) than or equal to V2. + +Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\", +etc. That is, the trailing \".0\"s are insignificant. Also, version +string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\", +which is higher than \"1alpha\", which is higher than \"1snapshot\". +Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions." + (version-list-<= (version-to-list v1) (version-to-list v2))) + +(defun version= (v1 v2) + "Return t if version V1 is equal to V2. + +Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\", +etc. That is, the trailing \".0\"s are insignificant. Also, version +string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\", +which is higher than \"1alpha\", which is higher than \"1snapshot\". +Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions." + (version-list-= (version-to-list v1) (version-to-list v2))) + +(defvar package--builtin-versions + ;; Mostly populated by loaddefs.el via autoload-builtin-package-versions. + (purecopy `((emacs . ,(version-to-list emacs-version)))) + "Alist giving the version of each versioned builtin package. +I.e. each element of the list is of the form (NAME . 
VERSION) where +NAME is the package name as a symbol, and VERSION is its version +as a list.") + +(defun package--description-file (dir) + (concat (let ((subdir (file-name-nondirectory + (directory-file-name dir)))) + (if (string-match "\\([^.].*?\\)-\\([0-9]+\\(?:[.][0-9]+\\|\\(?:pre\\|beta\\|alpha\\)[0-9]+\\)*\\)" subdir) + (match-string 1 subdir) subdir)) + "-pkg.el")) + + +;;; Misc. +(defconst menu-bar-separator '("--") + "Separator for menus.") + +;; The following statement ought to be in print.c, but `provide' can't +;; be used there. +;; http://lists.gnu.org/archive/html/emacs-devel/2009-08/msg00236.html +(when (hash-table-p (car (read-from-string + (prin1-to-string (make-hash-table))))) + (provide 'hashtable-print-readable)) + +;; This is used in lisp/Makefile.in and in leim/Makefile.in to +;; generate file names for autoloads, custom-deps, and finder-data. +(defun unmsys--file-name (file) + "Produce the canonical file name for FILE from its MSYS form. + +On systems other than MS-Windows, just returns FILE. +On MS-Windows, converts /d/foo/bar form of file names +passed by MSYS Make into d:/foo/bar that Emacs can grok. + +This function is called from lisp/Makefile and leim/Makefile." + (when (and (eq system-type 'windows-nt) + (string-match "\\`/[a-zA-Z]/" file)) + (setq file (concat (substring file 1 2) ":" (substring file 2)))) + file) + + +;;; subr.el ends here diff --git a/vendor/pygments/tests/examplefiles/swig_java.swg b/vendor/pygments/tests/examplefiles/swig_java.swg new file mode 100644 index 0000000..6126a55 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/swig_java.swg @@ -0,0 +1,1329 @@ +/* ----------------------------------------------------------------------------- + * java.swg + * + * Java typemaps + * ----------------------------------------------------------------------------- */ + +%include + +/* The jni, jtype and jstype typemaps work together and so there should be one of each. + * The jni typemap contains the JNI type used in the JNI (C/C++) code. + * The jtype typemap contains the Java type used in the JNI intermediary class. + * The jstype typemap contains the Java type used in the Java proxy classes, type wrapper classes and module class. 
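+ * (Illustration added in review, not part of upstream java.swg: for C/C++
+ * `unsigned long long' the three typemaps below are "jobject" for jni,
+ * "java.math.BigInteger" for jtype and "java.math.BigInteger" for jstype.)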
*/ + +/* Fragments */ +%fragment("SWIG_PackData", "header") { +/* Pack binary data into a string */ +SWIGINTERN char * SWIG_PackData(char *c, void *ptr, size_t sz) { + static const char hex[17] = "0123456789abcdef"; + register const unsigned char *u = (unsigned char *) ptr; + register const unsigned char *eu = u + sz; + for (; u != eu; ++u) { + register unsigned char uu = *u; + *(c++) = hex[(uu & 0xf0) >> 4]; + *(c++) = hex[uu & 0xf]; + } + return c; +} +} + +%fragment("SWIG_UnPackData", "header") { +/* Unpack binary data from a string */ +SWIGINTERN const char * SWIG_UnpackData(const char *c, void *ptr, size_t sz) { + register unsigned char *u = (unsigned char *) ptr; + register const unsigned char *eu = u + sz; + for (; u != eu; ++u) { + register char d = *(c++); + register unsigned char uu; + if ((d >= '0') && (d <= '9')) + uu = ((d - '0') << 4); + else if ((d >= 'a') && (d <= 'f')) + uu = ((d - ('a'-10)) << 4); + else + return (char *) 0; + d = *(c++); + if ((d >= '0') && (d <= '9')) + uu |= (d - '0'); + else if ((d >= 'a') && (d <= 'f')) + uu |= (d - ('a'-10)); + else + return (char *) 0; + *u = uu; + } + return c; +} +} + +/* Primitive types */ +%typemap(jni) bool, const bool & "jboolean" +%typemap(jni) char, const char & "jchar" +%typemap(jni) signed char, const signed char & "jbyte" +%typemap(jni) unsigned char, const unsigned char & "jshort" +%typemap(jni) short, const short & "jshort" +%typemap(jni) unsigned short, const unsigned short & "jint" +%typemap(jni) int, const int & "jint" +%typemap(jni) unsigned int, const unsigned int & "jlong" +%typemap(jni) long, const long & "jint" +%typemap(jni) unsigned long, const unsigned long & "jlong" +%typemap(jni) long long, const long long & "jlong" +%typemap(jni) unsigned long long, const unsigned long long & "jobject" +%typemap(jni) float, const float & "jfloat" +%typemap(jni) double, const double & "jdouble" +%typemap(jni) void "void" + +%typemap(jtype) bool, const bool & "boolean" +%typemap(jtype) char, const char & "char" +%typemap(jtype) signed char, const signed char & "byte" +%typemap(jtype) unsigned char, const unsigned char & "short" +%typemap(jtype) short, const short & "short" +%typemap(jtype) unsigned short, const unsigned short & "int" +%typemap(jtype) int, const int & "int" +%typemap(jtype) unsigned int, const unsigned int & "long" +%typemap(jtype) long, const long & "int" +%typemap(jtype) unsigned long, const unsigned long & "long" +%typemap(jtype) long long, const long long & "long" +%typemap(jtype) unsigned long long, const unsigned long long & "java.math.BigInteger" +%typemap(jtype) float, const float & "float" +%typemap(jtype) double, const double & "double" +%typemap(jtype) void "void" + +%typemap(jstype) bool, const bool & "boolean" +%typemap(jstype) char, const char & "char" +%typemap(jstype) signed char, const signed char & "byte" +%typemap(jstype) unsigned char, const unsigned char & "short" +%typemap(jstype) short, const short & "short" +%typemap(jstype) unsigned short, const unsigned short & "int" +%typemap(jstype) int, const int & "int" +%typemap(jstype) unsigned int, const unsigned int & "long" +%typemap(jstype) long, const long & "int" +%typemap(jstype) unsigned long, const unsigned long & "long" +%typemap(jstype) long long, const long long & "long" +%typemap(jstype) unsigned long long, const unsigned long long & "java.math.BigInteger" +%typemap(jstype) float, const float & "float" +%typemap(jstype) double, const double & "double" +%typemap(jstype) void "void" + +%typemap(jni) char *, char *&, char[ANY], 
char[] "jstring" +%typemap(jtype) char *, char *&, char[ANY], char[] "String" +%typemap(jstype) char *, char *&, char[ANY], char[] "String" + +/* JNI types */ +%typemap(jni) jboolean "jboolean" +%typemap(jni) jchar "jchar" +%typemap(jni) jbyte "jbyte" +%typemap(jni) jshort "jshort" +%typemap(jni) jint "jint" +%typemap(jni) jlong "jlong" +%typemap(jni) jfloat "jfloat" +%typemap(jni) jdouble "jdouble" +%typemap(jni) jstring "jstring" +%typemap(jni) jobject "jobject" +%typemap(jni) jbooleanArray "jbooleanArray" +%typemap(jni) jcharArray "jcharArray" +%typemap(jni) jbyteArray "jbyteArray" +%typemap(jni) jshortArray "jshortArray" +%typemap(jni) jintArray "jintArray" +%typemap(jni) jlongArray "jlongArray" +%typemap(jni) jfloatArray "jfloatArray" +%typemap(jni) jdoubleArray "jdoubleArray" +%typemap(jni) jobjectArray "jobjectArray" + +%typemap(jtype) jboolean "boolean" +%typemap(jtype) jchar "char" +%typemap(jtype) jbyte "byte" +%typemap(jtype) jshort "short" +%typemap(jtype) jint "int" +%typemap(jtype) jlong "long" +%typemap(jtype) jfloat "float" +%typemap(jtype) jdouble "double" +%typemap(jtype) jstring "String" +%typemap(jtype) jobject "Object" +%typemap(jtype) jbooleanArray "boolean[]" +%typemap(jtype) jcharArray "char[]" +%typemap(jtype) jbyteArray "byte[]" +%typemap(jtype) jshortArray "short[]" +%typemap(jtype) jintArray "int[]" +%typemap(jtype) jlongArray "long[]" +%typemap(jtype) jfloatArray "float[]" +%typemap(jtype) jdoubleArray "double[]" +%typemap(jtype) jobjectArray "Object[]" + +%typemap(jstype) jboolean "boolean" +%typemap(jstype) jchar "char" +%typemap(jstype) jbyte "byte" +%typemap(jstype) jshort "short" +%typemap(jstype) jint "int" +%typemap(jstype) jlong "long" +%typemap(jstype) jfloat "float" +%typemap(jstype) jdouble "double" +%typemap(jstype) jstring "String" +%typemap(jstype) jobject "Object" +%typemap(jstype) jbooleanArray "boolean[]" +%typemap(jstype) jcharArray "char[]" +%typemap(jstype) jbyteArray "byte[]" +%typemap(jstype) jshortArray "short[]" +%typemap(jstype) jintArray "int[]" +%typemap(jstype) jlongArray "long[]" +%typemap(jstype) jfloatArray "float[]" +%typemap(jstype) jdoubleArray "double[]" +%typemap(jstype) jobjectArray "Object[]" + +/* Non primitive types */ +%typemap(jni) SWIGTYPE "jlong" +%typemap(jtype) SWIGTYPE "long" +%typemap(jstype) SWIGTYPE "$&javaclassname" + +%typemap(jni) SWIGTYPE [] "jlong" +%typemap(jtype) SWIGTYPE [] "long" +%typemap(jstype) SWIGTYPE [] "$javaclassname" + +%typemap(jni) SWIGTYPE * "jlong" +%typemap(jtype) SWIGTYPE * "long" +%typemap(jstype) SWIGTYPE * "$javaclassname" + +%typemap(jni) SWIGTYPE & "jlong" +%typemap(jtype) SWIGTYPE & "long" +%typemap(jstype) SWIGTYPE & "$javaclassname" + +/* pointer to a class member */ +%typemap(jni) SWIGTYPE (CLASS::*) "jstring" +%typemap(jtype) SWIGTYPE (CLASS::*) "String" +%typemap(jstype) SWIGTYPE (CLASS::*) "$javaclassname" + +/* The following are the in, out, freearg, argout typemaps. These are the JNI code generating typemaps for converting from Java to C and visa versa. */ + +/* primitive types */ +%typemap(in) bool +%{ $1 = $input ? true : false; %} + +%typemap(directorout) bool +%{ $result = $input ? 
true : false; %} + +%typemap(javadirectorin) bool "$jniinput" +%typemap(javadirectorout) bool "$javacall" + +%typemap(in) char, + signed char, + unsigned char, + short, + unsigned short, + int, + unsigned int, + long, + unsigned long, + long long, + float, + double +%{ $1 = ($1_ltype)$input; %} + +%typemap(directorout) char, + signed char, + unsigned char, + short, + unsigned short, + int, + unsigned int, + long, + unsigned long, + long long, + float, + double +%{ $result = ($1_ltype)$input; %} + +%typemap(directorin, descriptor="Z") bool "$input = (jboolean) $1;" +%typemap(directorin, descriptor="C") char "$input = (jint) $1;" +%typemap(directorin, descriptor="B") signed char "$input = (jbyte) $1;" +%typemap(directorin, descriptor="S") unsigned char "$input = (jshort) $1;" +%typemap(directorin, descriptor="S") short "$input = (jshort) $1;" +%typemap(directorin, descriptor="I") unsigned short "$input = (jint) $1;" +%typemap(directorin, descriptor="I") int "$input = (jint) $1;" +%typemap(directorin, descriptor="J") unsigned int "$input = (jlong) $1;" +%typemap(directorin, descriptor="I") long "$input = (jint) $1;" +%typemap(directorin, descriptor="J") unsigned long "$input = (jlong) $1;" +%typemap(directorin, descriptor="J") long long "$input = (jlong) $1;" +%typemap(directorin, descriptor="F") float "$input = (jfloat) $1;" +%typemap(directorin, descriptor="D") double "$input = (jdouble) $1;" + +%typemap(javadirectorin) char, + signed char, + unsigned char, + short, + unsigned short, + int, + unsigned int, + long, + unsigned long, + long long, + float, + double + "$jniinput" + +%typemap(javadirectorout) char, + signed char, + unsigned char, + short, + unsigned short, + int, + unsigned int, + long, + unsigned long, + long long, + float, + double + "$javacall" + +%typemap(out) bool %{ $result = (jboolean)$1; %} +%typemap(out) char %{ $result = (jchar)$1; %} +%typemap(out) signed char %{ $result = (jbyte)$1; %} +%typemap(out) unsigned char %{ $result = (jshort)$1; %} +%typemap(out) short %{ $result = (jshort)$1; %} +%typemap(out) unsigned short %{ $result = (jint)$1; %} +%typemap(out) int %{ $result = (jint)$1; %} +%typemap(out) unsigned int %{ $result = (jlong)$1; %} +%typemap(out) long %{ $result = (jint)$1; %} +%typemap(out) unsigned long %{ $result = (jlong)$1; %} +%typemap(out) long long %{ $result = (jlong)$1; %} +%typemap(out) float %{ $result = (jfloat)$1; %} +%typemap(out) double %{ $result = (jdouble)$1; %} + +/* unsigned long long */ +/* Convert from BigInteger using the toByteArray member function */ +%typemap(in) unsigned long long { + jclass clazz; + jmethodID mid; + jbyteArray ba; + jbyte* bae; + jsize sz; + int i; + + if (!$input) { + SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "BigInteger null"); + return $null; + } + clazz = JCALL1(GetObjectClass, jenv, $input); + mid = JCALL3(GetMethodID, jenv, clazz, "toByteArray", "()[B"); + ba = (jbyteArray)JCALL2(CallObjectMethod, jenv, $input, mid); + bae = JCALL2(GetByteArrayElements, jenv, ba, 0); + sz = JCALL1(GetArrayLength, jenv, ba); + $1 = 0; + for(i=0; i", "([B)V"); + jobject bigint; + int i; + + bae[0] = 0; + for(i=1; i<9; i++ ) { + bae[i] = (jbyte)($1>>8*(8-i)); + } + + JCALL3(ReleaseByteArrayElements, jenv, ba, bae, 0); + bigint = JCALL3(NewObject, jenv, clazz, mid, ba); + $result = bigint; +} + +/* Convert to BigInteger (see out typemap) */ +%typemap(directorin, descriptor="Ljava/math/BigInteger;") unsigned long long, const unsigned long long & { + jbyteArray ba = JCALL1(NewByteArray, jenv, 9); + jbyte* bae 
= JCALL2(GetByteArrayElements, jenv, ba, 0); + jclass clazz = JCALL1(FindClass, jenv, "java/math/BigInteger"); + jmethodID mid = JCALL3(GetMethodID, jenv, clazz, "", "([B)V"); + jobject bigint; + int swig_i; + + bae[0] = 0; + for(swig_i=1; swig_i<9; swig_i++ ) { + bae[swig_i] = (jbyte)($1>>8*(8-swig_i)); + } + + JCALL3(ReleaseByteArrayElements, jenv, ba, bae, 0); + bigint = JCALL3(NewObject, jenv, clazz, mid, ba); + $input = bigint; +} + +%typemap(javadirectorin) unsigned long long "$jniinput" +%typemap(javadirectorout) unsigned long long "$javacall" + +/* char * - treat as String */ +%typemap(in, noblock=1) char * { + $1 = 0; + if ($input) { + $1 = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); + if (!$1) return $null; + } +} + +%typemap(directorout, noblock=1, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) char * { + $1 = 0; + if ($input) { + $result = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); + if (!$result) return $null; + } +} + +%typemap(directorin, descriptor="Ljava/lang/String;", noblock=1) char * { + $input = 0; + if ($1) { + $input = JCALL1(NewStringUTF, jenv, (const char *)$1); + if (!$input) return $null; + } +} + +%typemap(freearg, noblock=1) char * { if ($1) JCALL2(ReleaseStringUTFChars, jenv, $input, (const char *)$1); } +%typemap(out, noblock=1) char * { if ($1) $result = JCALL1(NewStringUTF, jenv, (const char *)$1); } +%typemap(javadirectorin) char * "$jniinput" +%typemap(javadirectorout) char * "$javacall" + +/* char *& - treat as String */ +%typemap(in, noblock=1) char *& ($*1_ltype temp = 0) { + $1 = 0; + if ($input) { + temp = ($*1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); + if (!temp) return $null; + } + $1 = &temp; +} +%typemap(freearg, noblock=1) char *& { if ($1 && *$1) JCALL2(ReleaseStringUTFChars, jenv, $input, (const char *)*$1); } +%typemap(out, noblock=1) char *& { if (*$1) $result = JCALL1(NewStringUTF, jenv, (const char *)*$1); } + +%typemap(out) void "" +%typemap(javadirectorin) void "$jniinput" +%typemap(javadirectorout) void "$javacall" +%typemap(directorin, descriptor="V") void "" + +/* primitive types by reference */ +%typemap(in) const bool & ($*1_ltype temp) +%{ temp = $input ? true : false; + $1 = &temp; %} + +%typemap(directorout,warning=SWIGWARN_TYPEMAP_THREAD_UNSAFE_MSG) const bool & +%{ static $*1_ltype temp; + temp = $input ? 
true : false; + $result = &temp; %} + +%typemap(javadirectorin) const bool & "$jniinput" +%typemap(javadirectorout) const bool & "$javacall" + +%typemap(in) const char & ($*1_ltype temp), + const signed char & ($*1_ltype temp), + const unsigned char & ($*1_ltype temp), + const short & ($*1_ltype temp), + const unsigned short & ($*1_ltype temp), + const int & ($*1_ltype temp), + const unsigned int & ($*1_ltype temp), + const long & ($*1_ltype temp), + const unsigned long & ($*1_ltype temp), + const long long & ($*1_ltype temp), + const float & ($*1_ltype temp), + const double & ($*1_ltype temp) +%{ temp = ($*1_ltype)$input; + $1 = &temp; %} + +%typemap(directorout,warning=SWIGWARN_TYPEMAP_THREAD_UNSAFE_MSG) const char &, + const signed char &, + const unsigned char &, + const short &, + const unsigned short &, + const int &, + const unsigned int &, + const long &, + const unsigned long &, + const long long &, + const float &, + const double & +%{ static $*1_ltype temp; + temp = ($*1_ltype)$input; + $result = &temp; %} + +%typemap(directorin, descriptor="Z") const bool & "$input = (jboolean)$1;" +%typemap(directorin, descriptor="C") const char & "$input = (jchar)$1;" +%typemap(directorin, descriptor="B") const signed char & "$input = (jbyte)$1;" +%typemap(directorin, descriptor="S") const unsigned char & "$input = (jshort)$1;" +%typemap(directorin, descriptor="S") const short & "$input = (jshort)$1;" +%typemap(directorin, descriptor="I") const unsigned short & "$input = (jint)$1;" +%typemap(directorin, descriptor="I") const int & "$input = (jint)$1;" +%typemap(directorin, descriptor="J") const unsigned int & "$input = (jlong)$1;" +%typemap(directorin, descriptor="I") const long & "$input = (jint)$1;" +%typemap(directorin, descriptor="J") const unsigned long & "$input = (jlong)$1;" +%typemap(directorin, descriptor="J") const long long & "$input = (jlong)$1;" +%typemap(directorin, descriptor="F") const float & "$input = (jfloat)$1;" +%typemap(directorin, descriptor="D") const double & "$input = (jdouble)$1;" + +%typemap(javadirectorin) const char & ($*1_ltype temp), + const signed char & ($*1_ltype temp), + const unsigned char & ($*1_ltype temp), + const short & ($*1_ltype temp), + const unsigned short & ($*1_ltype temp), + const int & ($*1_ltype temp), + const unsigned int & ($*1_ltype temp), + const long & ($*1_ltype temp), + const unsigned long & ($*1_ltype temp), + const long long & ($*1_ltype temp), + const float & ($*1_ltype temp), + const double & ($*1_ltype temp) + "$jniinput" + +%typemap(javadirectorout) const char & ($*1_ltype temp), + const signed char & ($*1_ltype temp), + const unsigned char & ($*1_ltype temp), + const short & ($*1_ltype temp), + const unsigned short & ($*1_ltype temp), + const int & ($*1_ltype temp), + const unsigned int & ($*1_ltype temp), + const long & ($*1_ltype temp), + const unsigned long & ($*1_ltype temp), + const long long & ($*1_ltype temp), + const float & ($*1_ltype temp), + const double & ($*1_ltype temp) + "$javacall" + + +%typemap(out) const bool & %{ $result = (jboolean)*$1; %} +%typemap(out) const char & %{ $result = (jchar)*$1; %} +%typemap(out) const signed char & %{ $result = (jbyte)*$1; %} +%typemap(out) const unsigned char & %{ $result = (jshort)*$1; %} +%typemap(out) const short & %{ $result = (jshort)*$1; %} +%typemap(out) const unsigned short & %{ $result = (jint)*$1; %} +%typemap(out) const int & %{ $result = (jint)*$1; %} +%typemap(out) const unsigned int & %{ $result = (jlong)*$1; %} +%typemap(out) const long & %{ $result = (jint)*$1; 
%} +%typemap(out) const unsigned long & %{ $result = (jlong)*$1; %} +%typemap(out) const long long & %{ $result = (jlong)*$1; %} +%typemap(out) const float & %{ $result = (jfloat)*$1; %} +%typemap(out) const double & %{ $result = (jdouble)*$1; %} + +/* const unsigned long long & */ +/* Similar to unsigned long long */ +%typemap(in) const unsigned long long & ($*1_ltype temp) { + jclass clazz; + jmethodID mid; + jbyteArray ba; + jbyte* bae; + jsize sz; + int i; + + if (!$input) { + SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "BigInteger null"); + return $null; + } + clazz = JCALL1(GetObjectClass, jenv, $input); + mid = JCALL3(GetMethodID, jenv, clazz, "toByteArray", "()[B"); + ba = (jbyteArray)JCALL2(CallObjectMethod, jenv, $input, mid); + bae = JCALL2(GetByteArrayElements, jenv, ba, 0); + sz = JCALL1(GetArrayLength, jenv, ba); + $1 = &temp; + temp = 0; + for(i=0; i", "([B)V"); + jobject bigint; + int i; + + bae[0] = 0; + for(i=1; i<9; i++ ) { + bae[i] = (jbyte)(*$1>>8*(8-i)); + } + + JCALL3(ReleaseByteArrayElements, jenv, ba, bae, 0); + bigint = JCALL3(NewObject, jenv, clazz, mid, ba); + $result = bigint; +} + +%typemap(javadirectorin) const unsigned long long & "$jniinput" +%typemap(javadirectorout) const unsigned long long & "$javacall" + +/* Default handling. Object passed by value. Convert to a pointer */ +%typemap(in) SWIGTYPE ($&1_type argp) +%{ argp = *($&1_ltype*)&$input; + if (!argp) { + SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "Attempt to dereference null $1_type"); + return $null; + } + $1 = *argp; %} + +%typemap(directorout) SWIGTYPE ($&1_type argp) +%{ argp = *($&1_ltype*)&$input; + if (!argp) { + SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "Unexpected null return for type $1_type"); + return $null; + } + $result = *argp; %} + +%typemap(out) SWIGTYPE +#ifdef __cplusplus +%{ *($&1_ltype*)&$result = new $1_ltype((const $1_ltype &)$1); %} +#else +{ + $&1_ltype $1ptr = ($&1_ltype) malloc(sizeof($1_ltype)); + memmove($1ptr, &$1, sizeof($1_type)); + *($&1_ltype*)&$result = $1ptr; +} +#endif + +%typemap(directorin,descriptor="L$packagepath/$&javaclassname;") SWIGTYPE +%{ $input = 0; + *(($&1_ltype*)&$input) = &$1; %} +%typemap(javadirectorin) SWIGTYPE "new $&javaclassname($jniinput, false)" +%typemap(javadirectorout) SWIGTYPE "$&javaclassname.getCPtr($javacall)" + +/* Generic pointers and references */ +%typemap(in) SWIGTYPE * %{ $1 = *($&1_ltype)&$input; %} +%typemap(in, fragment="SWIG_UnPackData") SWIGTYPE (CLASS::*) { + const char *temp = 0; + if ($input) { + temp = JCALL2(GetStringUTFChars, jenv, $input, 0); + if (!temp) return $null; + } + SWIG_UnpackData(temp, (void *)&$1, sizeof($1)); +} +%typemap(in) SWIGTYPE & %{ $1 = *($&1_ltype)&$input; + if (!$1) { + SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "$1_type reference is null"); + return $null; + } %} +%typemap(out) SWIGTYPE * +%{ *($&1_ltype)&$result = $1; %} +%typemap(out, fragment="SWIG_PackData", noblock=1) SWIGTYPE (CLASS::*) { + char buf[128]; + char *data = SWIG_PackData(buf, (void *)&$1, sizeof($1)); + *data = '\0'; + $result = JCALL1(NewStringUTF, jenv, buf); +} +%typemap(out) SWIGTYPE & +%{ *($&1_ltype)&$result = $1; %} + +%typemap(directorout, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) SWIGTYPE * +%{ $result = *($&1_ltype)&$input; %} +%typemap(directorout, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) SWIGTYPE (CLASS::*) +%{ $result = *($&1_ltype)&$input; %} + +%typemap(directorin,descriptor="L$packagepath/$javaclassname;") SWIGTYPE * +%{ 
*(($&1_ltype)&$input) = ($1_ltype) $1; %} +%typemap(directorin,descriptor="L$packagepath/$javaclassname;") SWIGTYPE (CLASS::*) +%{ *(($&1_ltype)&$input) = ($1_ltype) $1; %} + +%typemap(directorout, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) SWIGTYPE & +%{ if (!$input) { + SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "Unexpected null return for type $1_type"); + return $null; + } + $result = *($&1_ltype)&$input; %} +%typemap(directorin,descriptor="L$packagepath/$javaclassname;") SWIGTYPE & +%{ *($&1_ltype)&$input = ($1_ltype) &$1; %} + +%typemap(javadirectorin) SWIGTYPE *, SWIGTYPE (CLASS::*) "($jniinput == 0) ? null : new $javaclassname($jniinput, false)" +%typemap(javadirectorin) SWIGTYPE & "new $javaclassname($jniinput, false)" +%typemap(javadirectorout) SWIGTYPE *, SWIGTYPE (CLASS::*), SWIGTYPE & "$javaclassname.getCPtr($javacall)" + +/* Default array handling */ +%typemap(in) SWIGTYPE [] %{ $1 = *($&1_ltype)&$input; %} +%typemap(out) SWIGTYPE [] %{ *($&1_ltype)&$result = $1; %} +%typemap(freearg) SWIGTYPE [ANY], SWIGTYPE [] "" + +/* char arrays - treat as String */ +%typemap(in, noblock=1) char[ANY], char[] { + $1 = 0; + if ($input) { + $1 = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); + if (!$1) return $null; + } +} + +%typemap(directorout, noblock=1) char[ANY], char[] { + $1 = 0; + if ($input) { + $result = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); + if (!$result) return $null; + } +} + +%typemap(directorin, descriptor="Ljava/lang/String;", noblock=1) char[ANY], char[] { + $input = 0; + if ($1) { + $input = JCALL1(NewStringUTF, jenv, (const char *)$1); + if (!$input) return $null; + } +} + +%typemap(argout) char[ANY], char[] "" +%typemap(freearg, noblock=1) char[ANY], char[] { if ($1) JCALL2(ReleaseStringUTFChars, jenv, $input, (const char *)$1); } +%typemap(out, noblock=1) char[ANY], char[] { if ($1) $result = JCALL1(NewStringUTF, jenv, (const char *)$1); } +%typemap(javadirectorin) char[ANY], char[] "$jniinput" +%typemap(javadirectorout) char[ANY], char[] "$javacall" + +/* JNI types */ +%typemap(in) jboolean, + jchar, + jbyte, + jshort, + jint, + jlong, + jfloat, + jdouble, + jstring, + jobject, + jbooleanArray, + jcharArray, + jbyteArray, + jshortArray, + jintArray, + jlongArray, + jfloatArray, + jdoubleArray, + jobjectArray +%{ $1 = $input; %} + +%typemap(directorout) jboolean, + jchar, + jbyte, + jshort, + jint, + jlong, + jfloat, + jdouble, + jstring, + jobject, + jbooleanArray, + jcharArray, + jbyteArray, + jshortArray, + jintArray, + jlongArray, + jfloatArray, + jdoubleArray, + jobjectArray +%{ $result = $input; %} + +%typemap(out) jboolean, + jchar, + jbyte, + jshort, + jint, + jlong, + jfloat, + jdouble, + jstring, + jobject, + jbooleanArray, + jcharArray, + jbyteArray, + jshortArray, + jintArray, + jlongArray, + jfloatArray, + jdoubleArray, + jobjectArray +%{ $result = $1; %} + +%typemap(directorin,descriptor="Z") jboolean "$input = $1;" +%typemap(directorin,descriptor="C") jchar "$input = $1;" +%typemap(directorin,descriptor="B") jbyte "$input = $1;" +%typemap(directorin,descriptor="S") jshort "$input = $1;" +%typemap(directorin,descriptor="I") jint "$input = $1;" +%typemap(directorin,descriptor="J") jlong "$input = $1;" +%typemap(directorin,descriptor="F") jfloat "$input = $1;" +%typemap(directorin,descriptor="D") jdouble "$input = $1;" +%typemap(directorin,descriptor="Ljava/lang/String;") jstring "$input = $1;" +%typemap(directorin,descriptor="Ljava/lang/Object;",nouse="1") jobject "$input = $1;" 
+%typemap(directorin,descriptor="[Z") jbooleanArray "$input = $1;" +%typemap(directorin,descriptor="[C") jcharArray "$input = $1;" +%typemap(directorin,descriptor="[B") jbyteArray "$input = $1;" +%typemap(directorin,descriptor="[S") jshortArray "$input = $1;" +%typemap(directorin,descriptor="[I") jintArray "$input = $1;" +%typemap(directorin,descriptor="[J") jlongArray "$input = $1;" +%typemap(directorin,descriptor="[F") jfloatArray "$input = $1;" +%typemap(directorin,descriptor="[D") jdoubleArray "$input = $1;" +%typemap(directorin,descriptor="[Ljava/lang/Object;",nouse="1") jobjectArray "$input = $1;" + +%typemap(javadirectorin) jboolean, + jchar, + jbyte, + jshort, + jint, + jlong, + jfloat, + jdouble, + jstring, + jobject, + jbooleanArray, + jcharArray, + jbyteArray, + jshortArray, + jintArray, + jlongArray, + jfloatArray, + jdoubleArray, + jobjectArray + "$jniinput" + +%typemap(javadirectorout) jboolean, + jchar, + jbyte, + jshort, + jint, + jlong, + jfloat, + jdouble, + jstring, + jobject, + jbooleanArray, + jcharArray, + jbyteArray, + jshortArray, + jintArray, + jlongArray, + jfloatArray, + jdoubleArray, + jobjectArray + "$javacall" + +/* Typecheck typemaps - The purpose of these is merely to issue a warning for overloaded C++ functions + * that cannot be overloaded in Java as more than one C++ type maps to a single Java type */ + +%typecheck(SWIG_TYPECHECK_BOOL) /* Java boolean */ + jboolean, + bool, + const bool & + "" + +%typecheck(SWIG_TYPECHECK_CHAR) /* Java char */ + jchar, + char, + const char & + "" + +%typecheck(SWIG_TYPECHECK_INT8) /* Java byte */ + jbyte, + signed char, + const signed char & + "" + +%typecheck(SWIG_TYPECHECK_INT16) /* Java short */ + jshort, + unsigned char, + short, + const unsigned char &, + const short & + "" + +%typecheck(SWIG_TYPECHECK_INT32) /* Java int */ + jint, + unsigned short, + int, + long, + const unsigned short &, + const int &, + const long & + "" + +%typecheck(SWIG_TYPECHECK_INT64) /* Java long */ + jlong, + unsigned int, + unsigned long, + long long, + const unsigned int &, + const unsigned long &, + const long long & + "" + +%typecheck(SWIG_TYPECHECK_INT128) /* Java BigInteger */ + unsigned long long, + const unsigned long long & + "" + +%typecheck(SWIG_TYPECHECK_FLOAT) /* Java float */ + jfloat, + float, + const float & + "" + +%typecheck(SWIG_TYPECHECK_DOUBLE) /* Java double */ + jdouble, + double, + const double & + "" + +%typecheck(SWIG_TYPECHECK_STRING) /* Java String */ + jstring, + char *, + char *&, + char[ANY], + char [] + "" + +%typecheck(SWIG_TYPECHECK_BOOL_ARRAY) /* Java boolean[] */ + jbooleanArray + "" + +%typecheck(SWIG_TYPECHECK_CHAR_ARRAY) /* Java char[] */ + jcharArray + "" + +%typecheck(SWIG_TYPECHECK_INT8_ARRAY) /* Java byte[] */ + jbyteArray + "" + +%typecheck(SWIG_TYPECHECK_INT16_ARRAY) /* Java short[] */ + jshortArray + "" + +%typecheck(SWIG_TYPECHECK_INT32_ARRAY) /* Java int[] */ + jintArray + "" + +%typecheck(SWIG_TYPECHECK_INT64_ARRAY) /* Java long[] */ + jlongArray + "" + +%typecheck(SWIG_TYPECHECK_FLOAT_ARRAY) /* Java float[] */ + jfloatArray + "" + +%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY) /* Java double[] */ + jdoubleArray + "" + +%typecheck(SWIG_TYPECHECK_OBJECT_ARRAY) /* Java jobject[] */ + jobjectArray + "" + +%typecheck(SWIG_TYPECHECK_POINTER) /* Default */ + SWIGTYPE, + SWIGTYPE *, + SWIGTYPE &, + SWIGTYPE *const&, + SWIGTYPE [], + SWIGTYPE (CLASS::*) + "" + + +/* Exception handling */ + +%typemap(throws) int, + long, + short, + unsigned int, + unsigned long, + unsigned short +%{ char error_msg[256]; + 
sprintf(error_msg, "C++ $1_type exception thrown, value: %d", $1); + SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, error_msg); + return $null; %} + +%typemap(throws) SWIGTYPE, SWIGTYPE &, SWIGTYPE *, SWIGTYPE [], SWIGTYPE [ANY] +%{ (void)$1; + SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "C++ $1_type exception thrown"); + return $null; %} + +%typemap(throws) char * +%{ SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, $1); + return $null; %} + + +/* Typemaps for code generation in proxy classes and Java type wrapper classes */ + +/* The javain typemap is used for converting function parameter types from the type + * used in the proxy, module or type wrapper class to the type used in the JNI class. */ +%typemap(javain) bool, const bool &, + char, const char &, + signed char, const signed char &, + unsigned char, const unsigned char &, + short, const short &, + unsigned short, const unsigned short &, + int, const int &, + unsigned int, const unsigned int &, + long, const long &, + unsigned long, const unsigned long &, + long long, const long long &, + unsigned long long, const unsigned long long &, + float, const float &, + double, const double & + "$javainput" +%typemap(javain) char *, char *&, char[ANY], char[] "$javainput" +%typemap(javain) jboolean, + jchar, + jbyte, + jshort, + jint, + jlong, + jfloat, + jdouble, + jstring, + jobject, + jbooleanArray, + jcharArray, + jbyteArray, + jshortArray, + jintArray, + jlongArray, + jfloatArray, + jdoubleArray, + jobjectArray + "$javainput" +%typemap(javain) SWIGTYPE "$&javaclassname.getCPtr($javainput)" +%typemap(javain) SWIGTYPE *, SWIGTYPE &, SWIGTYPE [] "$javaclassname.getCPtr($javainput)" +%typemap(javain) SWIGTYPE (CLASS::*) "$javaclassname.getCMemberPtr($javainput)" + +/* The javaout typemap is used for converting function return types from the return type + * used in the JNI class to the type returned by the proxy, module or type wrapper class. */ +%typemap(javaout) bool, const bool &, + char, const char &, + signed char, const signed char &, + unsigned char, const unsigned char &, + short, const short &, + unsigned short, const unsigned short &, + int, const int &, + unsigned int, const unsigned int &, + long, const long &, + unsigned long, const unsigned long &, + long long, const long long &, + unsigned long long, const unsigned long long &, + float, const float &, + double, const double & { + return $jnicall; + } +%typemap(javaout) char *, char *&, char[ANY], char[] { + return $jnicall; + } +%typemap(javaout) jboolean, + jchar, + jbyte, + jshort, + jint, + jlong, + jfloat, + jdouble, + jstring, + jobject, + jbooleanArray, + jcharArray, + jbyteArray, + jshortArray, + jintArray, + jlongArray, + jfloatArray, + jdoubleArray, + jobjectArray { + return $jnicall; + } +%typemap(javaout) void { + $jnicall; + } +%typemap(javaout) SWIGTYPE { + return new $&javaclassname($jnicall, true); + } +%typemap(javaout) SWIGTYPE & { + return new $javaclassname($jnicall, $owner); + } +%typemap(javaout) SWIGTYPE *, SWIGTYPE [] { + long cPtr = $jnicall; + return (cPtr == 0) ? null : new $javaclassname(cPtr, $owner); + } +%typemap(javaout) SWIGTYPE (CLASS::*) { + String cMemberPtr = $jnicall; + return (cMemberPtr == null) ? 
null : new $javaclassname(cMemberPtr, $owner); + } + +/* Pointer reference typemaps */ +%typemap(jni) SWIGTYPE *const& "jlong" +%typemap(jtype) SWIGTYPE *const& "long" +%typemap(jstype) SWIGTYPE *const& "$*javaclassname" +%typemap(javain) SWIGTYPE *const& "$*javaclassname.getCPtr($javainput)" +%typemap(javaout) SWIGTYPE *const& { + long cPtr = $jnicall; + return (cPtr == 0) ? null : new $*javaclassname(cPtr, $owner); + } +%typemap(in) SWIGTYPE *const& ($*1_ltype temp = 0) +%{ temp = *($1_ltype)&$input; + $1 = ($1_ltype)&temp; %} +%typemap(out) SWIGTYPE *const& +%{ *($1_ltype)&$result = *$1; %} + +/* Typemaps used for the generation of proxy and type wrapper class code */ +%typemap(javabase) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" +%typemap(javaclassmodifiers) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "public class" +%typemap(javacode) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" +%typemap(javaimports) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" +%typemap(javainterfaces) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" + +/* javabody typemaps */ + +%define SWIG_JAVABODY_METHODS(PTRCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE...) SWIG_JAVABODY_PROXY(PTRCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE) %enddef // legacy name + +%define SWIG_JAVABODY_PROXY(PTRCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE...) +// Base proxy classes +%typemap(javabody) TYPE %{ + private long swigCPtr; + protected boolean swigCMemOwn; + + PTRCTOR_VISIBILITY $javaclassname(long cPtr, boolean cMemoryOwn) { + swigCMemOwn = cMemoryOwn; + swigCPtr = cPtr; + } + + CPTR_VISIBILITY static long getCPtr($javaclassname obj) { + return (obj == null) ? 0 : obj.swigCPtr; + } +%} + +// Derived proxy classes +%typemap(javabody_derived) TYPE %{ + private long swigCPtr; + + PTRCTOR_VISIBILITY $javaclassname(long cPtr, boolean cMemoryOwn) { + super($imclassname.$javaclazznameSWIGUpcast(cPtr), cMemoryOwn); + swigCPtr = cPtr; + } + + CPTR_VISIBILITY static long getCPtr($javaclassname obj) { + return (obj == null) ? 0 : obj.swigCPtr; + } +%} +%enddef + +%define SWIG_JAVABODY_TYPEWRAPPER(PTRCTOR_VISIBILITY, DEFAULTCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE...) +// Typewrapper classes +%typemap(javabody) TYPE *, TYPE &, TYPE [] %{ + private long swigCPtr; + + PTRCTOR_VISIBILITY $javaclassname(long cPtr, boolean futureUse) { + swigCPtr = cPtr; + } + + DEFAULTCTOR_VISIBILITY $javaclassname() { + swigCPtr = 0; + } + + CPTR_VISIBILITY static long getCPtr($javaclassname obj) { + return (obj == null) ? 0 : obj.swigCPtr; + } +%} + +%typemap(javabody) TYPE (CLASS::*) %{ + private String swigCMemberPtr; + + PTRCTOR_VISIBILITY $javaclassname(String cMemberPtr, boolean futureUse) { + swigCMemberPtr = cMemberPtr; + } + + DEFAULTCTOR_VISIBILITY $javaclassname() { + swigCMemberPtr = null; + } + + CPTR_VISIBILITY static String getCMemberPtr($javaclassname obj) { + return obj.swigCMemberPtr; + } +%} +%enddef + +/* Set the default javabody typemaps to use protected visibility. + Use the macros to change to public if using multiple modules. */ +SWIG_JAVABODY_PROXY(protected, protected, SWIGTYPE) +SWIG_JAVABODY_TYPEWRAPPER(protected, protected, protected, SWIGTYPE) + +%typemap(javafinalize) SWIGTYPE %{ + protected void finalize() { + delete(); + } +%} + +/* + * Java constructor typemaps: + * + * The javaconstruct typemap is inserted when a proxy class's constructor is generated. 
+ * This typemap allows control over what code is executed in the constructor as + * well as specifying who owns the underlying C/C++ object. Normally, Java has + * ownership and the underlying C/C++ object is deallocated when the Java object + * is finalized (swigCMemOwn is true.) If swigCMemOwn is false, C/C++ is + * ultimately responsible for deallocating the underlying object's memory. + * + * The SWIG_PROXY_CONSTRUCTOR macro defines the javaconstruct typemap for a proxy + * class for a particular TYPENAME. OWNERSHIP is passed as the value of + * swigCMemOwn to the pointer constructor method. WEAKREF determines which kind + * of Java object reference will be used by the C++ director class (WeakGlobalRef + * vs. GlobalRef.) + * + * The SWIG_DIRECTOR_OWNED macro sets the ownership of director-based proxy + * classes and the weak reference flag to false, meaning that the underlying C++ + * object will be reclaimed by C++. + */ + +%define SWIG_PROXY_CONSTRUCTOR(OWNERSHIP, WEAKREF, TYPENAME...) +%typemap(javaconstruct,directorconnect="\n $imclassname.$javaclazznamedirector_connect(this, swigCPtr, swigCMemOwn, WEAKREF);") TYPENAME { + this($imcall, OWNERSHIP);$directorconnect + } +%enddef + +%define SWIG_DIRECTOR_OWNED(TYPENAME...) +SWIG_PROXY_CONSTRUCTOR(true, false, TYPENAME) +%enddef + +// Set the default for SWIGTYPE: Java owns the C/C++ object. +SWIG_PROXY_CONSTRUCTOR(true, true, SWIGTYPE) + +%typemap(javadestruct, methodname="delete", methodmodifiers="public synchronized") SWIGTYPE { + if (swigCPtr != 0) { + if (swigCMemOwn) { + swigCMemOwn = false; + $jnicall; + } + swigCPtr = 0; + } + } + +%typemap(javadestruct_derived, methodname="delete", methodmodifiers="public synchronized") SWIGTYPE { + if (swigCPtr != 0) { + if (swigCMemOwn) { + swigCMemOwn = false; + $jnicall; + } + swigCPtr = 0; + } + super.delete(); + } + +%typemap(directordisconnect, methodname="swigDirectorDisconnect") SWIGTYPE %{ + protected void $methodname() { + swigCMemOwn = false; + $jnicall; + } +%} + +%typemap(directorowner_release, methodname="swigReleaseOwnership") SWIGTYPE %{ + public void $methodname() { + swigCMemOwn = false; + $jnicall; + } +%} + +%typemap(directorowner_take, methodname="swigTakeOwnership") SWIGTYPE %{ + public void $methodname() { + swigCMemOwn = true; + $jnicall; + } +%} + +/* Java specific directives */ +#define %javaconst(flag) %feature("java:const","flag") +#define %javaconstvalue(value) %feature("java:constvalue",value) +#define %javaenum(wrapapproach) %feature("java:enum","wrapapproach") +#define %javamethodmodifiers %feature("java:methodmodifiers") +#define %javaexception(exceptionclasses) %feature("except",throws=exceptionclasses) +#define %nojavaexception %feature("except","0",throws="") +#define %clearjavaexception %feature("except","",throws="") + +%pragma(java) jniclassclassmodifiers="public class" +%pragma(java) moduleclassmodifiers="public class" + +/* Some ANSI C typemaps */ + +%apply unsigned long { size_t }; +%apply const unsigned long & { const size_t & }; + +/* Array reference typemaps */ +%apply SWIGTYPE & { SWIGTYPE ((&)[ANY]) } + +/* const pointers */ +%apply SWIGTYPE * { SWIGTYPE *const } + +/* String & length */ +%typemap(jni) (char *STRING, size_t LENGTH) "jbyteArray" +%typemap(jtype) (char *STRING, size_t LENGTH) "byte[]" +%typemap(jstype) (char *STRING, size_t LENGTH) "byte[]" +%typemap(javain) (char *STRING, size_t LENGTH) "$javainput" +%typemap(freearg) (char *STRING, size_t LENGTH) "" +%typemap(in) (char *STRING, size_t LENGTH) { + if ($input) { + $1 = (char *) 
JCALL2(GetByteArrayElements, jenv, $input, 0); + $2 = (size_t) JCALL1(GetArrayLength, jenv, $input); + } else { + $1 = 0; + $2 = 0; + } +} +%typemap(argout) (char *STRING, size_t LENGTH) { + if ($input) JCALL3(ReleaseByteArrayElements, jenv, $input, (jbyte *)$1, 0); +} +%typemap(directorin, descriptor="[B") (char *STRING, size_t LENGTH) { + jbyteArray jb = (jenv)->NewByteArray($2); + (jenv)->SetByteArrayRegion(jb, 0, $2, (jbyte *)$1); + $input = jb; +} +%typemap(directorargout) (char *STRING, size_t LENGTH) +%{(jenv)->GetByteArrayRegion($input, 0, $2, (jbyte *)$1); %} +%apply (char *STRING, size_t LENGTH) { (char *STRING, int LENGTH) } + +/* java keywords */ +%include + +// Default enum handling +%include + diff --git a/vendor/pygments/tests/examplefiles/swig_std_vector.i b/vendor/pygments/tests/examplefiles/swig_std_vector.i new file mode 100644 index 0000000..baecf85 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/swig_std_vector.i @@ -0,0 +1,225 @@ +// +// std::vector +// + +%include + +// Vector + +%define %std_vector_methods(vector...) + %std_sequence_methods(vector) + + void reserve(size_type n); + size_type capacity() const; +%enddef + + +%define %std_vector_methods_val(vector...) + %std_sequence_methods_val(vector) + + void reserve(size_type n); + size_type capacity() const; +%enddef + + +// ------------------------------------------------------------------------ +// std::vector +// +// The aim of all that follows would be to integrate std::vector with +// as much as possible, namely, to allow the user to pass and +// be returned tuples or lists. +// const declarations are used to guess the intent of the function being +// exported; therefore, the following rationale is applied: +// +// -- f(std::vector), f(const std::vector&): +// the parameter being read-only, either a sequence or a +// previously wrapped std::vector can be passed. +// -- f(std::vector&), f(std::vector*): +// the parameter may be modified; therefore, only a wrapped std::vector +// can be passed. +// -- std::vector f(), const std::vector& f(): +// the vector is returned by copy; therefore, a sequence of T:s +// is returned which is most easily used in other functions +// -- std::vector& f(), std::vector* f(): +// the vector is returned by reference; therefore, a wrapped std::vector +// is returned +// -- const std::vector* f(), f(const std::vector*): +// for consistency, they expect and return a plain vector pointer. 
+// ------------------------------------------------------------------------ + +%{ +#include +%} + +// exported classes + + +namespace std { + + template > + class vector { + public: + typedef size_t size_type; + typedef ptrdiff_t difference_type; + typedef _Tp value_type; + typedef value_type* pointer; + typedef const value_type* const_pointer; + typedef _Tp& reference; + typedef const _Tp& const_reference; + typedef _Alloc allocator_type; + + %traits_swigtype(_Tp); + %traits_enum(_Tp); + + %fragment(SWIG_Traits_frag(std::vector<_Tp, _Alloc >), "header", + fragment=SWIG_Traits_frag(_Tp), + fragment="StdVectorTraits") { + namespace swig { + template <> struct traits > { + typedef pointer_category category; + static const char* type_name() { + return "std::vector<" #_Tp "," #_Alloc " >"; + } + }; + } + } + + %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector<_Tp, _Alloc >); + +#ifdef %swig_vector_methods + // Add swig/language extra methods + %swig_vector_methods(std::vector<_Tp, _Alloc >); +#endif + + %std_vector_methods(vector); + }; + + // *** + // This specialization should disappear or get simplified when + // a 'const SWIGTYPE*&' can be defined + // *** + template + class vector<_Tp*, _Alloc > { + public: + typedef size_t size_type; + typedef ptrdiff_t difference_type; + typedef _Tp* value_type; + typedef value_type* pointer; + typedef const value_type* const_pointer; + typedef value_type reference; + typedef value_type const_reference; + typedef _Alloc allocator_type; + + %traits_swigtype(_Tp); + + %fragment(SWIG_Traits_frag(std::vector<_Tp*, _Alloc >), "header", + fragment=SWIG_Traits_frag(_Tp), + fragment="StdVectorTraits") { + namespace swig { + template <> struct traits > { + typedef value_category category; + static const char* type_name() { + return "std::vector<" #_Tp " *," #_Alloc " >"; + } + }; + } + } + + %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector<_Tp*, _Alloc >); + +#ifdef %swig_vector_methods_val + // Add swig/language extra methods + %swig_vector_methods_val(std::vector<_Tp*, _Alloc >); +#endif + + %std_vector_methods_val(vector); + }; + + // *** + // const pointer specialization + // *** + template + class vector<_Tp const *, _Alloc > { + public: + typedef size_t size_type; + typedef ptrdiff_t difference_type; + typedef _Tp const * value_type; + typedef value_type* pointer; + typedef const value_type* const_pointer; + typedef value_type reference; + typedef value_type const_reference; + typedef _Alloc allocator_type; + + %traits_swigtype(_Tp); + + %fragment(SWIG_Traits_frag(std::vector<_Tp const*, _Alloc >), "header", + fragment=SWIG_Traits_frag(_Tp), + fragment="StdVectorTraits") { + namespace swig { + template <> struct traits > { + typedef value_category category; + static const char* type_name() { + return "std::vector<" #_Tp " const*," #_Alloc " >"; + } + }; + } + } + + %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector<_Tp const*, _Alloc >); + +#ifdef %swig_vector_methods_val + // Add swig/language extra methods + %swig_vector_methods_val(std::vector<_Tp const*, _Alloc >); +#endif + + %std_vector_methods_val(vector); + }; + + // *** + // bool specialization + // *** + + template + class vector { + public: + typedef size_t size_type; + typedef ptrdiff_t difference_type; + typedef bool value_type; + typedef value_type* pointer; + typedef const value_type* const_pointer; + typedef value_type reference; + typedef value_type const_reference; + typedef _Alloc allocator_type; + + %traits_swigtype(bool); + + %fragment(SWIG_Traits_frag(std::vector), 
"header", + fragment=SWIG_Traits_frag(bool), + fragment="StdVectorTraits") { + namespace swig { + template <> struct traits > { + typedef value_category category; + static const char* type_name() { + return "std::vector"; + } + }; + } + } + + %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector); + + +#ifdef %swig_vector_methods_val + // Add swig/language extra methods + %swig_vector_methods_val(std::vector); +#endif + + %std_vector_methods_val(vector); + +#if defined(SWIG_STD_MODERN_STL) && !defined(SWIG_STD_NOMODERN_STL) + void flip(); +#endif + + }; + +} diff --git a/vendor/pygments/tests/examplefiles/tads3_example.t b/vendor/pygments/tests/examplefiles/tads3_example.t new file mode 100644 index 0000000..41881c9 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/tads3_example.t @@ -0,0 +1,1248 @@ +#charset "utf-8" + +#include +#include + +extern function extern_function; +extern method extern_method; +extern function extern_function(a, b=a, c='<>', d:, e:=1, f?, ...); +extern method extern_method(a, b=a, c='<>', d:, e:=1, f?, [g]);; +extern class extern_class; +extern object extern_object; +intrinsic 't3vm' { }; +#ifndef PropDefAny +intrinsic class Object 'root-object/030004' { }; +#endif +object /**//**/ // /* \\ +#define Room Unthing + template [lst]; + +/* + * Quotations from "Le Roman de la Rose" are transcribed from MS. Douce 195, + * owned by Bodleian Library, University of Oxford + * (http://image.ox.ac.uk/show?collection=bodleian&manuscript=msdouce195). + */ + +versionInfo: GameID + IFID = '17d8efc3-07da-4dde-a837-ff7c4e386a77' + name = 'Pygmentalion' + byline = 'by David Corbett' + htmlByline = 'by David + Corbett' + version = '1' + authorEmail = 'David Corbett\040' + desc = 'You have fallen in love with a statue\x2e' + htmlDesc = 'You have fallen in love with a statue\x2E' +; + +/* + * Pymalion fu ẽtailleꝛꝛes. + * Poᷣtrayãs en fus ⁊ en peꝛꝛeˢ + * En metaulx en os ⁊ en cyꝛes + * Et en touteˢ aultres matires. + * Quon peult a tel oeuure trouuer. + * Poᷣ ſon grant engin eſpꝛouuer. + * Car maiſtre en fu bien dire loz. + * Ainſi com poᷣ acquerre loz + * Se voult a poᷣtraire deduyꝛe + * Si fiſt vng ymage diuuyꝛe + * Et miſt au faire tel entente + * Quel fu ſi plaiſãt et ſi gente + * Quel ſembloit eſtre auſſi viue. + * Com la plus belle riens q̇ viue + * (MS. Douce 195, fol. 149r) + */ + +modify _init() +{ + ({: local r, r = randomize, r})(); + replaced(); +} + +gameMain: GameMainDef + initialPlayerChar: Actor { + desc = "You look the same as usual, but you feel unusually + sentimental. " + location = entrance + } + showIntro + { + "The statue is undeniably a masterpiece: the most skillful carving you + have ever done, and the most beautiful woman you have ever seen. 
+ Unfortunately, she is also an inanimate block, and now you can neither + work nor rest for unrequitable love.\b + Once again you stumble into your studio, hoping and praying to find + your statue brought to life.\b + <>\r\n + <>\b"; + } +; + +enum token token, tokOp, token; + +modify cmdTokenizer + rules_ = static + [ + ['whitespace', new RexPattern('%s+'), nil, &tokCvtSkip, nil], + ['punctuation', new RexPattern('[.,;:?!]'), tokPunct, nil, nil], + ['spelled number', + new RexPattern('(twenty|thirty|forty|fifty|sixty|' + + 'seventy|eighty|ninety)-' + + '(one|two|three|four|five|six|seven|eight|nine)' + + '(?!)'), + tokWord, &tokCvtSpelledNumber, nil], + ['spelled operator', new RexPattern( + '(plus|positive|minus|negat(iv)?e|not|inverse(%s+of)?|' + + 'times|over|divided%s+by|mod(ulo)?|and|xor|or|[al]?sh[lr])' + + '(?!)'), + tokOp, &tokCvtSpelledOperator, nil], + ['operator', R'[-!~+*/%&^|]|<<|>>>?', tokOp, nil, nil], + ['word', new RexPattern('*'), + tokWord, nil, nil], + ['string ascii-quote', R"""([`\'"])(.*)%1(?!)""", + tokString, nil, nil], + ['string back-quote', R"`(.*)'(?!%w)", tokString, nil, nil], + ['string curly single-quote', new RexPattern('\u2018(.*)\u2019'), + tokString, nil, nil], + ['string curly double-quote', new RexPattern('\u201C(.*)\u201D'), + tokString, nil, nil], + ['string unterminated', R'''([`\'"\u2018\u201C](.*)''', tokString, + nil, nil], + ['integer', new RexPattern('[0-9]+'), tokInt, nil, nil] + ] + replace tokCvtSpelledOperator(txt, typ, toks) + { + toks.append([rexReplace(R'%s+', txt.toLower(), '\\'), typ, txt]); + } +; + +/* Tokens */ + +/* + * Puiˢ li reueſt en maĩteˢ guiſes. + * Robeˢ faicteˢ ꝑ grãˢ maiſtriſeˢ. + * De biaulx dꝛaps de ſoye ⁊ de laĩe. + * Deſcarlate de tiretaine + * De vert de pers ⁊ de bꝛunecte + * De couleᷣ freſche fine ⁊ necte + * Ou moult a riches paneˢ miſes. + * Herminees vaires et griſes + * Puis les li roſte puis reſſaye. + * Cõmant li ſiet robbe de ſaye + * Sendaulx meloguins galebꝛunˢ. + * Indes vermeilz iaunes ⁊ bꝛunˢ. + * [...] + * Aultre foiz luy repꝛẽd courage. + * De tout oſter ⁊ mectre guindeˢ. + * Iaunes vermeilles vers ⁊ indeˢ. + * (MS. Douce 195, fol. 
150r) + */ + +class Token: Achievement +{ + points = 1; + desc = "<><><>"; + before = before = '', before_ + after = (after = '', after_) +} + +Token template inherited 'before_' 'after_' 'desc_'; + +#define DefineToken(name, before, after) name##Token: Token before after #@name + +DefineToken(builtin, '', ''); +DefineToken(comment, '', ''); +DefineToken(decorator, '', ''); +DefineToken(error, '', ''); +DefineToken(escape, '', ''); +DefineToken(float, '', ''); +DefineToken(keyword, '', ''); +DefineToken(label, '', ''); +DefineToken(long, '', ''); +DefineToken(name, '', ''); +DefineToken(operator, '', ''); +DefineToken(string, '', ''); +DefineToken(whitespace, '', ''); + +function highlightToken(tokenString) +{ + local token = [ + 'built in' -> builtinToken, + 'comment' -> commentToken, + 'decorator' -> decoratorToken, + 'error' -> errorToken, + 'escape' -> escapeToken, + 'float' -> floatToken, + 'keyword' -> keywordToken, + 'label' -> labelToken, + 'long' -> longToken, + 'name' -> nameToken, + 'operator' -> operatorToken, + 'string' -> stringToken, + 'white space' -> whitespaceToken, + * -> nil + ][tokenString.toLower()]; + if (!token) + return tokenString; + token.awardPointsOnce(); + return '<><><>'; +} + +string /**//**/ // /* \\ +#define Room Unthing + template <> highlightToken; + +/* Grammar for materials */ + +dictionary property material; +grammar adjWord(material): ->adj_ : AdjPhraseWithVocab + getVocabMatchList(resolver, results, extraFlags) + { + return getWordMatches(adj_, &material, resolver, extraFlags, + VocabTruncated); + } + getAdjustedTokens() + { + return [adj_, &material]; + } +; + +/* Rooms and objects */ + ++ property location; + +entrance: Room 'Entrance' + "You are in the entrance to your studio. This is where you carve great + works of art, not that you have felt like making any lately. A door leads + outside, and the studio itself is to the north and the east. " + north = workbenchRoom + northeast = sinkRoom + east = altarRoom + south = door + out asExit(south) +; + ++ door: LockableWithKey, Door 'door' 'door' + "It is a simple wooden door. " + material = 'wood' 'wooden' + keyList = [key] + cannotOpenLockedMsg = '{The dobj/He} {is} locked. You cannot + <>! ' +; + +key: PresentLater, Key 'key' 'key' @altar + "It is a <>grimy<> bronze key. <>On it is \ + etched the word <>. " + material = 'bronze' + clean = nil + keyword = (keyword = randomGreekWord(), targetprop) + dobjFor(Clean) { verify { } action { askForIobj(CleanWith); } } + dobjFor(CleanWith) + { + verify + { + if (clean) + illogicalAlready('{The dobj/He} {is} already clean. '); + } + action + { + gDobj.clean = true; + "{You/He} clean{s} {the dobj/him}, revealing an inscription. "; + } + } + dobjFor(Read) { verify { nonObvious; } } +; + +workbenchRoom: Room 'At the Workbench' + "This workbench, in the northwest part of the studio, was where you would + create works of art. Now you just come here to contemplate your + creation’s beauty and lament your hopeless situation.\b + The statue stands on a plinth beside the workbench. " + east = sinkRoom + southeast = altarRoom + south = entrance + getDestName(actor, origin) { return 'the workbench'; } +; + ++ workbench: Fixture, Surface + 'workbench/bench/material/materials/tool/tools' 'workbench' + "Normally, the workbench would be scattered with half-finished projects, + but now your tools and materials lie abandoned. " +; + ++ plinth: Fixture, Thing 'marble plinth/pedestal' 'plinth' + "It’s a smoothed block of marble about a cubit high. 
" +; + +replace grammar predicate(Screw): ' ': object; +replace grammar predicate(ScrewWith): ' ': object; ++ + statue: Fixture, Surface + '"creation\'s" beauty/carving/creation/galatea/statue/woman' 'statue' + "This is a<>n untitled<> statue of a woman + carved from <>flawless <> + <>milk-white <>ivory. + <>Her + <>long <>hair is done up in a + chignon<>, with a few strands falling down her + neck<><>, and \v<>.<><> + <>She radiates an aura of contrapposto grace. + <><>\bYou wonder what she + <>is going to<>will<> be like as a + woman. + <>Maybe she’ll be a painter and expand + your business.<> + <>Maybe she’ll have a head for figures + and will put the accounts in order.<> + <>She’ll love you, obviously, but beyond + that you don’t know.<><> + <>If only Aphrodite would bring her to life + without this silly puzzle about tokens and mirrors!<> " + material = 'ivory' + propertyset 'is*' + { + propertyset 'H*' + { + im = nil\ + er = true; + } + It = true + } + iobjFor(PutOn) + { + check + { + if (gDobj not /**//**/ // /* \\ +#define Room Unthing + in (necklace, __objref(necklace, warn))) + "How rude! You don’t know what you were thinking. "; + } + } + iobjFor(GiveTo) remapTo(PutOn, DirectObject, IndirectObject) +; + ++++ necklace: Wearable + 'pearl necklace/string pearls' '<> of pearls' + "This is a masterfully crafted pearl necklace. You hope the statue + won’t mind if you hold onto it for a while. " + initDesc = "You gave the statue this pearl necklace yesterday. " + isPlural = true +; + +altarRoom: Room 'At the Altar' + "Light from the window illuminates a crude altar. Until recently, this + corner was your bedroom. The rest of the studio lies north and west. " + north = sinkRoom + northwest = workbenchRoom + west = entrance + getDestName(actor, origin) { return 'the altar'; } +; + ++ window: Fixture 'window' 'window' + "It’s just a window above the altar. <>The space under the + window is blank; as an interior <>, you can’t + help but think the wall would benefit from a bas-relief, but – + sigh &endash you are too lovelorn to wield the chisel. <<||>>The + wall right below it is a boring <>. <>" +; + ++ altar: Fixture, Surface 'crude rough altar/banker/slab' 'altar' + "A rough marble slab lies on a wooden banker. In your rush to construct an + altar, you neglected the usual surface finish and friezes, but you pray at + it anyway. You are sure the gods will understand. " + material = 'marble' 'wood' 'wooden' + bulkCapacity = 1 + dobjFor(PrayAt) + { + verify { } + action() + { + /* + * Biaulx dieux diſt il tout ce poez. + * Sil voꝰ plaiſt ma requeſte oez + * [...] + * Et la belle q̇ mon cueᷣ emble + * Qui ſi bien yuoyꝛe reſſemble. + * Deuiengne ma loyal amye + * De fẽme ait coꝛps ame et vie + * (MS. Douce 195, fol. 151r) + */ + local offering; + foreach (offering in contents); + if (!keywordToken.scoreCount) + "<>O Aphrodite, you say, comforter of + hopeless lovers, hear my prayer! May she to whom I have given + my heart be given body, soul, and life. And a colorful + personality. And&mdash\b + You are interrupted by a shimmering about the altar. As you + watch, it takes the form of a callipygian goddess.\b + Mortal, I have heard your heart-felt and oft-repeated plea, + and I will take pity on you, says Aphrodite. If you give + me a token of your love as an offering, I will give you the + <> of life. Speak this word in the + presence of a mirror, and I will grant your request.\b + She fades away, adding, As for her colorful personality, + just look around you. 
<><>"; + else if (key.location) + "O Aphrodite, you say, what am I supposed to do + again?\bThe goddess reappears and reminds you to speak the + keyword of life at a mirror. <>What’s the + keyword, then? Gods help those who help themselves. + Figure it out yourself.<>Why a mirror? I like + mirrors.<> "; + else if (offering == necklace) + { + "Aphrodite reappears. A necklace! Perfect! The necklace + disappears in a bright flash. When your eyes readjust, you see + a key lying in its place. "; + necklace.moveInto(nil); + key.makePresent(); + } + else if (+offering) + "Aphrodite reappears. She eyes <> + skeptically. <>No <>.<>You + call that a token of love?<>\^<>? + Really?<>Come on, mortal, it’s not that + difficult!<> "; + else + "I heard you the first time, says Aphrodite. Prove + your devotion by offering a token of your love at the altar, + or the deal’s off. "; + } + } + iobjFor(GiveTo) remapTo(PutOn, DirectObject, IndirectObject) +; + +aphrodite: Unthing + '(love) aphrodite/cytherea/god/goddess/venus love' 'Aphrodite' + '<>You can only pray to a god. + <>You need an altar to interact with a god. ' + location = (gPlayerChar) + isProperName = true + isHer = true + iobjFor(GiveTo) + { + verify + { + illogical('She isn’t here. You’ll have to leave {the + dobj/him} somewhere she can find it. '); + } + } + dobjFor(PrayAt) maybeRemapTo(gActor.canSee(altar), PrayAt, altar) +; + +sinkRoom: Room 'Washroom' + "Sculpting marble is a dusty business. You use this sink to clean off after + a hard day’s work. Beside the sink is a small end table, and on the + wall is a calculator. The rest of the studio is south and west. " + south = altarRoom + southwest = entrance + west = workbenchRoom +; + +property level, overflowing; +export overflowing; +export level 'waterLevel'; ++ sink: Fixture '(auto) (mop) auto-sink/autosink/bowl/drain/faucet/sink' 'sink' + "This is a state-of-the-art mop sink with anti-miasmic coating and bronze + backsplash. It is so modern, there are no handles or other obvious ways to + turn it on.\b + <>It is overflowing. + <>It is full to the brim with water. + <= 15000>>It is full of water. + <>It is half full of water. + <= 2000>>There is some water in the sink. + < 0>>A small puddle has formed at the bottom of the sink. + <>It is empty. + <>It looks like it hasn’t been used in a + <> time. " + level = not in ([lst]) { return argcount; } + not = in() + overflowing = nil + current = self + setLevel(level:) + { + targetobj.current.overflowing = level == nil; + targetobj.current.level = min(level ?? 0, 20000); + if (sink.overflowing || sink.level > 0e+1) + sinkWater.makePresent(); + if (basin.overflowing || basin.level > 0e-1) + basinWater.makePresent(); + } + iobjFor(CleanWith) remapTo(CleanWith, DirectObject, sinkWater) +; + +++ sinkWater: PresentLater, Fixture + '(sink) water sink water' 'water' "<>" + disambigName = 'water in the sink' + dobjFor(Drink) + { + verify { illogical('''{You're} not thirsty. '''); } + } + iobjFor(CleanWith) + { + preCond = [] + verify { + if (!location) + illogicalNow('There is no water in the sink. '); + if (!sink.overflowing && sink.level < 1e2) + illogicalNow('There is not enough water in the sink. '); + } + } +; + ++ table: Fixture, Surface 'small end bracket/table' 'table' + "<>Upon closer inspection, you see that \v<>The table is + bracketed to the wall. " +; + +++ Readable '"operator\'s" manual' 'manual' + "
    <>’s Manual<\center>\b + To control the auto-sink, use the calculator add-on to enter the + desired volume of water. For example,\n + \t\t<>\n + to fill the basin with <<% ,d 0x69 * 0105>> kochliaria B) --> (list A) --> (list B) } + F X -> (typed-map-h F X [])) + +(define typed-map-h + { (A --> B) --> (list A) --> (list B) \\ comment + --> (list B) } + _ [] X -> (reverse X) + F [X | Y] Z -> (typed-map-h F Y [(F X) | Z])) + +(define append-string + { string --> string \* comment *\ --> string } + S1 S2 -> (cn S1 S2)) + +(let X 1 + Y 2 + (+ (type X number) (type Y number))) + +\\ Yacc + +(defcc + + := (package-macro (macroexpand ) ); + := [{ | ]; + := [} | ]; + := [bar! | ]; + := [; | ]; + := [:= | ]; + := [:- | ]; + := [: | ]; + := [(intern ",") | ]; + := [];) + +(defcc + 91 := skip;) + +\\ Pattern matching + +(define matches + 1 X 3 -> X + X Y Z -> Y where (and (= X 1) (= Z 3)) + true false _ -> true + (@p a X c) (@s X "abc") (@v 1 2 3 <>) -> true + [X | Rest] [] [a b c] -> true + [(@p a b)] [[[1] 2] X] "string" -> true + _ _ _ -> false) + + +\\ Prolog + +(defprolog th* + X A Hyps <-- (show [X : A] Hyps) (when false); + X A _ <-- (fwhen (typedf? X)) (bind F (sigf X)) (call [F A]); + (mode [F] -) A Hyp <-- (th* F [--> A] Hyp); + (mode [cons X Y] -) [list A] Hyp <-- (th* X A Hyp) (th* Y [list A] Hyp); + (mode [@s X Y] -) string Hyp <-- (th* X string Hyp) (th* Y string Hyp); + (mode [lambda X Y] -) [A --> B] Hyp <-- ! + (bind X&& (placeholder)) + (bind Z (ebr X&& X Y)) + (th* Z B [[X&& : A] | Hyp]); + (mode [type X A] -) B Hyp <-- ! (unify A B) (th* X A Hyp);) + +\\ Macros + +(defmacro log-macro + [log N] -> [log N 10]) + +\\ Sequent calculus + +(datatype rank + + if (element? X [ace 2 3 4 5 6 7 8 9 10 jack queen king]) + ________ + X : rank;) + +(datatype suit + + if (element? Suit [spades hearts diamonds clubs]) + _________ + Suit : suit;) + +(datatype card + + Rank : rank; Suit : suit; + _________________ + [Rank Suit] : card; + + Rank : rank, Suit : suit >> P; + _____________________ + [Rank Suit] : card >> P;) + +(datatype card + + Rank : rank; Suit : suit; + ================== + [Rank Suit] : card;) + +\\ String interpolation and escape sequences + +"abc~A ~S~R ~% blah + c#30;c#31;blah" + +) diff --git a/vendor/pygments/tests/examplefiles/test.swift b/vendor/pygments/tests/examplefiles/test.swift new file mode 100644 index 0000000..8ef1976 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/test.swift @@ -0,0 +1,65 @@ +// +// test.swift +// from https://github.com/fullstackio/FlappySwift +// +// Created by Nate Murray on 6/2/14. +// Copyright (c) 2014 Fullstack.io. All rights reserved. +// + +import UIKit +import SpriteKit + +extension SKNode { + class func unarchiveFromFile(file : NSString) -> SKNode? { + + let path = NSBundle.mainBundle().pathForResource(file, ofType: "sks") + + var sceneData = NSData.dataWithContentsOfFile(path, options: .DataReadingMappedIfSafe, error: nil) + var archiver = NSKeyedUnarchiver(forReadingWithData: sceneData) + + archiver.setClass(self.classForKeyedUnarchiver(), forClassName: "SKScene") + let scene = archiver.decodeObjectForKey(NSKeyedArchiveRootObjectKey) as GameScene + archiver.finishDecoding() + return scene + } +} + +class GameViewController: UIViewController { + + override func viewDidLoad() { + super.viewDidLoad() + + if let scene = GameScene.unarchiveFromFile("GameScene") as? GameScene { + // Configure the view. 
+ let skView = self.view as SKView + skView.showsFPS = true + skView.showsNodeCount = true + + /* Sprite Kit applies additional optimizations to improve rendering performance */ + skView.ignoresSiblingOrder = true + + /* Set the scale mode to scale to fit the window */ + scene.scaleMode = .AspectFill + + skView.presentScene(scene) + } + } + + override func shouldAutorotate() -> Bool { + return true + } + + override func supportedInterfaceOrientations() -> Int { + if UIDevice.currentDevice().userInterfaceIdiom == .Phone { + return Int(UIInterfaceOrientationMask.AllButUpsideDown.toRaw()) + } else { + return Int(UIInterfaceOrientationMask.All.toRaw()) + } + } + + override func didReceiveMemoryWarning() { + super.didReceiveMemoryWarning() + // Release any cached data, images, etc that aren't in use. + } + +} diff --git a/vendor/pygments/tests/examplefiles/test.zep b/vendor/pygments/tests/examplefiles/test.zep new file mode 100644 index 0000000..4724d4c --- /dev/null +++ b/vendor/pygments/tests/examplefiles/test.zep @@ -0,0 +1,33 @@ +namespace Test; + +use Test\Foo; + +class Bar +{ + protected a; + private b; + public c {set, get}; + + public function __construct(string str, boolean bool) + { + let this->c = str; + this->setC(bool); + let this->b = []; + } + + public function sayHello(string name) + { + echo "Hello " . name; + } + + protected function loops() + { + for a in b { + echo a; + } + loop { + return "boo!"; + } + } + +} \ No newline at end of file diff --git a/vendor/pygments/tests/examplefiles/test2.odin b/vendor/pygments/tests/examplefiles/test2.odin new file mode 100644 index 0000000..2a6b451 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/test2.odin @@ -0,0 +1,30 @@ +school_schedule = < + lesson_times = <08:30:00, 09:30:00, 10:30:00, ...> + + locations = < + [1] = <"under the big plane tree"> + [2] = <"under the north arch"> + [3] = <"in a garden"> + > + + subjects = < + ["philosophy:plato"] = < -- note construction of key + name = <"philosophy"> + teacher = <"plato"> + topics = <"meta-physics", "natural science"> + weighting = <76%> + > + ["philosophy:kant"] = < + name = <"philosophy"> + teacher = <"kant"> + topics = <"meaning and reason", "meta-physics", "ethics"> + weighting = <80%> + > + ["art"] = < + name = <"art"> + teacher = <"goya"> + topics = <"technique", "portraiture", "satire"> + weighting = <78%> + > + > +> diff --git a/vendor/pygments/tests/examplefiles/test_basic.adls b/vendor/pygments/tests/examplefiles/test_basic.adls new file mode 100644 index 0000000..df5aa74 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/test_basic.adls @@ -0,0 +1,28 @@ +-- +-- Example of an openEHR Archetype, written in the Archetype Definition Language (ADL) +-- Definition available here: http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf +-- Author: derived from the openEHR-EHR-EVALUATION.adverse_reaction.v1 archetype at http://www.openEHR.org/ckm +-- + +archetype (adl_version=2.0.5; rm_release=1.0.2; generated) + openEHR-EHR-EVALUATION.adverse_reaction.v1.0.0 + +language + original_language = <[ISO_639-1::en]> + +description + lifecycle_state = <"unmanaged"> + +definition + EVALUATION[id1] + +terminology + term_definitions = < + ["en"] = < + ["id1"] = < + text = <"Adverse Reaction"> + description = <"xxx"> + > + > + > + diff --git a/vendor/pygments/tests/examplefiles/twig_test b/vendor/pygments/tests/examplefiles/twig_test new file mode 100644 index 0000000..0932fe9 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/twig_test @@ -0,0 +1,4612 @@ +From the Twig 
test suite, https://github.com/fabpot/Twig, available under BSD license. + +--TEST-- +Exception for an unclosed tag +--TEMPLATE-- +{% block foo %} + {% if foo %} + + + + + {% for i in fo %} + + + + {% endfor %} + + + +{% endblock %} +--EXCEPTION-- +Twig_Error_Syntax: Unexpected tag name "endblock" (expecting closing tag for the "if" tag defined near line 4) in "index.twig" at line 16 +--TEST-- +Exception for an undefined trait +--TEMPLATE-- +{% use 'foo' with foobar as bar %} +--TEMPLATE(foo)-- +{% block bar %} +{% endblock %} +--EXCEPTION-- +Twig_Error_Runtime: Block "foobar" is not defined in trait "foo" in "index.twig". +--TEST-- +Twig supports method calls +--TEMPLATE-- +{{ items.foo }} +{{ items['foo'] }} +{{ items[foo] }} +{{ items[items[foo]] }} +--DATA-- +return array('foo' => 'bar', 'items' => array('foo' => 'bar', 'bar' => 'foo')) +--EXPECT-- +bar +bar +foo +bar +--TEST-- +Twig supports array notation +--TEMPLATE-- +{# empty array #} +{{ []|join(',') }} + +{{ [1, 2]|join(',') }} +{{ ['foo', "bar"]|join(',') }} +{{ {0: 1, 'foo': 'bar'}|join(',') }} +{{ {0: 1, 'foo': 'bar'}|keys|join(',') }} + +{{ {0: 1, foo: 'bar'}|join(',') }} +{{ {0: 1, foo: 'bar'}|keys|join(',') }} + +{# nested arrays #} +{% set a = [1, 2, [1, 2], {'foo': {'foo': 'bar'}}] %} +{{ a[2]|join(',') }} +{{ a[3]["foo"]|join(',') }} + +{# works even if [] is used inside the array #} +{{ [foo[bar]]|join(',') }} + +{# elements can be any expression #} +{{ ['foo'|upper, bar|upper, bar == foo]|join(',') }} + +{# arrays can have a trailing , like in PHP #} +{{ + [ + 1, + 2, + ]|join(',') +}} + +{# keys can be any expression #} +{% set a = 1 %} +{% set b = "foo" %} +{% set ary = { (a): 'a', (b): 'b', 'c': 'c', (a ~ b): 'd' } %} +{{ ary|keys|join(',') }} +{{ ary|join(',') }} +--DATA-- +return array('bar' => 'bar', 'foo' => array('bar' => 'bar')) +--EXPECT-- +1,2 +foo,bar +1,bar +0,foo + +1,bar +0,foo + +1,2 +bar + +bar + +FOO,BAR, + +1,2 + +1,foo,c,1foo +a,b,c,d +--TEST-- +Twig supports binary operations (+, -, *, /, ~, %, and, or) +--TEMPLATE-- +{{ 1 + 1 }} +{{ 2 - 1 }} +{{ 2 * 2 }} +{{ 2 / 2 }} +{{ 3 % 2 }} +{{ 1 and 1 }} +{{ 1 and 0 }} +{{ 0 and 1 }} +{{ 0 and 0 }} +{{ 1 or 1 }} +{{ 1 or 0 }} +{{ 0 or 1 }} +{{ 0 or 0 }} +{{ 0 or 1 and 0 }} +{{ 1 or 0 and 1 }} +{{ "foo" ~ "bar" }} +{{ foo ~ "bar" }} +{{ "foo" ~ bar }} +{{ foo ~ bar }} +{{ 20 // 7 }} +--DATA-- +return array('foo' => 'bar', 'bar' => 'foo') +--EXPECT-- +2 +1 +4 +1 +1 +1 + + + +1 +1 +1 + + +1 +foobar +barbar +foofoo +barfoo +2 +--TEST-- +Twig supports bitwise operations +--TEMPLATE-- +{{ 1 b-and 5 }} +{{ 1 b-or 5 }} +{{ 1 b-xor 5 }} +{{ (1 and 0 b-or 0) is same as(1 and (0 b-or 0)) ? 'ok' : 'ko' }} +--DATA-- +return array() +--EXPECT-- +1 +5 +4 +ok +--TEST-- +Twig supports comparison operators (==, !=, <, >, >=, <=) +--TEMPLATE-- +{{ 1 > 2 }}/{{ 1 > 1 }}/{{ 1 >= 2 }}/{{ 1 >= 1 }} +{{ 1 < 2 }}/{{ 1 < 1 }}/{{ 1 <= 2 }}/{{ 1 <= 1 }} +{{ 1 == 1 }}/{{ 1 == 2 }} +{{ 1 != 1 }}/{{ 1 != 2 }} +--DATA-- +return array() +--EXPECT-- +///1 +1//1/1 +1/ +/1 +--TEST-- +Twig supports the "divisible by" operator +--TEMPLATE-- +{{ 8 is divisible by(2) ? 'OK' }} +{{ 8 is not divisible by(3) ? 'OK' }} +{{ 8 is divisible by (2) ? 'OK' }} +{{ 8 is not + divisible + by + (3) ? 'OK' }} +--DATA-- +return array() +--EXPECT-- +OK +OK +OK +OK +--TEST-- +Twig supports the .. 
operator +--TEMPLATE-- +{% for i in 0..10 %}{{ i }} {% endfor %} + +{% for letter in 'a'..'z' %}{{ letter }} {% endfor %} + +{% for letter in 'a'|upper..'z'|upper %}{{ letter }} {% endfor %} + +{% for i in foo[0]..foo[1] %}{{ i }} {% endfor %} + +{% for i in 0 + 1 .. 10 - 1 %}{{ i }} {% endfor %} +--DATA-- +return array('foo' => array(1, 10)) +--EXPECT-- +0 1 2 3 4 5 6 7 8 9 10 +a b c d e f g h i j k l m n o p q r s t u v w x y z +A B C D E F G H I J K L M N O P Q R S T U V W X Y Z +1 2 3 4 5 6 7 8 9 10 +1 2 3 4 5 6 7 8 9 +--TEST-- +Twig supports the "ends with" operator +--TEMPLATE-- +{{ 'foo' ends with 'o' ? 'OK' : 'KO' }} +{{ not ('foo' ends with 'f') ? 'OK' : 'KO' }} +{{ not ('foo' ends with 'foowaytoolong') ? 'OK' : 'KO' }} +{{ 'foo' ends with '' ? 'OK' : 'KO' }} +{{ '1' ends with true ? 'OK' : 'KO' }} +{{ 1 ends with true ? 'OK' : 'KO' }} +{{ 0 ends with false ? 'OK' : 'KO' }} +{{ '' ends with false ? 'OK' : 'KO' }} +{{ false ends with false ? 'OK' : 'KO' }} +{{ false ends with '' ? 'OK' : 'KO' }} +--DATA-- +return array() +--EXPECT-- +OK +OK +OK +OK +KO +KO +KO +KO +KO +KO +--TEST-- +Twig supports grouping of expressions +--TEMPLATE-- +{{ (2 + 2) / 2 }} +--DATA-- +return array() +--EXPECT-- +2 +--TEST-- +Twig supports literals +--TEMPLATE-- +1 {{ true }} +2 {{ TRUE }} +3 {{ false }} +4 {{ FALSE }} +5 {{ none }} +6 {{ NONE }} +7 {{ null }} +8 {{ NULL }} +--DATA-- +return array() +--EXPECT-- +1 1 +2 1 +3 +4 +5 +6 +7 +8 +--TEST-- +Twig supports __call() for attributes +--TEMPLATE-- +{{ foo.foo }} +{{ foo.bar }} +--EXPECT-- +foo_from_call +bar_from_getbar +--TEST-- +Twig supports the "matches" operator +--TEMPLATE-- +{{ 'foo' matches '/o/' ? 'OK' : 'KO' }} +{{ 'foo' matches '/^fo/' ? 'OK' : 'KO' }} +{{ 'foo' matches '/O/i' ? 'OK' : 'KO' }} +--DATA-- +return array() +--EXPECT-- +OK +OK +OK +--TEST-- +Twig supports method calls +--TEMPLATE-- +{{ items.foo.foo }} +{{ items.foo.getFoo() }} +{{ items.foo.bar }} +{{ items.foo['bar'] }} +{{ items.foo.bar('a', 43) }} +{{ items.foo.bar(foo) }} +{{ items.foo.self.foo() }} +{{ items.foo.is }} +{{ items.foo.in }} +{{ items.foo.not }} +--DATA-- +return array('foo' => 'bar', 'items' => array('foo' => new TwigTestFoo(), 'bar' => 'foo')) +--CONFIG-- +return array('strict_variables' => false) +--EXPECT-- +foo +foo +bar + +bar_a-43 +bar_bar +foo +is +in +not +--TEST-- +Twig allows to use named operators as variable names +--TEMPLATE-- +{% for match in matches %} + {{- match }} +{% endfor %} +{{ in }} +{{ is }} +--DATA-- +return array('matches' => array(1, 2, 3), 'in' => 'in', 'is' => 'is') +--EXPECT-- +1 +2 +3 +in +is +--TEST-- +Twig parses postfix expressions +--TEMPLATE-- +{% import _self as macros %} + +{% macro foo() %}foo{% endmacro %} + +{{ 'a' }} +{{ 'a'|upper }} +{{ ('a')|upper }} +{{ -1|upper }} +{{ macros.foo() }} +{{ (macros).foo() }} +--DATA-- +return array(); +--EXPECT-- +a +A +A +-1 +foo +foo +--TEST-- +Twig supports the "same as" operator +--TEMPLATE-- +{{ 1 is same as(1) ? 'OK' }} +{{ 1 is not same as(true) ? 'OK' }} +{{ 1 is same as(1) ? 'OK' }} +{{ 1 is not same as(true) ? 'OK' }} +{{ 1 is same as (1) ? 'OK' }} +{{ 1 is not + same + as + (true) ? 'OK' }} +--DATA-- +return array() +--EXPECT-- +OK +OK +OK +OK +OK +OK +--TEST-- +Twig supports the "starts with" operator +--TEMPLATE-- +{{ 'foo' starts with 'f' ? 'OK' : 'KO' }} +{{ not ('foo' starts with 'oo') ? 'OK' : 'KO' }} +{{ not ('foo' starts with 'foowaytoolong') ? 'OK' : 'KO' }} +{{ 'foo' starts with 'f' ? 'OK' : 'KO' }} +{{ 'foo' starts +with 'f' ? 
'OK' : 'KO' }} +{{ 'foo' starts with '' ? 'OK' : 'KO' }} +{{ '1' starts with true ? 'OK' : 'KO' }} +{{ '' starts with false ? 'OK' : 'KO' }} +{{ 'a' starts with false ? 'OK' : 'KO' }} +{{ false starts with '' ? 'OK' : 'KO' }} +--DATA-- +return array() +--EXPECT-- +OK +OK +OK +OK +OK +OK +KO +KO +KO +KO +--TEST-- +Twig supports string interpolation +--TEMPLATE-- +{# "foo #{"foo #{bar} baz"} baz" #} +{# "foo #{bar}#{bar} baz" #} +--DATA-- +return array('bar' => 'BAR'); +--EXPECT-- +foo foo BAR baz baz +foo BARBAR baz +--TEST-- +Twig supports the ternary operator +--TEMPLATE-- +{{ 1 ? 'YES' }} +{{ 0 ? 'YES' }} +--DATA-- +return array() +--EXPECT-- +YES + +--TEST-- +Twig supports the ternary operator +--TEMPLATE-- +{{ 'YES' ?: 'NO' }} +{{ 0 ?: 'NO' }} +--DATA-- +return array() +--EXPECT-- +YES +NO +--TEST-- +Twig supports the ternary operator +--TEMPLATE-- +{{ 1 ? 'YES' : 'NO' }} +{{ 0 ? 'YES' : 'NO' }} +{{ 0 ? 'YES' : (1 ? 'YES1' : 'NO1') }} +{{ 0 ? 'YES' : (0 ? 'YES1' : 'NO1') }} +{{ 1 == 1 ? 'foo
<br />':'' }} +{{ foo ~ (bar ? ('-' ~ bar) : '') }} +--DATA-- +return array('foo' => 'foo', 'bar' => 'bar') +--EXPECT-- +YES +NO +YES1 +NO1 +foo<br />
    +foo-bar +--TEST-- +Twig does not allow to use two-word named operators as variable names +--TEMPLATE-- +{{ starts with }} +--DATA-- +return array() +--EXCEPTION-- +Twig_Error_Syntax: Unexpected token "operator" of value "starts with" in "index.twig" at line 2 +--TEST-- +Twig unary operators precedence +--TEMPLATE-- +{{ -1 - 1 }} +{{ -1 - -1 }} +{{ -1 * -1 }} +{{ 4 / -1 * 5 }} +--DATA-- +return array() +--EXPECT-- +-2 +0 +1 +-20 +--TEST-- +Twig supports unary operators (not, -, +) +--TEMPLATE-- +{{ not 1 }}/{{ not 0 }} +{{ +1 + 1 }}/{{ -1 - 1 }} +{{ not (false or true) }} +--DATA-- +return array() +--EXPECT-- +/1 +2/-2 + +--TEST-- +"abs" filter +--TEMPLATE-- +{{ (-5.5)|abs }} +{{ (-5)|abs }} +{{ (-0)|abs }} +{{ 0|abs }} +{{ 5|abs }} +{{ 5.5|abs }} +{{ number1|abs }} +{{ number2|abs }} +{{ number3|abs }} +{{ number4|abs }} +{{ number5|abs }} +{{ number6|abs }} +--DATA-- +return array('number1' => -5.5, 'number2' => -5, 'number3' => -0, 'number4' => 0, 'number5' => 5, 'number6' => 5.5) +--EXPECT-- +5.5 +5 +0 +0 +5 +5.5 +5.5 +5 +0 +0 +5 +5.5 +--TEST-- +"batch" filter +--TEMPLATE-- +{% for row in items|batch(3.1) %} +
    + {% for column in row %} +
    {{ column }}
    + {% endfor %} +
    +{% endfor %} +--DATA-- +return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) +--EXPECT-- +
    +
    a
    +
    b
    +
    c
    +
    d
    +
    +
    +
    e
    +
    f
    +
    g
    +
    h
    +
    +
    +
    i
    +
    j
    +
    +--TEST-- +"batch" filter +--TEMPLATE-- +{% for row in items|batch(3) %} +
    + {% for column in row %} +
    {{ column }}
    + {% endfor %} +
    +{% endfor %} +--DATA-- +return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) +--EXPECT-- +
    +
    a
    +
    b
    +
    c
    +
    +
    +
    d
    +
    e
    +
    f
    +
    +
    +
    g
    +
    h
    +
    i
    +
    +
    +
    j
    +
    +--TEST-- +"batch" filter +--TEMPLATE-- + +{% for row in items|batch(3, '') %} + + {% for column in row %} + + {% endfor %} + +{% endfor %} +
    {{ column }}
    +--DATA-- +return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) +--EXPECT-- + + + + + + + + + + + + + + + + + + + + + +
    abc
    def
    ghi
    j
    +--TEST-- +"batch" filter +--TEMPLATE-- +{% for row in items|batch(3, 'fill') %} +
    + {% for column in row %} +
    {{ column }}
    + {% endfor %} +
    +{% endfor %} +--DATA-- +return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l')) +--EXPECT-- +
    +
    a
    +
    b
    +
    c
    +
    +
    +
    d
    +
    e
    +
    f
    +
    +
    +
    g
    +
    h
    +
    i
    +
    +
    +
    j
    +
    k
    +
    l
    +
    +--TEST-- +"batch" filter +--TEMPLATE-- + +{% for row in items|batch(3, 'fill') %} + + {% for column in row %} + + {% endfor %} + +{% endfor %} +
    {{ column }}
    +--DATA-- +return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) +--EXPECT-- + + + + + + + + + + + + + + + + + + + + + +
    abc
    def
    ghi
    jfillfill
    +--TEST-- +"convert_encoding" filter +--CONDITION-- +function_exists('iconv') || function_exists('mb_convert_encoding') +--TEMPLATE-- +{{ "愛していますか?"|convert_encoding('ISO-2022-JP', 'UTF-8')|convert_encoding('UTF-8', 'ISO-2022-JP') }} +--DATA-- +return array() +--EXPECT-- +愛していますか? +--TEST-- +"date" filter (interval support as of PHP 5.3) +--CONDITION-- +version_compare(phpversion(), '5.3.0', '>=') +--TEMPLATE-- +{{ date2|date }} +{{ date2|date('%d days') }} +--DATA-- +date_default_timezone_set('UTC'); +$twig->getExtension('core')->setDateFormat('Y-m-d', '%d days %h hours'); +return array( + 'date2' => new DateInterval('P2D'), +) +--EXPECT-- +2 days 0 hours +2 days +--TEST-- +"date" filter +--TEMPLATE-- +{{ date1|date }} +{{ date1|date('d/m/Y') }} +--DATA-- +date_default_timezone_set('UTC'); +$twig->getExtension('core')->setDateFormat('Y-m-d', '%d days %h hours'); +return array( + 'date1' => mktime(13, 45, 0, 10, 4, 2010), +) +--EXPECT-- +2010-10-04 +04/10/2010 +--TEST-- +"date" filter +--CONDITION-- +version_compare(phpversion(), '5.5.0', '>=') +--TEMPLATE-- +{{ date1|date }} +{{ date1|date('d/m/Y') }} +{{ date1|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }} +{{ date1|date('d/m/Y H:i:s', timezone1) }} +{{ date1|date('d/m/Y H:i:s') }} + +{{ date2|date('d/m/Y H:i:s P', 'Europe/Paris') }} +{{ date2|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }} +{{ date2|date('d/m/Y H:i:s P', false) }} +{{ date2|date('e', 'Europe/Paris') }} +{{ date2|date('e', false) }} +--DATA-- +date_default_timezone_set('Europe/Paris'); +return array( + 'date1' => new DateTimeImmutable('2010-10-04 13:45'), + 'date2' => new DateTimeImmutable('2010-10-04 13:45', new DateTimeZone('America/New_York')), + 'timezone1' => new DateTimeZone('America/New_York'), +) +--EXPECT-- +October 4, 2010 13:45 +04/10/2010 +04/10/2010 19:45:00 +04/10/2010 07:45:00 +04/10/2010 13:45:00 + +04/10/2010 19:45:00 +02:00 +05/10/2010 01:45:00 +08:00 +04/10/2010 13:45:00 -04:00 +Europe/Paris +America/New_York +--TEST-- +"date" filter (interval support as of PHP 5.3) +--CONDITION-- +version_compare(phpversion(), '5.3.0', '>=') +--TEMPLATE-- +{{ date1|date }} +{{ date1|date('%d days %h hours') }} +{{ date1|date('%d days %h hours', timezone1) }} +--DATA-- +date_default_timezone_set('UTC'); +return array( + 'date1' => new DateInterval('P2D'), + // This should have no effect on DateInterval formatting + 'timezone1' => new DateTimeZone('America/New_York'), +) +--EXPECT-- +2 days +2 days 0 hours +2 days 0 hours +--TEST-- +"date_modify" filter +--TEMPLATE-- +{{ date1|date_modify('-1day')|date('Y-m-d H:i:s') }} +{{ date2|date_modify('-1day')|date('Y-m-d H:i:s') }} +--DATA-- +date_default_timezone_set('UTC'); +return array( + 'date1' => '2010-10-04 13:45', + 'date2' => new DateTime('2010-10-04 13:45'), +) +--EXPECT-- +2010-10-03 13:45:00 +2010-10-03 13:45:00 +--TEST-- +"date" filter +--TEMPLATE-- +{{ date|date(format='d/m/Y H:i:s P', timezone='America/Chicago') }} +{{ date|date(timezone='America/Chicago', format='d/m/Y H:i:s P') }} +{{ date|date('d/m/Y H:i:s P', timezone='America/Chicago') }} +--DATA-- +date_default_timezone_set('UTC'); +return array('date' => mktime(13, 45, 0, 10, 4, 2010)) +--EXPECT-- +04/10/2010 08:45:00 -05:00 +04/10/2010 08:45:00 -05:00 +04/10/2010 08:45:00 -05:00 +--TEST-- +"date" filter +--TEMPLATE-- +{{ date1|date }} +{{ date1|date('d/m/Y') }} +{{ date1|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }} +{{ date1|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }} +{{ date1|date('d/m/Y H:i:s P', 'America/Chicago') }} +{{ date1|date('e') }} +{{ date1|date('d/m/Y 
H:i:s') }} + +{{ date2|date }} +{{ date2|date('d/m/Y') }} +{{ date2|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }} +{{ date2|date('d/m/Y H:i:s', timezone1) }} +{{ date2|date('d/m/Y H:i:s') }} + +{{ date3|date }} +{{ date3|date('d/m/Y') }} + +{{ date4|date }} +{{ date4|date('d/m/Y') }} + +{{ date5|date }} +{{ date5|date('d/m/Y') }} + +{{ date6|date('d/m/Y H:i:s P', 'Europe/Paris') }} +{{ date6|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }} +{{ date6|date('d/m/Y H:i:s P', false) }} +{{ date6|date('e', 'Europe/Paris') }} +{{ date6|date('e', false) }} + +{{ date7|date }} +--DATA-- +date_default_timezone_set('Europe/Paris'); +return array( + 'date1' => mktime(13, 45, 0, 10, 4, 2010), + 'date2' => new DateTime('2010-10-04 13:45'), + 'date3' => '2010-10-04 13:45', + 'date4' => 1286199900, // DateTime::createFromFormat('Y-m-d H:i', '2010-10-04 13:45', new DateTimeZone('UTC'))->getTimestamp() -- A unixtimestamp is always GMT + 'date5' => -189291360, // DateTime::createFromFormat('Y-m-d H:i', '1964-01-02 03:04', new DateTimeZone('UTC'))->getTimestamp(), + 'date6' => new DateTime('2010-10-04 13:45', new DateTimeZone('America/New_York')), + 'date7' => '2010-01-28T15:00:00+05:00', + 'timezone1' => new DateTimeZone('America/New_York'), +) +--EXPECT-- +October 4, 2010 13:45 +04/10/2010 +04/10/2010 19:45:00 +04/10/2010 19:45:00 +08:00 +04/10/2010 06:45:00 -05:00 +Europe/Paris +04/10/2010 13:45:00 + +October 4, 2010 13:45 +04/10/2010 +04/10/2010 19:45:00 +04/10/2010 07:45:00 +04/10/2010 13:45:00 + +October 4, 2010 13:45 +04/10/2010 + +October 4, 2010 15:45 +04/10/2010 + +January 2, 1964 04:04 +02/01/1964 + +04/10/2010 19:45:00 +02:00 +05/10/2010 01:45:00 +08:00 +04/10/2010 13:45:00 -04:00 +Europe/Paris +America/New_York + +January 28, 2010 11:00 +--TEST-- +"default" filter +--TEMPLATE-- +Variable: +{{ definedVar |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ zeroVar |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ emptyVar |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ nullVar |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ undefinedVar |default('default') is same as('default') ? 'ok' : 'ko' }} +Array access: +{{ nested.definedVar |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ nested['definedVar'] |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ nested.zeroVar |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ nested.emptyVar |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ nested.nullVar |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ nested.undefinedVar |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ nested['undefinedVar'] |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ undefinedVar.foo |default('default') is same as('default') ? 'ok' : 'ko' }} +Plain values: +{{ 'defined' |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ 0 |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ '' |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ null |default('default') is same as('default') ? 'ok' : 'ko' }} +Precedence: +{{ 'o' ~ nullVar |default('k') }} +{{ 'o' ~ nested.nullVar |default('k') }} +Object methods: +{{ object.foo |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ object.undefinedMethod |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ object.getFoo() |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ object.getFoo('a') |default('default') is same as('default') ? 
'ko' : 'ok' }} +{{ object.undefinedMethod() |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ object.undefinedMethod('a') |default('default') is same as('default') ? 'ok' : 'ko' }} +Deep nested: +{{ nested.undefinedVar.foo.bar |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ nested.definedArray.0 |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ nested['definedArray'][0] |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ object.self.foo |default('default') is same as('default') ? 'ko' : 'ok' }} +{{ object.self.undefinedMethod |default('default') is same as('default') ? 'ok' : 'ko' }} +{{ object.undefinedMethod.self |default('default') is same as('default') ? 'ok' : 'ko' }} +--DATA-- +return array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'emptyVar' => '', + 'nullVar' => null, + 'nested' => array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'emptyVar' => '', + 'nullVar' => null, + 'definedArray' => array(0), + ), + 'object' => new TwigTestFoo(), +) +--CONFIG-- +return array('strict_variables' => false) +--EXPECT-- +Variable: +ok +ok +ok +ok +ok +Array access: +ok +ok +ok +ok +ok +ok +ok +ok +Plain values: +ok +ok +ok +ok +Precedence: +ok +ok +Object methods: +ok +ok +ok +ok +ok +ok +Deep nested: +ok +ok +ok +ok +ok +ok +--DATA-- +return array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'emptyVar' => '', + 'nullVar' => null, + 'nested' => array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'emptyVar' => '', + 'nullVar' => null, + 'definedArray' => array(0), + ), + 'object' => new TwigTestFoo(), +) +--CONFIG-- +return array('strict_variables' => true) +--EXPECT-- +Variable: +ok +ok +ok +ok +ok +Array access: +ok +ok +ok +ok +ok +ok +ok +ok +Plain values: +ok +ok +ok +ok +Precedence: +ok +ok +Object methods: +ok +ok +ok +ok +ok +ok +Deep nested: +ok +ok +ok +ok +ok +ok +--TEST-- +dynamic filter +--TEMPLATE-- +{{ 'bar'|foo_path }} +{{ 'bar'|a_foo_b_bar }} +--DATA-- +return array() +--EXPECT-- +foo/bar +a/b/bar +--TEST-- +"escape" filter does not escape with the html strategy when using the html_attr strategy +--TEMPLATE-- +{{ '
<br />'|escape('html_attr') }} +--DATA-- +return array() +--EXPECT-- +<br /> +--TEST-- +"escape" filter +--TEMPLATE-- +{{ "愛していますか? <br />
    "|e }} +--DATA-- +return array() +--EXPECT-- +愛していますか? <br /> +--TEST-- +"escape" filter +--TEMPLATE-- +{{ "foo
    "|e }} +--DATA-- +return array() +--EXPECT-- +foo <br /> +--TEST-- +"first" filter +--TEMPLATE-- +{{ [1, 2, 3, 4]|first }} +{{ {a: 1, b: 2, c: 3, d: 4}|first }} +{{ '1234'|first }} +{{ arr|first }} +{{ 'Ä€é'|first }} +{{ ''|first }} +--DATA-- +return array('arr' => new ArrayObject(array(1, 2, 3, 4))) +--EXPECT-- +1 +1 +1 +1 +Ä +--TEST-- +"escape" filter +--TEMPLATE-- +{% set foo %} + foo
    +{% endset %} + +{{ foo|e('html') -}} +{{ foo|e('js') }} +{% autoescape true %} + {{ foo }} +{% endautoescape %} +--DATA-- +return array() +--EXPECT-- + foo<br /> +\x20\x20\x20\x20foo\x3Cbr\x20\x2F\x3E\x0A + foo
    +--TEST-- +"format" filter +--TEMPLATE-- +{{ string|format(foo, 3) }} +--DATA-- +return array('string' => '%s/%d', 'foo' => 'bar') +--EXPECT-- +bar/3 +--TEST-- +"join" filter +--TEMPLATE-- +{{ ["foo", "bar"]|join(', ') }} +{{ foo|join(', ') }} +{{ bar|join(', ') }} +--DATA-- +return array('foo' => new TwigTestFoo(), 'bar' => new ArrayObject(array(3, 4))) +--EXPECT-- +foo, bar +1, 2 +3, 4 +--TEST-- +"json_encode" filter +--TEMPLATE-- +{{ "foo"|json_encode|raw }} +{{ foo|json_encode|raw }} +{{ [foo, "foo"]|json_encode|raw }} +--DATA-- +return array('foo' => new Twig_Markup('foo', 'UTF-8')) +--EXPECT-- +"foo" +"foo" +["foo","foo"] +--TEST-- +"last" filter +--TEMPLATE-- +{{ [1, 2, 3, 4]|last }} +{{ {a: 1, b: 2, c: 3, d: 4}|last }} +{{ '1234'|last }} +{{ arr|last }} +{{ 'Ä€é'|last }} +{{ ''|last }} +--DATA-- +return array('arr' => new ArrayObject(array(1, 2, 3, 4))) +--EXPECT-- +4 +4 +4 +4 +é +--TEST-- +"length" filter +--TEMPLATE-- +{{ array|length }} +{{ string|length }} +{{ number|length }} +{{ markup|length }} +--DATA-- +return array('array' => array(1, 4), 'string' => 'foo', 'number' => 1000, 'markup' => new Twig_Markup('foo', 'UTF-8')) +--EXPECT-- +2 +3 +4 +3 +--TEST-- +"length" filter +--CONDITION-- +function_exists('mb_get_info') +--TEMPLATE-- +{{ string|length }} +{{ markup|length }} +--DATA-- +return array('string' => 'été', 'markup' => new Twig_Markup('foo', 'UTF-8')) +--EXPECT-- +3 +3 +--TEST-- +"merge" filter +--TEMPLATE-- +{{ items|merge({'bar': 'foo'})|join }} +{{ items|merge({'bar': 'foo'})|keys|join }} +{{ {'bar': 'foo'}|merge(items)|join }} +{{ {'bar': 'foo'}|merge(items)|keys|join }} +{{ numerics|merge([4, 5, 6])|join }} +--DATA-- +return array('items' => array('foo' => 'bar'), 'numerics' => array(1, 2, 3)) +--EXPECT-- +barfoo +foobar +foobar +barfoo +123456 +--TEST-- +"nl2br" filter +--TEMPLATE-- +{{ "I like Twig.\nYou will like it too.\n\nEverybody like it!"|nl2br }} +{{ text|nl2br }} +--DATA-- +return array('text' => "If you have some HTML\nit will be escaped.") +--EXPECT-- +I like Twig.
    +You will like it too.
    +
    +Everybody like it! +If you have some <strong>HTML</strong>
    +it will be escaped. +--TEST-- +"number_format" filter with defaults. +--TEMPLATE-- +{{ 20|number_format }} +{{ 20.25|number_format }} +{{ 20.25|number_format(1) }} +{{ 20.25|number_format(2, ',') }} +{{ 1020.25|number_format }} +{{ 1020.25|number_format(2, ',') }} +{{ 1020.25|number_format(2, ',', '.') }} +--DATA-- +$twig->getExtension('core')->setNumberFormat(2, '!', '='); +return array(); +--EXPECT-- +20!00 +20!25 +20!3 +20,25 +1=020!25 +1=020,25 +1.020,25 +--TEST-- +"number_format" filter +--TEMPLATE-- +{{ 20|number_format }} +{{ 20.25|number_format }} +{{ 20.25|number_format(2) }} +{{ 20.25|number_format(2, ',') }} +{{ 1020.25|number_format(2, ',') }} +{{ 1020.25|number_format(2, ',', '.') }} +--DATA-- +return array(); +--EXPECT-- +20 +20 +20.25 +20,25 +1,020,25 +1.020,25 +--TEST-- +"replace" filter +--TEMPLATE-- +{{ "I like %this% and %that%."|replace({'%this%': "foo", '%that%': "bar"}) }} +--DATA-- +return array() +--EXPECT-- +I like foo and bar. +--TEST-- +"reverse" filter +--TEMPLATE-- +{{ [1, 2, 3, 4]|reverse|join('') }} +{{ '1234évènement'|reverse }} +{{ arr|reverse|join('') }} +{{ {'a': 'c', 'b': 'a'}|reverse()|join(',') }} +{{ {'a': 'c', 'b': 'a'}|reverse(preserveKeys=true)|join(glue=',') }} +{{ {'a': 'c', 'b': 'a'}|reverse(preserve_keys=true)|join(glue=',') }} +--DATA-- +return array('arr' => new ArrayObject(array(1, 2, 3, 4))) +--EXPECT-- +4321 +tnemenèvé4321 +4321 +a,c +a,c +a,c +--TEST-- +"round" filter +--TEMPLATE-- +{{ 2.7|round }} +{{ 2.1|round }} +{{ 2.1234|round(3, 'floor') }} +{{ 2.1|round(0, 'ceil') }} + +{{ 21.3|round(-1)}} +{{ 21.3|round(-1, 'ceil')}} +{{ 21.3|round(-1, 'floor')}} +--DATA-- +return array() +--EXPECT-- +3 +2 +2.123 +3 + +20 +30 +20 +--TEST-- +"slice" filter +--TEMPLATE-- +{{ [1, 2, 3, 4][1:2]|join('') }} +{{ {a: 1, b: 2, c: 3, d: 4}[1:2]|join('') }} +{{ [1, 2, 3, 4][start:length]|join('') }} +{{ [1, 2, 3, 4]|slice(1, 2)|join('') }} +{{ [1, 2, 3, 4]|slice(1, 2)|keys|join('') }} +{{ [1, 2, 3, 4]|slice(1, 2, true)|keys|join('') }} +{{ {a: 1, b: 2, c: 3, d: 4}|slice(1, 2)|join('') }} +{{ {a: 1, b: 2, c: 3, d: 4}|slice(1, 2)|keys|join('') }} +{{ '1234'|slice(1, 2) }} +{{ '1234'[1:2] }} +{{ arr|slice(1, 2)|join('') }} +{{ arr[1:2]|join('') }} + +{{ [1, 2, 3, 4]|slice(1)|join('') }} +{{ [1, 2, 3, 4][1:]|join('') }} +{{ '1234'|slice(1) }} +{{ '1234'[1:] }} +{{ '1234'[:1] }} +--DATA-- +return array('start' => 1, 'length' => 2, 'arr' => new ArrayObject(array(1, 2, 3, 4))) +--EXPECT-- +23 +23 +23 +23 +01 +12 +23 +bc +23 +23 +23 +23 + +234 +234 +234 +234 +1 +--TEST-- +"sort" filter +--TEMPLATE-- +{{ array1|sort|join }} +{{ array2|sort|join }} +--DATA-- +return array('array1' => array(4, 1), 'array2' => array('foo', 'bar')) +--EXPECT-- +14 +barfoo +--TEST-- +"split" filter +--TEMPLATE-- +{{ "one,two,three,four,five"|split(',')|join('-') }} +{{ foo|split(',')|join('-') }} +{{ foo|split(',', 3)|join('-') }} +{{ baz|split('')|join('-') }} +{{ baz|split('', 2)|join('-') }} +{{ foo|split(',', -2)|join('-') }} +--DATA-- +return array('foo' => "one,two,three,four,five", 'baz' => '12345',) +--EXPECT-- +one-two-three-four-five +one-two-three-four-five +one-two-three,four,five +1-2-3-4-5 +12-34-5 +one-two-three--TEST-- +"trim" filter +--TEMPLATE-- +{{ " I like Twig. "|trim }} +{{ text|trim }} +{{ " foo/"|trim("/") }} +--DATA-- +return array('text' => " If you have some HTML it will be escaped. ") +--EXPECT-- +I like Twig. +If you have some <strong>HTML</strong> it will be escaped. 
+ foo +--TEST-- +"url_encode" filter for PHP < 5.4 and HHVM +--CONDITION-- +defined('PHP_QUERY_RFC3986') +--TEMPLATE-- +{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode }} +{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode|raw }} +{{ {}|url_encode|default("default") }} +{{ 'spéßi%le%c0d@dspa ce'|url_encode }} +--DATA-- +return array() +--EXPECT-- +foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= +foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= +default +sp%C3%A9%C3%9Fi%25le%25c0d%40dspa%20ce +--TEST-- +"url_encode" filter +--CONDITION-- +defined('PHP_QUERY_RFC3986') +--TEMPLATE-- +{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode }} +{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode|raw }} +{{ {}|url_encode|default("default") }} +{{ 'spéßi%le%c0d@dspa ce'|url_encode }} +--DATA-- +return array() +--EXPECT-- +foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= +foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= +default +sp%C3%A9%C3%9Fi%25le%25c0d%40dspa%20ce +--TEST-- +"attribute" function +--TEMPLATE-- +{{ attribute(obj, method) }} +{{ attribute(array, item) }} +{{ attribute(obj, "bar", ["a", "b"]) }} +{{ attribute(obj, "bar", arguments) }} +{{ attribute(obj, method) is defined ? 'ok' : 'ko' }} +{{ attribute(obj, nonmethod) is defined ? 'ok' : 'ko' }} +--DATA-- +return array('obj' => new TwigTestFoo(), 'method' => 'foo', 'array' => array('foo' => 'bar'), 'item' => 'foo', 'nonmethod' => 'xxx', 'arguments' => array('a', 'b')) +--EXPECT-- +foo +bar +bar_a-b +bar_a-b +ok +ko +--TEST-- +"block" function +--TEMPLATE-- +{% extends 'base.twig' %} +{% block bar %}BAR{% endblock %} +--TEMPLATE(base.twig)-- +{% block foo %}{{ block('bar') }}{% endblock %} +{% block bar %}BAR_BASE{% endblock %} +--DATA-- +return array() +--EXPECT-- +BARBAR +--TEST-- +"constant" function +--TEMPLATE-- +{{ constant('DATE_W3C') == expect ? 'true' : 'false' }} +{{ constant('ARRAY_AS_PROPS', object) }} +--DATA-- +return array('expect' => DATE_W3C, 'object' => new ArrayObject(array('hi'))); +--EXPECT-- +true +2 +--TEST-- +"cycle" function +--TEMPLATE-- +{% for i in 0..6 %} +{{ cycle(array1, i) }}-{{ cycle(array2, i) }} +{% endfor %} +--DATA-- +return array('array1' => array('odd', 'even'), 'array2' => array('apple', 'orange', 'citrus')) +--EXPECT-- +odd-apple +even-orange +odd-citrus +even-apple +odd-orange +even-citrus +odd-apple +--TEST-- +"date" function +--TEMPLATE-- +{{ date(date, "America/New_York")|date('d/m/Y H:i:s P', false) }} +{{ date(timezone="America/New_York", date=date)|date('d/m/Y H:i:s P', false) }} +--DATA-- +date_default_timezone_set('UTC'); +return array('date' => mktime(13, 45, 0, 10, 4, 2010)) +--EXPECT-- +04/10/2010 09:45:00 -04:00 +04/10/2010 09:45:00 -04:00 +--TEST-- +"date" function +--TEMPLATE-- +{{ date() == date('now') ? 'OK' : 'KO' }} +{{ date(date1) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} +{{ date(date2) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} +{{ date(date3) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} +{{ date(date4) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} +{{ date(date5) == date('1964-01-02 03:04') ? 
'OK' : 'KO' }} +--DATA-- +date_default_timezone_set('UTC'); +return array( + 'date1' => mktime(13, 45, 0, 10, 4, 2010), + 'date2' => new DateTime('2010-10-04 13:45'), + 'date3' => '2010-10-04 13:45', + 'date4' => 1286199900, // DateTime::createFromFormat('Y-m-d H:i', '2010-10-04 13:45', new DateTimeZone('UTC'))->getTimestamp() -- A unixtimestamp is always GMT + 'date5' => -189291360, // DateTime::createFromFormat('Y-m-d H:i', '1964-01-02 03:04', new DateTimeZone('UTC'))->getTimestamp(), +) +--EXPECT-- +OK +OK +OK +OK +OK +OK +--TEST-- +"dump" function, xdebug is not loaded or xdebug <2.2-dev is loaded +--CONDITION-- +!extension_loaded('xdebug') || (($r = new ReflectionExtension('xdebug')) && version_compare($r->getVersion(), '2.2-dev', '<')) +--TEMPLATE-- +{{ dump() }} +--DATA-- +return array('foo' => 'foo', 'bar' => 'bar') +--CONFIG-- +return array('debug' => true, 'autoescape' => false); +--TEST-- +"dump" function +--CONDITION-- +!extension_loaded('xdebug') +--TEMPLATE-- +{{ dump('foo') }} +{{ dump('foo', 'bar') }} +--DATA-- +return array('foo' => 'foo', 'bar' => 'bar') +--CONFIG-- +return array('debug' => true, 'autoescape' => false); +--EXPECT-- +string(3) "foo" + +string(3) "foo" +string(3) "bar" +--TEST-- +dynamic function +--TEMPLATE-- +{{ foo_path('bar') }} +{{ a_foo_b_bar('bar') }} +--DATA-- +return array() +--EXPECT-- +foo/bar +a/b/bar +--TEST-- +"include" function +--TEMPLATE-- +{% set tmp = include("foo.twig") %} + +FOO{{ tmp }}BAR +--TEMPLATE(foo.twig)-- +FOOBAR +--DATA-- +return array() +--EXPECT-- +FOO +FOOBARBAR +--TEST-- +"include" function is safe for auto-escaping +--TEMPLATE-- +{{ include("foo.twig") }} +--TEMPLATE(foo.twig)-- +

    Test

    +--DATA-- +return array() +--EXPECT-- +

    Test

    +--TEST-- +"include" function +--TEMPLATE-- +FOO +{{ include("foo.twig") }} + +BAR +--TEMPLATE(foo.twig)-- +FOOBAR +--DATA-- +return array() +--EXPECT-- +FOO + +FOOBAR + +BAR +--TEST-- +"include" function allows expressions for the template to include +--TEMPLATE-- +FOO +{{ include(foo) }} + +BAR +--TEMPLATE(foo.twig)-- +FOOBAR +--DATA-- +return array('foo' => 'foo.twig') +--EXPECT-- +FOO + +FOOBAR + +BAR +--TEST-- +"include" function +--TEMPLATE-- +{{ include(["foo.twig", "bar.twig"], ignore_missing = true) }} +{{ include("foo.twig", ignore_missing = true) }} +{{ include("foo.twig", ignore_missing = true, variables = {}) }} +{{ include("foo.twig", ignore_missing = true, variables = {}, with_context = true) }} +--DATA-- +return array() +--EXPECT-- +--TEST-- +"include" function +--TEMPLATE-- +{% extends "base.twig" %} + +{% block content %} + {{ parent() }} +{% endblock %} +--TEMPLATE(base.twig)-- +{% block content %} + {{ include("foo.twig") }} +{% endblock %} +--DATA-- +return array(); +--EXCEPTION-- +Twig_Error_Loader: Template "foo.twig" is not defined in "base.twig" at line 3. +--TEST-- +"include" function +--TEMPLATE-- +{{ include("foo.twig") }} +--DATA-- +return array(); +--EXCEPTION-- +Twig_Error_Loader: Template "foo.twig" is not defined in "index.twig" at line 2. +--TEST-- +"include" tag sandboxed +--TEMPLATE-- +{{ include("foo.twig", sandboxed = true) }} +--TEMPLATE(foo.twig)-- +{{ foo|e }} +--DATA-- +return array() +--EXCEPTION-- +Twig_Sandbox_SecurityError: Filter "e" is not allowed in "index.twig" at line 2. +--TEST-- +"include" function accepts Twig_Template instance +--TEMPLATE-- +{{ include(foo) }} FOO +--TEMPLATE(foo.twig)-- +BAR +--DATA-- +return array('foo' => $twig->loadTemplate('foo.twig')) +--EXPECT-- +BAR FOO +--TEST-- +"include" function +--TEMPLATE-- +{{ include(["foo.twig", "bar.twig"]) }} +{{- include(["bar.twig", "foo.twig"]) }} +--TEMPLATE(foo.twig)-- +foo +--DATA-- +return array() +--EXPECT-- +foo +foo +--TEST-- +"include" function accept variables and with_context +--TEMPLATE-- +{{ include("foo.twig") }} +{{- include("foo.twig", with_context = false) }} +{{- include("foo.twig", {'foo1': 'bar'}) }} +{{- include("foo.twig", {'foo1': 'bar'}, with_context = false) }} +--TEMPLATE(foo.twig)-- +{% for k, v in _context %}{{ k }},{% endfor %} +--DATA-- +return array('foo' => 'bar') +--EXPECT-- +foo,global,_parent, +global,_parent, +foo,global,foo1,_parent, +foo1,global,_parent, +--TEST-- +"include" function accept variables +--TEMPLATE-- +{{ include("foo.twig", {'foo': 'bar'}) }} +{{- include("foo.twig", vars) }} +--TEMPLATE(foo.twig)-- +{{ foo }} +--DATA-- +return array('vars' => array('foo' => 'bar')) +--EXPECT-- +bar +bar +--TEST-- +"max" function +--TEMPLATE-- +{{ max([2, 1, 3, 5, 4]) }} +{{ max(2, 1, 3, 5, 4) }} +{{ max({2:"two", 1:"one", 3:"three", 5:"five", 4:"for"}) }} +--DATA-- +return array() +--EXPECT-- +5 +5 +two +--TEST-- +"min" function +--TEMPLATE-- +{{ min(2, 1, 3, 5, 4) }} +{{ min([2, 1, 3, 5, 4]) }} +{{ min({2:"two", 1:"one", 3:"three", 5:"five", 4:"for"}) }} +--DATA-- +return array() +--EXPECT-- +1 +1 +five +--TEST-- +"range" function +--TEMPLATE-- +{{ range(low=0+1, high=10+0, step=2)|join(',') }} +--DATA-- +return array() +--EXPECT-- +1,3,5,7,9 +--TEST-- +"block" function recursively called in a parent template +--TEMPLATE-- +{% extends "ordered_menu.twig" %} +{% block label %}"{{ parent() }}"{% endblock %} +{% block list %}{% set class = 'b' %}{{ parent() }}{% endblock %} +--TEMPLATE(ordered_menu.twig)-- +{% extends "menu.twig" %} +{% block 
list %}{% set class = class|default('a') %}
      {{ block('children') }}
    {% endblock %} +--TEMPLATE(menu.twig)-- +{% extends "base.twig" %} +{% block list %}
      {{ block('children') }}
    {% endblock %} +{% block children %}{% set currentItem = item %}{% for item in currentItem %}{{ block('item') }}{% endfor %}{% set item = currentItem %}{% endblock %} +{% block item %}
  • {% if item is not iterable %}{{ block('label') }}{% else %}{{ block('list') }}{% endif %}
  • {% endblock %} +{% block label %}{{ item }}{{ block('unknown') }}{% endblock %} +--TEMPLATE(base.twig)-- +{{ block('list') }} +--DATA-- +return array('item' => array('1', '2', array('3.1', array('3.2.1', '3.2.2'), '3.4'))) +--EXPECT-- +
    1. "1"
    2. "2"
      1. "3.1"
        1. "3.2.1"
        2. "3.2.2"
      2. "3.4"
    +--TEST-- +"source" function +--TEMPLATE-- +FOO +{{ source("foo.twig") }} + +BAR +--TEMPLATE(foo.twig)-- +{{ foo }}
    +--DATA-- +return array() +--EXPECT-- +FOO + +{{ foo }}
    + +BAR +--TEST-- +"template_from_string" function +--TEMPLATE-- +{% include template_from_string(template) %} + +{% include template_from_string("Hello {{ name }}") %} +{% include template_from_string('{% extends "parent.twig" %}{% block content %}Hello {{ name }}{% endblock %}') %} +--TEMPLATE(parent.twig)-- +{% block content %}{% endblock %} +--DATA-- +return array('name' => 'Fabien', 'template' => "Hello {{ name }}") +--EXPECT-- +Hello Fabien +Hello Fabien +Hello Fabien +--TEST-- +macro +--TEMPLATE-- +{% from _self import test %} + +{% macro test(a, b = 'bar') -%} +{{ a }}{{ b }} +{%- endmacro %} + +{{ test('foo') }} +{{ test('bar', 'foo') }} +--DATA-- +return array(); +--EXPECT-- +foobar +barfoo +--TEST-- +macro +--TEMPLATE-- +{% import _self as macros %} + +{% macro foo(data) %} + {{ data }} +{% endmacro %} + +{% macro bar() %} +
    +{% endmacro %} + +{{ macros.foo(macros.bar()) }} +--DATA-- +return array(); +--EXPECT-- +
    +--TEST-- +macro +--TEMPLATE-- +{% from _self import test %} + +{% macro test(this) -%} + {{ this }} +{%- endmacro %} + +{{ test(this) }} +--DATA-- +return array('this' => 'foo'); +--EXPECT-- +foo +--TEST-- +macro +--TEMPLATE-- +{% import _self as test %} +{% from _self import test %} + +{% macro test(a, b) -%} + {{ a|default('a') }}
    + {{- b|default('b') }}
    +{%- endmacro %} + +{{ test.test() }} +{{ test() }} +{{ test.test(1, "c") }} +{{ test(1, "c") }} +--DATA-- +return array(); +--EXPECT-- +a
    b
    +a
    b
    +1
    c
    +1
    c
    +--TEST-- +macro with a filter +--TEMPLATE-- +{% import _self as test %} + +{% macro test() %} + {% filter escape %}foo
    {% endfilter %} +{% endmacro %} + +{{ test.test() }} +--DATA-- +return array(); +--EXPECT-- +foo<br /> +--TEST-- +Twig outputs 0 nodes correctly +--TEMPLATE-- +{{ foo }}0{{ foo }} +--DATA-- +return array('foo' => 'foo') +--EXPECT-- +foo0foo +--TEST-- +error in twig extension +--TEMPLATE-- +{{ object.region is not null ? object.regionChoices[object.region] }} +--EXPECT-- +house.region.s +--TEST-- +Twig is able to deal with SimpleXMLElement instances as variables +--CONDITION-- +version_compare(phpversion(), '5.3.0', '>=') +--TEMPLATE-- +Hello '{{ images.image.0.group }}'! +{{ images.image.0.group.attributes.myattr }} +{{ images.children().image.count() }} +{% for image in images %} + - {{ image.group }} +{% endfor %} +--DATA-- +return array('images' => new SimpleXMLElement('foobar')) +--EXPECT-- +Hello 'foo'! +example +2 + - foo + - bar +--TEST-- +Twig does not confuse strings with integers in getAttribute() +--TEMPLATE-- +{{ hash['2e2'] }} +--DATA-- +return array('hash' => array('2e2' => 'works')) +--EXPECT-- +works +--TEST-- +"autoescape" tag applies escaping on its children +--TEMPLATE-- +{% autoescape %} +{{ var }}
    +{% endautoescape %} +{% autoescape 'html' %} +{{ var }}
    +{% endautoescape %} +{% autoescape false %} +{{ var }}
    +{% endautoescape %} +{% autoescape true %} +{{ var }}
    +{% endautoescape %} +{% autoescape false %} +{{ var }}
    +{% endautoescape %} +--DATA-- +return array('var' => '
    ') +--EXPECT-- +<br />
    +<br />
    +

    +<br />
    +

    +--TEST-- +"autoescape" tag applies escaping on embedded blocks +--TEMPLATE-- +{% autoescape 'html' %} + {% block foo %} + {{ var }} + {% endblock %} +{% endautoescape %} +--DATA-- +return array('var' => '
    ') +--EXPECT-- +<br /> +--TEST-- +"autoescape" tag does not double-escape +--TEMPLATE-- +{% autoescape 'html' %} +{{ var|escape }} +{% endautoescape %} +--DATA-- +return array('var' => '
    ') +--EXPECT-- +<br /> +--TEST-- +"autoescape" tag applies escaping after calling functions +--TEMPLATE-- + +autoescape false +{% autoescape false %} + +safe_br +{{ safe_br() }} + +unsafe_br +{{ unsafe_br() }} + +{% endautoescape %} + +autoescape 'html' +{% autoescape 'html' %} + +safe_br +{{ safe_br() }} + +unsafe_br +{{ unsafe_br() }} + +unsafe_br()|raw +{{ (unsafe_br())|raw }} + +safe_br()|escape +{{ (safe_br())|escape }} + +safe_br()|raw +{{ (safe_br())|raw }} + +unsafe_br()|escape +{{ (unsafe_br())|escape }} + +{% endautoescape %} + +autoescape js +{% autoescape 'js' %} + +safe_br +{{ safe_br() }} + +{% endautoescape %} +--DATA-- +return array() +--EXPECT-- + +autoescape false + +safe_br +
    + +unsafe_br +
    + + +autoescape 'html' + +safe_br +
    + +unsafe_br +<br /> + +unsafe_br()|raw +
    + +safe_br()|escape +<br /> + +safe_br()|raw +
    + +unsafe_br()|escape +<br /> + + +autoescape js + +safe_br +\x3Cbr\x20\x2F\x3E +--TEST-- +"autoescape" tag does not apply escaping on literals +--TEMPLATE-- +{% autoescape 'html' %} + +1. Simple literal +{{ "
    " }} + +2. Conditional expression with only literals +{{ true ? "
    " : "
    " }} + +3. Conditional expression with a variable +{{ true ? "
    " : someVar }} + +4. Nested conditionals with only literals +{{ true ? (true ? "
    " : "
    ") : "\n" }} + +5. Nested conditionals with a variable +{{ true ? (true ? "
    " : someVar) : "\n" }} + +6. Nested conditionals with a variable marked safe +{{ true ? (true ? "
    " : someVar|raw) : "\n" }} + +{% endautoescape %} +--DATA-- +return array() +--EXPECT-- + +1. Simple literal +
    + +2. Conditional expression with only literals +
    + +3. Conditional expression with a variable +<br /> + +4. Nested conditionals with only literals +
    + +5. Nested conditionals with a variable +<br /> + +6. Nested conditionals with a variable marked safe +
    +--TEST-- +"autoescape" tags can be nested at will +--TEMPLATE-- +{{ var }} +{% autoescape 'html' %} + {{ var }} + {% autoescape false %} + {{ var }} + {% autoescape 'html' %} + {{ var }} + {% endautoescape %} + {{ var }} + {% endautoescape %} + {{ var }} +{% endautoescape %} +{{ var }} +--DATA-- +return array('var' => '
    ') +--EXPECT-- +<br /> + <br /> +
    + <br /> +
    + <br /> +<br /> +--TEST-- +"autoescape" tag applies escaping to object method calls +--TEMPLATE-- +{% autoescape 'html' %} +{{ user.name }} +{{ user.name|lower }} +{{ user }} +{% endautoescape %} +--EXPECT-- +Fabien<br /> +fabien<br /> +Fabien<br /> +--TEST-- +"autoescape" tag does not escape when raw is used as a filter +--TEMPLATE-- +{% autoescape 'html' %} +{{ var|raw }} +{% endautoescape %} +--DATA-- +return array('var' => '
    ') +--EXPECT-- +
    +--TEST-- +"autoescape" tag accepts an escaping strategy +--TEMPLATE-- +{% autoescape true js %}{{ var }}{% endautoescape %} + +{% autoescape true html %}{{ var }}{% endautoescape %} + +{% autoescape 'js' %}{{ var }}{% endautoescape %} + +{% autoescape 'html' %}{{ var }}{% endautoescape %} +--DATA-- +return array('var' => '
    "') +--EXPECT-- +\x3Cbr\x20\x2F\x3E\x22 +<br />" +\x3Cbr\x20\x2F\x3E\x22 +<br />" +--TEST-- +escape types +--TEMPLATE-- + +1. autoescape 'html' |escape('js') + +{% autoescape 'html' %} + +{% endautoescape %} + +2. autoescape 'html' |escape('js') + +{% autoescape 'html' %} + +{% endautoescape %} + +3. autoescape 'js' |escape('js') + +{% autoescape 'js' %} + +{% endautoescape %} + +4. no escape + +{% autoescape false %} + +{% endautoescape %} + +5. |escape('js')|escape('html') + +{% autoescape false %} + +{% endautoescape %} + +6. autoescape 'html' |escape('js')|escape('html') + +{% autoescape 'html' %} + +{% endautoescape %} + +--DATA-- +return array('msg' => "<>\n'\"") +--EXPECT-- + +1. autoescape 'html' |escape('js') + + + +2. autoescape 'html' |escape('js') + + + +3. autoescape 'js' |escape('js') + + + +4. no escape + + + +5. |escape('js')|escape('html') + + + +6. autoescape 'html' |escape('js')|escape('html') + + + +--TEST-- +"autoescape" tag do not applies escaping on filter arguments +--TEMPLATE-- +{% autoescape 'html' %} +{{ var|nl2br("
    ") }} +{{ var|nl2br("
    "|escape) }} +{{ var|nl2br(sep) }} +{{ var|nl2br(sep|raw) }} +{{ var|nl2br(sep|escape) }} +{% endautoescape %} +--DATA-- +return array('var' => "\nTwig", 'sep' => '
    ') +--EXPECT-- +<Fabien>
    +Twig +<Fabien><br /> +Twig +<Fabien>
    +Twig +<Fabien>
    +Twig +<Fabien><br /> +Twig +--TEST-- +"autoescape" tag applies escaping after calling filters +--TEMPLATE-- +{% autoescape 'html' %} + +(escape_and_nl2br is an escaper filter) + +1. Don't escape escaper filter output +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is not escaped ) +{{ var|escape_and_nl2br }} + +2. Don't escape escaper filter output +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is not escaped, |raw is redundant ) +{{ var|escape_and_nl2br|raw }} + +3. Explicit escape +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is explicitly escaped by |escape ) +{{ var|escape_and_nl2br|escape }} + +4. Escape non-escaper filter output +( var is upper-cased by |upper, + the output is auto-escaped ) +{{ var|upper }} + +5. Escape if last filter is not an escaper +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is upper-cased by |upper, + the output is auto-escaped as |upper is not an escaper ) +{{ var|escape_and_nl2br|upper }} + +6. Don't escape escaper filter output +( var is upper cased by upper, + the output is escaped by |escape_and_nl2br, line-breaks are added, + the output is not escaped as |escape_and_nl2br is an escaper ) +{{ var|upper|escape_and_nl2br }} + +7. Escape if last filter is not an escaper +( the output of |format is "" ~ var ~ "", + the output is auto-escaped ) +{{ "%s"|format(var) }} + +8. Escape if last filter is not an escaper +( the output of |format is "" ~ var ~ "", + |raw is redundant, + the output is auto-escaped ) +{{ "%s"|raw|format(var) }} + +9. Don't escape escaper filter output +( the output of |format is "" ~ var ~ "", + the output is not escaped due to |raw filter at the end ) +{{ "%s"|format(var)|raw }} + +10. Don't escape escaper filter output +( the output of |format is "" ~ var ~ "", + the output is not escaped due to |raw filter at the end, + the |raw filter on var is redundant ) +{{ "%s"|format(var|raw)|raw }} + +{% endautoescape %} +--DATA-- +return array('var' => "\nTwig") +--EXPECT-- + +(escape_and_nl2br is an escaper filter) + +1. Don't escape escaper filter output +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is not escaped ) +<Fabien>
    +Twig + +2. Don't escape escaper filter output +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is not escaped, |raw is redundant ) +<Fabien>
    +Twig + +3. Explicit escape +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is explicitly escaped by |escape ) +&lt;Fabien&gt;<br /> +Twig + +4. Escape non-escaper filter output +( var is upper-cased by |upper, + the output is auto-escaped ) +<FABIEN> +TWIG + +5. Escape if last filter is not an escaper +( var is escaped by |escape_and_nl2br, line-breaks are added, + the output is upper-cased by |upper, + the output is auto-escaped as |upper is not an escaper ) +&LT;FABIEN&GT;<BR /> +TWIG + +6. Don't escape escaper filter output +( var is upper cased by upper, + the output is escaped by |escape_and_nl2br, line-breaks are added, + the output is not escaped as |escape_and_nl2br is an escaper ) +<FABIEN>
    +TWIG + +7. Escape if last filter is not an escaper +( the output of |format is "" ~ var ~ "", + the output is auto-escaped ) +<b><Fabien> +Twig</b> + +8. Escape if last filter is not an escaper +( the output of |format is "" ~ var ~ "", + |raw is redundant, + the output is auto-escaped ) +<b><Fabien> +Twig</b> + +9. Don't escape escaper filter output +( the output of |format is "" ~ var ~ "", + the output is not escaped due to |raw filter at the end ) + +Twig + +10. Don't escape escaper filter output +( the output of |format is "" ~ var ~ "", + the output is not escaped due to |raw filter at the end, + the |raw filter on var is redundant ) + +Twig +--TEST-- +"autoescape" tag applies escaping after calling filters, and before calling pre_escape filters +--TEMPLATE-- +{% autoescape 'html' %} + +(nl2br is pre_escaped for "html" and declared safe for "html") + +1. Pre-escape and don't post-escape +( var|escape|nl2br ) +{{ var|nl2br }} + +2. Don't double-pre-escape +( var|escape|nl2br ) +{{ var|escape|nl2br }} + +3. Don't escape safe values +( var|raw|nl2br ) +{{ var|raw|nl2br }} + +4. Don't escape safe values +( var|escape|nl2br|nl2br ) +{{ var|nl2br|nl2br }} + +5. Re-escape values that are escaped for an other contexts +( var|escape_something|escape|nl2br ) +{{ var|escape_something|nl2br }} + +6. Still escape when using filters not declared safe +( var|escape|nl2br|upper|escape ) +{{ var|nl2br|upper }} + +{% endautoescape %} +--DATA-- +return array('var' => "\nTwig") +--EXPECT-- + +(nl2br is pre_escaped for "html" and declared safe for "html") + +1. Pre-escape and don't post-escape +( var|escape|nl2br ) +<Fabien>
    +Twig + +2. Don't double-pre-escape +( var|escape|nl2br ) +<Fabien>
    +Twig + +3. Don't escape safe values +( var|raw|nl2br ) +
    +Twig + +4. Don't escape safe values +( var|escape|nl2br|nl2br ) +<Fabien>

    +Twig + +5. Re-escape values that are escaped for an other contexts +( var|escape_something|escape|nl2br ) +<FABIEN>
    +TWIG + +6. Still escape when using filters not declared safe +( var|escape|nl2br|upper|escape ) +&LT;FABIEN&GT;<BR /> +TWIG + +--TEST-- +"autoescape" tag handles filters preserving the safety +--TEMPLATE-- +{% autoescape 'html' %} + +(preserves_safety is preserving safety for "html") + +1. Unsafe values are still unsafe +( var|preserves_safety|escape ) +{{ var|preserves_safety }} + +2. Safe values are still safe +( var|escape|preserves_safety ) +{{ var|escape|preserves_safety }} + +3. Re-escape values that are escaped for an other contexts +( var|escape_something|preserves_safety|escape ) +{{ var|escape_something|preserves_safety }} + +4. Still escape when using filters not declared safe +( var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'})|escape ) +{{ var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'}) }} + +{% endautoescape %} +--DATA-- +return array('var' => "\nTwig") +--EXPECT-- + +(preserves_safety is preserving safety for "html") + +1. Unsafe values are still unsafe +( var|preserves_safety|escape ) +<FABIEN> +TWIG + +2. Safe values are still safe +( var|escape|preserves_safety ) +<FABIEN> +TWIG + +3. Re-escape values that are escaped for an other contexts +( var|escape_something|preserves_safety|escape ) +<FABIEN> +TWIG + +4. Still escape when using filters not declared safe +( var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'})|escape ) +&LT;FABPOT&GT; +TWIG + +--TEST-- +"block" tag +--TEMPLATE-- +{% block title1 %}FOO{% endblock %} +{% block title2 foo|lower %} +--TEMPLATE(foo.twig)-- +{% block content %}{% endblock %} +--DATA-- +return array('foo' => 'bar') +--EXPECT-- +FOObar +--TEST-- +"block" tag +--TEMPLATE-- +{% block content %} + {% block content %} + {% endblock %} +{% endblock %} +--DATA-- +return array() +--EXCEPTION-- +Twig_Error_Syntax: The block 'content' has already been defined line 2 in "index.twig" at line 3 +--TEST-- +"§" special chars in a block name +--TEMPLATE-- +{% block § %} +§ +{% endblock § %} +--DATA-- +return array() +--EXPECT-- +§ +--TEST-- +"embed" tag +--TEMPLATE-- +FOO +{% embed "foo.twig" %} + {% block c1 %} + {{ parent() }} + block1extended + {% endblock %} +{% endembed %} + +BAR +--TEMPLATE(foo.twig)-- +A +{% block c1 %} + block1 +{% endblock %} +B +{% block c2 %} + block2 +{% endblock %} +C +--DATA-- +return array() +--EXPECT-- +FOO + +A + block1 + + block1extended + B + block2 +C +BAR +--TEST-- +"embed" tag +--TEMPLATE(index.twig)-- +FOO +{% embed "foo.twig" %} + {% block c1 %} + {{ nothing }} + {% endblock %} +{% endembed %} +BAR +--TEMPLATE(foo.twig)-- +{% block c1 %}{% endblock %} +--DATA-- +return array() +--EXCEPTION-- +Twig_Error_Runtime: Variable "nothing" does not exist in "index.twig" at line 5 +--TEST-- +"embed" tag +--TEMPLATE-- +FOO +{% embed "foo.twig" %} + {% block c1 %} + {{ parent() }} + block1extended + {% endblock %} +{% endembed %} + +{% embed "foo.twig" %} + {% block c1 %} + {{ parent() }} + block1extended + {% endblock %} +{% endembed %} + +BAR +--TEMPLATE(foo.twig)-- +A +{% block c1 %} + block1 +{% endblock %} +B +{% block c2 %} + block2 +{% endblock %} +C +--DATA-- +return array() +--EXPECT-- +FOO + +A + block1 + + block1extended + B + block2 +C + +A + block1 + + block1extended + B + block2 +C +BAR +--TEST-- +"embed" tag +--TEMPLATE-- +{% embed "foo.twig" %} + {% block c1 %} + {{ parent() }} + {% embed "foo.twig" %} + {% block c1 %} + {{ parent() }} + block1extended + {% endblock %} + {% endembed %} + + {% endblock %} +{% endembed %} +--TEMPLATE(foo.twig)-- +A +{% block c1 %} + block1 +{% endblock 
%} +B +{% block c2 %} + block2 +{% endblock %} +C +--DATA-- +return array() +--EXPECT-- +A + block1 + + +A + block1 + + block1extended + B + block2 +C + B + block2 +C +--TEST-- +"embed" tag +--TEMPLATE-- +{% extends "base.twig" %} + +{% block c1 %} + {{ parent() }} + blockc1baseextended +{% endblock %} + +{% block c2 %} + {{ parent() }} + + {% embed "foo.twig" %} + {% block c1 %} + {{ parent() }} + block1extended + {% endblock %} + {% endembed %} +{% endblock %} +--TEMPLATE(base.twig)-- +A +{% block c1 %} + blockc1base +{% endblock %} +{% block c2 %} + blockc2base +{% endblock %} +B +--TEMPLATE(foo.twig)-- +A +{% block c1 %} + block1 +{% endblock %} +B +{% block c2 %} + block2 +{% endblock %} +C +--DATA-- +return array() +--EXPECT-- +A + blockc1base + + blockc1baseextended + blockc2base + + + +A + block1 + + block1extended + B + block2 +CB--TEST-- +"filter" tag applies a filter on its children +--TEMPLATE-- +{% filter upper %} +Some text with a {{ var }} +{% endfilter %} +--DATA-- +return array('var' => 'var') +--EXPECT-- +SOME TEXT WITH A VAR +--TEST-- +"filter" tag applies a filter on its children +--TEMPLATE-- +{% filter json_encode|raw %}test{% endfilter %} +--DATA-- +return array() +--EXPECT-- +"test" +--TEST-- +"filter" tags accept multiple chained filters +--TEMPLATE-- +{% filter lower|title %} + {{ var }} +{% endfilter %} +--DATA-- +return array('var' => 'VAR') +--EXPECT-- + Var +--TEST-- +"filter" tags can be nested at will +--TEMPLATE-- +{% filter lower|title %} + {{ var }} + {% filter upper %} + {{ var }} + {% endfilter %} + {{ var }} +{% endfilter %} +--DATA-- +return array('var' => 'var') +--EXPECT-- + Var + Var + Var +--TEST-- +"filter" tag applies the filter on "for" tags +--TEMPLATE-- +{% filter upper %} +{% for item in items %} +{{ item }} +{% endfor %} +{% endfilter %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- +A +B +--TEST-- +"filter" tag applies the filter on "if" tags +--TEMPLATE-- +{% filter upper %} +{% if items %} +{{ items|join(', ') }} +{% endif %} + +{% if items.3 is defined %} +FOO +{% else %} +{{ items.1 }} +{% endif %} + +{% if items.3 is defined %} +FOO +{% elseif items.1 %} +{{ items.0 }} +{% endif %} + +{% endfilter %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- +A, B + +B + +A +--TEST-- +"for" tag takes a condition +--TEMPLATE-- +{% for i in 1..5 if i is odd -%} + {{ loop.index }}.{{ i }}{{ foo.bar }} +{% endfor %} +--DATA-- +return array('foo' => array('bar' => 'X')) +--CONFIG-- +return array('strict_variables' => false) +--EXPECT-- +1.1X +2.3X +3.5X +--TEST-- +"for" tag keeps the context safe +--TEMPLATE-- +{% for item in items %} + {% for item in items %} + * {{ item }} + {% endfor %} + * {{ item }} +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- + * a + * b + * a + * a + * b + * b +--TEST-- +"for" tag can use an "else" clause +--TEMPLATE-- +{% for item in items %} + * {{ item }} +{% else %} + no item +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- + * a + * b +--DATA-- +return array('items' => array()) +--EXPECT-- + no item +--DATA-- +return array() +--CONFIG-- +return array('strict_variables' => false) +--EXPECT-- + no item +--TEST-- +"for" tag does not reset inner variables +--TEMPLATE-- +{% for i in 1..2 %} + {% for j in 0..2 %} + {{k}}{% set k = k+1 %} {{ loop.parent.loop.index }} + {% endfor %} +{% endfor %} +--DATA-- +return array('k' => 0) +--EXPECT-- + 0 1 + 1 1 + 2 1 + 3 2 + 4 2 + 5 2 +--TEST-- +"for" tag can iterate over keys and values 
+--TEMPLATE-- +{% for key, item in items %} + * {{ key }}/{{ item }} +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- + * 0/a + * 1/b +--TEST-- +"for" tag can iterate over keys +--TEMPLATE-- +{% for key in items|keys %} + * {{ key }} +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- + * 0 + * 1 +--TEST-- +"for" tag adds a loop variable to the context locally +--TEMPLATE-- +{% for item in items %} +{% endfor %} +{% if loop is not defined %}WORKS{% endif %} +--DATA-- +return array('items' => array()) +--EXPECT-- +WORKS +--TEST-- +"for" tag adds a loop variable to the context +--TEMPLATE-- +{% for item in items %} + * {{ loop.index }}/{{ loop.index0 }} + * {{ loop.revindex }}/{{ loop.revindex0 }} + * {{ loop.first }}/{{ loop.last }}/{{ loop.length }} + +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- + * 1/0 + * 2/1 + * 1//2 + + * 2/1 + * 1/0 + * /1/2 +--TEST-- +"for" tag +--TEMPLATE-- +{% for i, item in items if loop.last > 0 %} +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXCEPTION-- +Twig_Error_Syntax: The "loop" variable cannot be used in a looping condition in "index.twig" at line 2 +--TEST-- +"for" tag +--TEMPLATE-- +{% for i, item in items if i > 0 %} + {{ loop.last }} +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXCEPTION-- +Twig_Error_Syntax: The "loop.last" variable is not defined when looping with a condition in "index.twig" at line 3 +--TEST-- +"for" tag can use an "else" clause +--TEMPLATE-- +{% for item in items %} + {% for item in items1 %} + * {{ item }} + {% else %} + no {{ item }} + {% endfor %} +{% else %} + no item1 +{% endfor %} +--DATA-- +return array('items' => array('a', 'b'), 'items1' => array()) +--EXPECT-- +no a + no b +--TEST-- +"for" tag iterates over iterable and countable objects +--TEMPLATE-- +{% for item in items %} + * {{ item }} + * {{ loop.index }}/{{ loop.index0 }} + * {{ loop.revindex }}/{{ loop.revindex0 }} + * {{ loop.first }}/{{ loop.last }}/{{ loop.length }} + +{% endfor %} + +{% for key, value in items %} + * {{ key }}/{{ value }} +{% endfor %} + +{% for key in items|keys %} + * {{ key }} +{% endfor %} +--DATA-- +class ItemsIteratorCountable implements Iterator, Countable +{ + protected $values = array('foo' => 'bar', 'bar' => 'foo'); + public function current() { return current($this->values); } + public function key() { return key($this->values); } + public function next() { return next($this->values); } + public function rewind() { return reset($this->values); } + public function valid() { return false !== current($this->values); } + public function count() { return count($this->values); } +} +return array('items' => new ItemsIteratorCountable()) +--EXPECT-- + * bar + * 1/0 + * 2/1 + * 1//2 + + * foo + * 2/1 + * 1/0 + * /1/2 + + + * foo/bar + * bar/foo + + * foo + * bar +--TEST-- +"for" tag iterates over iterable objects +--TEMPLATE-- +{% for item in items %} + * {{ item }} + * {{ loop.index }}/{{ loop.index0 }} + * {{ loop.first }} + +{% endfor %} + +{% for key, value in items %} + * {{ key }}/{{ value }} +{% endfor %} + +{% for key in items|keys %} + * {{ key }} +{% endfor %} +--DATA-- +class ItemsIterator implements Iterator +{ + protected $values = array('foo' => 'bar', 'bar' => 'foo'); + public function current() { return current($this->values); } + public function key() { return key($this->values); } + public function next() { return next($this->values); } + public function rewind() { return 
reset($this->values); } + public function valid() { return false !== current($this->values); } +} +return array('items' => new ItemsIterator()) +--EXPECT-- + * bar + * 1/0 + * 1 + + * foo + * 2/1 + * + + + * foo/bar + * bar/foo + + * foo + * bar +--TEST-- +"for" tags can be nested +--TEMPLATE-- +{% for key, item in items %} +* {{ key }} ({{ loop.length }}): +{% for value in item %} + * {{ value }} ({{ loop.length }}) +{% endfor %} +{% endfor %} +--DATA-- +return array('items' => array('a' => array('a1', 'a2', 'a3'), 'b' => array('b1'))) +--EXPECT-- +* a (2): + * a1 (3) + * a2 (3) + * a3 (3) +* b (2): + * b1 (1) +--TEST-- +"for" tag iterates over item values +--TEMPLATE-- +{% for item in items %} + * {{ item }} +{% endfor %} +--DATA-- +return array('items' => array('a', 'b')) +--EXPECT-- + * a + * b +--TEST-- +global variables +--TEMPLATE-- +{% include "included.twig" %} +{% from "included.twig" import foobar %} +{{ foobar() }} +--TEMPLATE(included.twig)-- +{% macro foobar() %} +called foobar +{% endmacro %} +--DATA-- +return array(); +--EXPECT-- +called foobar +--TEST-- +"if" creates a condition +--TEMPLATE-- +{% if a is defined %} + {{ a }} +{% elseif b is defined %} + {{ b }} +{% else %} + NOTHING +{% endif %} +--DATA-- +return array('a' => 'a') +--EXPECT-- + a +--DATA-- +return array('b' => 'b') +--EXPECT-- + b +--DATA-- +return array() +--EXPECT-- + NOTHING +--TEST-- +"if" takes an expression as a test +--TEMPLATE-- +{% if a < 2 %} + A1 +{% elseif a > 10 %} + A2 +{% else %} + A3 +{% endif %} +--DATA-- +return array('a' => 1) +--EXPECT-- + A1 +--DATA-- +return array('a' => 12) +--EXPECT-- + A2 +--DATA-- +return array('a' => 7) +--EXPECT-- + A3 +--TEST-- +"include" tag +--TEMPLATE-- +FOO +{% include "foo.twig" %} + +BAR +--TEMPLATE(foo.twig)-- +FOOBAR +--DATA-- +return array() +--EXPECT-- +FOO + +FOOBAR +BAR +--TEST-- +"include" tag allows expressions for the template to include +--TEMPLATE-- +FOO +{% include foo %} + +BAR +--TEMPLATE(foo.twig)-- +FOOBAR +--DATA-- +return array('foo' => 'foo.twig') +--EXPECT-- +FOO + +FOOBAR +BAR +--TEST-- +"include" tag +--TEMPLATE-- +{% include ["foo.twig", "bar.twig"] ignore missing %} +{% include "foo.twig" ignore missing %} +{% include "foo.twig" ignore missing with {} %} +{% include "foo.twig" ignore missing with {} only %} +--DATA-- +return array() +--EXPECT-- +--TEST-- +"include" tag +--TEMPLATE-- +{% extends "base.twig" %} + +{% block content %} + {{ parent() }} +{% endblock %} +--TEMPLATE(base.twig)-- +{% block content %} + {% include "foo.twig" %} +{% endblock %} +--DATA-- +return array(); +--EXCEPTION-- +Twig_Error_Loader: Template "foo.twig" is not defined in "base.twig" at line 3. +--TEST-- +"include" tag +--TEMPLATE-- +{% include "foo.twig" %} +--DATA-- +return array(); +--EXCEPTION-- +Twig_Error_Loader: Template "foo.twig" is not defined in "index.twig" at line 2. 
+--TEST-- +"include" tag accept variables and only +--TEMPLATE-- +{% include "foo.twig" %} +{% include "foo.twig" only %} +{% include "foo.twig" with {'foo1': 'bar'} %} +{% include "foo.twig" with {'foo1': 'bar'} only %} +--TEMPLATE(foo.twig)-- +{% for k, v in _context %}{{ k }},{% endfor %} +--DATA-- +return array('foo' => 'bar') +--EXPECT-- +foo,global,_parent, +global,_parent, +foo,global,foo1,_parent, +foo1,global,_parent, +--TEST-- +"include" tag accepts Twig_Template instance +--TEMPLATE-- +{% include foo %} FOO +--TEMPLATE(foo.twig)-- +BAR +--DATA-- +return array('foo' => $twig->loadTemplate('foo.twig')) +--EXPECT-- +BAR FOO +--TEST-- +"include" tag +--TEMPLATE-- +{% include ["foo.twig", "bar.twig"] %} +{% include ["bar.twig", "foo.twig"] %} +--TEMPLATE(foo.twig)-- +foo +--DATA-- +return array() +--EXPECT-- +foo +foo +--TEST-- +"include" tag accept variables +--TEMPLATE-- +{% include "foo.twig" with {'foo': 'bar'} %} +{% include "foo.twig" with vars %} +--TEMPLATE(foo.twig)-- +{{ foo }} +--DATA-- +return array('vars' => array('foo' => 'bar')) +--EXPECT-- +bar +bar +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends "foo.twig" %} + +{% block content %} +FOO +{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %}{% endblock %} +--DATA-- +return array() +--EXPECT-- +FOO +--TEST-- +block_expr2 +--TEMPLATE-- +{% extends "base2.twig" %} + +{% block element -%} + Element: + {{- parent() -}} +{% endblock %} +--TEMPLATE(base2.twig)-- +{% extends "base.twig" %} +--TEMPLATE(base.twig)-- +{% spaceless %} +{% block element -%} +
    + {%- if item.children is defined %} + {%- for item in item.children %} + {{- block('element') -}} + {% endfor %} + {%- endif -%} +
    +{%- endblock %} +{% endspaceless %} +--DATA-- +return array( + 'item' => array( + 'children' => array( + null, + null, + ) + ) +) +--EXPECT-- +Element:
    Element:
    Element:
    +--TEST-- +block_expr +--TEMPLATE-- +{% extends "base.twig" %} + +{% block element -%} + Element: + {{- parent() -}} +{% endblock %} +--TEMPLATE(base.twig)-- +{% spaceless %} +{% block element -%} +
    + {%- if item.children is defined %} + {%- for item in item.children %} + {{- block('element') -}} + {% endfor %} + {%- endif -%} +
    +{%- endblock %} +{% endspaceless %} +--DATA-- +return array( + 'item' => array( + 'children' => array( + null, + null, + ) + ) +) +--EXPECT-- +Element:
    Element:
    Element:
    +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends standalone ? foo : 'bar.twig' %} + +{% block content %}{{ parent() }}FOO{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %}FOO{% endblock %} +--TEMPLATE(bar.twig)-- +{% block content %}BAR{% endblock %} +--DATA-- +return array('foo' => 'foo.twig', 'standalone' => true) +--EXPECT-- +FOOFOO +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends foo %} + +{% block content %} +FOO +{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %}{% endblock %} +--DATA-- +return array('foo' => 'foo.twig') +--EXPECT-- +FOO +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends "foo.twig" %} +--TEMPLATE(foo.twig)-- +{% block content %}FOO{% endblock %} +--DATA-- +return array() +--EXPECT-- +FOO +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends ["foo.twig", "bar.twig"] %} +--TEMPLATE(bar.twig)-- +{% block content %} +foo +{% endblock %} +--DATA-- +return array() +--EXPECT-- +foo +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends "layout.twig" %}{% block content %}{{ parent() }}index {% endblock %} +--TEMPLATE(layout.twig)-- +{% extends "base.twig" %}{% block content %}{{ parent() }}layout {% endblock %} +--TEMPLATE(base.twig)-- +{% block content %}base {% endblock %} +--DATA-- +return array() +--EXPECT-- +base layout index +--TEST-- +"block" tag +--TEMPLATE-- +{% block content %} + CONTENT + {%- block subcontent -%} + SUBCONTENT + {%- endblock -%} + ENDCONTENT +{% endblock %} +--TEMPLATE(foo.twig)-- +--DATA-- +return array() +--EXPECT-- +CONTENTSUBCONTENTENDCONTENT +--TEST-- +"block" tag +--TEMPLATE-- +{% extends "foo.twig" %} + +{% block content %} + {% block subcontent %} + {% block subsubcontent %} + SUBSUBCONTENT + {% endblock %} + {% endblock %} +{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %} + {% block subcontent %} + SUBCONTENT + {% endblock %} +{% endblock %} +--DATA-- +return array() +--EXPECT-- +SUBSUBCONTENT +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends "layout.twig" %} +{% block inside %}INSIDE{% endblock inside %} +--TEMPLATE(layout.twig)-- +{% extends "base.twig" %} +{% block body %} + {% block inside '' %} +{% endblock body %} +--TEMPLATE(base.twig)-- +{% block body '' %} +--DATA-- +return array() +--EXPECT-- +INSIDE +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends foo ? 
'foo.twig' : 'bar.twig' %} +--TEMPLATE(foo.twig)-- +FOO +--TEMPLATE(bar.twig)-- +BAR +--DATA-- +return array('foo' => true) +--EXPECT-- +FOO +--DATA-- +return array('foo' => false) +--EXPECT-- +BAR +--TEST-- +"extends" tag +--TEMPLATE-- +{% block content %} + {% extends "foo.twig" %} +{% endblock %} +--EXCEPTION-- +Twig_Error_Syntax: Cannot extend from a block in "index.twig" at line 3 +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends "base.twig" %} +{% block content %}{% include "included.twig" %}{% endblock %} + +{% block footer %}Footer{% endblock %} +--TEMPLATE(included.twig)-- +{% extends "base.twig" %} +{% block content %}Included Content{% endblock %} +--TEMPLATE(base.twig)-- +{% block content %}Default Content{% endblock %} + +{% block footer %}Default Footer{% endblock %} +--DATA-- +return array() +--EXPECT-- +Included Content +Default Footer +Footer +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends "foo.twig" %} + +{% block content %} + {% block inside %} + INSIDE OVERRIDDEN + {% endblock %} + + BEFORE + {{ parent() }} + AFTER +{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %} + BAR +{% endblock %} +--DATA-- +return array() +--EXPECT-- + +INSIDE OVERRIDDEN + + BEFORE + BAR + + AFTER +--TEST-- +"extends" tag +--TEMPLATE-- +{% extends "foo.twig" %} + +{% block content %}{{ parent() }}FOO{{ parent() }}{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %}BAR{% endblock %} +--DATA-- +return array() +--EXPECT-- +BARFOOBAR +--TEST-- +"parent" tag +--TEMPLATE-- +{% use 'foo.twig' %} + +{% block content %} + {{ parent() }} +{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %}BAR{% endblock %} +--DATA-- +return array() +--EXPECT-- +BAR +--TEST-- +"parent" tag +--TEMPLATE-- +{% block content %} + {{ parent() }} +{% endblock %} +--EXCEPTION-- +Twig_Error_Syntax: Calling "parent" on a template that does not extend nor "use" another template is forbidden in "index.twig" at line 3 +--TEST-- +"extends" tag accepts Twig_Template instance +--TEMPLATE-- +{% extends foo %} + +{% block content %} +{{ parent() }}FOO +{% endblock %} +--TEMPLATE(foo.twig)-- +{% block content %}BAR{% endblock %} +--DATA-- +return array('foo' => $twig->loadTemplate('foo.twig')) +--EXPECT-- +BARFOO +--TEST-- +"parent" function +--TEMPLATE-- +{% extends "parent.twig" %} + +{% use "use1.twig" %} +{% use "use2.twig" %} + +{% block content_parent %} + {{ parent() }} +{% endblock %} + +{% block content_use1 %} + {{ parent() }} +{% endblock %} + +{% block content_use2 %} + {{ parent() }} +{% endblock %} + +{% block content %} + {{ block('content_use1_only') }} + {{ block('content_use2_only') }} +{% endblock %} +--TEMPLATE(parent.twig)-- +{% block content_parent 'content_parent' %} +{% block content_use1 'content_parent' %} +{% block content_use2 'content_parent' %} +{% block content '' %} +--TEMPLATE(use1.twig)-- +{% block content_use1 'content_use1' %} +{% block content_use2 'content_use1' %} +{% block content_use1_only 'content_use1_only' %} +--TEMPLATE(use2.twig)-- +{% block content_use2 'content_use2' %} +{% block content_use2_only 'content_use2_only' %} +--DATA-- +return array() +--EXPECT-- + content_parent + content_use1 + content_use2 + content_use1_only + content_use2_only +--TEST-- +"macro" tag +--TEMPLATE-- +{% import _self as macros %} + +{{ macros.input('username') }} +{{ macros.input('password', null, 'password', 1) }} + +{% macro input(name, value, type, size) %} + +{% endmacro %} +--DATA-- +return array() +--EXPECT-- + + + +--TEST-- +"macro" tag supports name for endmacro +--TEMPLATE-- +{% 
import _self as macros %} + +{{ macros.foo() }} +{{ macros.bar() }} + +{% macro foo() %}foo{% endmacro %} +{% macro bar() %}bar{% endmacro bar %} +--DATA-- +return array() +--EXPECT-- +foo +bar + +--TEST-- +"macro" tag +--TEMPLATE-- +{% import 'forms.twig' as forms %} + +{{ forms.input('username') }} +{{ forms.input('password', null, 'password', 1) }} +--TEMPLATE(forms.twig)-- +{% macro input(name, value, type, size) %} + +{% endmacro %} +--DATA-- +return array() +--EXPECT-- + + + +--TEST-- +"macro" tag +--TEMPLATE-- +{% from 'forms.twig' import foo %} +{% from 'forms.twig' import foo as foobar, bar %} + +{{ foo('foo') }} +{{ foobar('foo') }} +{{ bar('foo') }} +--TEMPLATE(forms.twig)-- +{% macro foo(name) %}foo{{ name }}{% endmacro %} +{% macro bar(name) %}bar{{ name }}{% endmacro %} +--DATA-- +return array() +--EXPECT-- +foofoo +foofoo +barfoo +--TEST-- +"macro" tag +--TEMPLATE-- +{% from 'forms.twig' import foo %} + +{{ foo('foo') }} +{{ foo() }} +--TEMPLATE(forms.twig)-- +{% macro foo(name) %}{{ name|default('foo') }}{{ global }}{% endmacro %} +--DATA-- +return array() +--EXPECT-- +fooglobal +fooglobal +--TEST-- +"macro" tag +--TEMPLATE-- +{% import _self as forms %} + +{{ forms.input('username') }} +{{ forms.input('password', null, 'password', 1) }} + +{% macro input(name, value, type, size) %} + +{% endmacro %} +--DATA-- +return array() +--EXPECT-- + + + +--TEST-- +"raw" tag +--TEMPLATE-- +{% raw %} +{{ foo }} +{% endraw %} +--DATA-- +return array() +--EXPECT-- +{{ foo }} +--TEST-- +"raw" tag +--TEMPLATE-- +{% raw %} +{{ foo }} +{% endverbatim %} +--DATA-- +return array() +--EXCEPTION-- +Twig_Error_Syntax: Unexpected end of file: Unclosed "raw" block in "index.twig" at line 2 +--TEST-- +"raw" tag +--TEMPLATE-- +1*** + +{%- raw %} + {{ 'bla' }} +{% endraw %} + +1*** +2*** + +{%- raw -%} + {{ 'bla' }} +{% endraw %} + +2*** +3*** + +{%- raw -%} + {{ 'bla' }} +{% endraw -%} + +3*** +4*** + +{%- raw -%} + {{ 'bla' }} +{%- endraw %} + +4*** +5*** + +{%- raw -%} + {{ 'bla' }} +{%- endraw -%} + +5*** +--DATA-- +return array() +--EXPECT-- +1*** + {{ 'bla' }} + + +1*** +2***{{ 'bla' }} + + +2*** +3***{{ 'bla' }} +3*** +4***{{ 'bla' }} + +4*** +5***{{ 'bla' }}5*** +--TEST-- +sandbox tag +--TEMPLATE-- +{%- sandbox %} + {%- include "foo.twig" %} + a +{%- endsandbox %} +--TEMPLATE(foo.twig)-- +foo +--EXCEPTION-- +Twig_Error_Syntax: Only "include" tags are allowed within a "sandbox" section in "index.twig" at line 4 +--TEST-- +sandbox tag +--TEMPLATE-- +{%- sandbox %} + {%- include "foo.twig" %} + + {% if 1 %} + {%- include "foo.twig" %} + {% endif %} +{%- endsandbox %} +--TEMPLATE(foo.twig)-- +foo +--EXCEPTION-- +Twig_Error_Syntax: Only "include" tags are allowed within a "sandbox" section in "index.twig" at line 5 +--TEST-- +sandbox tag +--TEMPLATE-- +{%- sandbox %} + {%- include "foo.twig" %} +{%- endsandbox %} + +{%- sandbox %} + {%- include "foo.twig" %} + {%- include "foo.twig" %} +{%- endsandbox %} + +{%- sandbox %}{% include "foo.twig" %}{% endsandbox %} +--TEMPLATE(foo.twig)-- +foo +--DATA-- +return array() +--EXPECT-- +foo +foo +foo +foo +--TEST-- +"set" tag +--TEMPLATE-- +{% set foo = 'foo' %} +{% set bar = 'foo
<br />' %} + +{{ foo }} +{{ bar }} + +{% set foo, bar = 'foo', 'bar' %} + +{{ foo }}{{ bar }} +--DATA-- +return array() +--EXPECT-- +foo +foo<br /> + + +foobar +--TEST-- +"set" tag block empty capture +--TEMPLATE-- +{% set foo %}{% endset %} + +{% if foo %}FAIL{% endif %} +--DATA-- +return array() +--EXPECT-- +--TEST-- +"set" tag block capture +--TEMPLATE-- +{% set foo %}f
<br />o<br />o{% endset %} + +{{ foo }} +--DATA-- +return array() +--EXPECT-- +f<br />o<br />o +--TEST-- +"set" tag +--TEMPLATE-- +{% set foo, bar = 'foo' ~ 'bar', 'bar' ~ 'foo' %} + +{{ foo }} +{{ bar }} +--DATA-- +return array() +--EXPECT-- +foobar +barfoo +--TEST-- +"spaceless" tag removes whites between HTML tags +--TEMPLATE-- +{% spaceless %} + +
    foo
    + +{% endspaceless %} +--DATA-- +return array() +--EXPECT-- +
    foo
    +--TEST-- +"§" custom tag +--TEMPLATE-- +{% § %} +--DATA-- +return array() +--EXPECT-- +§ +--TEST-- +Whitespace trimming on tags. +--TEMPLATE-- +{{ 5 * '{#-'|length }} +{{ '{{-'|length * 5 + '{%-'|length }} + +Trim on control tag: +{% for i in range(1, 9) -%} + {{ i }} +{%- endfor %} + + +Trim on output tag: +{% for i in range(1, 9) %} + {{- i -}} +{% endfor %} + + +Trim comments: + +{#- Invisible -#} + +After the comment. + +Trim leading space: +{% if leading %} + + {{- leading }} +{% endif %} + +{%- if leading %} + {{- leading }} + +{%- endif %} + + +Trim trailing space: +{% if trailing -%} + {{ trailing -}} + +{% endif -%} + +Combined: + +{%- if both -%} +
      +
    • {{- both -}}
    • +
    + +{%- endif -%} + +end +--DATA-- +return array('leading' => 'leading space', 'trailing' => 'trailing space', 'both' => 'both') +--EXPECT-- +15 +18 + +Trim on control tag: +123456789 + +Trim on output tag: +123456789 + +Trim comments:After the comment. + +Trim leading space: +leading space +leading space + +Trim trailing space: +trailing spaceCombined:
      +
    • both
    • +
    end +--TEST-- +"use" tag +--TEMPLATE-- +{% use "blocks.twig" with content as foo %} + +{{ block('foo') }} +--TEMPLATE(blocks.twig)-- +{% block content 'foo' %} +--DATA-- +return array() +--EXPECT-- +foo +--TEST-- +"use" tag +--TEMPLATE-- +{% use "blocks.twig" %} + +{{ block('content') }} +--TEMPLATE(blocks.twig)-- +{% block content 'foo' %} +--DATA-- +return array() +--EXPECT-- +foo +--TEST-- +"use" tag +--TEMPLATE-- +{% use "foo.twig" %} +--TEMPLATE(foo.twig)-- +{% use "bar.twig" %} +--TEMPLATE(bar.twig)-- +--DATA-- +return array() +--EXPECT-- +--TEST-- +"use" tag +--TEMPLATE-- +{% use "foo.twig" %} + +{{ block('content') }} +{{ block('foo') }} +{{ block('bar') }} +--TEMPLATE(foo.twig)-- +{% use "bar.twig" %} + +{% block content 'foo' %} +{% block foo 'foo' %} +--TEMPLATE(bar.twig)-- +{% block content 'bar' %} +{% block bar 'bar' %} +--DATA-- +return array() +--EXPECT-- +foo +foo +bar +--TEST-- +"use" tag +--TEMPLATE-- +{% use "ancestor.twig" %} +{% use "parent.twig" %} + +{{ block('container') }} +--TEMPLATE(parent.twig)-- +{% block sub_container %} +
    overriden sub_container
    +{% endblock %} +--TEMPLATE(ancestor.twig)-- +{% block container %} +
    {{ block('sub_container') }}
    +{% endblock %} + +{% block sub_container %} +
    sub_container
    +{% endblock %} +--DATA-- +return array() +--EXPECT-- +
    overriden sub_container
    +
    +--TEST-- +"use" tag +--TEMPLATE-- +{% use "parent.twig" %} + +{{ block('container') }} +--TEMPLATE(parent.twig)-- +{% use "ancestor.twig" %} + +{% block sub_container %} +
    overriden sub_container
    +{% endblock %} +--TEMPLATE(ancestor.twig)-- +{% block container %} +
    {{ block('sub_container') }}
    +{% endblock %} + +{% block sub_container %} +
    sub_container
    +{% endblock %} +--DATA-- +return array() +--EXPECT-- +
    overriden sub_container
    +
    +--TEST-- +"use" tag +--TEMPLATE-- +{% use "foo.twig" with content as foo_content %} +{% use "bar.twig" %} + +{{ block('content') }} +{{ block('foo') }} +{{ block('bar') }} +{{ block('foo_content') }} +--TEMPLATE(foo.twig)-- +{% block content 'foo' %} +{% block foo 'foo' %} +--TEMPLATE(bar.twig)-- +{% block content 'bar' %} +{% block bar 'bar' %} +--DATA-- +return array() +--EXPECT-- +bar +foo +bar +foo +--TEST-- +"use" tag +--TEMPLATE-- +{% use "foo.twig" %} +{% use "bar.twig" %} + +{{ block('content') }} +{{ block('foo') }} +{{ block('bar') }} +--TEMPLATE(foo.twig)-- +{% block content 'foo' %} +{% block foo 'foo' %} +--TEMPLATE(bar.twig)-- +{% block content 'bar' %} +{% block bar 'bar' %} +--DATA-- +return array() +--EXPECT-- +bar +foo +bar +--TEST-- +"use" tag +--TEMPLATE-- +{% use 'file2.html.twig'%} +{% block foobar %} + {{- parent() -}} + Content of block (second override) +{% endblock foobar %} +--TEMPLATE(file2.html.twig)-- +{% use 'file1.html.twig' %} +{% block foobar %} + {{- parent() -}} + Content of block (first override) +{% endblock foobar %} +--TEMPLATE(file1.html.twig)-- +{% block foobar -%} + Content of block +{% endblock foobar %} +--DATA-- +return array() +--EXPECT-- +Content of block +Content of block (first override) +Content of block (second override) +--TEST-- +"use" tag +--TEMPLATE-- +{% use 'file2.html.twig' %} +{% use 'file1.html.twig' with foo %} +{% block foo %} + {{- parent() -}} + Content of foo (second override) +{% endblock foo %} +{% block bar %} + {{- parent() -}} + Content of bar (second override) +{% endblock bar %} +--TEMPLATE(file2.html.twig)-- +{% use 'file1.html.twig' %} +{% block foo %} + {{- parent() -}} + Content of foo (first override) +{% endblock foo %} +{% block bar %} + {{- parent() -}} + Content of bar (first override) +{% endblock bar %} +--TEMPLATE(file1.html.twig)-- +{% block foo -%} + Content of foo +{% endblock foo %} +{% block bar -%} + Content of bar +{% endblock bar %} +--DATA-- +return array() +--EXPECT-- +Content of foo +Content of foo (first override) +Content of foo (second override) +Content of bar +Content of bar (second override) +--TEST-- +"use" tag +--TEMPLATE-- +{% use 'file2.html.twig' with foobar as base_base_foobar %} +{% block foobar %} + {{- block('base_base_foobar') -}} + Content of block (second override) +{% endblock foobar %} +--TEMPLATE(file2.html.twig)-- +{% use 'file1.html.twig' with foobar as base_foobar %} +{% block foobar %} + {{- block('base_foobar') -}} + Content of block (first override) +{% endblock foobar %} +--TEMPLATE(file1.html.twig)-- +{% block foobar -%} + Content of block +{% endblock foobar %} +--DATA-- +return array() +--EXPECT-- +Content of block +Content of block (first override) +Content of block (second override) +--TEST-- +"verbatim" tag +--TEMPLATE-- +{% verbatim %} +{{ foo }} +{% endverbatim %} +--DATA-- +return array() +--EXPECT-- +{{ foo }} +--TEST-- +"verbatim" tag +--TEMPLATE-- +{% verbatim %} +{{ foo }} +{% endraw %} +--DATA-- +return array() +--EXCEPTION-- +Twig_Error_Syntax: Unexpected end of file: Unclosed "verbatim" block in "index.twig" at line 2 +--TEST-- +"verbatim" tag +--TEMPLATE-- +1*** + +{%- verbatim %} + {{ 'bla' }} +{% endverbatim %} + +1*** +2*** + +{%- verbatim -%} + {{ 'bla' }} +{% endverbatim %} + +2*** +3*** + +{%- verbatim -%} + {{ 'bla' }} +{% endverbatim -%} + +3*** +4*** + +{%- verbatim -%} + {{ 'bla' }} +{%- endverbatim %} + +4*** +5*** + +{%- verbatim -%} + {{ 'bla' }} +{%- endverbatim -%} + +5*** +--DATA-- +return array() +--EXPECT-- +1*** + {{ 'bla' }} + 
+ +1*** +2***{{ 'bla' }} + + +2*** +3***{{ 'bla' }} +3*** +4***{{ 'bla' }} + +4*** +5***{{ 'bla' }}5*** +--TEST-- +array index test +--TEMPLATE-- +{% for key, value in days %} +{{ key }} +{% endfor %} +--DATA-- +return array('days' => array( + 1 => array('money' => 9), + 2 => array('money' => 21), + 3 => array('money' => 38), + 4 => array('money' => 6), + 18 => array('money' => 6), + 19 => array('money' => 3), + 31 => array('money' => 11), +)); +--EXPECT-- +1 +2 +3 +4 +18 +19 +31 +--TEST-- +"const" test +--TEMPLATE-- +{{ 8 is constant('E_NOTICE') ? 'ok' : 'no' }} +{{ 'bar' is constant('TwigTestFoo::BAR_NAME') ? 'ok' : 'no' }} +{{ value is constant('TwigTestFoo::BAR_NAME') ? 'ok' : 'no' }} +{{ 2 is constant('ARRAY_AS_PROPS', object) ? 'ok' : 'no' }} +--DATA-- +return array('value' => 'bar', 'object' => new ArrayObject(array('hi'))); +--EXPECT-- +ok +ok +ok +ok--TEST-- +"defined" test +--TEMPLATE-- +{{ definedVar is defined ? 'ok' : 'ko' }} +{{ definedVar is not defined ? 'ko' : 'ok' }} +{{ undefinedVar is defined ? 'ko' : 'ok' }} +{{ undefinedVar is not defined ? 'ok' : 'ko' }} +{{ zeroVar is defined ? 'ok' : 'ko' }} +{{ nullVar is defined ? 'ok' : 'ko' }} +{{ nested.definedVar is defined ? 'ok' : 'ko' }} +{{ nested['definedVar'] is defined ? 'ok' : 'ko' }} +{{ nested.definedVar is not defined ? 'ko' : 'ok' }} +{{ nested.undefinedVar is defined ? 'ko' : 'ok' }} +{{ nested['undefinedVar'] is defined ? 'ko' : 'ok' }} +{{ nested.undefinedVar is not defined ? 'ok' : 'ko' }} +{{ nested.zeroVar is defined ? 'ok' : 'ko' }} +{{ nested.nullVar is defined ? 'ok' : 'ko' }} +{{ nested.definedArray.0 is defined ? 'ok' : 'ko' }} +{{ nested['definedArray'][0] is defined ? 'ok' : 'ko' }} +{{ object.foo is defined ? 'ok' : 'ko' }} +{{ object.undefinedMethod is defined ? 'ko' : 'ok' }} +{{ object.getFoo() is defined ? 'ok' : 'ko' }} +{{ object.getFoo('a') is defined ? 'ok' : 'ko' }} +{{ object.undefinedMethod() is defined ? 'ko' : 'ok' }} +{{ object.undefinedMethod('a') is defined ? 'ko' : 'ok' }} +{{ object.self.foo is defined ? 'ok' : 'ko' }} +{{ object.self.undefinedMethod is defined ? 'ko' : 'ok' }} +{{ object.undefinedMethod.self is defined ? 'ko' : 'ok' }} +--DATA-- +return array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'nullVar' => null, + 'nested' => array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'nullVar' => null, + 'definedArray' => array(0), + ), + 'object' => new TwigTestFoo(), +); +--EXPECT-- +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +--DATA-- +return array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'nullVar' => null, + 'nested' => array( + 'definedVar' => 'defined', + 'zeroVar' => 0, + 'nullVar' => null, + 'definedArray' => array(0), + ), + 'object' => new TwigTestFoo(), +); +--CONFIG-- +return array('strict_variables' => false) +--EXPECT-- +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +ok +--TEST-- +"empty" test +--TEMPLATE-- +{{ foo is empty ? 'ok' : 'ko' }} +{{ bar is empty ? 'ok' : 'ko' }} +{{ foobar is empty ? 'ok' : 'ko' }} +{{ array is empty ? 'ok' : 'ko' }} +{{ zero is empty ? 'ok' : 'ko' }} +{{ string is empty ? 'ok' : 'ko' }} +{{ countable_empty is empty ? 'ok' : 'ko' }} +{{ countable_not_empty is empty ? 'ok' : 'ko' }} +{{ markup_empty is empty ? 'ok' : 'ko' }} +{{ markup_not_empty is empty ? 
'ok' : 'ko' }} +--DATA-- + +class CountableStub implements Countable +{ + private $items; + + public function __construct(array $items) + { + $this->items = $items; + } + + public function count() + { + return count($this->items); + } +} +return array( + 'foo' => '', 'bar' => null, 'foobar' => false, 'array' => array(), 'zero' => 0, 'string' => '0', + 'countable_empty' => new CountableStub(array()), 'countable_not_empty' => new CountableStub(array(1, 2)), + 'markup_empty' => new Twig_Markup('', 'UTF-8'), 'markup_not_empty' => new Twig_Markup('test', 'UTF-8'), +); +--EXPECT-- +ok +ok +ok +ok +ko +ko +ok +ko +ok +ko +--TEST-- +"even" test +--TEMPLATE-- +{{ 1 is even ? 'ko' : 'ok' }} +{{ 2 is even ? 'ok' : 'ko' }} +{{ 1 is not even ? 'ok' : 'ko' }} +{{ 2 is not even ? 'ko' : 'ok' }} +--DATA-- +return array() +--EXPECT-- +ok +ok +ok +ok +--TEST-- +Twig supports the in operator +--TEMPLATE-- +{% if bar in foo %} +TRUE +{% endif %} +{% if not (bar in foo) %} +{% else %} +TRUE +{% endif %} +{% if bar not in foo %} +{% else %} +TRUE +{% endif %} +{% if 'a' in bar %} +TRUE +{% endif %} +{% if 'c' not in bar %} +TRUE +{% endif %} +{% if '' not in bar %} +TRUE +{% endif %} +{% if '' in '' %} +TRUE +{% endif %} +{% if '0' not in '' %} +TRUE +{% endif %} +{% if 'a' not in '0' %} +TRUE +{% endif %} +{% if '0' in '0' %} +TRUE +{% endif %} +{{ false in [0, 1] ? 'TRUE' : 'FALSE' }} +{{ true in [0, 1] ? 'TRUE' : 'FALSE' }} +{{ '0' in [0, 1] ? 'TRUE' : 'FALSE' }} +{{ '' in [0, 1] ? 'TRUE' : 'FALSE' }} +{{ 0 in ['', 1] ? 'TRUE' : 'FALSE' }} +{{ '' in 'foo' ? 'TRUE' : 'FALSE' }} +{{ 0 in 'foo' ? 'TRUE' : 'FALSE' }} +{{ false in 'foo' ? 'TRUE' : 'FALSE' }} +{{ true in '100' ? 'TRUE' : 'FALSE' }} +{{ [] in 'Array' ? 'TRUE' : 'FALSE' }} +{{ [] in [true, false] ? 'TRUE' : 'FALSE' }} +{{ [] in [true, ''] ? 'TRUE' : 'FALSE' }} +{{ [] in [true, []] ? 'TRUE' : 'FALSE' }} +{{ dir_object in 'foo'~dir_name ? 'TRUE' : 'FALSE' }} +{{ 5 in 125 ? 'TRUE' : 'FALSE' }} +--DATA-- +return array('bar' => 'bar', 'foo' => array('bar' => 'bar'), 'dir_name' => dirname(__FILE__), 'dir_object' => new SplFileInfo(dirname(__FILE__))) +--EXPECT-- +TRUE +TRUE +TRUE +TRUE +TRUE +TRUE +TRUE +TRUE +TRUE +FALSE +FALSE +FALSE +FALSE +FALSE +TRUE +FALSE +FALSE +FALSE +FALSE +FALSE +FALSE +TRUE +FALSE +FALSE +--TEST-- +Twig supports the in operator when using objects +--TEMPLATE-- +{% if object in object_list %} +TRUE +{% endif %} +--DATA-- +$foo = new TwigTestFoo(); +$foo1 = new TwigTestFoo(); + +$foo->position = $foo1; +$foo1->position = $foo; + +return array( + 'object' => $foo, + 'object_list' => array($foo1, $foo), +); +--EXPECT-- +TRUE +--TEST-- +"iterable" test +--TEMPLATE-- +{{ foo is iterable ? 'ok' : 'ko' }} +{{ traversable is iterable ? 'ok' : 'ko' }} +{{ obj is iterable ? 'ok' : 'ko' }} +{{ val is iterable ? 'ok' : 'ko' }} +--DATA-- +return array( + 'foo' => array(), + 'traversable' => new ArrayIterator(array()), + 'obj' => new stdClass(), + 'val' => 'test', +); +--EXPECT-- +ok +ok +ko +ko--TEST-- +"odd" test +--TEMPLATE-- +{{ 1 is odd ? 'ok' : 'ko' }} +{{ 2 is odd ? 
'ko' : 'ok' }} +--DATA-- +return array() +--EXPECT-- +ok +ok diff --git a/vendor/pygments/tests/examplefiles/type.lisp b/vendor/pygments/tests/examplefiles/type.lisp index 9c76937..c02c29d 100644 --- a/vendor/pygments/tests/examplefiles/type.lisp +++ b/vendor/pygments/tests/examplefiles/type.lisp @@ -1200,3 +1200,19 @@ Henry Baker: (unless (clos::funcallable-instance-p #'clos::class-name) (fmakunbound 'clos::class-name)) + + +(keywordp :junk) + T + +(keywordp ::junk) + T + +(symbol-name ::junk) + "JUNK" + +(symbol-name :#junk) + "#JUNK" + +(symbol-name :#.junk) + "#.JUNK" diff --git a/vendor/pygments/tests/examplefiles/unicode.go b/vendor/pygments/tests/examplefiles/unicode.go new file mode 100644 index 0000000..d4bef4d --- /dev/null +++ b/vendor/pygments/tests/examplefiles/unicode.go @@ -0,0 +1,10 @@ +package main + +import "fmt" + +func main() { + 世界 := "Hello, world!" + さようなら := "Goodbye, world!" + fmt.Println(世界) + fmt.Println(さようなら) +} diff --git a/vendor/pygments/tests/examplefiles/unicode.js b/vendor/pygments/tests/examplefiles/unicode.js new file mode 100644 index 0000000..8f553f6 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/unicode.js @@ -0,0 +1,6 @@ +var école; +var sinθ; +var เมือง; +var a\u1234b; + +var nbsp; diff --git a/vendor/pygments/tests/examplefiles/test.bas b/vendor/pygments/tests/examplefiles/vbnet_test.bas similarity index 100% rename from vendor/pygments/tests/examplefiles/test.bas rename to vendor/pygments/tests/examplefiles/vbnet_test.bas diff --git a/vendor/pygments/tests/examplefiles/vctreestatus_hg b/vendor/pygments/tests/examplefiles/vctreestatus_hg new file mode 100644 index 0000000..193ed80 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/vctreestatus_hg @@ -0,0 +1,4 @@ +M LICENSE +M setup.py +! setup.cfg +? vctreestatus_hg diff --git a/vendor/pygments/tests/examplefiles/vimrc b/vendor/pygments/tests/examplefiles/vimrc new file mode 100644 index 0000000..d2f9cd1 --- /dev/null +++ b/vendor/pygments/tests/examplefiles/vimrc @@ -0,0 +1,21 @@ +" A comment + +:py print "py" +::pyt print 'pyt' + pyth print '''pyth''' + : pytho print "pytho" +python print """python""" + + : : python<>sys.stderr, file, 'failed %s:' % what, - print >>sys.stderr, exc - failed.append(file[:-3]) - - -class QuietTestRunner(object): - """Customized test runner for relatively quiet output""" - - def __init__(self, testname, stream=sys.stderr): - self.testname = testname - self.stream = unittest._WritelnDecorator(stream) - - def run(self, test): - global total_test_count - global error_test_count - result = unittest._TextTestResult(self.stream, True, 1) - test(result) - if not result.wasSuccessful(): - self.stream.write(' FAIL:') - result.printErrors() - failed.append(self.testname) - else: - self.stream.write(' ok\n') - total_test_count += result.testsRun - error_test_count += len(result.errors) + len(result.failures) - return result - - -def run_tests(with_coverage=False): - # needed to avoid confusion involving atexit handlers - import logging - - if sys.argv[1:]: - # test only files given on cmdline - files = [entry + '.py' for entry in sys.argv[1:] if entry.startswith('test_')] - else: - files = [entry for entry in os.listdir(testdir) - if (entry.startswith('test_') and entry.endswith('.py'))] - files.sort() - - WIDTH = 85 - - print >>sys.stderr, \ - ('Pygments %s Test Suite running%s, stand by...' 
% - (pygments.__version__, - with_coverage and " with coverage analysis" or "")).center(WIDTH) - print >>sys.stderr, ('(using Python %s)' % sys.version.split()[0]).center(WIDTH) - print >>sys.stderr, '='*WIDTH - - if with_coverage: - coverage.erase() - coverage.start() - - for testfile in files: - globs = {'__file__': join(testdir, testfile)} - try: - execfile(join(testdir, testfile), globs) - except Exception, exc: - raise - err(testfile, 'execfile', exc) - continue - sys.stderr.write(testfile[:-3] + ': ') - try: - runner = QuietTestRunner(testfile[:-3]) - # make a test suite of all TestCases in the file - tests = [] - for name, thing in globs.iteritems(): - if name.endswith('Test'): - tests.append((name, unittest.makeSuite(thing))) - tests.sort() - suite = unittest.TestSuite() - suite.addTests([x[1] for x in tests]) - runner.run(suite) - except Exception, exc: - err(testfile, 'running test', exc) - - print >>sys.stderr, '='*WIDTH - if failed: - print >>sys.stderr, '%d of %d tests failed.' % \ - (error_test_count, total_test_count) - print >>sys.stderr, 'Tests failed in:', ', '.join(failed) - ret = 1 - else: - if total_test_count == 1: - print >>sys.stderr, '1 test happy.' - else: - print >>sys.stderr, 'All %d tests happy.' % total_test_count - ret = 0 - - if with_coverage: - coverage.stop() - modules = [mod for name, mod in sys.modules.iteritems() - if name.startswith('pygments.') and mod] - coverage.report(modules) - - return ret - - -if __name__ == '__main__': - with_coverage = False - if sys.argv[1:2] == ['-C']: - with_coverage = bool(coverage) - del sys.argv[1] - sys.exit(run_tests(with_coverage)) diff --git a/vendor/pygments/tests/run.py b/vendor/pygments/tests/run.py index 18a1d82..8167b91 100644 --- a/vendor/pygments/tests/run.py +++ b/vendor/pygments/tests/run.py @@ -8,42 +8,43 @@ python run.py [testfile ...] - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -import sys, os +from __future__ import print_function -if sys.version_info >= (3,): - # copy test suite over to "build/lib" and convert it - print ('Copying and converting sources to build/lib/test...') - from distutils.util import copydir_run_2to3 - testroot = os.path.dirname(__file__) - newroot = os.path.join(testroot, '..', 'build/lib/test') - copydir_run_2to3(testroot, newroot) - # make nose believe that we run from the converted dir - os.chdir(newroot) -else: - # only find tests in this directory - if os.path.dirname(__file__): - os.chdir(os.path.dirname(__file__)) +import os +import sys + +# only find tests in this directory +if os.path.dirname(__file__): + os.chdir(os.path.dirname(__file__)) try: import nose except ImportError: - print ('nose is required to run the Pygments test suite') + print('nose is required to run the Pygments test suite') sys.exit(1) -try: - # make sure the current source is first on sys.path - sys.path.insert(0, '..') - import pygments -except ImportError: - print ('Cannot find Pygments to test: %s' % sys.exc_info()[1]) - sys.exit(1) +# make sure the current source is first on sys.path +sys.path.insert(0, '..') + +if '--with-coverage' not in sys.argv: + # if running with coverage, pygments should not be imported before coverage + # is started, otherwise it will count already executed lines as uncovered + try: + import pygments + except ImportError as err: + print('Cannot find Pygments to test: %s' % err) + sys.exit(1) + else: + print('Pygments %s test suite running (Python %s)...' % + (pygments.__version__, sys.version.split()[0]), + file=sys.stderr) else: - print ('Pygments %s test suite running (Python %s)...' % - (pygments.__version__, sys.version.split()[0])) + print('Pygments test suite running (Python %s)...' % sys.version.split()[0], + file=sys.stderr) nose.main() diff --git a/vendor/pygments/tests/string_asserts.py b/vendor/pygments/tests/string_asserts.py new file mode 100644 index 0000000..11f5c7f --- /dev/null +++ b/vendor/pygments/tests/string_asserts.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +""" + Pygments string assert utility + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +class StringTests(object): + + def assertStartsWith(self, haystack, needle, msg=None): + if msg is None: + msg = "'{0}' does not start with '{1}'".format(haystack, needle) + if not haystack.startswith(needle): + raise(AssertionError(msg)) + + def assertEndsWith(self, haystack, needle, msg=None): + if msg is None: + msg = "'{0}' does not end with '{1}'".format(haystack, needle) + if not haystack.endswith(needle): + raise(AssertionError(msg)) diff --git a/vendor/pygments/tests/support.py b/vendor/pygments/tests/support.py index 505c17d..c66ac66 100644 --- a/vendor/pygments/tests/support.py +++ b/vendor/pygments/tests/support.py @@ -5,6 +5,8 @@ Support for Pygments tests import os +from nose import SkipTest + def location(mod_name): """ diff --git a/vendor/pygments/tests/test_basic_api.py b/vendor/pygments/tests/test_basic_api.py index 00dc26f..022e6c5 100644 --- a/vendor/pygments/tests/test_basic_api.py +++ b/vendor/pygments/tests/test_basic_api.py @@ -3,19 +3,20 @@ Pygments basic API tests ~~~~~~~~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -import os +from __future__ import print_function + import random import unittest -from pygments import lexers, formatters, filters, format +from pygments import lexers, formatters, lex, format from pygments.token import _TokenType, Text from pygments.lexer import RegexLexer from pygments.formatters.img import FontNotFound -from pygments.util import BytesIO, StringIO, bytes, b +from pygments.util import text_type, StringIO, BytesIO, xrange, ClassNotFound import support @@ -26,10 +27,12 @@ random.shuffle(test_content) test_content = ''.join(test_content) + '\n' -def test_lexer_import_all(): +def test_lexer_instantiate_all(): # instantiate every lexer, to see if the token type defs are correct - for x in lexers.LEXERS.keys(): - c = getattr(lexers, x)() + def verify(name): + getattr(lexers, name) + for x in lexers.LEXERS: + yield verify, x def test_lexer_classes(): @@ -39,12 +42,14 @@ def test_lexer_classes(): for attr in 'aliases', 'filenames', 'alias_filenames', 'mimetypes': assert hasattr(cls, attr) assert type(getattr(cls, attr)) is list, \ - "%s: %s attribute wrong" % (cls, attr) + "%s: %s attribute wrong" % (cls, attr) result = cls.analyse_text("abc") assert isinstance(result, float) and 0.0 <= result <= 1.0 result = cls.analyse_text(".abc") assert isinstance(result, float) and 0.0 <= result <= 1.0 + assert all(al.lower() == al for al in cls.aliases) + inst = cls(opt1="val1", opt2="val2") if issubclass(cls, RegexLexer): if not hasattr(cls, '_tokens'): @@ -60,19 +65,22 @@ def test_lexer_classes(): if cls.name in ['XQuery', 'Opa']: # XXX temporary return - tokens = list(inst.get_tokens(test_content)) + try: + tokens = list(inst.get_tokens(test_content)) + except KeyboardInterrupt: + raise KeyboardInterrupt( + 'interrupted %s.get_tokens(): test_content=%r' % + (cls.__name__, test_content)) txt = "" for token in tokens: assert isinstance(token, tuple) assert isinstance(token[0], _TokenType) - if isinstance(token[1], str): - print repr(token[1]) - assert isinstance(token[1], unicode) + assert isinstance(token[1], text_type) txt += token[1] assert txt == test_content, "%s lexer roundtrip failed: %r != %r" % \ - (cls.name, test_content, txt) + (cls.name, test_content, txt) - for lexer in lexers._iter_lexerclasses(): + for lexer in lexers._iter_lexerclasses(plugins=False): yield verify, lexer @@ -81,7 +89,8 @@ def test_lexer_options(): def ensure(tokens, output): concatenated = ''.join(token[1] for token in tokens) assert concatenated == output, \ - '%s: %r != %r' % (lexer, concatenated, output) + '%s: %r != %r' % (lexer, concatenated, output) + def verify(cls): inst = cls(stripnl=False) ensure(inst.get_tokens('a\nb'), 'a\nb\n') @@ -89,18 +98,16 @@ def test_lexer_options(): inst = cls(stripall=True) ensure(inst.get_tokens(' \n b\n\n\n'), 'b\n') # some lexers require full lines in input - if cls.__name__ not in ( - 'PythonConsoleLexer', 'RConsoleLexer', 'RubyConsoleLexer', - 'SqliteConsoleLexer', 'MatlabSessionLexer', 'ErlangShellLexer', - 'BashSessionLexer', 'LiterateHaskellLexer', 'PostgresConsoleLexer', - 'ElixirConsoleLexer', 'JuliaConsoleLexer', 'RobotFrameworkLexer', - 'DylanConsoleLexer', 'ShellSessionLexer'): + if ('ConsoleLexer' not in cls.__name__ and + 'SessionLexer' not in cls.__name__ and + not cls.__name__.startswith('Literate') and + cls.__name__ not in ('ErlangShellLexer', 'RobotFrameworkLexer')): inst = cls(ensurenl=False) ensure(inst.get_tokens('a\nb'), 'a\nb') inst = cls(ensurenl=False, stripall=True) ensure(inst.get_tokens('a\nb\n\n'), 'a\nb') - for lexer in 
lexers._iter_lexerclasses(): + for lexer in lexers._iter_lexerclasses(plugins=False): if lexer.__name__ == 'RawTokenLexer': # this one is special continue @@ -122,7 +129,7 @@ def test_get_lexers(): ]: yield verify, func, args - for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.iteritems(): + for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.items(): assert cls == lexers.find_lexer_class(lname).__name__ for alias in aliases: @@ -131,34 +138,47 @@ def test_get_lexers(): for mimetype in mimetypes: assert cls == lexers.get_lexer_for_mimetype(mimetype).__class__.__name__ + try: + lexers.get_lexer_by_name(None) + except ClassNotFound: + pass + else: + raise Exception + def test_formatter_public_api(): - ts = list(lexers.PythonLexer().get_tokens("def f(): pass")) - out = StringIO() # test that every formatter class has the correct public API - def verify(formatter, info): - assert len(info) == 4 - assert info[0], "missing formatter name" - assert info[1], "missing formatter aliases" - assert info[3], "missing formatter docstring" + ts = list(lexers.PythonLexer().get_tokens("def f(): pass")) + string_out = StringIO() + bytes_out = BytesIO() - if formatter.name == 'Raw tokens': - # will not work with Unicode output file - return + def verify(formatter): + info = formatters.FORMATTERS[formatter.__name__] + assert len(info) == 5 + assert info[1], "missing formatter name" + assert info[2], "missing formatter aliases" + assert info[4], "missing formatter docstring" try: inst = formatter(opt1="val1") except (ImportError, FontNotFound): - return + raise support.SkipTest + try: inst.get_style_defs() except NotImplementedError: # may be raised by formatters for which it doesn't make sense pass - inst.format(ts, out) - for formatter, info in formatters.FORMATTERS.iteritems(): - yield verify, formatter, info + if formatter.unicodeoutput: + inst.format(ts, string_out) + else: + inst.format(ts, bytes_out) + + for name in formatters.FORMATTERS: + formatter = getattr(formatters, name) + yield verify, formatter + def test_formatter_encodings(): from pygments.formatters import HtmlFormatter @@ -167,7 +187,7 @@ def test_formatter_encodings(): fmt = HtmlFormatter() tokens = [(Text, u"ä")] out = format(tokens, fmt) - assert type(out) is unicode + assert type(out) is text_type assert u"ä" in out # encoding option @@ -191,12 +211,12 @@ def test_formatter_unicode_handling(): inst = formatter(encoding=None) except (ImportError, FontNotFound): # some dependency or font not installed - return + raise support.SkipTest if formatter.name != 'Raw tokens': out = format(tokens, inst) if formatter.unicodeoutput: - assert type(out) is unicode + assert type(out) is text_type, '%s: %r' % (formatter, out) inst = formatter(encoding='utf-8') out = format(tokens, inst) @@ -208,8 +228,10 @@ def test_formatter_unicode_handling(): out = format(tokens, inst) assert type(out) is bytes, '%s: %r' % (formatter, out) - for formatter, info in formatters.FORMATTERS.iteritems(): - yield verify, formatter + for formatter, info in formatters.FORMATTERS.items(): + # this tests the automatic importing as well + fmter = getattr(formatters, formatter) + yield verify, fmter def test_get_formatters(): @@ -226,27 +248,50 @@ def test_get_formatters(): def test_styles(): # minimal style test from pygments.formatters import HtmlFormatter - fmt = HtmlFormatter(style="pastie") + HtmlFormatter(style="pastie") + + +def test_bare_class_handler(): + from pygments.formatters import HtmlFormatter + from pygments.lexers import PythonLexer + try: + 
lex('test\n', PythonLexer) + except TypeError as e: + assert 'lex() argument must be a lexer instance' in str(e) + else: + assert False, 'nothing raised' + try: + format([], HtmlFormatter) + except TypeError as e: + assert 'format() argument must be a formatter instance' in str(e) + else: + assert False, 'nothing raised' class FiltersTest(unittest.TestCase): def test_basic(self): - filter_args = { - 'whitespace': {'spaces': True, 'tabs': True, 'newlines': True}, - 'highlight': {'names': ['isinstance', 'lexers', 'x']}, - } - for x in filters.FILTERS.keys(): + filters_args = [ + ('whitespace', {'spaces': True, 'tabs': True, 'newlines': True}), + ('whitespace', {'wstokentype': False, 'spaces': True}), + ('highlight', {'names': ['isinstance', 'lexers', 'x']}), + ('codetagify', {'codetags': 'API'}), + ('keywordcase', {'case': 'capitalize'}), + ('raiseonerror', {}), + ('gobble', {'n': 4}), + ('tokenmerge', {}), + ] + for x, args in filters_args: lx = lexers.PythonLexer() - lx.add_filter(x, **filter_args.get(x, {})) - fp = open(TESTFILE, 'rb') - try: + lx.add_filter(x, **args) + with open(TESTFILE, 'rb') as fp: text = fp.read().decode('utf-8') - finally: - fp.close() tokens = list(lx.get_tokens(text)) + self.assertTrue(all(isinstance(t[1], text_type) + for t in tokens), + '%s filter did not return Unicode' % x) roundtext = ''.join([t[1] for t in tokens]) - if x not in ('whitespace', 'keywordcase'): + if x not in ('whitespace', 'keywordcase', 'gobble'): # these filters change the text self.assertEqual(roundtext, text, "lexer roundtrip with %s filter failed" % x) @@ -259,22 +304,16 @@ class FiltersTest(unittest.TestCase): def test_whitespace(self): lx = lexers.PythonLexer() lx.add_filter('whitespace', spaces='%') - fp = open(TESTFILE, 'rb') - try: + with open(TESTFILE, 'rb') as fp: text = fp.read().decode('utf-8') - finally: - fp.close() lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))]) self.assertFalse(' ' in lxtext) def test_keywordcase(self): lx = lexers.PythonLexer() lx.add_filter('keywordcase', case='capitalize') - fp = open(TESTFILE, 'rb') - try: + with open(TESTFILE, 'rb') as fp: text = fp.read().decode('utf-8') - finally: - fp.close() lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))]) self.assertTrue('Def' in lxtext and 'Class' in lxtext) diff --git a/vendor/pygments/tests/test_cfm.py b/vendor/pygments/tests/test_cfm.py new file mode 100644 index 0000000..2585489 --- /dev/null +++ b/vendor/pygments/tests/test_cfm.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +""" + Basic ColdfusionHtmlLexer Test + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import unittest +import os + +from pygments.token import Token +from pygments.lexers import ColdfusionHtmlLexer + + +class ColdfusionHtmlLexerTest(unittest.TestCase): + + def setUp(self): + self.lexer = ColdfusionHtmlLexer() + + def testBasicComment(self): + fragment = u'' + expected = [ + (Token.Text, u''), + (Token.Comment.Multiline, u''), + (Token.Text, u'\n'), + ] + self.assertEqual(expected, list(self.lexer.get_tokens(fragment))) + + def testNestedComment(self): + fragment = u' --->' + expected = [ + (Token.Text, u''), + (Token.Comment.Multiline, u''), + (Token.Comment.Multiline, u' '), + (Token.Comment.Multiline, u'--->'), + (Token.Text, u'\n'), + ] + self.assertEqual(expected, list(self.lexer.get_tokens(fragment))) diff --git a/vendor/pygments/tests/test_clexer.py b/vendor/pygments/tests/test_clexer.py index 8b37bf5..fd7f58f 100644 --- a/vendor/pygments/tests/test_clexer.py +++ b/vendor/pygments/tests/test_clexer.py @@ -3,14 +3,15 @@ Basic CLexer Test ~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import unittest import os +import textwrap -from pygments.token import Text, Number +from pygments.token import Text, Number, Token from pygments.lexers import CLexer @@ -27,5 +28,232 @@ class CLexerTest(unittest.TestCase): Number.Float, Number.Float], code.split()): wanted.append(item) wanted.append((Text, ' ')) - wanted = [(Text, '')] + wanted[:-1] + [(Text, '\n')] + wanted = wanted[:-1] + [(Text, '\n')] self.assertEqual(list(self.lexer.get_tokens(code)), wanted) + + def testSwitch(self): + fragment = u'''\ + int main() + { + switch (0) + { + case 0: + default: + ; + } + } + ''' + tokens = [ + (Token.Keyword.Type, u'int'), + (Token.Text, u' '), + (Token.Name.Function, u'main'), + (Token.Punctuation, u'('), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Punctuation, u'{'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'switch'), + (Token.Text, u' '), + (Token.Punctuation, u'('), + (Token.Literal.Number.Integer, u'0'), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Punctuation, u'{'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'case'), + (Token.Text, u' '), + (Token.Literal.Number.Integer, u'0'), + (Token.Operator, u':'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'default'), + (Token.Operator, u':'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Punctuation, u';'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Punctuation, u'}'), + (Token.Text, u'\n'), + (Token.Punctuation, u'}'), + (Token.Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) + + def testSwitchSpaceBeforeColon(self): + fragment = u'''\ + int main() + { + switch (0) + { + case 0 : + default : + ; + } + } + ''' + tokens = [ + (Token.Keyword.Type, u'int'), + (Token.Text, u' '), + (Token.Name.Function, u'main'), + (Token.Punctuation, u'('), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Punctuation, u'{'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'switch'), + (Token.Text, u' '), + (Token.Punctuation, u'('), + (Token.Literal.Number.Integer, u'0'), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Punctuation, u'{'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'case'), + (Token.Text, u' '), + 
(Token.Literal.Number.Integer, u'0'), + (Token.Text, u' '), + (Token.Operator, u':'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'default'), + (Token.Text, u' '), + (Token.Operator, u':'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Punctuation, u';'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Punctuation, u'}'), + (Token.Text, u'\n'), + (Token.Punctuation, u'}'), + (Token.Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) + + def testLabel(self): + fragment = u'''\ + int main() + { + foo: + goto foo; + } + ''' + tokens = [ + (Token.Keyword.Type, u'int'), + (Token.Text, u' '), + (Token.Name.Function, u'main'), + (Token.Punctuation, u'('), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Punctuation, u'{'), + (Token.Text, u'\n'), + (Token.Name.Label, u'foo'), + (Token.Punctuation, u':'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'goto'), + (Token.Text, u' '), + (Token.Name, u'foo'), + (Token.Punctuation, u';'), + (Token.Text, u'\n'), + (Token.Punctuation, u'}'), + (Token.Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) + + def testLabelSpaceBeforeColon(self): + fragment = u'''\ + int main() + { + foo : + goto foo; + } + ''' + tokens = [ + (Token.Keyword.Type, u'int'), + (Token.Text, u' '), + (Token.Name.Function, u'main'), + (Token.Punctuation, u'('), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Punctuation, u'{'), + (Token.Text, u'\n'), + (Token.Name.Label, u'foo'), + (Token.Text, u' '), + (Token.Punctuation, u':'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'goto'), + (Token.Text, u' '), + (Token.Name, u'foo'), + (Token.Punctuation, u';'), + (Token.Text, u'\n'), + (Token.Punctuation, u'}'), + (Token.Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) + + def testLabelFollowedByStatement(self): + fragment = u'''\ + int main() + { + foo:return 0; + goto foo; + } + ''' + tokens = [ + (Token.Keyword.Type, u'int'), + (Token.Text, u' '), + (Token.Name.Function, u'main'), + (Token.Punctuation, u'('), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Punctuation, u'{'), + (Token.Text, u'\n'), + (Token.Name.Label, u'foo'), + (Token.Punctuation, u':'), + (Token.Keyword, u'return'), + (Token.Text, u' '), + (Token.Literal.Number.Integer, u'0'), + (Token.Punctuation, u';'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'goto'), + (Token.Text, u' '), + (Token.Name, u'foo'), + (Token.Punctuation, u';'), + (Token.Text, u'\n'), + (Token.Punctuation, u'}'), + (Token.Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) + + def testPreprocFile(self): + fragment = u'#include \n' + tokens = [ + (Token.Comment.Preproc, u'#'), + (Token.Comment.Preproc, u'include'), + (Token.Text, u' '), + (Token.Comment.PreprocFile, u''), + (Token.Comment.Preproc, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testPreprocFile2(self): + fragment = u'#include "foo.h"\n' + tokens = [ + (Token.Comment.Preproc, u'#'), + (Token.Comment.Preproc, u'include'), + (Token.Text, u' '), + (Token.Comment.PreprocFile, u'"foo.h"'), + (Token.Comment.Preproc, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + diff --git a/vendor/pygments/tests/test_cmdline.py b/vendor/pygments/tests/test_cmdline.py index 5ad815c..5883fb5 100644 --- 
a/vendor/pygments/tests/test_cmdline.py +++ b/vendor/pygments/tests/test_cmdline.py @@ -3,103 +3,250 @@ Command line test ~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -# Test the command line interface +from __future__ import print_function -import sys, os +import io +import os +import re +import sys +import tempfile import unittest -import StringIO - -from pygments import highlight -from pygments.cmdline import main as cmdline_main import support +from pygments import cmdline, highlight +from pygments.util import BytesIO, StringIO + TESTFILE, TESTDIR = support.location(__file__) +TESTCODE = '''\ +def func(args): + pass +''' -def run_cmdline(*args): +def run_cmdline(*args, **kwds): + saved_stdin = sys.stdin saved_stdout = sys.stdout saved_stderr = sys.stderr - new_stdout = sys.stdout = StringIO.StringIO() - new_stderr = sys.stderr = StringIO.StringIO() + if sys.version_info > (3,): + stdin_buffer = BytesIO() + stdout_buffer = BytesIO() + stderr_buffer = BytesIO() + new_stdin = sys.stdin = io.TextIOWrapper(stdin_buffer, 'utf-8') + new_stdout = sys.stdout = io.TextIOWrapper(stdout_buffer, 'utf-8') + new_stderr = sys.stderr = io.TextIOWrapper(stderr_buffer, 'utf-8') + else: + stdin_buffer = new_stdin = sys.stdin = StringIO() + stdout_buffer = new_stdout = sys.stdout = StringIO() + stderr_buffer = new_stderr = sys.stderr = StringIO() + new_stdin.write(kwds.get('stdin', '')) + new_stdin.seek(0, 0) try: - ret = cmdline_main(["pygmentize"] + list(args)) + ret = cmdline.main(['pygmentize'] + list(args)) finally: + sys.stdin = saved_stdin sys.stdout = saved_stdout sys.stderr = saved_stderr - return (ret, new_stdout.getvalue(), new_stderr.getvalue()) + new_stdout.flush() + new_stderr.flush() + out, err = stdout_buffer.getvalue().decode('utf-8'), \ + stderr_buffer.getvalue().decode('utf-8') + return (ret, out, err) class CmdLineTest(unittest.TestCase): - def test_L_opt(self): - c, o, e = run_cmdline("-L") - self.assertEqual(c, 0) - self.assertTrue("Lexers" in o and "Formatters" in o and - "Filters" in o and "Styles" in o) - c, o, e = run_cmdline("-L", "lexer") - self.assertEqual(c, 0) - self.assertTrue("Lexers" in o and "Formatters" not in o) - c, o, e = run_cmdline("-L", "lexers") - self.assertEqual(c, 0) + def check_success(self, *cmdline, **kwds): + code, out, err = run_cmdline(*cmdline, **kwds) + self.assertEqual(code, 0) + self.assertEqual(err, '') + return out - def test_O_opt(self): - filename = TESTFILE - c, o, e = run_cmdline("-Ofull=1,linenos=true,foo=bar", - "-fhtml", filename) - self.assertEqual(c, 0) - self.assertTrue("foo, bar=baz=," in o) - - def test_F_opt(self): - filename = TESTFILE - c, o, e = run_cmdline("-Fhighlight:tokentype=Name.Blubb," - "names=TESTFILE filename", - "-fhtml", filename) - self.assertEqual(c, 0) - self.assertTrue(']*>', '', o) + # rstrip is necessary since HTML inserts a \n after the last + self.assertEqual(o.rstrip(), TESTCODE.rstrip()) + + # guess if no lexer given + o = self.check_success('-fhtml', stdin=TESTCODE) + o = re.sub('<[^>]*>', '', o) + # rstrip is necessary since HTML inserts a \n after the last + self.assertEqual(o.rstrip(), TESTCODE.rstrip()) + + def test_outfile(self): + # test that output file works with and without encoding + fd, name = tempfile.mkstemp() + os.close(fd) + for opts in [['-fhtml', '-o', name, TESTFILE], + ['-flatex', '-o', name, TESTFILE], + ['-fhtml', '-o', name, '-O', 
'encoding=utf-8', TESTFILE]]: + try: + self.check_success(*opts) + finally: + os.unlink(name) + + def test_stream_opt(self): + o = self.check_success('-lpython', '-s', '-fterminal', stdin=TESTCODE) + o = re.sub(r'\x1b\[.*?m', '', o) + self.assertEqual(o.replace('\r\n', '\n'), TESTCODE) + + def test_h_opt(self): + o = self.check_success('-h') + self.assertTrue('Usage:' in o) + + def test_L_opt(self): + o = self.check_success('-L') + self.assertTrue('Lexers' in o and 'Formatters' in o and + 'Filters' in o and 'Styles' in o) + o = self.check_success('-L', 'lexer') + self.assertTrue('Lexers' in o and 'Formatters' not in o) + self.check_success('-L', 'lexers') + + def test_O_opt(self): + filename = TESTFILE + o = self.check_success('-Ofull=1,linenos=true,foo=bar', + '-fhtml', filename) + self.assertTrue('foo, bar=baz=,' in o) + + def test_F_opt(self): + filename = TESTFILE + o = self.check_success('-Fhighlight:tokentype=Name.Blubb,' + 'names=TESTFILE filename', + '-fhtml', filename) + self.assertTrue('_filename ' 'for overriding, thus no lexer found.' - % fn) - try: - name, rest = fn.split("_", 1) - lx = get_lexer_by_name(name) - except ClassNotFound: - raise AssertionError('no lexer found for file %r' % fn) - yield check_lexer, lx, absfn, outfn + % fn) + yield check_lexer, lx, fn -def check_lexer(lx, absfn, outfn): - fp = open(absfn, 'rb') - try: + N = 7 + stats = list(STATS.items()) + stats.sort(key=lambda x: x[1][1]) + print('\nExample files that took longest absolute time:') + for fn, t in stats[-N:]: + print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t)) + print() + stats.sort(key=lambda x: x[1][2]) + print('\nExample files that took longest relative time:') + for fn, t in stats[-N:]: + print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t)) + + +def check_lexer(lx, fn): + if os.name == 'java' and fn in BAD_FILES_FOR_JYTHON: + raise support.SkipTest + absfn = os.path.join(TESTDIR, 'examplefiles', fn) + with open(absfn, 'rb') as fp: text = fp.read() - finally: - fp.close() - text = text.replace(b('\r\n'), b('\n')) - text = text.strip(b('\n')) + b('\n') + text = text.replace(b'\r\n', b'\n') + text = text.strip(b'\n') + b'\n' try: text = text.decode('utf-8') if text.startswith(u'\ufeff'): @@ -64,36 +99,36 @@ def check_lexer(lx, absfn, outfn): text = text.decode('latin1') ntext = [] tokens = [] + import time + t1 = time.time() for type, val in lx.get_tokens(text): ntext.append(val) assert type != Error, \ 'lexer %s generated error token for %s: %r at position %d' % \ (lx, absfn, val, len(u''.join(ntext))) tokens.append((type, val)) + t2 = time.time() + STATS[os.path.basename(absfn)] = (len(text), + 1000 * (t2 - t1), 1000 * (t2 - t1) / len(text)) if u''.join(ntext) != text: - print '\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(), - text.splitlines())) + print('\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(), + text.splitlines()))) raise AssertionError('round trip failed for ' + absfn) # check output against previous run if enabled if STORE_OUTPUT: # no previous output -- store it + outfn = os.path.join(TESTDIR, 'examplefiles', 'output', fn) if not os.path.isfile(outfn): - fp = open(outfn, 'wb') - try: + with open(outfn, 'wb') as fp: pickle.dump(tokens, fp) - finally: - fp.close() return # otherwise load it and compare - fp = open(outfn, 'rb') - try: + with open(outfn, 'rb') as fp: stored_tokens = pickle.load(fp) - finally: - fp.close() if stored_tokens != tokens: f1 = pprint.pformat(stored_tokens) f2 = pprint.pformat(tokens) - print 
'\n'.join(difflib.unified_diff(f1.splitlines(), - f2.splitlines())) + print('\n'.join(difflib.unified_diff(f1.splitlines(), + f2.splitlines()))) assert False, absfn diff --git a/vendor/pygments/tests/test_ezhil.py b/vendor/pygments/tests/test_ezhil.py new file mode 100644 index 0000000..23b9cb4 --- /dev/null +++ b/vendor/pygments/tests/test_ezhil.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +""" + Basic EzhilLexer Test + ~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2015 Muthiah Annamalai + :license: BSD, see LICENSE for details. +""" + +import unittest + +from pygments.token import Operator, Number, Text, Token +from pygments.lexers import EzhilLexer + + +class EzhilTest(unittest.TestCase): + + def setUp(self): + self.lexer = EzhilLexer() + self.maxDiff = None + + def testSum(self): + fragment = u'1+3\n' + tokens = [ + (Number.Integer, u'1'), + (Operator, u'+'), + (Number.Integer, u'3'), + (Text, u'\n'), + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testGCDExpr(self): + fragment = u'1^3+(5-5)*gcd(a,b)\n' + tokens = [ + (Token.Number.Integer,u'1'), + (Token.Operator,u'^'), + (Token.Literal.Number.Integer, u'3'), + (Token.Operator, u'+'), + (Token.Punctuation, u'('), + (Token.Literal.Number.Integer, u'5'), + (Token.Operator, u'-'), + (Token.Literal.Number.Integer, u'5'), + (Token.Punctuation, u')'), + (Token.Operator, u'*'), + (Token.Name, u'gcd'), + (Token.Punctuation, u'('), + (Token.Name, u'a'), + (Token.Operator, u','), + (Token.Name, u'b'), + (Token.Punctuation, u')'), + (Token.Text, u'\n') + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testIfStatement(self): + fragment = u"""@( 0 > 3 ) ஆனால் + பதிப்பி "wont print" +முடி""" + tokens = [ + (Token.Operator, u'@'), + (Token.Punctuation, u'('), + (Token.Text, u' '), + (Token.Literal.Number.Integer,u'0'), + (Token.Text, u' '), + (Token.Operator,u'>'), + (Token.Text, u' '), + (Token.Literal.Number.Integer, u'3'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u' '), + (Token.Keyword, u'ஆனால்'), + (Token.Text, u'\n'), + (Token.Text, u'\t'), + (Token.Keyword, u'பதிப்பி'), + (Token.Text, u' '), + (Token.Literal.String, u'"wont print"'), + (Token.Text, u'\t'), + (Token.Text, u'\n'), + (Token.Keyword, u'முடி'), + (Token.Text, u'\n') + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + + def testFunction(self): + fragment = u"""# (C) முத்தையா அண்ணாமலை 2013, 2015 +நிரல்பாகம் gcd ( x, y ) + மு = max(x,y) + q = min(x,y) + + @( q == 0 ) ஆனால் + பின்கொடு மு + முடி + பின்கொடு gcd( மு - q , q ) +முடி\n""" + tokens = [ + (Token.Comment.Single, + u'# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'), + (Token.Keyword,u'நிரல்பாகம்'), + (Token.Text, u' '), + (Token.Name, u'gcd'), + (Token.Text, u' '), + (Token.Punctuation, u'('), + (Token.Text, u' '), + (Token.Name, u'x'), + (Token.Operator, u','), + (Token.Text, u' '), + (Token.Name, u'y'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Name, u'\u0bae\u0bc1'), + (Token.Text, u' '), + (Token.Operator, u'='), + (Token.Text, u' '), + (Token.Name.Builtin, u'max'), + (Token.Punctuation, u'('), + (Token.Name, u'x'), + (Token.Operator, u','), + (Token.Name, u'y'), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Operator, u'='), + (Token.Text, u' '), + (Token.Name.Builtin, u'min'), + (Token.Punctuation, u'('), + 
(Token.Name, u'x'), + (Token.Operator, u','), + (Token.Name, u'y'), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Operator, u'@'), + (Token.Punctuation, u'('), + (Token.Text, u' '), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Operator, u'=='), + (Token.Text, u' '), + (Token.Literal.Number.Integer, u'0'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u' '), + (Token.Keyword, u'ஆனால்'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'பின்கொடு'), + (Token.Text, u' '), + (Token.Name, u'\u0bae\u0bc1'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'முடி'), + (Token.Text, u'\n'), + (Token.Text, u' '), + (Token.Keyword, u'\u0baa\u0bbf\u0ba9\u0bcd\u0b95\u0bca\u0b9f\u0bc1'), + (Token.Text, u' '), + (Token.Name, u'gcd'), + (Token.Punctuation, u'('), + (Token.Text, u' '), + (Token.Name, u'\u0bae\u0bc1'), + (Token.Text, u' '), + (Token.Operator, u'-'), + (Token.Text, u' '), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Operator, u','), + (Token.Text, u' '), + (Token.Name, u'q'), + (Token.Text, u' '), + (Token.Punctuation, u')'), + (Token.Text, u'\n'), + (Token.Keyword, u'முடி'), #u'\u0bae\u0bc1\u0b9f\u0bbf'), + (Token.Text, u'\n') + ] + self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + +if __name__ == "__main__": + unittest.main() diff --git a/vendor/pygments/tests/test_html_formatter.py b/vendor/pygments/tests/test_html_formatter.py index f7e7a54..596d9fb 100644 --- a/vendor/pygments/tests/test_html_formatter.py +++ b/vendor/pygments/tests/test_html_formatter.py @@ -3,41 +3,40 @@ Pygments HTML formatter tests ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" +from __future__ import print_function + +import io import os import re import unittest -import StringIO import tempfile from os.path import join, dirname, isfile +from pygments.util import StringIO from pygments.lexers import PythonLexer from pygments.formatters import HtmlFormatter, NullFormatter from pygments.formatters.html import escape_html -from pygments.util import uni_open import support TESTFILE, TESTDIR = support.location(__file__) -fp = uni_open(TESTFILE, encoding='utf-8') -try: +with io.open(TESTFILE, encoding='utf-8') as fp: tokensource = list(PythonLexer().get_tokens(fp.read())) -finally: - fp.close() class HtmlFormatterTest(unittest.TestCase): def test_correct_output(self): hfmt = HtmlFormatter(nowrap=True) - houtfile = StringIO.StringIO() + houtfile = StringIO() hfmt.format(tokensource, houtfile) nfmt = NullFormatter() - noutfile = StringIO.StringIO() + noutfile = StringIO() nfmt.format(tokensource, noutfile) stripped_html = re.sub('<.*?>', '', houtfile.getvalue()) @@ -69,18 +68,35 @@ class HtmlFormatterTest(unittest.TestCase): pass def test_all_options(self): - for optdict in [dict(nowrap=True), - dict(linenos=True), - dict(linenos=True, full=True), - dict(linenos=True, full=True, noclasses=True)]: - - outfile = StringIO.StringIO() + def check(optdict): + outfile = StringIO() fmt = HtmlFormatter(**optdict) fmt.format(tokensource, outfile) + for optdict in [ + dict(nowrap=True), + dict(linenos=True, full=True), + dict(linenos=True, linespans='L'), + dict(hl_lines=[1, 5, 10, 'xxx']), + dict(hl_lines=[1, 5, 10], noclasses=True), + ]: + check(optdict) + + for linenos in [False, 'table', 'inline']: + for noclasses in [False, True]: + for linenospecial in [0, 5]: + for anchorlinenos in [False, True]: + optdict = dict( + linenos=linenos, + noclasses=noclasses, + linenospecial=linenospecial, + anchorlinenos=anchorlinenos, + ) + check(optdict) + def test_linenos(self): optdict = dict(linenos=True) - outfile = StringIO.StringIO() + outfile = StringIO() fmt = HtmlFormatter(**optdict) fmt.format(tokensource, outfile) html = outfile.getvalue() @@ -88,7 +104,7 @@ class HtmlFormatterTest(unittest.TestCase): def test_linenos_with_startnum(self): optdict = dict(linenos=True, linenostart=5) - outfile = StringIO.StringIO() + outfile = StringIO() fmt = HtmlFormatter(**optdict) fmt.format(tokensource, outfile) html = outfile.getvalue() @@ -96,19 +112,19 @@ class HtmlFormatterTest(unittest.TestCase): def test_lineanchors(self): optdict = dict(lineanchors="foo") - outfile = StringIO.StringIO() + outfile = StringIO() fmt = HtmlFormatter(**optdict) fmt.format(tokensource, outfile) html = outfile.getvalue() - self.assertTrue(re.search("
    ", html))
    +        self.assertTrue(re.search("
    ", html))
     
         def test_lineanchors_with_startnum(self):
             optdict = dict(lineanchors="foo", linenostart=5)
    -        outfile = StringIO.StringIO()
    +        outfile = StringIO()
             fmt = HtmlFormatter(**optdict)
             fmt.format(tokensource, outfile)
             html = outfile.getvalue()
    -        self.assertTrue(re.search("
    ", html))
    +        self.assertTrue(re.search("
    ", html))
     
         def test_valid_output(self):
             # test all available wrappers
    @@ -132,7 +148,7 @@ class HtmlFormatterTest(unittest.TestCase):
                 pass
             else:
                 if ret:
    -                print output
    +                print(output)
                 self.assertFalse(ret, 'nsgmls run reported errors')
     
             os.unlink(pathname)
    @@ -172,7 +188,15 @@ class HtmlFormatterTest(unittest.TestCase):
                 # anymore in the actual source
                 fmt = HtmlFormatter(tagsfile='support/tags', lineanchors='L',
                                     tagurlformat='%(fname)s%(fext)s')
    -            outfile = StringIO.StringIO()
    +            outfile = StringIO()
                 fmt.format(tokensource, outfile)
                 self.assertTrue('test_ctags'
                                 in outfile.getvalue())
    +
    +    def test_filename(self):
    +        optdict = dict(filename="test.py")
    +        outfile = StringIO()
    +        fmt = HtmlFormatter(**optdict)
    +        fmt.format(tokensource, outfile)
    +        html = outfile.getvalue()
    +        self.assertTrue(re.search("test.py
    ", html))
    diff --git a/vendor/pygments/tests/test_inherit.py b/vendor/pygments/tests/test_inherit.py
    new file mode 100644
    index 0000000..34033a0
    --- /dev/null
    +++ b/vendor/pygments/tests/test_inherit.py
    @@ -0,0 +1,94 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Tests for inheritance in RegexLexer
    +    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +
    +from pygments.lexer import RegexLexer, inherit
    +from pygments.token import Text
    +
    +
    +class InheritTest(unittest.TestCase):
    +    def test_single_inheritance_position(self):
    +        t = Two()
    +        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    +        self.assertEqual(['x', 'a', 'b', 'y'], pats)
    +    def test_multi_inheritance_beginning(self):
    +        t = Beginning()
    +        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    +        self.assertEqual(['x', 'a', 'b', 'y', 'm'], pats)
    +    def test_multi_inheritance_end(self):
    +        t = End()
    +        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    +        self.assertEqual(['m', 'x', 'a', 'b', 'y'], pats)
    +
    +    def test_multi_inheritance_position(self):
    +        t = Three()
    +        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    +        self.assertEqual(['i', 'x', 'a', 'b', 'y', 'j'], pats)
    +
    +    def test_single_inheritance_with_skip(self):
    +        t = Skipped()
    +        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    +        self.assertEqual(['x', 'a', 'b', 'y'], pats)
    +
    +
    +class One(RegexLexer):
    +    tokens = {
    +        'root': [
    +            ('a', Text),
    +            ('b', Text),
    +        ],
    +    }
    +
    +class Two(One):
    +    tokens = {
    +        'root': [
    +            ('x', Text),
    +            inherit,
    +            ('y', Text),
    +        ],
    +    }
    +
    +class Three(Two):
    +    tokens = {
    +        'root': [
    +            ('i', Text),
    +            inherit,
    +            ('j', Text),
    +        ],
    +    }
    +
    +class Beginning(Two):
    +    tokens = {
    +        'root': [
    +            inherit,
    +            ('m', Text),
    +        ],
    +    }
    +
    +class End(Two):
    +    tokens = {
    +        'root': [
    +            ('m', Text),
    +            inherit,
    +        ],
    +    }
    +
    +class Empty(One):
    +    tokens = {}
    +
    +class Skipped(Empty):
    +    tokens = {
    +        'root': [
    +            ('x', Text),
    +            inherit,
    +            ('y', Text),
    +        ],
    +    }
    +
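For context, `inherit` marks the point where a subclass lexer splices in its parent's rules for the same state, which is exactly what the position tests above pin down. A minimal sketch (illustrative lexer names, not from this patch):

    from pygments.lexer import RegexLexer, inherit
    from pygments.token import Keyword, Name, Text

    class BaseLexer(RegexLexer):
        tokens = {
            'root': [
                (r'[a-z]+', Name),
                (r'\s+', Text),
            ],
        }

    class ExtendedLexer(BaseLexer):
        tokens = {
            'root': [
                (r'\b(?:if|else)\b', Keyword),  # tried before the inherited rules
                inherit,                        # BaseLexer's 'root' rules land here
            ],
        }

    # 'if' hits the subclass rule, 'foo' falls through to the inherited one.
    print(list(ExtendedLexer().get_tokens(u'if foo')))
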
    diff --git a/vendor/pygments/tests/test_irc_formatter.py b/vendor/pygments/tests/test_irc_formatter.py
    new file mode 100644
    index 0000000..16a8fd3
    --- /dev/null
    +++ b/vendor/pygments/tests/test_irc_formatter.py
    @@ -0,0 +1,30 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Pygments IRC formatter tests
    +    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +from __future__ import print_function
    +
    +import re
    +import unittest
    +
    +from pygments.util import StringIO
    +from pygments.lexers import PythonLexer
    +from pygments.formatters import IRCFormatter
    +
    +import support
    +
    +tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
    +
    +class IRCFormatterTest(unittest.TestCase):
    +    def test_correct_output(self):
    +        hfmt = IRCFormatter()
    +        houtfile = StringIO()
    +        hfmt.format(tokensource, houtfile)
    +
    +        self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
    +
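For context, the IRCFormatter introduced in this release wraps tokens in mIRC-style colour codes; the expected string in the test above shows the exact escapes. A minimal sketch (not part of the patch):

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import IRCFormatter

    # Colour codes such as \x0302 ... \x03 wrap keywords and numbers.
    print(repr(highlight(u'lambda x: 123', PythonLexer(), IRCFormatter())))
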
    diff --git a/vendor/pygments/tests/test_java.py b/vendor/pygments/tests/test_java.py
    new file mode 100644
    index 0000000..f409664
    --- /dev/null
    +++ b/vendor/pygments/tests/test_java.py
    @@ -0,0 +1,78 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Basic JavaLexer Test
    +    ~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +
    +from pygments.token import Text, Name, Operator, Keyword, Number
    +from pygments.lexers import JavaLexer
    +
    +
    +class JavaTest(unittest.TestCase):
    +
    +    def setUp(self):
    +        self.lexer = JavaLexer()
    +        self.maxDiff = None
    +
    +    def testEnhancedFor(self):
    +        fragment = u'label:\nfor(String var2: var1) {}\n'
    +        tokens = [
    +            (Name.Label, u'label:'),
    +            (Text, u'\n'),
    +            (Keyword, u'for'),
    +            (Operator, u'('),
    +            (Name, u'String'),
    +            (Text, u' '),
    +            (Name, u'var2'),
    +            (Operator, u':'),
    +            (Text, u' '),
    +            (Name, u'var1'),
    +            (Operator, u')'),
    +            (Text, u' '),
    +            (Operator, u'{'),
    +            (Operator, u'}'),
    +            (Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testNumericLiterals(self):
    +        fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
    +        fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
    +        tokens = [
    +            (Number.Integer, '0'),
    +            (Text, ' '),
    +            (Number.Integer, '5L'),
    +            (Text, ' '),
    +            (Number.Integer, '9__542_72l'),
    +            (Text, ' '),
    +            (Number.Hex, '0xbEEf'),
    +            (Text, ' '),
    +            (Number.Hex, '0X9_A'),
    +            (Text, ' '),
    +            (Number.Oct, '0_35'),
    +            (Text, ' '),
    +            (Number.Oct, '01'),
    +            (Text, ' '),
    +            (Number.Bin, '0b0___101_0'),
    +            (Text, ' '),
    +            (Number.Float, '0.'),
    +            (Text, ' '),
    +            (Number.Float, '.7_17F'),
    +            (Text, ' '),
    +            (Number.Float, '3e-1_3d'),
    +            (Text, ' '),
    +            (Number.Float, '1f'),
    +            (Text, ' '),
    +            (Number.Float, '6_01.9e+3'),
    +            (Text, ' '),
    +            (Number.Float, '0x.1Fp3'),
    +            (Text, ' '),
    +            (Number.Float, '0XEP8D'),
    +            (Text, '\n')
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    diff --git a/vendor/pygments/tests/test_latex_formatter.py b/vendor/pygments/tests/test_latex_formatter.py
    index 06a74c3..05a6c3a 100644
    --- a/vendor/pygments/tests/test_latex_formatter.py
    +++ b/vendor/pygments/tests/test_latex_formatter.py
    @@ -3,10 +3,12 @@
         Pygments LaTeX formatter tests
         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     
    -    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
         :license: BSD, see LICENSE for details.
     """
     
    +from __future__ import print_function
    +
     import os
     import unittest
     import tempfile
    @@ -22,11 +24,8 @@ TESTFILE, TESTDIR = support.location(__file__)
     class LatexFormatterTest(unittest.TestCase):
     
         def test_valid_output(self):
    -        fp = open(TESTFILE)
    -        try:
    +        with open(TESTFILE) as fp:
                 tokensource = list(PythonLexer().get_tokens(fp.read()))
    -        finally:
    -            fp.close()
             fmt = LatexFormatter(full=True, encoding='latin1')
     
             handle, pathname = tempfile.mkstemp('.tex')
    @@ -45,10 +44,10 @@ class LatexFormatterTest(unittest.TestCase):
                 po.stdout.close()
             except OSError:
                 # latex not available
    -            pass
    +            raise support.SkipTest
             else:
                 if ret:
    -                print output
    +                print(output)
                 self.assertFalse(ret, 'latex run reported errors')
     
             os.unlink(pathname)
    diff --git a/vendor/pygments/tests/test_lexers_other.py b/vendor/pygments/tests/test_lexers_other.py
    new file mode 100644
    index 0000000..bb667c0
    --- /dev/null
    +++ b/vendor/pygments/tests/test_lexers_other.py
    @@ -0,0 +1,80 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Tests for other lexers
    +    ~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +import glob
    +import os
    +import unittest
    +
    +from pygments.lexers import guess_lexer
    +from pygments.lexers.scripting import EasytrieveLexer, JclLexer, RexxLexer
    +
    +def _exampleFilePath(filename):
    +    return os.path.join(os.path.dirname(__file__), 'examplefiles', filename)
    +
    +
    +class AnalyseTextTest(unittest.TestCase):
    +    def _testCanRecognizeAndGuessExampleFiles(self, lexer):
    +        assert lexer is not None
    +
    +        for pattern in lexer.filenames:
    +            exampleFilesPattern = _exampleFilePath(pattern)
    +            for exampleFilePath in glob.glob(exampleFilesPattern):
    +                with open(exampleFilePath, 'rb') as fp:
    +                    text = fp.read().decode('utf-8')
    +                probability = lexer.analyse_text(text)
    +                self.assertTrue(probability > 0,
    +                    '%s must recognize %r' % (
    +                    lexer.name, exampleFilePath))
    +                guessedLexer = guess_lexer(text)
    +                self.assertEqual(guessedLexer.name, lexer.name)
    +
    +    def testCanRecognizeAndGuessExampleFiles(self):
    +        LEXERS_TO_TEST = [
    +            EasytrieveLexer,
    +            JclLexer,
    +            RexxLexer,
    +        ]
    +        for lexerToTest in LEXERS_TO_TEST:
    +            self._testCanRecognizeAndGuessExampleFiles(lexerToTest)
    +
    +
    +class EasyTrieveLexerTest(unittest.TestCase):
    +    def testCanGuessFromText(self):
    +        self.assertLess(0, EasytrieveLexer.analyse_text('MACRO'))
    +        self.assertLess(0, EasytrieveLexer.analyse_text('\nMACRO'))
    +        self.assertLess(0, EasytrieveLexer.analyse_text(' \nMACRO'))
    +        self.assertLess(0, EasytrieveLexer.analyse_text(' \n MACRO'))
    +        self.assertLess(0, EasytrieveLexer.analyse_text('*\nMACRO'))
    +        self.assertLess(0, EasytrieveLexer.analyse_text(
    +            '*\n *\n\n \n*\n MACRO'))
    +
    +
    +class RexxLexerTest(unittest.TestCase):
    +    def testCanGuessFromText(self):
    +        self.assertAlmostEqual(0.01,
    +            RexxLexer.analyse_text('/* */'))
    +        self.assertAlmostEqual(1.0,
    +            RexxLexer.analyse_text('''/* Rexx */
    +                say "hello world"'''))
    +        val = RexxLexer.analyse_text('/* */\n'
    +                'hello:pRoceduRe\n'
    +                '  say "hello world"')
    +        self.assertTrue(val > 0.5, val)
    +        val = RexxLexer.analyse_text('''/* */
    +                if 1 > 0 then do
    +                    say "ok"
    +                end
    +                else do
    +                    say "huh?"
    +                end''')
    +        self.assertTrue(val > 0.2, val)
    +        val = RexxLexer.analyse_text('''/* */
    +                greeting = "hello world!"
    +                parse value greeting "hello" name "!"
    +                say name''')
    +        self.assertTrue(val > 0.2, val)
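These guesses rest on each lexer's analyse_text() score, which guess_lexer() compares across all registered lexers. A minimal sketch (sample text is illustrative):

    from pygments.lexers import guess_lexer
    from pygments.lexers.scripting import RexxLexer

    sample = u'/* Rexx */\nsay "hello world"\n'
    print(RexxLexer.analyse_text(sample))  # 1.0 for a "/* Rexx */" header, per the test
    print(guess_lexer(sample).name)        # expected to resolve to the Rexx lexer
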
    diff --git a/vendor/pygments/tests/test_objectiveclexer.py b/vendor/pygments/tests/test_objectiveclexer.py
    new file mode 100644
    index 0000000..90bd680
    --- /dev/null
    +++ b/vendor/pygments/tests/test_objectiveclexer.py
    @@ -0,0 +1,81 @@
    +# -*- coding: utf-8 -*-
    +"""
+    Basic ObjectiveCLexer Test
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +import os
    +
    +from pygments.token import Token
    +from pygments.lexers import ObjectiveCLexer
    +
    +
    +class ObjectiveCLexerTest(unittest.TestCase):
    +
    +    def setUp(self):
    +        self.lexer = ObjectiveCLexer()
    +
    +    def testLiteralNumberInt(self):
    +        fragment = u'@(1);\n'
    +        expected = [
    +            (Token.Literal, u'@('),
    +            (Token.Literal.Number.Integer, u'1'),
    +            (Token.Literal, u')'),
    +            (Token.Punctuation, u';'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    +
    +    def testLiteralNumberExpression(self):
    +        fragment = u'@(1+2);\n'
    +        expected = [
    +            (Token.Literal, u'@('),
    +            (Token.Literal.Number.Integer, u'1'),
    +            (Token.Operator, u'+'),
    +            (Token.Literal.Number.Integer, u'2'),
    +            (Token.Literal, u')'),
    +            (Token.Punctuation, u';'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    +
    +    def testLiteralNumberNestedExpression(self):
    +        fragment = u'@(1+(2+3));\n'
    +        expected = [
    +            (Token.Literal, u'@('),
    +            (Token.Literal.Number.Integer, u'1'),
    +            (Token.Operator, u'+'),
    +            (Token.Punctuation, u'('),
    +            (Token.Literal.Number.Integer, u'2'),
    +            (Token.Operator, u'+'),
    +            (Token.Literal.Number.Integer, u'3'),
    +            (Token.Punctuation, u')'),
    +            (Token.Literal, u')'),
    +            (Token.Punctuation, u';'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    +
    +    def testLiteralNumberBool(self):
    +        fragment = u'@NO;\n'
    +        expected = [
    +            (Token.Literal.Number, u'@NO'),
    +            (Token.Punctuation, u';'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    +
+    def testLiteralNumberBoolExpression(self):
    +        fragment = u'@(YES);\n'
    +        expected = [
    +            (Token.Literal, u'@('),
    +            (Token.Name.Builtin, u'YES'),
    +            (Token.Literal, u')'),
    +            (Token.Punctuation, u';'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    diff --git a/vendor/pygments/tests/test_perllexer.py b/vendor/pygments/tests/test_perllexer.py
    index 315b20e..26b2d0a 100644
    --- a/vendor/pygments/tests/test_perllexer.py
    +++ b/vendor/pygments/tests/test_perllexer.py
    @@ -3,7 +3,7 @@
         Pygments regex lexer tests
         ~~~~~~~~~~~~~~~~~~~~~~~~~~
     
    -    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
         :license: BSD, see LICENSE for details.
     """
     
    @@ -11,7 +11,7 @@ import time
     import unittest
     
     from pygments.token import String
    -from pygments.lexers.agile import PerlLexer
    +from pygments.lexers.perl import PerlLexer
     
     
     class RunawayRegexTest(unittest.TestCase):
    diff --git a/vendor/pygments/tests/test_qbasiclexer.py b/vendor/pygments/tests/test_qbasiclexer.py
    new file mode 100644
    index 0000000..8b790ce
    --- /dev/null
    +++ b/vendor/pygments/tests/test_qbasiclexer.py
    @@ -0,0 +1,43 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Tests for QBasic
    +    ~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import glob
    +import os
    +import unittest
    +
    +from pygments.token import Token
    +from pygments.lexers.basic import QBasicLexer
    +
    +
    +class QBasicTest(unittest.TestCase):
    +    def setUp(self):
    +        self.lexer = QBasicLexer()
    +        self.maxDiff = None
    +
    +    def testKeywordsWithDollar(self):
    +        fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
    +        expected = [
    +            (Token.Keyword.Declaration, u'DIM'),
    +            (Token.Text.Whitespace, u' '),
    +            (Token.Name.Variable.Global, u'x'),
    +            (Token.Text, u'\n'),
    +            (Token.Name.Variable.Global, u'x'),
    +            (Token.Text.Whitespace, u' '),
    +            (Token.Operator, u'='),
    +            (Token.Text.Whitespace, u' '),
    +            (Token.Keyword.Reserved, u'RIGHT$'),
    +            (Token.Punctuation, u'('),
    +            (Token.Literal.String.Double, u'"abc"'),
    +            (Token.Punctuation, u','),
    +            (Token.Text.Whitespace, u' '),
    +            (Token.Literal.Number.Integer.Long, u'1'),
    +            (Token.Punctuation, u')'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    diff --git a/vendor/pygments/tests/test_regexlexer.py b/vendor/pygments/tests/test_regexlexer.py
    index 28d9689..eb25be6 100644
    --- a/vendor/pygments/tests/test_regexlexer.py
    +++ b/vendor/pygments/tests/test_regexlexer.py
    @@ -3,7 +3,7 @@
         Pygments regex lexer tests
         ~~~~~~~~~~~~~~~~~~~~~~~~~~
     
    -    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
         :license: BSD, see LICENSE for details.
     """
     
    @@ -12,6 +12,7 @@ import unittest
     from pygments.token import Text
     from pygments.lexer import RegexLexer
     from pygments.lexer import bygroups
    +from pygments.lexer import default
     
     
     class TestLexer(RegexLexer):
    @@ -20,6 +21,7 @@ class TestLexer(RegexLexer):
             'root': [
                 ('a', Text.Root, 'rag'),
                 ('e', Text.Root),
    +            default(('beer', 'beer'))
             ],
             'beer': [
                 ('d', Text.Beer, ('#pop', '#pop')),
    @@ -45,3 +47,8 @@ class TupleTransTest(unittest.TestCase):
             self.assertEqual(toks,
                [(0, Text.Root, 'a'), (1, Text, u'\n'),
                 (2, Text.Root, 'e')])
    +
    +    def test_default(self):
    +        lx = TestLexer()
    +        toks = list(lx.get_tokens_unprocessed('d'))
    +        self.assertEqual(toks, [(0, Text.Beer, 'd')])
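The new test_default case exercises default(), which switches state without consuming any input when none of the preceding rules match. A minimal sketch with a single pushed state (illustrative, not from this patch):

    from pygments.lexer import RegexLexer, default
    from pygments.token import Text

    class DefaultingLexer(RegexLexer):
        tokens = {
            'root': [
                ('a', Text.Root),
                default('other'),   # nothing matched here -> enter 'other'
            ],
            'other': [
                ('d', Text.Beer, '#pop'),
            ],
        }

    # 'd' is not matched in 'root', so default() routes it to 'other'.
    print(list(DefaultingLexer().get_tokens_unprocessed(u'd')))
    # [(0, Token.Text.Beer, u'd')]
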
    diff --git a/vendor/pygments/tests/test_regexopt.py b/vendor/pygments/tests/test_regexopt.py
    new file mode 100644
    index 0000000..dd56a44
    --- /dev/null
    +++ b/vendor/pygments/tests/test_regexopt.py
    @@ -0,0 +1,76 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Tests for pygments.regexopt
    +    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import re
    +import random
    +import unittest
    +import itertools
    +
    +from pygments.regexopt import regex_opt
    +
    +ALPHABET = ['a', 'b', 'c', 'd', 'e']
    +
    +try:
    +    from itertools import combinations_with_replacement
    +    N_TRIES = 15
    +except ImportError:
    +    # Python 2.6
    +    def combinations_with_replacement(iterable, r):
    +        pool = tuple(iterable)
    +        n = len(pool)
    +        for indices in itertools.product(range(n), repeat=r):
    +            if sorted(indices) == list(indices):
    +                yield tuple(pool[i] for i in indices)
    +    N_TRIES = 9
    +
    +
    +class RegexOptTestCase(unittest.TestCase):
    +
    +    def generate_keywordlist(self, length):
    +        return [''.join(p) for p in
    +                combinations_with_replacement(ALPHABET, length)]
    +
    +    def test_randomly(self):
    +        # generate a list of all possible keywords of a certain length using
    +        # a restricted alphabet, then choose some to match and make sure only
    +        # those do
    +        for n in range(3, N_TRIES):
    +            kwlist = self.generate_keywordlist(n)
    +            to_match = random.sample(kwlist,
    +                                     random.randint(1, len(kwlist) - 1))
    +            no_match = set(kwlist) - set(to_match)
    +            rex = re.compile(regex_opt(to_match))
    +            for w in to_match:
    +                self.assertTrue(rex.match(w))
    +            for w in no_match:
    +                self.assertFalse(rex.match(w))
    +
    +    def test_prefix(self):
    +        opt = regex_opt(('a', 'b'), prefix=r':{1,2}')
    +        print(opt)
    +        rex = re.compile(opt)
    +        self.assertFalse(rex.match('a'))
    +        self.assertTrue(rex.match('::a'))
    +        self.assertFalse(rex.match(':::')) # fullmatch
    +
    +    def test_suffix(self):
    +        opt = regex_opt(('a', 'b'), suffix=r':{1,2}')
    +        print(opt)
    +        rex = re.compile(opt)
    +        self.assertFalse(rex.match('a'))
    +        self.assertTrue(rex.match('a::'))
    +        self.assertFalse(rex.match(':::')) # fullmatch
    +
    +    def test_suffix_opt(self):
    +        # test that detected suffixes remain sorted.
    +        opt = regex_opt(('afoo', 'abfoo'))
    +        print(opt)
    +        rex = re.compile(opt)
    +        m = rex.match('abfoo')
    +        self.assertEqual(5, m.end())
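regex_opt() collapses a list of literal keywords into one optimized alternation, optionally bracketed by a prefix and suffix as the tests above do. A minimal sketch:

    import re
    from pygments.regexopt import regex_opt

    pattern = regex_opt(['if', 'in', 'import'], prefix=r'\b', suffix=r'\b')
    rex = re.compile(pattern)
    print(bool(rex.match('import')))    # True
    print(bool(rex.match('imported')))  # False: the \b suffix needs a word boundary
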
    diff --git a/vendor/pygments/tests/test_rtf_formatter.py b/vendor/pygments/tests/test_rtf_formatter.py
    new file mode 100644
    index 0000000..2578474
    --- /dev/null
    +++ b/vendor/pygments/tests/test_rtf_formatter.py
    @@ -0,0 +1,109 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Pygments RTF formatter tests
    +    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +from string_asserts import StringTests
    +
    +from pygments.util import StringIO
    +from pygments.formatters import RtfFormatter
    +from pygments.lexers.special import TextLexer
    +
    +class RtfFormatterTest(StringTests, unittest.TestCase):
    +    foot = (r'\par' '\n' r'}')
    +
    +    def _escape(self, string):
    +        return(string.replace("\n", r"\n"))
    +
    +    def _build_message(self, *args, **kwargs):
    +        string = kwargs.get('string', None)
    +        t = self._escape(kwargs.get('t', ''))
    +        expected = self._escape(kwargs.get('expected', ''))
    +        result = self._escape(kwargs.get('result', ''))
    +
    +        if string is None:
    +            string = (u"The expected output of '{t}'\n"
    +                      u"\t\tShould be '{expected}'\n"
    +                      u"\t\tActually outputs '{result}'\n"
    +                      u"\t(WARNING: Partial Output of Result!)")
    +
    +        end = -(len(self._escape(self.foot)))
    +        start = end-len(expected)
    +
    +        return string.format(t=t,
    +                             result = result[start:end],
    +                             expected = expected)
    +
    +    def format_rtf(self, t):
    +        tokensource = list(TextLexer().get_tokens(t))
    +        fmt = RtfFormatter()
    +        buf = StringIO()
    +        fmt.format(tokensource, buf)
    +        result = buf.getvalue()
    +        buf.close()
    +        return result
    +
    +    def test_rtf_header(self):
    +        t = u''
    +        result = self.format_rtf(t)
    +        expected = r'{\rtf1\ansi\uc0'
    +        msg = (u"RTF documents are expected to start with '{expected}'\n"
+               u"\t\tStarts instead with '{result}'\n"
    +               u"\t(WARNING: Partial Output of Result!)".format(
    +                   expected = expected,
    +                   result = result[:len(expected)]))
    +        self.assertStartsWith(result, expected, msg)
    +
    +    def test_rtf_footer(self):
    +        t = u''
    +        result = self.format_rtf(t)
    +        expected = self.foot
    +        msg = (u"RTF documents are expected to end with '{expected}'\n"
+               u"\t\tEnds instead with '{result}'\n"
    +               u"\t(WARNING: Partial Output of Result!)".format(
    +                   expected = self._escape(expected),
    +                   result = self._escape(result[-len(expected):])))
    +        self.assertEndsWith(result, expected, msg)
    +
    +    def test_ascii_characters(self):
    +        t = u'a b c d ~'
    +        result = self.format_rtf(t)
    +        expected = (r'a b c d ~')
    +        if not result.endswith(self.foot):
    +            return(unittest.skip('RTF Footer incorrect'))
    +        msg = self._build_message(t=t, result=result, expected=expected)
    +        self.assertEndsWith(result, expected+self.foot, msg)
    +
    +    def test_escape_characters(self):
    +        t = u'\ {{'
    +        result = self.format_rtf(t)
    +        expected = (r'\\ \{\{')
    +        if not result.endswith(self.foot):
    +            return(unittest.skip('RTF Footer incorrect'))
    +        msg = self._build_message(t=t, result=result, expected=expected)
    +        self.assertEndsWith(result, expected+self.foot, msg)
    +
    +    def test_single_characters(self):
    +        t = u'â € ¤ каждой'
    +        result = self.format_rtf(t)
    +        expected = (r'{\u226} {\u8364} {\u164} '
    +                    r'{\u1082}{\u1072}{\u1078}{\u1076}{\u1086}{\u1081}')
    +        if not result.endswith(self.foot):
    +            return(unittest.skip('RTF Footer incorrect'))
    +        msg = self._build_message(t=t, result=result, expected=expected)
    +        self.assertEndsWith(result, expected+self.foot, msg)
    +
    +    def test_double_characters(self):
    +        t = u'က 힣 ↕ ↕︎ 鼖'
    +        result = self.format_rtf(t)
    +        expected = (r'{\u4096} {\u55203} {\u8597} '
    +                    r'{\u8597}{\u65038} {\u55422}{\u56859}')
    +        if not result.endswith(self.foot):
    +            return(unittest.skip('RTF Footer incorrect'))
    +        msg = self._build_message(t=t, result=result, expected=expected)
    +        self.assertEndsWith(result, expected+self.foot, msg)
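The assertions above pin down the RTF header, the \par} footer, and the {\uNNNN} escapes used for non-ASCII characters. A minimal end-to-end sketch (not part of the patch):

    from pygments import highlight
    from pygments.lexers.special import TextLexer
    from pygments.formatters import RtfFormatter

    rtf = highlight(u'a \u043a', TextLexer(), RtfFormatter())
    print(rtf.startswith(r'{\rtf1\ansi\uc0'))  # True, per test_rtf_header
    print(r'{\u1082}' in rtf)                  # True: U+043A is emitted as {\u1082}
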
    diff --git a/vendor/pygments/tests/test_ruby.py b/vendor/pygments/tests/test_ruby.py
    new file mode 100644
    index 0000000..ab210ba
    --- /dev/null
    +++ b/vendor/pygments/tests/test_ruby.py
    @@ -0,0 +1,145 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Basic RubyLexer Test
    +    ~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +
    +from pygments.token import Operator, Number, Text, Token
    +from pygments.lexers import RubyLexer
    +
    +
    +class RubyTest(unittest.TestCase):
    +
    +    def setUp(self):
    +        self.lexer = RubyLexer()
    +        self.maxDiff = None
    +
    +    def testRangeSyntax1(self):
    +        fragment = u'1..3\n'
    +        tokens = [
    +            (Number.Integer, u'1'),
    +            (Operator, u'..'),
    +            (Number.Integer, u'3'),
    +            (Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testRangeSyntax2(self):
    +        fragment = u'1...3\n'
    +        tokens = [
    +            (Number.Integer, u'1'),
    +            (Operator, u'...'),
    +            (Number.Integer, u'3'),
    +            (Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testRangeSyntax3(self):
    +        fragment = u'1 .. 3\n'
    +        tokens = [
    +            (Number.Integer, u'1'),
    +            (Text, u' '),
    +            (Operator, u'..'),
    +            (Text, u' '),
    +            (Number.Integer, u'3'),
    +            (Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testInterpolationNestedCurly(self):
    +        fragment = (
    +            u'"A#{ (3..5).group_by { |x| x/2}.map '
    +            u'do |k,v| "#{k}" end.join }" + "Z"\n')
    +
    +        tokens = [
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Literal.String.Double, u'A'),
    +            (Token.Literal.String.Interpol, u'#{'),
    +            (Token.Text, u' '),
    +            (Token.Punctuation, u'('),
    +            (Token.Literal.Number.Integer, u'3'),
    +            (Token.Operator, u'..'),
    +            (Token.Literal.Number.Integer, u'5'),
    +            (Token.Punctuation, u')'),
    +            (Token.Operator, u'.'),
    +            (Token.Name, u'group_by'),
    +            (Token.Text, u' '),
    +            (Token.Literal.String.Interpol, u'{'),
    +            (Token.Text, u' '),
    +            (Token.Operator, u'|'),
    +            (Token.Name, u'x'),
    +            (Token.Operator, u'|'),
    +            (Token.Text, u' '),
    +            (Token.Name, u'x'),
    +            (Token.Operator, u'/'),
    +            (Token.Literal.Number.Integer, u'2'),
    +            (Token.Literal.String.Interpol, u'}'),
    +            (Token.Operator, u'.'),
    +            (Token.Name, u'map'),
    +            (Token.Text, u' '),
    +            (Token.Keyword, u'do'),
    +            (Token.Text, u' '),
    +            (Token.Operator, u'|'),
    +            (Token.Name, u'k'),
    +            (Token.Punctuation, u','),
    +            (Token.Name, u'v'),
    +            (Token.Operator, u'|'),
    +            (Token.Text, u' '),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Literal.String.Interpol, u'#{'),
    +            (Token.Name, u'k'),
    +            (Token.Literal.String.Interpol, u'}'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u' '),
    +            (Token.Keyword, u'end'),
    +            (Token.Operator, u'.'),
    +            (Token.Name, u'join'),
    +            (Token.Text, u' '),
    +            (Token.Literal.String.Interpol, u'}'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u' '),
    +            (Token.Operator, u'+'),
    +            (Token.Text, u' '),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Literal.String.Double, u'Z'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testOperatorMethods(self):
    +        fragment = u'x.==4\n'
    +        tokens = [
    +            (Token.Name, u'x'),
    +            (Token.Operator, u'.'),
    +            (Token.Name.Operator, u'=='),
    +            (Token.Literal.Number.Integer, u'4'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testEscapedBracestring(self):
    +        fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
    +        tokens = [
    +            (Token.Name, u'str'),
    +            (Token.Operator, u'.'),
    +            (Token.Name, u'gsub'),
    +            (Token.Punctuation, u'('),
    +            (Token.Literal.String.Regex, u'%r{'),
    +            (Token.Literal.String.Regex, u'\\\\'),
    +            (Token.Literal.String.Regex, u'\\\\'),
    +            (Token.Literal.String.Regex, u'}'),
    +            (Token.Punctuation, u','),
    +            (Token.Text, u' '),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Literal.String.Double, u'/'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Punctuation, u')'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/vendor/pygments/tests/test_shell.py b/vendor/pygments/tests/test_shell.py
    new file mode 100644
    index 0000000..4eb5a15
    --- /dev/null
    +++ b/vendor/pygments/tests/test_shell.py
    @@ -0,0 +1,89 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Basic Shell Tests
    +    ~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +
    +from pygments.token import Token
    +from pygments.lexers import BashLexer
    +
    +
    +class BashTest(unittest.TestCase):
    +
    +    def setUp(self):
    +        self.lexer = BashLexer()
    +        self.maxDiff = None
    +
    +    def testCurlyNoEscapeAndQuotes(self):
    +        fragment = u'echo "${a//["b"]/}"\n'
    +        tokens = [
    +            (Token.Name.Builtin, u'echo'),
    +            (Token.Text, u' '),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.String.Interpol, u'${'),
    +            (Token.Name.Variable, u'a'),
    +            (Token.Punctuation, u'//['),
    +            (Token.Literal.String.Double, u'"b"'),
    +            (Token.Punctuation, u']/'),
    +            (Token.String.Interpol, u'}'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testCurlyWithEscape(self):
    +        fragment = u'echo ${a//[\\"]/}\n'
    +        tokens = [
    +            (Token.Name.Builtin, u'echo'),
    +            (Token.Text, u' '),
    +            (Token.String.Interpol, u'${'),
    +            (Token.Name.Variable, u'a'),
    +            (Token.Punctuation, u'//['),
    +            (Token.Literal.String.Escape, u'\\"'),
    +            (Token.Punctuation, u']/'),
    +            (Token.String.Interpol, u'}'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testParsedSingle(self):
    +        fragment = u"a=$'abc\\''\n"
    +        tokens = [
    +            (Token.Name.Variable, u'a'),
    +            (Token.Operator, u'='),
    +            (Token.Literal.String.Single, u"$'abc\\''"),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    +    def testShortVariableNames(self):
    +        fragment = u'x="$"\ny="$_"\nz="$abc"\n'
    +        tokens = [
    +            # single lone $
    +            (Token.Name.Variable, u'x'),
    +            (Token.Operator, u'='),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u'$'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u'\n'),
    +            # single letter shell var
    +            (Token.Name.Variable, u'y'),
    +            (Token.Operator, u'='),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Name.Variable, u'$_'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u'\n'),
    +            # multi-letter user var
    +            (Token.Name.Variable, u'z'),
    +            (Token.Operator, u'='),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Name.Variable, u'$abc'),
    +            (Token.Literal.String.Double, u'"'),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/vendor/pygments/tests/test_smarty.py b/vendor/pygments/tests/test_smarty.py
    new file mode 100644
    index 0000000..450e4e6
    --- /dev/null
    +++ b/vendor/pygments/tests/test_smarty.py
    @@ -0,0 +1,40 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Basic SmartyLexer Test
+    ~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +
    +from pygments.token import Operator, Number, Text, Token
    +from pygments.lexers import SmartyLexer
    +
    +
    +class SmartyTest(unittest.TestCase):
    +
    +    def setUp(self):
    +        self.lexer = SmartyLexer()
    +
    +    def testNestedCurly(self):
    +        fragment = u'{templateFunction param={anotherFunction} param2=$something}\n'
    +        tokens = [
    +            (Token.Comment.Preproc, u'{'),
    +            (Token.Name.Function, u'templateFunction'),
    +            (Token.Text, u' '),
    +            (Token.Name.Attribute, u'param'),
    +            (Token.Operator, u'='),
    +            (Token.Comment.Preproc, u'{'),
    +            (Token.Name.Attribute, u'anotherFunction'),
    +            (Token.Comment.Preproc, u'}'),
    +            (Token.Text, u' '),
    +            (Token.Name.Attribute, u'param2'),
    +            (Token.Operator, u'='),
    +            (Token.Name.Variable, u'$something'),
    +            (Token.Comment.Preproc, u'}'),
    +            (Token.Other, u'\n'),
    +        ]
    +        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    +
    diff --git a/vendor/pygments/tests/test_string_asserts.py b/vendor/pygments/tests/test_string_asserts.py
    new file mode 100644
    index 0000000..ba7b37f
    --- /dev/null
    +++ b/vendor/pygments/tests/test_string_asserts.py
    @@ -0,0 +1,35 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Pygments string assert utility tests
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +from string_asserts import StringTests
    +
    +class TestStringTests(StringTests, unittest.TestCase):
    +
    +    def test_startswith_correct(self):
    +        self.assertStartsWith("AAA", "A")
    +
    +    # @unittest.expectedFailure not supported by nose
    +    def test_startswith_incorrect(self):
    +        self.assertRaises(AssertionError, self.assertStartsWith, "AAA", "B")
    +
    +    # @unittest.expectedFailure not supported by nose
    +    def test_startswith_short(self):
    +        self.assertRaises(AssertionError, self.assertStartsWith, "A", "AA")
    +
    +    def test_endswith_correct(self):
    +        self.assertEndsWith("AAA", "A")
    +
    +    # @unittest.expectedFailure not supported by nose
    +    def test_endswith_incorrect(self):
    +        self.assertRaises(AssertionError, self.assertEndsWith, "AAA", "B")
    +
    +    # @unittest.expectedFailure not supported by nose
    +    def test_endswith_short(self):
    +        self.assertRaises(AssertionError, self.assertEndsWith, "A", "AA")
    diff --git a/vendor/pygments/tests/test_terminal_formatter.py b/vendor/pygments/tests/test_terminal_formatter.py
    new file mode 100644
    index 0000000..07337cd
    --- /dev/null
    +++ b/vendor/pygments/tests/test_terminal_formatter.py
    @@ -0,0 +1,51 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Pygments terminal formatter tests
    +    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +from __future__ import print_function
    +
    +import unittest
    +import re
    +
    +from pygments.util import StringIO
    +from pygments.lexers.sql import PlPgsqlLexer
    +from pygments.formatters import TerminalFormatter
    +
    +DEMO_TEXT = '''\
    +-- comment
    +select
    +* from bar;
    +'''
    +DEMO_LEXER = PlPgsqlLexer
    +DEMO_TOKENS = list(DEMO_LEXER().get_tokens(DEMO_TEXT))
    +
    +ANSI_RE = re.compile(r'\x1b[\w\W]*?m')
    +
    +def strip_ansi(x):
    +    return ANSI_RE.sub('', x)
    +
    +class TerminalFormatterTest(unittest.TestCase):
    +    def test_reasonable_output(self):
    +        out = StringIO()
    +        TerminalFormatter().format(DEMO_TOKENS, out)
    +        plain = strip_ansi(out.getvalue())
    +        self.assertEqual(DEMO_TEXT.count('\n'), plain.count('\n'))
    +        print(repr(plain))
    +
    +        for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
    +            self.assertEqual(a, b)
    +
    +    def test_reasonable_output_lineno(self):
    +        out = StringIO()
    +        TerminalFormatter(linenos=True).format(DEMO_TOKENS, out)
    +        plain = strip_ansi(out.getvalue())
    +        self.assertEqual(DEMO_TEXT.count('\n') + 1, plain.count('\n'))
    +        print(repr(plain))
    +
    +        for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
    +            self.assertTrue(a in b)
    diff --git a/vendor/pygments/tests/test_textfmts.py b/vendor/pygments/tests/test_textfmts.py
    new file mode 100644
    index 0000000..d355ab6
    --- /dev/null
    +++ b/vendor/pygments/tests/test_textfmts.py
    @@ -0,0 +1,41 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Basic Tests for textfmts
    +    ~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import unittest
    +
    +from pygments.token import Operator, Number, Text, Token
    +from pygments.lexers.textfmts import HttpLexer
    +
    +
+class HttpLexerTest(unittest.TestCase):
    +
    +    def setUp(self):
    +        self.lexer = HttpLexer()
    +        self.maxDiff = None
    +
    +    def testApplicationXml(self):
    +        fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n\n'
    +        tokens = [
    +            (Token.Name.Tag, u''),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(
    +            tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
    +
    +    def testApplicationCalendarXml(self):
    +        fragment = u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n\n'
    +        tokens = [
    +            (Token.Name.Tag, u''),
    +            (Token.Text, u'\n'),
    +        ]
    +        self.assertEqual(
    +            tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
    +
    diff --git a/vendor/pygments/tests/test_token.py b/vendor/pygments/tests/test_token.py
    index 6a5b00b..0c6b02b 100644
    --- a/vendor/pygments/tests/test_token.py
    +++ b/vendor/pygments/tests/test_token.py
    @@ -3,10 +3,11 @@
         Test suite for the token module
         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     
    -    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
         :license: BSD, see LICENSE for details.
     """
     
    +import copy
     import unittest
     
     from pygments import token
    @@ -36,11 +37,18 @@ class TokenTest(unittest.TestCase):
             stp = token.STANDARD_TYPES.copy()
             stp[token.Token] = '---' # Token and Text do conflict, that is okay
             t = {}
    -        for k, v in stp.iteritems():
    +        for k, v in stp.items():
                 t.setdefault(v, []).append(k)
             if len(t) == len(stp):
                 return # Okay
     
    -        for k, v in t.iteritems():
    +        for k, v in t.items():
                 if len(v) > 1:
                     self.fail("%r has more than one key: %r" % (k, v))
    +
    +    def test_copying(self):
    +        # Token instances are supposed to be singletons, so copying or even
    +        # deepcopying should return themselves
    +        t = token.String
    +        self.assertIs(t, copy.copy(t))
    +        self.assertIs(t, copy.deepcopy(t))
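The new test_copying case relies on token types being singletons: copying one hands back the same object rather than a clone. A minimal check:

    import copy
    from pygments.token import String

    print(copy.copy(String) is String)      # True
    print(copy.deepcopy(String) is String)  # True
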
    diff --git a/vendor/pygments/tests/test_unistring.py b/vendor/pygments/tests/test_unistring.py
    new file mode 100644
    index 0000000..a414347
    --- /dev/null
    +++ b/vendor/pygments/tests/test_unistring.py
    @@ -0,0 +1,48 @@
    +# -*- coding: utf-8 -*-
    +"""
    +    Test suite for the unistring module
    +    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import re
    +import unittest
    +import random
    +
    +from pygments import unistring as uni
    +from pygments.util import unichr
    +
    +
    +class UnistringTest(unittest.TestCase):
    +    def test_cats_exist_and_compilable(self):
    +        for cat in uni.cats:
    +            s = getattr(uni, cat)
    +            if s == '':  # Probably Cs on Jython
    +                continue
    +            print("%s %r" % (cat, s))
    +            re.compile('[%s]' % s)
    +
    +    def _cats_that_match(self, c):
    +        matching_cats = []
    +        for cat in uni.cats:
    +            s = getattr(uni, cat)
    +            if s == '':  # Probably Cs on Jython
    +                continue
    +            if re.compile('[%s]' % s).match(c):
    +                matching_cats.append(cat)
    +        return matching_cats
    +
    +    def test_spot_check_types(self):
    +        # Each char should match one, and precisely one, category
    +        random.seed(0)
    +        for i in range(1000):
    +            o = random.randint(0, 65535)
    +            c = unichr(o)
    +            if o > 0xd800 and o <= 0xdfff and not uni.Cs:
    +                continue  # Bah, Jython.
    +            print(hex(o))
    +            cats = self._cats_that_match(c)
    +            self.assertEqual(len(cats), 1,
    +                             "%d (%s): %s" % (o, c, cats))
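Each attribute of pygments.unistring holds the characters of one Unicode category, ready to drop into a regex character class, which is what both tests compile. A minimal sketch:

    import re
    from pygments import unistring as uni

    uppercase = re.compile(u'[%s]' % uni.Lu)  # Lu = uppercase letters
    print(bool(uppercase.match(u'\xc4')))     # True  (U+00C4)
    print(bool(uppercase.match(u'\xe4')))     # False (U+00E4)
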
    diff --git a/vendor/pygments/tests/test_using_api.py b/vendor/pygments/tests/test_using_api.py
    index bb89d1e..16d865e 100644
    --- a/vendor/pygments/tests/test_using_api.py
    +++ b/vendor/pygments/tests/test_using_api.py
    @@ -3,7 +3,7 @@
         Pygments tests for using()
         ~~~~~~~~~~~~~~~~~~~~~~~~~~
     
    -    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
         :license: BSD, see LICENSE for details.
     """
     
    diff --git a/vendor/pygments/tests/test_util.py b/vendor/pygments/tests/test_util.py
    index dbbc66c..720b384 100644
    --- a/vendor/pygments/tests/test_util.py
    +++ b/vendor/pygments/tests/test_util.py
    @@ -3,19 +3,19 @@
         Test suite for the util module
         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     
    -    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    +    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
         :license: BSD, see LICENSE for details.
     """
     
     import re
     import unittest
     
    -from pygments import util
    +from pygments import util, console
     
     
     class FakeLexer(object):
         def analyse(text):
    -        return float(text)
    +        return text
         analyse = util.make_analysator(analyse)
     
     
    @@ -40,6 +40,10 @@ class UtilTest(unittest.TestCase):
             equals(util.get_list_opt({}, 'a', '1 2'), ['1', '2'])
             raises(util.OptionError, util.get_list_opt, {}, 'a', 1)
     
    +        equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'bar'), 'bar')
    +        equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'Bar', True), 'bar')
    +        raises(util.OptionError, util.get_choice_opt, {}, 'a',
    +               ['foo', 'bar'], 'baz')
     
         def test_docstring_headline(self):
             def f1():
    @@ -55,9 +59,12 @@ class UtilTest(unittest.TestCase):
     
                 other text
                 """
    +        def f3():
    +            pass
     
    -        self.assertEqual(util.docstring_headline(f1), "docstring headline")
    -        self.assertEqual(util.docstring_headline(f2), "docstring headline")
    +        self.assertEqual(util.docstring_headline(f1), 'docstring headline')
    +        self.assertEqual(util.docstring_headline(f2), 'docstring headline')
    +        self.assertEqual(util.docstring_headline(f3), '')
     
         def test_analysator_returns_float(self):
             # If an analysator wrapped by make_analysator returns a floating point
    @@ -88,10 +95,10 @@ class UtilTest(unittest.TestCase):
         def test_analysator_type_error(self):
             # When converting the analysator's return value to a float a
             # TypeError may occur.  If that happens 0.0 is returned instead.
    -        self.assertEqual(FakeLexer.analyse(None), 0.0)
    +        self.assertEqual(FakeLexer.analyse('xxx'), 0.0)
     
         def test_shebang_matches(self):
    -        self.assertTrue(util.shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?'))
    +        self.assertTrue(util.shebang_matches('#!/usr/bin/env python\n', r'python(2\.\d)?'))
             self.assertTrue(util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?'))
             self.assertTrue(util.shebang_matches('#!/usr/bin/startsomethingwith python',
                                                  r'python(2\.\d)?'))
    @@ -106,7 +113,7 @@ class UtilTest(unittest.TestCase):
     
         def test_doctype_matches(self):
             self.assertTrue(util.doctype_matches(
    -            ' ', 'html.*'))
    +            ' ', 'html.*'))
             self.assertFalse(util.doctype_matches(
                 '  ', 'html.*'))
             self.assertTrue(util.html_doctype_matches(
    @@ -123,7 +130,7 @@ class UtilTest(unittest.TestCase):
             r = re.compile(util.unirange(0x10000, 0x20000))
             m = r.match(first_non_bmp)
             self.assertTrue(m)
    -        self.assertEquals(m.end(), len(first_non_bmp))
    +        self.assertEqual(m.end(), len(first_non_bmp))
             self.assertFalse(r.match(u'\uffff'))
             self.assertFalse(r.match(u'xxx'))
             # Tests that end is inclusive
    @@ -132,4 +139,75 @@ class UtilTest(unittest.TestCase):
             # build
             m = r.match(first_non_bmp * 2)
             self.assertTrue(m)
    -        self.assertEquals(m.end(), len(first_non_bmp) * 2)
    +        self.assertEqual(m.end(), len(first_non_bmp) * 2)
    +
    +    def test_format_lines(self):
    +        lst = ['cat', 'dog']
    +        output = util.format_lines('var', lst)
    +        d = {}
    +        exec(output, d)
    +        self.assertTrue(isinstance(d['var'], tuple))
    +        self.assertEqual(('cat', 'dog'), d['var'])
    +
    +    def test_duplicates_removed_seq_types(self):
    +        # tuple
    +        x = util.duplicates_removed(('a', 'a', 'b'))
    +        self.assertEqual(['a', 'b'], x)
    +        # list
    +        x = util.duplicates_removed(['a', 'a', 'b'])
    +        self.assertEqual(['a', 'b'], x)
    +        # iterator
    +        x = util.duplicates_removed(iter(('a', 'a', 'b')))
    +        self.assertEqual(['a', 'b'], x)
    +
    +    def test_duplicates_removed_nonconsecutive(self):
    +        # keeps first
    +        x = util.duplicates_removed(('a', 'b', 'a'))
    +        self.assertEqual(['a', 'b'], x)
    +
    +    def test_guess_decode(self):
    +        # UTF-8 should be decoded as UTF-8
    +        s = util.guess_decode(u'\xff'.encode('utf-8'))
    +        self.assertEqual(s, (u'\xff', 'utf-8'))
    +
    +        # otherwise, it could be latin1 or the locale encoding...
    +        import locale
    +        s = util.guess_decode(b'\xff')
    +        self.assertTrue(s[1] in ('latin1', locale.getpreferredencoding()))
    +
    +    def test_guess_decode_from_terminal(self):
    +        class Term:
    +            encoding = 'utf-7'
    +
    +        s = util.guess_decode_from_terminal(u'\xff'.encode('utf-7'), Term)
    +        self.assertEqual(s, (u'\xff', 'utf-7'))
    +
    +        s = util.guess_decode_from_terminal(u'\xff'.encode('utf-8'), Term)
    +        self.assertEqual(s, (u'\xff', 'utf-8'))
    +
    +    def test_add_metaclass(self):
    +        class Meta(type):
    +            pass
    +
    +        @util.add_metaclass(Meta)
    +        class Cls:
    +            pass
    +
    +        self.assertEqual(type(Cls), Meta)
    +
    +
    +class ConsoleTest(unittest.TestCase):
    +
    +    def test_ansiformat(self):
    +        f = console.ansiformat
    +        c = console.codes
    +        all_attrs = f('+*_blue_*+', 'text')
    +        self.assertTrue(c['blue'] in all_attrs and c['blink'] in all_attrs
    +                        and c['bold'] in all_attrs and c['underline'] in all_attrs
    +                        and c['reset'] in all_attrs)
    +        self.assertRaises(KeyError, f, '*mauve*', 'text')
    +
    +    def test_functions(self):
    +        self.assertEqual(console.reset_color(), console.codes['reset'])
    +        self.assertEqual(console.colorize('blue', 'text'),
    +                         console.codes['blue'] + 'text' + console.codes['reset'])
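
The added ConsoleTest exercises the ANSI helpers in pygments.console: colorize() brackets text with a colour code and a reset, and ansiformat() layers attributes around a colour name ('*' for bold, '_' for underline, '+' for blink, per the test's '+*_blue_*+' input). A small usage sketch under those assumptions (output depends on the terminal):

    # Sketch: the two console helpers covered by ConsoleTest.
    from pygments import console

    # Colour code + text + reset code.
    print(console.colorize('blue', 'vendored Pygments updated'))

    # Bold, underlined, blinking blue text.
    print(console.ansiformat('+*_blue_*+', 'all attributes at once'))
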
    diff --git a/vendor/pygments/tox.ini b/vendor/pygments/tox.ini
    new file mode 100644
    index 0000000..8a33f99
    --- /dev/null
    +++ b/vendor/pygments/tox.ini
    @@ -0,0 +1,7 @@
    +[tox]
    +envlist = py26, py27, py33, py34
    +[testenv]
    +deps =
    +    nose
    +    coverage
    +commands = python -d tests/run.py {posargs}
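
The new tox.ini runs the test suite under each interpreter in envlist with nose and coverage installed. For a single interpreter the command line above can be approximated without tox; a rough, hypothetical equivalent run from vendor/pygments (assuming nose is installed, per deps):

    # Sketch: run the suite directly, mirroring the tox `commands` entry.
    import subprocess
    import sys

    subprocess.check_call([sys.executable, '-d', 'tests/run.py'])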