"""
    pygments.formatters.other
    ~~~~~~~~~~~~~~~~~~~~~~~~~

    Other formatters: NullFormatter, RawTokenFormatter.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pygments.formatter import Formatter
from pygments.util import get_choice_opt
from pygments.token import Token
from pygments.console import colorize

__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']


class NullFormatter(Formatter):
    """
    Output the text unchanged without any formatting.
    """
    name = 'Text only'
    aliases = ['text', 'null']
    filenames = ['*.txt']

    def format(self, tokensource, outfile):
        enc = self.encoding
        for ttype, value in tokensource:
            if enc:
                outfile.write(value.encode(enc))
            else:
                outfile.write(value)


class RawTokenFormatter(Formatter):
    r"""
    Format tokens as a raw representation for storing token streams.

    The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
    be converted to a token stream with the `RawTokenLexer`, described in the
    :doc:`lexer list <lexers>`.

    Only two options are accepted:

    `compress`
        If set to ``'gz'`` or ``'bz2'``, compress the output with the given
        compression algorithm after encoding (default: ``''``).
    `error_color`
        If set to a color name, highlight error tokens using that color.  If
        set but with no value, defaults to ``'red'``.

        .. versionadded:: 0.11

    """
    name = 'Raw tokens'
    aliases = ['raw', 'tokens']
    filenames = ['*.raw']

    unicodeoutput = False

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        # We ignore self.encoding if it is set, since it gets set for lexer
        # and formatter if given with -Oencoding on the command line.
        # The RawTokenFormatter outputs only ASCII. Override here.
        self.encoding = 'ascii'  # let pygments.format() do the right thing
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        self.error_color = options.get('error_color', None)
        if self.error_color is True:
            self.error_color = 'red'
        if self.error_color is not None:
            try:
                colorize(self.error_color, '')
            except KeyError:
                raise ValueError("Invalid color %r specified" %
                                 self.error_color)

    def format(self, tokensource, outfile):
        try:
            outfile.write(b'')
        except TypeError:
            raise TypeError('The raw tokens formatter needs a binary '
                            'output file')
        if self.compress == 'gz':
            import gzip
            outfile = gzip.GzipFile('', 'wb', 9, outfile)

            write = outfile.write
            flush = outfile.close
        elif self.compress == 'bz2':
            import bz2
            compressor = bz2.BZ2Compressor(9)

            def write(text):
                outfile.write(compressor.compress(text))

            def flush():
                outfile.write(compressor.flush())
                outfile.flush()
        else:
            write = outfile.write
            flush = outfile.flush

        if self.error_color:
            for ttype, value in tokensource:
                line = "%r\t%r\n" % (ttype, value)
                if ttype is Token.Error:
                    write(colorize(self.error_color, line).encode())
                else:
                    write(line.encode())
        else:
            for ttype, value in tokensource:
                write(b"%r\t%r\n" % (ttype, value))
        flush()


TESTCASE_BEFORE = '''\
    def testNeedsName(lexer):
        fragment = %r
        tokens = [
'''
TESTCASE_AFTER = '''\
        ]
        assert list(lexer.get_tokens(fragment)) == tokens
'''


class TestcaseFormatter(Formatter):
    """
    Format tokens as appropriate for a new testcase.

    .. versionadded:: 2.0
    """
    name = 'Testcase'
    aliases = ['testcase']

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        if self.encoding is not None and self.encoding != 'utf-8':
            raise ValueError("Only None and utf-8 are allowed encodings.")

    def format(self, tokensource, outfile):
        indentation = ' ' * 12
        rawbuf = []
        outbuf = []
        for ttype, value in tokensource:
            rawbuf.append(value)
            outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))

        before = TESTCASE_BEFORE % (''.join(rawbuf),)
        during = ''.join(outbuf)
        after = TESTCASE_AFTER
        if self.encoding is None:
            outfile.write(before + during + after)
        else:
            outfile.write(before.encode('utf-8'))
            outfile.write(during.encode('utf-8'))
            outfile.write(after.encode('utf-8'))
        outfile.flush()
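
# ---------------------------------------------------------------------------
# Illustrative usage sketch (an editorial addition, not part of the upstream
# module): a minimal demonstration of driving the three formatters above
# through pygments.highlight().  The sample snippet and the PythonLexer
# choice are assumptions made purely for illustration.
if __name__ == '__main__':
    import sys
    from pygments import highlight
    from pygments.lexers import PythonLexer

    sample = 'print("hello")\n'

    # NullFormatter: the token text comes back unchanged.
    print(highlight(sample, PythonLexer(), NullFormatter()), end='')

    # RawTokenFormatter writes bytes, so it needs a binary output stream;
    # each output line is "tokentype<TAB>repr(tokenstring)".
    highlight(sample, PythonLexer(), RawTokenFormatter(), sys.stdout.buffer)

    # TestcaseFormatter renders the token stream as a unit-test skeleton.
    print(highlight(sample, PythonLexer(), TestcaseFormatter()))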