Dataset columns:
  idx: int64, ranging 0 to 63k
  question: string, lengths 61 to 4.03k
  target: string, lengths 6 to 1.23k
52,800
def assert_not_called(_mock_self):
    """Assert that the mock was never called.

    Raises AssertionError if the mock has any recorded calls.
    """
    self = _mock_self
    if self.call_count == 0:
        return
    raise AssertionError(
        "Expected '%s' to not have been called. Called %s times."
        % (self._mock_name or 'mock', self.call_count))
Assert that the mock was never called.
52,801
def assert_called(_mock_self):
    """Assert that the mock was called at least once.

    Raises:
        AssertionError: if the mock has never been called.
    """
    self = _mock_self
    if self.call_count == 0:
        # Parenthesize the fallback: `%` binds tighter than `or`, so in
        # the original the expression formatted self._mock_name first and
        # the `or 'mock'` default could never apply (the formatted string
        # is always truthy).  Siblings like assert_called_once already
        # parenthesize `self._mock_name or 'mock'`.
        msg = ("Expected '%s' to have been called."
               % (self._mock_name or 'mock',))
        raise AssertionError(msg)
assert that the mock was called at least once
52,802
def assert_called_once(_mock_self):
    """Assert that the mock was called exactly once.

    Raises AssertionError when the call count is anything but one.
    """
    self = _mock_self
    if self.call_count == 1:
        return
    raise AssertionError(
        "Expected '%s' to have been called once. Called %s times."
        % (self._mock_name or 'mock', self.call_count))
assert that the mock was called only once .
52,803
def assert_called_once_with(_mock_self, *args, **kwargs):
    """Assert the mock was called exactly once and with these arguments.

    Delegates the argument check to ``assert_called_with`` after
    verifying the call count.
    """
    self = _mock_self
    if self.call_count != 1:
        raise AssertionError(
            "Expected '%s' to be called once. Called %s times."
            % (self._mock_name or 'mock', self.call_count))
    return self.assert_called_with(*args, **kwargs)
assert that the mock was called exactly once and with the specified arguments .
52,804
def normalize(self):
    """Return the unicode character represented by the HTML entity."""
    # chr() handles all code points on Python 3; older Pythons use a
    # custom helper (presumably for narrow unicode builds — confirm).
    chrfunc = chr if py3k else HTMLEntity._unichr
    if self.named:
        # Named entity (e.g. "nbsp"): look up its code point.
        return chrfunc(htmlentities.name2codepoint[self.value])
    if self.hexadecimal:
        return chrfunc(int(self.value, 16))
    return chrfunc(int(self.value))
Return the unicode character represented by the HTML entity .
52,805
def _detach_children(self):
    """Remove all children and give them independent parent copies."""
    # Materialize first, since _children is cleared below.  Each stored
    # value's first element looks like a weakref-style callable — the
    # child is obtained by calling it (TODO confirm against the class).
    children = [val[0] for val in self._children.values()]
    for child in children:
        # Give the child its own copy of this node's contents.
        child()._parent = list(self)
    self._children.clear()
Remove all children and give them independent parent copies .
52,806
def _handle_parameter(self, default):
    """Handle a case where a parameter is at the head of the tokens.

    *default* is the positional key to use if no name is defined.
    """
    key = None
    showkey = False
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.TemplateParamEquals):
            # Everything before "=" was the key; start collecting the value.
            key = self._pop()
            showkey = True
            self._push()
        elif isinstance(token, (tokens.TemplateParamSeparator,
                                tokens.TemplateClose)):
            # Parameter ended; push the delimiter back for the caller.
            self._tokens.append(token)
            value = self._pop()
            if key is None:
                # Unnamed parameter: synthesize the positional key.
                key = Wikicode(SmartList([Text(str(default))]))
            return Parameter(key, value, showkey)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_parameter() missed a close token")
Handle a case where a parameter is at the head of the tokens .
52,807
def _handle_template(self, token):
    """Handle a case where a template is at the head of the tokens."""
    params = []
    default = 1  # number of the next positional (unnamed) parameter
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.TemplateParamSeparator):
            if not params:
                # First "|" terminates the template name.
                name = self._pop()
            param = self._handle_parameter(default)
            params.append(param)
            if not param.showkey:
                # Only unnamed params consume a positional slot.
                default += 1
        elif isinstance(token, tokens.TemplateClose):
            if not params:
                # No parameters at all: the whole stack is the name.
                name = self._pop()
            return Template(name, params)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_template() missed a close token")
Handle a case where a template is at the head of the tokens .
52,808
def _handle_argument(self, token):
    """Handle a case where an argument is at the head of the tokens."""
    name = None
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.ArgumentSeparator):
            # "|" splits the argument name from its default value.
            name = self._pop()
            self._push()
        elif isinstance(token, tokens.ArgumentClose):
            if name is not None:
                return Argument(name, self._pop())
            return Argument(self._pop())
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_argument() missed a close token")
Handle a case where an argument is at the head of the tokens .
52,809
def _handle_wikilink(self, token):
    """Handle a case where a wikilink is at the head of the tokens."""
    title = None
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.WikilinkSeparator):
            # The title ends here; what follows is the link text.
            title = self._pop()
            self._push()
        elif isinstance(token, tokens.WikilinkClose):
            if title is not None:
                return Wikilink(title, self._pop())
            return Wikilink(self._pop())
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_wikilink() missed a close token")
Handle a case where a wikilink is at the head of the tokens .
52,810
def _handle_external_link(self, token):
    """Handle a case where an external link is at the head of the tokens."""
    brackets, url = token.brackets, None
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.ExternalLinkSeparator):
            # The URL ends here; what follows is the link title.
            url = self._pop()
            self._push()
        elif isinstance(token, tokens.ExternalLinkClose):
            if url is not None:
                return ExternalLink(url, self._pop(), brackets)
            return ExternalLink(self._pop(), brackets=brackets)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_external_link() missed a close token")
Handle when an external link is at the head of the tokens .
52,811
def _handle_entity(self, token):
    """Handle a case where an HTML entity is at the head of the tokens."""
    token = self._tokens.pop()
    if isinstance(token, tokens.HTMLEntityNumeric):
        token = self._tokens.pop()
        if isinstance(token, tokens.HTMLEntityHex):
            text = self._tokens.pop()
            self._tokens.pop()  # discard the trailing (closing) token
            return HTMLEntity(text.text, named=False, hexadecimal=True,
                              hex_char=token.char)
        self._tokens.pop()  # discard the trailing (closing) token
        return HTMLEntity(token.text, named=False, hexadecimal=False)
    self._tokens.pop()  # discard the trailing (closing) token
    return HTMLEntity(token.text, named=True, hexadecimal=False)
Handle a case where an HTML entity is at the head of the tokens .
52,812
def _handle_heading(self, token):
    """Handle a case where a heading is at the head of the tokens."""
    level = token.level
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.HeadingEnd):
            title = self._pop()
            return Heading(title, level)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_heading() missed a close token")
Handle a case where a heading is at the head of the tokens .
52,813
def _handle_comment(self, token):
    """Handle a case where an HTML comment is at the head of the tokens."""
    self._push()
    while self._tokens:
        current = self._tokens.pop()
        if not isinstance(current, tokens.CommentEnd):
            # Not the end yet: process the token as comment contents.
            self._write(self._handle_token(current))
            continue
        return Comment(self._pop())
    raise ParserError("_handle_comment() missed a close token")
Handle a case where an HTML comment is at the head of the tokens .
52,814
def _handle_attribute(self, start):
    """Handle a case where a tag attribute is at the head of the tokens.

    *start* carries the padding data (it is the token _handle_tag passes
    when it sees a TagAttrStart).
    """
    name = quotes = None
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.TagAttrEquals):
            # Everything so far was the attribute name; now read the value.
            name = self._pop()
            self._push()
        elif isinstance(token, tokens.TagAttrQuote):
            quotes = token.char
        elif isinstance(token, (tokens.TagAttrStart, tokens.TagCloseOpen,
                                tokens.TagCloseSelfclose)):
            # Attribute ended; push the delimiter back for the caller.
            self._tokens.append(token)
            if name:
                value = self._pop()
            else:
                # No "=" was seen: a bare, value-less attribute.
                name, value = self._pop(), None
            return Attribute(name, value, quotes, start.pad_first,
                             start.pad_before_eq, start.pad_after_eq)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_attribute() missed a close token")
Handle a case where a tag attribute is at the head of the tokens .
52,815
def _handle_tag(self, token):
    """Handle a case where a tag is at the head of the tokens."""
    close_tokens = (tokens.TagCloseSelfclose, tokens.TagCloseClose)
    implicit, attrs, contents, closing_tag = False, [], None, None
    wiki_markup, invalid = token.wiki_markup, token.invalid or False
    wiki_style_separator, closing_wiki_markup = None, wiki_markup
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.TagAttrStart):
            attrs.append(self._handle_attribute(token))
        elif isinstance(token, tokens.TagCloseOpen):
            # End of the opening tag; what we collected is the tag name.
            wiki_style_separator = token.wiki_markup
            padding = token.padding or ""
            tag = self._pop()
            self._push()
        elif isinstance(token, tokens.TagOpenClose):
            # Start of the closing tag; what we collected is the contents.
            closing_wiki_markup = token.wiki_markup
            contents = self._pop()
            self._push()
        elif isinstance(token, close_tokens):
            if isinstance(token, tokens.TagCloseSelfclose):
                closing_wiki_markup = token.wiki_markup
                tag = self._pop()
                self_closing = True
                padding = token.padding or ""
                implicit = token.implicit or False
            else:
                self_closing = False
                closing_tag = self._pop()
            return Tag(tag, contents, attrs, wiki_markup, self_closing,
                       invalid, implicit, padding, closing_tag,
                       wiki_style_separator, closing_wiki_markup)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_tag() missed a close token")
Handle a case where a tag is at the head of the tokens .
52,816
def _handle_token(self, token):
    """Handle a single token by dispatching on its type."""
    try:
        handler = _HANDLERS[type(token)]
        return handler(self, token)
    except KeyError:
        raise ParserError(
            "_handle_token() got unexpected {0}".format(type(token).__name__))
Handle a single token .
52,817
def build(self, tokenlist):
    """Build a Wikicode object from a list of tokens and return it."""
    self._tokens = tokenlist
    # Reverse so pop() from the end consumes tokens front-to-back.
    tokenlist.reverse()
    self._push()
    while self._tokens:
        self._write(self._handle_token(self._tokens.pop()))
    return self._pop()
Build a Wikicode object from a list of tokens and return it.
52,818
def _select_theory ( theories ) : if theories : values = tuple ( theories . values ( ) ) best = max ( values ) confidence = float ( best ) / sum ( values ) if confidence > 0.5 : return tuple ( theories . keys ( ) ) [ values . index ( best ) ]
Return the most likely spacing convention given different options .
52,819
def _get_spacing_conventions(self, use_names):
    """Try to determine the whitespace conventions for parameters.

    Examines named parameters' names (if *use_names*) or values and
    returns a (before, after) pair of dominant surrounding whitespace;
    either element may be None if no convention dominates.
    """
    before_theories = defaultdict(lambda: 0)
    after_theories = defaultdict(lambda: 0)
    for param in self.params:
        if not param.showkey:
            continue  # unnamed params carry no explicit spacing
        if use_names:
            component = str(param.name)
        else:
            component = str(param.value)
        # Capture leading and trailing whitespace of the component.
        match = re.search(r"^(\s*).*?(\s*)$", component, FLAGS)
        before, after = match.group(1), match.group(2)
        if not use_names and component.isspace() and "\n" in before:
            # Whitespace-only value containing a newline: split the vote
            # around the first newline so both sides are counted.
            before, after = before.split("\n", 1)
            after = "\n" + after
        before_theories[before] += 1
        after_theories[after] += 1
    before = self._select_theory(before_theories)
    after = self._select_theory(after_theories)
    return before, after
Try to determine the whitespace conventions for parameters .
52,820
def _should_remove ( self , i , name ) : if self . params [ i ] . showkey : following = self . params [ i + 1 : ] better_matches = [ after . name . strip ( ) == name and not after . showkey for after in following ] return any ( better_matches ) return False
Look ahead for a parameter with the same name but hidden .
52,821
def _push(self, context=0):
    """Add a new token stack, context, and textbuffer to the list."""
    new_ident = (self._head, context)
    # A (position, context) pair recorded as bad is rejected up front
    # instead of being re-parsed (see _memoize_bad_route callers).
    if new_ident in self._bad_routes:
        raise BadRoute(context)
    self._stacks.append([[], context, [], new_ident])
    self._depth += 1
Add a new token stack context and textbuffer to the list .
52,822
def _push_textbuffer(self):
    """Push the textbuffer onto the stack as a Text node and clear it."""
    if not self._textbuffer:
        return
    combined = "".join(self._textbuffer)
    self._stack.append(tokens.Text(text=combined))
    self._textbuffer = []
Push the textbuffer onto the stack as a Text node and clear it .
52,823
def _fail_route(self):
    """Fail the current tokenization route.

    Remembers the route as bad, discards the current stack, and raises
    BadRoute carrying the failed context.
    """
    context = self._context
    self._memoize_bad_route()
    self._pop()
    raise BadRoute(context)
Fail the current tokenization route .
52,824
def _emit_all(self, tokenlist):
    """Write a series of tokens to the current stack at once."""
    if tokenlist and isinstance(tokenlist[0], tokens.Text):
        # Route a leading Text token through the textbuffer so adjacent
        # text is coalesced rather than split across Text tokens.
        self._emit_text(tokenlist.pop(0).text)
    self._push_textbuffer()
    self._stack.extend(tokenlist)
Write a series of tokens to the current stack at once .
52,825
def _read ( self , delta = 0 , wrap = False , strict = False ) : index = self . _head + delta if index < 0 and ( not wrap or abs ( index ) > len ( self . _text ) ) : return self . START try : return self . _text [ index ] except IndexError : if strict : self . _fail_route ( ) return self . END
Read the value at a relative point in the wikicode .
52,826
def _parse_template(self, has_content):
    """Parse a template at the head of the wikicode string."""
    reset = self._head
    context = contexts.TEMPLATE_NAME
    if has_content:
        context |= contexts.HAS_TEMPLATE
    try:
        template = self._parse(context)
    except BadRoute:
        # Restore the head so the caller can try something else.
        self._head = reset
        raise
    self._emit_first(tokens.TemplateOpen())
    self._emit_all(template)
    self._emit(tokens.TemplateClose())
Parse a template at the head of the wikicode string .
52,827
def _parse_argument(self):
    """Parse an argument at the head of the wikicode string."""
    reset = self._head
    try:
        argument = self._parse(contexts.ARGUMENT_NAME)
    except BadRoute:
        # Restore the head so the caller can try something else.
        self._head = reset
        raise
    self._emit_first(tokens.ArgumentOpen())
    self._emit_all(argument)
    self._emit(tokens.ArgumentClose())
Parse an argument at the head of the wikicode string .
52,828
def _parse_template_or_argument(self):
    """Parse a template or argument at the head of the wikicode string."""
    self._head += 2
    braces = 2
    # Count every consecutive "{" to learn how deep the nesting goes.
    while self._read() == "{":
        self._head += 1
        braces += 1
    has_content = False
    self._push()
    while braces:
        if braces == 1:
            # A lone leftover brace can't open anything; emit as text.
            return self._emit_text_then_stack("{")
        if braces == 2:
            # Exactly two braces left: a template or plain text.
            try:
                self._parse_template(has_content)
            except BadRoute:
                return self._emit_text_then_stack("{{")
            break
        # Three or more: prefer an argument ({{{...}}}), then a template.
        try:
            self._parse_argument()
            braces -= 3
        except BadRoute:
            try:
                self._parse_template(has_content)
                braces -= 2
            except BadRoute:
                return self._emit_text_then_stack("{" * braces)
        if braces:
            has_content = True
            self._head += 1
    self._emit_all(self._pop())
    if self._context & contexts.FAIL_NEXT:
        self._context ^= contexts.FAIL_NEXT
Parse a template or argument at the head of the wikicode string .
52,829
def _handle_template_param(self):
    """Handle a template parameter at the head of the string."""
    if self._context & contexts.TEMPLATE_NAME:
        # The name must contain something before "|" is legal.
        if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
            self._fail_route()
        self._context ^= contexts.TEMPLATE_NAME
    elif self._context & contexts.TEMPLATE_PARAM_VALUE:
        self._context ^= contexts.TEMPLATE_PARAM_VALUE
    else:
        # A previous parameter just ended; flush its stack.
        self._emit_all(self._pop())
    self._context |= contexts.TEMPLATE_PARAM_KEY
    self._emit(tokens.TemplateParamSeparator())
    self._push(self._context)
Handle a template parameter at the head of the string .
52,830
def _handle_template_param_value(self):
    """Handle a template parameter's value at the head of the string."""
    self._emit_all(self._pop())
    # Switch from collecting the key to collecting the value.
    self._context ^= contexts.TEMPLATE_PARAM_KEY
    self._context |= contexts.TEMPLATE_PARAM_VALUE
    self._emit(tokens.TemplateParamEquals())
Handle a template parameter's value at the head of the string.
52,831
def _handle_template_end(self):
    """Handle the end of a template at the head of the string."""
    if self._context & contexts.TEMPLATE_NAME:
        # An empty template name is not valid.
        if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
            self._fail_route()
    elif self._context & contexts.TEMPLATE_PARAM_KEY:
        self._emit_all(self._pop())
    self._head += 1
    return self._pop()
Handle the end of a template at the head of the string .
52,832
def _handle_argument_separator(self):
    """Handle the separator between an argument's name and default."""
    self._context ^= contexts.ARGUMENT_NAME
    self._context |= contexts.ARGUMENT_DEFAULT
    self._emit(tokens.ArgumentSeparator())
Handle the separator between an argument's name and default.
52,833
def _parse_wikilink(self):
    """Parse an internal wikilink at the head of the wikicode string."""
    reset = self._head + 1
    self._head += 2
    try:
        # "[[" may actually open a bracketed external link; try first.
        link, extra, delta = self._really_parse_external_link(True)
    except BadRoute:
        self._head = reset + 1
        try:
            wikilink = self._parse(contexts.WIKILINK_TITLE)
        except BadRoute:
            # Neither worked: the brackets are literal text.
            self._head = reset
            self._emit_text("[[")
        else:
            self._emit(tokens.WikilinkOpen())
            self._emit_all(wikilink)
            self._emit(tokens.WikilinkClose())
    else:
        if self._context & contexts.EXT_LINK_TITLE:
            # External links are not allowed inside an ext-link title.
            self._head = reset
            self._emit_text("[[")
            return
        self._emit_text("[")
        self._emit(tokens.ExternalLinkOpen(brackets=True))
        self._emit_all(link)
        self._emit(tokens.ExternalLinkClose())
Parse an internal wikilink at the head of the wikicode string .
52,834
def _handle_wikilink_separator(self):
    """Handle the separator between a wikilink's title and its text."""
    self._context ^= contexts.WIKILINK_TITLE
    self._context |= contexts.WIKILINK_TEXT
    self._emit(tokens.WikilinkSeparator())
Handle the separator between a wikilink's title and its text.
52,835
def _parse_bracketed_uri_scheme(self):
    """Parse the URI scheme of a bracket-enclosed external link."""
    self._push(contexts.EXT_LINK_URI)
    if self._read() == self._read(1) == "/":
        # Protocol-relative link ("//host"): no scheme to read.
        self._emit_text("//")
        self._head += 2
    else:
        valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
        all_valid = lambda: all(char in valid for char in self._read())
        scheme = ""
        # Accumulate scheme characters until an invalid one or the end.
        while self._read() is not self.END and all_valid():
            scheme += self._read()
            self._emit_text(self._read())
            self._head += 1
        if self._read() != ":":
            self._fail_route()
        self._emit_text(":")
        self._head += 1
        slashes = self._read() == self._read(1) == "/"
        if slashes:
            self._emit_text("//")
            self._head += 2
        if not is_scheme(scheme, slashes):
            self._fail_route()
Parse the URI scheme of a bracket - enclosed external link .
52,836
def _handle_free_link_text(self, punct, tail, this):
    """Handle text in a free ext link, including trailing punctuation.

    *punct* is the punctuation that may not end a link, *tail* is text
    tentatively excluded from the link, and *this* is the new text.
    Returns the updated (punct, tail) pair.
    """
    if "(" in this and ")" in punct:
        # An opening paren in the URL makes a closing paren acceptable;
        # ")" is the last element of the punct tuple, so drop it.
        punct = punct[:-1]
    if this.endswith(punct):
        # Split the trailing run of punctuation off into the tail.
        for i in range(len(this) - 1, 0, -1):
            if this[i - 1] not in punct:
                break
        else:
            i = 0
        stripped = this[:i]
        if stripped and tail:
            # Non-punctuation follows the old tail, so it was link text.
            self._emit_text(tail)
            tail = ""
        tail += this[i:]
        this = stripped
    elif tail:
        self._emit_text(tail)
        tail = ""
    self._emit_text(this)
    return punct, tail
Handle text in a free ext link including trailing punctuation .
52,837
def _is_free_link_end(self, this, next):
    """Return whether the current head is the end of a free link."""
    # Decide from the current character, the next two, and the context.
    after, ctx = self._read(2), self._context
    equal_sign_contexts = contexts.TEMPLATE_PARAM_KEY | contexts.HEADING
    return (this in (self.END, "\n", "[", "]", "<", ">") or
            this == next == "'" or
            (this == "|" and ctx & contexts.TEMPLATE) or
            (this == "=" and ctx & equal_sign_contexts) or
            (this == next == "}" and ctx & contexts.TEMPLATE) or
            (this == next == after == "}" and ctx & contexts.ARGUMENT))
Return whether the current head is the end of a free link .
52,838
def _really_parse_external_link(self, brackets):
    """Really parse an external link.

    Returns a (link stack, trailing text, head delta) triple.
    """
    if brackets:
        self._parse_bracketed_uri_scheme()
        invalid = ("\n", " ", "]")
    else:
        self._parse_free_uri_scheme()
        invalid = ("\n", " ", "[", "]")
    punct = tuple(",;\\.:!?)")
    if self._read() is self.END or self._read()[0] in invalid:
        self._fail_route()
    tail = ""
    while True:
        this, next = self._read(), self._read(1)
        if this == "&":
            if tail:
                self._emit_text(tail)
                tail = ""
            self._parse_entity()
        elif (this == "<" and next == "!" and
                self._read(2) == self._read(3) == "-"):
            # "<!--": an HTML comment inside the link.
            if tail:
                self._emit_text(tail)
                tail = ""
            self._parse_comment()
        elif not brackets and self._is_free_link_end(this, next):
            return self._pop(), tail, -1
        elif this is self.END or this == "\n":
            self._fail_route()
        elif this == next == "{" and self._can_recurse():
            if tail:
                self._emit_text(tail)
                tail = ""
            self._parse_template_or_argument()
        elif this == "]":
            return self._pop(), tail, 0
        elif " " in this:
            # A space splits the URL from the title in bracketed links,
            # and simply terminates a free link.
            before, after = this.split(" ", 1)
            if brackets:
                self._emit_text(before)
                self._emit(tokens.ExternalLinkSeparator())
                if after:
                    self._emit_text(after)
                self._context ^= contexts.EXT_LINK_URI
                self._context |= contexts.EXT_LINK_TITLE
                self._head += 1
                return self._parse(push=False), None, 0
            punct, tail = self._handle_free_link_text(punct, tail, before)
            return self._pop(), tail + " " + after, 0
        elif not brackets:
            punct, tail = self._handle_free_link_text(punct, tail, this)
        else:
            self._emit_text(this)
        self._head += 1
Really parse an external link .
52,839
def _remove_uri_scheme_from_textbuffer ( self , scheme ) : length = len ( scheme ) while length : if length < len ( self . _textbuffer [ - 1 ] ) : self . _textbuffer [ - 1 ] = self . _textbuffer [ - 1 ] [ : - length ] break length -= len ( self . _textbuffer [ - 1 ] ) self . _textbuffer . pop ( )
Remove the URI scheme of a new external link from the textbuffer .
52,840
def _parse_external_link(self, brackets):
    """Parse an external link at the head of the wikicode string."""
    if self._context & contexts.NO_EXT_LINKS or not self._can_recurse():
        # Links are disabled here (or nesting is too deep): plain text.
        if not brackets and self._context & contexts.DL_TERM:
            self._handle_dl_term()
        else:
            self._emit_text(self._read())
        return
    reset = self._head
    self._head += 1
    try:
        link, extra, delta = self._really_parse_external_link(brackets)
    except BadRoute:
        self._head = reset
        if not brackets and self._context & contexts.DL_TERM:
            self._handle_dl_term()
        else:
            self._emit_text(self._read())
    else:
        if not brackets:
            # Free link: the scheme was already emitted as plain text
            # before we knew this was a link, so strip it back out.
            scheme = link[0].text.split(":", 1)[0]
            self._remove_uri_scheme_from_textbuffer(scheme)
        self._emit(tokens.ExternalLinkOpen(brackets=brackets))
        self._emit_all(link)
        self._emit(tokens.ExternalLinkClose())
        self._head += delta
        if extra:
            self._emit_text(extra)
Parse an external link at the head of the wikicode string .
52,841
def _parse_heading(self):
    """Parse a section heading at the head of the wikicode string."""
    self._global |= contexts.GL_HEADING
    reset = self._head
    self._head += 1
    best = 1
    # Count the run of "=" to find the deepest possible heading level.
    while self._read() == "=":
        best += 1
        self._head += 1
    context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)  # cap at 6
    try:
        title, level = self._parse(context)
    except BadRoute:
        self._head = reset + best - 1
        self._emit_text("=" * best)
    else:
        self._emit(tokens.HeadingStart(level=level))
        if level < best:
            # Extra "="s beyond the actual level are literal text.
            self._emit_text("=" * (best - level))
        self._emit_all(title)
        self._emit(tokens.HeadingEnd())
    finally:
        self._global ^= contexts.GL_HEADING
Parse a section heading at the head of the wikicode string .
52,842
def _handle_heading_end(self):
    """Handle the end of a section heading at the head of the string."""
    reset = self._head
    self._head += 1
    best = 1
    while self._read() == "=":
        best += 1
        self._head += 1
    # Recover the opening run's level from the context bit position.
    current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
    level = min(current, min(best, 6))
    try:
        # Keep parsing: maybe this "=" run wasn't the real end.
        after, after_level = self._parse(self._context)
    except BadRoute:
        # This was the end; surplus "="s become literal text.
        if level < best:
            self._emit_text("=" * (best - level))
        self._head = reset + best - 1
        return self._pop(), level
    else:
        # The heading continued past this run; merge what followed.
        self._emit_text("=" * best)
        self._emit_all(after)
        return self._pop(), after_level
Handle the end of a section heading at the head of the string .
52,843
def _really_parse_entity(self):
    """Actually parse an HTML entity and ensure that it is valid."""
    self._emit(tokens.HTMLEntityStart())
    self._head += 1
    this = self._read(strict=True)
    if this == "#":
        numeric = True
        self._emit(tokens.HTMLEntityNumeric())
        self._head += 1
        this = self._read(strict=True)
        if this[0].lower() == "x":
            hexadecimal = True
            self._emit(tokens.HTMLEntityHex(char=this[0]))
            this = this[1:]
            if not this:
                self._fail_route()
        else:
            hexadecimal = False
    else:
        numeric = hexadecimal = False
    valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
    if not numeric and not hexadecimal:
        # Named entities may contain letters.
        valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    if not all([char in valid for char in this]):
        self._fail_route()
    self._head += 1
    if self._read() != ";":
        self._fail_route()
    if numeric:
        test = int(this, 16) if hexadecimal else int(this)
        if test < 1 or test > 0x10FFFF:
            # Outside the range of valid Unicode code points.
            self._fail_route()
    else:
        if this not in htmlentities.entitydefs:
            self._fail_route()
    self._emit(tokens.Text(text=this))
    self._emit(tokens.HTMLEntityEnd())
Actually parse an HTML entity and ensure that it is valid .
52,844
def _parse_entity(self):
    """Parse an HTML entity at the head of the wikicode string."""
    reset = self._head
    try:
        self._push(contexts.HTML_ENTITY)
        self._really_parse_entity()
    except BadRoute:
        # Not a valid entity after all; emit the "&" as plain text.
        self._head = reset
        self._emit_text(self._read())
    else:
        self._emit_all(self._pop())
Parse an HTML entity at the head of the wikicode string .
52,845
def _parse_comment(self):
    """Parse an HTML comment at the head of the wikicode string."""
    self._head += 4  # skip past "<!--"
    reset = self._head - 1
    self._push()
    while True:
        this = self._read()
        if this == self.END:
            # Unterminated comment: back out and emit literal text.
            self._pop()
            self._head = reset
            self._emit_text("<!--")
            return
        if this == self._read(1) == "-" and self._read(2) == ">":
            self._emit_first(tokens.CommentStart())
            self._emit(tokens.CommentEnd())
            self._emit_all(self._pop())
            self._head += 2  # skip past "-->"
            if self._context & contexts.FAIL_NEXT:
                # A pending FAIL_NEXT does not apply across a comment.
                self._context ^= contexts.FAIL_NEXT
            return
        self._emit_text(this)
        self._head += 1
Parse an HTML comment at the head of the wikicode string .
52,846
def _handle_blacklisted_tag(self):
    """Handle the body of an HTML tag that is parser-blacklisted."""
    strip = lambda text: text.rstrip().lower()
    while True:
        this, next = self._read(), self._read(1)
        if this is self.END:
            self._fail_route()
        elif this == "<" and next == "/":
            self._head += 3
            if self._read() != ">" or (strip(self._read(-1)) !=
                                       strip(self._stack[1].text)):
                # Not this tag's closing tag; treat "</" as raw text.
                self._head -= 1
                self._emit_text("</")
                continue
            self._emit(tokens.TagOpenClose())
            self._emit_text(self._read(-1))
            self._emit(tokens.TagCloseClose())
            return self._pop()
        elif this == "&":
            self._parse_entity()
        else:
            self._emit_text(this)
            self._head += 1
Handle the body of an HTML tag that is parser - blacklisted .
52,847
def _handle_single_only_tag_end(self):
    """Handle the end of an implicitly closing single-only HTML tag."""
    # Reuse the padding of the token being replaced (presumably the
    # tag's TagCloseOpen — confirm against the token stream).
    padding = self._stack.pop().padding
    self._emit(tokens.TagCloseSelfclose(padding=padding, implicit=True))
    self._head -= 1  # step the head back one position
    return self._pop()
Handle the end of an implicitly closing single - only HTML tag .
52,848
def _handle_single_tag_end(self):
    """Handle the stream end when inside a single-supporting HTML tag."""
    stack = self._stack
    # Find this tag's TagCloseOpen, skipping over the close tokens of
    # any nested tags (depth tracks the nesting level).
    depth = 1
    for index, token in enumerate(stack[2:], 2):
        if isinstance(token, tokens.TagOpenOpen):
            depth += 1
        elif isinstance(token, tokens.TagCloseOpen):
            depth -= 1
            if depth == 0:
                break
        elif isinstance(token, tokens.TagCloseSelfclose):
            depth -= 1
            if depth == 0:
                raise ParserError("_handle_single_tag_end() got an unexpected "
                                  "TagCloseSelfclose")
    else:
        raise ParserError("_handle_single_tag_end() missed a TagCloseOpen")
    # Convert the open tag into an implicit self-closing one.
    padding = stack[index].padding
    stack[index] = tokens.TagCloseSelfclose(padding=padding, implicit=True)
    return self._pop()
Handle the stream end when inside a single - supporting HTML tag .
52,849
def _parse_tag(self):
    """Parse an HTML tag at the head of the wikicode string."""
    reset = self._head
    self._head += 1
    try:
        tag = self._really_parse_tag()
    except BadRoute:
        # Not a valid tag; emit the "<" literally.
        self._head = reset
        self._emit_text("<")
    else:
        self._emit_all(tag)
Parse an HTML tag at the head of the wikicode string .
52,850
def _emit_style_tag(self, tag, markup, body):
    """Write the body of a tag and the tokens that should surround it."""
    self._emit(tokens.TagOpenOpen(wiki_markup=markup))
    self._emit_text(tag)
    self._emit(tokens.TagCloseOpen())
    self._emit_all(body)
    self._emit(tokens.TagOpenClose())
    self._emit_text(tag)
    self._emit(tokens.TagCloseClose())
Write the body of a tag and the tokens that should surround it .
52,851
def _parse_italics(self):
    """Parse wiki-style italics."""
    reset = self._head
    try:
        stack = self._parse(contexts.STYLE_ITALICS)
    except BadRoute as route:
        self._head = reset
        if route.context & contexts.STYLE_PASS_AGAIN:
            # The failed route requested a second pass (set when bold
            # markup was found inside — see _parse_bold); retry.
            new_ctx = contexts.STYLE_ITALICS | contexts.STYLE_SECOND_PASS
            stack = self._parse(new_ctx)
        else:
            return self._emit_text("''")
    self._emit_style_tag("i", "''", stack)
Parse wiki - style italics .
52,852
def _parse_bold(self):
    """Parse wiki-style bold.

    Returns True when the caller should restart the current pass (the
    bold failed during the italics second pass).
    """
    reset = self._head
    try:
        stack = self._parse(contexts.STYLE_BOLD)
    except BadRoute:
        self._head = reset
        if self._context & contexts.STYLE_SECOND_PASS:
            # Already on the second pass: treat one quote as text and
            # tell the caller to restart.
            self._emit_text("'")
            return True
        elif self._context & contexts.STYLE_ITALICS:
            # Ask the enclosing italics parse to run again with the
            # second-pass flag; the markup is literal for now.
            self._context |= contexts.STYLE_PASS_AGAIN
            self._emit_text("'''")
        else:
            self._emit_text("'''")
    else:
        # NOTE(review): the source line ended with the malformed call
        # `self._emit_text("", stack)` — wrong arity for _emit_text (one
        # argument everywhere else) and `stack` is unbound on the except
        # path.  Restored the success branch that emits the style tag,
        # matching the sibling _parse_italics.
        self._emit_style_tag("b", "'''", stack)
Parse wiki - style bold .
52,853
def _emit_table_tag(self, open_open_markup, tag, style, padding,
                    close_open_markup, contents, open_close_markup):
    """Emit a table tag: open markup, optional style, contents, close."""
    self._emit(tokens.TagOpenOpen(wiki_markup=open_open_markup))
    self._emit_text(tag)
    if style:
        self._emit_all(style)
    if close_open_markup:
        self._emit(tokens.TagCloseOpen(wiki_markup=close_open_markup,
                                       padding=padding))
    else:
        self._emit(tokens.TagCloseOpen(padding=padding))
    if contents:
        self._emit_all(contents)
    self._emit(tokens.TagOpenClose(wiki_markup=open_close_markup))
    self._emit_text(tag)
    self._emit(tokens.TagCloseClose())
Emit a table tag .
52,854
def _handle_table_style(self, end_token):
    """Handle style attributes for a table until *end_token*."""
    data = _TagOpenData()
    data.context = _TagOpenData.CX_ATTR_READY
    while True:
        this = self._read()
        # May only exit when not stuck inside a quoted attribute value.
        can_exit = (not data.context & data.CX_QUOTED or
                    data.context & data.CX_NOTE_SPACE)
        if this == end_token and can_exit:
            if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
                self._push_tag_buffer(data)
            if this.isspace():
                data.padding_buffer["first"] += this
            return data.padding_buffer["first"]
        elif this is self.END or this == end_token:
            if self._context & contexts.TAG_ATTR:
                if data.context & data.CX_QUOTED:
                    # Unclosed quote: retry the value as unquoted text.
                    data.context = data.CX_ATTR_VALUE
                    self._memoize_bad_route()
                    self._pop()
                    self._head = data.reset
                    continue
                self._pop()
            self._fail_route()
        else:
            self._handle_tag_data(data, this)
        self._head += 1
Handle style attributes for a table until end_token .
52,855
def _parse_table(self):
    """Parse a wikicode table by starting with the first line."""
    reset = self._head
    self._head += 2
    try:
        self._push(contexts.TABLE_OPEN)
        padding = self._handle_table_style("\n")
    except BadRoute:
        self._head = reset
        self._emit_text("{")
        return
    style = self._pop()
    self._head += 1
    restore_point = self._stack_ident
    try:
        table = self._parse(contexts.TABLE_OPEN)
    except BadRoute:
        # Unwind any stacks the failed parse left behind.
        while self._stack_ident != restore_point:
            self._memoize_bad_route()
            self._pop()
        self._head = reset
        self._emit_text("{")
        return
    self._emit_table_tag("{|", "table", style, padding, None, table, "|}")
    self._head -= 1  # step the head back one position
Parse a wikicode table by starting with the first line .
52,856
def _handle_table_row(self):
    """Parse as style until end of the line, then continue."""
    self._head += 2
    if not self._can_recurse():
        # Nesting too deep to open a row; emit the markup as text.
        self._emit_text("|-")
        self._head -= 1
        return
    self._push(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
    padding = self._handle_table_style("\n")
    style = self._pop()
    self._head += 1
    row = self._parse(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
    self._emit_table_tag("|-", "tr", style, padding, None, row, "")
    self._head -= 1  # step the head back one position
Parse as style until the end of the line, then continue.
52,857
def _handle_table_cell(self, markup, tag, line_context):
    """Parse as normal syntax unless we hit a style marker, then parse
    style as HTML attributes and the remainder as normal syntax."""
    old_context = self._context
    padding, style = "", None
    self._head += len(markup)
    reset = self._head
    if not self._can_recurse():
        self._emit_text(markup)
        self._head -= 1
        return
    # First pass: assume the whole cell is contents.
    cell = self._parse(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                       line_context | contexts.TABLE_CELL_STYLE)
    cell_context = self._context
    self._context = old_context
    reset_for_style = cell_context & contexts.TABLE_CELL_STYLE
    if reset_for_style:
        # A style marker was found: re-parse the prefix as style
        # attributes and the remainder as the real contents.
        self._head = reset
        self._push(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                   line_context)
        padding = self._handle_table_style("|")
        style = self._pop()
        self._head += 1
        cell = self._parse(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                           line_context)
        cell_context = self._context
        self._context = old_context
    close_open_markup = "|" if reset_for_style else None
    self._emit_table_tag(markup, tag, style, padding, close_open_markup,
                         cell, "")
    # Propagate the line context for subsequent cells on this line.
    self._context |= cell_context & (contexts.TABLE_TH_LINE |
                                     contexts.TABLE_TD_LINE)
    self._head -= 1
Parse as normal syntax unless we hit a style marker then parse style as HTML attributes and the remainder as normal syntax .
52,858
def _handle_table_cell_end ( self , reset_for_style = False ) : if reset_for_style : self . _context |= contexts . TABLE_CELL_STYLE else : self . _context &= ~ contexts . TABLE_CELL_STYLE return self . _pop ( keep_context = True )
Returns the current context with the TABLE_CELL_STYLE flag set if it is necessary to reset and parse style attributes .
52,859
def _handle_end ( self ) : if self . _context & contexts . FAIL : if self . _context & contexts . TAG_BODY : if is_single ( self . _stack [ 1 ] . text ) : return self . _handle_single_tag_end ( ) if self . _context & contexts . TABLE_CELL_OPEN : self . _pop ( ) if self . _context & contexts . DOUBLE : self . _pop ( ) self . _fail_route ( ) return self . _pop ( )
Handle the end of the stream of wikitext .
52,860
def _verify_safe ( self , this ) : context = self . _context if context & contexts . FAIL_NEXT : return False if context & contexts . WIKILINK_TITLE : if this == "]" or this == "{" : self . _context |= contexts . FAIL_NEXT elif this == "\n" or this == "[" or this == "}" or this == ">" : return False elif this == "<" : if self . _read ( 1 ) == "!" : self . _context |= contexts . FAIL_NEXT else : return False return True elif context & contexts . EXT_LINK_TITLE : return this != "\n" elif context & contexts . TEMPLATE_NAME : if this == "{" : self . _context |= contexts . HAS_TEMPLATE | contexts . FAIL_NEXT return True if this == "}" or ( this == "<" and self . _read ( 1 ) == "!" ) : self . _context |= contexts . FAIL_NEXT return True if this == "[" or this == "]" or this == "<" or this == ">" : return False if this == "|" : return True if context & contexts . HAS_TEXT : if context & contexts . FAIL_ON_TEXT : if this is self . END or not this . isspace ( ) : return False elif this == "\n" : self . _context |= contexts . FAIL_ON_TEXT elif this is self . END or not this . isspace ( ) : self . _context |= contexts . HAS_TEXT return True elif context & contexts . TAG_CLOSE : return this != "<" else : if context & contexts . FAIL_ON_EQUALS : if this == "=" : return False elif context & contexts . FAIL_ON_LBRACE : if this == "{" or ( self . _read ( - 1 ) == self . _read ( - 2 ) == "{" ) : if context & contexts . TEMPLATE : self . _context |= contexts . FAIL_ON_EQUALS else : self . _context |= contexts . FAIL_NEXT return True self . _context ^= contexts . FAIL_ON_LBRACE elif context & contexts . FAIL_ON_RBRACE : if this == "}" : self . _context |= contexts . FAIL_NEXT return True self . _context ^= contexts . FAIL_ON_RBRACE elif this == "{" : self . _context |= contexts . FAIL_ON_LBRACE elif this == "}" : self . _context |= contexts . FAIL_ON_RBRACE return True
Make sure we are not trying to write an invalid character .
52,861
def tokenize ( self , text , context = 0 , skip_style_tags = False ) : split = self . regex . split ( text ) self . _text = [ segment for segment in split if segment ] self . _head = self . _global = self . _depth = 0 self . _bad_routes = set ( ) self . _skip_style_tags = skip_style_tags try : tokens = self . _parse ( context ) except BadRoute : raise ParserError ( "Python tokenizer exited with BadRoute" ) if self . _stacks : err = "Python tokenizer exited with non-empty token stack" raise ParserError ( err ) return tokens
Build a list of tokens from a string of wikicode and return it .
52,862
def _value_needs_quotes ( val ) : if not val : return None val = "" . join ( str ( node ) for node in val . filter_text ( recursive = False ) ) if not any ( char . isspace ( ) for char in val ) : return None if "'" in val and '"' not in val : return '"' if '"' in val and "'" not in val : return "'" return "\"'"
Return valid quotes for the given value or None if unneeded .
52,863
def _set_padding ( self , attr , value ) : if not value : setattr ( self , attr , "" ) else : value = str ( value ) if not value . isspace ( ) : raise ValueError ( "padding must be entirely whitespace" ) setattr ( self , attr , value )
Setter for the value of a padding attribute .
52,864
def coerce_quotes ( quotes ) : orig , quotes = quotes , str ( quotes ) if quotes else None if quotes not in [ None , '"' , "'" ] : raise ValueError ( "{!r} is not a valid quote type" . format ( orig ) ) return quotes
Coerce a quote type into an acceptable value or raise an error .
52,865
def _indexed_ifilter ( self , recursive = True , matches = None , flags = FLAGS , forcetype = None ) : match = self . _build_matcher ( matches , flags ) if recursive : restrict = forcetype if recursive == self . RECURSE_OTHERS else None def getter ( i , node ) : for ch in self . _get_children ( node , restrict = restrict ) : yield ( i , ch ) inodes = chain ( * ( getter ( i , n ) for i , n in enumerate ( self . nodes ) ) ) else : inodes = enumerate ( self . nodes ) for i , node in inodes : if ( not forcetype or isinstance ( node , forcetype ) ) and match ( node ) : yield ( i , node )
Iterate over nodes and their corresponding indices in the node list .
52,866
def _get_tree ( self , code , lines , marker , indent ) : def write ( * args ) : if lines and lines [ - 1 ] is marker : lines . pop ( ) last = lines . pop ( ) lines . append ( last + " " . join ( args ) ) else : lines . append ( " " * 6 * indent + " " . join ( args ) ) get = lambda code : self . _get_tree ( code , lines , marker , indent + 1 ) mark = lambda : lines . append ( marker ) for node in code . nodes : node . __showtree__ ( write , get , mark ) return lines
Build a tree to illustrate the way the Wikicode object was parsed .
52,867
def _build_filter_methods ( cls , ** meths ) : doc = make_ifilter = lambda ftype : ( lambda self , * a , ** kw : self . ifilter ( forcetype = ftype , * a , ** kw ) ) make_filter = lambda ftype : ( lambda self , * a , ** kw : self . filter ( forcetype = ftype , * a , ** kw ) ) for name , ftype in ( meths . items ( ) if py3k else meths . iteritems ( ) ) : ifilter = make_ifilter ( ftype ) filter = make_filter ( ftype ) ifilter . __doc__ = doc . format ( name , "ifilter" , ftype ) filter . __doc__ = doc . format ( name , "filter" , ftype ) setattr ( cls , "ifilter_" + name , ifilter ) setattr ( cls , "filter_" + name , filter )
Given Node types build the corresponding i?filter shortcuts .
52,868
def matches ( self , other ) : cmp = lambda a , b : ( a [ 0 ] . upper ( ) + a [ 1 : ] == b [ 0 ] . upper ( ) + b [ 1 : ] if a and b else a == b ) this = self . strip_code ( ) . strip ( ) if isinstance ( other , ( str , bytes , Wikicode , Node ) ) : that = parse_anything ( other ) . strip_code ( ) . strip ( ) return cmp ( this , that ) for obj in other : that = parse_anything ( obj ) . strip_code ( ) . strip ( ) if cmp ( this , that ) : return True return False
Do a loose equivalency test suitable for comparing page names .
52,869
def ifilter ( self , recursive = True , matches = None , flags = FLAGS , forcetype = None ) : gen = self . _indexed_ifilter ( recursive , matches , flags , forcetype ) return ( node for i , node in gen )
Iterate over nodes in our list matching certain conditions .
52,870
def get_sections ( self , levels = None , matches = None , flags = FLAGS , flat = False , include_lead = None , include_headings = True ) : title_matcher = self . _build_matcher ( matches , flags ) matcher = lambda heading : ( title_matcher ( heading . title ) and ( not levels or heading . level in levels ) ) iheadings = self . _indexed_ifilter ( recursive = False , forcetype = Heading ) sections = [ ] open_headings = [ ] if include_lead or not ( include_lead is not None or matches or levels ) : itr = self . _indexed_ifilter ( recursive = False , forcetype = Heading ) try : first = next ( itr ) [ 0 ] sections . append ( ( 0 , Wikicode ( self . nodes [ : first ] ) ) ) except StopIteration : sections . append ( ( 0 , Wikicode ( self . nodes [ : ] ) ) ) for i , heading in iheadings : if flat : newly_closed , open_headings = open_headings , [ ] else : closed_start_index = len ( open_headings ) for j , ( start , last_heading ) in enumerate ( open_headings ) : if heading . level <= last_heading . level : closed_start_index = j break newly_closed = open_headings [ closed_start_index : ] del open_headings [ closed_start_index : ] for start , closed_heading in newly_closed : if matcher ( closed_heading ) : sections . append ( ( start , Wikicode ( self . nodes [ start : i ] ) ) ) start = i if include_headings else ( i + 1 ) open_headings . append ( ( start , heading ) ) for start , heading in open_headings : if matcher ( heading ) : sections . append ( ( start , Wikicode ( self . nodes [ start : ] ) ) ) return [ section for i , section in sorted ( sections ) ]
Return a list of sections within the page .
52,871
def strip_code ( self , normalize = True , collapse = True , keep_template_params = False ) : kwargs = { "normalize" : normalize , "collapse" : collapse , "keep_template_params" : keep_template_params } nodes = [ ] for node in self . nodes : stripped = node . __strip__ ( ** kwargs ) if stripped : nodes . append ( str ( stripped ) ) if collapse : stripped = "" . join ( nodes ) . strip ( "\n" ) while "\n\n\n" in stripped : stripped = stripped . replace ( "\n\n\n" , "\n\n" ) return stripped else : return "" . join ( nodes )
Return a rendered string without unprintable code such as templates .
52,872
def write_pid_file ( fn ) : if not fn : return None if fn == '' or fn == "''" : return None f = open ( fn , "w" ) f . write ( "%s\n" % ( os . getpid ( ) ) ) f . close ( )
Create a file with our PID .
52,873
def input_validate_str ( string , name , max_len = None , exact_len = None ) : if type ( string ) is not str : raise pyhsm . exception . YHSM_WrongInputType ( name , str , type ( string ) ) if max_len != None and len ( string ) > max_len : raise pyhsm . exception . YHSM_InputTooLong ( name , max_len , len ( string ) ) if exact_len != None and len ( string ) != exact_len : raise pyhsm . exception . YHSM_WrongInputSize ( name , exact_len , len ( string ) ) return string
Input validation for strings .
52,874
def input_validate_int ( value , name , max_value = None ) : if type ( value ) is not int : raise pyhsm . exception . YHSM_WrongInputType ( name , int , type ( value ) ) if max_value != None and value > max_value : raise pyhsm . exception . YHSM_WrongInputSize ( name , max_value , value ) return value
Input validation for integers .
52,875
def input_validate_nonce ( nonce , name = 'nonce' , pad = False ) : if type ( nonce ) is not str : raise pyhsm . exception . YHSM_WrongInputType ( name , str , type ( nonce ) ) if len ( nonce ) > pyhsm . defines . YSM_AEAD_NONCE_SIZE : raise pyhsm . exception . YHSM_InputTooLong ( name , pyhsm . defines . YSM_AEAD_NONCE_SIZE , len ( nonce ) ) if pad : return nonce . ljust ( pyhsm . defines . YSM_AEAD_NONCE_SIZE , chr ( 0x0 ) ) else : return nonce
Input validation for nonces .
52,876
def input_validate_key_handle ( key_handle , name = 'key_handle' ) : if type ( key_handle ) is not int : try : return key_handle_to_int ( key_handle ) except pyhsm . exception . YHSM_Error : raise pyhsm . exception . YHSM_WrongInputType ( name , int , type ( key_handle ) ) return key_handle
Input validation for key_handles .
52,877
def input_validate_yubikey_secret ( data , name = 'data' ) : if isinstance ( data , pyhsm . aead_cmd . YHSM_YubiKeySecret ) : data = data . pack ( ) return input_validate_str ( data , name )
Input validation for YHSM_YubiKeySecret or string .
52,878
def input_validate_aead ( aead , name = 'aead' , expected_len = None , max_aead_len = pyhsm . defines . YSM_AEAD_MAX_SIZE ) : if isinstance ( aead , pyhsm . aead_cmd . YHSM_GeneratedAEAD ) : aead = aead . data if expected_len != None : return input_validate_str ( aead , name , exact_len = expected_len ) else : return input_validate_str ( aead , name , max_len = max_aead_len )
Input validation for YHSM_GeneratedAEAD or string .
52,879
def validate_cmd_response_nonce ( got , used ) : if used == '000000000000' . decode ( 'hex' ) : if got == used : raise ( pyhsm . exception . YHSM_Error ( "Bad nonce in response (got %s, expected HSM generated nonce)" % ( got . encode ( 'hex' ) ) ) ) return got return validate_cmd_response_str ( 'nonce' , got , used )
Check that the returned nonce matches nonce used in request .
52,880
def _raw_pack ( key_handle , flags , data ) : return struct . pack ( '<IBB' , key_handle , flags , len ( data ) ) + data
Common code for packing payload to YHSM_HMAC_SHA1_GENERATE command .
52,881
def next ( self , data , final = False , to_buffer = False ) : if final : self . flags = pyhsm . defines . YSM_HMAC_SHA1_FINAL else : self . flags = 0x0 if to_buffer : self . flags |= pyhsm . defines . YSM_HMAC_SHA1_TO_BUFFER self . payload = _raw_pack ( self . key_handle , self . flags , data ) self . final = final return self
Add more input to the HMAC SHA1 .
52,882
def get_hash ( self ) : if not self . executed : raise pyhsm . exception . YHSM_Error ( "HMAC-SHA1 hash not available, before execute()." ) return self . result . hash_result
Get the HMAC - SHA1 that has been calculated thus far .
52,883
def check_signature ( params ) : if 'id' in params : try : id_int = int ( params [ 'id' ] [ 0 ] ) except : my_log_message ( args , syslog . LOG_INFO , "Non-numerical client id (%s) in request." % ( params [ 'id' ] [ 0 ] ) ) return False , None key = client_ids . get ( id_int ) if key : if 'h' in params : sig = params [ 'h' ] [ 0 ] good_sig = make_signature ( params , key ) if sig == good_sig : return True , key else : my_log_message ( args , syslog . LOG_INFO , "Bad signature from client id '%i' (%s, expected %s)." % ( id_int , sig , good_sig ) ) else : my_log_message ( args , syslog . LOG_INFO , "Client id (%i) but no HMAC in request." % ( id_int ) ) return False , key else : my_log_message ( args , syslog . LOG_INFO , "Unknown client id '%i'" % ( id_int ) ) return False , None return True , None
Verify the signature of the parameters in an OTP v2 . 0 verify request .
52,884
def validate_oath_hotp ( self , params ) : from_key = params [ "hotp" ] [ 0 ] if not re . match ( hotp_valid_input , from_key ) : self . log_error ( "IN: %s, Invalid OATH-HOTP OTP" % ( params ) ) return "ERR Invalid OATH-HOTP OTP" uid , otp , = get_oath_hotp_bits ( params ) if not uid or not otp : self . log_error ( "IN: %s, could not get UID/OTP ('%s'/'%s')" % ( params , uid , otp ) ) return "ERR Invalid OATH-HOTP input" if args . debug : print "OATH-HOTP uid %s, OTP %s" % ( uid , otp ) try : db = ValOathDb ( args . db_file ) entry = db . get ( uid ) except Exception , e : self . log_error ( "IN: %s, database error : '%s'" % ( params , e ) ) return "ERR Internal error" nonce = entry . data [ "nonce" ] . decode ( 'hex' ) aead = entry . data [ "aead" ] . decode ( 'hex' ) new_counter = pyhsm . oath_hotp . search_for_oath_code ( hsm , entry . data [ "key_handle" ] , nonce , aead , entry . data [ "oath_c" ] , otp , args . look_ahead ) if args . debug : print "OATH-HOTP %i..%i -> new C == %s" % ( entry . data [ "oath_c" ] , entry . data [ "oath_c" ] + args . look_ahead , new_counter ) if type ( new_counter ) != int : return "ERR Could not validate OATH-HOTP OTP" try : if db . update_oath_hotp_c ( entry , new_counter ) : return "OK counter=%04x" % ( new_counter ) else : return "ERR replayed OATH-HOTP" except Exception , e : self . log_error ( "IN: %s, database error updating counter : %s" % ( params , e ) ) return "ERR Internal error"
Validate OATH - HOTP code using YubiHSM HMAC - SHA1 hashing with token keys secured in AEAD s that we have stored in an SQLite3 database .
52,885
def validate_oath_totp ( self , params ) : from_key = params [ "totp" ] [ 0 ] if not re . match ( totp_valid_input , from_key ) : self . log_error ( "IN: %s, Invalid OATH-TOTP OTP" % ( params ) ) return "ERR Invalid OATH-TOTP OTP" uid , otp , = get_oath_totp_bits ( params ) if not uid or not otp : self . log_error ( "IN: %s, could not get UID/OTP ('%s'/'%s')" % ( params , uid , otp ) ) return "ERR Invalid OATH-TOTP input" if args . debug : print "OATH-TOTP uid %s, OTP %s" % ( uid , otp ) try : db = ValOathDb ( args . db_file ) entry = db . get ( uid ) except Exception , e : self . log_error ( "IN: %s, database error : '%s'" % ( params , e ) ) return "ERR Internal error" nonce = entry . data [ "nonce" ] . decode ( 'hex' ) aead = entry . data [ "aead" ] . decode ( 'hex' ) new_timecounter = pyhsm . oath_totp . search_for_oath_code ( hsm , entry . data [ "key_handle" ] , nonce , aead , otp , args . interval , args . tolerance ) if args . debug : print "OATH-TOTP counter: %i, interval: %i -> new timecounter == %s" % ( entry . data [ "oath_c" ] , args . interval , new_timecounter ) if type ( new_timecounter ) != int : return "ERR Could not validate OATH-TOTP OTP" try : if db . update_oath_hotp_c ( entry , new_timecounter ) : return "OK timecounter=%04x" % ( new_timecounter ) else : return "ERR replayed OATH-TOTP" except Exception , e : self . log_error ( "IN: %s, database error updating counter : %s" % ( params , e ) ) return "ERR Internal error"
Validate OATH - TOTP code using YubiHSM HMAC - SHA1 hashing with token keys secured in AEAD s that we have stored in an SQLite3 database .
52,886
def validate_pwhash ( _self , params ) : pwhash , nonce , aead , key_handle = get_pwhash_bits ( params ) d_aead = aead . decode ( 'hex' ) plaintext_len = len ( d_aead ) - pyhsm . defines . YSM_AEAD_MAC_SIZE pw = pwhash . ljust ( plaintext_len , chr ( 0x0 ) ) if hsm . validate_aead ( nonce . decode ( 'hex' ) , key_handle , d_aead , pw ) : return "OK pwhash validated" return "ERR Could not validate pwhash"
Validate password hash using YubiHSM .
52,887
def get_pwhash_bits ( params ) : if not "pwhash" in params or not "nonce" in params or not "aead" in params or not "kh" in params : raise Exception ( "Missing required parameter in request (pwhash, nonce, aead or kh)" ) pwhash = params [ "pwhash" ] [ 0 ] nonce = params [ "nonce" ] [ 0 ] aead = params [ "aead" ] [ 0 ] key_handle = pyhsm . util . key_handle_to_int ( params [ "kh" ] [ 0 ] ) return pwhash , nonce , aead , key_handle
Extract bits for password hash validation from params .
52,888
def get_oath_hotp_bits ( params ) : if "uid" in params : return params [ "uid" ] [ 0 ] , int ( params [ "hotp" ] [ 0 ] ) m = re . match ( "^([cbdefghijklnrtuv]*)([0-9]{6,8})" , params [ "hotp" ] [ 0 ] ) uid , otp , = m . groups ( ) return uid , int ( otp ) ,
Extract the OATH - HOTP uid and OTP from params .
52,889
def load_clients_file ( filename ) : res = { } content = [ ] try : fhandle = file ( filename ) content = fhandle . readlines ( ) fhandle . close ( ) except IOError : return None linenum = 0 for line in content : linenum += 1 while line . endswith ( "\r" ) or line . endswith ( "\n" ) : line = line [ : - 1 ] if re . match ( "(^\s*#|^\s*$)" , line ) : continue parts = [ x . strip ( ) for x in line . split ( ',' ) ] try : if len ( parts ) != 2 : raise Exception ( ) id_num = int ( parts [ 0 ] ) key = base64 . b64decode ( parts [ 1 ] ) res [ id_num ] = key except : my_log_message ( args , syslog . LOG_ERR , 'Bad data on line %i of clients file "%s" : "%s"' % ( linenum , filename , line ) ) return None return res
Load a list of base64 encoded shared secrets for numerical client ids .
52,890
def run ( ) : server_address = ( args . listen_addr , args . listen_port ) httpd = YHSM_VALServer ( server_address , YHSM_VALRequestHandler ) my_log_message ( args , syslog . LOG_INFO , "Serving requests to 'http://%s:%s%s' (YubiHSM: '%s')" % ( args . listen_addr , args . listen_port , args . serve_url , args . device ) ) httpd . serve_forever ( )
Start the BaseHTTPServer and serve requests forever .
52,891
def main ( ) : my_name = os . path . basename ( sys . argv [ 0 ] ) if not my_name : my_name = "yhsm-validation-server" syslog . openlog ( my_name , syslog . LOG_PID , syslog . LOG_LOCAL0 ) global args args = parse_args ( ) args_fixup ( ) global hsm try : hsm = pyhsm . YHSM ( device = args . device , debug = args . debug ) except serial . SerialException , e : my_log_message ( args , syslog . LOG_ERR , 'Failed opening YubiHSM device "%s" : %s' % ( args . device , e ) ) return 1 write_pid_file ( args . pid_file ) try : run ( ) except KeyboardInterrupt : print "" print "Shutting down" print ""
The main function that will be executed when running this as a stand alone script .
52,892
def do_GET ( self ) : if self . path . startswith ( args . serve_url ) : res = None log_res = None mode = None params = urlparse . parse_qs ( self . path [ len ( args . serve_url ) : ] ) if "otp" in params : if args . mode_short_otp : mode = 'YubiKey OTP (short)' res = validate_yubikey_otp_short ( self , params ) elif args . mode_otp : mode = 'YubiKey OTP' res = validate_yubikey_otp ( self , params ) log_res = '&' . join ( res . split ( '\n' ) ) else : res = "ERR 'otp/otp2' disabled" elif "hotp" in params : if args . mode_hotp : mode = 'OATH-HOTP' res = validate_oath_hotp ( self , params ) else : res = "ERR 'hotp' disabled" elif "totp" in params : if args . mode_totp : mode = 'OATH-TOTP' res = validate_oath_totp ( self , params ) else : res = "ERR 'totp' disabled" elif "pwhash" in params : if args . mode_pwhash : mode = 'Password hash' res = validate_pwhash ( self , params ) else : res = "ERR 'pwhash' disabled" if not log_res : log_res = res self . log_message ( "%s validation result: %s -> %s" , mode , self . path , log_res ) if res != None : self . send_response ( 200 ) self . send_header ( 'Content-type' , 'text/plain' ) self . end_headers ( ) self . wfile . write ( res ) self . wfile . write ( "\n" ) else : self . log_error ( "No validation result to '%s' (responding 403)" % ( self . path ) ) self . send_response ( 403 , 'Forbidden' ) self . end_headers ( ) else : self . log_error ( "Bad URL '%s' - I'm serving '%s' (responding 403)" % ( self . path , args . serve_url ) ) self . send_response ( 403 , 'Forbidden' ) self . end_headers ( )
Process validation GET requests .
52,893
def get ( self , key ) : c = self . conn . cursor ( ) for row in c . execute ( "SELECT key, nonce, key_handle, aead, oath_C, oath_T FROM oath WHERE key = ?" , ( key , ) ) : return ValOathEntry ( row ) raise Exception ( "OATH token for '%s' not found in database (%s)" % ( key , self . filename ) )
Fetch entry from database .
52,894
def update_oath_hotp_c ( self , entry , new_c ) : key = entry . data [ "key" ] c = self . conn . cursor ( ) c . execute ( "UPDATE oath SET oath_c = ? WHERE key = ? AND ? > oath_c" , ( new_c , key , new_c , ) ) self . conn . commit ( ) return c . rowcount == 1
Update the OATH - HOTP counter value for entry in the database .
52,895
def generate_aead ( hsm , args , password ) : try : pw = password . ljust ( args . min_len , chr ( 0x0 ) ) return hsm . generate_aead_simple ( args . nonce . decode ( 'hex' ) , args . key_handle , pw ) except pyhsm . exception . YHSM_CommandFailed , e : if e . status_str == 'YHSM_FUNCTION_DISABLED' : print "ERROR: The key handle %s is not permitted to YSM_AEAD_GENERATE." % ( args . key_handle ) return None else : print "ERROR: %s" % ( e . reason )
Generate an AEAD using the YubiHSM .
52,896
def aead_filename ( aead_dir , key_handle , public_id ) : parts = [ aead_dir , key_handle ] + pyhsm . util . group ( public_id , 2 ) path = os . path . join ( * parts ) if not os . path . isdir ( path ) : os . makedirs ( path ) return os . path . join ( path , public_id )
Return the filename of the AEAD for this public_id and create any missing directorys .
52,897
def safe_process_files ( path , files , args , state ) : for fn in files : full_fn = os . path . join ( path , fn ) try : if not process_file ( path , fn , args , state ) : return False except Exception , e : sys . stderr . write ( "error: %s\n%s\n" % ( os . path . join ( path , fn ) , traceback . format_exc ( ) ) ) state . log_failed ( full_fn ) if state . should_quit ( ) : return False return True
Process a number of files in a directory . Catches any exception from the processing and checks if we should fail directly or keep going .
52,898
def walk_dir ( path , args , state ) : if args . debug : sys . stderr . write ( "Walking %s\n" % path ) for root , _dirs , files in os . walk ( path ) : if not safe_process_files ( root , files , args , state ) : return False if state . should_quit ( ) : return False return True
Check all files in path to see if there is any requests that we should send out on the bus .
52,899
def main ( ) : global args args = parse_args ( ) if not args : return 1 state = MyState ( args ) for path in args . paths : if os . path . isdir ( path ) : walk_dir ( path , args , state ) else : safe_process_files ( os . path . dirname ( path ) , [ os . path . basename ( path ) ] , args , state ) if state . should_quit ( ) : break if state . failed_files : sys . stderr . write ( "error: %i/%i AEADs failed\n" % ( len ( state . failed_files ) , state . file_count ) ) return 1 if args . debug : sys . stderr . write ( "Successfully processed %i AEADs\n" % ( state . file_count ) )
Main function when running as a program .