idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
1,800
def encode_caveat(condition, root_key, third_party_info, key, ns):
    """Encrypt a third-party caveat for the third party's bakery version.

    Dispatches to the v1 or v2/v3 encoder based on
    third_party_info.version.

    :raise NotImplementedError: for any unsupported version.
    """
    version = third_party_info.version
    if version == VERSION_1:
        return _encode_caveat_v1(
            condition, root_key, third_party_info.public_key, key)
    if version in (VERSION_2, VERSION_3):
        return _encode_caveat_v2_v3(
            version, condition, root_key,
            third_party_info.public_key, key, ns)
    raise NotImplementedError('only bakery v1, v2, v3 supported')
Encrypt a third - party caveat .
1,801
def _encode_caveat_v1(condition, root_key, third_party_pub_key, key):
    """Create a JSON-encoded (version 1) third-party caveat.

    The root key and condition are sealed in a NaCl box addressed to
    the third party; the result is a base64-encoded JSON wrapper that
    also records both public keys and the nonce.
    """
    secret_json = json.dumps({
        'RootKey': base64.b64encode(root_key).decode('ascii'),
        'Condition': condition,
    })
    sealer = nacl.public.Box(key.key, third_party_pub_key.key)
    sealed = sealer.encrypt(six.b(secret_json))
    nonce_size = nacl.public.Box.NONCE_SIZE
    # PyNaCl prepends the nonce to the ciphertext; split them apart.
    nonce = sealed[0:nonce_size]
    payload = sealed[nonce_size:]
    wrapper = json.dumps({
        'ThirdPartyPublicKey': str(third_party_pub_key),
        'FirstPartyPublicKey': str(key.public_key),
        'Nonce': base64.b64encode(nonce).decode('ascii'),
        'Id': base64.b64encode(payload).decode('ascii'),
    })
    return base64.b64encode(six.b(wrapper))
Create a JSON - encoded third - party caveat .
1,802
def _encode_caveat_v2_v3(version, condition, root_key, third_party_pub_key,
                         key, ns):
    """Create a version 2 or version 3 third-party caveat.

    Wire layout: version byte, third-party public-key prefix,
    first-party public key, NaCl nonce, encrypted secret part.
    """
    ns_data = ns.serialize_text() if version >= VERSION_3 else bytearray()
    out = bytearray()
    out.append(version)
    out.extend(
        third_party_pub_key.serialize(raw=True)[:_PUBLIC_KEY_PREFIX_LEN])
    out.extend(key.public_key.serialize(raw=True)[:])
    secret = _encode_secret_part_v2_v3(version, condition, root_key, ns_data)
    sealer = nacl.public.Box(key.key, third_party_pub_key.key)
    sealed = sealer.encrypt(secret)
    nonce_size = nacl.public.Box.NONCE_SIZE
    # PyNaCl prepends the nonce; append nonce then ciphertext.
    out.extend(sealed[:nonce_size])
    out.extend(sealed[nonce_size:])
    return bytes(out)
Create a version 2 or version 3 third - party caveat .
1,803
def _encode_secret_part_v2_v3(version, condition, root_key, ns):
    """Build the (unencrypted) v2/v3 secret part of a third-party caveat.

    Layout: version byte, uvarint-prefixed root key, (v3 only)
    uvarint-prefixed namespace, then the UTF-8 condition.
    """
    buf = bytearray([version])
    encode_uvarint(len(root_key), buf)
    buf.extend(root_key)
    if version >= VERSION_3:
        encode_uvarint(len(ns), buf)
        buf.extend(ns)
    buf.extend(condition.encode('utf-8'))
    return bytes(buf)
Creates a version 2 or version 3 secret part of the third party caveat . The returned data is not encrypted .
1,804
def decode_caveat(key, caveat):
    """Decode a third-party caveat by decrypting it with key.

    :raise VerificationError: for empty, truncated or unknown-version
        caveats.
    """
    if not caveat:
        raise VerificationError('empty third party caveat')
    leading = caveat[:1]
    if leading == b'e':
        # 'e' is the first byte of the base64-encoded v1 JSON wrapper.
        return _decode_caveat_v1(key, caveat)
    version = six.byte2int(leading)
    if version in (VERSION_2, VERSION_3):
        if version == VERSION_3 and len(caveat) < _VERSION3_CAVEAT_MIN_LEN:
            raise VerificationError(
                'caveat id payload not provided for caveat id {}'.format(
                    caveat))
        return _decode_caveat_v2_v3(version, key, caveat)
    raise VerificationError('unknown version for caveat')
Decode caveat by decrypting the encrypted part using key .
1,805
def _decode_caveat_v1(key, caveat):
    """Decode a base64-encoded JSON (version 1) third-party caveat.

    :param key: the third party's key pair; its public part must match
        the ThirdPartyPublicKey recorded in the wrapper.
    :param caveat: the base64-encoded JSON caveat id.
    :return: a ThirdPartyCaveatInfo describing the decrypted caveat.
    :raise Exception: on public key mismatch or missing first-party
        public key.
    """
    data = base64.b64decode(caveat).decode('utf-8')
    wrapper = json.loads(data)
    tp_public_key = nacl.public.PublicKey(
        base64.b64decode(wrapper['ThirdPartyPublicKey']))
    if key.public_key.key != tp_public_key:
        raise Exception('public key mismatch')
    if wrapper.get('FirstPartyPublicKey', None) is None:
        raise Exception('target service public key not specified')
    secret = base64.b64decode(wrapper.get('Id'))
    nonce = base64.b64decode(wrapper.get('Nonce'))
    fp_public_key = nacl.public.PublicKey(
        base64.b64decode(wrapper.get('FirstPartyPublicKey')))
    box = nacl.public.Box(key.key, fp_public_key)
    c = box.decrypt(secret, nonce)
    record = json.loads(c.decode('utf-8'))
    # FIX: reuse fp_public_key instead of base64-decoding and
    # reconstructing the identical key a second time.
    return ThirdPartyCaveatInfo(
        condition=record.get('Condition'),
        first_party_public_key=PublicKey(fp_public_key),
        third_party_key_pair=key,
        root_key=base64.b64decode(record.get('RootKey')),
        caveat=caveat,
        id=None,
        version=VERSION_1,
        namespace=legacy_namespace(),
    )
Decode a base64 encoded JSON id .
1,806
def _decode_caveat_v2_v3(version, key, caveat):
    """Decode a version 2 or version 3 third-party caveat.

    Validates length and the third-party public-key prefix, opens the
    NaCl box, then parses the secret part.
    """
    nonce_size = nacl.public.Box.NONCE_SIZE
    min_len = 1 + _PUBLIC_KEY_PREFIX_LEN + _KEY_LEN + nonce_size + 16
    if len(caveat) < min_len:
        raise VerificationError('caveat id too short')
    original_caveat = caveat
    rest = caveat[1:]  # strip the version byte
    pk_prefix = rest[:_PUBLIC_KEY_PREFIX_LEN]
    rest = rest[_PUBLIC_KEY_PREFIX_LEN:]
    if key.public_key.serialize(raw=True)[:_PUBLIC_KEY_PREFIX_LEN] != pk_prefix:
        raise VerificationError('public key mismatch')
    first_party_pub = rest[:_KEY_LEN]
    rest = rest[_KEY_LEN:]
    nonce = rest[:nonce_size]
    sealed = rest[nonce_size:]
    fp_public_key = nacl.public.PublicKey(first_party_pub)
    box = nacl.public.Box(key.key, fp_public_key)
    data = box.decrypt(sealed, nonce)
    root_key, condition, ns = _decode_secret_part_v2_v3(version, data)
    return ThirdPartyCaveatInfo(
        condition=condition.decode('utf-8'),
        first_party_public_key=PublicKey(fp_public_key),
        third_party_key_pair=key,
        root_key=root_key,
        caveat=original_caveat,
        version=version,
        id=None,
        namespace=ns,
    )
Decodes a version 2 or version 3 caveat .
1,807
def encode_uvarint(n, data):
    """Append the variable-length (LEB128-style) encoding of n to data.

    :param n: non-negative integer to encode.
    :param data: bytearray-like object extended in place.
    :raise ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError('only support positive integer')
    while n > 127:
        data.append((n & 127) | 128)  # high bit: more bytes follow
        n >>= 7
    data.append(n)
encodes integer into variable - length format into data .
1,808
def decode_uvarint(data):
    """Decode a variable-length integer from the start of data.

    :param data: bytes (or a py2 byte-string) beginning with a varint.
    :return: tuple (value, number_of_bytes_consumed).
    """
    n = 0
    shift = 0
    length = 0
    for b in data:
        if not isinstance(b, int):
            # py2 str iteration yields 1-char strings; ord() gives the
            # byte value (stdlib replacement for six.byte2int).
            b = ord(b)
        n |= (b & 0x7f) << shift
        length += 1
        if (b & 0x80) == 0:
            break
        shift += 7
    return n, length
Decode a variable - length integer .
1,809
def make_enum(enum_mappings):
    """Create a type converter for an enum class or text-to-value mapping.

    The converter looks the text up in the mapping, retrying with the
    lowercased text when not found.  Its ``pattern`` attribute matches
    any of the mapping keys.
    """
    if inspect.isclass(enum_mappings) and issubclass(enum_mappings, enum.Enum):
        enum_mappings = enum_mappings.__members__

    def convert_enum(text):
        mappings = convert_enum.mappings
        if text not in mappings:
            text = text.lower()  # case-insensitive fallback
        return mappings[text]

    convert_enum.pattern = r"|".join(enum_mappings.keys())
    convert_enum.mappings = enum_mappings
    return convert_enum
Creates a type converter for an enumeration or text - to - value mapping .
1,810
def make_variant(cls, converters, re_opts=None, compiled=False, strict=True):
    """Create a type converter trying several alternatives in order.

    The first converter whose pattern matches is used.  The returned
    callable carries ``pattern``, ``converters`` and
    ``regex_group_count`` attributes for the parser machinery.
    """
    assert converters, "REQUIRE: Non-empty list."
    if len(converters) == 1:
        return converters[0]
    if re_opts is None:
        re_opts = cls.default_re_opts
    pattern = r"(" + r")|(".join(tc.pattern for tc in converters) + ")"
    # One capture group per alternative plus each pattern's own groups.
    group_count = len(converters)
    for converter in converters:
        group_count += pattern_group_count(converter.pattern)
    if compiled:
        convert_variant = cls.__create_convert_variant_compiled(
            converters, re_opts, strict)
    else:
        convert_variant = cls.__create_convert_variant(re_opts, strict)
    convert_variant.pattern = pattern
    convert_variant.converters = tuple(converters)
    convert_variant.regex_group_count = group_count
    return convert_variant
Creates a type converter for a number of type converter alternatives . The first matching type converter is used .
1,811
def isValidUnit(self, w):
    """Check whether the string w names a valid quantities unit.

    '/' is accepted so compound units can be assembled; a small
    blacklist rejects words pq accepts that are not real units here.
    """
    bad = set(['point', 'a'])
    if w in bad:
        return False
    try:
        pq.Quantity(0.0, w)
        return True
    except Exception:
        # FIX: bare `except:` would also swallow KeyboardInterrupt and
        # SystemExit; only unit-parsing failures should be caught.
        return w == '/'
Checks if a string represents a valid quantities unit .
1,812
def extractUnits(self, inp):
    """Collect all valid unit phrases from the input string.

    Consecutive unit words (and '/') are joined into one description;
    each completed run is appended to the result list.
    """
    inp = self._preprocess(inp)
    units = []
    current = ""
    for word in inp.split(' '):
        if self.isValidUnit(word) or word == '/':
            current = word if not current else current + " " + word
        elif current:
            units.append(current)
            current = ""
    if current:
        units.append(current)
    return units
Collects all the valid units from an inp string. Works by appending consecutive words from the string and cross-referencing them with a set of valid units.
1,813
def convert(self, inp):
    """Convert a quantity string (e.g. '3 meters to feet') to a pq.Quantity.

    The longest number found supplies the magnitude; the first
    extracted unit is the source unit and the second the target.
    """
    inp = self._preprocess(inp)
    magnitude = NumberService().longestNumber(inp)
    units = self.extractUnits(inp)
    # Build in the source unit, then rescale to the target unit.
    quantity = pq.Quantity(float(magnitude), units[0])
    quantity.units = units[1]
    return quantity
Converts a string representation of some quantity of units into a quantities object .
1,814
def allow(self, ctx, acls):
    """Return True when any ACL entry equals this identity's user name."""
    return any(self._identity == acl for acl in acls)
Allow access when any ACL member is equal to the user name.
1,815
def expand_paths(paths=None, predicate=None, filters=None, parent_uuid=None):
    """Return a unique list of resources/collections for the given paths.

    Paths are resolved relative to the shell's current path and
    expanded in parallel.

    :raise NotFound: when no resource matches.
    """
    shell_path = Context().shell.current_path
    if not paths:
        paths = [shell_path]
    else:
        paths = [shell_path / res for res in paths]
    result = OrderedDict()
    kwargs = {
        'predicate': predicate,
        'filters': filters,
        'parent_uuid': parent_uuid,
    }
    for res in parallel_map(_path_to_resources, paths, kwargs=kwargs,
                            workers=50):
        for r in res:
            # Keyed by path, so duplicates collapse while order is kept.
            result[r.path] = r
    resources = list(result.values())
    if not resources:
        raise NotFound()
    return resources
Return a unique list of resources or collections from a list of paths. Supports fq_name and wildcard resolution.
1,816
def render(self, template, filename, context=None, filters=None):
    """Render a Jinja2 template to a text file.

    :param template: path to the template file.
    :param filename: output path; parent directories are created.
    :param context: dict of template variables (default: empty).
    :param filters: dict of extra Jinja2 filters (default: empty).
    """
    # FIX: avoid mutable default arguments ({} shared across calls).
    if context is None:
        context = {}
    if filters is None:
        filters = {}
    filename = os.path.normpath(filename)
    path, file = os.path.split(filename)
    try:
        os.makedirs(path)
    except OSError as exception:
        # Already-existing directories are fine; re-raise anything else.
        if exception.errno != errno.EEXIST:
            raise
    path, file = os.path.split(template)
    loader = jinja2.FileSystemLoader(path)
    env = jinja2.Environment(loader=loader, trim_blocks=True,
                             lstrip_blocks=True)
    env.filters.update(filters)
    template = env.get_template(file)
    text = template.render(context)
    with open(filename, 'wt') as f:
        f.write(text)
Renders a Jinja2 template to text .
1,817
def write(pylist, parallel=True):
    """Spawn a VSGWriter for each element of pylist.

    With parallel=True the writers run as started threads and are all
    joined; otherwise each writer runs synchronously in order.
    """
    writers = [VSGWriter(obj) for obj in pylist]
    if parallel:
        for w in writers:
            w.start()
        for w in writers:
            w.join()
    else:
        for w in writers:
            w.run()
Utility method to spawn a VSGWriter for each element in a collection .
1,818
def interact(self, ctx, location, ir_err):
    """Open a browser at the visit URL and wait for the discharge token.

    Implements Interactor.interact for web-browser interaction.
    """
    info = ir_err.interaction_method(self.kind(), WebBrowserInteractionInfo)
    if not location.endswith('/'):
        location += '/'
    self._open_web_browser(urljoin(location, info.visit_url))
    return self._wait_for_token(ctx, urljoin(location, info.wait_token_url))
Implement Interactor . interact by opening the browser window and waiting for the discharge token
1,819
def _wait_for_token(self, ctx, wait_token_url):
    """Fetch a DischargeToken from the wait-token URL.

    Accepts either a raw 'token' field or a base64 'token64' field in
    the JSON response.

    :raise InteractionError: on HTTP failure or a malformed response.
    """
    resp = requests.get(wait_token_url)
    if resp.status_code != 200:
        raise InteractionError('cannot get {}'.format(wait_token_url))
    json_resp = resp.json()
    kind = json_resp.get('kind')
    if kind is None:
        raise InteractionError(
            'cannot get kind token from {}'.format(wait_token_url))
    token_val = json_resp.get('token')
    if token_val is None:
        token_val = json_resp.get('token64')
        if token_val is None:
            raise InteractionError(
                'cannot get token from {}'.format(wait_token_url))
        token_val = base64.b64decode(token_val)
    return DischargeToken(kind=kind, value=token_val)
Returns a token from the wait token URL.
1,820
def from_dict(cls, info_dict):
    """Build a WebBrowserInteractionInfo from an interaction-info dict.

    Expects the 'VisitURL' and 'WaitTokenURL' keys produced by
    Error.interaction_method.
    """
    return WebBrowserInteractionInfo(
        visit_url=info_dict.get('VisitURL'),
        wait_token_url=info_dict.get('WaitTokenURL'))
Create a new instance of WebBrowserInteractionInfo as expected by the Error . interaction_method method .
1,821
def set(self, name, value, overwrite=False):
    """Set configuration parameter name to value and return the new value.

    An existing parameter is only replaced when overwrite is True;
    otherwise a warning is logged and an Exception is raised.
    """
    exists = hasattr(self, name)
    if exists and not overwrite:
        message = ("Configuration parameter %s exists and overwrite not "
                   "allowed" % name)
        self._log.warning(message)
        raise Exception(message)
    setattr(self, name, value)
    return getattr(self, name)
Sets a new value for a given configuration parameter .
1,822
def create_missing_types(cls, schema, type_dict, type_builder=None):
    """Create missing type variants for CardinalityField types in schema.

    Assumes the primary (cardinality=1) converter is already registered
    in type_dict.  Uses cls.type_builder unless one is supplied.
    """
    if not type_builder:
        type_builder = cls.type_builder
    missing = cls.extract_missing_special_type_names(schema, type_dict)
    return type_builder.create_type_variants(missing, type_dict)
Creates missing types for fields with a CardinalityField part . It is assumed that the primary type converter for cardinality = 1 is registered in the type dictionary .
1,823
def extract_missing_special_type_names(schema, type_dict):
    """Yield CardinalityField type names in schema absent from type_dict."""
    for name in FieldParser.extract_types(schema):
        if name in type_dict:
            continue
        if CardinalityField.matches_type(name):
            yield name
Extract the type names for fields with CardinalityField part . Selects only the missing type names that are not in the type dictionary .
1,824
def _check_operations(ctx, need_ops, arg):
    """Check an allow/deny caveat argument against the context operations.

    need_ops selects allow semantics: with no operations declared in
    the context, an allow caveat fails on its first named operation.

    :return: an error string, or None on success.
    """
    ctx_ops = ctx.get(OP_KEY, [])
    fields = arg.split()
    if not ctx_ops:
        if not need_ops:
            return None
        if not fields:
            return 'no operations allowed'
        return '{} not allowed'.format(fields[0])
    for op in ctx_ops:
        err = _check_op(op, need_ops, fields)
        if err is not None:
            return err
    return None
Checks an allow or a deny caveat . The need_ops parameter specifies whether we require all the operations in the caveat to be declared in the context .
1,825
def info(self):
    """Return all registered checkers sorted by (namespace, name)."""
    registered = list(self._checkers.values())
    registered.sort(key=lambda c: (c.ns, c.name))
    return registered
Returns information on all the registered checkers .
1,826
def register_std(self):
    """Register the standard namespace and every standard checker."""
    self._namespace.register(STD_NAMESPACE, '')
    for cond, checker in _ALL_CHECKERS.items():
        self.register(cond, STD_NAMESPACE, checker)
Registers all the standard checkers in the given checker .
1,827
def authorize(self, ctx, identity, ops):
    """Authorize each op by calling self._f(ctx, identity, op).

    :return: (allowed, caveats) -- one bool per op, plus all caveats
        accumulated across the calls.
    """
    allowed = []
    caveats = []
    for op in ops:
        ok, op_caveats = self._f(ctx, identity, op)
        allowed.append(ok)
        if op_caveats is not None:
            caveats.extend(op_caveats)
    return allowed, caveats
Implements Authorizer . authorize by calling f with the given identity for each operation .
1,828
def authorize(self, ctx, identity, ops):
    """Authorize ops by ACL membership of identity.

    A non-ACL identity is allowed only where public access is enabled
    and the ACL admits everyone.
    """
    if not ops:
        return [], []
    can_query_identity = isinstance(identity, ACLIdentity)
    allowed = []
    for op in ops:
        acl = self._get_acl(ctx, op)
        if can_query_identity:
            allowed.append(identity.allow(ctx, acl))
        else:
            allowed.append(self._allow_public and EVERYONE in acl)
    return allowed, []
Implements Authorizer . authorize by calling identity . allow to determine whether the identity is a member of the ACLs associated with the given operations .
1,829
def is_relevant(self, action, subject):
    """Return whether both the action and the subject match.

    Conditions are not checked here.  matches_subject is only consulted
    when the action matched (short-circuit preserved).
    """
    relevant = self.matches_action(action)
    if relevant:
        relevant = self.matches_subject(subject)
    return relevant
Matches both the subject and action not necessarily the conditions .
1,830
def is_valid(hal_id):
    """Check that hal_id is a well-formed HAL id (whole-string match)."""
    match = REGEX.match(hal_id)
    if match is None:
        return False
    # match() only anchors at the start; require the full string.
    return match.group(0) == hal_id
Check that a given HAL id is a valid one .
1,831
def extract_from_text(text):
    """Extract HAL ids found in text, without duplicates."""
    ids = [match[0] for match in REGEX.findall(text) if match != '']
    return tools.remove_duplicates(ids)
Extract HAL ids from a text .
1,832
def _getsolution(self, config, section, **kwargs):
    """Build a VSGSolution from a configparser section.

    Reads name, filename and visual_studio_version, then builds each
    referenced project section.

    :raise ValueError: for a missing section or missing VS version.
    """
    if section not in config:
        raise ValueError('Section [{}] not found in [{}]'.format(
            section, ', '.join(config.sections())))
    solution = VSGSolution(**kwargs)
    solution.Name = config.get(section, 'name', fallback=solution.Name)
    solution.FileName = os.path.normpath(
        config.get(section, 'filename', fallback=solution.FileName))
    solution.VSVersion = config.getfloat(
        section, 'visual_studio_version', fallback=solution.VSVersion)
    if not solution.VSVersion:
        raise ValueError('Solution section [%s] requires a value for '
                         'Visual Studio Version (visual_studio_version)'
                         % section)
    for project_section in config.getlist(section, 'projects', fallback=[]):
        project = self._getproject(config, project_section,
                                   VSVersion=solution.VSVersion)
        solution.Projects.append(project)
    return solution
Creates a VSG solution from a configparser instance .
1,833
def _getproject(self, config, section, **kwargs):
    """Build a VSG project from a configparser section.

    The section's mandatory 'type' option selects the project class via
    the 'vsgen.projects' entry point.

    :raise ValueError: for a missing section or missing 'type' option.
    """
    if section not in config:
        raise ValueError('Section [{}] not found in [{}]'.format(
            section, ', '.join(config.sections())))
    # FIX: the local previously shadowed the builtin `type`.
    project_type = config.get(section, 'type', fallback=None)
    if not project_type:
        raise ValueError('Section [{}] mandatory option "{}" not found'
                         .format(section, "type"))
    project_class = entrypoint('vsgen.projects', project_type)
    return project_class.from_section(config, section, **kwargs)
Creates a VSG project from a configparser instance .
1,834
def from_args(cls, **kwargs):
    """Build VSGSuite instances from parsed command-line arguments.

    'generate' reads one suite per configuration file; 'auto' builds a
    single suite from the current directory; anything else yields [].
    """
    command = kwargs.get('suite_commands', None)
    if command == 'generate':
        filenames = kwargs.pop('configuration_filenames', [])
        return [cls.from_file(f) for f in filenames]
    if command == 'auto':
        suite_type = kwargs.get('suite_type', None)
        return [cls.from_directory('', suite_type, **kwargs)]
    return []
Generates one or more VSGSuite instances from command line arguments .
1,835
def write(self, parallel=True):
    """Write all solutions and their projects to disk, then register them."""
    solutions = sorted(self._solutions, key=lambda s: s.Name)
    with VSGWriteCommand('Writing VSG Solution', solutions,
                         parallel) as command:
        command.execute()
    projects = set(sorted((p for s in solutions for p in s.Projects),
                          key=lambda p: p.Name))
    with VSGWriteCommand('Writing VSG Projects', projects,
                         parallel) as command:
        command.execute()
    # The same project collection is re-gathered for registration.
    registerables = set(sorted((p for s in solutions for p in s.Projects),
                               key=lambda p: p.Name))
    with VSGRegisterCommand('Registering Project Registerables',
                            registerables) as command:
        command.execute()
Writes the configuration to disk .
1,836
def bibitem_as_plaintext(bibitem):
    """Return a plaintext rendering of a .bbl bibitem via delatex.

    Falls back to the bundled opendetex binary when delatex is not on
    the PATH.
    """
    encoded = bibitem.encode("utf-8")
    try:
        output = subprocess.check_output(["delatex", "-s"], input=encoded)
    except FileNotFoundError:
        script_dir = os.path.dirname(os.path.abspath(__file__))
        fallback = "%s/../external/opendetex/delatex" % (script_dir,)
        output = subprocess.check_output([fallback, "-s"], input=encoded)
    return tools.clean_whitespaces(output.decode("utf-8"))
Return a plaintext representation of a bibitem from the . bbl file .
1,837
def split_type(cls, type_name):
    """Split a CardinalityField-suffixed type name into its parts.

    :return: tuple (basename, cardinality); names without a suffix map
        to Cardinality.one unchanged.
    """
    if not cls.matches_type(type_name):
        return (type_name, Cardinality.one)
    suffix = type_name[-1]
    return (type_name[:-1], cls.from_char_map[suffix])
Split type of a type name with CardinalityField suffix into its parts .
1,838
def make_type(cls, basename, cardinality):
    """Compose a type name per the CardinalityField naming scheme."""
    if cardinality is Cardinality.one:
        # cardinality=1 is the unadorned base name
        return basename
    return "%s%s" % (basename, cls.to_char_map[cardinality])
Build new type name according to CardinalityField naming scheme .
1,839
def create_missing_type_variants(cls, type_names, type_dict):
    """Create variants only for type names absent from type_dict."""
    missing = [name for name in type_names if name not in type_dict]
    return cls.create_type_variants(missing, type_dict)
Create missing type variants for types with a cardinality field .
1,840
def put_ops(self, key, time, ops):
    """Store ops under key only if nothing is stored there yet.

    Otherwise this is a no-op.  The time argument is accepted for
    interface compatibility but unused here.
    """
    existing = self._store.get(key)
    if existing is None:
        self._store[key] = ops
Put an ops only if not already there; otherwise it's a no-op.
1,841
def get_ops(self, key):
    """Return the ops stored under key.

    :raise KeyError: when no ops are stored under key.
    """
    stored = self._store.get(key)
    if stored is not None:
        return stored
    raise KeyError('cannot get operations for {}'.format(key))
Returns ops from the key if found otherwise raises a KeyError .
1,842
def _parse_local_location(loc):
    """Parse a 'local' caveat location (as made by LocalThirdPartyCaveat).

    Accepted forms: 'local <pubkey>' or 'local <version> <pubkey>'.

    :return: a ThirdPartyInfo, or None when loc is not a valid local
        location.
    """
    if not loc.startswith('local '):
        return None
    version = VERSION_1
    fields = loc.split()[1:]
    if len(fields) == 2:
        try:
            version = int(fields[0])
        except ValueError:
            return None
        fields = fields[1:]
    if len(fields) != 1:
        return None
    key = PublicKey.deserialize(fields[0])
    return ThirdPartyInfo(public_key=key, version=version)
Parse a local caveat location as generated by LocalThirdPartyCaveat .
1,843
def add_caveat(self, cav, key=None, loc=None):
    """Add a first- or third-party caveat to the macaroon.

    First-party caveats (no location) are resolved through the
    namespace.  Third-party caveats are encrypted with a fresh root
    key; 'local' locations use the public key embedded in the location.

    :param cav: a checkers.Caveat.
    :param key: private key required to encrypt third-party caveats.
    :param loc: third-party locator, required for non-local locations.
    :raise ValueError: on a missing key/locator or an invalid local
        caveat condition.
    """
    if cav.location is None:
        self._macaroon.add_first_party_caveat(
            self.namespace.resolve_caveat(cav).condition)
        return
    if key is None:
        raise ValueError('no private key to encrypt third party caveat')
    local_info = _parse_local_location(cav.location)
    if local_info is not None:
        info = local_info
        # FIX: was `cav.condition is not ''` -- an identity comparison
        # against a literal, which may pass for equal strings; use
        # equality so any non-empty condition is rejected.
        if cav.condition != '':
            raise ValueError('cannot specify caveat condition in '
                             'local third-party caveat')
        cav = checkers.Caveat(location='local', condition='true')
    else:
        if loc is None:
            raise ValueError('no locator when adding third party caveat')
        info = loc.third_party_info(cav.location)
    root_key = os.urandom(24)
    if self._version < info.version:
        # Never encode for a later bakery version than our own.
        info = ThirdPartyInfo(
            version=self._version,
            public_key=info.public_key,
        )
    caveat_info = encode_caveat(
        cav.condition, root_key, info, key, self._namespace)
    if info.version < VERSION_3:
        # Pre-v3 caveat ids carry the full payload themselves.
        id = caveat_info
    else:
        id = self._new_caveat_id(self._caveat_id_prefix)
        self._caveat_data[id] = caveat_info
    self._macaroon.add_third_party_caveat(cav.location, root_key, id)
Add a caveat to the macaroon .
1,844
def add_caveats(self, cavs, key, loc):
    """Add each caveat in cavs to the macaroon (no-op when cavs is None)."""
    for cav in cavs or []:
        self.add_caveat(cav, key, loc)
Add an array of caveats to the macaroon .
1,845
def to_dict(self):
    """Return a JSON-compatible dict representation of the macaroon.

    Pre-v3 macaroons serialize to the bare macaroon JSON; v3+ wraps it
    with the bakery version, namespace and base64-encoded external
    caveat data.
    """
    serializer = json_serializer.JsonSerializer()
    if self.version < VERSION_3:
        if len(self._caveat_data) > 0:
            raise ValueError('cannot serialize pre-version3 macaroon with '
                             'external caveat data')
        return json.loads(self._macaroon.serialize(serializer))
    serialized = {
        'm': json.loads(self._macaroon.serialize(serializer)),
        'v': self._version,
    }
    if self._namespace is not None:
        serialized['ns'] = self._namespace.serialize_text().decode('utf-8')
    caveat_data = {}
    for id in self._caveat_data:
        encoded_id = base64.b64encode(id).decode('utf-8')
        encoded_val = base64.b64encode(self._caveat_data[id]).decode('utf-8')
        caveat_data[encoded_id] = encoded_val
    if caveat_data:
        serialized['cdata'] = caveat_data
    return serialized
Return a dict representation of the macaroon data in JSON format .
1,846
def from_dict(cls, json_dict):
    """Return a Macaroon deserialized from a JSON-derived dict.

    A dict without an 'm' key is treated as a legacy bare macaroon
    (namespace defaults to the legacy namespace).  Otherwise the bakery
    version 'v' is validated against the wrapped macaroon's own
    version, and external caveat data ('cdata') is base64-decoded.

    :param json_dict: dict as loaded from JSON.
    :return: a Macaroon.
    :raise ValueError: on a missing/unknown version or an inconsistent
        underlying macaroon version.
    """
    json_macaroon = json_dict.get('m')
    if json_macaroon is None:
        # Legacy format: the whole dict is the macaroon itself.
        m = pymacaroons.Macaroon.deserialize(
            json.dumps(json_dict), json_serializer.JsonSerializer())
        macaroon = Macaroon(root_key=None, id=None,
                            namespace=legacy_namespace(),
                            version=_bakery_version(m.version))
        macaroon._macaroon = m
        return macaroon
    version = json_dict.get('v', None)
    if version is None:
        raise ValueError('no version specified')
    if (version < VERSION_3 or version > LATEST_VERSION):
        raise ValueError('unknown bakery version {}'.format(version))
    m = pymacaroons.Macaroon.deserialize(
        json.dumps(json_macaroon), json_serializer.JsonSerializer())
    # The wrapped macaroon's own version must agree with 'v'.
    if m.version != macaroon_version(version):
        raise ValueError('underlying macaroon has inconsistent version; '
                         'got {} want {}'.format(
                             m.version, macaroon_version(version)))
    namespace = checkers.deserialize_namespace(json_dict.get('ns'))
    cdata = json_dict.get('cdata', {})
    caveat_data = {}
    for id64 in cdata:
        # Caveat ids and payloads travel base64-encoded.
        id = b64decode(id64)
        data = b64decode(cdata[id64])
        caveat_data[id] = data
    macaroon = Macaroon(root_key=None, id=None, namespace=namespace,
                        version=version)
    macaroon._caveat_data = caveat_data
    macaroon._macaroon = m
    return macaroon
Return a macaroon obtained from the given dictionary as deserialized from JSON .
1,847
def deserialize_json(cls, serialized_json):
    """Return a Macaroon deserialized from a JSON string."""
    return Macaroon.from_dict(json.loads(serialized_json))
Return a macaroon deserialized from a string
1,848
def _new_caveat_id(self, base):
    """Return a fresh third-party caveat id.

    The id is the given base prefix (or a single VERSION_3 byte when
    the prefix is empty) followed by a uvarint counter, chosen so it
    collides with no existing third-party caveat id on the macaroon.
    """
    id = bytearray()
    if len(base) > 0:
        id.extend(base)
    else:
        # Empty prefix: lead with the version byte so the id is
        # distinguishable from older formats.
        id.append(VERSION_3)
    # Start the counter at the number of external caveats (a
    # likely-unused value) and probe upwards until no collision.
    i = len(self._caveat_data)
    caveats = self._macaroon.caveats
    while True:
        temp = id[:]
        encode_uvarint(i, temp)
        found = False
        for cav in caveats:
            # Only third-party caveats (those with a verification key
            # id) can collide.
            if (cav.verification_key_id is not None
                    and cav.caveat_id == temp):
                found = True
                break
        if not found:
            return bytes(temp)
        i += 1
Return a third party caveat id
1,849
def extract_macaroons(headers_or_request):
    """Return a list of macaroon arrays found in the given headers.

    Looks in 'macaroon-*' cookies and in the 'Macaroons' header.  If
    the argument provides a get_header method (request-like objects)
    it is used; otherwise dict-style .get is used.
    """
    def get_header(key, default=None):
        try:
            return headers_or_request.get_header(key, default)
        except AttributeError:
            # Plain mapping of headers.
            return headers_or_request.get(key, default)

    mss = []

    def add_macaroon(data):
        try:
            data = utils.b64decode(data)
            data_as_objs = json.loads(data.decode('utf-8'))
        except ValueError:
            # Deliberately best-effort: skip undecodable values rather
            # than failing the whole extraction.
            return
        ms = [utils.macaroon_from_dict(x) for x in data_as_objs]
        mss.append(ms)

    cookie_header = get_header('Cookie')
    if cookie_header is not None:
        cs = SimpleCookie()
        cs.load(str(cookie_header))
        for c in cs:
            if c.startswith('macaroon-'):
                add_macaroon(cs[c].value)
    # The Macaroons header holds a comma-separated list of values.
    macaroon_header = get_header('Macaroons')
    if macaroon_header is not None:
        for h in macaroon_header.split(','):
            add_macaroon(h)
    return mss
Returns an array of any macaroons found in the given slice of cookies . If the argument implements a get_header method that will be used instead of the get method to retrieve headers .
1,850
def _wait_for_macaroon(wait_url):
    """Fetch a discharge macaroon from a legacy wait endpoint."""
    headers = {BAKERY_PROTOCOL_HEADER: str(bakery.LATEST_VERSION)}
    response = requests.get(url=wait_url, headers=headers)
    if response.status_code != 200:
        raise InteractionError('cannot get {}'.format(wait_url))
    return bakery.Macaroon.from_dict(response.json().get('Macaroon'))
Returns a macaroon from a legacy wait endpoint .
1,851
def handle_error(self, error, url):
    """Resolve a discharge-required error for a request to url.

    Discharges the macaroon carried in error.info, serializes all
    resulting macaroons, and stores them in the client cookie jar as a
    'macaroon-<suffix>' cookie scoped to the error's macaroon path.

    :raise BakeryException: when the error carries no macaroon.
    """
    if error.info is None or error.info.macaroon is None:
        raise BakeryException('unable to read info in discharge error '
                              'response')
    discharges = bakery.discharge_all(
        error.info.macaroon,
        self.acquire_discharge,
        self.key,
    )
    # JSON array of all discharge macaroons, base64 for cookie safety.
    macaroons = '[' + ','.join(map(utils.macaroon_to_json_string,
                                   discharges)) + ']'
    all_macaroons = base64.urlsafe_b64encode(utils.to_bytes(macaroons))
    full_path = urljoin(url, error.info.macaroon_path)
    if error.info.cookie_name_suffix is not None:
        name = 'macaroon-' + error.info.cookie_name_suffix
    else:
        name = 'macaroon-auth'
    # Cookie lifetime follows the macaroons' expiry time.
    expires = checkers.macaroons_expiry_time(
        checkers.Namespace(), discharges)
    self.cookies.set_cookie(utils.cookie(
        name=name,
        value=all_macaroons.decode('ascii'),
        url=full_path,
        expires=expires,
    ))
Try to resolve the given error which should be a response to the given URL by discharging any macaroon contained in it . That is if error . code is ERR_DISCHARGE_REQUIRED then it will try to discharge err . info . macaroon . If the discharge succeeds the discharged macaroon will be saved to the client s cookie jar otherwise an exception will be raised .
1,852
def acquire_discharge(self, cav, payload):
    """Request a discharge macaroon from cav.location (an HTTP URL).

    On an interaction-required error, runs the interaction flow to
    obtain a token (or a macaroon directly) and retries the discharge
    with that token.

    :raise DischargeError: when the discharge ultimately fails.
    """
    # First attempt without a token.
    resp = self._acquire_discharge_with_token(cav, payload, None)
    if resp.status_code == 200:
        return bakery.Macaroon.from_dict(resp.json().get('Macaroon'))
    cause = Error.from_dict(resp.json())
    if cause.code != ERR_INTERACTION_REQUIRED:
        raise DischargeError(cause.message)
    if cause.info is None:
        raise DischargeError(
            'interaction-required response with no info: {}'.format(
                resp.json()))
    loc = cav.location
    if not loc.endswith('/'):
        loc = loc + '/'
    token, m = self._interact(loc, cause, payload)
    if m is not None:
        # The interaction already yielded the discharge macaroon.
        return m
    # Retry with the token gathered by the interaction.
    resp = self._acquire_discharge_with_token(cav, payload, token)
    if resp.status_code == 200:
        return bakery.Macaroon.from_dict(resp.json().get('Macaroon'))
    else:
        raise DischargeError(
            'discharge failed with code {}'.format(resp.status_code))
Request a discharge macaroon from the caveat location as an HTTP URL .
1,853
def _interact(self, location, error_info, payload):
    """Run the interaction flow for an interaction-required error.

    Tries each configured interactor whose kind appears in the error's
    interaction methods; falls back to the legacy visit-URL flow when
    no modern methods are advertised.

    :return: tuple (token, macaroon) -- exactly one of them is set.
    :raise InteractionError: when interaction is impossible or no
        supported method exists.
    """
    if (self._interaction_methods is None or
            len(self._interaction_methods) == 0):
        raise InteractionError('interaction required but not possible')
    if (error_info.info.interaction_methods is None and
            error_info.info.visit_url is not None):
        # Legacy server: only a visit URL is provided.
        return None, self._legacy_interact(location, error_info)
    for interactor in self._interaction_methods:
        # NOTE(review): assumes interaction_methods is a mapping here
        # whenever the legacy branch was not taken -- confirm upstream
        # always sets it alongside a missing visit_url.
        found = error_info.info.interaction_methods.get(
            interactor.kind())
        if found is None:
            continue
        try:
            token = interactor.interact(self, location, error_info)
        except InteractionMethodNotFound:
            # Server turned out not to support this method after all;
            # try the next interactor.
            continue
        if token is None:
            raise InteractionError('interaction method returned an empty '
                                   'token')
        return token, None
    raise InteractionError('no supported interaction method')
Gathers a macaroon by directing the user to interact with a web page . The error_info argument holds the interaction - required error response .
1,854
def dict2bibtex(data):
    """Serialize a single BibTeX entry dict to a BibTeX string.

    ENTRYTYPE and ID form the header; the remaining fields are emitted
    sorted by name.
    """
    header = '@' + data['ENTRYTYPE'] + '{' + data['ID'] + ",\n"
    body = ''
    for field in sorted(data):
        if field in ('ENTRYTYPE', 'ID'):
            continue
        body += "\t" + field + "={" + data[field] + "},\n"
    return header + body + "}\n\n"
Convert a single BibTeX entry dict to a BibTeX string .
1,855
def write(filename, data):
    """Create (or overwrite) a BibTeX file with the given database."""
    with open(filename, 'w') as out:
        out.write(bibdatabase2bibtex(data))
Create a new BibTeX file .
1,856
def edit(filename, identifier, data):
    """Update the entry identified by identifier in a BibTeX file.

    The entry is replaced with the first entry of data, then the file
    is rewritten.
    """
    with open(filename, 'r') as source:
        database = bibtexparser.load(source)
    database.entries_dict[identifier] = data.entries[0]
    write(filename, database)
Update an entry in a BibTeX file .
1,857
def delete(filename, identifier):
    """Remove the entry identified by identifier from a BibTeX file.

    A missing identifier is ignored; the file is rewritten either way.
    """
    with open(filename, 'r') as source:
        database = bibtexparser.load(source)
    try:
        del database.entries_dict[identifier]
    except KeyError:
        pass  # absent entry: deletion is a no-op
    write(filename, database)
Delete an entry in a BibTeX file .
1,858
def get(filename, ignore_fields=None):
    """Load all entries from a BibTeX file, dropping ignore_fields."""
    ignored = ignore_fields if ignore_fields is not None else []
    with open(filename, 'r') as source:
        database = bibtexparser.load(source)
    database.entries = [
        {k: entry[k] for k in entry if k not in ignored}
        for entry in database.entries
    ]
    return database
Get all entries from a BibTeX file .
1,859
def to_filename(data, mask=DEFAULT_PAPERS_FILENAME_MASK, extra_formatters=None):
    """Build a slugified filename for a BibTeX entry according to *mask*.

    Available placeholders: journal, title, year, first/last author
    surname, all author surnames, arxiv_version, plus any
    *extra_formatters* (callables taking the entry dict).
    """
    entry = data.entries[0]
    authors = re.split(' and ', entry['author'])
    surnames = [author.split(',')[0].strip() for author in authors]
    formatters = {
        "journal": entry.get("journal", ""),
        "title": entry.get("title", ""),
        "year": entry.get("year", ""),
        "first": surnames[0],
        "last": surnames[-1],
        "authors": ", ".join(surnames),
        "arxiv_version": "",
    }
    for name, formatter in (extra_formatters or {}).items():
        formatters[name] = formatter(entry)
    if "eprint" in entry:
        # Keep the trailing arXiv version marker (e.g. "v2") of the eprint id.
        formatters["arxiv_version"] = '-' + entry['eprint'][entry['eprint'].rfind('v'):]
    return tools.slugify(mask.format(**formatters))
Convert a bibtex entry to a formatted filename according to a given mask .
1,860
def bind_name(self, name):
    """Bind the field to its attribute name in the model class.

    A field may be bound only once; rebinding raises errors.Error.
    Returns self for chaining.
    """
    if self.name:
        raise errors.Error('Already bound "{0}" with name "{1}" could not '
                           'be rebound'.format(self, self.name))
    self.name = name
    # The actual value lives in a private slot on the model instance.
    self.storage_name = '_' + self.name
    return self
Bind field to its name in model class .
1,861
def bind_model_cls(self, model_cls):
    """Bind the field to its owning model class (one-shot, chainable)."""
    if self.model_cls:
        raise errors.Error('"{0}" has been already bound to "{1}" and '
                           'could not be rebound to "{2}"'.format(
                               self, self.model_cls, model_cls))
    self.model_cls = model_cls
    return self
Bind field to model class .
1,862
def init_model(self, model, value):
    """Initialise *model* with this field's value.

    When *value* is None the field default is used instead; a callable
    default is invoked to produce the value.
    """
    if value is None and self.default is not None:
        default = self.default
        value = default() if callable(default) else default
    self.set_value(model, value)
Init model with field .
1,863
def get_value(self, model, default=None):
    """Return the field's stored value for *model*.

    Falls back to *default* (run through the field converter) when the
    stored value is None.
    """
    if default is not None:
        default = self._converter(default)
    stored = getattr(model, self.storage_name)
    return stored if stored is not None else default
Return the field's value.
1,864
def set_value(self, model, value):
    """Convert and store *value* on *model*.

    Raises AttributeError when None is assigned to a required field;
    non-None values are passed through the field converter first.
    """
    if value is None:
        if self.required:
            raise AttributeError("This field is required.")
    else:
        value = self._converter(value)
    setattr(model, self.storage_name, value)
Set the field's value.
1,865
def _get_model_instance ( model_cls , data ) : if not isinstance ( data , ( model_cls , dict ) ) : raise TypeError ( '{0} is not valid type, instance of ' '{1} or dict required' . format ( data , model_cls ) ) return model_cls ( ** data ) if isinstance ( data , dict ) else data
Convert dict into object of class of passed model .
1,866
def get_builtin_type(self, model):
    """Return the collection as built-in types.

    Related-model items are unpacked via get_data(); everything else is
    passed through unchanged.
    """
    converted = []
    for item in self.get_value(model):
        if isinstance(item, self.related_model_cls):
            converted.append(item.get_data())
        else:
            converted.append(item)
    return converted
Return built - in type representation of Collection .
1,867
def gw_get(object_dict, name=None, plugin=None):
    """Retrieve objects from *object_dict*.

    With neither filter, the whole dict is returned. With *plugin* only,
    a dict of entries registered by that plugin. With *name*, the single
    matching object (also matching *plugin* when given) or None.
    """
    if name is None:
        if plugin is None:
            return object_dict
        return {key: obj for key, obj in object_dict.items()
                if obj.plugin == plugin}
    if name not in object_dict:
        return None
    obj = object_dict[name]
    if plugin is not None and obj.plugin != plugin:
        return None
    return obj
Getter function to retrieve objects from a given object dictionary .
1,868
def http_error_handler(f):
    """Decorator translating HttpError responses into rich exceptions.

    404 becomes ResourceNotFound/CollectionNotFound (after emitting a
    'deleted' event); 409 conflict messages are parsed into
    ChildrenExists/BackRefsExists carrying the blocking resources.
    Unrecognized errors are re-raised unchanged.
    """

    def hrefs_to_resources(hrefs):
        # *hrefs* is a comma/space separated list of resource URLs; the
        # last two path segments are the resource type and uuid.
        for href in hrefs.replace(',', '').split():
            type, uuid = href.split('/')[-2:]
            yield Resource(type, uuid=uuid)

    def hrefs_list_to_resources(hrefs_list):
        # NOTE(review): eval() on server-provided text — this trusts the
        # API server; ast.literal_eval would be safer. Flagged, not changed.
        for href in eval(hrefs_list):
            type, uuid = href.split('/')[-2:]
            yield Resource(type, uuid=uuid)

    @wraps(f)
    def wrapper(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except HttpError as e:
            if e.http_status == 404:
                self.emit('deleted', self)
                if isinstance(self, Resource):
                    raise ResourceNotFound(resource=self)
                elif isinstance(self, Collection):
                    raise CollectionNotFound(collection=self)
            elif e.http_status == 409:
                # Newer servers report blockers as a python-list literal...
                matches = re.match(
                    r'^Delete when children still present: (\[[^]]*\])',
                    e.message)
                if matches:
                    raise ChildrenExists(
                        resources=list(hrefs_list_to_resources(matches.group(1))))
                matches = re.match(
                    r'^Delete when resource still referred: (\[[^]]*\])',
                    e.message)
                if matches:
                    raise BackRefsExists(
                        resources=list(hrefs_list_to_resources(matches.group(1))))
                # ...older servers as a plain comma-separated href list.
                matches = re.match(r'^Children (.*) still exist$', e.message)
                if matches:
                    raise ChildrenExists(
                        resources=list(hrefs_to_resources(matches.group(1))))
                matches = re.match(r'^Back-References from (.*) still exist$',
                                   e.message)
                if matches:
                    raise BackRefsExists(
                        resources=list(hrefs_to_resources(matches.group(1))))
            # Anything we could not classify is propagated as-is.
            raise
    return wrapper
Handle 404 and 409 errors returned by the API server
1,869
def href(self):
    """Return the URL of the resource on the API server.

    Non-root collection endpoints use the pluralized resource path.
    """
    path = self.path
    url = self.session.base_url + str(path)
    if path.is_collection and not path.is_root:
        url += 's'
    return url
Return URL of the resource
1,870
def filter(self, field_name, field_value):
    """Add a permanent (field, value) filter to the collection; chainable."""
    entry = (field_name, field_value)
    self.filters.append(entry)
    return self
Add permanent filter on the collection
1,871
def fetch(self, recursive=1, fields=None, detail=None, filters=None,
          parent_uuid=None, back_refs_uuid=None):
    """Fetch the collection from the API server.

    When this is the root collection (no type), the payload lists
    sub-collections which are fetched recursively while recursive > 0;
    otherwise the payload's resource lists are wrapped into Resource
    objects. Returns self.
    """
    params = self._format_fetch_params(fields=fields, detail=detail,
                                       filters=filters,
                                       parent_uuid=parent_uuid,
                                       back_refs_uuid=back_refs_uuid)
    data = self.session.get_json(self.href, **params)
    if not self.type:
        # Root collection: build a child Collection per "collection" link,
        # propagating the fetch options and a decremented recursion budget.
        self.data = [Collection(col["link"]["name"],
                                fetch=recursive - 1 > 0,
                                recursive=recursive - 1,
                                fields=self._fetch_fields(fields),
                                detail=detail or self.detail,
                                filters=self._fetch_filters(filters),
                                parent_uuid=self._fetch_parent_uuid(parent_uuid),
                                back_refs_uuid=self._fetch_back_refs_uuid(back_refs_uuid))
                     for col in data['links']
                     if col["link"]["rel"] == "collection"]
    else:
        # Typed collection: wrap each returned item into a Resource.
        # res.get(self.type, res) unwraps detail payloads keyed by type.
        self.data = [Resource(self.type,
                              fetch=recursive - 1 > 0,
                              recursive=recursive - 1,
                              **res.get(self.type, res))
                     for res_type, res_list in data.items()
                     for res in res_list]
    return self
Fetch collection from API server
1,872
def check(self):
    """Check that the resource exists on the API server.

    Resolves the missing identifier as a side effect: uuid from fq_name,
    or fq_name from uuid. Always returns True.
    """
    fq_name = self.fq_name
    if fq_name:
        self['uuid'] = self._check_fq_name(fq_name)
    elif self.uuid:
        self['fq_name'] = self._check_uuid(self.uuid)
    return True
Check that the resource exists .
1,873
def fq_name(self):
    """Return the fully-qualified name of the resource.

    Falls back from the 'fq_name' key to the 'to' key (used in reference
    payloads), then to the parent class property.
    """
    return self.get('fq_name', self.get('to', super(Resource, self).fq_name))
Return FQDN of the resource
1,874
def parent(self):
    """Return the parent resource.

    Raises ResourceMissing when the resource carries no parent
    attributes.
    """
    try:
        return Resource(self['parent_type'],
                        uuid=self['parent_uuid'],
                        check=True)
    except KeyError:
        raise ResourceMissing('%s has no parent resource' % self)
Return parent resource
1,875
def parent(self, resource):
    """Set *resource* as the parent, verifying it exists first."""
    resource.check()
    for key, value in (('parent_type', resource.type),
                       ('parent_uuid', resource.uuid)):
        self[key] = value
Set parent resource
1,876
def created(self):
    """Return the resource creation date as a datetime."""
    if 'id_perms' not in self:
        # The timestamp lives in id_perms; fetch the resource when missing.
        self.fetch()
    timestamp = self['id_perms']['created']
    return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f')
Return creation date
1,877
def save(self):
    """Create or update the resource on the API server, then re-fetch it."""
    payload = {self.type: dict(self.data)}
    if self.path.is_collection:
        # No uuid yet: POST to the collection endpoint to create.
        self.session.post_json(self.href, payload, cls=ResourceEncoder)
    else:
        self.session.put_json(self.href, payload, cls=ResourceEncoder)
    return self.fetch(exclude_children=True, exclude_back_refs=True)
Save the resource to the API server
1,878
def delete(self):
    """Delete the resource on the API server and emit a 'deleted' event."""
    response = self.session.delete(self.href)
    self.emit('deleted', self)
    return response
Delete resource from the API server
1,879
def fetch(self, recursive=1, exclude_children=False, exclude_back_refs=False):
    """Fetch the resource payload from the API server into self."""
    if not self.path.is_resource and not self.path.is_uuid:
        # Only a fq_name so far: resolve the uuid first.
        self.check()
    params = {}
    if exclude_children:
        params['exclude_children'] = True
    if exclude_back_refs:
        params['exclude_back_refs'] = True
    payload = self.session.get_json(self.href, **params)
    self.from_dict(payload[self.type])
    return self
Fetch resource from the API server
1,880
def from_dict(self, data, recursive=1):
    """Populate the resource from a plain dict payload."""
    self.data = self._encode_resource(data, recursive=recursive)
Populate the resource from a python dict
1,881
def remove_ref(self, ref):
    """Remove the reference from self to *ref*, then return re-fetched self."""
    self.session.remove_ref(self, ref)
    return self.fetch()
Remove reference from self to ref
1,882
def set_ref(self, ref, attr=None):
    """Append a reference to *ref* on this resource; chainable.

    The *attr* parameter is currently unused — kept for signature
    compatibility with add_ref.
    """
    # e.g. a 'virtual-network' target is stored under 'virtual_network_refs'.
    ref_attr = '%s_refs' % ref.type.replace('-', '_')
    entry = {'to': ref.fq_name, 'uuid': ref.uuid}
    if ref_attr in self:
        self[ref_attr].append(entry)
    else:
        self[ref_attr] = [entry]
    return self
Set reference to resource
1,883
def add_ref(self, ref, attr=None):
    """Add a reference from self to *ref* via the API, then re-fetch self."""
    self.session.add_ref(self, ref, attr)
    return self.fetch()
Add reference to resource
1,884
def add_back_ref(self, back_ref, attr=None):
    """Create the reference on *back_ref* pointing at self, then re-fetch self."""
    back_ref.add_ref(self, attr)
    return self.fetch()
Add reference from back_ref to self
1,885
def _search ( self , trie , strings , limit = None ) : results = [ trie . has_keys_with_prefix ( s ) for s in strings ] if not any ( results ) : return [ ] for result , s in zip ( results , strings ) : if result is True : return trie . values ( s ) [ : limit ]
Search in cache
1,886
def register(self, signal, plugin, description=""):
    """Register a new signal for *plugin* and return the Signal object.

    Raises when another plugin already registered the same name.
    """
    if signal in self.signals:
        raise Exception("Signal %s was already registered by %s"
                        % (signal, self.signals[signal].plugin.name))
    new_signal = Signal(signal, plugin, self._namespace, description)
    self.signals[signal] = new_signal
    self.__log.debug("Signal %s registered by %s" % (signal, plugin.name))
    return new_signal
Registers a new signal .
1,887
def unregister(self, signal):
    """Unregister an existing signal.

    Unknown signals are not an error; a debug message is logged either
    way.

    :param signal: name of the signal to remove.
    """
    if signal in self.signals:
        del self.signals[signal]
        self.__log.debug("Signal %s unregistered" % signal)
    else:
        # BUG FIX: the '%' argument was missing, so the raw '%s'
        # placeholder was logged instead of the signal name.
        self.__log.debug(
            "Signal %s does not exist and could not be unregistered."
            % signal)
Unregisters an existing signal
1,888
def disconnect(self, receiver):
    """Disconnect a registered receiver and drop it from the registry.

    Raises when *receiver* was never registered.
    """
    try:
        connection = self.receivers[receiver]
    except KeyError:
        raise Exception("No receiver %s was registered" % receiver)
    connection.disconnect()
    del self.receivers[receiver]
    self.__log.debug("Receiver %s disconnected" % receiver)
Disconnect a receiver from a signal . Signal and receiver must exist otherwise an exception is thrown .
1,889
def get(self, signal=None, plugin=None):
    """Get one or more signals.

    With neither filter, all signals. With *plugin* only, a dict of that
    plugin's signals. With *signal*, the single matching Signal (also
    matching *plugin* when given) or None.
    """
    if signal is None:
        if plugin is None:
            return self.signals
        return {key: sig for key, sig in self.signals.items()
                if sig.plugin == plugin}
    if signal not in self.signals:
        return None
    found = self.signals[signal]
    if plugin is not None and found.plugin != plugin:
        return None
    return found
Get one or more signals .
1,890
def get_receiver(self, receiver=None, plugin=None):
    """Get one or more receivers.

    Mirrors get(): no filter returns all receivers, *plugin* filters by
    owning plugin, *receiver* selects a single entry (or None).
    """
    if receiver is None:
        if plugin is None:
            return self.receivers
        return {key: rec for key, rec in self.receivers.items()
                if rec.plugin == plugin}
    if receiver not in self.receivers:
        return None
    found = self.receivers[receiver]
    if plugin is not None and found.plugin != plugin:
        return None
    return found
Get one or more receivers .
1,891
def start(inqueue, outqueue=None):
    """Start the listener with incoming and (optional) outgoing queues."""
    conf.init()
    db.init(conf.DbPath)
    listener = Listener(inqueue, outqueue)
    listener.run()
Starts the listener with incoming and outgoing queues .
1,892
def main():
    """Entry point for stand-alone execution.

    Reads commands from stdin, echoes output to stdout (suppressed with
    --quiet), and starts the configured input listeners.
    """
    conf.init()
    db.init(conf.DbPath)
    inqueue = LineQueue(sys.stdin).queue

    class StdoutQueue(object):
        # Minimal queue-alike echoing each item on the current line.
        def put(self, item):
            print("\r%s" % item, end=" ")

    outqueue = None if "--quiet" in sys.argv else StdoutQueue()
    if conf.MouseEnabled:
        inqueue.put("mouse_start")
    if conf.KeyboardEnabled:
        inqueue.put("keyboard_start")
    start(inqueue, outqueue)
Entry point for stand - alone execution .
1,893
def _handle_windows(self, event):
    """Windows key event handler.

    NOTE(review): assumes a pyHook-style keyboard event object
    (GetKey/Ascii/KeyID/Message/IsExtended...) — confirm against the
    hook setup elsewhere in the file.
    """
    vkey = self._keyname(event.GetKey())
    # Track modifier state on both key-down and key-up transitions.
    if event.Message in self.KEYS_UP + self.KEYS_DOWN:
        if vkey in self.MODIFIERNAMES:
            self._realmodifiers[vkey] = event.Message in self.KEYS_DOWN
            self._modifiers[self.MODIFIERNAMES[vkey]] = self._realmodifiers[vkey]
    # Only key-down events produce output.
    if event.Message not in self.KEYS_DOWN:
        return True
    is_altgr = False
    if (vkey, event.IsExtended()) in self.NUMPAD_SPECIALS:
        key = vkey = "Numpad-" + vkey
    elif not event.Ascii or vkey.startswith("Numpad"):
        # Non-printable key: use the virtual key name.
        key = vkey
    else:
        is_altgr = event.Ascii in self.ALT_GRS
        key = self._keyname(unichr(event.Ascii))
    if DEBUG:
        print("Adding key %s (real %s)" % (key.encode("utf-8"),
                                           vkey.encode("utf-8")))
    self._output(type="keys", key=key, realkey=vkey)
    # Emit a combo event when modifiers other than plain Shift are held
    # (and the key itself is not a modifier or AltGr product).
    if vkey not in self.MODIFIERNAMES and not is_altgr:
        modifier = "-".join(k for k in ["Ctrl", "Alt", "Shift", "Win"]
                            if self._modifiers[k])
        if modifier and modifier != "Shift":
            if self._modifiers["Ctrl"] and event.Ascii:
                # Ctrl maps Ascii to control codes; recover the real key.
                key = self._keyname(unichr(event.KeyID))
            realmodifier = "-".join(k for k, v in self._realmodifiers.items() if v)
            realkey = "%s-%s" % (realmodifier, key)
            key = "%s-%s" % (modifier, key)
            if DEBUG:
                print("Adding combo %s (real %s)" % (key.encode("utf-8"),
                                                     realkey.encode("utf-8")))
            self._output(type="combos", key=key, realkey=realkey)
    if DEBUG:
        print("CHARACTER: %r" % key)
        print('GetKey: {0}'.format(event.GetKey()))
        print('IsAlt: {0}'.format(event.IsAlt()))
        print('IsExtended: {0}'.format(event.IsExtended()))
        print('IsInjected: {0}'.format(event.IsInjected()))
        print('IsTransition: {0}'.format(event.IsTransition()))
        print('ASCII: {0}'.format(event.Ascii))
        print('KeyID: {0}'.format(event.KeyID))
        print('ScanCode: {0}'.format(event.ScanCode))
        print('Message: {0}'.format(event.Message))
        print()
    return True
Windows key event handler .
1,894
def _handle_mac(self, keycode):
    """Mac key event handler: report the key as both key and realkey."""
    name = self._keyname(unichr(keycode))
    self._output(type="keys", key=name, realkey=name)
Mac key event handler
1,895
def _handle_linux(self, keycode, character, press):
    """Linux key event handler.

    Tracks modifier state on press/release and emits a "keys" event on
    every press, plus a "combos" event when non-Shift modifiers are held.
    """
    if character is None:
        return
    key = self._keyname(character, keycode)
    if key in self.MODIFIERNAMES:
        self._modifiers[self.MODIFIERNAMES[key]] = press
        self._realmodifiers[key] = press
    if press:
        self._output(type="keys", key=key, realkey=key)
    if press and key not in self.MODIFIERNAMES:
        modifier = "-".join(k for k in ["Ctrl", "Alt", "Shift", "Win"]
                            if self._modifiers[k])
        # Plain Shift is part of normal typing, not a combo.
        if modifier and modifier != "Shift":
            realmodifier = "-".join(k for k, v in self._realmodifiers.items() if v)
            realkey = "%s-%s" % (realmodifier, key)
            key = "%s-%s" % (modifier, key)
            if DEBUG:
                print("Adding combo %s (real %s)" % (key.encode("utf-8"),
                                                     realkey.encode("utf-8")))
            self._output(type="combos", key=key, realkey=realkey)
Linux key event handler .
1,896
def _store_documentation(self, path, html, overwrite, quiet):
    """Store all registered application documents on the file system.

    :param path: target directory (made absolute when relative).
    :param html: render documents to HTML instead of raw text.
    :param overwrite: continue even when the target directory is not empty.
    :param quiet: skip the interactive confirmation prompt.
    """
    echo("Storing groundwork application documents\n")
    echo("Application: %s" % self.app.name)
    echo("Number of documents: %s\n" % len(self.app.documents.get()))
    if not os.path.isabs(path):
        path = os.path.abspath(path)
    # BUG FIX: existence must be checked before isdir(), otherwise a
    # missing path was reported as "not a directory" and the
    # "does not exist" branch was unreachable.
    if not os.path.exists(path):
        echo("Path %s does not exist" % path)
        sys.exit(1)
    if not os.path.isdir(path):
        echo("Path %s is not a directory!" % path)
        sys.exit(1)
    # Refuse to write into a non-empty tree unless overwrite was given.
    for dirpath, dirnames, files in os.walk(path):
        if files:
            echo("Path %s is not empty!\n" % path)
            if not overwrite:
                sys.exit(1)
    documents = []
    for key, document in self.app.documents.get().items():
        # Document name -> lowercase, whitespace-free file name.
        file_extension = ".html" if html else ".rst"
        file_name = "".join(key.lower().split()) + file_extension
        documents.append((file_name, document))
    echo("Going to write to following files:")
    for document in documents:
        echo(" %s" % document[0])
    echo("\nTarget directory: %s" % path)
    answer = None
    while answer not in ["N", "Y"] and not quiet:
        answer = prompt("Shall we go on? [Y]es, [N]o: ").upper()
        if answer == "N":
            sys.exit(0)
    for document in documents:
        try:
            with open(os.path.join(path, document[0]), "w") as doc_file:
                # Documents are Jinja templates rendered with app/plugin.
                doc_rendered = Environment().from_string(
                    document[1].content).render(app=self.app,
                                                plugin=document[1].plugin)
                if html:
                    output = publish_parts(doc_rendered,
                                           writer_name="html")['whole']
                else:
                    output = doc_rendered
                doc_file.write(output)
        except Exception as e:
            echo("%s error occurred: %s" % (document[0], e))
        else:
            echo("%s stored." % document[0])
Stores all documents on the file system .
1,897
def _show_documentation(self):
    """Show all documents of the current groundwork app in the console.

    Simple interactive pager: documents are sorted by name with the
    "main" document pinned first; N/P/X navigate next/previous/exit.
    """
    documents = []
    for key, document in self.app.documents.get().items():
        if key != "main":
            documents.append((key, document))
    documents = sorted(documents, key=lambda x: x[0])
    # Pin the "main" document (if any) to the front of the list.
    main = self.app.documents.get("main")
    if main is not None:
        documents.insert(0, (main.name, main))
    user_answer = ""
    index = 0
    while user_answer != "X":
        # Clamp the index to the valid range after N/P navigation.
        if index < 0:
            index = 0
        if index > len(documents) - 1:
            index = len(documents) - 1
        document = documents[index][1]
        os.system('cls' if os.name == 'nt' else 'clear')
        # Documents are Jinja templates rendered with app/plugin context.
        echo(Environment().from_string(document.content).render(
            app=self.app, plugin=document.plugin))
        source = "This document is registered by '%s' under the name '%s'" \
                 % (document.plugin.name, document.name)
        echo("-" * len(source))
        echo(source)
        echo("-" * len(source))
        # Offer only the actions valid at this position.
        commands = "Actions: "
        if index < len(documents) - 1:
            commands += "[N]ext, "
        if index > 0:
            commands += "[P]revious, "
        commands += "E[x]it"
        echo(commands)
        if index < len(documents) - 1:
            default = "N"
        elif index > 0:
            default = "P"
        else:
            default = "X"
        user_answer = prompt("Select your action", default=default).upper()
        if user_answer == "N":
            index += 1
        elif user_answer == "P":
            index -= 1
Shows all documents of the current groundwork app in the console .
1,898
def execute_cleanup_tasks(ctx, cleanup_tasks, dry_run=False):
    """Run each cleanup task through an Executor as part of the cleanup."""
    executor = Executor(cleanup_tasks, ctx.config)
    for task in cleanup_tasks.tasks:
        print("CLEANUP TASK: %s" % task)
        executor.execute((task, dict(dry_run=dry_run)))
Execute several cleanup tasks as part of the cleanup .
1,899
def entrypoints(section):
    """Return a name -> loaded object mapping for an entry-point section."""
    loaded = {}
    for entry_point in pkg_resources.iter_entry_points(section):
        loaded[entry_point.name] = entry_point.load()
    return loaded
Returns the Entry Point for a given Entry Point section .