idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
54,900 | def _erf ( x ) : T = [ 9.60497373987051638749E0 , 9.00260197203842689217E1 , 2.23200534594684319226E3 , 7.00332514112805075473E3 , 5.55923013010394962768E4 , ] U = [ 3.35617141647503099647E1 , 5.21357949780152679795E2 , 4.59432382970980127987E3 , 2.26290000613890934246E4 , 4.92673942608635921086E4 , ] if x == 0 : return 0 if x >= MAXVAL : return 1 if x <= - MAXVAL : return - 1 if abs ( x ) > 1 : return 1 - erfc ( x ) z = x * x return x * _polevl ( z , T , 4 ) / _p1evl ( z , U , 5 ) | Port of cephes ndtr . c erf function . |
54,901 | def _erfc ( a ) : P = [ 2.46196981473530512524E-10 , 5.64189564831068821977E-1 , 7.46321056442269912687E0 , 4.86371970985681366614E1 , 1.96520832956077098242E2 , 5.26445194995477358631E2 , 9.34528527171957607540E2 , 1.02755188689515710272E3 , 5.57535335369399327526E2 , ] Q = [ 1.32281951154744992508E1 , 8.67072140885989742329E1 , 3.54937778887819891062E2 , 9.75708501743205489753E2 , 1.82390916687909736289E3 , 2.24633760818710981792E3 , 1.65666309194161350182E3 , 5.57535340817727675546E2 , ] R = [ 5.64189583547755073984E-1 , 1.27536670759978104416E0 , 5.01905042251180477414E0 , 6.16021097993053585195E0 , 7.40974269950448939160E0 , 2.97886665372100240670E0 , ] S = [ 2.26052863220117276590E0 , 9.39603524938001434673E0 , 1.20489539808096656605E1 , 1.70814450747565897222E1 , 9.60896809063285878198E0 , 3.36907645100081516050E0 , ] if a == 0 : return 1 if a >= MAXVAL : return 0 if a <= - MAXVAL : return 2 x = a if a < 0 : x = - a if x < 1 : return 1 - erf ( a ) z = - a * a z = math . exp ( z ) if x < 8 : p = _polevl ( x , P , 8 ) q = _p1evl ( x , Q , 8 ) else : p = _polevl ( x , R , 5 ) q = _p1evl ( x , S , 6 ) y = ( z * p ) / q if a < 0 : y = 2 - y return y | Port of cephes ndtr . c erfc function . |
54,902 | def erfinv ( z ) : if abs ( z ) > 1 : raise ValueError ( "`z` must be between -1 and 1 inclusive" ) if z == 0 : return 0 if z == 1 : return inf if z == - 1 : return - inf return _ndtri ( ( z + 1 ) / 2.0 ) / math . sqrt ( 2 ) | Calculate the inverse error function at point z . |
54,903 | def get_cmap ( name , lut = None ) : if name in rcParams [ 'colors.cmaps' ] : colors = rcParams [ 'colors.cmaps' ] [ name ] lut = lut or len ( colors ) return FixedColorMap . from_list ( name = name , colors = colors , N = lut ) elif name in _cmapnames : colors = _cmapnames [ name ] lut = lut or len ( colors ) return FixedColorMap . from_list ( name = name , colors = colors , N = lut ) else : cmap = mpl_get_cmap ( name ) if lut is not None and cmap . N != lut : cmap = FixedColorMap . from_list ( name = cmap . name , colors = cmap ( np . linspace ( 0 , 1 , lut ) ) , N = lut ) return cmap | Returns the specified colormap . |
54,904 | def _get_cmaps ( names ) : import matplotlib . pyplot as plt available_cmaps = list ( chain ( plt . cm . cmap_d , _cmapnames , rcParams [ 'colors.cmaps' ] ) ) names = list ( names ) wrongs = [ ] for arg in ( arg for arg in names if ( not isinstance ( arg , Colormap ) and arg not in available_cmaps ) ) : if isinstance ( arg , str ) : similarkeys = get_close_matches ( arg , available_cmaps ) if similarkeys != [ ] : warn ( "Colormap %s not found in standard colormaps.\n" "Similar colormaps are %s." % ( arg , ', ' . join ( similarkeys ) ) ) else : warn ( "Colormap %s not found in standard colormaps.\n" "Run function without arguments to see all colormaps" % arg ) names . remove ( arg ) wrongs . append ( arg ) if not names and not wrongs : names = sorted ( m for m in available_cmaps if not m . endswith ( "_r" ) ) return names | Filter the given names for colormaps |
54,905 | def show_colormaps ( names = [ ] , N = 10 , show = True , use_qt = None ) : names = safe_list ( names ) if use_qt or ( use_qt is None and psyplot . with_gui ) : from psy_simple . widgets . colors import ColormapDialog from psyplot_gui . main import mainwindow return ColormapDialog . show_colormap ( names , N , show , parent = mainwindow ) import matplotlib . pyplot as plt a = np . vstack ( ( np . linspace ( 0 , 1 , 256 ) . reshape ( 1 , - 1 ) ) ) cmaps = _get_cmaps ( names ) nargs = len ( cmaps ) + 1 fig = plt . figure ( figsize = ( 5 , 10 ) ) fig . subplots_adjust ( top = 0.99 , bottom = 0.01 , left = 0.2 , right = 0.99 ) for i , m in enumerate ( cmaps ) : ax = plt . subplot ( nargs , 1 , i + 1 ) plt . axis ( "off" ) plt . pcolormesh ( a , cmap = get_cmap ( m , N + 1 ) ) pos = list ( ax . get_position ( ) . bounds ) fig . text ( pos [ 0 ] - 0.01 , pos [ 1 ] , m , fontsize = 10 , horizontalalignment = 'right' ) fig . canvas . set_window_title ( "Figure %i: Predefined colormaps" % fig . number ) if show : plt . show ( block = False ) return fig | Function to show standard colormaps from pyplot |
54,906 | def _create_stdout_logger ( logging_level ) : out_hdlr = logging . StreamHandler ( sys . stdout ) out_hdlr . setFormatter ( logging . Formatter ( '[%(asctime)s] %(message)s' , "%H:%M:%S" ) ) out_hdlr . setLevel ( logging_level ) for name in LOGGING_NAMES : log = logging . getLogger ( name ) log . addHandler ( out_hdlr ) log . setLevel ( logging_level ) | create a logger to stdout . This creates logger for a series of module we would like to log information on . |
54,907 | def main ( ) : docraptor = DocRaptor ( ) print ( "Create PDF" ) resp = docraptor . create ( { "document_content" : "<h1>python-docraptor</h1><p>Async Test</p>" , "test" : True , "async" : True , } ) print ( "Status ID: {status_id}" . format ( status_id = resp [ "status_id" ] ) ) status_id = resp [ "status_id" ] resp = docraptor . status ( status_id ) print ( " {status}" . format ( status = resp [ "status" ] ) ) while resp [ "status" ] != "completed" : time . sleep ( 3 ) resp = docraptor . status ( status_id ) print ( " {status}" . format ( status = resp [ "status" ] ) ) print ( "Download to test_async.pdf" ) with open ( "test_async.pdf" , "wb" ) as pdf_file : pdf_file . write ( docraptor . download ( resp [ "download_key" ] ) . content ) print ( "[DONE]" ) | Generate a PDF using the async method . |
54,908 | def get_alternate_types_resolving_forwardref_union_and_typevar ( typ , _memo : List [ Any ] = None ) -> Tuple [ Any , ... ] : _memo = _memo or [ ] if typ in _memo : return tuple ( ) _memo . append ( typ ) if is_typevar ( typ ) : if hasattr ( typ , '__bound__' ) and typ . __bound__ is not None : if hasattr ( typ , '__contravariant__' ) and typ . __contravariant__ : raise Exception ( 'Contravariant TypeVars are not supported' ) else : return get_alternate_types_resolving_forwardref_union_and_typevar ( typ . __bound__ , _memo = _memo ) elif hasattr ( typ , '__constraints__' ) and typ . __constraints__ is not None : if hasattr ( typ , '__contravariant__' ) and typ . __contravariant__ : raise Exception ( 'Contravariant TypeVars are not supported' ) else : return tuple ( typpp for c in typ . __constraints__ for typpp in get_alternate_types_resolving_forwardref_union_and_typevar ( c , _memo = _memo ) ) else : return object , elif is_union_type ( typ ) : return tuple ( t for typpp in get_args ( typ , evaluate = True ) for t in get_alternate_types_resolving_forwardref_union_and_typevar ( typpp , _memo = _memo ) ) elif is_forward_ref ( typ ) : return get_alternate_types_resolving_forwardref_union_and_typevar ( resolve_forward_ref ( typ ) , _memo = _memo ) else : return typ , | Returns a tuple of all alternate types allowed by the typ type annotation . |
54,909 | def robust_isinstance ( inst , typ ) -> bool : if typ is Any : return True if is_typevar ( typ ) : if hasattr ( typ , '__constraints__' ) and typ . __constraints__ is not None : typs = get_args ( typ , evaluate = True ) return any ( robust_isinstance ( inst , t ) for t in typs ) elif hasattr ( typ , '__bound__' ) and typ . __bound__ is not None : return robust_isinstance ( inst , typ . __bound__ ) else : return True else : if is_union_type ( typ ) : typs = get_args ( typ , evaluate = True ) return any ( robust_isinstance ( inst , t ) for t in typs ) else : return isinstance ( inst , get_base_generic_type ( typ ) ) | Similar to isinstance but if typ is a parametrized generic Type it is first transformed into its base generic class so that the instance check works . It is also robust to Union and Any . |
54,910 | def eval_forward_ref ( typ : _ForwardRef ) : for frame in stack ( ) : m = getmodule ( frame [ 0 ] ) m_name = m . __name__ if m is not None else '<unknown>' if m_name . startswith ( 'parsyfiles.tests' ) or not m_name . startswith ( 'parsyfiles' ) : try : return typ . _eval_type ( frame [ 0 ] . f_globals , frame [ 0 ] . f_locals ) except NameError : pass raise InvalidForwardRefError ( typ ) | Climbs the current stack until the given Forward reference has been resolved or raises an InvalidForwardRefError |
54,911 | def is_valid_pep484_type_hint ( typ_hint , allow_forward_refs : bool = False ) : try : if isinstance ( typ_hint , type ) : return True except : pass try : if allow_forward_refs and is_forward_ref ( typ_hint ) : return True except : pass try : return is_union_type ( typ_hint ) or is_typevar ( typ_hint ) except : return False | Returns True if the provided type is a valid PEP484 type hint False otherwise . |
54,912 | def is_pep484_nonable ( typ ) : if typ is type ( None ) : return True elif is_typevar ( typ ) or is_union_type ( typ ) : return any ( is_pep484_nonable ( tt ) for tt in get_alternate_types_resolving_forwardref_union_and_typevar ( typ ) ) else : return False | Checks if a given type is nonable meaning that it explicitly or implicitly declares a Union with NoneType . Nested TypeVars and Unions are supported . |
54,913 | def create_for_collection_items ( item_type , hint ) : return TypeInformationRequiredError ( "Cannot parse object of type {t} as a collection: this type has no valid " "PEP484 type hint about its contents: found {h}. Please use a standard " "PEP484 declaration such as Dict[str, Foo] or List[Foo]" "" . format ( t = str ( item_type ) , h = hint ) ) | Helper method for collection items |
54,914 | def create_for_object_attributes ( item_type , faulty_attribute_name : str , hint ) : return TypeInformationRequiredError ( "Cannot create instances of type {t}: constructor attribute '{a}' has an" " invalid PEP484 type hint: {h}." . format ( t = str ( item_type ) , a = faulty_attribute_name , h = hint ) ) | Helper method for constructor attributes |
54,915 | def exception_class ( self , exception ) : cls = type ( exception ) if cls . __module__ == 'exceptions' : return cls . __name__ return "%s.%s" % ( cls . __module__ , cls . __name__ ) | Return a name representing the class of an exception . |
54,916 | def request_info ( self , request ) : view , args , kwargs = resolve ( request . path ) for i , arg in enumerate ( args ) : kwargs [ i ] = arg parameters = { } parameters . update ( kwargs ) parameters . update ( request . POST . items ( ) ) environ = request . META return { "session" : dict ( request . session ) , 'cookies' : dict ( request . COOKIES ) , 'headers' : dict ( get_headers ( environ ) ) , 'env' : dict ( get_environ ( environ ) ) , "remote_ip" : request . META [ "REMOTE_ADDR" ] , "parameters" : parameters , "action" : view . __name__ , "application" : view . __module__ , "method" : request . method , "url" : request . build_absolute_uri ( ) } | Return a dictionary of information for a given request . |
54,917 | def _save ( self , hdf5 , model , positives , negatives ) : hdf5 . set ( "PositiveIndices" , sorted ( list ( positives ) ) ) hdf5 . set ( "NegativeIndices" , sorted ( list ( negatives ) ) ) hdf5 . create_group ( "Model" ) hdf5 . cd ( "Model" ) model . save ( hdf5 ) del hdf5 | Saves the given intermediate state of the bootstrapping to file . |
54,918 | def _load ( self , hdf5 ) : positives = set ( hdf5 . get ( "PositiveIndices" ) ) negatives = set ( hdf5 . get ( "NegativeIndices" ) ) hdf5 . cd ( "Model" ) model = bob . learn . boosting . BoostedMachine ( hdf5 ) return model , positives , negatives | Loads the intermediate state of the bootstrapping from file . |
54,919 | def undelay ( self ) : i = 0 while i < len ( self ) : op = self [ i ] i += 1 if hasattr ( op , 'arg1' ) : if isinstance ( op . arg1 , DelayedArg ) : op . arg1 = op . arg1 . resolve ( ) if isinstance ( op . arg1 , CodeBlock ) : op . arg1 . undelay ( ) | resolves all delayed arguments |
54,920 | def get_directorship_heads ( self , val ) : __ldap_group_ou__ = "cn=groups,cn=accounts,dc=csh,dc=rit,dc=edu" res = self . __con__ . search_s ( __ldap_group_ou__ , ldap . SCOPE_SUBTREE , "(cn=eboard-%s)" % val , [ 'member' ] ) ret = [ ] for member in res [ 0 ] [ 1 ] [ 'member' ] : try : ret . append ( member . decode ( 'utf-8' ) ) except UnicodeDecodeError : ret . append ( member ) except KeyError : continue return [ CSHMember ( self , dn . split ( '=' ) [ 1 ] . split ( ',' ) [ 0 ] , True ) for dn in ret ] | Get the head of a directorship |
54,921 | def enqueue_mod ( self , dn , mod ) : if dn not in self . __pending_mod_dn__ : self . __pending_mod_dn__ . append ( dn ) self . __mod_queue__ [ dn ] = [ ] self . __mod_queue__ [ dn ] . append ( mod ) | Enqueue a LDAP modification . |
54,922 | def flush_mod ( self ) : for dn in self . __pending_mod_dn__ : try : if self . __ro__ : for mod in self . __mod_queue__ [ dn ] : if mod [ 0 ] == ldap . MOD_DELETE : mod_str = "DELETE" elif mod [ 0 ] == ldap . MOD_ADD : mod_str = "ADD" else : mod_str = "REPLACE" print ( "{} VALUE {} = {} FOR {}" . format ( mod_str , mod [ 1 ] , mod [ 2 ] , dn ) ) else : self . __con__ . modify_s ( dn , self . __mod_queue__ [ dn ] ) except ldap . TYPE_OR_VALUE_EXISTS : print ( "Error! Conflicting Batch Modification: %s" % str ( self . __mod_queue__ [ dn ] ) ) continue except ldap . NO_SUCH_ATTRIBUTE : print ( "Error! Conflicting Batch Modification: %s" % str ( self . __mod_queue__ [ dn ] ) ) continue self . __mod_queue__ [ dn ] = None self . __pending_mod_dn__ = [ ] | Flush all pending LDAP modifications . |
54,923 | def detect_encoding ( value ) : if six . PY2 : null_pattern = tuple ( bool ( ord ( char ) ) for char in value [ : 4 ] ) else : null_pattern = tuple ( bool ( char ) for char in value [ : 4 ] ) encodings = { ( 0 , 0 , 0 , 1 ) : 'utf-32-be' , ( 0 , 1 , 0 , 1 ) : 'utf-16-be' , ( 1 , 0 , 0 , 0 ) : 'utf-32-le' , ( 1 , 0 , 1 , 0 ) : 'utf-16-le' , } return encodings . get ( null_pattern , 'utf-8' ) | Returns the character encoding for a JSON string . |
54,924 | def _merge_params ( url , params ) : if isinstance ( params , dict ) : params = list ( params . items ( ) ) scheme , netloc , path , query , fragment = urllib . parse . urlsplit ( url ) url_params = urllib . parse . parse_qsl ( query , keep_blank_values = True ) url_params . extend ( params ) query = _encode_data ( url_params ) return urllib . parse . urlunsplit ( ( scheme , netloc , path , query , fragment ) ) | Merge and encode query parameters with an URL . |
54,925 | def json ( self , ** kwargs ) : encoding = detect_encoding ( self . content [ : 4 ] ) value = self . content . decode ( encoding ) return simplejson . loads ( value , ** kwargs ) | Decodes response as JSON . |
54,926 | def raise_for_status ( self ) : if 400 <= self . status_code < 600 : message = 'Error %s for %s' % ( self . status_code , self . url ) raise HTTPError ( message ) | Raises HTTPError if the request got an error . |
54,927 | def metric ( cls , name , count , elapsed ) : if name is None : warnings . warn ( "Ignoring unnamed metric" , stacklevel = 3 ) return with cls . lock : if cls . dump_atexit and not cls . instances : atexit . register ( cls . dump ) try : self = cls . instances [ name ] except KeyError : self = cls . instances [ name ] = cls ( name ) self . temp . write ( self . struct . pack ( count , elapsed ) ) | A metric function that buffers through numpy |
54,928 | def _dump ( self ) : try : self . temp . seek ( 0 ) arr = np . fromfile ( self . temp , self . dtype ) self . count_arr = arr [ 'count' ] self . elapsed_arr = arr [ 'elapsed' ] if self . calc_stats : self . count_mean = np . mean ( self . count_arr ) self . count_std = np . std ( self . count_arr ) self . elapsed_mean = np . mean ( self . elapsed_arr ) self . elapsed_std = np . std ( self . elapsed_arr ) self . _output ( ) finally : self . temp . close ( ) self . _cleanup ( ) | dump data for an individual metric . For internal use only . |
54,929 | def list ( self , host_rec = None , service_rec = None , hostfilter = None ) : return self . send . vuln_list ( host_rec , service_rec , hostfilter ) | Returns a list of vulnerabilities based on t_hosts . id or t_services . id . If neither are set then statistical results are added |
54,930 | def ip_info ( self , vuln_name = None , vuln_id = None , ip_list_only = True , hostfilter = None ) : return self . send . vuln_ip_info ( vuln_name , vuln_id , ip_list_only , hostfilter ) | List of all IP Addresses with a vulnerability |
54,931 | def service_list ( self , vuln_name = None , vuln_id = None , hostfilter = None ) : return self . send . vuln_service_list ( vuln_name , vuln_id , hostfilter ) | Returns a dictionary of vulns with services and IP Addresses |
54,932 | def import_name ( mod_name ) : try : mod_obj_old = sys . modules [ mod_name ] except KeyError : mod_obj_old = None if mod_obj_old is not None : return mod_obj_old __import__ ( mod_name ) mod_obj = sys . modules [ mod_name ] return mod_obj | Import a module by module name . |
54,933 | def import_path ( mod_path , mod_name ) : mod_code = open ( mod_path ) . read ( ) mod_obj = import_code ( mod_code = mod_code , mod_name = mod_name , ) if not hasattr ( mod_obj , '__file__' ) : mod_obj . __file__ = mod_path return mod_obj | Import a module by module file path . |
54,934 | def import_obj ( uri , mod_name = None , mod_attr_sep = '::' , attr_chain_sep = '.' , retn_mod = False , ) : if mod_attr_sep is None : mod_attr_sep = '::' uri_parts = split_uri ( uri = uri , mod_attr_sep = mod_attr_sep ) protocol , mod_uri , attr_chain = uri_parts if protocol == 'py' : mod_obj = import_name ( mod_uri ) else : if not mod_name : msg = ( 'Argument `mod_name` must be given when loading by file path.' ) raise ValueError ( msg ) mod_obj = import_path ( mod_uri , mod_name = mod_name ) if not attr_chain : if retn_mod : return mod_obj , None else : return mod_obj if attr_chain_sep is None : attr_chain_sep = '.' attr_obj = get_attr_chain ( obj = mod_obj , attr_chain = attr_chain , sep = attr_chain_sep , ) if retn_mod : return mod_obj , attr_obj else : return attr_obj | Load an object from a module . |
54,935 | def add_to_sys_modules ( mod_name , mod_obj = None ) : mod_snames = mod_name . split ( '.' ) parent_mod_name = '' parent_mod_obj = None for mod_sname in mod_snames : if parent_mod_name == '' : current_mod_name = mod_sname else : current_mod_name = parent_mod_name + '.' + mod_sname if current_mod_name == mod_name : current_mod_obj = mod_obj else : current_mod_obj = sys . modules . get ( current_mod_name , None ) if current_mod_obj is None : current_mod_obj = imp . new_module ( current_mod_name ) sys . modules [ current_mod_name ] = current_mod_obj if parent_mod_obj is not None : setattr ( parent_mod_obj , mod_sname , current_mod_obj ) parent_mod_name = current_mod_name parent_mod_obj = current_mod_obj | Add a module object to sys . modules . |
54,936 | def get_host ( environ ) : scheme = environ . get ( 'wsgi.url_scheme' ) if 'HTTP_X_FORWARDED_HOST' in environ : result = environ [ 'HTTP_X_FORWARDED_HOST' ] elif 'HTTP_HOST' in environ : result = environ [ 'HTTP_HOST' ] else : result = environ [ 'SERVER_NAME' ] if ( scheme , str ( environ [ 'SERVER_PORT' ] ) ) not in ( ( 'https' , '443' ) , ( 'http' , '80' ) ) : result += ':' + environ [ 'SERVER_PORT' ] if result . endswith ( ':80' ) and scheme == 'http' : result = result [ : - 3 ] elif result . endswith ( ':443' ) and scheme == 'https' : result = result [ : - 4 ] return result | Return the real host for the given WSGI environment . This takes care of the X - Forwarded - Host header . |
54,937 | def _raw ( cls , vertices , edges , out_edges , in_edges , head , tail ) : self = object . __new__ ( cls ) self . _out_edges = out_edges self . _in_edges = in_edges self . _head = head self . _tail = tail self . _vertices = vertices self . _edges = edges return self | Private constructor for direct construction of an ObjectGraph from its attributes . |
54,938 | def annotated ( self ) : edge_annotations = { } for edge in self . edges : if edge not in edge_annotations : referrer = self . _tail [ edge ] known_refs = annotated_references ( referrer ) for out_edge in self . _out_edges [ referrer ] : referent = self . _head [ out_edge ] if known_refs [ referent ] : annotation = known_refs [ referent ] . pop ( ) else : annotation = None edge_annotations [ out_edge ] = annotation annotated_vertices = [ AnnotatedVertex ( id = id ( vertex ) , annotation = object_annotation ( vertex ) , ) for vertex in self . vertices ] annotated_edges = [ AnnotatedEdge ( id = edge , annotation = edge_annotations [ edge ] , head = id ( self . _head [ edge ] ) , tail = id ( self . _tail [ edge ] ) , ) for edge in self . edges ] return AnnotatedGraph ( vertices = annotated_vertices , edges = annotated_edges , ) | Annotate this graph returning an AnnotatedGraph object with the same structure . |
54,939 | def owned_objects ( self ) : return ( [ self , self . __dict__ , self . _head , self . _tail , self . _out_edges , self . _out_edges . _keys , self . _out_edges . _values , self . _in_edges , self . _in_edges . _keys , self . _in_edges . _values , self . _vertices , self . _vertices . _elements , self . _edges , ] + list ( six . itervalues ( self . _out_edges ) ) + list ( six . itervalues ( self . _in_edges ) ) ) | List of gc - tracked objects owned by this ObjectGraph instance . |
54,940 | def find_by_typename ( self , typename ) : return self . find_by ( lambda obj : type ( obj ) . __name__ == typename ) | List of all objects whose type has the given name . |
54,941 | def get_unset_inputs ( self ) : return set ( [ k for k , v in self . _inputs . items ( ) if v . is_empty ( False ) ] ) | Return a set of unset inputs |
54,942 | def prompt_unset_inputs ( self , force = False ) : for k , v in self . _inputs . items ( ) : if force or v . is_empty ( False ) : self . get_input ( k , force = force ) | Prompt for unset input values |
54,943 | def values ( self , with_defaults = True ) : return dict ( ( ( k , str ( v ) ) for k , v in self . _inputs . items ( ) if not v . is_empty ( with_defaults ) ) ) | Return the values dictionary defaulting to default values |
54,944 | def write_values ( self ) : return dict ( ( ( k , v . value ) for k , v in self . _inputs . items ( ) if not v . is_secret and not v . is_empty ( False ) ) ) | Return the dictionary with which to write values |
54,945 | def _parse_param_line ( self , line ) : value = line . strip ( '\n \t' ) if len ( value ) > 0 : i = Input ( ) if value . find ( '#' ) != - 1 : value , extra_attributes = value . split ( '#' ) try : extra_attributes = eval ( extra_attributes ) except SyntaxError : raise InputException ( "Incorrectly formatted input for {0}!" . format ( value ) ) if not isinstance ( extra_attributes , dict ) : raise InputException ( "Incorrectly formatted input for {0}!" . format ( value ) ) if 'prompt' in extra_attributes : i . prompt = extra_attributes [ 'prompt' ] if 'help' in extra_attributes : i . help = extra_attributes [ 'help' ] if 'type' in extra_attributes : i . in_type = extra_attributes [ 'type' ] if i . in_type . find ( '/' ) != - 1 : i . in_type , i . out_type = i . in_type . split ( '/' ) if 'cast' in extra_attributes : i . out_type = extra_attributes [ 'cast' ] if value . find ( '==' ) != - 1 : value , default = value . split ( '==' ) i . default = default if value . endswith ( '?' ) : value = value [ : - 1 ] i . is_secret = True return ( value , i ) return None | Parse a single param line . |
54,946 | def download ( self , overwrite = True ) : if overwrite or not os . path . exists ( self . file_path ) : _ , f = tempfile . mkstemp ( ) try : urlretrieve ( self . DOWNLOAD_URL , f ) extract_csv ( f , self . file_path ) finally : os . remove ( f ) | Download the zipcodes CSV file . If overwrite is set to False the file won t be downloaded if it already exists . |
54,947 | def get_zipcodes_for_canton ( self , canton ) : zipcodes = [ zipcode for zipcode , location in self . get_locations ( ) . items ( ) if location . canton == canton ] return zipcodes | Return the list of zipcodes for the given canton code . |
54,948 | def get_cantons ( self ) : return sorted ( list ( set ( [ location . canton for location in self . get_locations ( ) . values ( ) ] ) ) ) | Return the list of unique cantons sorted by name . |
54,949 | def get_municipalities ( self ) : return sorted ( list ( set ( [ location . municipality for location in self . get_locations ( ) . values ( ) ] ) ) ) | Return the list of unique municipalities sorted by name . |
54,950 | def _get_formula_class ( self , formula ) : from sprinter . formula . base import FormulaBase if formula in LEGACY_MAPPINGS : formula = LEGACY_MAPPINGS [ formula ] formula_class , formula_url = formula , None if ':' in formula : formula_class , formula_url = formula . split ( ":" , 1 ) if formula_class not in self . _formula_dict : try : self . _formula_dict [ formula_class ] = lib . get_subclass_from_module ( formula_class , FormulaBase ) except ( SprinterException , ImportError ) : logger . info ( "Downloading %s..." % formula_class ) try : self . _pip . install_egg ( formula_url or formula_class ) try : self . _formula_dict [ formula_class ] = lib . get_subclass_from_module ( formula_class , FormulaBase ) except ImportError : logger . debug ( "FeatureDict import Error" , exc_info = sys . exc_info ( ) ) raise SprinterException ( "Error: Unable to retrieve formula %s!" % formula_class ) except PipException : logger . error ( "ERROR: Unable to download %s!" % formula_class ) return self . _formula_dict [ formula_class ] | get a formula class object if it exists else create one add it to the dict and pass return it . |
54,951 | def is_backup_class ( cls ) : return True if ( isclass ( cls ) and issubclass ( cls , Storable ) and get_mapping ( cls , no_mapping_ok = True ) ) else False | Return true if given class supports back up . Currently this means a gludb . data . Storable - derived class that has a mapping as defined in gludb . config |
54,952 | def process ( hw_num : int , problems_to_do : Optional [ Iterable [ int ] ] = None , prefix : Optional [ Path ] = None , by_hand : Optional [ Iterable [ int ] ] = None , ) -> None : if prefix is None : prefix = Path ( "." ) problems : Iterable [ Path ] if problems_to_do is None : problems = list ( prefix . glob ( f"homework-{hw_num}-[0-9]*.ipynb" ) ) else : problems = [ prefix / f"homework-{hw_num}-{i}.ipynb" for i in problems_to_do ] problems = sorted ( problems , key = lambda k : k . stem [ - 1 ] ) output_directory : Path = ( prefix / "output" ) . resolve ( ) fw = FilesWriter ( build_directory = str ( output_directory ) ) assignment_zip_name = output_directory / f"homework-{hw_num}.zip" solution_zip_name = output_directory / f"homework-{hw_num}-soln.zip" assignment_pdfs : List [ BytesIO ] = [ ] solution_pdfs : List [ BytesIO ] = [ ] assignment_pdf : bytes solution_pdf : bytes assignment_nb : str solution_nb : str res : Dict [ str , Union [ str , bool ] ] = { "delete_pymarkdown" : True , "global_content_filter" : { "include_raw" : False } , } for problem in problems : print ( "Working on:" , problem ) res [ "unique_key" ] = problem . stem problem_number = int ( problem . stem . split ( "-" ) [ - 1 ] ) if by_hand is not None and problem_number in by_hand : res [ "by_hand" ] = True else : res [ "by_hand" ] = False problem_fname = str ( problem . resolve ( ) ) res [ "remove_solution" ] = True assignment_pdf , _ = pdf_exp . from_filename ( problem_fname , resources = res ) assignment_pdfs . append ( BytesIO ( assignment_pdf ) ) assignment_nb , _ = nb_exp . from_filename ( problem_fname , resources = res ) with ZipFile ( assignment_zip_name , mode = "a" ) as zip_file : zip_file . writestr ( problem . name , assignment_nb ) res [ "remove_solution" ] = False solution_pdf , _ = pdf_exp . from_filename ( problem_fname , resources = res ) solution_pdfs . append ( BytesIO ( solution_pdf ) ) solution_nb , _ = nb_exp . 
from_filename ( problem_fname , resources = res ) with ZipFile ( solution_zip_name , mode = "a" ) as zip_file : zip_file . writestr ( problem . stem + "-soln" + problem . suffix , solution_nb ) resources : Dict [ str , Any ] = { "metadata" : { "name" : f"homework-{hw_num}" , "path" : str ( prefix ) , "modified_date" : date . today ( ) . strftime ( "%B %d, %Y" ) , } , "output_extension" : ".pdf" , } fw . write ( combine_pdf_as_bytes ( assignment_pdfs ) , resources , f"homework-{hw_num}" ) resources [ "metadata" ] [ "name" ] = f"homework-{hw_num}-soln" fw . write ( combine_pdf_as_bytes ( solution_pdfs ) , resources , f"homework-{hw_num}-soln" ) | Process the homework problems in prefix folder . |
54,953 | def main ( argv : Optional [ Sequence [ str ] ] = None ) -> None : parser = ArgumentParser ( description = "Convert Jupyter Notebook assignments to PDFs" ) parser . add_argument ( "--hw" , type = int , required = True , help = "Homework number to convert" , dest = "hw_num" , ) parser . add_argument ( "-p" , "--problems" , type = int , help = "Problem numbers to convert" , dest = "problems" , nargs = "*" , ) parser . add_argument ( "--by-hand" , type = int , help = "Problem numbers to be completed by hand" , dest = "by_hand" , nargs = "*" , ) args = parser . parse_args ( argv ) prefix = Path ( f"homework/homework-{args.hw_num}" ) process ( args . hw_num , args . problems , prefix = prefix , by_hand = args . by_hand ) | Parse arguments and process the homework assignment . |
54,954 | def get_vm_by_name ( content , name , regex = False ) : return get_object_by_name ( content , vim . VirtualMachine , name , regex ) | Get a VM by its name |
54,955 | def get_datacenter ( content , obj ) : datacenters = content . rootFolder . childEntity for d in datacenters : dch = get_all ( content , d , type ( obj ) ) if dch is not None and obj in dch : return d | Get the datacenter to whom an object belongs |
54,956 | def get_all_vswitches ( content ) : vswitches = [ ] hosts = get_all_hosts ( content ) for h in hosts : for s in h . config . network . vswitch : vswitches . append ( s ) return vswitches | Get all the virtual switches |
54,957 | def print_vm_info ( vm ) : summary = vm . summary print ( 'Name : ' , summary . config . name ) print ( 'Path : ' , summary . config . vmPathName ) print ( 'Guest : ' , summary . config . guestFullName ) annotation = summary . config . annotation if annotation is not None and annotation != '' : print ( 'Annotation : ' , annotation ) print ( 'State : ' , summary . runtime . powerState ) if summary . guest is not None : ip = summary . guest . ipAddress if ip is not None and ip != '' : print ( 'IP : ' , ip ) if summary . runtime . question is not None : print ( 'Question : ' , summary . runtime . question . text ) print ( '' ) | Print information for a particular virtual machine |
54,958 | def module_import ( module_path ) : try : module = __import__ ( module_path ) components = module_path . split ( '.' ) for component in components [ 1 : ] : module = getattr ( module , component ) return module except ImportError : raise BadModulePathError ( 'Unable to find module "%s".' % ( module_path , ) ) | Imports the module indicated in name |
54,959 | def find_contour_yaml ( config_file = __file__ , names = None ) : checked = set ( ) contour_yaml = _find_countour_yaml ( os . path . dirname ( config_file ) , checked , names = names ) if not contour_yaml : contour_yaml = _find_countour_yaml ( os . getcwd ( ) , checked , names = names ) return contour_yaml | Traverse directory trees to find a contour . yaml file |
54,960 | def _find_countour_yaml ( start , checked , names = None ) : extensions = [ ] if names : for name in names : if not os . path . splitext ( name ) [ 1 ] : extensions . append ( name + ".yaml" ) extensions . append ( name + ".yml" ) yaml_names = ( names or [ ] ) + CONTOUR_YAML_NAMES + extensions directory = start while directory not in checked : checked . add ( directory ) for fs_yaml_name in yaml_names : yaml_path = os . path . join ( directory , fs_yaml_name ) if os . path . exists ( yaml_path ) : return yaml_path directory = os . path . dirname ( directory ) return | Traverse the directory tree identified by start until a directory already in checked is encountered or the path of countour . yaml is found . |
54,961 | def _guess_type_from_validator ( validator ) : if isinstance ( validator , _OptionalValidator ) : return _guess_type_from_validator ( validator . validator ) elif isinstance ( validator , _AndValidator ) : for v in validator . validators : typ = _guess_type_from_validator ( v ) if typ is not None : return typ return None elif isinstance ( validator , _InstanceOfValidator ) : return validator . type else : return None | Utility method to return the declared type of an attribute or None . It handles _OptionalValidator and _AndValidator in order to unpack the validators . |
54,962 | def is_optional ( attr ) : return isinstance ( attr . validator , _OptionalValidator ) or ( attr . default is not None and attr . default is not NOTHING ) | Helper method to find if an attribute is optional
54,963 | def preprocess ( self , nb : "NotebookNode" , resources : dict ) -> Tuple [ "NotebookNode" , dict ] : if not resources . get ( "global_content_filter" , { } ) . get ( "include_raw" , False ) : keep_cells = [ ] for cell in nb . cells : if cell . cell_type != "raw" : keep_cells . append ( cell ) nb . cells = keep_cells return nb , resources | Remove any raw cells from the Notebook . |
54,964 | def preprocess ( self , nb : "NotebookNode" , resources : dict ) -> Tuple [ "NotebookNode" , dict ] : if "remove_solution" not in resources : raise KeyError ( "The resources dictionary must have a remove_solution key." ) if resources [ "remove_solution" ] : keep_cells_idx = [ ] for index , cell in enumerate ( nb . cells ) : if "## solution" in cell . source . lower ( ) : keep_cells_idx . append ( index ) elif len ( keep_cells_idx ) > 0 and cell . source . startswith ( "### " ) : keep_cells_idx . append ( index ) keep_cells = nb . cells [ : keep_cells_idx [ 0 ] + 1 ] for i in keep_cells_idx [ 1 : ] : keep_cells . append ( nb . cells [ i ] ) if resources [ "by_hand" ] : keep_cells . append ( by_hand_cell ) else : if "sketch" in nb . cells [ i ] . source . lower ( ) : keep_cells . append ( sketch_cell ) else : keep_cells . append ( md_expl_cell ) keep_cells . append ( code_ans_cell ) keep_cells . append ( md_ans_cell ) nb . cells = keep_cells return nb , resources | Preprocess the entire notebook . |
54,965 | def parse_from_dict ( json_dict ) : order_columns = json_dict [ 'columns' ] order_list = MarketOrderList ( upload_keys = json_dict [ 'uploadKeys' ] , order_generator = json_dict [ 'generator' ] , ) for rowset in json_dict [ 'rowsets' ] : generated_at = parse_datetime ( rowset [ 'generatedAt' ] ) region_id = rowset [ 'regionID' ] type_id = rowset [ 'typeID' ] order_list . set_empty_region ( region_id , type_id , generated_at ) for row in rowset [ 'rows' ] : order_kwargs = _columns_to_kwargs ( SPEC_TO_KWARG_CONVERSION , order_columns , row ) order_kwargs . update ( { 'region_id' : region_id , 'type_id' : type_id , 'generated_at' : generated_at , } ) order_kwargs [ 'order_issue_date' ] = parse_datetime ( order_kwargs [ 'order_issue_date' ] ) order_list . add_order ( MarketOrder ( ** order_kwargs ) ) return order_list | Given a Unified Uploader message parse the contents and return a MarketOrderList . |
54,966 | def encode_to_json ( order_list ) : rowsets = [ ] for items_in_region_list in order_list . _orders . values ( ) : region_id = items_in_region_list . region_id type_id = items_in_region_list . type_id generated_at = gen_iso_datetime_str ( items_in_region_list . generated_at ) rows = [ ] for order in items_in_region_list . orders : issue_date = gen_iso_datetime_str ( order . order_issue_date ) rows . append ( [ order . price , order . volume_remaining , order . order_range , order . order_id , order . volume_entered , order . minimum_volume , order . is_bid , issue_date , order . order_duration , order . station_id , order . solar_system_id , ] ) rowsets . append ( dict ( generatedAt = generated_at , regionID = region_id , typeID = type_id , rows = rows , ) ) json_dict = { 'resultType' : 'orders' , 'version' : '0.1' , 'uploadKeys' : order_list . upload_keys , 'generator' : order_list . order_generator , 'currentTime' : gen_iso_datetime_str ( now_dtime_in_utc ( ) ) , 'columns' : STANDARD_ENCODED_COLUMNS , 'rowsets' : rowsets , } return json . dumps ( json_dict ) | Encodes this list of MarketOrder instances to a JSON string . |
54,967 | def decode ( f ) : decoder = mqtt_io . FileDecoder ( f ) ( byte_0 , ) = decoder . unpack ( mqtt_io . FIELD_U8 ) packet_type_u4 = ( byte_0 >> 4 ) flags = byte_0 & 0x0f try : packet_type = MqttControlPacketType ( packet_type_u4 ) except ValueError : raise DecodeError ( 'Unknown packet type 0x{:02x}.' . format ( packet_type_u4 ) ) if not are_flags_valid ( packet_type , flags ) : raise DecodeError ( 'Invalid flags for packet type.' ) num_bytes , num_remaining_bytes = decoder . unpack_varint ( 4 ) return decoder . num_bytes_consumed , MqttFixedHeader ( packet_type , flags , num_remaining_bytes ) | Extract a MqttFixedHeader from f . |
54,968 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . subscribe decoder = mqtt_io . FileDecoder ( mqtt_io . LimitReader ( f , header . remaining_len ) ) packet_id , = decoder . unpack ( mqtt_io . FIELD_PACKET_ID ) topics = [ ] while header . remaining_len > decoder . num_bytes_consumed : num_str_bytes , name = decoder . unpack_utf8 ( ) max_qos , = decoder . unpack ( mqtt_io . FIELD_U8 ) try : sub_topic = MqttTopic ( name , max_qos ) except ValueError : raise DecodeError ( 'Invalid QOS {}' . format ( max_qos ) ) topics . append ( sub_topic ) assert header . remaining_len == decoder . num_bytes_consumed return decoder . num_bytes_consumed , MqttSubscribe ( packet_id , topics ) | Generates a MqttSubscribe packet given a MqttFixedHeader . This method asserts that header . packet_type is subscribe . |
54,969 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . suback decoder = mqtt_io . FileDecoder ( mqtt_io . LimitReader ( f , header . remaining_len ) ) packet_id , = decoder . unpack ( mqtt_io . FIELD_PACKET_ID ) results = [ ] while header . remaining_len > decoder . num_bytes_consumed : result , = decoder . unpack ( mqtt_io . FIELD_U8 ) try : results . append ( SubscribeResult ( result ) ) except ValueError : raise DecodeError ( 'Unsupported result {:02x}.' . format ( result ) ) assert header . remaining_len == decoder . num_bytes_consumed return decoder . num_bytes_consumed , MqttSuback ( packet_id , results ) | Generates a MqttSuback packet given a MqttFixedHeader . This method asserts that header . packet_type is suback . |
54,970 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . publish dupe = bool ( header . flags & 0x08 ) retain = bool ( header . flags & 0x01 ) qos = ( ( header . flags & 0x06 ) >> 1 ) if qos == 0 and dupe : raise DecodeError ( "Unexpected dupe=True for qos==0 message [MQTT-3.3.1-2]." ) decoder = mqtt_io . FileDecoder ( mqtt_io . LimitReader ( f , header . remaining_len ) ) num_bytes_consumed , topic_name = decoder . unpack_utf8 ( ) if qos != 0 : packet_id , = decoder . unpack ( mqtt_io . FIELD_PACKET_ID ) else : packet_id = 0 payload_len = header . remaining_len - decoder . num_bytes_consumed payload = decoder . read ( payload_len ) return decoder . num_bytes_consumed , MqttPublish ( packet_id , topic_name , payload , dupe , qos , retain ) | Generates a MqttPublish packet given a MqttFixedHeader . This method asserts that header . packet_type is publish . |
54,971 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . pubrel decoder = mqtt_io . FileDecoder ( mqtt_io . LimitReader ( f , header . remaining_len ) ) packet_id , = decoder . unpack ( mqtt_io . FIELD_U16 ) if header . remaining_len != decoder . num_bytes_consumed : raise DecodeError ( 'Extra bytes at end of packet.' ) return decoder . num_bytes_consumed , MqttPubrel ( packet_id ) | Generates a MqttPubrel packet given a MqttFixedHeader . This method asserts that header . packet_type is pubrel . |
54,972 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . unsubscribe decoder = mqtt_io . FileDecoder ( mqtt_io . LimitReader ( f , header . remaining_len ) ) packet_id , = decoder . unpack ( mqtt_io . FIELD_PACKET_ID ) topics = [ ] while header . remaining_len > decoder . num_bytes_consumed : num_str_bytes , topic = decoder . unpack_utf8 ( ) topics . append ( topic ) assert header . remaining_len - decoder . num_bytes_consumed == 0 return decoder . num_bytes_consumed , MqttUnsubscribe ( packet_id , topics ) | Generates a MqttUnsubscribe packet given a MqttFixedHeader . This method asserts that header . packet_type is unsubscribe . |
54,973 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . unsuback decoder = mqtt_io . FileDecoder ( mqtt_io . LimitReader ( f , header . remaining_len ) ) packet_id , = decoder . unpack ( mqtt_io . FIELD_PACKET_ID ) if header . remaining_len != decoder . num_bytes_consumed : raise DecodeError ( 'Extra bytes at end of packet.' ) return decoder . num_bytes_consumed , MqttUnsuback ( packet_id ) | Generates a MqttUnsuback packet given a MqttFixedHeader . This method asserts that header . packet_type is unsuback . |
54,974 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . pingreq if header . remaining_len != 0 : raise DecodeError ( 'Extra bytes at end of packet.' ) return 0 , MqttPingreq ( ) | Generates a MqttPingreq packet given a MqttFixedHeader . This method asserts that header . packet_type is pingreq . |
54,975 | def decode_body ( cls , header , f ) : assert header . packet_type == MqttControlPacketType . pingresp if header . remaining_len != 0 : raise DecodeError ( 'Extra bytes at end of packet.' ) return 0 , MqttPingresp ( ) | Generates a MqttPingresp packet given a MqttFixedHeader . This method asserts that header . packet_type is pingresp . |
54,976 | def connect ( self ) : if self . token : self . phab_session = { 'token' : self . token } return req = self . req_session . post ( '%s/api/conduit.connect' % self . host , data = { 'params' : json . dumps ( self . connect_params ) , 'output' : 'json' , '__conduit__' : True , } ) result = req . json ( ) [ 'result' ] self . phab_session = { 'sessionKey' : result [ 'sessionKey' ] , 'connectionID' : result [ 'connectionID' ] , } | Sets up your Phabricator session ; it is not necessary to call this directly
54,977 | def install ( force = False ) : ret , git_dir , _ = run ( "git rev-parse --show-toplevel" ) if ret != 0 : click . echo ( "ERROR: Please run from within a GIT repository." , file = sys . stderr ) raise click . Abort git_dir = git_dir [ 0 ] hooks_dir = os . path . join ( git_dir , HOOK_PATH ) for hook in HOOKS : hook_path = os . path . join ( hooks_dir , hook ) if os . path . exists ( hook_path ) : if not force : click . echo ( "Hook already exists. Skipping {0}" . format ( hook_path ) , file = sys . stderr ) continue else : os . unlink ( hook_path ) source = os . path . join ( sys . prefix , "bin" , "kwalitee-" + hook ) os . symlink ( os . path . normpath ( source ) , hook_path ) return True | Install git hooks . |
54,978 | def uninstall ( ) : ret , git_dir , _ = run ( "git rev-parse --show-toplevel" ) if ret != 0 : click . echo ( "ERROR: Please run from within a GIT repository." , file = sys . stderr ) raise click . Abort git_dir = git_dir [ 0 ] hooks_dir = os . path . join ( git_dir , HOOK_PATH ) for hook in HOOKS : hook_path = os . path . join ( hooks_dir , hook ) if os . path . exists ( hook_path ) : os . remove ( hook_path ) return True | Uninstall git hooks . |
54,979 | def setup_logger ( ) : logger = logging . getLogger ( 'dockerstache' ) logger . setLevel ( logging . INFO ) handler = logging . StreamHandler ( stream = sys . stdout ) handler . setLevel ( logging . INFO ) logger . addHandler ( handler ) return logger | setup basic logger |
54,980 | def named_any ( name ) : assert name , 'Empty module name' names = name . split ( '.' ) topLevelPackage = None moduleNames = names [ : ] while not topLevelPackage : if moduleNames : trialname = '.' . join ( moduleNames ) try : topLevelPackage = __import__ ( trialname ) except Exception , ex : moduleNames . pop ( ) else : if len ( names ) == 1 : raise Exception ( "No module named %r" % ( name , ) ) else : raise Exception ( '%r does not name an object' % ( name , ) ) obj = topLevelPackage for n in names [ 1 : ] : obj = getattr ( obj , n ) return obj | Retrieve a Python object by its fully qualified name from the global Python module namespace . The first part of the name that describes a module will be discovered and imported . Each subsequent part of the name is treated as the name of an attribute of the object specified by all of the name which came before it . |
54,981 | def for_name ( modpath , classname ) : module = __import__ ( modpath , fromlist = [ classname ] ) classobj = getattr ( module , classname ) return classobj ( ) | Returns a class of classname from module modname . |
54,982 | def _convert ( self , val ) : if isinstance ( val , dict ) and not isinstance ( val , DotDict ) : return DotDict ( val ) , True elif isinstance ( val , list ) and not isinstance ( val , DotList ) : return DotList ( val ) , True return val , False | Convert the type if necessary and return if a conversion happened . |
54,983 | def to_json ( self ) : obj = { "vertices" : [ { "id" : vertex . id , "annotation" : vertex . annotation , } for vertex in self . vertices ] , "edges" : [ { "id" : edge . id , "annotation" : edge . annotation , "head" : edge . head , "tail" : edge . tail , } for edge in self . _edges ] , } return six . text_type ( json . dumps ( obj , ensure_ascii = False ) ) | Convert to a JSON string . |
54,984 | def from_json ( cls , json_graph ) : obj = json . loads ( json_graph ) vertices = [ AnnotatedVertex ( id = vertex [ "id" ] , annotation = vertex [ "annotation" ] , ) for vertex in obj [ "vertices" ] ] edges = [ AnnotatedEdge ( id = edge [ "id" ] , annotation = edge [ "annotation" ] , head = edge [ "head" ] , tail = edge [ "tail" ] , ) for edge in obj [ "edges" ] ] return cls ( vertices = vertices , edges = edges ) | Reconstruct the graph from a graph exported to JSON . |
54,985 | def export_json ( self , filename ) : json_graph = self . to_json ( ) with open ( filename , 'wb' ) as f : f . write ( json_graph . encode ( 'utf-8' ) ) | Export graph in JSON form to the given file . |
54,986 | def import_json ( cls , filename ) : with open ( filename , 'rb' ) as f : json_graph = f . read ( ) . decode ( 'utf-8' ) return cls . from_json ( json_graph ) | Import graph from the given file . The file is expected to contain UTF - 8 encoded JSON data . |
54,987 | def to_dot ( self ) : edge_labels = { edge . id : edge . annotation for edge in self . _edges } edges = [ self . _format_edge ( edge_labels , edge ) for edge in self . _edges ] vertices = [ DOT_VERTEX_TEMPLATE . format ( vertex = vertex . id , label = dot_quote ( vertex . annotation ) , ) for vertex in self . vertices ] return DOT_DIGRAPH_TEMPLATE . format ( edges = "" . join ( edges ) , vertices = "" . join ( vertices ) , ) | Produce a graph in DOT format . |
54,988 | def install_brew ( target_path ) : if not os . path . exists ( target_path ) : try : os . makedirs ( target_path ) except OSError : logger . warn ( "Unable to create directory %s for brew." % target_path ) logger . warn ( "Skipping..." ) return extract_targz ( HOMEBREW_URL , target_path , remove_common_prefix = True ) | Install brew to the target path |
54,989 | def pass_service ( * names ) : def decorator ( f ) : @ functools . wraps ( f ) def wrapper ( * args , ** kwargs ) : for name in names : kwargs [ name ] = service_proxy ( name ) return f ( * args , ** kwargs ) return wrapper return decorator | Injects a service instance into the kwargs |
54,990 | def get_conn ( ) : if os . environ . get ( 'DEBUG' , False ) or os . environ . get ( 'travis' , False ) : conn = DynamoDBConnection ( host = 'localhost' , port = 8000 , aws_access_key_id = 'TEST' , aws_secret_access_key = 'TEST' , is_secure = False ) else : conn = DynamoDBConnection ( ) return conn | Return a connection to DynamoDB . |
54,991 | def table_schema_call ( self , target , cls ) : index_defs = [ ] for name in cls . index_names ( ) or [ ] : index_defs . append ( GlobalIncludeIndex ( gsi_name ( name ) , parts = [ HashKey ( name ) ] , includes = [ 'value' ] ) ) return target ( cls . get_table_name ( ) , connection = get_conn ( ) , schema = [ HashKey ( 'id' ) ] , global_indexes = index_defs or None ) | Perform a table schema call . |
54,992 | def thread ( self ) : log . info ( '@{}.thread starting' . format ( self . __class__ . __name__ ) ) thread = threading . Thread ( target = thread_wrapper ( self . consume ) , args = ( ) ) thread . daemon = True thread . start ( ) | Start a thread for this consumer . |
54,993 | def _parse_multifile ( self , desired_type : Type [ T ] , obj : PersistedObject , parsing_plan_for_children : Dict [ str , ParsingPlan ] , logger : Logger , options : Dict [ str , Dict [ str , Any ] ] ) -> T : pass | First parse all children from the parsing plan then calls _build_object_from_parsed_children |
54,994 | def execute ( self , logger : Logger , options : Dict [ str , Dict [ str , Any ] ] ) -> T : in_root_call = False if logger is not None : if not hasattr ( _BaseParsingPlan . thrd_locals , 'flag_exec' ) or _BaseParsingPlan . thrd_locals . flag_exec == 0 : logger . debug ( 'Executing Parsing Plan for [{location}]' '' . format ( location = self . obj_on_fs_to_parse . get_pretty_location ( append_file_ext = False ) ) ) _BaseParsingPlan . thrd_locals . flag_exec = 1 in_root_call = True logger . debug ( '(P) ' + get_parsing_plan_log_str ( self . obj_on_fs_to_parse , self . obj_type , log_only_last = not in_root_call , parser = self . parser ) ) try : res = super ( _BaseParsingPlan , self ) . execute ( logger , options ) if logger . isEnabledFor ( DEBUG ) : logger . info ( '(P) {loc} -> {type} SUCCESS !' '' . format ( loc = self . obj_on_fs_to_parse . get_pretty_location ( blank_parent_part = not GLOBAL_CONFIG . full_paths_in_logs , compact_file_ext = True ) , type = get_pretty_type_str ( self . obj_type ) ) ) else : logger . info ( 'SUCCESS parsed [{loc}] as a [{type}] successfully. Parser used was [{parser}]' '' . format ( loc = self . obj_on_fs_to_parse . get_pretty_location ( compact_file_ext = True ) , type = get_pretty_type_str ( self . obj_type ) , parser = str ( self . parser ) ) ) if in_root_call : logger . debug ( 'Completed parsing successfully' ) return res finally : if in_root_call : _BaseParsingPlan . thrd_locals . flag_exec = 0 | Overrides the parent method to add log messages . |
54,995 | def _execute ( self , logger : Logger , options : Dict [ str , Dict [ str , Any ] ] ) -> T : if isinstance ( self . parser , _BaseParser ) : if ( not self . is_singlefile ) and self . parser . supports_multifile ( ) : return self . parser . _parse_multifile ( self . obj_type , self . obj_on_fs_to_parse , self . _get_children_parsing_plan ( ) , logger , options ) elif self . is_singlefile and self . parser . supports_singlefile ( ) : return self . parser . _parse_singlefile ( self . obj_type , self . get_singlefile_path ( ) , self . get_singlefile_encoding ( ) , logger , options ) else : raise _InvalidParserException . create ( self . parser , self . obj_on_fs_to_parse ) else : raise TypeError ( 'Parser attached to this _BaseParsingPlan is not a ' + str ( _BaseParser ) ) | Implementation of the parent class method . Checks that self . parser is a _BaseParser and calls the appropriate parsing method . |
54,996 | def create_parsing_plan ( self , desired_type : Type [ T ] , filesystem_object : PersistedObject , logger : Logger , _main_call : bool = True ) : in_root_call = False if _main_call and ( not hasattr ( AnyParser . thrd_locals , 'flag_init' ) or AnyParser . thrd_locals . flag_init == 0 ) : logger . debug ( 'Building a parsing plan to parse [{location}] into a {type}' '' . format ( location = filesystem_object . get_pretty_location ( append_file_ext = False ) , type = get_pretty_type_str ( desired_type ) ) ) AnyParser . thrd_locals . flag_init = 1 in_root_call = True try : pp = self . _create_parsing_plan ( desired_type , filesystem_object , logger , log_only_last = ( not _main_call ) ) finally : if in_root_call : AnyParser . thrd_locals . flag_init = 0 if in_root_call : logger . debug ( 'Parsing Plan created successfully' ) return pp | Implements the abstract parent method by using the recursive parsing plan impl . Subclasses wishing to produce their own parsing plans should rather override _create_parsing_plan in order to benefit from this same log msg . |
54,997 | def _create_parsing_plan ( self , desired_type : Type [ T ] , filesystem_object : PersistedObject , logger : Logger , log_only_last : bool = False ) : logger . debug ( '(B) ' + get_parsing_plan_log_str ( filesystem_object , desired_type , log_only_last = log_only_last , parser = self ) ) return AnyParser . _RecursiveParsingPlan ( desired_type , filesystem_object , self , logger ) | Adds a log message and creates a recursive parsing plan . |
54,998 | def _get_parsing_plan_for_multifile_children ( self , obj_on_fs : PersistedObject , desired_type : Type [ T ] , logger : Logger ) -> Dict [ str , ParsingPlan [ T ] ] : pass | This method is called by the _RecursiveParsingPlan when created . Implementing classes should return a dictionary containing a ParsingPlan for each child they plan to parse using this framework . Note that for the files that will be parsed using a parsing library it is not necessary to return a ParsingPlan . |
54,999 | def _parse_singlefile ( self , desired_type : Type [ T ] , file_path : str , encoding : str , logger : Logger , options : Dict [ str , Dict [ str , Any ] ] ) -> T : opts = get_options_for_id ( options , self . get_id_for_options ( ) ) if self . _streaming_mode : file_stream = None try : file_stream = open ( file_path , 'r' , encoding = encoding ) if self . function_args is None : return self . _parser_func ( desired_type , file_stream , logger , ** opts ) else : return self . _parser_func ( desired_type , file_stream , logger , ** self . function_args , ** opts ) except TypeError as e : raise CaughtTypeError . create ( self . _parser_func , e ) finally : if file_stream is not None : file_stream . close ( ) else : if self . function_args is None : return self . _parser_func ( desired_type , file_path , encoding , logger , ** opts ) else : return self . _parser_func ( desired_type , file_path , encoding , logger , ** self . function_args , ** opts ) | Relies on the inner parsing function to parse the file . If _streaming_mode is True the file will be opened and closed by this method . Otherwise the parsing function will be responsible to open and close . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.