idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
58,200 | def get_attribute_option ( self , attribute , option_name ) : self . __validate_attribute_option_name ( option_name ) attribute_key = self . __make_key ( attribute ) return self . __attribute_options [ attribute_key ] . get ( option_name ) | Returns the value of the given attribute option for the specified attribute . |
58,201 | def get_attribute_options ( self , attribute = None ) : attribute_key = self . __make_key ( attribute ) if attribute_key is None : opts = defaultdict ( self . _default_attributes_options . copy ) for attr , mp_options in iteritems_ ( self . __attribute_options ) : opts [ attr ] . update ( mp_options ) else : opts = self . _default_attributes_options . copy ( ) attr_opts = self . __attribute_options [ attribute_key ] opts . update ( attr_opts ) return opts | Returns a copy of the mapping options for the given attribute name or a copy of all mapping options if no attribute name is provided . All options that were not explicitly configured are given a default value of None . |
58,202 | def run ( self , visitor ) : attr_option_map = self . __config . get_attribute_options ( ) for ( key , key_attr_option_map ) in sorted ( iteritems_ ( attr_option_map ) ) : if not self . __max_depth is None and len ( key ) > self . __max_depth : continue visitor . visit ( key , key_attr_option_map ) | Traverses this representer configuration traverser with the given visitor . |
58,203 | def with_retry ( cls , methods ) : retry_with_backoff = retry ( retry_on_exception = lambda e : isinstance ( e , BotoServerError ) , wait_exponential_multiplier = 1000 , wait_exponential_max = 10000 ) for method in methods : m = getattr ( cls , method , None ) if isinstance ( m , collections . Callable ) : setattr ( cls , method , retry_with_backoff ( m ) ) return cls | Wraps the given list of methods in a class with an exponential - back retry mechanism . |
58,204 | def from_file ( file_path ) -> dict : with io . open ( file_path , 'r' , encoding = 'utf-8' ) as json_stream : return Json . parse ( json_stream , True ) | Load JSON file |
58,205 | def safe_values ( self , value ) : string_val = "" if isinstance ( value , datetime . date ) : try : string_val = value . strftime ( '{0}{1}{2}' . format ( current_app . config [ 'DATETIME' ] [ 'DATE_FORMAT' ] , current_app . config [ 'DATETIME' ] [ 'SEPARATOR' ] , current_app . config [ 'DATETIME' ] [ 'TIME_FORMAT' ] ) ) except RuntimeError as error : string_val = value . strftime ( '%Y-%m-%d %H:%M:%S' ) elif isinstance ( value , bytes ) : string_val = value . decode ( 'utf-8' ) elif isinstance ( value , decimal . Decimal ) : string_val = float ( value ) else : string_val = value return string_val | Parse non - string values that will not serialize |
58,206 | def camel_case ( self , snake_case ) : components = snake_case . split ( '_' ) return components [ 0 ] + "" . join ( x . title ( ) for x in components [ 1 : ] ) | Convert snake case to camel case |
58,207 | def __find_object_children ( self , obj ) -> dict : if hasattr ( obj , 'items' ) and isinstance ( obj . items , types . BuiltinFunctionType ) : return self . __construct_object ( obj ) elif isinstance ( obj , ( list , tuple , set ) ) : return self . __construct_list ( obj ) else : exclude_list = [ ] if hasattr ( obj , '_sa_instance_state' ) : if len ( orm . attributes . instance_state ( obj ) . unloaded ) > 0 : mapper = inspect ( obj ) for column in mapper . attrs : column . key column . value if hasattr ( obj , 'json_exclude_list' ) : exclude_list = obj . json_exclude_list return self . __construct_object ( vars ( obj ) , exclude_list ) return None | Convert object to flattened object |
58,208 | def __iterate_value ( self , value ) : if hasattr ( value , '__dict__' ) or isinstance ( value , dict ) : return self . __find_object_children ( value ) elif isinstance ( value , ( list , tuple , set ) ) : return self . __construct_list ( value ) return self . safe_values ( value ) | Return value for JSON serialization |
58,209 | def write_xml ( self ) : key = None if self . language is not None : lang = { } lang [ '{http://www.w3.org/XML/1998/namespace}lang' ] = self . language key = etree . Element ( 'vocabulary-key' , attrib = lang ) else : key = etree . Element ( 'vocabulary-key' ) name = etree . Element ( 'name' ) name . text = self . name key . append ( name ) if self . family is not None : family = etree . Element ( 'family' ) family . text = self . family key . append ( family ) if self . version is not None : version = etree . Element ( 'version' ) version . text = self . version key . append ( version ) if self . code_value is not None : code_value = etree . Element ( 'code-value' ) code_value . text = self . code_value key . append ( code_value ) return key | Writes a VocabularyKey Xml as per Healthvault schema . |
58,210 | def parse_xml ( self , key_xml ) : xmlutils = XmlUtils ( key_xml ) self . name = xmlutils . get_string_by_xpath ( 'name' ) self . family = xmlutils . get_string_by_xpath ( 'family' ) self . version = xmlutils . get_string_by_xpath ( 'version' ) self . description = xmlutils . get_string_by_xpath ( 'description' ) self . language = xmlutils . get_lang ( ) | Parse a VocabularyKey from an Xml as per Healthvault schema . |
58,211 | def print_and_exit ( results ) : for success , value in results : if success : print value . encode ( locale . getpreferredencoding ( ) ) else : value . printTraceback ( ) | Print each result and stop the reactor . |
58,212 | def _topological_sort ( self ) : sorted_graph = [ ] node_map = self . _graph . get_nodes ( ) nodes = [ NodeVisitor ( node_map [ node ] ) for node in node_map ] def get_pointers_for_edge_nodes ( visitor_decorated_node ) : edges = [ ] edge_ids = visitor_decorated_node . get_node ( ) . get_edges ( ) for node in nodes : if node . get_id ( ) in edge_ids : edges . append ( node ) return edges for node in nodes : for edge in get_pointers_for_edge_nodes ( node ) : edge . increment ( ) resolved = [ node for node in nodes if node . get_weight ( ) == 0 ] while resolved : node = resolved . pop ( ) sorted_graph . append ( node ) for edge in get_pointers_for_edge_nodes ( node ) : edge . decrement ( ) if edge . get_weight ( ) == 0 : resolved . append ( edge ) self . _circular_dependencies = [ node . get_node ( ) for node in nodes if node . get_weight ( ) > 0 ] self . _sorted_nodes = list ( reversed ( [ node . get_node ( ) for node in sorted_graph ] ) ) | Kahn s algorithm for Topological Sorting - Finds cycles in graph - Computes dependency weight |
58,213 | def load_environment_vars ( self ) : for k , v in os . environ . items ( ) : if k . startswith ( MACH9_PREFIX ) : _ , config_key = k . split ( MACH9_PREFIX , 1 ) self [ config_key ] = v | Looks for any MACH9_ prefixed environment variables and applies them to the configuration if present . |
58,214 | def copy ( self , parent = None ) : new = Structure ( None , parent = parent ) new . key = self . key new . type_ = self . type_ new . val_guaranteed = self . val_guaranteed new . key_guaranteed = self . key_guaranteed for child in self . children : new . children . append ( child . copy ( new ) ) return new | Copies an existing structure and all of it s children |
58,215 | def generation ( self ) : if not self . parent : return 0 elif self . parent . is_dict : return 1 + self . parent . generation else : return self . parent . generation | Returns the number of ancestors that are dictionaries |
58,216 | def type_string ( self ) : if self . is_tuple : subtypes = [ item . type_string for item in self . children ] return '{}({})' . format ( '' if self . val_guaranteed else '*' , ', ' . join ( subtypes ) ) elif self . is_list : return '{}[{}]' . format ( '' if self . val_guaranteed else '*' , self . children [ 0 ] . type_string ) else : return '{}{}' . format ( '' if self . val_guaranteed else '*' , self . type_ . __name__ ) | Returns a string representing the type of the structure |
58,217 | def set_field ( obj , field_name , value ) : old = getattr ( obj , field_name ) field = obj . _meta . get_field ( field_name ) if field . is_relation : old_repr = None if old is None else getattr ( old , 'pk' , old ) new_repr = None if value is None else getattr ( value , 'pk' , value ) elif field . __class__ . __name__ == 'DateTimeField' : old_repr = None if old is None else datetime_repr ( old ) new_repr = None if value is None else datetime_repr ( value ) else : old_repr = None if old is None else str ( old ) new_repr = None if value is None else str ( value ) if old_repr != new_repr : setattr ( obj , field_name , value ) if not hasattr ( obj , DIRTY ) : setattr ( obj , DIRTY , [ ] ) getattr ( obj , DIRTY ) . append ( dict ( field_name = field_name , old_value = old_repr , new_value = new_repr , ) ) | Fancy setattr with debugging . |
58,218 | def obj_update ( obj , data : dict , * , update_fields = UNSET , save : bool = True ) -> bool : for field_name , value in data . items ( ) : set_field ( obj , field_name , value ) dirty_data = getattr ( obj , DIRTY , None ) if not dirty_data : return False logger . debug ( human_log_formatter ( dirty_data ) , extra = { 'model' : obj . _meta . object_name , 'pk' : obj . pk , 'changes' : json_log_formatter ( dirty_data ) , } ) if update_fields == UNSET : update_fields = list ( map ( itemgetter ( 'field_name' ) , dirty_data ) ) if not save : update_fields = ( ) obj . save ( update_fields = update_fields ) delattr ( obj , DIRTY ) return True | Fancy way to update obj with data dict . |
58,219 | def obj_update_or_create ( model , defaults = None , update_fields = UNSET , ** kwargs ) : obj , created = model . objects . get_or_create ( defaults = defaults , ** kwargs ) if created : logger . debug ( 'CREATED %s %s' , model . _meta . object_name , obj . pk , extra = { 'pk' : obj . pk } ) else : obj_update ( obj , defaults , update_fields = update_fields ) return obj , created | Mimic queryset . update_or_create but using obj_update . |
58,220 | def detect_scheme ( filename ) : logger = logging . getLogger ( __name__ ) logger . info ( 'Detecting partitioning scheme' ) with open ( filename , 'rb' ) as f : f . seek ( mbr . MBR_SIG_OFFSET ) data = f . read ( mbr . MBR_SIG_SIZE ) signature = struct . unpack ( "<H" , data ) [ 0 ] if signature != mbr . MBR_SIGNATURE : logger . debug ( 'Unknown partitioning scheme' ) return PartitionScheme . SCHEME_UNKNOWN else : f . seek ( gpt . GPT_HEADER_OFFSET ) data = f . read ( gpt . GPT_SIG_SIZE ) signature = struct . unpack ( "<8s" , data ) [ 0 ] if signature != gpt . GPT_SIGNATURE : logger . debug ( 'MBR scheme detected' ) return PartitionScheme . SCHEME_MBR else : logger . debug ( 'GPT scheme detected' ) return PartitionScheme . SCHEME_GPT | Detects partitioning scheme of the source |
58,221 | def _has_file_rolled ( self ) : if self . _fh : size = self . _getsize_of_current_file ( ) if size < self . oldsize : return True self . oldsize = size return False | Check if the file has been rolled |
58,222 | def _open_file ( self , filename ) : if not self . _os_is_windows : self . _fh = open ( filename , "rb" ) self . filename = filename self . _fh . seek ( 0 , os . SEEK_SET ) self . oldsize = 0 return import win32file import msvcrt handle = win32file . CreateFile ( filename , win32file . GENERIC_READ , win32file . FILE_SHARE_DELETE | win32file . FILE_SHARE_READ | win32file . FILE_SHARE_WRITE , None , win32file . OPEN_EXISTING , 0 , None ) detached_handle = handle . Detach ( ) file_descriptor = msvcrt . open_osfhandle ( detached_handle , os . O_RDONLY ) self . _fh = open ( file_descriptor , "rb" ) self . filename = filename self . _fh . seek ( 0 , os . SEEK_SET ) self . oldsize = 0 | Open a file to be tailed |
58,223 | def _filehandle ( self ) : if self . _fh and self . _has_file_rolled ( ) : try : self . _fh . close ( ) except Exception : pass self . _fh = None if not self . _fh : self . _open_file ( self . filename ) if not self . opened_before : self . opened_before = True self . _fh . seek ( 0 , os . SEEK_END ) return self . _fh | Return a filehandle to the file being tailed |
58,224 | def get_class ( class_string ) : split_string = class_string . encode ( 'ascii' ) . split ( '.' ) import_path = '.' . join ( split_string [ : - 1 ] ) class_name = split_string [ - 1 ] if class_name : try : if import_path : mod = __import__ ( import_path , globals ( ) , { } , [ class_name ] ) cls = getattr ( mod , class_name ) else : cls = __import__ ( class_name , globals ( ) , { } ) if cls : return cls except ( ImportError , AttributeError ) : pass return None | Get a class from a dotted string |
58,225 | def _register_handler ( event , fun , external = False ) : registry = core . HANDLER_REGISTRY if external : registry = core . EXTERNAL_HANDLER_REGISTRY if not isinstance ( event , basestring ) : event = core . parse_event_to_name ( event ) if event in registry : registry [ event ] . append ( fun ) else : registry [ event ] = [ fun ] return fun | Register a function to be an event handler |
58,226 | def handler ( param ) : if isinstance ( param , basestring ) : return lambda f : _register_handler ( param , f ) else : core . HANDLER_METHOD_REGISTRY . append ( param ) return param | Decorator that associates a handler to an event class |
58,227 | def log ( name , data = None ) : data = data or { } data . update ( core . get_default_values ( data ) ) event_cls = core . find_event ( name ) event = event_cls ( name , data ) event . validate ( ) data = core . filter_data_values ( data ) data = ejson . dumps ( data ) if conf . getsetting ( 'DEBUG' ) : core . process ( name , data ) else : tasks . process_task . delay ( name , data ) | Entry point for the event lib that starts the logging process |
58,228 | def validate_keys ( self , * keys ) : current_keys = set ( self . data . keys ( ) ) needed_keys = set ( keys ) if not needed_keys . issubset ( current_keys ) : raise ValidationError ( 'One of the following keys are missing from the ' 'event\'s data: {}' . format ( ', ' . join ( needed_keys . difference ( current_keys ) ) ) ) return True | Validation helper to ensure that keys are present in data |
58,229 | def addProject ( gh_link ) : name = os . path . basename ( gh_link ) zipurl = gh_link + "/archive/master.zip" outzip = os . path . join ( 'temp_data' , name + '.zip' ) if not os . path . exists ( 'temp_data' ) : os . makedirs ( 'temp_data' ) downloadFile ( zipurl , outzip ) zip = zipfile . ZipFile ( outzip , mode = 'r' ) outpath = os . path . join ( 'temp_data' , name ) zip . extractall ( outpath ) zip . close ( ) os . remove ( outzip ) return name , outpath | Adds a github project to the data folder unzips it and deletes the zip file . Returns the project name and the path to the project folder . |
58,230 | def cleanDir ( self ) : if os . path . isdir ( self . outdir ) : baddies = [ 'tout.json' , 'nout.json' , 'hout.json' ] for file in baddies : filepath = os . path . join ( self . outdir , file ) if os . path . isfile ( filepath ) : os . remove ( filepath ) | Remove existing json datafiles in the target directory . |
58,231 | def makeHTML ( self , mustachepath , htmlpath ) : subs = dict ( ) if self . title : subs [ "title" ] = self . title subs [ "has_title" ] = True else : subs [ "has_title" ] = False subs [ "font_size" ] = self . font_size subs [ "font_family" ] = self . font_family subs [ "colorscheme" ] = self . colorscheme subs [ "title_color" ] = self . title_color subs [ "bgcolor" ] = self . bgcolor with open ( mustachepath , 'r' ) as infile : mustache_text = pystache . render ( infile . read ( ) , subs ) with open ( htmlpath , 'w+' ) as outfile : outfile . write ( mustache_text ) | Write an html file by applying this ideogram s attributes to a mustache template . |
58,232 | def age ( self , ** kwargs ) : if kwargs . get ( 'days' , None ) is not None : self . _age += kwargs . get ( 'days' ) return if kwargs . get ( 'hours' , None ) is not None : self . _age += kwargs . get ( 'hours' ) / 24. return if kwargs . get ( 'minutes' , None ) is not None : self . _age += kwargs . get ( 'minutes' ) / 24. / 60. return if kwargs . get ( 'seconds' , None ) is not None : self . _age += kwargs . get ( 'seconds' ) / 24. / 60. / 60. return raise KeyError ( "Could not age particle, please specify 'days', 'hours', 'minutes', or 'seconds' parameter" ) | Age this particle . |
58,233 | def normalized_indexes ( self , model_timesteps ) : clean_locs = [ ] for i , loc in enumerate ( self . locations ) : try : if loc . time == self . locations [ i + 1 ] . time : continue else : clean_locs . append ( loc ) except StandardError : clean_locs . append ( loc ) if len ( clean_locs ) == len ( model_timesteps ) : return [ ind for ind , loc in enumerate ( self . locations ) if loc in clean_locs ] elif len ( model_timesteps ) < len ( clean_locs ) : indexes = [ ind for ind , loc in enumerate ( self . locations ) if loc in clean_locs ] if len ( model_timesteps ) == len ( indexes ) : return indexes raise ValueError ( "Can't normalize" ) elif len ( model_timesteps ) > len ( clean_locs ) : raise ValueError ( "Particle has less locations than model timesteps" ) | This function will normalize the particles locations to the timestep of the model that was run . This is used in output as we should only be outputting the model timestep that was chosen to be run . |
58,234 | def is_satisfied_by ( self , candidate : Any , ** kwds : Any ) -> bool : candidate_name = self . _candidate_name context = self . _context if context : if candidate_name in kwds : raise ValueError ( f"Candidate name '{candidate_name}' must " "not be given as keyword." ) context . update ( kwds ) context [ candidate_name ] = candidate try : code = self . _code except AttributeError : self . _code = code = compile ( self . _ast_expr , '<str>' , mode = 'eval' ) return eval ( code , context ) | Return True if candidate satisfies the specification . |
58,235 | def add_edge ( self , fr , to ) : fr = self . add_vertex ( fr ) to = self . add_vertex ( to ) self . adjacency [ fr ] . children . add ( to ) self . adjacency [ to ] . parents . add ( fr ) | Add an edge to the graph . Multiple edges between the same vertices will quietly be ignored . N - partite graphs can be used to permit multiple edges by partitioning the graph into vertices and edges . |
58,236 | def clone ( self , source_id , backup_id , size , volume_id = None , source_host = None ) : volume_id = volume_id or str ( uuid . uuid4 ( ) ) return self . http_put ( '/volumes/%s' % volume_id , params = self . unused ( { 'source_host' : source_host , 'source_volume_id' : source_id , 'backup_id' : backup_id , 'size' : size } ) ) | create a volume then clone the contents of the backup into the new volume |
58,237 | def create ( self , volume_id , backup_id = None , timestamp = None ) : backup_id = backup_id or str ( uuid . uuid4 ( ) ) timestamp = timestamp or int ( time ( ) ) return self . http_put ( '/volumes/%s/backups/%s' % ( volume_id , backup_id ) , params = { 'timestamp' : timestamp } ) | create a backup of a volume |
58,238 | def boards ( hwpack = 'arduino' ) : bunch = read_properties ( boards_txt ( hwpack ) ) bunch_items = list ( bunch . items ( ) ) for bid , board in bunch_items : if 'build' not in board . keys ( ) or 'name' not in board . keys ( ) : log . debug ( 'invalid board found: %s' , bid ) del bunch [ bid ] return bunch | read boards from boards . txt . |
58,239 | def board_names ( hwpack = 'arduino' ) : ls = list ( boards ( hwpack ) . keys ( ) ) ls . sort ( ) return ls | return installed board names . |
58,240 | def print_boards ( hwpack = 'arduino' , verbose = False ) : if verbose : pp ( boards ( hwpack ) ) else : print ( '\n' . join ( board_names ( hwpack ) ) ) | print boards from boards . txt . |
58,241 | def find_lib_dir ( root ) : root = path ( root ) log . debug ( 'files in dir: %s' , root ) for x in root . walkfiles ( ) : log . debug ( ' %s' , x ) if not len ( root . files ( ) ) and len ( root . dirs ( ) ) == 1 : log . debug ( 'go inside root' ) root = root . dirs ( ) [ 0 ] if len ( root . files ( 'keywords.txt' ) ) : root = rename_root ( root ) return root , root keywords = list ( root . walkfiles ( 'keywords.txt' ) ) if len ( keywords ) : if len ( keywords ) > 1 : log . warning ( 'more keywords.txt found. Installing only one. %s' , keywords ) lib_dir = keywords [ 0 ] . parent lib_dir = fix_libdir ( lib_dir ) return root , lib_dir header_only = len ( list ( noexample ( root . walkfiles ( '*.cpp' ) ) ) ) == 0 log . debug ( 'header_only: %s' , header_only ) lib_dir = None headers = list ( noexample ( root . walkfiles ( '*.h' ) ) ) for h in headers : cpp = h . stripext ( ) + '.cpp' if ( header_only or cpp . exists ( ) ) and h . parent . name . lower ( ) == h . namebase . lower ( ) : assert not lib_dir lib_dir = h . parent log . debug ( 'found lib: %s' , lib_dir ) if not lib_dir : if len ( headers ) == 1 and len ( list ( root . files ( '*.h' ) ) ) == 0 : log . debug ( 'only 1 header, not in root' ) lib_dir = headers [ 0 ] . parent lib_dir = rename_root ( lib_dir ) if not lib_dir : root = rename_root ( root ) return root , root assert lib_dir return root , lib_dir | search for lib dir under root . |
58,242 | def move_examples ( root , lib_dir ) : all_pde = files_multi_pattern ( root , INO_PATTERNS ) lib_pde = files_multi_pattern ( lib_dir , INO_PATTERNS ) stray_pde = all_pde . difference ( lib_pde ) if len ( stray_pde ) and not len ( lib_pde ) : log . debug ( 'examples found outside lib dir, moving them: %s' , stray_pde ) examples = lib_dir / EXAMPLES examples . makedirs ( ) for x in stray_pde : d = examples / x . namebase d . makedirs ( ) x . move ( d ) | find examples not under lib dir and move into examples |
58,243 | def fix_examples_dir ( lib_dir ) : for x in lib_dir . dirs ( ) : if x . name . lower ( ) == EXAMPLES : return for x in lib_dir . dirs ( ) : if x . name . lower ( ) == EXAMPLES : _fix_dir ( x ) return for x in lib_dir . dirs ( ) : if 'example' in x . name . lower ( ) : _fix_dir ( x ) return for x in lib_dir . dirs ( ) : if len ( files_multi_pattern ( x , INO_PATTERNS ) ) : _fix_dir ( x ) return | rename examples dir to examples |
58,244 | def install_lib ( url , replace_existing = False , fix_wprogram = True ) : d = tmpdir ( tmpdir ( ) ) f = download ( url ) Archive ( f ) . extractall ( d ) clean_dir ( d ) d , src_dlib = find_lib_dir ( d ) move_examples ( d , src_dlib ) fix_examples_dir ( src_dlib ) if fix_wprogram : fix_wprogram_in_files ( src_dlib ) targ_dlib = libraries_dir ( ) / src_dlib . name if targ_dlib . exists ( ) : log . debug ( 'library already exists: %s' , targ_dlib ) if replace_existing : log . debug ( 'remove %s' , targ_dlib ) targ_dlib . rmtree ( ) else : raise ConfduinoError ( 'library already exists:' + targ_dlib ) log . debug ( 'move %s -> %s' , src_dlib , targ_dlib ) src_dlib . move ( targ_dlib ) libraries_dir ( ) . copymode ( targ_dlib ) for x in targ_dlib . walk ( ) : libraries_dir ( ) . copymode ( x ) return targ_dlib . name | install library from web or local files system . |
58,245 | def _init_supervisor_rpc ( self , rpc_or_port ) : if isinstance ( rpc_or_port , int ) : if self . username : leader = 'http://{self.username}:{self.password}@' else : leader = 'http://' tmpl = leader + '{self.name}:{port}' url = tmpl . format ( self = self , port = rpc_or_port ) self . rpc = xmlrpc_client . ServerProxy ( url , transport = TimeoutTransport ( ) ) else : self . rpc = rpc_or_port self . supervisor = self . rpc . supervisor | Initialize supervisor RPC . |
58,246 | def _init_redis ( redis_spec ) : if not redis_spec : return if isinstance ( redis_spec , six . string_types ) : return redis . StrictRedis . from_url ( redis_spec ) return redis_spec | Return a StrictRedis instance or None based on redis_spec . |
58,247 | def _get_base ( ) : try : name , _aliaslist , _addresslist = socket . gethostbyname_ex ( 'deploy' ) except socket . gaierror : name = 'deploy' fallback = 'https://{name}/' . format ( name = name ) return os . environ . get ( 'VELOCIRAPTOR_URL' , fallback ) | if deploy resolves in this environment use the hostname for which that name resolves . Override with VELOCIRAPTOR_URL |
58,248 | def load_all ( cls , vr , params = None ) : ob_docs = vr . query ( cls . base , params ) return [ cls ( vr , ob ) for ob in ob_docs ] | Create instances of all objects found |
58,249 | def dispatch ( self , ** changes ) : self . patch ( ** changes ) trigger_url = self . _vr . _build_url ( self . resource_uri , 'swarm/' ) resp = self . _vr . session . post ( trigger_url ) resp . raise_for_status ( ) try : return resp . json ( ) except ValueError : return None | Patch the swarm with changes and then trigger the swarm . |
58,250 | def assemble ( self ) : if not self . created : self . create ( ) url = self . _vr . _build_url ( self . resource_uri , 'build/' ) resp = self . _vr . session . post ( url ) resp . raise_for_status ( ) | Assemble a build |
58,251 | def _get_token ( self ) : self . session . token = self . request ( 'getCommunicationToken' , { 'secretKey' : self . session . secret } , { 'uuid' : self . session . user , 'session' : self . session . session , 'clientRevision' : grooveshark . const . CLIENTS [ 'htmlshark' ] [ 'version' ] , 'country' : self . session . country , 'privacy' : 0 , 'client' : 'htmlshark' } ) [ 1 ] self . session . time = time . time ( ) | requests an communication token from Grooveshark |
58,252 | def _request_token ( self , method , client ) : if time . time ( ) - self . session . time > grooveshark . const . TOKEN_TIMEOUT : self . _get_token ( ) random_value = self . _random_hex ( ) return random_value + hashlib . sha1 ( ( method + ':' + self . session . token + ':' + grooveshark . const . CLIENTS [ client ] [ 'token' ] + ':' + random_value ) . encode ( 'utf-8' ) ) . hexdigest ( ) | generates a request token |
58,253 | def request ( self , method , parameters , header ) : data = json . dumps ( { 'parameters' : parameters , 'method' : method , 'header' : header } ) request = urllib . Request ( 'https://grooveshark.com/more.php?%s' % ( method ) , data = data . encode ( 'utf-8' ) , headers = self . _json_request_header ( ) ) with contextlib . closing ( self . urlopen ( request ) ) as response : result = json . loads ( response . read ( ) . decode ( 'utf-8' ) ) if 'result' in result : return response . info ( ) , result [ 'result' ] elif 'fault' in result : raise RequestError ( result [ 'fault' ] [ 'message' ] , result [ 'fault' ] [ 'code' ] ) else : raise UnknownError ( result ) | Grooveshark API request |
58,254 | def header ( self , method , client = 'htmlshark' ) : return { 'token' : self . _request_token ( method , client ) , 'privacy' : 0 , 'uuid' : self . session . user , 'clientRevision' : grooveshark . const . CLIENTS [ client ] [ 'version' ] , 'session' : self . session . session , 'client' : client , 'country' : self . session . country } | generates Grooveshark API Json header |
58,255 | def radio ( self , radio ) : artists = self . connection . request ( 'getArtistsForTagRadio' , { 'tagID' : radio } , self . connection . header ( 'getArtistsForTagRadio' , 'jsqueue' ) ) [ 1 ] return Radio ( artists , radio , self . connection ) | Get songs belong to a specific genre . |
58,256 | def search ( self , query , type = SONGS ) : result = self . connection . request ( 'getResultsFromSearch' , { 'query' : query , 'type' : type , 'guts' : 0 , 'ppOverride' : False } , self . connection . header ( 'getResultsFromSearch' ) ) [ 1 ] [ 'result' ] if type == self . SONGS : return ( Song . from_response ( song , self . connection ) for song in result ) elif type == self . ARTISTS : return ( Artist ( artist [ 'ArtistID' ] , artist [ 'Name' ] , self . connection ) for artist in result ) elif type == self . ALBUMS : return ( self . _parse_album ( album ) for album in result ) elif type == self . PLAYLISTS : return ( self . _parse_playlist ( playlist ) for playlist in result ) | Search for songs artists and albums . |
58,257 | def popular ( self , period = DAILY ) : songs = self . connection . request ( 'popularGetSongs' , { 'type' : period } , self . connection . header ( 'popularGetSongs' ) ) [ 1 ] [ 'Songs' ] return ( Song . from_response ( song , self . connection ) for song in songs ) | Get popular songs . |
58,258 | def playlist ( self , playlist_id ) : playlist = self . connection . request ( 'getPlaylistByID' , { 'playlistID' : playlist_id } , self . connection . header ( 'getPlaylistByID' ) ) [ 1 ] return self . _parse_playlist ( playlist ) | Get a playlist from it s ID |
58,259 | def collection ( self , user_id ) : dct = { 'userID' : user_id , 'page' : 0 } r = 'userGetSongsInLibrary' result = self . connection . request ( r , dct , self . connection . header ( r ) ) songs = result [ 1 ] [ 'Songs' ] return [ Song . from_response ( song , self . connection ) for song in songs ] | Get the song collection of a user . |
58,260 | def hwpack_names ( ) : ls = hwpack_dir ( ) . listdir ( ) ls = [ x . name for x in ls ] ls = [ x for x in ls if x != 'tools' ] arduino_included = 'arduino' in ls ls = [ x for x in ls if x != 'arduino' ] ls . sort ( ) if arduino_included : ls = [ 'arduino' ] + ls return ls | return installed hardware package names . |
58,261 | def _create_parser ( self , html_parser , current_url ) : css_code = '' elements = html_parser . find ( 'style,link[rel="stylesheet"]' ) . list_results ( ) for element in elements : if element . get_tag_name ( ) == 'STYLE' : css_code = css_code + element . get_text_content ( ) else : css_code = css_code + requests . get ( urljoin ( current_url , element . get_attribute ( 'href' ) ) ) . text self . stylesheet = tinycss . make_parser ( ) . parse_stylesheet ( css_code ) | Create the tinycss stylesheet . |
58,262 | def readline ( self , prompt = '' ) : u if self . first_prompt : self . first_prompt = False if self . startup_hook : try : self . startup_hook ( ) except : print u'startup hook failed' traceback . print_exc ( ) c = self . console self . l_buffer . reset_line ( ) self . prompt = prompt self . _print_prompt ( ) if self . pre_input_hook : try : self . pre_input_hook ( ) except : print u'pre_input_hook failed' traceback . print_exc ( ) self . pre_input_hook = None log ( u"in readline: %s" % self . paste_line_buffer ) if len ( self . paste_line_buffer ) > 0 : self . l_buffer = lineobj . ReadlineTextBuffer ( self . paste_line_buffer [ 0 ] ) self . _update_line ( ) self . paste_line_buffer = self . paste_line_buffer [ 1 : ] c . write ( u'\r\n' ) else : self . _readline_from_keyboard ( ) c . write ( u'\r\n' ) self . add_history ( self . l_buffer . copy ( ) ) log ( u'returning(%s)' % self . l_buffer . get_line_text ( ) ) return self . l_buffer . get_line_text ( ) + '\n' | u Try to act like GNU readline . |
58,263 | def history_search_backward ( self , e ) : u self . l_buffer = self . _history . history_search_backward ( self . l_buffer ) | u Search backward through the history for the string of characters between the start of the current line and the point . This is a non - incremental search . By default this command is unbound . |
58,264 | def quoted_insert ( self , e ) : u e = self . console . getkeypress ( ) self . insert_text ( e . char ) | u Add the next character typed to the line verbatim . This is how to insert key sequences like C - q for example . |
58,265 | def ipython_paste ( self , e ) : u if self . enable_win32_clipboard : txt = clipboard . get_clipboard_text_and_convert ( self . enable_ipython_paste_list_of_lists ) if self . enable_ipython_paste_for_paths : if len ( txt ) < 300 and ( u"\t" not in txt ) and ( u"\n" not in txt ) : txt = txt . replace ( u"\\" , u"/" ) . replace ( u" " , ur"\ " ) self . insert_text ( txt ) | u Paste windows clipboard . If enable_ipython_paste_list_of_lists is True then try to convert tabseparated data to repr of list of lists or repr of array |
58,266 | def main ( cls , args = None ) : if args is None : args = sys . argv [ 1 : ] try : o = cls ( ) o . parseOptions ( args ) except usage . UsageError as e : print ( o . getSynopsis ( ) ) print ( o . getUsage ( ) ) print ( str ( e ) ) return 1 except CLIError as ce : print ( str ( ce ) ) return ce . returnCode return 0 | Fill in command - line arguments from argv |
58,267 | def _index_files ( path ) : with zipfile . ZipFile ( path ) as zf : names = sorted ( zf . namelist ( ) ) names = [ nn for nn in names if nn . endswith ( ".tif" ) ] names = [ nn for nn in names if nn . startswith ( "SID PHA" ) ] phasefiles = [ ] for name in names : with zf . open ( name ) as pt : fd = io . BytesIO ( pt . read ( ) ) if SingleTifPhasics . verify ( fd ) : phasefiles . append ( name ) return phasefiles | Search zip file for SID PHA files |
58,268 | def files ( self ) : if self . _files is None : self . _files = SeriesZipTifPhasics . _index_files ( self . path ) return self . _files | List of Phasics tif file names in the input zip file |
58,269 | def verify ( path ) : valid = False try : zf = zipfile . ZipFile ( path ) except ( zipfile . BadZipfile , IsADirectoryError ) : pass else : names = sorted ( zf . namelist ( ) ) names = [ nn for nn in names if nn . endswith ( ".tif" ) ] names = [ nn for nn in names if nn . startswith ( "SID PHA" ) ] for name in names : with zf . open ( name ) as pt : fd = io . BytesIO ( pt . read ( ) ) if SingleTifPhasics . verify ( fd ) : valid = True break zf . close ( ) return valid | Verify that path is a zip file with Phasics TIFF files |
58,270 | def chunks ( stream , size = None ) : if size == 'lines' : for item in stream : yield item return if size is None : size = MAXBUF while True : buf = stream . read ( size ) if not buf : return yield buf | Returns a generator of chunks from the stream with a maximum size of size . I don t know why this isn t part of core Python . |
58,271 | def write_logfile ( ) : command = os . path . basename ( os . path . realpath ( os . path . abspath ( sys . argv [ 0 ] ) ) ) now = datetime . datetime . now ( ) . strftime ( '%Y%m%d-%H%M%S.%f' ) filename = '{}-{}.log' . format ( command , now ) with open ( filename , 'w' ) as logfile : if six . PY3 : logfile . write ( _LOGFILE_STREAM . getvalue ( ) ) else : logfile . write ( _LOGFILE_STREAM . getvalue ( ) . decode ( errors = 'replace' ) ) | Write a DEBUG log file COMMAND - YYYYMMDD - HHMMSS . ffffff . log . |
58,272 | def excepthook ( type , value , traceback ) : try : six . reraise ( type , value , traceback ) except type : _LOGGER . exception ( str ( value ) ) if isinstance ( value , KeyboardInterrupt ) : message = "Cancelling at the user's request." else : message = handle_unexpected_exception ( value ) print ( message , file = sys . stderr ) | Log exceptions instead of printing a traceback to stderr . |
58,273 | def handle_unexpected_exception ( exc ) : try : write_logfile ( ) addendum = 'Please see the log file for more information.' except IOError : addendum = 'Unable to write log file.' try : message = str ( exc ) return '{}{}{}' . format ( message , '\n' if message else '' , addendum ) except Exception : return str ( exc ) | Return an error message and write a log file if logging was not enabled . |
58,274 | def enable_logging ( log_level ) : root_logger = logging . getLogger ( ) root_logger . setLevel ( logging . DEBUG ) logfile_handler = logging . StreamHandler ( _LOGFILE_STREAM ) logfile_handler . setLevel ( logging . DEBUG ) logfile_handler . setFormatter ( logging . Formatter ( '%(levelname)s [%(asctime)s][%(name)s] %(message)s' ) ) root_logger . addHandler ( logfile_handler ) if signal . getsignal ( signal . SIGTERM ) == signal . SIG_DFL : signal . signal ( signal . SIGTERM , _logfile_sigterm_handler ) if log_level : handler = logging . StreamHandler ( ) handler . setFormatter ( _LogColorFormatter ( ) ) root_logger . setLevel ( log_level ) root_logger . addHandler ( handler ) | Configure the root logger and a logfile handler . |
58,275 | def get_log_level ( args ) : index = - 1 log_level = None if '<command>' in args and args [ '<command>' ] : index = sys . argv . index ( args [ '<command>' ] ) if args . get ( '--debug' ) : log_level = 'DEBUG' if '--debug' in sys . argv and sys . argv . index ( '--debug' ) < index : sys . argv . remove ( '--debug' ) elif '-d' in sys . argv and sys . argv . index ( '-d' ) < index : sys . argv . remove ( '-d' ) elif args . get ( '--verbose' ) : log_level = 'INFO' if '--verbose' in sys . argv and sys . argv . index ( '--verbose' ) < index : sys . argv . remove ( '--verbose' ) elif '-v' in sys . argv and sys . argv . index ( '-v' ) < index : sys . argv . remove ( '-v' ) elif args . get ( '--log-level' ) : log_level = args [ '--log-level' ] sys . argv . remove ( '--log-level' ) sys . argv . remove ( log_level ) if log_level not in ( None , 'DEBUG' , 'INFO' , 'WARN' , 'ERROR' ) : raise exceptions . InvalidLogLevelError ( log_level ) return getattr ( logging , log_level ) if log_level else None | Get the log level from the CLI arguments . |
58,276 | def _logfile_sigterm_handler ( * _ ) : logging . error ( 'Received SIGTERM.' ) write_logfile ( ) print ( 'Received signal. Please see the log file for more information.' , file = sys . stderr ) sys . exit ( signal ) | Handle exit signals and write out a log file . |
58,277 | def format ( self , record ) : if record . levelno >= logging . ERROR : color = colorama . Fore . RED elif record . levelno >= logging . WARNING : color = colorama . Fore . YELLOW elif record . levelno >= logging . INFO : color = colorama . Fore . RESET else : color = colorama . Fore . CYAN format_template = ( '{}{}%(levelname)s{} [%(asctime)s][%(name)s]{} %(message)s' ) if sys . stdout . isatty ( ) : self . _fmt = format_template . format ( colorama . Style . BRIGHT , color , colorama . Fore . RESET , colorama . Style . RESET_ALL ) else : self . _fmt = format_template . format ( * [ '' ] * 4 ) if six . PY3 : self . _style . _fmt = self . _fmt return super ( _LogColorFormatter , self ) . format ( record ) | Format the log record with timestamps and level based colors . |
58,278 | def ask_question ( self , question_text , question = None ) : if question is not None : q = question . to_dict ( ) else : q = WatsonQuestion ( question_text ) . to_dict ( ) r = requests . post ( self . url + '/question' , json = { 'question' : q } , headers = { 'Accept' : 'application/json' , 'X-SyncTimeout' : 30 } , auth = ( self . username , self . password ) ) try : response_json = r . json ( ) except ValueError : raise Exception ( 'Failed to parse response JSON' ) return WatsonAnswer ( response_json ) | Ask Watson a question via the Question and Answer API |
58,279 | def process_queue ( queue = None , ** kwargs ) : while True : item = queue . get ( ) if item is None : queue . task_done ( ) logger . info ( f"{queue}: exiting process queue." ) break filename = os . path . basename ( item ) try : queue . next_task ( item , ** kwargs ) except Exception as e : queue . task_done ( ) logger . warn ( f"{queue}: item={filename}. {e}\n" ) logger . exception ( e ) sys . stdout . write ( style . ERROR ( f"{queue}. item={filename}. {e}. Exception has been logged.\n" ) ) sys . stdout . flush ( ) break else : logger . info ( f"{queue}: Successfully processed {filename}.\n" ) queue . task_done ( ) | Loops and waits on queue calling queue s next_task method . |
58,280 | def memoize ( func ) : cache = { } @ functools . wraps ( func ) def wrapper ( * args ) : key = "__" . join ( str ( arg ) for arg in args ) if key not in cache : cache [ key ] = func ( * args ) return cache [ key ] return wrapper | Classic memoize decorator for non - class methods |
58,281 | def cached_method ( func ) : @ functools . wraps ( func ) def wrapper ( self , * args ) : if not hasattr ( self , "_cache" ) : self . _cache = { } key = _argstring ( ( func . __name__ , ) + args ) if key not in self . _cache : self . _cache [ key ] = func ( self , * args ) return self . _cache [ key ] return wrapper | Memoize for class methods |
58,282 | def guard ( func ) : semaphore = threading . Lock ( ) @ functools . wraps ( func ) def wrapper ( * args , ** kwargs ) : semaphore . acquire ( ) try : return func ( * args , ** kwargs ) finally : semaphore . release ( ) return wrapper | Prevents the decorated function from parallel execution . |
58,283 | def threadpool ( num_workers = None ) : def decorator ( func ) : @ functools . wraps ( func ) def wrapper ( data ) : return mapreduce . map ( func , data , num_workers ) return wrapper return decorator | Apply stutils . mapreduce . map to the given function |
58,284 | def invalidate_all ( self ) : for fname in os . listdir ( self . cache_path ) : if fname . startswith ( self . func . __name__ + "." ) : os . remove ( os . path . join ( self . cache_path , fname ) ) | Remove all files caching this function |
58,285 | def ctmc ( data , numstates , transintv = 1.0 , toltime = 1e-8 , debug = False ) : if debug : datacheck ( data , numstates , toltime ) transcount , statetime = aggregateevents ( data , numstates ) if debug : errorcheck ( transcount , statetime , toltime ) genmat = generatormatrix ( transcount , statetime ) transmat = scipy . linalg . expm ( genmat * transintv ) return transmat , genmat , transcount , statetime | Continous Time Markov Chain |
58,286 | def plugins ( group , spec = None ) : pspec = _parse_spec ( spec ) plugs = list ( _get_registered_plugins ( group , pspec ) ) plugs += list ( _get_unregistered_plugins ( group , plugs , pspec ) ) return PluginSet ( group , spec , list ( _sort_plugins ( group , plugs , pspec , spec ) ) ) | Returns a PluginSet object for the specified setuptools - style entrypoint group . This is just a wrapper around pkg_resources . iter_entry_points that allows the plugins to sort and override themselves . |
58,287 | def handle ( self , object , * args , ** kw ) : if not bool ( self ) : if not self . spec or self . spec == SPEC_ALL : raise ValueError ( 'No plugins available in group %r' % ( self . group , ) ) raise ValueError ( 'No plugins in group %r matched %r' % ( self . group , self . spec ) ) for plugin in self . plugins : object = plugin . handle ( object , * args , ** kw ) return object | Calls each plugin in this PluginSet with the specified object arguments and keywords in the standard group plugin order . The return value from each successive invoked plugin is passed as the first parameter to the next plugin . The final return value is the object returned from the last plugin . |
58,288 | def select ( self , name ) : return PluginSet ( self . group , name , [ plug for plug in self . plugins if plug . name == name ] ) | Returns a new PluginSet that has only the plugins in this that are named name . |
58,289 | def browse_home_listpage_url ( self , state = None , county = None , zipcode = None , street = None , ** kwargs ) : url = self . domain_browse_homes for item in [ state , county , zipcode , street ] : if item : url = url + "/%s" % item url = url + "/" return url | Construct an url of home list page by state county zipcode street . |
58,290 | def _render_bundle ( bundle_name ) : try : bundle = get_bundles ( ) [ bundle_name ] except KeyError : raise ImproperlyConfigured ( "Bundle '%s' is not defined" % bundle_name ) if bundle . use_bundle : return _render_file ( bundle . bundle_type , bundle . get_url ( ) , attrs = ( { 'media' : bundle . media } if bundle . media else { } ) ) bundle_files = [ ] for bundle_file in bundle . files : if bundle_file . precompile_in_debug : bundle_files . append ( _render_file ( bundle_file . bundle_type , bundle_file . precompile_url , attrs = ( { 'media' : bundle_file . media } if bundle . media else { } ) ) ) else : bundle_files . append ( _render_file ( bundle_file . file_type , bundle_file . file_url , attrs = ( { 'media' : bundle_file . media } if bundle . media else { } ) ) ) return '\n' . join ( bundle_files ) | Renders the HTML for a bundle in place - one HTML tag or many depending on settings . USE_BUNDLES |
58,291 | def from_string ( self , string_representation , resource = None ) : stream = NativeIO ( string_representation ) return self . from_stream ( stream , resource = resource ) | Extracts resource data from the given string and converts them to a new resource or updates the given resource from it . |
58,292 | def to_string ( self , obj ) : stream = NativeIO ( ) self . to_stream ( obj , stream ) return text_ ( stream . getvalue ( ) , encoding = self . encoding ) | Converts the given resource to a string representation and returns it . |
58,293 | def data_from_bytes ( self , byte_representation ) : text = byte_representation . decode ( self . encoding ) return self . data_from_string ( text ) | Converts the given bytes representation to resource data . |
58,294 | def data_to_string ( self , data_element ) : stream = NativeIO ( ) self . data_to_stream ( data_element , stream ) return stream . getvalue ( ) | Converts the given data element into a string representation . |
58,295 | def create_from_resource_class ( cls , resource_class ) : mp_reg = get_mapping_registry ( cls . content_type ) mp = mp_reg . find_or_create_mapping ( resource_class ) return cls ( resource_class , mp ) | Creates a new representer for the given resource class . |
58,296 | def data_from_stream ( self , stream ) : parser = self . _make_representation_parser ( stream , self . resource_class , self . _mapping ) return parser . run ( ) | Creates a data element reading a representation from the given stream . |
58,297 | def data_to_stream ( self , data_element , stream ) : generator = self . _make_representation_generator ( stream , self . resource_class , self . _mapping ) generator . run ( data_element ) | Writes the given data element to the given stream . |
58,298 | def resource_from_data ( self , data_element , resource = None ) : return self . _mapping . map_to_resource ( data_element , resource = resource ) | Converts the given data element to a resource . |
58,299 | def configure ( self , options = None , attribute_options = None ) : self . _mapping . update ( options = options , attribute_options = attribute_options ) | Configures the options and attribute options of the mapping associated with this representer with the given dictionaries . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.