idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
6,100
def size_of_generator(generator, memory_efficient=True):
    """Return the number of items yielded by *generator*.

    When ``memory_efficient`` is true the generator is consumed one item
    at a time; otherwise it is materialised into a list first.
    """
    if memory_efficient:
        return sum(1 for _ in generator)
    return len(list(generator))
Get number of items in a generator function .
6,101
def validate(self, value):
    """Validate *value* against every registered validator.

    Validators that accept the value are recorded in
    ``self._used_validator``.  NOTE(review): despite belonging to an
    "OrValidator", this raises when ANY validator fails (errors are
    collected across all of them) -- confirm the intended OR semantics.
    """
    errors = []
    self._used_validator = []
    for val in self._validators:
        try:
            val.validate(value)
            # Remember which validators accepted the value.
            self._used_validator.append(val)
        except ValidatorException as e:
            errors.append(e)
        except Exception as e:
            # Wrap unexpected failures so callers see a uniform type.
            errors.append(ValidatorException("Unknown Error", e))
    if len(errors) > 0:
        # Surface every failure at once rather than just the first.
        raise ValidatorException.from_list(errors)
    return value
validate function from OrValidator
6,102
def GetTotalValue(self):
    """Compute the total lilypond duration value of the bar from its
    time signature (``self.meter``).

    Returns a duration string such as "1", "1." or "2."; returns ""
    when the bar has no meter attached.
    """
    value = ""
    if hasattr(self, "meter"):
        top_value = self.meter.beats
        bottom = self.meter.type
        # Fraction of a whole note covered by the bar, e.g. 3/4 -> 0.75.
        fraction = top_value / bottom
        if fraction == 1:
            value = "1"
        else:
            if fraction > 1:
                # NOTE(review): any value above 1 maps to a dotted whole
                # note regardless of how much greater -- confirm.
                value = "1."
            if fraction < 1:
                if fraction >= 0.5:
                    fraction -= 0.5
                    value = "2"
                    # A remaining quarter adds a dot to the half note.
                    if fraction == 0.25:
                        value += "."
    return value
Gets the total value of the bar according to its time signature
6,103
def GetLastKey(self, voice=1):
    """Return the most recent musical key (not an index) for *voice*.

    Searches backwards through the voice's children for a KeyNode and
    falls back to this node's own ``key`` attribute; returns None when
    neither is available.
    """
    voice_obj = self.GetChild(voice)
    if voice_obj is not None:
        key = BackwardSearch(KeyNode, voice_obj, 1)
        if key is not None:
            return key
    # The missing-voice and key-not-found paths share this fallback;
    # the original duplicated it verbatim in two separate else branches.
    if hasattr(self, "key"):
        return self.key
key as in musical key not index
6,104
def SplitString(value):
    """Wrap *value* in a lilypond \\markup/\\column block, splitting it
    into \\line entries.

    Long strings (>20 chars) are split into 10-character chunks;
    otherwise the string is split on any embedded newlines, or emitted
    as a single line.
    """
    string_length = len(value)
    chunks = int(string_length / 10)
    string_list = list(value)
    lstring = ""
    if chunks > 1:
        lstring = "\\markup { \n\r \column { "
        for i in range(int(chunks)):
            lstring += "\n\r\r \\line { \""
            index = i * 10
            # NOTE(review): this inner loop shadows ``i`` and always
            # re-emits characters 0..index-1 instead of the chunk's own
            # 10 characters -- looks like a bug; confirm intended output.
            for i in range(index):
                lstring += string_list[i]
            lstring += "\" \r\r}"
        lstring += "\n\r } \n }"
    if lstring == "":
        # Short string: split on carriage returns / newlines instead.
        indexes = [i for i in range(len(string_list)) if string_list[i] == "\r" or string_list[i] == "\n"]
        lstring = "\\markup { \n\r \column { "
        if len(indexes) == 0:
            lstring += "\n\r\r \\line { \"" + "".join(string_list) + "\" \n\r\r } \n\r } \n }"
        else:
            rows = []
            row_1 = string_list[:indexes[0]]
            rows.append(row_1)
            for i in range(len(indexes)):
                start = indexes[i]
                if i != len(indexes) - 1:
                    end = indexes[i + 1]
                else:
                    end = len(string_list)
                row = string_list[start:end]
                rows.append(row)
            for row in rows:
                lstring += "\n\r\r \\line { \""
                lstring += "".join(row)
                lstring += "\" \r\r}"
            lstring += "\n\r } \n }"
    return lstring
simple method that puts in spaces every 10 characters
6,105
def NumbersToWords(number):
    """Convert an integer in [0, 999] to concatenated words,
    e.g. 132 -> "onehundredandthirtytwo".

    Intended for stave names, so numbers above 999 are not supported
    (they produce an empty string).
    """
    units = ['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']
    tens = ['ten', 'twenty', 'thirty', 'forty', 'fifty', 'sixty', 'seventy', 'eighty', 'ninety']
    output = ""
    if number != 0:
        str_val = str(number)
        if 4 > len(str_val) > 2:
            output += units[int(str_val[0]) - 1]
            output += "hundred"
            # BUG FIX: the original compared digit characters to the int
            # 0 (always unequal), so e.g. 100 rendered as
            # "onehundredandninetynine"; compare to "0" instead.
            if str_val[1] != "0":
                output += "and" + tens[int(str_val[1]) - 1]
            if str_val[2] != "0":
                output += units[int(str_val[2]) - 1]
        if 3 > len(str_val) > 1:
            output += tens[int(str_val[0]) - 1]
            if str_val[1] != "0":
                output += units[int(str_val[1]) - 1]
        if 2 > len(str_val) == 1:
            output += units[int(str_val[0]) - 1]
    else:
        output = "zero"
    return output
little function that converts numbers to words . This could be more efficient and won't work if the number is bigger than 999, but it's for stave names and I doubt any part would have more than 10 staves, let alone 999 .
6,106
def CheckTotals(self):
    """Ask every staff child to recompute its measure totals."""
    for staff_index in self.GetChildrenIndexes():
        self.getStaff(staff_index).CheckTotals()
method to calculate the maximum total lilypond value for a measure without a time signature
6,107
def CheckPreviousBarline(self, staff):
    """Fix up the right barline of the second-to-last measure in *staff*.

    When that barline carries an ending but the last measure's left
    barline does not continue it, the ending type is set to
    "discontinue".
    """
    measure_before_last = self.getMeasureAtPosition(-2, staff)
    last_measure = self.getMeasureAtPosition(-1, staff)
    if last_measure is None or measure_before_last is None:
        return
    previous_right = measure_before_last.GetBarline("right")
    if previous_right is None or not hasattr(previous_right, "ending"):
        return
    next_left = last_measure.GetBarline("left")
    if next_left is None or not hasattr(next_left, "ending"):
        previous_right.ending.type = "discontinue"
method which checks the bar before the current one for changes we need to make to its barlines
6,108
def __parse(self) -> object:
    """Select the decoder for the next bencode element at ``self.idx``
    and return the decoded value.

    Raises bencodepy.DecodingError on truncated input or an unknown
    token character.
    """
    # Peek one byte; slicing (rather than indexing) yields b'' at EOF
    # instead of raising IndexError.
    char = self.data[self.idx:self.idx + 1]
    if char in [b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'0']:
        # Byte string: "<len>:<bytes>".
        str_len = int(self.__read_to(b':'))
        return self.__read(str_len)
    elif char == b'i':
        # Integer: "i<digits>e".
        self.idx += 1
        return int(self.__read_to(b'e'))
    elif char == b'd':
        return self.__parse_dict()
    elif char == b'l':
        return self.__parse_list()
    elif char == b'':
        raise bencodepy.DecodingError('Unexpected End of File at index position of {0}.'.format(str(self.idx)))
    else:
        raise bencodepy.DecodingError('Invalid token character ({0}) at position {1}.'.format(str(char), str(self.idx)))
Selects the appropriate method to decode next bencode element and returns the result .
6,109
def decode(self) -> Iterable:
    """Entry point of the decode process; returns the final result."""
    first = self.data[0:1]
    if first in (b'd', b'l'):
        return self.__parse()
    # Top level is a bare sequence of elements -> wrap them in a tuple.
    return self.__wrap_with_tuple()
Start of decode process . Returns final results .
6,110
def __wrap_with_tuple(self) -> tuple:
    """Parse every remaining element and return them as a tuple."""
    elements = []
    data_length = len(self.data)
    while self.idx < data_length:
        elements.append(self.__parse())
    return tuple(elements)
Returns a tuple of all nested bencode elements .
6,111
def __parse_dict(self) -> OrderedDict:
    """Decode a bencode dictionary ("d...e") into an OrderedDict,
    preserving the on-wire key order.
    """
    self.idx += 1  # skip the leading b'd'
    d = OrderedDict()
    key_name = None
    while self.data[self.idx:self.idx + 1] != b'e':
        # Keys and values alternate; key_name tracks which comes next.
        if key_name is None:
            key_name = self.__parse()
        else:
            d[key_name] = self.__parse()
            key_name = None
    self.idx += 1  # skip the trailing b'e'
    return d
Returns an Ordered Dictionary of nested bencode elements .
6,112
def __parse_list(self) -> list:
    """Decode a bencode list ("l...e") into a Python list."""
    self.idx += 1  # consume the leading b'l'
    elements = []
    while self.data[self.idx:self.idx + 1] != b'e':
        elements.append(self.__parse())
    self.idx += 1  # consume the trailing b'e'
    return elements
Returns an list of nested bencode elements .
6,113
def PopAllChildren(self):
    """Remove every child of this node and return them as a list."""
    # The index list is snapshotted before any pops, matching the
    # original two-step behaviour.
    return [self.PopChild(index) for index in self.GetChildrenIndexes()]
Method to remove and return all children of current node
6,114
def _process_file(input_file, output_file, apikey):
    """Shrink *input_file* through the TinyPNG API into *output_file*.

    Returns the API response.  Raises StopProcessing on fatal errors and
    RetryProcessing on a server-side internal error; other failures are
    returned as-is for the caller to inspect.
    """
    bytes_ = read_binary(input_file)
    compressed = shrink(bytes_, apikey)
    if compressed.success and compressed.bytes:
        write_binary(output_file, compressed.bytes)
    else:
        if compressed.errno in FATAL_ERRORS:
            raise StopProcessing(compressed)
        elif compressed.errno == TinyPNGError.InternalServerError:
            raise RetryProcessing(compressed)
    return compressed
Shrinks input_file to output_file .
6,115
def process_directory(source, target, apikey, handler, overwrite=False):
    """Optimize and save PNG files from *source* into the *target*
    directory.

    Existing outputs are skipped unless *overwrite* is set.  Retryable
    server errors are retried up to 9 times with a sleep between tries;
    a fatal error stops the whole run.  *handler* receives progress
    callbacks throughout.
    """
    handler.on_start()
    # Per-file retry counter.
    attempts = defaultdict(lambda: 0)
    input_files = files_with_exts(source, suffix='.png')
    next_ = lambda: next(input_files, None)
    current_file = next_()
    response = None
    last_processed = None
    while current_file:
        output_file = target_path(source, target, current_file)
        if os.path.exists(output_file) and not overwrite:
            handler.on_skip(current_file, source=source)
            current_file = next_()
            continue
        try:
            handler.on_pre_item(current_file)
            last_processed = current_file
            response = _process_file(current_file, output_file, apikey)
            current_file = next_()
        except StopProcessing as e:
            # Fatal API error -- report and abort the whole run.
            response = e.response
            handler.on_stop(response.errmsg)
            break
        except RetryProcessing as e:
            # Leaving current_file unchanged retries the same file on
            # the next loop iteration.
            response = e.response
            if attempts[current_file] < 9:
                handler.on_retry(current_file)
                time.sleep(TINYPNG_SLEEP_SEC)
                attempts[current_file] += 1
            else:
                # Give up on this file after 9 retries and move on.
                current_file = next_()
        finally:
            handler.on_post_item(response, input_file=last_processed, source=source)
    handler.on_finish(output_dir=target)
Optimize and save png files from source to target directory .
6,116
def _main(args):
    """CLI entry point for batch compression.

    Validates the API key and the input/output paths, then processes the
    whole directory; Ctrl-C still emits the final summary.
    """
    if not args.apikey:
        print("\nPlease provide TinyPNG API key")
        print("To obtain key visit https://api.tinypng.com/developers\n")
        sys.exit(1)
    input_dir = realpath(args.input)
    if not args.output:
        # Default output directory sits next to the input one.
        output_dir = input_dir + "-output"
    else:
        output_dir = realpath(args.output)
    if input_dir == output_dir:
        print("\nPlease specify different output directory\n")
        sys.exit(1)
    handler = ScreenHandler()
    try:
        process_directory(input_dir, output_dir, args.apikey, handler)
    except KeyboardInterrupt:
        # Finish gracefully when the user aborts.
        handler.on_finish(output_dir=output_dir)
Batch compression .
6,117
def task(self, task_name):
    """Return an ENVI Py Engine Task object for *task_name*.

    The task URI is the engine name and task name joined by a colon.
    """
    task_uri = ':'.join((self._engine_name, task_name))
    return Task(uri=task_uri, cwd=self._cwd)
Returns an ENVI Py Engine Task object . See ENVI Py Engine Task for examples .
6,118
def tasks(self):
    """Return the list of all tasks known to the engine by executing
    the QueryTaskCatalog task.
    """
    task_input = {'taskName': 'QueryTaskCatalog'}
    output = taskengine.execute(task_input, self._engine_name, cwd=self._cwd)
    return output['outputParameters']['TASKS']
Returns a list of all tasks known to the engine .
6,119
def execute(query, auth=None, client=urllib_request.build_opener()):
    """Execute *query* and return its result.

    A query type may customise execution by defining ``__execute__``;
    otherwise the default execution method is used.

    NOTE(review): the default *client* opener is built once at import
    time and shared by every call -- confirm that is intentional.
    """
    exec_fn = getattr(type(query), '__execute__', _default_execute_method)
    return exec_fn(query, client, _make_auth(auth))
Execute a query returning its result
6,120
def execute_async(query, auth=None, client=event_loop):
    """Execute *query* asynchronously and return its result.

    A query type may customise async execution by defining
    ``__execute_async__``; otherwise ``Query.__execute_async__`` is used.
    """
    exc_fn = getattr(type(query), '__execute_async__', Query.__execute_async__)
    return exc_fn(query, client, _make_auth(auth))
Execute a query asynchronously returning its result
6,121
def secure_randint(min_value, max_value, system_random=None):
    """Return a random integer N with min_value <= N <= max_value.

    Draws from a cryptographically secure source
    (``random.SystemRandom``) unless a pre-built random instance is
    supplied via *system_random*.
    """
    rng = system_random or random.SystemRandom()
    return rng.randint(min_value, max_value)
Return a random integer N such that a < = N < = b .
6,122
def _merge_maps ( m1 , m2 ) : return type ( m1 ) ( chain ( m1 . items ( ) , m2 . items ( ) ) )
merge two Mapping objects keeping the type of the first mapping
6,123
def basic_auth(credentials):
    """Create an HTTP Basic authentication callable from a
    (username, password) pair.
    """
    joined = ':'.join(credentials)
    token = b64encode(joined.encode('ascii')).decode()
    return header_adder({'Authorization': 'Basic ' + token})
Create an HTTP basic authentication callable
6,124
def with_headers(self, headers):
    """Return a copy of this request with *headers* merged in.

    Existing headers are kept; duplicates take the new value.
    """
    return self.replace(headers=_merge_maps(self.headers, headers))
Create a new request with added headers
6,125
def with_params(self, params):
    """Return a copy of this request with *params* merged into its
    query parameters.

    Existing parameters are kept; duplicates take the new value.
    """
    return self.replace(params=_merge_maps(self.params, params))
Create a new request with added query parameters
6,126
def _get_bit ( self , n , hash_bytes ) : if hash_bytes [ n // 8 ] >> int ( 8 - ( ( n % 8 ) + 1 ) ) & 1 == 1 : return True return False
Determines if the n - th bit of passed bytes is 1 or 0 .
6,127
def _generate_matrix(self, hash_bytes):
    """Generate the boolean matrix describing which identicon blocks
    should be coloured.

    Only half of the columns (plus the middle column for odd widths)
    are derived from the hash bits; the other half is a horizontal
    mirror image.
    """
    half_columns = self.columns // 2 + self.columns % 2
    cells = self.rows * half_columns
    matrix = [[False] * self.columns for _ in range(self.rows)]
    for cell in range(cells):
        # hash_bytes[0] is skipped -- presumably reserved for something
        # else (e.g. colour selection); confirm against the caller.
        if self._get_bit(cell, hash_bytes[1:]):
            # NOTE(review): dividing by self.columns (not half_columns)
            # and taking the row as cell % self.rows is an unusual
            # traversal -- confirm the intended cell ordering.
            column = cell // self.columns
            row = cell % self.rows
            matrix[row][column] = True
            # Mirror the cell onto the right-hand side.
            matrix[row][self.columns - column - 1] = True
    return matrix
Generates matrix that describes which blocks should be coloured .
6,128
def _generate_image(self, matrix, width, height, padding, foreground, background, image_format):
    """Render the block *matrix* as an identicon image and return the
    raw encoded bytes in *image_format*.

    *padding* is (top, bottom, left, right) in pixels.  Raises
    ValueError when Pillow does not support the requested format.
    """
    image = Image.new("RGBA", (width + padding[2] + padding[3], height + padding[0] + padding[1]), background)
    draw = ImageDraw.Draw(image)
    block_width = width // self.columns
    block_height = height // self.rows
    for row, row_columns in enumerate(matrix):
        for column, cell in enumerate(row_columns):
            if cell:
                # Pillow's rectangle() coordinates are inclusive, hence
                # the -1 on the far corner.
                x1 = padding[2] + column * block_width
                y1 = padding[0] + row * block_height
                x2 = padding[2] + (column + 1) * block_width - 1
                y2 = padding[0] + (row + 1) * block_height - 1
                draw.rectangle((x1, y1, x2, y2), fill=foreground)
    stream = BytesIO()
    if image_format.upper() == "JPEG":
        # JPEG has no alpha channel; drop it before saving.
        image = image.convert(mode="RGB")
    try:
        image.save(stream, format=image_format, optimize=True)
    except KeyError:
        # Pillow signals an unknown format with KeyError.
        raise ValueError("Pillow does not support requested image format: %s" % image_format)
    image_raw = stream.getvalue()
    stream.close()
    return image_raw
Generates an identicon image in requested image format out of the passed block matrix with the requested width height padding foreground colour background colour and image format .
6,129
def _generate_ascii ( self , matrix , foreground , background ) : return "\n" . join ( [ "" . join ( [ foreground if cell else background for cell in row ] ) for row in matrix ] )
Generates an identicon image in the ASCII format . The image will just output the matrix used to generate the identicon .
6,130
def local_timezone(value):
    """Make a naive datetime aware by attaching the local timezone.

    Values that are not datetimes, or that are already aware, pass
    through unchanged.
    """
    is_naive_datetime = hasattr(value, "tzinfo") and value.tzinfo is None
    if not is_naive_datetime:
        return value
    return value.replace(tzinfo=dateutil.tz.tzlocal())
Add the local timezone to value to make it aware .
6,131
def dumps(data, **kwargs):
    """Convert a PPMP entity to JSON.

    datetime values are serialised to ISO-8601 strings and objects
    exposing a ``_data`` attribute are serialised via that attribute.
    Additional keyword arguments are forwarded to json.dumps.
    Raises TypeError for values that cannot be encoded.
    """
    def _encoder(value):
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        if hasattr(value, "_data"):
            return value._data
        raise TypeError('Could not encode %r' % value)
    return json.dumps(data, default=_encoder, **kwargs)
Convert a PPMP entity to JSON . Additional arguments are the same as accepted by json . dumps .
6,132
def setup_lilypond(path_to_lilypond_folder="default"):
    """Work out the current platform and call the matching LilyPond
    setup helper.

    Note the Linux helper takes no path argument, unlike the Windows
    and OSX ones; unsupported platforms raise KeyError.
    """
    options = {"win32": setup_lilypond_windows, "darwin": setup_lilypond_osx}
    if platform.startswith("linux"):
        setup_lilypond_linux()
    else:
        options[platform](path_to_lilypond_folder)
Optional helper method which works out the platform and calls the relevant setup method
6,133
def setup_lilypond_windows(path="default"):
    """Append the LilyPond binary folder to the PATH environment
    variable on Windows.

    With the default argument the standard install location is used.
    After running this a LilypondRenderer can be created without a
    lyscript.  Only affects the current process's environment.
    """
    default = "C:/Program Files (x86)/LilyPond/usr/bin"
    path_variable = os.environ['PATH'].split(";")
    if path == "default":
        path_variable.append(default)
    else:
        path_variable.append(path)
    os.environ['PATH'] = ";".join(path_variable)
Optional helper method which does the environment setup for lilypond in windows . If you've run this method, you do not need (and should not) provide a lyscript when you instantiate this class . As this method is static, you can run this method before you set up the LilypondRenderer instance .
6,134
def recursive_dict_to_dict(rdict):
    """Convert a (possibly nested) defaultdict into a plain dict.

    Only defaultdict values are converted recursively; everything else
    is kept as-is.
    """
    return {
        key: recursive_dict_to_dict(value) if isinstance(value, defaultdict) else value
        for key, value in rdict.items()
    }
Convert a recursive dict to a plain ol dict .
6,135
def scrub_dict(d):
    """Recursively remove empty values (empty strings, lists, dicts,
    None, 0, ...) from a dictionary or list.

    Values that become empty after scrubbing are removed as well; any
    non-dict, non-list value is returned unchanged.
    """
    if type(d) is dict:
        # BUG FIX: dict.iteritems() is Python 2 only; use items().
        return dict((k, scrub_dict(v)) for k, v in d.items() if v and scrub_dict(v))
    elif type(d) is list:
        return [scrub_dict(v) for v in d if v and scrub_dict(v)]
    else:
        return d
Recursively inspect a dictionary and remove all empty values including empty strings lists and dictionaries .
6,136
def _to_json_type ( obj , classkey = None ) : if isinstance ( obj , dict ) : data = { } for ( k , v ) in obj . items ( ) : data [ k ] = _to_json_type ( v , classkey ) return data elif hasattr ( obj , "_ast" ) : return _to_json_type ( obj . _ast ( ) ) elif hasattr ( obj , "__iter__" ) : return [ _to_json_type ( v , classkey ) for v in obj ] elif hasattr ( obj , "__dict__" ) : data = dict ( [ ( key , _to_json_type ( value , classkey ) ) for key , value in obj . __dict__ . iteritems ( ) if not callable ( value ) and not key . startswith ( '_' ) ] ) if classkey is not None and hasattr ( obj , "__class__" ) : data [ classkey ] = obj . __class__ . __name__ return data else : return obj
Recursively convert the object instance into a valid JSON type .
6,137
def to_dict(obj):
    """Convert an object instance into a dict with empty values removed.

    Raises ValueError when *obj* does not convert to a dictionary.
    """
    converted = _to_json_type(obj)
    if not isinstance(converted, dict):
        raise ValueError("The value provided must be an object.")
    return scrub_dict(converted)
Convert an instance of an object into a dict .
6,138
def print_exc_plus(stream=sys.stdout):
    """Print the current exception's traceback plus the local variables
    of every frame, innermost last.

    Falls back to printing just the exception value when the normal
    traceback print itself fails.
    """
    write = stream.write
    flush = stream.flush
    tp, value, tb = sys.exc_info()
    # Walk to the innermost traceback entry ...
    while tb.tb_next:
        tb = tb.tb_next
    # ... then collect the frame chain, outermost first.
    stack = list()
    f = tb.tb_frame
    while f:
        stack.append(f)
        f = f.f_back
    stack.reverse()
    try:
        traceback.print_exc(None, stream)
    except BaseException as e:
        write(u("FAILED PRINTING TRACE\n\n"))
        write(u(str(value)))
        write(u('\n\n'))
    finally:
        flush()
    write(u('Locals by frame, innermost last\n'))
    for frame in stack:
        write(u('\nFrame %s in %s at line %s\n' % (frame.f_code.co_name, frame.f_code.co_filename, frame.f_lineno)))
        for key, value, in frame.f_locals.items():
            write(u('\t%20s = ' % key))
            try:
                # str() of an arbitrary local can itself raise.
                write(u('%s\n' % value))
            except BaseException:
                write(u('<ERROR WHILE PRINTING VALUE>\n'))
    flush()
print normal traceback information with some local arg values
6,139
def format_single_space_only(text):
    """Collapse runs of spaces into a single space and strip the ends.

    Only the space character is collapsed; tabs and newlines inside
    words are left untouched.
    """
    words = text.strip().split(" ")
    return " ".join(word for word in words if word)
Revise consecutive empty space to single space .
6,140
def format_title(text):
    """Title-case *text*: capitalise the first letter of every word
    except function words, but always capitalise the first word.
    """
    stripped = text.strip()
    if not stripped:
        return stripped
    words = [w for w in stripped.lower().split(" ") if w]
    capitalised = [
        w if w in FUNCTION_WORD else w[0].upper() + w[1:]
        for w in words
    ]
    # The leading word is capitalised even when it is a function word.
    capitalised[0] = capitalised[0][0].upper() + capitalised[0][1:]
    return " ".join(capitalised)
Capitalize first letter for each words except function words .
6,141
def format_person_name(text):
    """Capitalise the first letter of each space-separated part of a
    person's name; the rest of each part is lower-cased.
    """
    cleaned = text.strip()
    if not cleaned:
        return cleaned
    parts = [part for part in cleaned.lower().split(" ") if part]
    return " ".join(part[0].upper() + part[1:] for part in parts)
Capitalize first letter for each part of the name .
6,142
def dump(self, path):
    """Dump this DictTree to *path*.

    First attempts to write the UTF-8 text form, then pickles
    ``self.__data__`` to the same path.  NOTE(review): the pickle write
    is unconditional, so it always overwrites the text just written --
    if the intent was text-first with a pickle fallback (mirroring
    ``load``), the second write should live in the except branch;
    confirm.
    """
    try:
        with open(path, "wb") as f:
            f.write(self.__str__().encode("utf-8"))
    except:  # noqa: E722 -- best-effort; any failure falls through
        pass
    with open(path, "wb") as f:
        pickle.dump(self.__data__, f)
dump DictTree data to json files .
6,143
def load(cls, path):
    """Load a DictTree from *path*.

    Tries to parse the file as UTF-8 JSON first; on any failure falls
    back to unpickling it.
    """
    try:
        with open(path, "rb") as f:
            return cls(__data__=json.loads(f.read().decode("utf-8")))
    except:  # noqa: E722 -- any decode/parse error triggers the pickle fallback
        pass
    with open(path, "rb") as f:
        # NOTE(review): unpickling is unsafe on untrusted files.
        return cls(__data__=pickle.load(f))
load DictTree from json files .
6,144
def values(self):
    """Iterate over this node's child subtrees, each wrapped as a
    DictTree; the META and KEY bookkeeping entries are skipped.
    """
    for key, value in self.__data__.items():
        if key not in (META, KEY):
            yield DictTree(__data__=value)
Iterate values .
6,145
def keys_at(self, depth, counter=1):
    """Iterate over the keys found at the given *depth*.

    A depth below 1 yields the ROOT sentinel.  *counter* tracks the
    current recursion level and is an implementation detail -- callers
    should not pass it.
    """
    if depth < 1:
        yield ROOT
    else:
        if counter == depth:
            # Reached the requested level: emit this node's own keys.
            for key in self.keys():
                yield key
        else:
            counter += 1
            for dict_tree in self.values():
                for key in dict_tree.keys_at(depth, counter):
                    yield key
Iterate keys at specified depth .
6,146
def values_at(self, depth):
    """Iterate over the subtree values at the given *depth*.

    A depth below 1 yields this node itself.
    """
    if depth < 1:
        yield self
        return
    for subtree in self.values():
        yield from subtree.values_at(depth - 1)
Iterate values at specified depth .
6,147
def items_at(self, depth):
    """Iterate over (key, value) pairs at the given *depth*.

    A depth below 1 yields (ROOT, self); depth 1 yields this node's own
    items; deeper levels recurse into the child subtrees.
    """
    if depth < 1:
        yield ROOT, self
    elif depth == 1:
        yield from self.items()
    else:
        for subtree in self.values():
            yield from subtree.items_at(depth - 1)
Iterate items at specified depth .
6,148
def stats(self, result=None, counter=0):
    """Collect per-depth node statistics for this tree.

    Returns a list of OrderedDicts, one per depth, each holding
    "depth", "leaf" (childless nodes) and "root" (nodes with children)
    counts, sorted by depth.  *result* and *counter* are recursion
    state and should not be passed by callers.
    """
    if result is None:
        result = dict()
    if counter == 0:
        # Depth 0 describes this (top-level) node itself.
        if len(self):
            result[0] = {"depth": 0, "leaf": 0, "root": 1}
        else:
            result[0] = {"depth": 0, "leaf": 1, "root": 0}
    counter += 1
    if len(self):
        result.setdefault(counter, {"depth": counter, "leaf": 0, "root": 0})
        for dict_tree in self.values():
            # Classify each child, then recurse into it.
            if len(dict_tree):
                result[counter]["root"] += 1
            else:
                result[counter]["leaf"] += 1
            dict_tree.stats(result, counter)
    return [
        collections.OrderedDict([
            ("depth", info["depth"]),
            ("leaf", info["leaf"]),
            ("root", info["root"]),
        ])
        for info in sorted(result.values(), key=lambda x: x["depth"])
    ]
Display the node stats info on specific depth in this dict .
6,149
async def put(self, data, *, pri=None, ttl=None, ttr=None, delay=None):
    """Put *data* onto the queue and return the newly created Task.

    Only the options that were explicitly supplied are forwarded to the
    server call.
    """
    options = {
        name: value
        for name, value in (('pri', pri), ('ttl', ttl), ('ttr', ttr), ('delay', delay))
        if value is not None
    }
    res = await self.conn.call(self.__funcs['put'], (data, options))
    return self._create_task(res.body)
Puts data to the queue and returns a newly created Task
6,150
async def take(self, timeout=None):
    """Take a task from the queue, waiting up to *timeout* if given.

    Returns None when no task arrives within the timeout.
    """
    call_args = None if timeout is None else (timeout,)
    res = await self.conn.call(self.__funcs['take'], call_args)
    if len(res.body) > 0:
        return self._create_task(res.body)
    return None
Takes task from the queue waiting the timeout if specified
6,151
async def peek(self, task_id):
    """Return the task identified by *task_id* without changing its
    state.
    """
    args = (task_id,)
    res = await self.conn.call(self.__funcs['peek'], args)
    return self._create_task(res.body)
Get task without changing its state
6,152
async def kick(self, count):
    """Kick *count* tasks back into the queue and return the server's
    reported count.
    """
    args = (count,)
    res = await self.conn.call(self.__funcs['kick'], args)
    # Servers older than 1.7 wrap the result in an extra list level.
    if self.conn.version < (1, 7):
        return res.body[0][0]
    return res.body[0]
Kick count tasks from queue
6,153
def _parse_content(response):
    """Parse an HTTP response body as JSON.

    Raises ApiError for non-200 responses and for payloads whose "ok"
    flag is false; otherwise returns the parsed payload.
    """
    if response.status_code != 200:
        raise ApiError(f'unknown error: {response.content.decode()}')
    result = json.loads(response.content)
    if not result['ok']:
        raise ApiError(f'{result["error"]}: {result.get("detail")}')
    return result
parse the response body as JSON raise on errors
6,154
def paginated_retrieval(methodname, itemtype):
    """Decorator factory for retrieval queries driven by query params.

    Composes a pipeline that maps the wrapped generator's yields into
    GET requests for *methodname*, handles the basic interaction, and
    makes the query reusable.

    NOTE(review): *itemtype* is accepted but never used here -- confirm
    whether it should feed into the pipeline.
    """
    return compose(
        reusable,
        basic_interaction,
        map_yield(partial(_params_as_get, methodname)),
    )
decorator factory for retrieval queries from query params
6,155
def json_post(methodname, rtype, key):
    """Decorator factory for JSON POST queries.

    The wrapped generator's single yield becomes a POST to
    *methodname*; the response is reduced to *key* and loaded through
    the registry as *rtype*; the resulting query is reusable.
    """
    return compose(
        reusable,
        map_return(registry(rtype), itemgetter(key)),
        basic_interaction,
        map_yield(partial(_json_as_post, methodname)),
        oneyield,
    )
decorator factory for json POST queries
6,156
def _read_config(cfg_file):
    """Return a ConfigParser populated from *cfg_file*.

    When the file does not exist, an empty parser containing the
    expected sections is returned instead.
    """
    config = ConfigParser()
    # Preserve option-name case (the default optionxform lower-cases).
    config.optionxform = lambda option: option
    if os.path.exists(cfg_file):
        config.read(cfg_file)
    else:
        config.add_section(_MAIN_SECTION_NAME)
        config.add_section(_ENVIRONMENT_SECTION_NAME)
    return config
Return a ConfigParser object populated from the settings . cfg file .
6,157
def _write_config ( config , cfg_file ) : directory = os . path . dirname ( cfg_file ) if not os . path . exists ( directory ) : os . makedirs ( directory ) with open ( cfg_file , "w+" ) as output_file : config . write ( output_file )
Write a config object to the settings . cfg file .
6,158
def get_environment():
    """Return all environment values from the config files.

    User-level settings take precedence over system-level ones.
    """
    section = _ENVIRONMENT_SECTION_NAME
    sys_cfg = _read_config(_SYSTEM_CONFIG_FILE)
    usr_cfg = _read_config(_USER_CONFIG_FILE)
    merged = dict(sys_cfg.items(section)) if sys_cfg.has_section(section) else {}
    if usr_cfg.has_section(section):
        merged.update(dict(usr_cfg.items(section)))
    return merged
Return all environment values from the config files . Values stored in the user configuration file will take precedence over values stored in the system configuration file .
6,159
def set_environment(environment, system=False):
    """Store engine environment values in the user (default) or system
    config file.
    """
    config_filename = _SYSTEM_CONFIG_FILE if system is True else _USER_CONFIG_FILE
    config = _read_config(config_filename)
    for key, value in environment.items():
        config.set(_ENVIRONMENT_SECTION_NAME, key, value)
    _write_config(config, config_filename)
Set engine environment values in the config file .
6,160
def remove_environment(environment_var_name, system=False):
    """Remove the named environment setting from the user (default) or
    system config file.
    """
    config_filename = _SYSTEM_CONFIG_FILE if system is True else _USER_CONFIG_FILE
    config = _read_config(config_filename)
    section = _ENVIRONMENT_SECTION_NAME
    # remove_option returns False (no-op) when the key is absent.
    config.remove_option(section, environment_var_name)
    _write_config(config, config_filename)
Remove the specified environment setting from the appropriate config file .
6,161
def get(property_name):
    """Return the value of the named configuration property.

    The user config file is consulted first, then the system file.
    Raises NoConfigOptionError when the property exists in neither.
    Note: shadows the built-in ``get`` name at module level.
    """
    config = _read_config(_USER_CONFIG_FILE)
    section = _MAIN_SECTION_NAME
    try:
        property_value = config.get(section, property_name)
    except (NoOptionError, NoSectionError) as error:
        # Fall back to the system-wide configuration file.
        try:
            config = _read_config(_SYSTEM_CONFIG_FILE)
            property_value = config.get(section, property_name)
        except (NoOptionError, NoSectionError) as error:
            raise NoConfigOptionError(error)
    return property_value
Returns the value of the specified configuration property . Property values stored in the user configuration file take precedence over values stored in the system configuration file .
6,162
def set(property_name, value, system=False):
    """Set the named configuration property to *value* in the user
    (default) or system config file.

    Note: shadows the built-in ``set`` name at module level.
    """
    config_filename = _SYSTEM_CONFIG_FILE if system is True else _USER_CONFIG_FILE
    config = _read_config(config_filename)
    section = _MAIN_SECTION_NAME
    config.set(section, property_name, value)
    _write_config(config, config_filename)
Sets the configuration property to the specified value .
6,163
def register(self, id, name, address, port=None, tags=None, check=None):
    """Register a new service with the local consul agent.

    Optional port, tags and health check are only sent when supplied.
    Raises consulRegistrationError when the agent does not answer 200.
    """
    service = {'ID': id, 'Name': name, 'Address': address}
    if port:
        service['Port'] = int(port)
    if tags:
        service['Tags'] = tags
    if check:
        service['Check'] = check
    r = requests.put(self.url_register, json=service)
    if r.status_code != 200:
        raise consulRegistrationError('PUT returned {}'.format(r.status_code))
    return r
Register a new service with the local consul agent
6,164
def deregister(self, id):
    """Deregister the service *id* from the local consul agent.

    Raises consulDeregistrationError when the agent does not answer 200.
    """
    r = requests.put('{}/{}'.format(self.url_deregister, id))
    if r.status_code != 200:
        raise consulDeregistrationError('PUT returned {}'.format(r.status_code))
    return r
Deregister a service with the local consul agent
6,165
def info(self, name):
    """Return the agent's info about the named service as parsed JSON."""
    r = requests.get('{}/{}'.format(self.url_service, name))
    return r.json()
Info about a given service
6,166
def star(self) -> snug.Query[bool]:
    """Star this repo; True when the server answers 204 No Content."""
    req = snug.PUT(BASE + f'/user/starred/{self.owner}/{self.name}')
    return (yield req).status_code == 204
star this repo
6,167
def device_message(device, code, ts=None, origin=None, type=None, severity=None, title=None, description=None, hint=None, **metaData):
    """Build a time-stamped single-message payload and return it as
    JSON.

    When *ts* is None the current local time is used.  Extra keyword
    arguments become message metadata.  Note: *type* shadows the
    built-in within this function.
    """
    if ts is None:
        ts = local_now()
    payload = MessagePayload(device=device)
    payload.messages.append(Message(code=code, ts=ts, origin=origin, type=type, severity=severity, title=title, description=description, hint=hint, **metaData))
    return dumps(payload)
This quickly builds a time - stamped message . If ts is None the current time is used .
6,168
def _dump(obj, abspath, serializer_type,
          dumper_func=None, compress=True,
          overwrite=False, verbose=False, **kwargs):
    """Serialise *obj* with *dumper_func* and write it to *abspath*.

    For the "str" serializer the result is UTF-8 encoded before an
    optional zlib compression; the write is atomic.  Returns the
    serialised text/bytes, or None when the file exists and *overwrite*
    is not allowed.  Raises TypeError when *dumper_func* is not a
    function.
    """
    _check_serializer_type(serializer_type)
    if not inspect.isfunction(dumper_func):
        raise TypeError("dumper_func has to be a function take object as input "
                        "and return binary!")
    prt_console("\nDump to '%s' ..." % abspath, verbose)
    if os.path.exists(abspath):
        if not overwrite:
            prt_console(" Stop! File exists and overwrite is not allowed", verbose,)
            return
    # BUG FIX: time.clock() was removed in Python 3.8.
    st = time.perf_counter()
    b_or_str = dumper_func(obj, **kwargs)
    # BUG FIX: the original used ``is "str"`` (identity, not equality).
    if serializer_type == "str":
        b = b_or_str.encode("utf-8")
    else:
        b = b_or_str
    if compress:
        b = zlib.compress(b)
    with atomic_write(abspath, overwrite=overwrite, mode="wb") as f:
        f.write(b)
    elapsed = time.perf_counter() - st
    prt_console(" Complete! Elapse %.6f sec." % elapsed, verbose)
    if serializer_type == "str":
        return b_or_str
    else:
        return b
Dump object to file .
6,169
def _load(abspath, serializer_type,
          loader_func=None, decompress=True,
          verbose=False, **kwargs):
    """Load an object from *abspath* using *loader_func*.

    The file is optionally zlib-decompressed first; for the "str"
    serializer the bytes are decoded as UTF-8 before loading.  Raises
    ValueError when the file does not exist and TypeError when
    *loader_func* is not a function.
    """
    _check_serializer_type(serializer_type)
    if not inspect.isfunction(loader_func):
        raise TypeError("loader_func has to be a function take binary as input "
                        "and return an object!")
    prt_console("\nLoad from '%s' ..." % abspath, verbose)
    if not os.path.exists(abspath):
        raise ValueError("'%s' doesn't exist." % abspath)
    # BUG FIX: time.clock() was removed in Python 3.8.
    st = time.perf_counter()
    with open(abspath, "rb") as f:
        b = f.read()
    if decompress:
        b = zlib.decompress(b)
    # BUG FIX: the original used ``is "str"`` (identity, not equality).
    if serializer_type == "str":
        obj = loader_func(b.decode("utf-8"), **kwargs)
    else:
        obj = loader_func(b, **kwargs)
    elapsed = time.perf_counter() - st
    prt_console(" Complete! Elapse %.6f sec." % elapsed, verbose)
    return obj
load object from file .
6,170
def _get_response(self, method, endpoint, data=None):
    """Issue an Ivona API request and return the response.

    Wraps the session call so errors are caught in one place: raises
    IvonaAPIException on an Amazon error header or a non-200 status.
    """
    url = urljoin(IVONA_REGION_ENDPOINTS[self.region], endpoint)
    response = getattr(self.session, method)(url, json=data,)
    # Amazon signals API-level failures via this header.
    if 'x-amzn-ErrorType' in response.headers:
        raise IvonaAPIException(response.headers['x-amzn-ErrorType'])
    if response.status_code != requests.codes.ok:
        raise IvonaAPIException("Something wrong happened: {}".format(response.json()))
    return response
Helper method for wrapping API requests mainly for catching errors in one place .
6,171
def get_available_voices(self, language=None, gender=None):
    """Return the list of available voices via the ListVoices endpoint,
    optionally filtered by *language* and/or *gender*.
    """
    endpoint = 'ListVoices'
    # BUG FIX: the original issued two dict.update({'Voice': ...}) calls,
    # so supplying a gender silently discarded the language filter; it
    # also left a stray debug print() of the payload.
    voice_filter = {}
    if language:
        voice_filter['Language'] = language
    if gender:
        voice_filter['Gender'] = gender
    data = {'Voice': voice_filter} if voice_filter else {}
    response = self._get_response('get', endpoint, data)
    return response.json()['Voices']
Returns a list of available voices via ListVoices endpoint
6,172
def text_to_speech(self, text, file, voice_name=None, language=None):
    """Synthesize *text* via the CreateSpeech endpoint and write the
    resulting audio bytes to the open binary *file* object.

    Voice name and language default to the instance-level settings.
    """
    endpoint = 'CreateSpeech'
    data = {
        'Input': {
            'Data': text,
        },
        'OutputFormat': {
            'Codec': self.codec.upper(),
        },
        'Parameters': {
            'Rate': self.rate,
            'Volume': self.volume,
            'SentenceBreak': self.sentence_break,
            'ParagraphBreak': self.paragraph_break,
        },
        'Voice': {
            'Name': voice_name or self.voice_name,
            'Language': language or self.language,
        },
    }
    response = self._get_response('post', endpoint, data)
    file.write(response.content)
Saves given text synthesized audio file via CreateSpeech endpoint
6,173
def create_client_with_auto_poll(api_key, poll_interval_seconds=60,
                                 max_init_wait_time_seconds=5,
                                 on_configuration_changed_callback=None,
                                 config_cache_class=None, base_url=None):
    """Create a ConfigCatClient configured for Auto Poll mode with
    custom options.

    The intervals are clamped to sane minimums (poll >= 1s, init wait
    >= 0s).  Raises ConfigCatClientException when no API key is given.
    """
    if api_key is None:
        raise ConfigCatClientException('API Key is required.')
    poll_interval_seconds = max(1, poll_interval_seconds)
    max_init_wait_time_seconds = max(0, max_init_wait_time_seconds)
    return ConfigCatClient(api_key, poll_interval_seconds,
                           max_init_wait_time_seconds,
                           on_configuration_changed_callback, 0,
                           config_cache_class, base_url)
Create an instance of ConfigCatClient and setup Auto Poll mode with custom options
6,174
def create_client_with_lazy_load(api_key, cache_time_to_live_seconds=60,
                                 config_cache_class=None, base_url=None):
    """Create a ConfigCatClient configured for Lazy Load mode with
    custom options.

    The cache TTL is clamped to a 1-second minimum.  Raises
    ConfigCatClientException when no API key is given.
    """
    if api_key is None:
        raise ConfigCatClientException('API Key is required.')
    cache_time_to_live_seconds = max(1, cache_time_to_live_seconds)
    return ConfigCatClient(api_key, 0, 0, None, cache_time_to_live_seconds,
                           config_cache_class, base_url)
Create an instance of ConfigCatClient and setup Lazy Load mode with custom options
6,175
def create_client_with_manual_poll(api_key, config_cache_class=None,
                                   base_url=None):
    """Create a ConfigCatClient set up for Manual Poll mode.

    :raises ConfigCatClientException: if *api_key* is None.
    """
    if api_key is None:
        raise ConfigCatClientException('API Key is required.')
    # Zeroed poll/TTL parameters select manual polling in the client.
    return ConfigCatClient(api_key, 0, 0, None, 0, config_cache_class,
                           base_url)
Create an instance of ConfigCatClient and setup Manual Poll mode with custom options
6,176
def basic_query(returns):
    """Decorator factory for NS queries.

    Composes the request/response pipeline: prefix the API path, prepare
    parameters, parse the raw response, and deserialize with *returns*.
    """
    pipeline = (
        reusable,
        map_send(parse_request),
        map_yield(prepare_params, snug.prefix_adder(API_PREFIX)),
        map_return(loads(returns)),
        oneyield,
    )
    return compose(*pipeline)
decorator factory for NS queries
6,177
def departures(station: str) -> snug.Query[t.List[Departure]]:
    """Query the departures for a given station."""
    params = {'station': station}
    return snug.GET('avt', params=params)
departures for a station
6,178
def journey_options(origin: str,
                    destination: str,
                    via: t.Optional[str] = None,
                    before: t.Optional[int] = None,
                    after: t.Optional[int] = None,
                    time: t.Optional[datetime] = None,
                    hsl: t.Optional[bool] = None,
                    year_card: t.Optional[bool] = None) -> (
                        snug.Query[t.List[Journey]]):
    """Query journey recommendations from *origin* to *destination*.

    Optional arguments are passed through as-is; None values are simply
    included as empty parameters by the query layer.
    """
    params = {
        'fromStation': origin,
        'toStation': destination,
        'viaStation': via,
        'previousAdvices': before,
        'nextAdvices': after,
        'dateTime': time,
        'hslAllowed': hsl,
        'yearCard': year_card,
    }
    return snug.GET('treinplanner', params=params)
journey recommendations from an origin to a destination station
6,179
def rand_str(length, allowed=CHARSET_ALPHA_DIGITS):
    """Generate a fixed-length random string from the *allowed* pool.

    :param length: number of characters to produce.
    :param allowed: candidate characters, sampled with replacement.
    """
    # str.join over a generator replaces the manual append loop of the
    # original implementation.
    return "".join(random.choice(allowed) for _ in range(length))
Generate fixed - length random string from your allowed character pool .
6,180
def rand_hexstr(length, lower=True):
    """Generate a fixed-length random hex string, e.g. an md5-like token.

    :param lower: use lowercase hex digits when True, uppercase otherwise.
    """
    charset = CHARSET_HEXSTR_LOWER if lower else CHARSET_HEXSTR_UPPER
    return rand_str(length, allowed=charset)
Generate a fixed-length random hex string, usually for md5.
6,181
def rand_alphastr(length, lower=True, upper=True):
    """Generate a fixed-length random alphabetic string.

    :param lower: include lowercase letters in the pool.
    :param upper: include uppercase letters in the pool.
    :raises ValueError: if both flags are False (empty pool).  The
        original raised a bare ``Exception``; ValueError is a subclass of
        Exception, so existing callers still catch it.
    """
    if lower and upper:
        return rand_str(length, allowed=string.ascii_letters)
    if lower:
        return rand_str(length, allowed=string.ascii_lowercase)
    if upper:
        return rand_str(length, allowed=string.ascii_uppercase)
    raise ValueError("at least one of 'lower' or 'upper' must be True")
Generate fixed - length random alpha only string .
6,182
def rand_article(num_p=(4, 10), num_s=(2, 15), num_w=(5, 40)):
    """Generate random article text.

    :param num_p: (min, max) paragraphs per article.
    :param num_s: (min, max) sentences per paragraph.
    :param num_w: (min, max) words per sentence.
    """
    paragraphs = []
    for _ in range(random.randint(*num_p)):
        sentences = []
        for _ in range(random.randint(*num_s)):
            words = [rand_str(random.randint(1, 15), string.ascii_lowercase)
                     for _ in range(random.randint(*num_w))]
            sentences.append(" ".join(words))
        paragraphs.append(". ".join(sentences))
    return "\n\n".join(paragraphs)
Random article text .
6,183
def _resolve_dep(self, key):
    """Resolve dependencies that were waiting on *key*.

    Call after the item *key* has been added to the available items.
    Dependents that become fully resolved are then resolved recursively.
    """
    if key not in self.future_values_key_dep:
        return
    # pop() both reads and removes the pending-dependents entry.
    dependents = self.future_values_key_dep.pop(key)
    fully_resolved = [dep for dep in dependents
                      if self.__resolve_dep_helper(dep, key) is True]
    for dep in fully_resolved:
        self._resolve_dep(dep)
This method resolves dependencies for the given key. Call the method after the item key was added to the list of available items.
6,184
def _get_all_refs(self, dep, handled_refs=None):
    """Return a list of all (transitive) dependencies of item *dep*.

    ``handled_refs`` tracks items already visited so cyclic dependency
    graphs terminate.  BUG FIX: the original recorded only the root item,
    so an indirect cycle (e.g. b -> c -> b reached from a) recursed
    forever; every visited item is now recorded before descending.
    """
    if handled_refs is None:
        handled_refs = []
    if dep in handled_refs:
        return []
    handled_refs.append(dep)  # mark visited before recursing
    res = []
    if dep in self.future_values_key_item:
        res.extend(self.future_values_key_item[dep]["dependencies"].values())
    indirect = []
    for h_d in res:
        indirect.extend(self._get_all_refs(h_d, handled_refs))
    res.extend(indirect)
    # Deduplicate (order is not guaranteed, matching the original).
    return list(set(res))
Get a list of all dependencies for the given item dep.
6,185
def parse(response):
    """Check an HTTP response for API errors.

    Returns the response unchanged when it is not a 400; otherwise raises
    ApiError with the server-provided message (empty when unparsable).
    """
    if response.status_code != 400:
        return response
    try:
        msg = json.loads(response.content)['message']
    except (KeyError, ValueError):
        # Missing 'message' key or malformed JSON body.
        msg = ''
    raise ApiError(msg)
check for errors
6,186
def create(cls, host='localhost', port=14999, auto_reconnect=True, loop=None,
           protocol_class=AVR, update_callback=None):
    """Initiate a connection to a specific device.

    Old-style (``yield from``) coroutine: builds a connection object,
    wires a reconnect-on-drop callback into the protocol, performs the
    first connect, and returns the connection.

    NOTE(review): presumably decorated as a classmethod/coroutine at the
    class level -- the decorators are not visible from here; confirm.

    :param host: device hostname or address.
    :param port: device TCP port; must be non-negative.
    :param auto_reconnect: reconnect automatically when the link drops.
    :param loop: event loop; defaults to the current loop.
    :param protocol_class: protocol factory, AVR by default.
    :param update_callback: forwarded to the protocol for state updates.
    :returns: the connected instance of *cls*.
    """
    assert port >= 0, 'Invalid port value: %r' % (port)
    conn = cls()
    conn.host = host
    conn.port = port
    # Fall back to the running event loop when none is supplied.
    conn._loop = loop or asyncio.get_event_loop()
    conn._retry_interval = 1  # seconds; presumably grown by _reconnect backoff -- confirm
    conn._closed = False
    conn._closing = False
    conn._halted = False
    conn._auto_reconnect = auto_reconnect

    def connection_lost():
        # Schedule a reconnect unless the drop was caused by close().
        if conn._auto_reconnect and not conn._closing:
            ensure_future(conn._reconnect(), loop=conn._loop)

    conn.protocol = protocol_class(
        connection_lost_callback=connection_lost,
        loop=conn._loop,
        update_callback=update_callback)
    # Initial connection attempt before handing the object back.
    yield from conn._reconnect()
    return conn
Initiate a connection to a specific device .
6,187
def close(self):
    """Close the AVR device connection and do not try to reconnect."""
    self.log.warning('Closing connection to AVR')
    # Flag first so the connection-lost callback skips auto-reconnect.
    self._closing = True
    transport = self.protocol.transport
    if transport:
        transport.close()
Close the AVR device connection and don't try to reconnect.
6,188
def _compress_obj(obj, level):
    """Pickle *obj* and zlib-compress the result to bytes.

    :param level: zlib compression level.
    """
    # protocol=2 keeps the pickle loadable from Python 2 as well.
    pickled = pickle.dumps(obj, protocol=2)
    return zlib.compress(pickled, level)
Compress object to bytes .
6,189
def compress(obj, level=6, return_type="bytes"):
    """Compress anything to bytes or to a base64 string.

    Dispatches on the input type: raw bytes, text, or an arbitrary
    picklable object.

    :param return_type: 'bytes' for raw bytes, 'str' for base64 text.
    :raises ValueError: on an unknown *return_type*.
    """
    if isinstance(obj, binary_type):
        compressed = _compress_bytes(obj, level)
    elif isinstance(obj, string_types):
        compressed = _compress_str(obj, level)
    else:
        compressed = _compress_obj(obj, level)
    if return_type == "bytes":
        return compressed
    if return_type == "str":
        return base64.b64encode(compressed).decode("utf-8")
    raise ValueError("'return_type' has to be one of 'bytes', 'str'!")
Compress anything to bytes or string .
6,190
def decompress(obj, return_type="bytes"):
    """De-compress data back to its original form.

    Accepts raw bytes or a base64 string.

    :param return_type: 'bytes', 'str' (utf-8 text) or 'obj' (unpickle).
    :raises TypeError: when *obj* is neither bytes nor str.
    :raises ValueError: on an unknown *return_type*.
    """
    if isinstance(obj, binary_type):
        raw = zlib.decompress(obj)
    elif isinstance(obj, string_types):
        raw = zlib.decompress(base64.b64decode(obj.encode("utf-8")))
    else:
        raise TypeError("input cannot be anything other than str and bytes!")
    if return_type == "bytes":
        return raw
    if return_type == "str":
        return raw.decode("utf-8")
    if return_type == "obj":
        return pickle.loads(raw)
    raise ValueError("'return_type' has to be one of 'bytes', 'str' or 'obj'!")
De - compress it to it s original .
6,191
def build_signature_template(key_id, algorithm, headers):
    """Build the Signature template for the Authorization header.

    ``'%s'`` is left as a placeholder for the computed signature value.
    Header names are lowercased and space-joined per the HTTP Signatures
    draft convention.
    """
    params = {'keyId': key_id, 'algorithm': algorithm, 'signature': '%s'}
    if headers:
        params['headers'] = ' '.join(h.lower() for h in headers)
    pairs = ('{}="{}"'.format(name, value) for name, value in params.items())
    return 'Signature {0}'.format(','.join(pairs))
Build the Signature template for use with the Authorization header .
6,192
def train(self, data, key_id, key_lat, key_lng, clear_old=True):
    """Feed point data into the database.

    :param data: iterable of records.
    :param key_id: callable extracting the id from a record.
    :param key_lat: callable extracting the latitude from a record.
    :param key_lng: callable extracting the longitude from a record.
    :param clear_old: drop and recreate the point table first.
    """
    engine, t_point = self.engine, self.t_point
    if clear_old:
        # Best-effort drop; the table may not exist yet.  The original
        # used a bare ``except`` which also swallowed KeyboardInterrupt
        # and SystemExit -- narrowed to Exception.
        try:
            t_point.drop(engine)
        except Exception:
            pass
        t_point.create(engine)
    table_data = [
        {
            "id": key_id(record),
            "lat": key_lat(record),
            "lng": key_lng(record),
            "data": record,
        }
        for record in data
    ]
    engine.execute(t_point.insert(), table_data)
    # Composite index speeds up the bounding-box pre-filter in queries.
    index = Index('idx_lat_lng', t_point.c.lat, t_point.c.lng)
    index.create(engine)
Feed data into database .
6,193
def find_n_nearest(self, lat, lng, n=5, radius=None):
    """Find up to *n* nearest points within *radius* of (lat, lng).

    :returns: list of (distance, data) tuples, closest first.
    """
    from math import radians  # local import: the file only pulled in cos
    engine, t_point = self.engine, self.t_point
    if radius:
        # Rough degrees-per-radius-unit bounding box, with a 1.05 margin,
        # used as a cheap SQL pre-filter before the exact distance test.
        dist_btwn_lat_deg = 69.172
        # BUG FIX: cos() takes radians; the original passed degrees.
        dist_btwn_lon_deg = cos(radians(lat)) * 69.172
        lat_degr_rad = abs(radius * 1.05 / dist_btwn_lat_deg)
        lon_degr_rad = abs(radius * 1.05 / dist_btwn_lon_deg)
        # BUG FIX: the original compared the longitude bounds against the
        # ``lat`` column and used ``>=`` for the upper bound, making the
        # bounding-box filter wrong.
        filters = [
            t_point.c.lat >= lat - lat_degr_rad,
            t_point.c.lat <= lat + lat_degr_rad,
            t_point.c.lng >= lng - lon_degr_rad,
            t_point.c.lng <= lng + lon_degr_rad,
        ]
    else:
        radius = 999999.9  # effectively unlimited
        filters = []
    s = select([t_point]).where(and_(*filters))
    heap = []
    for row in engine.execute(s):
        dist = great_circle((lat, lng), (row.lat, row.lng))
        if dist <= radius:
            heap.append((dist, row.data))
    return heapq.nsmallest(n, heap, key=lambda x: x[0])
Find n nearest point within certain distance from a point .
6,194
def sample(self, k):
    """Return a copy of this collection whose iterator yields a uniform
    random sample of up to *k* tweets (reservoir sampling).

    NOTE: intentionally duplicates some smappdragon iteration logic;
    strip_tweets is purposely unsupported here.
    """
    def new_get_iterators():
        tweet_parser = smappdragon.TweetParser()
        it = iter(self.get_collection_iterators())
        # Classic reservoir sampling: seed with the first k items, then
        # replace reservoir slots with decreasing probability.
        sample = list(itertools.islice(it, k))
        random.shuffle(sample)
        for i, item in enumerate(it, start=k + 1):
            j = random.randrange(i)
            if j < k:
                sample[j] = item
        # BUG FIX: the original compared limits against an undefined
        # ``count`` (NameError); track the number of tweets yielded.
        count = 0
        for tweet in sample:
            if all([collection.limit != 0 and collection.limit <= count
                    for collection in self.collections]):
                return
            elif all([tweet_parser.tweet_passes_filter(collection.filter, tweet)
                      and tweet_parser.tweet_passes_custom_filter_list(
                          collection.custom_filters, tweet)
                      for collection in self.collections]):
                count += 1
                yield tweet

    cp = copy.deepcopy(self)
    cp.get_collection_iterators = new_get_iterators
    return cp
This method is especially troublesome; I do not recommend making any changes to it. You may notice it duplicates code from smappdragon; there is no way around this as far as I can tell, and changing it really might break a lot of things. strip_tweets has been purposely omitted as it isn't supported in pysmap.
6,195
def merge_networks(output_file="merged_network.txt", *files):
    """Merge edge-list network files into a larger network.

    Each input line is ``id_a<TAB>id_b<TAB>n_contacts``; contact counts
    of identical (unordered) pairs are summed into *output_file*.
    """
    contacts = {}
    for network_file in files:
        with open(network_file) as network_handle:
            for line in network_handle:
                id_a, id_b, n_contacts = line.split("\t")
                # BUG FIX: sorted() returns a list, which is unhashable
                # as a dict key (TypeError) -- use a tuple.  Counts are
                # summed as ints, not concatenated as strings.
                pair = tuple(sorted((id_a, id_b)))
                contacts[pair] = contacts.get(pair, 0) + int(n_contacts)
    with open(output_file, "w") as output_handle:
        for id_a, id_b in sorted(contacts):
            output_handle.write(
                "{}\t{}\t{}\n".format(id_a, id_b, contacts[(id_a, id_b)]))
Merge networks into a larger network .
6,196
def merge_chunk_data(output_file="merged_idx_contig_hit_size_cov.txt", *files):
    """Merge per-chunk data from different networks.

    Input lines are ``chunk_id<TAB>name<TAB>hit<TAB>size<TAB>cov``; hits
    and coverages of identical chunks are summed into *output_file*.
    """
    chunks = {}
    for chunk_file in files:
        with open(chunk_file) as chunk_handle:
            for line in chunk_handle:
                chunk_id, chunk_name, hit, size, cov = line.split("\t")
                # BUG FIX: split() yields strings, so ``+=`` concatenated
                # instead of summing -- convert to numbers first.
                # (hit/size assumed integral, cov fractional -- confirm)
                hit, size, cov = int(hit), int(size), float(cov)
                try:
                    chunks[chunk_id]["hit"] += hit
                    chunks[chunk_id]["cov"] += cov
                except KeyError:
                    chunks[chunk_id] = {
                        "name": chunk_name,
                        "hit": hit,
                        "size": size,
                        "cov": cov,
                    }
    with open(output_file, "w") as output_handle:
        for chunk_id in sorted(chunks):
            my_chunk = chunks[chunk_id]
            # BUG FIX: the original omitted the trailing newline, fusing
            # all records onto one output line.
            output_handle.write("{}\t{}\t{}\t{}\t{}\n".format(
                chunk_id, my_chunk["name"], my_chunk["hit"],
                my_chunk["size"], my_chunk["cov"]))
Merge chunk data from different networks
6,197
def alignment_to_reads(sam_merged, output_dir, parameters=DEFAULT_PARAMETERS,
                       save_memory=True, *bin_fasta):
    """Generate read names from an ambiguous alignment file.

    For each bin FASTA, collect the names of reads whose alignments hit a
    chunk belonging to that bin with sufficient mapping quality.

    :param save_memory: when True, stream read names to one
        ``.readnames`` file per bin and return the set of bin files;
        otherwise return an in-memory mapping read name -> [bin files].
    """

    def get_file_string(file_thing):
        # Accept either an open file object or a path string.
        try:
            return file_thing.name
        except AttributeError:
            return str(file_thing)

    # All (bin, chunk-id) pairs present in the supplied bin FASTA files.
    bin_chunks = set()
    for bin_file in bin_fasta:
        for record in SeqIO.parse(bin_file, "fasta"):
            bin_chunks.add((get_file_string(bin_file), record.id))

    chunk_size = int(parameters["chunk_size"])
    mapq_threshold = int(parameters["mapq_threshold"])

    def read_name(read):
        # Strip any trailing description from the query name.
        return read.query_name.split()[0]

    def get_base_name(bin_file):
        base_name = ".".join(os.path.basename(bin_file).split(".")[:-1])
        return os.path.join(output_dir, "{}.readnames".format(base_name))

    if save_memory:
        opened_files = dict()
    else:
        read_names = dict()

    with pysam.AlignmentFile(sam_merged, "rb") as alignment_merged_handle:
        for my_read_name, alignment_pool in itertools.groupby(
                alignment_merged_handle, read_name):
            for my_alignment in alignment_pool:
                relative_position = my_alignment.reference_start
                contig_name = my_alignment.reference_name
                chunk_position = relative_position // chunk_size
                chunk_name = "{}_{}".format(contig_name, chunk_position)
                quality_test = my_alignment.mapping_quality > mapq_threshold
                for bin_file in bin_fasta:
                    if (bin_file, chunk_name) in bin_chunks and quality_test:
                        if save_memory:
                            # Lazily open one output file per bin.
                            try:
                                output_handle = opened_files[bin_file]
                            except KeyError:
                                output_handle = open(
                                    get_base_name(bin_file), "w")
                                opened_files[bin_file] = output_handle
                            output_handle.write(
                                "@{}\n".format(my_read_name))
                        else:
                            try:
                                read_names[my_read_name].append(bin_file)
                            except KeyError:
                                read_names[my_read_name] = [bin_file]

    if save_memory:
        # BUG FIX: the close loop previously ran unconditionally and
        # raised NameError when save_memory was False.
        for file_handle in opened_files.values():
            file_handle.close()
        return opened_files.keys()
    else:
        return read_names
Generate reads from ambiguous alignment file
6,198
def ResetHandler(self, name):
    """Assign the handler of the nearest tag encountered before *name*,
    or None when no earlier tag has a known handler.

    Does nothing when *name* is not among the open tags.
    """
    if name not in self.tags:
        return
    if len(self.tags) <= 1:
        # No earlier tag exists to fall back to.
        self.handler = None
        return
    self.handler = None
    # Walk earlier tags from innermost to outermost.
    for tag in reversed(self.tags[:-1]):
        if tag in self.structure:
            self.handler = self.structure[tag]
            break
Method which assigns handler to the tag encountered before the current or else sets it to None
6,199
def get_cluster(self, label):
    """Return a connection to the mongo cluster with the given label.

    :raises AttributeError: if no cluster matches *label*.
    """
    match = next(
        (cluster for cluster in self._clusters if cluster['label'] == label),
        None)
    if match is None:
        raise AttributeError('No such cluster %s.' % label)
    return self._get_connection(match)
Returns a connection to a mongo - clusters .