idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
51,900
def get_widget(self, request):
    """Return the field's form widget, forced to RestrictedSelectWidget.

    The field's own widget may have been customized for editing; for
    filtering we always build the formfield with the plain restricted
    select and then run it through ``_update_widget_choices``.
    """
    formfield = self.field.formfield(widget=RestrictedSelectWidget)
    return self._update_widget_choices(formfield.widget)
Field widget is replaced with RestrictedSelectWidget because we do not want to use modified widgets for filtering.
51,901
def get_rev_options(url, rev):
    """Return svn command-line revision options for *url*.

    Adds ``-r <rev>`` when a revision is given, plus ``--username`` /
    ``--password`` options extracted from the URL's credentials.
    """
    rev_options = ['-r', rev] if rev else []
    parts = urlparse.urlsplit(url)
    if hasattr(parts, 'username'):
        username, password = parts.username, parts.password
    else:
        # Very old urlsplit results: parse the netloc field by hand.
        netloc = parts[1]
        username, password = None, None
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username = auth
    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options
Return revision options .
51,902
def get_revision_file(self, location):
    """Return the SVN revision number for a file at *location*.

    Runs ``svn info`` and parses its "Key: value" output lines.
    """
    output = self.run(['info', location])
    info_re = re.compile(r"^([^:]+):\s+(\S.*)$", re.M)
    fields = dict(info_re.findall(output))
    return int(fields['Revision'])
Return revision for a file .
51,903
def get_publish_path(self, obj):
    """Join the chat type's publish path with the channel's own path.

    The channel path is stripped of leading slashes so ``os.path.join``
    treats it as relative to the chat type's base path.
    """
    base = obj.chat_type.publish_path
    relative = obj.publish_path.lstrip("/")
    return os.path.join(base, relative)
publish_path joins the publish_paths for the chat type and the channel .
51,904
def _init_module_cache ( ) : if len ( FieldTranslation . _modules ) < len ( FieldTranslation . _model_module_paths ) : for module_path in FieldTranslation . _model_module_paths : FieldTranslation . _modules [ module_path ] = importlib . import_module ( module_path ) return True return False
Module caching: avoids having to import the same modules again and again.
51,905
def _load_source_object ( self ) : if hasattr ( self , "source_obj" ) : self . source_text = getattr ( self . source_obj , self . field ) return self . source_obj self . _load_source_model ( ) self . source_obj = self . source_model . objects . get ( id = self . object_id ) return self . source_obj
Loads related object in a dynamic attribute and returns it .
51,906
def delete_orphan_translations ( condition = None ) : if condition is None : condition = { } translations = FieldTranslation . objects . all ( ) for translation in translations : translation . _load_source_model ( ) condition [ "id" ] = translation . object_id if not translation . source_model . objects . filter ( ** condition ) . exists ( ) : translation . delete ( )
Delete orphan translations. This method needs refactoring to improve its performance.
51,907
def update_translations ( condition = None ) : if condition is None : condition = { } num_translations = 0 FieldTranslation . _init_module_cache ( ) LANGUAGES = dict ( lang for lang in MODELTRANSLATION_LANG_CHOICES ) if settings . LANGUAGE_CODE in LANGUAGES : del LANGUAGES [ settings . LANGUAGE_CODE ] for key in FieldTranslation . _modules . keys ( ) : module = FieldTranslation . _modules [ key ] clsmembers = inspect . getmembers ( sys . modules [ key ] , inspect . isclass ) for cls in clsmembers : cls = cls [ 1 ] if hasattr ( cls , "_meta" ) and not cls . _meta . abstract and hasattr ( cls . _meta , "translatable_fields" ) and len ( cls . _meta . translatable_fields ) > 0 : objects = cls . objects . filter ( ** condition ) for obj in objects : for lang in LANGUAGES . keys ( ) : for field in cls . _meta . translatable_fields : if FieldTranslation . update ( obj = obj , field = field , lang = lang , context = "" ) : num_translations += 1 return num_translations
Updates FieldTranslations table
51,908
def factory ( obj , field , source_text , lang , context = "" ) : obj_classname = obj . __class__ . __name__ obj_module = obj . __module__ source_md5 = checksum ( source_text ) translation = "" field_lang = trans_attr ( field , lang ) if hasattr ( obj , field_lang ) and getattr ( obj , field_lang ) != "" : translation = getattr ( obj , field_lang ) is_fuzzy = True is_fuzzy_lang = trans_is_fuzzy_attr ( field , lang ) if hasattr ( obj , is_fuzzy_lang ) : is_fuzzy = getattr ( obj , is_fuzzy_lang ) trans = FieldTranslation ( module = obj_module , model = obj_classname , object_id = obj . id , field = field , lang = lang , source_text = source_text , source_md5 = source_md5 , translation = translation , is_fuzzy = is_fuzzy , context = context ) return trans
Static method that constructs a translation based on its contents .
51,909
def save(self, *args, **kwargs):
    """Save the object, updating the datetimes accordingly.

    ``creation_datetime`` is only set on first save (no id yet);
    ``last_update_datetime`` is refreshed on every save. The creator is
    taken from CuserMiddleware when a non-anonymous user is available.
    """
    now_datetime = timezone.now()
    if not self.id:
        self.creation_datetime = now_datetime
    self.last_update_datetime = now_datetime
    self.creator_user = None
    current_user = CuserMiddleware.get_user()
    # `is not None` instead of the non-idiomatic `not ... is None`.
    # NOTE(review): is_anonymous() is called as a method — correct for the
    # Django versions this code targets; it is a property in Django >= 1.10.
    if current_user is not None and not current_user.is_anonymous():
        self.creator_user_id = current_user.id
    super(FieldTranslation, self).save(*args, **kwargs)
Save object in database updating the datetimes accordingly .
51,910
def parse_info ( raw_info , apply_tag = None ) : parse_releases ( raw_info ) parse_packages ( raw_info , apply_tag = apply_tag ) return raw_info
Parse raw rdoinfo metadata inplace .
51,911
def alphanumeric(text):
    """Make an ultra-safe version of a string, e.g. for use as a filename.

    Keeps only characters matching ``\\w`` (any alphanumeric character and
    the underscore); everything else is dropped.
    """
    # Single C-level pass instead of a per-character re.match loop.
    return re.sub(r'\W+', '', text)
Make an ultra-safe ASCII version of a string, for instance for use as a filename. \w matches any alphanumeric character and the underscore.
51,912
def all_combinations(items):
    """Generate every subset of *items* (the power set), each as a set."""
    for mask in product((0, 1), repeat=len(items)):
        yield set(compress(items, mask))
Generate all combinations of a given list of items .
51,913
def pad_equal_whitespace(string, pad=None):
    """Right-pad every line of a multiline string to the same length.

    *pad* defaults to one more than the longest line's length.
    """
    lines = string.split('\n')
    if pad is None:
        pad = max(len(line) for line in lines) + 1
    return '\n'.join(line.ljust(pad) for line in lines)
Given a multiline string add whitespaces to every line so that every line has the same length .
51,914
def concatenate_by_line(first, second):
    """Zip two multiline strings together line-wise.

    Extra lines in the longer string are dropped (zip semantics).
    """
    pairs = zip(first.split('\n'), second.split('\n'))
    return '\n'.join(left + right for left, right in pairs)
Zip two strings together line wise
51,915
def sort_string_by_pairs(strings):
    """Group strings into pairs differing by exactly one character.

    Each string is matched with the first remaining candidate at distance
    one (as measured by count_string_diff); strings with no match are
    silently dropped from the result.
    """
    assert len(strings) % 2 == 0
    remaining = list(strings)
    pairs = []
    while remaining:
        template = remaining.pop()
        for index, candidate in enumerate(remaining):
            if count_string_diff(template, candidate) == 1:
                pairs.append(sorted([template, remaining.pop(index)]))
                break
    return pairs
Group a list of strings by pairs by matching those with only one character difference between each other together .
51,916
def count_string_diff(a, b):
    """Count positions where *a* and *b* differ, up to the shorter length."""
    # zip stops at the shorter string, matching the original min-length scan.
    return sum(x != y for x, y in zip(a, b))
Return the number of characters in two strings that don't exactly match.
51,917
def iflatten(L):
    """Iteratively flatten an arbitrarily nested iterable.

    Strings and bytes are treated as atoms: in Python 3 they have
    ``__iter__``, so without this guard a string would recurse forever
    (each character is itself an iterable one-character string).
    """
    for sublist in L:
        if hasattr(sublist, '__iter__') and not isinstance(sublist, (str, bytes)):
            for item in iflatten(sublist):
                yield item
        else:
            yield sublist
Iterative flatten .
51,918
def uniquify_list(L):
    """Return the unique elements of *L*, keeping first-seen order.

    Hashable elements are tracked in a set for O(n) total work instead of
    the original quadratic ``L.index`` scan; unhashable elements fall
    back to a linear scan so behaviour is preserved for them too.
    """
    seen = set()
    result = []
    for item in L:
        try:
            duplicate = item in seen
            if not duplicate:
                seen.add(item)
        except TypeError:  # unhashable (e.g. a list): fall back to O(n) scan
            duplicate = item in result
        if not duplicate:
            result.append(item)
    return result
Same order unique list using only a list compression .
51,919
def average(iterator):
    """Compute the mean of an iterable in a single pass.

    Raises ZeroDivisionError on an empty iterable.
    """
    total, count = 0, 0
    for value in iterator:
        total += value
        count += 1
    return float(total) / count
Iterative mean .
51,920
def get_next_item(iterable):
    """Get the next item of an iterator, or None when it is exhausted.

    Also returns None when the argument is not an iterator at all: the
    original relied on Python 2's ``.next()`` method (AttributeError for
    non-iterators); the built-in ``next`` raises TypeError there instead.
    """
    try:
        return next(iterable)
    except (StopIteration, TypeError):
        return None
Gets the next item of an iterable . If the iterable is exhausted returns None .
51,921
def andify(list_of_strings):
    """Join strings with commas, with ' and' before the final item."""
    joined = ', '.join(list_of_strings)
    last_comma = joined.rfind(',')
    if last_comma > -1:
        joined = joined[:last_comma] + ' and' + joined[last_comma + 1:]
    return joined
Given a list of strings will join them with commas and a final and word .
51,922
def num_to_ith(num):
    """Turn a number into an ordinal string: 1 -> '1st', 2 -> '2nd', 12 -> '12th'.

    Fixed: the original read ``value[-2]`` before checking the length,
    which raised IndexError for every single-digit number.
    """
    value = str(num)
    # 11, 12, 13 (and 111, 212, ...) take 'th' regardless of the last digit.
    if len(value) > 1 and value[-2] == '1':
        return value + 'th'
    last_digit = value[-1]
    if last_digit == '1':
        return value + 'st'
    if last_digit == '2':
        return value + 'nd'
    if last_digit == '3':
        return value + 'rd'
    return value + 'th'
1 becomes 1st 2 becomes 2nd etc .
51,923
def isubsample(full_sample, k, full_sample_len=None):
    """Down-sample an enumerable, yielding exactly *k* of its elements.

    Selection sampling: each element is taken with probability
    (still needed) / (still available), guaranteeing exactly k picks.
    *full_sample_len* may be supplied for inputs without len().
    """
    if not full_sample_len:
        full_sample_len = len(full_sample)
    if not 0 <= k <= full_sample_len:
        raise ValueError('Required that 0 <= k <= full_sample_length')
    picked = 0
    for index, element in enumerate(full_sample):
        probability = (k - picked) / (full_sample_len - index)
        if random.random() < probability:
            yield element
            picked += 1
    assert picked == k
Down - sample an enumerable list of things
51,924
def moving_average(interval, windowsize, borders=None):
    """Smooth *interval* by convolving with a flat window of *windowsize*.

    Several options exist for dealing with the border cases:
      None (default)          -- 'valid' convolution, output is shorter
      'zero_padding'          -- 'full' convolution (zeros outside)
      'zero_padding_and_cut'  -- 'same' convolution
      'copy_padding'          -- edge values repeated, 'valid' convolution
      'copy_padding_and_cut'  -- as above, then trimmed by half a window
      'zero_stretching'       -- 'valid' result padded with zeros
      'copy_stretching'       -- 'valid' result padded with its edge values
    Returns None for an unrecognized *borders* value (original behaviour).
    """
    half = int(math.floor(windowsize / 2.0))
    window = numpy.ones(int(windowsize)) / float(windowsize)
    if borders is None:  # was the non-idiomatic `borders == None`
        return numpy.convolve(interval, window, 'valid')
    if borders == 'zero_padding':
        return numpy.convolve(interval, window, 'full')
    if borders == 'zero_padding_and_cut':
        return numpy.convolve(interval, window, 'same')
    if borders in ('copy_padding', 'copy_padding_and_cut'):
        # Repeat the edge values so the window always has data under it.
        # list(interval) generalizes the original list-only concatenation
        # to any sequence type.
        pad_left = [interval[0]] * (windowsize - 1)
        pad_right = [interval[-1]] * (windowsize - 1)
        padded = pad_left + list(interval) + pad_right
        result = numpy.convolve(padded, window, 'valid')
        if borders == 'copy_padding':
            return result
        return result[half:-half]
    if borders == 'zero_stretching':
        result = numpy.convolve(interval, window, 'valid')
        pad = numpy.zeros(half)
        return numpy.concatenate((pad, result, pad))
    if borders == 'copy_stretching':
        result = numpy.convolve(interval, window, 'valid')
        left = numpy.ones(half) * result[0]
        right = numpy.ones(half) * result[-1]
        return numpy.concatenate((left, result, right))
This is essentially a convolving operation. Several options exist for dealing with the border cases.
51,925
def wait(predicate, interval=1, message=lambda: "Waiting..."):
    """Block until *predicate*() turns true, displaying a turning ball.

    *interval* is the polling period in seconds — the original accepted
    it but always slept one second. *message* is a callable re-evaluated
    on every poll so the displayed text can change while waiting.
    """
    ball, next_ball = u"|/-\\", "|"
    sys.stdout.write(" \033[K")
    sys.stdout.flush()
    while not predicate():
        time.sleep(interval)  # was hard-coded time.sleep(1)
        next_ball = ball[(ball.index(next_ball) + 1) % len(ball)]
        sys.stdout.write("\r " + str(message()) + " " + next_ball + " \033[K")
        sys.stdout.flush()
    print("\r Done. \033[K")
    sys.stdout.flush()
Wait until the predicate turns true and display a turning ball .
51,926
def natural_sort(item):
    """Sort key that orders strings containing numbers naturally.

    Digit runs become ints and the rest is lower-cased, so 'a10' sorts
    after 'a2'. Works in Python 2 and 3.
    """
    return [int(chunk) if chunk.isdigit() else chunk.lower()
            for chunk in re.split(r'(\d+)', item)]
Sort strings that contain numbers correctly . Works in Python 2 and 3 .
51,927
def split_thousands(s):
    """Format a number with apostrophes as thousands separators.

    None becomes "0"; numeric strings are parsed first; whole floats are
    shown without a decimal part. Fixed: ``basestring`` does not exist in
    Python 3, so the string check now uses ``str``.
    """
    if s is None:
        return "0"
    if isinstance(s, str):
        s = float(s)
    if isinstance(s, float) and s.is_integer():
        s = int(s)
    return "{:,}".format(s).replace(',', "'")
Splits a number on thousands .
51,928
def reverse_compl_with_name ( old_seq ) : new_seq = old_seq . reverse_complement ( ) new_seq . id = old_seq . id new_seq . description = old_seq . description return new_seq
Reverse a SeqIO sequence but keep its name intact .
51,929
def load_json_path(path):
    """Load a JSON file as an OrderedDict, reporting errors readably.

    On a parse failure a framed message naming the offending file is
    written to stderr before the ValueError is re-raised.
    """
    with open(path) as handle:
        try:
            return json.load(handle, object_pairs_hook=collections.OrderedDict)
        except ValueError as error:
            banner = "-" * 20
            message = "Could not decode JSON file '%s'." % path
            sys.stderr.write("%s\n%s\n%s\n%s\n" % (banner, message, error, banner))
            raise
Load a file with the json module but report errors better if it fails . And have it ordered too !
51,930
def md5sum(file_path, blocksize=65536):
    """Compute the hex md5 digest of a file, reading it in blocks.

    Fixed: the file is opened in binary mode, so ``read()`` yields bytes
    and the iter() sentinel must be ``b""`` — the original ``""`` never
    compared equal, looping forever on Python 3.
    """
    md5 = hashlib.md5()
    with open(file_path, "rb") as handle:
        for block in iter(lambda: handle.read(blocksize), b""):
            md5.update(block)
    return md5.hexdigest()
Compute the md5 of a file . Pretty fast .
51,931
def reversed_lines(path):
    """Generate the lines of a file in reverse order.

    Walks the file back-to-front via reversed_blocks, accumulating each
    line reversed and flipping it when a newline boundary is reached.
    """
    with open(path, 'r') as handle:
        tail = ''
        for block in reversed_blocks(handle):
            for char in reversed(block):
                if char == '\n' and tail:
                    yield tail[::-1]
                    tail = ''
                tail += char
        if tail:
            yield tail[::-1]
Generate the lines of file in reverse order .
51,932
def reversed_blocks(handle, blocksize=4096):
    """Yield the file's contents as blocks, last block first."""
    handle.seek(0, os.SEEK_END)
    position = handle.tell()
    while position > 0:
        step = min(blocksize, position)
        position -= step
        handle.seek(position, os.SEEK_SET)
        yield handle.read(step)
Generate blocks of file s contents in reverse order .
51,933
def supports_gzip ( self , context ) : if 'request' in context and client . supports_gzip ( ) : enc = context [ 'request' ] . META . get ( 'HTTP_ACCEPT_ENCODING' , '' ) return 'gzip' in enc and msettings [ 'SERVE_REMOTE' ] return False
Looks at the RequestContext object and determines if the client supports gzip encoded content . If the client does we will send them to the gzipped version of files that are allowed to be compressed . Clients without gzip support will be served the original media .
51,934
def wrap_application ( application , before_run , on_start , shutdown ) : before_run = [ ] if before_run is None else before_run on_start = [ ] if on_start is None else on_start shutdown = [ ] if shutdown is None else shutdown if not isinstance ( application , Application ) : application = _ApplicationAdapter ( application ) application . before_run_callbacks . extend ( before_run ) application . on_start_callbacks . extend ( on_start ) application . on_shutdown_callbacks . extend ( shutdown ) return application
Wrap a tornado application in a callback - aware wrapper .
51,935
def start ( self , io_loop ) : for callback in self . before_run_callbacks : try : callback ( self . tornado_application , io_loop ) except Exception : self . logger . error ( 'before_run callback %r cancelled start' , callback , exc_info = 1 ) self . stop ( io_loop ) raise for callback in self . on_start_callbacks : io_loop . spawn_callback ( callback , self . tornado_application , io_loop )
Run the before_run callbacks and queue to on_start callbacks .
51,936
def stop ( self , io_loop ) : running_async = False shutdown = _ShutdownHandler ( io_loop ) for callback in self . on_shutdown_callbacks : try : maybe_future = callback ( self . tornado_application ) if asyncio . iscoroutine ( maybe_future ) : maybe_future = asyncio . create_task ( maybe_future ) if concurrent . is_future ( maybe_future ) : shutdown . add_future ( maybe_future ) running_async = True except Exception as error : self . logger . warning ( 'exception raised from shutdown ' 'callback %r, ignored: %s' , callback , error , exc_info = 1 ) if not running_async : shutdown . on_shutdown_ready ( )
Asynchronously stop the application .
51,937
def sync(client=None, force=False, verbose=True):
    """Push joined and per-directory media files to the remote backend.

    A lot of different things need to be calculated for each file, and in
    a certain order, as some variables rely on others. Fixed for Python 3:
    ``print`` statements became function calls, ``iteritems`` became
    ``items``, and the per-file read now closes its handle.
    """
    from mediasync import backends
    from mediasync.conf import msettings
    from mediasync.signals import pre_sync, post_sync

    if client is None:
        client = backends.client()
    client.open()
    client.serve_remote = True
    pre_sync.send(sender=client)

    # First push the "joined" files (several sources combined into one).
    for joinfile, sourcefiles in msettings['JOINED'].items():
        filedata = combine_files(joinfile, sourcefiles, client)
        if filedata is None:
            continue
        filedata, dirname = filedata
        content_type = mimetypes.guess_type(joinfile)[0] or msettings['DEFAULT_MIMETYPE']
        remote_path = joinfile
        if dirname:
            remote_path = "%s/%s" % (dirname, remote_path)
        if client.process_and_put(filedata, content_type, remote_path, force=force):
            if verbose:
                print("[%s] %s" % (content_type, remote_path))

    # Then walk every directory under the media root and push its files.
    for dirname in os.listdir(client.media_root):
        dirpath = os.path.abspath(os.path.join(client.media_root, dirname))
        if not os.path.isdir(dirpath):
            continue
        for filename in listdir_recursive(dirpath):
            filepath = os.path.join(dirpath, filename)
            remote_path = "%s/%s" % (dirname, filename)
            content_type = mimetypes.guess_type(filepath)[0] or msettings['DEFAULT_MIMETYPE']
            if not is_syncable_file(os.path.basename(filename)) or not os.path.isfile(filepath):
                continue
            with open(filepath, 'rb') as handle:  # was left unclosed
                filedata = handle.read()
            if client.process_and_put(filedata, content_type, remote_path, force=force):
                if verbose:
                    print("[%s] %s" % (content_type, remote_path))

    post_sync.send(sender=client)
    client.close()
Let s face it ... pushing this stuff to S3 is messy . A lot of different things need to be calculated for each file and they have to be in a certain order as some variables rely on others .
51,938
def enable_all_cpu ( self ) : for cpu in self . __get_ranges ( "offline" ) : fpath = path . join ( "cpu%i" % cpu , "online" ) self . __write_cpu_file ( fpath , b"1" )
Enable all offline cpus
51,939
def reset ( self , rg = None ) : if type ( rg ) == int : rg = [ rg ] to_reset = rg if rg else self . __get_ranges ( "present" ) self . enable_cpu ( to_reset ) for cpu in to_reset : fpath = path . join ( "cpu%i" % cpu , "cpufreq" , "cpuinfo_max_freq" ) max_freq = self . __read_cpu_file ( fpath ) fpath = path . join ( "cpu%i" % cpu , "cpufreq" , "cpuinfo_min_freq" ) min_freq = self . __read_cpu_file ( fpath ) fpath = path . join ( "cpu%i" % cpu , "cpufreq" , "scaling_max_freq" ) self . __write_cpu_file ( fpath , max_freq . encode ( ) ) fpath = path . join ( "cpu%i" % cpu , "cpufreq" , "scaling_min_freq" ) self . __write_cpu_file ( fpath , min_freq . encode ( ) )
Enable all offline cpus and reset max and min frequencies files
51,940
def disable_hyperthread ( self ) : to_disable = [ ] online_cpus = self . __get_ranges ( "online" ) for cpu in online_cpus : fpath = path . join ( "cpu%i" % cpu , "topology" , "thread_siblings_list" ) to_disable += self . __get_ranges ( fpath ) [ 1 : ] to_disable = set ( to_disable ) & set ( online_cpus ) for cpu in to_disable : fpath = path . join ( "cpu%i" % cpu , "online" ) self . __write_cpu_file ( fpath , b"0" )
Disable all threads attached to the same core
51,941
def set_frequencies ( self , freq , rg = None , setMaxfeq = True , setMinfreq = True , setSpeed = True ) : to_change = self . __get_ranges ( "online" ) if type ( rg ) == int : rg = [ rg ] if rg : to_change = set ( rg ) & set ( self . __get_ranges ( "online" ) ) for cpu in to_change : if setSpeed : fpath = path . join ( "cpu%i" % cpu , "cpufreq" , "scaling_setspeed" ) self . __write_cpu_file ( fpath , str ( freq ) . encode ( ) ) if setMinfreq : fpath = path . join ( "cpu%i" % cpu , "cpufreq" , "scaling_min_freq" ) self . __write_cpu_file ( fpath , str ( freq ) . encode ( ) ) if setMaxfeq : fpath = path . join ( "cpu%i" % cpu , "cpufreq" , "scaling_max_freq" ) self . __write_cpu_file ( fpath , str ( freq ) . encode ( ) )
Set cores frequencies
51,942
def get_available_frequencies ( self ) : fpath = path . join ( "cpu0" , "cpufreq" , "scaling_available_frequencies" ) data = self . __read_cpu_file ( fpath ) . rstrip ( "\n" ) . split ( ) return data
Get all possible frequencies
51,943
def configure ( self , endpoint = None , ** kwargs ) : if endpoint : kwargs [ 'endpoint' ] = endpoint keywords = self . _keywords . copy ( ) keywords . update ( kwargs ) if 'endpoint' in kwargs : endpoint = kwargs [ 'endpoint' ] keywords [ 'endpoint' ] = self . _configure_endpoint ( endpoint ) self . api_key = keywords [ 'api_key' ] or self . _global_api_key ( ) self . endpoint = keywords [ 'endpoint' ] self . format = keywords [ 'format' ] or 'json' self . jurisdiction = keywords [ 'jurisdiction' ] self . proxy = keywords [ 'proxy' ] self . discovery_url = keywords [ 'discovery' ] or None self . session = requests . Session ( ) if 'ssl_version' in keywords : self . session . mount ( 'https://' , SSLAdapter ( keywords [ 'ssl_version' ] ) )
Configure a previously initialized instance of the class .
51,944
def _configure_endpoint ( self , endpoint ) : if not endpoint . startswith ( 'http' ) : endpoint = 'https://' + endpoint if not endpoint . endswith ( '/' ) : endpoint += '/' return endpoint
Configure the endpoint with a schema and end slash .
51,945
def get ( self , * args , ** kwargs ) : if 'convert' in kwargs : conversion = kwargs . pop ( 'convert' ) else : conversion = True kwargs = self . _get_keywords ( ** kwargs ) url = self . _create_path ( * args ) request = self . session . get ( url , params = kwargs ) content = request . content self . _request = request return self . convert ( content , conversion )
Perform a get request .
51,946
def _get_keywords ( self , ** kwargs ) : if self . jurisdiction and 'jurisdiction_id' not in kwargs : kwargs [ 'jurisdiction_id' ] = self . jurisdiction if 'count' in kwargs : kwargs [ 'page_size' ] = kwargs . pop ( 'count' ) if 'start' in kwargs : start = kwargs . pop ( 'start' ) if 'end' in kwargs : end = kwargs . pop ( 'end' ) else : end = date . today ( ) . strftime ( '%m-%d-%Y' ) start , end = self . _format_dates ( start , end ) kwargs [ 'start_date' ] = start kwargs [ 'end_date' ] = end elif 'between' in kwargs : start , end = kwargs . pop ( 'between' ) start , end = self . _format_dates ( start , end ) kwargs [ 'start_date' ] = start kwargs [ 'end_date' ] = end return kwargs
Format GET request parameters and keywords .
51,947
def _format_dates ( self , start , end ) : start = self . _split_date ( start ) end = self . _split_date ( end ) return start , end
Format start and end dates .
51,948
def _split_date ( self , time ) : if isinstance ( time , str ) : month , day , year = [ int ( t ) for t in re . split ( r'-|/' , time ) ] if year < 100 : year += 2000 time = date ( year , month , day ) return time . strftime ( '%Y-%m-%dT%H:%M:%SZ' )
Split apart a date string .
51,949
def convert(self, content, conversion):
    """Convert raw response content to Python data per the client format.

    Returns the content untouched when conversion is disabled or the
    format is unrecognized. For XML the root element is unwrapped.
    """
    if not conversion:
        return content
    if self.format == 'json':
        return json.loads(content)
    if self.format == 'xml':
        parsed = xml(content)
        root_key = list(parsed.keys())[0]
        return parsed[root_key]
    return content
Convert content to Python data structures .
51,950
def discovery ( self , url = None ) : if url : data = self . session . get ( url ) . content elif self . discovery_url : response = self . session . get ( self . discovery_url ) if self . format == 'xml' : data = xml ( response . text ) else : data = response . json ( ) else : data = self . get ( 'discovery' ) return data
Retrieve the standard discovery file that provides routing information .
51,951
def services ( self , code = None , ** kwargs ) : data = self . get ( 'services' , code , ** kwargs ) return data
Retrieve information about available services . You can also enter a specific service code argument .
51,952
def requests ( self , code = None , ** kwargs ) : if code : kwargs [ 'service_code' ] = code data = self . get ( 'requests' , ** kwargs ) return data
Retrieve open requests . You can also enter a specific service code argument .
51,953
def request ( self , id , ** kwargs ) : data = self . get ( 'requests' , id , ** kwargs ) return data
Retrieve a specific request using its service code ID .
51,954
def post ( self , service_code = '0' , ** kwargs ) : kwargs [ 'service_code' ] = service_code kwargs = self . _post_keywords ( ** kwargs ) media = kwargs . pop ( 'media' , None ) if media : files = { 'media' : media } else : files = None url = self . _create_path ( 'requests' ) self . post_response = self . session . post ( url , data = kwargs , files = files ) content = self . post_response . content if self . post_response . status_code >= 500 : conversion = False else : conversion = True return self . convert ( content , conversion )
Post a new Open311 request .
51,955
def _post_keywords ( self , ** kwargs ) : if self . jurisdiction and 'jurisdiction_id' not in kwargs : kwargs [ 'jurisdiction_id' ] = self . jurisdiction if 'address' in kwargs : address = kwargs . pop ( 'address' ) kwargs [ 'address_string' ] = address if 'name' in kwargs : first , last = kwargs . pop ( 'name' ) . split ( ' ' ) kwargs [ 'first_name' ] = first kwargs [ 'last_name' ] = last if 'api_key' not in kwargs : kwargs [ 'api_key' ] = self . api_key return kwargs
Configure keyword arguments for Open311 POST requests .
51,956
def token ( self , id , ** kwargs ) : data = self . get ( 'tokens' , id , ** kwargs ) return data
Retrieve a service request ID from a token .
51,957
def build_attrs(self, base_attrs, extra_attrs=None, **kwargs):
    """Build an attribute dictionary from base attrs, kwargs and extras.

    Combination of the same helper from Django <= 1.10 and Django 1.11+;
    extra_attrs win over kwargs, which win over base_attrs.
    """
    attrs = dict(base_attrs)
    attrs.update(kwargs)
    if extra_attrs:
        attrs.update(extra_attrs)
    return attrs
Helper function for building an attribute dictionary . This is combination of the same method from Django< = 1 . 10 and Django1 . 11 +
51,958
def to_dataframe(self, **kwargs):
    """Load the CSV at ``self.path`` (delimiter ``self.d``) as a DataFrame.

    Uses the public ``pandas.read_csv`` entry point instead of the
    private ``pandas.io.parsers.read_csv`` path, which is deprecated and
    removed in recent pandas versions. Extra kwargs pass through.
    """
    return pandas.read_csv(self.path, sep=self.d, **kwargs)
Load up the CSV file as a pandas dataframe
51,959
def get_elementary_intervals ( self , features ) : coords = [ ] try : for interval in features : if len ( interval ) != 3 : raise SyntaxError ( 'Interval malformed %s. Allways specify start and end position for interval.' % str ( interval ) ) coords . extend ( [ interval [ 0 ] , interval [ 1 ] ] ) except IndexError : raise SyntaxError ( 'Interval malformed %s. Allways specify start and end position for interval.' % str ( interval ) ) coords = list ( set ( coords ) ) coords . sort ( ) return coords
Generates a sorted list of elementary intervals
51,960
def pt_within ( self , pt , subject ) : try : if pt >= int ( subject [ 0 ] ) and pt <= int ( subject [ 1 ] ) : return True except ValueError : raise ValueError ( 'Interval start and stop has to be integers. %s' % str ( subject ) ) return False
Accessory function to check if a point is within a range
51,961
def is_within ( self , query , subject ) : if self . pt_within ( query [ 0 ] , subject ) and self . pt_within ( query [ 1 ] , subject ) : return True return False
Accessory function to check if a range is fully within another range
51,962
def overlap ( self , query , subject ) : if ( self . pt_within ( query [ 0 ] , subject ) or self . pt_within ( query [ 1 ] , subject ) or self . pt_within ( subject [ 0 ] , query ) or self . pt_within ( subject [ 1 ] , query ) ) : return True return False
Accessory function to check if two ranges overlap
51,963
def recursive_insert ( self , node , coord , data , start , end ) : if node [ 0 ] != - 1 : left = ( start , node [ 0 ] ) right = ( node [ 0 ] , end ) if self . is_within ( left , coord ) : node [ 1 ] [ - 1 ] . append ( data ) elif self . overlap ( left , coord ) : self . recursive_insert ( node [ 1 ] , coord , data , left [ 0 ] , left [ 1 ] ) if self . is_within ( right , coord ) : node [ 2 ] [ - 1 ] . append ( data ) elif self . overlap ( right , coord ) : self . recursive_insert ( node [ 2 ] , coord , data , right [ 0 ] , right [ 1 ] )
Recursively inserts id data into nodes
51,964
def insert_data ( self , node , data , start , end ) : for item in data : self . recursive_insert ( node , [ item [ 0 ] , item [ 1 ] ] , item [ - 1 ] , start , end )
loops through all the data and inserts them into the empty tree
51,965
def trim_tree ( self , node ) : data_len = len ( node [ - 1 ] ) if node [ 1 ] == - 1 and node [ 2 ] == - 1 : if data_len == 0 : return 1 else : return 0 else : if self . trim_tree ( node [ 1 ] ) == 1 : node [ 1 ] = - 1 if self . trim_tree ( node [ 2 ] ) == 1 : node [ 2 ] = - 1 if node [ 1 ] == - 1 and node [ 2 ] == - 1 : if data_len == 0 : return 1 else : return 0
trims the tree for any empty data nodes
51,966
def find ( self , node , interval , start , end ) : data = [ ] if len ( interval ) != 2 : raise SyntaxError ( 'Interval malformed %s. Allways specify start and end position for interval.' % str ( interval ) ) left = ( start , node [ 0 ] ) right = ( node [ 0 ] , end ) if self . overlap ( left , interval ) : data . extend ( node [ - 1 ] ) if node [ 1 ] != - 1 : data . extend ( self . find ( node [ 1 ] , interval , left [ 0 ] , left [ 1 ] ) ) if self . overlap ( right , interval ) : data . extend ( node [ - 1 ] ) if node [ 2 ] != - 1 : data . extend ( self . find ( node [ 2 ] , interval , right [ 0 ] , right [ 1 ] ) ) return list ( set ( data ) )
recursively finds ids within a range
51,967
def find_range ( self , interval ) : return self . find ( self . tree , interval , self . start , self . end )
wrapper for find
51,968
def pprint ( self , ind ) : pp = pprint . PrettyPrinter ( indent = ind ) pp . pprint ( self . tree )
pretty prints the tree with indentation
51,969
def get_model_core(model):
    """Return the registered core view of the given model, or None.

    The registry key is the lower-cased "app_label.ObjectName" label.
    Fixed: uses the ``str.lower()`` method instead of a module-level
    ``lower()`` helper (the Python 2 ``string.lower`` function is gone
    in Python 3).
    """
    model_label = ('%s.%s' % (model._meta.app_label, model._meta.object_name)).lower()
    return registered_model_cores.get(model_label)
Return core view of given model or None
51,970
def create_repo ( url , vcs , ** kwargs ) : r if vcs == 'git' : return GitRepo ( url , ** kwargs ) elif vcs == 'hg' : return MercurialRepo ( url , ** kwargs ) elif vcs == 'svn' : return SubversionRepo ( url , ** kwargs ) else : raise InvalidVCS ( 'VCS %s is not a valid VCS' % vcs )
r Return a object representation of a VCS repository .
51,971
def create_repo_from_pip_url ( pip_url , ** kwargs ) : r if pip_url . startswith ( 'git+' ) : return GitRepo . from_pip_url ( pip_url , ** kwargs ) elif pip_url . startswith ( 'hg+' ) : return MercurialRepo . from_pip_url ( pip_url , ** kwargs ) elif pip_url . startswith ( 'svn+' ) : return SubversionRepo . from_pip_url ( pip_url , ** kwargs ) else : raise InvalidPipURL ( pip_url )
r Return a object representation of a VCS repository via pip - style url .
51,972
def set_paths ( self , base_dir , script_path ) : if 'change_dir' in self . kwargs : self . kwargs [ 'change_dir' ] = DirectoryPath ( os . path . abspath ( self . kwargs [ 'change_dir' ] ) ) if 'out_file' in self . kwargs : self . kwargs [ 'out_file' ] = FilePath ( os . path . abspath ( self . kwargs [ 'out_file' ] ) ) if base_dir is not None : self . base_dir = DirectoryPath ( os . path . abspath ( base_dir ) ) self . script_path = FilePath ( base_dir + "run." + self . extensions [ self . language ] ) self . kwargs [ 'change_dir' ] = base_dir self . kwargs [ 'out_file' ] = FilePath ( base_dir + "run.out" ) if base_dir is None and script_path is None : self . script_path = FilePath ( new_temp_path ( ) ) if script_path is not None : self . script_path = FilePath ( os . path . abspath ( script_path ) )
Set the directory the script path and the outfile path
51,973
def slurm_params ( self ) : result = OrderedDict ( ) for param , info in self . slurm_headers . items ( ) : if not info [ 'needed' ] and not param in self . kwargs : continue if param in self . kwargs : result [ param ] = self . kwargs . get ( param ) else : result [ param ] = info [ 'default' ] if result . get ( 'cluster' ) == 'halvan' : result [ 'partition' ] = 'halvan' return result
The list of parameters to give to the sbatch command .
51,974
def script ( self ) : self . shebang_header = self . shebang_headers [ self . language ] self . slurm_header = [ self . slurm_headers [ k ] [ 'tag' ] % v for k , v in self . slurm_params . items ( ) ] self . script_header = self . script_headers [ self . language ] self . script_footer = self . script_footers [ self . language ] return '\n' . join ( flatter ( [ self . shebang_header , self . slurm_header , self . script_header , self . command , self . script_footer ] ) )
The script to be submitted to the SLURM queue .
51,975
def make_script ( self ) : self . script_path . write ( self . script ) self . script_path . permissions . make_executable ( ) return self . script_path
Make the script and return a FilePath object pointing to the script above .
51,976
def status(self):
    """What is the current status of this job?

    NOTE(review): reconstructed from a flattened source — the exact
    nesting of the first checks should be confirmed against the original.
    """
    # No script on disk: either never launched, or a lost name collision #
    if not self.script_path.exists:
        if self.name in jobs.names: return "DUPLICATE"
        if self.name not in jobs.names: return "READY"
    # Known to the queue manager: report its queue state #
    if self.name in jobs.names:
        if jobs[self.name]['type'] == 'queued': return "QUEUED"
        if jobs[self.name]['type'] == 'running': return "RUNNING"
    # Otherwise inspect the output file and the tail of the log #
    if not self.kwargs['out_file'].exists: return "ABORTED"
    if 'CANCELED' in self.log_tail: return "CANCELLED"
    if 'slurmstepd: error' in self.log_tail: return "CANCELLED"
    if 'SLURM: end at' in self.log_tail: return "FINISHED"
    # NOTE(review): misspelling kept — callers may match this exact string #
    return "INTERUPTED"
What is the status of the job ?
51,977
def info(self):
    """Return the existing job information dictionary, or just the status
    when the job is unknown to the queue manager."""
    if self.name in jobs:
        return jobs[self.name]
    return {'status': self.status}
Get the existing job information dictionary
51,978
def launch ( self ) : self . make_script ( ) sbatch_out = sh . sbatch ( self . script_path ) jobs . expire ( ) print Color . i_blu + "SLURM:" + Color . end + " " + str ( sbatch_out ) , self . id = int ( re . findall ( "Submitted batch job ([0-9]+)" , str ( sbatch_out ) ) [ 0 ] ) return self . id
Make the script file and return the newly created job id
51,979
def run_locally(self):
    """Run the same command as the SLURM job would, but locally in a
    non-blocking background thread. Useful for testing."""
    self.thread = threading.Thread(target=self.execute_locally)
    # Daemon thread: don't keep the interpreter alive for this job #
    self.thread.daemon = True
    self.thread.start()
A convenience method to run the same command as a SLURM job but locally in a non-blocking way. Useful for testing.
51,980
def execute_locally(self):
    """Run the equivalent command locally, blocking until it finishes."""
    self.make_script()
    # Both stdout and stderr are redirected to the same output file #
    with open(self.kwargs['out_file'], 'w') as handle:
        sh.python(self.script_path, _out=handle, _err=handle)
Runs the equivalent command locally in a blocking way .
51,981
def wait_locally ( self ) : try : self . thread . join ( sys . maxint ) except KeyboardInterrupt : print "Stopped waiting on job '%s'" % self . kwargs [ 'job_name' ]
If you have run the query in a non - blocking way call this method to pause until the query is finished .
51,982
def get_widget(self, request):
    """Return the concrete widget instance used to render the table filter.

    The ``widget`` attribute may hold either an instance or a widget
    class; classes are instantiated on the fly. *request* is not used here.
    """
    widget = self.widget
    return widget() if isinstance(widget, type) else widget
Returns concrete widget that will be used for rendering table filter .
51,983
def distroinfo(cargs, version=__version__):
    """distroinfo command-line interface entry point.

    Parses *cargs* with docopt and dispatches to the requested subcommand.
    Returns a shell exit code (0 on success, 1 by default on failure).
    """
    code = 1
    args = docopt(__doc__, argv=cargs)
    try:
        if args['--version']:
            # Fall back to a placeholder when no version string is known #
            print(version if version else 'N/A')
            code = 0
        elif args['fetch']:
            code = fetch(info_url=args['<info-url>'],
                         info_files=args['<info-file>'],
                         cache_dir=args['--cache-dir'],
                         fetcher=args['--fetcher'])
        elif args['dump']:
            code = dump(info_url=args['<info-url>'],
                        info_files=args['<info-file>'],
                        yaml_out=args['--yaml-out'],
                        json_out=args['--json-out'],
                        cache_dir=args['--cache-dir'],
                        fetcher=args['--fetcher'])
    except (exception.InvalidInfoFormat, KeyboardInterrupt) as ex:
        # Exceptions may carry their own exit code #
        code = getattr(ex, 'exit_code', code)
        print("")
        print(str(ex) or type(ex).__name__)
    return code
distroinfo Command - Line Interface
51,984
def _get_error_response(self, exception):
    """Transform pyston exceptions into Is-core HTTP response exceptions.

    If the exception type is recognized, the matching response exception
    class is raised; otherwise handling is delegated to the parent class.
    """
    translation = {
        MimerDataException: HTTPBadRequestResponseException,
        NotAllowedException: HTTPForbiddenResponseException,
        UnsupportedMediaTypeException: HTTPUnsupportedMediaTypeResponseException,
        Http404: Http404,
        ResourceNotFoundException: Http404,
        NotAllowedMethodException: HTTPMethodNotAllowedResponseException,
        DuplicateEntryException: HTTPDuplicateResponseException,
        ConflictException: HTTPDuplicateResponseException,
    }
    mapped = translation.get(type(exception))
    if mapped:
        raise mapped
    return super(RESTResourceMixin, self)._get_error_response(exception)
Transform pyston exceptions to Is-core exceptions and raise them.
51,985
def reprompt_error(self, message=None):
    """Re-ask the current question after erroneous input.

    Rolls the session's state machine back one step and returns a
    question() built from *message*, or from a random reprompt text
    declared for the (rolled-back) current step.
    """
    try:
        session_id = session.sessionId
        self.session_machines.rollback_fsm(session_id)
        current_state = self.session_machines.current_state(session_id)
        if message is not None:
            err_msg = message
        else:
            err_msg = choice(self._scenario_steps[current_state]['reprompt'])
        return question(err_msg)
    except UninitializedStateMachine as e:
        logger.error(e)
        return statement(INTERNAL_ERROR_MSG)
Intended to be used in case of erroneous input data
51,986
def move_to_step(self, step):
    """Jump the session's state machine directly to *step*.

    Raises UndefinedState for steps missing from the scenario; returns an
    error statement() if the session machine was never initialized.
    """
    if step not in self._scenario_steps.keys():
        raise UndefinedState("step {} not defined in scenario".format(step))
    try:
        self.session_machines.set_state(session.sessionId, step)
    except UninitializedStateMachine as e:
        logger.error(e)
        return statement(INTERNAL_ERROR_MSG)
Use in cases when you need to move in given step depending on input
51,987
def get_current_state(self):
    """Return the current state of the user's session, or None (implicitly)
    when the session's state machine does not exist."""
    try:
        return self.session_machines.current_state(session.sessionId)
    except UninitializedStateMachine as e:
        logger.error(e)
Get current state for user session, or None if session doesn't exist.
51,988
def get_help(self):
    """Return context help for the current step.

    Falls back to the default help texts when the step declares none,
    and to an error statement when no current state can be determined.
    """
    current_state = self.get_current_state()
    if current_state is None:
        return statement(INTERNAL_ERROR_MSG)
    try:
        return choice(self._scenario_steps[current_state]['help'])
    except KeyError:
        # No per-step help declared in the scenario file #
        return choice(self._default_help)
Get context help depending on the current step . If no help for current step was specified in scenario description file default one will be returned .
51,989
def run(self):
    """Run every enabled test class, logging each result and a summary.

    Returns the overall pass/fail flag (False as soon as any test fails).
    """
    for cls in self.get_test_classes():
        self.logger.info('Running {cls.__name__} test...'.format(cls=cls))
        outcome = cls(runner=self)._run()
        if outcome:
            self.logger.passed('Test {cls.__name__} succeeded!'.format(cls=cls))
        else:
            self.logger.failed('Test {cls.__name__} failed!'.format(cls=cls))
            self.has_passed = False
    if self.has_passed:
        self.logger.passed('Summary: All tests passed!')
    else:
        self.logger.failed('Summary: One or more tests failed!')
    return self.has_passed
Runs all enabled tests .
51,990
def iiif_image_key(obj):
    """Generate the IIIF image key ``<bucket>:<version>:<key>`` for *obj*.

    Accepts either an ObjectVersion instance or a dict-like record with
    'bucket', 'version_id' and 'key' entries.
    """
    if isinstance(obj, ObjectVersion):
        parts = (obj.bucket_id, obj.version_id, obj.key)
    else:
        parts = (obj.get('bucket'), obj.get('version_id'), obj.get('key'))
    return u'{}:{}:{}'.format(*parts)
Generate the IIIF image key .
51,991
def ui_iiif_image_url(obj, version='v2', region='full', size='full',
                      rotation=0, quality='default', image_format='png'):
    """Generate a IIIF image URL served by the UI application."""
    template = (u'{prefix}{version}/{identifier}/{region}/{size}/{rotation}/'
                u'{quality}.{image_format}')
    return template.format(
        prefix=current_app.config['IIIF_UI_URL'],
        version=version,
        # Keep the ':' separators of the image key unescaped in the URL #
        identifier=quote(iiif_image_key(obj).encode('utf8'), safe=':'),
        region=region,
        size=size,
        rotation=rotation,
        quality=quality,
        image_format=image_format,
    )
Generate IIIF image URL from the UI application .
51,992
def preview(file):
    """Render the configured IIIF preview template for *file*."""
    params = deepcopy(current_app.config['IIIF_PREVIEWER_PARAMS'])
    # Default the output format from the file extension when unconfigured #
    if 'image_format' not in params:
        params['image_format'] = 'png' if file.has_extensions('.png') else 'jpg'
    return render_template(
        current_app.config['IIIF_PREVIEW_TEMPLATE'],
        file=file,
        file_url=ui_iiif_image_url(file.file, **params),
    )
Render appropriate template with embed flag .
51,993
def secure(view):
    """Wrap *view*'s dispatch() with the configured auth decorator.

    The decorator's dotted path is read from SLACKCHAT_AUTH_DECORATOR,
    defaulting to Django admin's staff_member_required.
    """
    AUTH = getattr(settings, 'SLACKCHAT_AUTH_DECORATOR',
                   'django.contrib.admin.views.decorators.staff_member_required')
    return method_decorator(import_class(AUTH), name='dispatch')(view)
Set an auth decorator applied for views . If DEBUG is on we serve the view without authenticating . Default is django . contrib . auth . decorators . login_required . Can also be django . contrib . admin . views . decorators . staff_member_required or a custom decorator .
51,994
def up(cloud_init, use_snapshots, upgrade_image, snapshot_cluster, snapshot_time):
    """Create a new cluster."""
    try:
        config = CloudConfig()
        # Only build a cloud-init payload when requested #
        ci = CloudInit() if cloud_init else None
        CloudController(config).up(ci, use_snapshots, upgrade_image,
                                   snapshot_cluster, snapshot_time)
    except CloudComposeException as ex:
        print(ex)
creates a new cluster
51,995
def down(force):
    """Destroy an existing cluster."""
    try:
        controller = CloudController(CloudConfig())
        controller.down(force)
    except CloudComposeException as ex:
        print(ex)
destroys an existing cluster
51,996
def cleanup():
    """Delete launch configs and the auto scaling group."""
    try:
        controller = CloudController(CloudConfig())
        controller.cleanup()
    except CloudComposeException as ex:
        print(ex)
deletes launch configs and auto scaling group
51,997
def build():
    """Build the cloud_init script for the 'cluster' config and print it."""
    try:
        config_data = CloudConfig().config_data('cluster')
        print(CloudInit().build(config_data))
    except CloudComposeException as ex:
        print(ex)
builds the cloud_init script
51,998
def flush(signal_names, exclude, wait):
    """Send pending signals over the message bus.

    Selects signal models by name (or all but the excluded ones), warns
    about unknown names, then flushes the selection — optionally waiting
    up to *wait* seconds. Exits with code 1 on any flushing error.
    """
    signalbus = current_app.extensions['signalbus']
    requested = set(signal_names)
    excluded = set(exclude)
    selected = signalbus.get_signal_models()
    if requested and excluded:
        click.echo('Warning: Specified both SIGNAL_NAMES and exclude option.')
    known = {m.__name__ for m in selected}
    if requested:
        unknown = requested - known
        selected = [m for m in selected if m.__name__ in requested]
    else:
        unknown = excluded - known
    for name in unknown:
        click.echo('Warning: A signal with name "{}" does not exist.'.format(name))
    selected = [m for m in selected if m.__name__ not in excluded]
    logger = logging.getLogger(__name__)
    try:
        if wait is None:
            signal_count = signalbus.flush(selected)
        else:
            # Negative waits are clamped to "don't wait" #
            signal_count = signalbus.flush(selected, wait=max(0.0, wait))
    except Exception:
        logger.exception('Caught error while sending pending signals.')
        sys.exit(1)
    if signal_count == 1:
        logger.warning('%i signal has been successfully processed.', signal_count)
    elif signal_count > 1:
        logger.warning('%i signals have been successfully processed.', signal_count)
Send pending signals over the message bus .
51,999
def signals():
    """Show all signal types (one registered signal model name per line)."""
    signalbus = current_app.extensions['signalbus']
    for model in signalbus.get_signal_models():
        click.echo(model.__name__)
Show all signal types .