idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,100
def execute():
    """Entry point of the install helper tool to ease the download of the
    right version of the ANTLR v4 tool jar.
    """
    arg_parser = ArgumentParser(description='Install helper tool to download the right version of the ANTLR v4 tool jar.')
    arg_parser.add_argument(
        '--version', action='version',
        version='%(prog)s {version}'.format(version=__version__))
    mode_group = arg_parser.add_mutually_exclusive_group()
    mode_group.add_argument(
        '-f', '--force', action='store_true', default=False,
        help='force download even if local antlr4.jar already exists')
    mode_group.add_argument(
        '-l', '--lazy', action='store_true', default=False,
        help='don\'t report an error if local antlr4.jar already exists and don\'t try to download it either')
    args = arg_parser.parse_args()
    install(force=args.force, lazy=args.lazy)
Entry point of the install helper tool to ease the download of the right version of the ANTLR v4 tool jar .
53,101
def make_tsv_row(values: List[Any]) -> str:
    """From a list of values, make a single (newline-terminated) TSV line."""
    escaped = [tsv_escape(v) for v in values]
    return "\t".join(escaped) + "\n"
From a list of values make a TSV line .
53,102
def dictlist_to_tsv(dictlist: List[Dict[str, Any]]) -> str:
    """From a consistent list of dictionaries mapping fieldnames to values,
    make a TSV file: a header row, then one row per dictionary."""
    if not dictlist:
        return ""
    # All dictionaries are assumed to share the first one's keys.
    fieldnames = dictlist[0].keys()
    lines = ["\t".join(tsv_escape(f) for f in fieldnames) + "\n"]
    for record in dictlist:
        lines.append("\t".join(tsv_escape(v) for v in record.values()) + "\n")
    return "".join(lines)
From a consistent list of dictionaries mapping fieldnames to values make a TSV file .
53,103
def import_submodules(package: Union[str, ModuleType],
                      base_package_for_relative_import: str = None,
                      recursive: bool = True) -> Dict[str, ModuleType]:
    """Import all submodules of a module, recursively, including subpackages.

    Returns a dict mapping full module names to the imported modules.
    """
    if isinstance(package, str):
        package = importlib.import_module(package,
                                          base_package_for_relative_import)
    results = {}
    for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
        full_name = package.__name__ + '.' + name
        log.debug("importing: {}", full_name)
        results[full_name] = importlib.import_module(full_name)
        if recursive and is_pkg:
            results.update(import_submodules(full_name))
    return results
Import all submodules of a module recursively including subpackages .
53,104
def preserve_cwd(func: Callable) -> Callable:
    """Decorator to preserve the current working directory in calls to the
    decorated function.

    BUG FIX: the original did not restore the working directory if the
    decorated function raised; use try/finally so restoration always happens.
    Also preserves the wrapped function's metadata via functools.wraps.
    """
    from functools import wraps

    @wraps(func)
    def decorator(*args_, **kwargs) -> Any:
        cwd = os.getcwd()
        try:
            return func(*args_, **kwargs)
        finally:
            os.chdir(cwd)  # restored even on exception
    return decorator
Decorator to preserve the current working directory in calls to the decorated function .
53,105
def copyglob(src: str, dest: str, allow_nothing: bool = False,
             allow_nonfiles: bool = False) -> None:
    """Copies files whose filenames match the glob ``src`` into the directory
    ``dest``. Raises ValueError if no files are copied, unless
    ``allow_nothing`` is True."""
    copied_any = False
    for filename in glob.glob(src):
        if allow_nonfiles or os.path.isfile(filename):
            shutil.copy(filename, dest)
            copied_any = True
    if not (copied_any or allow_nothing):
        raise ValueError("No files found matching: {}".format(src))
Copies files whose filenames match the glob src into the directory dest . Raises an error if no files are copied unless allow_nothing is True .
53,106
def rmtree(directory: str) -> None:
    """Deletes a directory tree (logging the deletion first)."""
    log.debug("Deleting directory {!r}", directory)
    # shutil_rmtree_onerror handles e.g. read-only files on some platforms.
    shutil.rmtree(directory, onerror=shutil_rmtree_onerror)
Deletes a directory tree .
53,107
def chown_r(path: str, user: str, group: str) -> None:
    """Performs a recursive chown: changes ownership of ``path`` itself and
    of everything beneath it to ``user``:``group``.

    FIX: the original version skipped the root ``path`` itself (the sibling
    chmod_r does chmod the root); include it for a truly recursive chown.
    """
    shutil.chown(path, user, group)
    for root, dirs, files in os.walk(path):
        for entry in dirs + files:
            shutil.chown(os.path.join(root, entry), user, group)
Performs a recursive chown .
53,108
def chmod_r(root: str, permission: int) -> None:
    """Recursive chmod: applies ``permission`` (e.g. ``0o700``) to ``root``
    and everything beneath it."""
    os.chmod(root, permission)
    for dirpath, dirnames, filenames in os.walk(root):
        for entry in dirnames + filenames:
            os.chmod(os.path.join(dirpath, entry), permission)
Recursive chmod .
53,109
def _get_errors ( self ) : errors = self . json . get ( 'data' ) . get ( 'failures' ) if errors : logger . error ( errors ) return errors
Gets errors from HTTP response
53,110
def page_sequence(n_sheets: int, one_based: bool = True) -> List[int]:
    """Generates the final page sequence from the starting number of sheets
    (booklet imposition order)."""
    n_pages = calc_n_virtual_pages(n_sheets)
    assert n_pages % 4 == 0
    half = n_pages // 2
    first_half = list(range(half))
    second_half = list(reversed(range(half, n_pages)))
    sequence = []
    top = True  # alternate the left/right order on alternate rows
    for left, right in zip(second_half, first_half):
        if not top:
            left, right = right, left
        sequence += [left, right]
        top = not top
    if one_based:
        sequence = [x + 1 for x in sequence]
    log.debug("{} sheets => page sequence {!r}", n_sheets, sequence)
    return sequence
Generates the final page sequence from the starting number of sheets .
53,111
def require(executable: str, explanation: str = "") -> None:
    """Ensures that the external tool is available on the PATH; asserts
    (with an optional explanation) upon failure."""
    suffix = "\n" + explanation if explanation else ""
    assert shutil.which(executable), \
        "Need {!r} on the PATH.{}".format(executable, suffix)
Ensures that the external tool is available . Asserts upon failure .
53,112
def get_page_count(filename: str) -> int:
    """How many pages are in a PDF? Asks pdftk's dump_data output."""
    log.debug("Getting page count for {!r}", filename)
    require(PDFTK, HELP_MISSING_PDFTK)
    stdout, _ = run([PDFTK, filename, "dump_data"], get_output=True)
    m = re.search(r"^NumberOfPages: (\d+)$", stdout, re.MULTILINE)
    if m:
        return int(m.group(1))
    raise ValueError("Can't get PDF page count for: {!r}".format(filename))
How many pages are in a PDF?
53,113
def make_blank_pdf(filename: str, paper: str = "A4") -> None:
    """NOT USED. Makes a blank single-page PDF using ImageMagick's convert."""
    require(CONVERT, HELP_MISSING_IMAGEMAGICK)
    # "xc:none" is ImageMagick's transparent pseudo-image.
    run([CONVERT, "xc:none", "-page", paper, filename])
NOT USED. Makes a blank single-page PDF using ImageMagick's convert.
53,114
def slice_pdf(input_filename: str, output_filename: str,
              slice_horiz: int, slice_vert: int) -> str:
    """Slice each page of the original, to convert to "one real page per PDF
    page". Returns the output filename (the input filename if no slicing was
    required)."""
    if slice_horiz == 1 and slice_vert == 1:
        log.debug("No slicing required")
        return input_filename  # nothing to do
    # NOTE(review): "page mv" in this log message looks like a typo; kept
    # verbatim -- confirm.
    log.info("Slicing each source page mv into {} horizontally x {} vertically",
             slice_horiz, slice_vert)
    log.debug("... from {!r} to {!r}", input_filename, output_filename)
    require(MUTOOL, HELP_MISSING_MUTOOL)
    run([MUTOOL, "poster",
         "-x", str(slice_horiz),
         "-y", str(slice_vert),
         input_filename, output_filename])
    return output_filename
Slice each page of the original to convert to one real page per PDF page . Return the output filename .
53,115
def rotate_even_pages_180(input_filename: str, output_filename: str) -> str:
    """Rotates even-numbered pages 180 degrees (for long-edge duplex
    printing). Returns the output filename."""
    log.info("Rotating even-numbered pages 180 degrees for long-edge "
             "duplex printing")
    log.debug("... {!r} -> {!r}", input_filename, output_filename)
    require(PDFTK, HELP_MISSING_PDFTK)
    run([
        PDFTK,
        "A=" + input_filename,
        "shuffle",
        "Aoddnorth",   # odd pages: normal orientation
        "Aevensouth",  # even pages: rotated 180 degrees
        "output",
        output_filename,
    ])
    return output_filename
Rotates even - numbered pages 180 degrees . Returns the output filename .
53,116
def convert_to_foldable(input_filename: str,
                        output_filename: str,
                        slice_horiz: int,
                        slice_vert: int,
                        overwrite: bool = False,
                        longedge: bool = False,
                        latex_paper_size: str = LATEX_PAPER_SIZE_A4) -> bool:
    """Runs a chain of tasks to convert a PDF to a useful booklet PDF.
    Returns True on success, False on failure."""
    if not os.path.isfile(input_filename):
        log.warning("Input file does not exist or is not a file")
        return False
    if not overwrite and os.path.isfile(output_filename):
        log.error("Output file exists; not authorized to overwrite (use "
                  "--overwrite if you are sure)")
        return False
    log.info("Processing {!r}", input_filename)
    with tempfile.TemporaryDirectory() as tmpdir:
        log.debug("Using temporary directory {!r}", tmpdir)
        intermediate_num = 0

        def make_intermediate() -> str:
            # Each processing stage writes to a fresh numbered file.
            nonlocal intermediate_num
            intermediate_num += 1
            return os.path.join(
                tmpdir, "intermediate_{}.pdf".format(intermediate_num))

        # Stage 1: slice multi-page-per-sheet sources.
        input_filename = slice_pdf(
            input_filename=input_filename,
            output_filename=make_intermediate(),
            slice_horiz=slice_horiz,
            slice_vert=slice_vert)
        # Stage 2: 2-up booklet imposition.
        input_filename = booklet_nup_pdf(
            input_filename=input_filename,
            output_filename=make_intermediate(),
            latex_paper_size=latex_paper_size)
        # Stage 3 (optional): rotation for long-edge duplex printers.
        if longedge:
            input_filename = rotate_even_pages_180(
                input_filename=input_filename,
                output_filename=make_intermediate())
        log.info("Writing to {!r}", output_filename)
        shutil.move(input_filename, output_filename)
        return True
Runs a chain of tasks to convert a PDF to a useful booklet PDF .
53,117
def get_mysql_vars(mysql: str,
                   host: str,
                   port: int,
                   user: str) -> Dict[str, str]:
    """Asks MySQL for its variables and status, returning them as a dict.
    Prompts interactively for the password (bare ``-p`` flag)."""
    cmdargs = [
        mysql,
        "-h", host,
        "-P", str(port),
        "-e", "SHOW VARIABLES; SHOW STATUS",
        "-u", user,
        "-p",  # no argument: mysql prompts for the password
    ]
    log.info("Connecting to MySQL with user: {}", user)
    log.debug(cmdargs)
    process = subprocess.Popen(cmdargs, stdout=subprocess.PIPE)
    out, err = process.communicate()
    mysqlvars = {}
    for line in out.decode("utf8").splitlines():
        var, val = line.split("\t")
        mysqlvars[var] = val
    return mysqlvars
Asks MySQL for its variables and status .
53,118
def add_var_mb(table: PrettyTable,
               vardict: Dict[str, str],
               varname: str) -> None:
    """Adds a row to ``table`` for ``varname``, with its value converted to
    megabytes."""
    valstr = vardict.get(varname, None)
    table.add_row([varname, val_mb(valstr), UNITS_MB])
Adds a row to table for varname in megabytes .
53,119
def report_line ( zipfilename : str , contentsfilename : str , line : str , show_inner_file : bool ) -> None : if show_inner_file : print ( "{} [{}]: {}" . format ( zipfilename , contentsfilename , line ) ) else : print ( "{}: {}" . format ( zipfilename , line ) )
Prints a line from a file with the . zip filename and optionally also the inner filename .
53,120
def parse_zip(zipfilename: str,
              regex: Pattern,
              invert_match: bool,
              files_with_matches: bool,
              files_without_match: bool,
              grep_inner_file_name: bool,
              show_inner_file: bool) -> None:
    """Implement a grep within an OpenXML file (a single OpenXML file, which
    is by definition a .zip file)."""
    assert not (files_without_match and files_with_matches)
    report_lines = (not files_without_match) and (not files_with_matches)
    report_hit_lines = report_lines and not invert_match
    report_miss_lines = report_lines and invert_match
    log.debug("Checking ZIP: " + zipfilename)
    found_in_zip = False
    try:
        with ZipFile(zipfilename, 'r') as zf:
            for contentsfilename in zf.namelist():
                log.debug("... checking file: " + contentsfilename)
                if grep_inner_file_name:
                    # Grep the inner filename itself, not its contents.
                    found_in_filename = bool(regex.search(contentsfilename))
                    found_in_zip = found_in_zip or found_in_filename
                    if files_with_matches and found_in_zip:
                        report_hit_filename(zipfilename, contentsfilename,
                                            show_inner_file)
                        return
                    if ((report_hit_lines and found_in_filename) or
                            (report_miss_lines and not found_in_filename)):
                        report_line(zipfilename, contentsfilename,
                                    contentsfilename, show_inner_file)
                else:
                    # Grep the inner file's contents, line by line.
                    try:
                        with zf.open(contentsfilename, 'r') as file:
                            try:
                                for line in file.readlines():
                                    found_in_line = bool(regex.search(line))
                                    found_in_zip = (found_in_zip or
                                                    found_in_line)
                                    if files_with_matches and found_in_zip:
                                        report_hit_filename(
                                            zipfilename, contentsfilename,
                                            show_inner_file)
                                        return
                                    if ((report_hit_lines and
                                         found_in_line) or
                                            (report_miss_lines and
                                             not found_in_line)):
                                        report_line(zipfilename,
                                                    contentsfilename,
                                                    line, show_inner_file)
                            except EOFError:
                                pass
                    except RuntimeError as e:
                        log.warning(
                            "RuntimeError whilst processing {} [{}]: probably "
                            "encrypted contents; error was {!r}",
                            zipfilename, contentsfilename, e)
    except (zlib.error, BadZipFile) as e:
        log.debug("Invalid zip: {}; error was {!r}", zipfilename, e)
    if files_without_match and not found_in_zip:
        report_miss_filename(zipfilename)
Implement a grep within an OpenXML file for a single OpenXML file which is by definition a . zip file .
53,121
def merge_metrics(self):
    """Merge the values of the internal ``_metrics`` dict into the
    ``metrics`` list, then delete the internal ``_metrics``."""
    self.metrics.extend(self._metrics.values())
    del self._metrics  # no longer needed once merged
Merge metrics in the internal _metrics dict to metrics list and delete the internal _metrics
53,122
def pairwise(iterable):
    """Pair each element with its right-hand neighbour: yields
    (x0, x1), (x1, x2), ...

    BUG FIX: under PEP 479 (Python 3.7+), the bare ``next()`` on an empty
    iterable raised StopIteration inside a generator, which surfaces as
    RuntimeError. An empty (or single-element) iterable now simply yields
    nothing.
    """
    it = iter(iterable)
    try:
        left = next(it)
    except StopIteration:
        return  # empty input: nothing to pair
    for right in it:
        yield left, right
        left = right
Pair each element with its neighbors .
53,123
def partition(pred, iterable):
    """Partition an iterable by a predicate; returns ``(neg, pos)`` — the
    non-matching elements, then the matching ones."""
    pos, neg = [], []
    for elem in iterable:
        (pos if pred(elem) else neg).append(elem)
    return neg, pos
Partition an iterable .
53,124
def powerset(iterable, *, reverse=False):
    """Return the powerset as an iterator of tuples, by increasing subset
    size (or decreasing size if ``reverse``)."""
    items = list(iterable)
    if reverse:
        sizes = range(len(items), -1, -1)
    else:
        sizes = range(len(items) + 1)
    return chain.from_iterable(combinations(items, r) for r in sizes)
Return the powerset .
53,125
def multi_map(key, iterable, *, default_dict=False):
    """Collect data into a multi-map: ``key(record) -> [records]``. Returns
    a defaultdict(list) if ``default_dict``, else a plain dict."""
    result = collections.defaultdict(list)
    for record in iterable:
        result[key(record)].append(record)
    return result if default_dict else dict(result)
Collect data into a multi - map .
53,126
def split(pred, iterable, *, trailing=True):
    """Split the iterable into lists, breaking after (``trailing=True``) or
    before (``trailing=False``) each element matching ``pred``; any
    leftover elements form a final list."""
    group = []
    if trailing:
        for elem in iterable:
            group.append(elem)
            if pred(elem):
                yield group
                group = []
    else:
        for elem in iterable:
            if pred(elem) and group:
                yield group
                group = []
            group.append(elem)
    if group:
        yield group
Split the iterable .
53,127
def chunk(iterable, length):
    """Collect data into tuples of at most ``length`` elements; a negative
    length yields nothing."""
    if length < 0:
        return ()  # islice would raise on a negative stop
    it = iter(iterable)
    while True:
        piece = tuple(islice(it, length))
        if not piece:
            break
        yield piece
Collect data into chunks .
53,128
def divide(iterable, n):
    """Evenly divide elements into ``n`` tuples; earlier tuples receive the
    extras. ``n <= 0`` yields nothing."""
    if n <= 0:
        return []
    data = list(iterable)
    base, rem = divmod(len(data), n)
    it = iter(data)
    for i in range(n):
        size = base + 1 if i < rem else base
        yield tuple(islice(it, size))
Evenly divide elements .
53,129
def divide_sizes(count, n):
    """Evenly divide a count into ``n`` bucket sizes; earlier buckets
    receive the extras. A negative count gives all-zero buckets; ``n <= 0``
    gives an empty list."""
    if n <= 0:
        return []
    if count < 0:
        return [0] * n
    base, rem = divmod(count, n)
    return [base + (1 if i < rem else 0) for i in range(n)]
Evenly divide a count .
53,130
def dump(cursor):
    """Display the AST represented by the (libclang) cursor, as a drawn
    tree."""
    def children(node):
        return list(node.get_children())

    def describe(node):
        label = node.spelling or node.displayname
        kind = str(node.kind).split('.')[1]  # e.g. 'CursorKind.FOO' -> 'FOO'
        return '{} {}'.format(kind, label)

    return draw_tree(cursor, children, describe)
Display the AST represented by the cursor
53,131
def pendulum_to_datetime(x: DateTime) -> datetime.datetime:
    """Converts a Pendulum DateTime to a plain datetime.datetime; used for
    example where a database backend insists on datetime.datetime."""
    return datetime.datetime(x.year, x.month, x.day,
                             x.hour, x.minute, x.second, x.microsecond,
                             tzinfo=x.tzinfo)
Used for example where a database backend insists on datetime . datetime .
53,132
def format_datetime(d: PotentialDatetimeType,
                    fmt: str,
                    default: str = None) -> Optional[str]:
    """Format a datetime with a strftime format specification string, or
    return ``default`` if the input is (or coerces to) None."""
    d = coerce_to_pendulum(d)
    return default if d is None else d.strftime(fmt)
Format a datetime with a strftime format specification string or return default if the input is None .
53,133
def truncate_date_to_first_of_month(
        dt: Optional[DateLikeType]) -> Optional[DateLikeType]:
    """Change the day to the first of the month (passing None through)."""
    return None if dt is None else dt.replace(day=1)
Change the day to the first of the month .
53,134
def request_from_context(context):
    """Do an http request from a context. Works on a deep copy of the
    context; returns the context's wrapped response object."""
    new_context = copy.deepcopy(context)
    assert new_context.method in ALLOWED_METHODS
    new_context.url_path = generate_url_path(
        new_context.url_path,
        prefix=new_context.prefix_url_path,
        format_suffix=new_context.url_path_format,
        **new_context.url_path_params)
    if new_context.body_params or new_context.files:
        body, content_type = new_context.renderer.encode_params(
            new_context.body_params, files=new_context.files)
        # Only set Content-Type if requested and not already present.
        if (new_context.update_content_type and
                HttpSdk.CONTENT_TYPE_HEADER_NAME not in new_context.headers):
            new_context.headers[HttpSdk.CONTENT_TYPE_HEADER_NAME] = \
                content_type
    else:
        body = None
    authentication_instances = new_context.authentication_instances
    for auth_obj in authentication_instances:
        new_context = auth_obj.apply_authentication(new_context)
    if (HttpSdk.COOKIE_HEADER_NAME not in new_context.headers and
            not new_context.cookie.is_empty()):
        new_context.headers[HttpSdk.COOKIE_HEADER_NAME] = \
            new_context.cookie.as_cookie_header_value()
    url = "%s%s" % (new_context.host, new_context.url_path)
    if new_context.query_params:
        url += "?%s" % (urlencode(new_context.query_params))
    log_print_request(new_context.method, url, new_context.query_params,
                      new_context.headers, body)
    r = HttpSdk.get_pool_manager(new_context.proxy).request(
        convert_unicode_to_native_str(new_context.method),
        convert_unicode_to_native_str(url),
        body=body,
        headers=HttpSdk.convert_headers_to_native_str(new_context.headers),
        redirect=new_context.redirect,
        timeout=new_context.timeout)
    log_print_response(r.status, r.data, r.headers)
    r = new_context.response_class(r)
    return r
Do http requests from context .
53,135
def host(self, value):
    """A string that will be automatically included at the beginning of the
    url generated for doing each http request (normalized to
    scheme://host:port)."""
    scheme, host, port = get_hostname_parameters_from_url(value)
    self._host = "%s://%s:%s" % (scheme, host, port)
A string that will be automatically included at the beginning of the url generated for doing each http request .
53,136
def _http_request(self, method, url_path, headers=None, query_params=None,
                  body_params=None, files=None, **kwargs):
    """Method to do http requests: builds an HttpRequestContext from kwargs
    and instance defaults, performs the request, and updates the stored
    cookies from the response."""
    host = kwargs.get('host', self.host)
    proxy = kwargs.get('proxy', self.proxy)
    # A multipart renderer is needed whenever files are being uploaded.
    renderer = kwargs.get(
        'renderer', MultiPartRenderer() if files else self.default_renderer)
    prefix_url_path = kwargs.get('prefix_url_path', self.prefix_url_path)
    authentication_instances = kwargs.get('authentication_instances',
                                          self.authentication_instances)
    url_path_format = kwargs.get('url_path_format', self.url_path_format)
    update_content_type = kwargs.get('update_content_type', True)
    redirect = kwargs.get('redirect', False)
    if headers is None:
        headers = self.default_headers()
    context = HttpRequestContext(
        host=host,
        proxy=proxy,
        method=method,
        prefix_url_path=prefix_url_path,
        url_path=url_path,
        url_path_params=self.url_path_params,
        url_path_format=url_path_format,
        headers=headers,
        query_params=query_params,
        body_params=body_params,
        files=files,
        renderer=renderer,
        response_class=self.response_class,
        authentication_instances=authentication_instances,
        update_content_type=update_content_type,
        redirect=redirect)
    res = self.http_request_from_context(context)
    self.cookie.update(res.cookie)
    return res
Method to do http requests .
53,137
def login(self, **kwargs):
    """Login abstract method with default implementation: POSTs the parsed
    kwargs to LOGIN_URL_PATH, rendered as 'json' unless a 'render' kwarg
    says otherwise."""
    assert self.LOGIN_URL_PATH is not None
    render = get_renderer(kwargs.pop("render", "json"))
    params = parse_args(**kwargs)
    return self.post(self.LOGIN_URL_PATH, body_params=params, render=render)
Login abstract method with default implementation .
53,138
def nhs_check_digit(ninedigits: Union[str, List[Union[str, int]]]) -> int:
    """Calculates an NHS number check digit from the first nine digits.
    Raises ValueError for bad input. May return 10, which callers must
    treat as invalid (see generate_random_nhs_number)."""
    if len(ninedigits) != 9 or not all(str(x).isdigit() for x in ninedigits):
        raise ValueError("bad string to nhs_check_digit")
    total = sum(int(d) * f
                for d, f in zip(ninedigits, NHS_DIGIT_WEIGHTINGS))
    check_digit = 11 - (total % 11)
    if check_digit == 11:
        check_digit = 0  # 11 maps to a check digit of 0
    return check_digit
Calculates an NHS number check digit .
53,139
def generate_random_nhs_number() -> int:
    """Returns a random valid NHS number, as an int."""
    check_digit = 10  # 10 is an invalid check digit; forces at least one try
    while check_digit == 10:
        digits = [random.randint(1, 9)]  # don't start with a zero
        digits += [random.randint(0, 9) for _ in range(8)]
        check_digit = nhs_check_digit(digits)
    digits.append(check_digit)
    return int("".join(str(d) for d in digits))
Returns a random valid NHS number as an int .
53,140
def send_msg(from_addr: str,
             to_addrs: Union[str, List[str]],
             host: str,
             user: str,
             password: str,
             port: int = None,
             use_tls: bool = True,
             msg: email.mime.multipart.MIMEMultipart = None,
             msg_string: str = None) -> None:
    """Sends a pre-built e-mail message.

    Specify exactly one of ``msg`` (a MIMEMultipart object) or
    ``msg_string`` (the raw message text). Raises RuntimeError on any SMTP
    failure.
    """
    assert bool(msg) != bool(msg_string), "Specify either msg or msg_string"
    try:
        session = smtplib.SMTP(host, port)
    except smtplib.SMTPException as e:
        raise RuntimeError(
            "send_msg: Failed to connect to host {}, port {}: {}".format(
                host, port, e))
    try:
        session.ehlo()
    except smtplib.SMTPException as e:
        raise RuntimeError("send_msg: Failed to issue EHLO: {}".format(e))
    if use_tls:
        try:
            session.starttls()
            session.ehlo()  # must re-EHLO after STARTTLS
        except smtplib.SMTPException as e:
            raise RuntimeError(
                "send_msg: Failed to initiate TLS: {}".format(e))
    if user:
        try:
            session.login(user, password)
        except smtplib.SMTPException as e:
            raise RuntimeError(
                "send_msg: Failed to login as user {}: {}".format(user, e))
    else:
        log.debug("Not using SMTP AUTH; no user specified")
    # BUG FIX: the original always called msg.as_string(), which crashed
    # with AttributeError when only msg_string was supplied.
    content = msg.as_string() if msg else msg_string
    try:
        session.sendmail(from_addr, to_addrs, content)
    except smtplib.SMTPException as e:
        raise RuntimeError("send_msg: Failed to send e-mail: {}".format(e))
    session.quit()
Sends a pre - built e - mail message .
53,141
def toposimplify(geojson, p):
    """Convert geojson to topojson and simplify its topology, by piping
    through the external 'geo2topo' and 'toposimplify' tools."""
    geo_proc = subprocess.run(
        ['geo2topo'],
        input=bytes(json.dumps(geojson), 'utf-8'),
        stdout=subprocess.PIPE)
    simp_proc = subprocess.run(
        ['toposimplify', '-P', p],
        input=geo_proc.stdout,
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL)
    topojson = json.loads(simp_proc.stdout)
    # The tools name the unnamed input layer '-'; rename it to 'divisions'.
    topojson['objects']['divisions'] = topojson['objects'].pop('-')
    return topojson
Convert geojson and simplify topology .
53,142
def get_sqlserver_product_version(engine: "Engine") -> Tuple[int]:
    """Gets SQL Server version information as a tuple of ints, by asking the
    server for SERVERPROPERTY('ProductVersion')."""
    assert is_sqlserver(engine), (
        "Only call get_sqlserver_product_version() for Microsoft SQL Server "
        "instances.")
    sql = "SELECT CAST(SERVERPROPERTY('ProductVersion') AS VARCHAR)"
    row = engine.execute(sql).fetchone()
    dotted_version = row[0]  # e.g. "15.0.2000.5"
    return tuple(int(part) for part in dotted_version.split("."))
Gets SQL Server version information .
53,143
def add_ones(a):
    """Adds a column of 1s at the end (rightmost column) of the 2-D
    array ``a``; returns a new float array."""
    out = N.ones((a.shape[0], a.shape[1] + 1))
    out[:, :-1] = a  # last column keeps its initial value of 1
    return out
Adds a column of 1s at the end of the array
53,144
def _uninstall(cls):
    """Uninstall the import hook, if installed (else a no-op)."""
    if cls._hook:
        sys.meta_path.remove(cls._hook)
        cls._hook = None
uninstall the hook if installed
53,145
def add(cls, module_name, fsts, composer):
    """Register a group of fsts to be superimposed on the module given by
    ``module_name`` as soon as it is imported."""
    cls._to_compose.setdefault(module_name, [])
    cls._to_compose[module_name].append((list(fsts), composer))
    cls._install()  # make sure the import hook is active
add a couple of fsts to be superimposed on the module given by module_name as soon as it is imported .
53,146
def add(cls, module_name, msg=''):
    """Until the guard is dropped again, disallow imports of the module
    given by ``module_name``. Raises ImportGuard if it is already
    imported."""
    if module_name in sys.modules:
        raise ImportGuard(
            'Module to guard has already been imported: ' + module_name)
    cls._guards.setdefault(module_name, [])
    cls._guards[module_name].append(msg)
    cls._num_entries += 1
    cls._install()  # make sure the import hook is active
Until the guard is dropped again disallow imports of the module given by module_name .
53,147
def remove(cls, module_name):
    """Drop a previously created guard on ``module_name``; if the module is
    not guarded, this is a no-op."""
    module_guards = cls._guards.get(module_name, False)
    if module_guards:
        module_guards.pop()
        cls._num_entries -= 1
        if cls._num_entries < 1:
            if cls._num_entries < 0:
                raise Exception(
                    'Bug: ImportGuardHook._num_entries became negative!')
            cls._uninstall()  # last guard dropped: remove the hook
Drop a previously created guard on module_name; if the module is not guarded, then this is a no-op.
53,148
def copy(self, extractor=None, needs=None, store=None, data_writer=None,
         persistence=None, extractor_args=None):
    """Use self as a template to build a new Feature, replacing values with
    those given in the keyword arguments."""
    f = Feature(
        extractor or self.extractor,
        needs=needs,
        store=self.store if store is None else store,
        encoder=self.encoder,
        decoder=self.decoder,
        key=self.key,
        data_writer=data_writer,
        persistence=persistence,
        **(extractor_args or self.extractor_args))
    f._fixup_needs()
    return f
Use self as a template to build a new feature replacing values in kwargs
53,149
def _can_compute(self, _id, persistence):
    """Return True if this feature is stored, or is unstored but can be
    computed from stored dependencies."""
    if self.store and self._stored(_id, persistence):
        return True
    if self.is_root:
        return False  # unstored root: nothing upstream to compute from
    return all(dep._can_compute(_id, persistence)
               for dep in self.dependencies)
Return true if this feature stored or is unstored but can be computed from stored dependencies
53,150
def genrows ( cursor : Cursor , arraysize : int = 1000 ) -> Generator [ List [ Any ] , None , None ] : while True : results = cursor . fetchmany ( arraysize ) if not results : break for result in results : yield result
Generate all rows from a cursor .
53,151
def genfirstvalues(cursor: Cursor,
                   arraysize: int = 1000) -> Generator[Any, None, None]:
    """Generate the first value in each row from a cursor."""
    return (row[0] for row in genrows(cursor, arraysize))
Generate the first value in each row .
53,152
def get_values_and_permissible(values: Iterable[Tuple[Any, str]],
                               add_none: bool = False,
                               none_description: str = "[None]") \
        -> Tuple[List[Tuple[Any, str]], List[Any]]:
    """Used when building Colander nodes. Returns ``(values, permissible)``
    where ``permissible`` contains the first element of each original pair.

    NOTE(review): when ``add_none`` is True, the serialized-None pair is
    prepended to ``values`` but NOT added to ``permissible`` — confirm that
    this asymmetry is intended.
    """
    permissible_values = [pair[0] for pair in values]
    if add_none:
        none_tuple = (SERIALIZED_NONE, none_description)
        values = [none_tuple] + list(values)
    return values, permissible_values
Used when building Colander nodes .
53,153
def deserialize(self,
                node: SchemaNode,
                cstruct: Union[str, ColanderNullType]) \
        -> Optional[Pendulum]:
    """Deserializes a string representation to a Pendulum object (or
    colander.null for empty input); raises Invalid for unparseable input."""
    if not cstruct:
        return colander.null
    try:
        result = coerce_to_pendulum(cstruct,
                                    assume_local=self.use_local_tz)
    except (ValueError, ParserError) as e:
        raise Invalid(node, "Invalid date/time: value={!r}, error="
                            "{!r}".format(cstruct, e))
    return result
Deserializes string representation to Python object .
53,154
def get_features_from_equation_file(filename):
    """Returns a list of feature names read from the equation file given by
    ``filename``. Anything after '#' on a line is a comment; blank lines
    are skipped.

    FIX: the original iterated ``open(filename)`` without ever closing the
    file handle; use a ``with`` block so it is closed deterministically.
    """
    features = []
    with open(filename) as fh:
        for line in fh:
            line = line.split('#')[0].strip()
            if line:
                features.append(line)
    return features
Returns a list of feature names read from the equation file given by filename.
53,155
def _compose_pair(self, role, base):
    """Composes onto ``base`` by applying every attribute found on the
    ``role`` object as a transformation; returns the base."""
    for attrname in dir(role):
        transformation = getattr(role, attrname)
        self._apply_transformation(role, base, transformation, attrname)
    return base
composes onto base by applying the role
53,156
def mapper_metro(self, _, data):
    """Map each OSM POI and geotweet, based on a spatial lookup of metro
    area; yields ``(metro, (type_tag, lonlat, payload))``."""
    if 'tags' in data:
        # OSM point of interest.
        type_tag = 1
        lonlat = data['coordinates']
        payload = data['tags']
    elif 'user_id' in data:
        # Geotweet; keep only selected source domains.
        type_tag = 2
        accept = ["twitter\.com", "foursquare\.com", "instagram\.com",
                  "untappd\.com"]
        expr = "|".join(accept)
        if not re.findall(expr, data['source']):
            return
        lonlat = data['lonlat']
        payload = None
    metro = self.lookup.get(lonlat, METRO_DISTANCE)
    if not metro:
        return
    yield metro, (type_tag, lonlat, payload)
map each osm POI and geotweets based on spatial lookup of metro area
53,157
def reducer_metro(self, metro, values):
    """Output tags of POI locations nearby tweet locations; yields
    ``((metro, poi_name), 1)`` pairs."""
    lookup = CachedLookup(precision=POI_GEOHASH_PRECISION)
    for i, value in enumerate(values):
        type_tag, lonlat, data = value
        if type_tag == 1:
            # OSM POI: index it for later spatial lookups.
            lookup.insert(i, dict(
                geometry=dict(type='Point', coordinates=project(lonlat)),
                properties=dict(tags=data)))
        else:
            # Geotweet: find named POIs nearby.
            if not lookup.data_store:
                return
            poi_names = []
            kwargs = dict(buffer_size=POI_DISTANCE, multiple=True)
            for poi in lookup.get(lonlat, **kwargs):
                has_tag = [tag in poi['tags'] for tag in POI_TAGS]
                if any(has_tag) and 'name' in poi['tags']:
                    poi_names.append(poi['tags']['name'])
            for poi in set(poi_names):
                yield (metro, poi), 1
Output tags of POI locations nearby tweet locations
53,158
def reducer_output(self, metro, values):
    """Store each record in MongoDB and output tab-delimited lines."""
    records = []
    for value in values:
        total, poi = value
        records.append(dict(metro_area=metro, poi=poi, count=total))
    # NOTE(review): as in the original, the tab-delimited output below uses
    # only the final (total, poi) of the loop; confirm whether one line per
    # value was intended.
    output = "{0}\t{1}\t{2}"
    output = output.format(metro.encode('utf-8'), total,
                           poi.encode('utf-8'))
    yield None, output
    if self.mongo:
        self.mongo.insert_many(records)
store each record in MongoDB and output tab delimited lines
53,159
def fetch_processed_single_clause(element: "ClauseElement",
                                  compiler: "SQLCompiler") -> str:
    """Takes a clause element that must have a single clause, and converts
    it to raw SQL text; raises TypeError otherwise."""
    n_clauses = len(element.clauses)
    if n_clauses != 1:
        raise TypeError(
            "Only one argument supported; {} were passed".format(n_clauses))
    first = element.clauses.get_children()[0]
    return compiler.process(first)
Takes a clause element that must have a single clause and converts it to raw SQL text .
53,160
def clean_int(x) -> int:
    """Returns its parameter as an integer, or raises
    django.forms.ValidationError."""
    try:
        return int(x)
    except ValueError:
        raise forms.ValidationError(
            "Cannot convert to integer: {}".format(repr(x)))
Returns its parameter as an integer or raises django . forms . ValidationError .
53,161
def clean_nhs_number(x) -> int:
    """Returns its parameter as a valid integer NHS number, or raises
    django.forms.ValidationError."""
    try:
        x = int(x)
        if not is_valid_nhs_number(x):
            raise ValueError  # funnel into the same error path below
        return x
    except ValueError:
        raise forms.ValidationError(
            "Not a valid NHS number: {}".format(repr(x)))
Returns its parameter as a valid integer NHS number or raises django . forms . ValidationError .
53,162
def coltype_as_typeengine(
        coltype: Union[VisitableType, TypeEngine]) -> TypeEngine:
    """Instances of SQLAlchemy column types are subclasses of TypeEngine.
    It's possible to specify column types either as such instances, or as
    the class type; this function ensures that classes are converted to
    instances."""
    return coltype if isinstance(coltype, TypeEngine) else coltype()
Instances of SQLAlchemy column types are subclasses of TypeEngine . It s possible to specify column types either as such instances or as the class type . This function ensures that such classes are converted to instances .
53,163
def walk_orm_tree(obj,
                  debug: bool = False,
                  seen: Set = None,
                  skip_relationships_always: List[str] = None,
                  skip_relationships_by_tablename: Dict[str, List[str]] = None,
                  skip_all_relationships_for_tablenames: List[str] = None,
                  skip_all_objects_for_tablenames: List[str] = None) \
        -> Generator[object, None, None]:
    """Starting with a SQLAlchemy ORM object, this function walks a
    relationship tree, yielding each of the objects once (breadth-first)."""
    skip_relationships_always = skip_relationships_always or []
    skip_relationships_by_tablename = skip_relationships_by_tablename or {}
    skip_all_relationships_for_tablenames = (
        skip_all_relationships_for_tablenames or [])
    skip_all_objects_for_tablenames = skip_all_objects_for_tablenames or []
    if seen is None:
        seen = set()
    stack = [obj]
    while stack:
        obj = stack.pop(0)  # FIFO => breadth-first
        if obj in seen:
            continue
        tablename = obj.__tablename__
        if tablename in skip_all_objects_for_tablenames:
            continue
        seen.add(obj)
        if debug:
            log.debug("walk: yielding {!r}", obj)
        yield obj
        insp = inspect(obj)
        for relationship in insp.mapper.relationships:
            attrname = relationship.key
            # Various ways of skipping relationships:
            if attrname in skip_relationships_always:
                continue
            if tablename in skip_all_relationships_for_tablenames:
                continue
            if (tablename in skip_relationships_by_tablename and
                    attrname in skip_relationships_by_tablename[tablename]):
                continue
            if debug:
                log.debug("walk: following relationship {}", relationship)
            related = getattr(obj, attrname)
            if debug and related:
                log.debug("walk: queueing {!r}", related)
            if relationship.uselist:
                stack.extend(related)
            elif related is not None:
                stack.append(related)
Starting with a SQLAlchemy ORM object this function walks a relationship tree yielding each of the objects once .
53,164
def rewrite_relationships(oldobj: object,
                          newobj: object,
                          objmap: Dict[object, object],
                          debug: bool = False,
                          skip_table_names: List[str] = None) -> None:
    """
    Utility function used when copying objects between SQLAlchemy sessions.

    For each relationship of ``oldobj``, points the corresponding
    relationship of ``newobj`` at the new (copied) objects found in
    ``objmap`` (which maps old objects to their copies).
    """
    skip_table_names = skip_table_names or []
    insp = inspect(oldobj)
    for attrname, rel_prop in insp.mapper.relationships.items():
        if rel_prop.viewonly:
            if debug:
                log.debug("Skipping viewonly relationship")
            # View-only relationships are not settable; nothing to rewrite.
            continue
        related_class = rel_prop.mapper.class_
        related_table_name = related_class.__tablename__
        if related_table_name in skip_table_names:
            if debug:
                log.debug("Skipping relationship for related table {!r}",
                          related_table_name)
            continue
        related_old = getattr(oldobj, attrname)
        if rel_prop.uselist:
            related_new = [objmap[r] for r in related_old]
        elif related_old is not None:
            related_new = objmap[related_old]
        else:
            related_new = None
        if debug:
            log.debug("rewrite_relationships: relationship {} -> {}",
                      attrname, related_new)
        setattr(newobj, attrname, related_new)
A utility function only . Used in copying objects between SQLAlchemy sessions .
53,165
def deepcopy_sqla_objects(
        startobjs: List[object],
        session: Session,
        flush: bool = True,
        debug: bool = False,
        debug_walk: bool = True,
        debug_rewrite_rel: bool = False,
        objmap: Dict[object, object] = None) -> None:
    """
    Makes a deep copy of one or more SQLAlchemy ORM objects (following their
    relationship trees) and inserts the copies into ``session``.
    """
    if objmap is None:
        objmap = {}  # maps old objects to their new copies
    # Pass 1: walk the relationship trees and create blank copies.
    if debug:
        log.debug("deepcopy_sqla_objects: pass 1: create new objects")
    seen = set()
    for startobj in startobjs:
        for oldobj in walk_orm_tree(startobj, seen=seen, debug=debug_walk):
            if debug:
                log.debug("deepcopy_sqla_objects: copying {}", oldobj)
            objmap[oldobj] = copy_sqla_object(oldobj, omit_pk=True,
                                              omit_fk=True)
    # Pass 2: point the copies' relationships at the other copies.
    if debug:
        log.debug("deepcopy_sqla_objects: pass 2: set relationships")
    for oldobj, newobj in objmap.items():
        if debug:
            log.debug("deepcopy_sqla_objects: newobj: {}", newobj)
        rewrite_relationships(oldobj, newobj, objmap,
                              debug=debug_rewrite_rel)
    # Pass 3: add all the copies to the session.
    if debug:
        log.debug("deepcopy_sqla_objects: pass 3: insert into session")
    for newobj in objmap.values():
        session.add(newobj)
    if debug:
        log.debug("deepcopy_sqla_objects: done")
    if flush:
        session.flush()
Makes a copy of the specified SQLAlchemy ORM objects inserting them into a new session .
53,166
def deepcopy_sqla_object(startobj: object,
                         session: Session,
                         flush: bool = True,
                         debug: bool = False,
                         debug_walk: bool = False,
                         debug_rewrite_rel: bool = False,
                         objmap: Dict[object, object] = None) -> object:
    """
    Deep-copies a single SQLAlchemy ORM object into ``session`` and returns
    the new copy. Convenience wrapper around :func:`deepcopy_sqla_objects`.
    """
    objmap = {} if objmap is None else objmap
    deepcopy_sqla_objects(
        startobjs=[startobj],
        session=session,
        flush=flush,
        debug=debug,
        debug_walk=debug_walk,
        debug_rewrite_rel=debug_rewrite_rel,
        objmap=objmap,
    )
    return objmap[startobj]  # the copy corresponding to startobj
Makes a copy of the object inserting it into session .
53,167
def attrname_to_colname_dict(cls) -> Dict[str, str]:
    """
    Asks a SQLAlchemy class how its attribute names map to database column
    names, returning an {attribute name: column name} dictionary.
    """
    return {attrname: column.name for attrname, column in gen_columns(cls)}
Asks an SQLAlchemy class how its attribute names correspond to database column names .
53,168
def get_orm_classes_by_table_name_from_base(base: Type) -> Dict[str, Type]:
    """
    Given a SQLAlchemy ORM declarative base, returns a dictionary mapping
    table names to the corresponding ORM classes.
    """
    result = {}
    for orm_class in gen_orm_classes_from_base(base):
        result[orm_class.__tablename__] = orm_class
    return result
Given an SQLAlchemy ORM base class returns a dictionary whose keys are table names and whose values are ORM classes .
53,169
def get_attrdict(self) -> OrderedNamespace:
    """
    Returns a plain-object view (an ``OrderedNamespace``) of this ORM
    object's column values, keyed by column name.
    """
    colnames = self.__table__.columns.keys()
    return OrderedNamespace((name, getattr(self, name)) for name in colnames)
Returns what looks like a plain object with the values of the SQLAlchemy ORM object .
53,170
def from_attrdict(cls, attrdict: OrderedNamespace) -> object:
    """
    Builds a new instance of this ORM class from the values held in an
    attrdict (see ``get_attrdict``).
    """
    return cls(**attrdict.__dict__)
Builds a new instance of the ORM object from values in an attrdict .
53,171
def gen_all_subclasses(cls: Type) -> Generator[Type, None, None]:
    """
    Generates all subclasses of ``cls``, recursively (direct subclasses
    first, then each one's own subclasses, depth-first).
    """
    for subclass in cls.__subclasses__():
        yield subclass
        yield from gen_all_subclasses(subclass)
Generates all subclasses of a class .
53,172
def augment_tensor(matrix, ndim=None):
    """
    Embeds ``matrix`` in the top-left corner of an ``ndim`` x ``ndim``
    identity matrix (by default, one dimension larger than the input).
    Useful for generalizing transformation matrices to homogeneous
    coordinates.
    """
    s = matrix.shape
    n = s[0] + 1 if ndim is None else ndim
    out = N.identity(n)
    out[:s[0], :s[1]] = matrix
    return out
Increase the dimensionality of a tensor splicing it into an identity matrix of a higher dimension . Useful for generalizing transformation matrices .
53,173
def angle(v1, v2, cos=False):
    """
    Find the angle between two vectors.

    Args:
        v1: first vector
        v2: second vector
        cos: if true, return the cosine of the angle instead of the angle
            itself (which is in radians)

    IMPROVEMENT: the cosine is clipped to [-1, 1] before ``arccos``, so that
    floating-point rounding (e.g. for parallel or antiparallel vectors,
    where the ratio can come out as 1 + epsilon) cannot produce NaN.
    """
    n = norm(v1) * norm(v2)
    c = N.clip(dot(v1, v2) / n, -1.0, 1.0)
    return c if cos else N.arccos(c)
Find the angle between two vectors .
53,174
def perpendicular_vector(n):
    """
    Get a vector perpendicular to the given vector ``n``.

    For 2D input, returns the 90-degree counterclockwise rotation of ``n``.
    For higher dimensions, crosses ``n`` with successive basis vectors
    (starting from the last axis) until a nonzero result is found.

    BUGFIX: the previous 2D branch returned ``n`` reversed, i.e. ``[b, a]``
    for input ``[a, b]`` -- whose dot product with the input is ``2ab``, so
    it is only perpendicular when one component is zero. It now returns
    ``[-b, a]``, which is always perpendicular.

    Raises:
        ValueError: if no perpendicular vector can be found (e.g. ``n`` is
            the zero vector in 3+ dimensions)
    """
    dim = len(n)
    if dim == 2:
        return N.array([-n[1], n[0]])
    for ix in range(dim):
        basis = N.zeros(dim)
        basis[dim - ix - 1] = 1
        candidate = N.cross(n, basis)
        if N.linalg.norm(candidate) != 0:
            return candidate
    raise ValueError("Cannot find perpendicular vector")
Get an arbitrary vector perpendicular to the given vector.
53,175
def process_openxml_file(filename: str,
                         print_good: bool,
                         delete_if_bad: bool) -> None:
    """
    Checks an OpenXML file for corruption, then prints its filename
    (either the good or the bad ones, according to ``print_good``) and
    optionally deletes it if corrupt (``delete_if_bad``).
    """
    try:
        good = is_openxml_good(filename)
        # Print good files if print_good is set; bad files otherwise.
        should_print = good if print_good else not good
        if should_print:
            print(filename)
        if delete_if_bad and not good:
            log.warning("Deleting: {}", filename)
            os.remove(filename)
    except Exception as e:
        # Intended for use in subprocesses: make any failure loudly visible
        # before re-raising.
        log.critical("Uncaught error in subprocess: {!r}\n{}",
                     e, traceback.format_exc())
        raise
Prints the filename of or deletes an OpenXML file depending on whether it is corrupt or not .
53,176
def bhattacharyya_distance(pca1, pca2):
    """
    A measure of the distance between two probability distributions, here
    represented by two fitted objects exposing ``coefficients`` (means) and
    a ``covariance_matrix``.

    NOTE(review): ``dot`` is called with three arguments below; NumPy's
    ``dot`` would treat a third positional argument as an output array, so
    this presumably relies on a project-local multi-argument ``dot`` helper
    (chained matrix product) -- confirm.
    """
    u1 = pca1.coefficients
    s1 = pca1.covariance_matrix
    u2 = pca2.coefficients
    s2 = pca2.covariance_matrix
    # Average covariance of the two distributions.
    sigma = (s1 + s2) / 2
    # Sanity checks on the inputs (TODO confirm why strictly positive means
    # and row sums are required here).
    assert all(u1 > 0)
    assert all(u2 > 0)
    assert all(s1.sum(axis=1) > 0)
    assert all(s2.sum(axis=1) > 0)
    # Mahalanobis-like term comparing the means...
    _ = 1 / 8 * dot((u1 - u2).T, N.linalg.inv(sigma), u1 - u2)
    # ...plus a term comparing the covariance determinants.
    _ += 1 / 2 * N.log(N.linalg.det(sigma) /
                       (N.linalg.det(s1) * N.linalg.det(s2)))
    return _
A measure of the distance between two probability distributions
53,177
def produce_csv_output(filehandle: TextIO,
                       fields: Sequence[str],
                       values: Iterable[Iterable[str]]) -> None:
    """
    Produce CSV output, without using ``csv.writer``, so the log can be used
    for lots of things.

    Args:
        filehandle: writable destination
        fields: the header row (one string per column)
        values: iterable of data rows, each itself an iterable of strings
            (one per column)

    Note: delegates to ``output_csv``, which does NOT escape commas or
    quotes within values.
    """
    output_csv(filehandle, fields)
    for row in values:
        output_csv(filehandle, row)
Produce CSV output without using csv . writer so the log can be used for lots of things .
53,178
def output_csv(filehandle: TextIO, values: Iterable[str]) -> None:
    """
    Writes one comma-joined line of values to ``filehandle``.

    POOR; does not escape commas or quotes within values. DEPRECATED.
    """
    filehandle.write(",".join(values) + "\n")
Write a line of CSV . POOR ; does not escape things properly . DEPRECATED .
53,179
def get_what_follows_raw(s: str,
                         prefix: str,
                         onlyatstart: bool = True,
                         stripwhitespace: bool = True) -> Tuple[bool, str]:
    """
    Finds the part of ``s`` that follows ``prefix``.

    Args:
        s: string to search
        prefix: prefix to look for
        onlyatstart: only accept the prefix at the very start of ``s``
        stripwhitespace: strip leading/trailing whitespace from the result

    Returns:
        tuple ``(found, remainder)``; ``(False, "")`` if not found
    """
    if onlyatstart:
        if not s.startswith(prefix):
            return False, ""
        start = 0
    else:
        start = s.find(prefix)
        if start == -1:
            return False, ""
    remainder = s[start + len(prefix):]
    if stripwhitespace:
        remainder = remainder.strip()
    return True, remainder
Find the part of s that is after prefix .
53,180
def get_bool_raw(s: str) -> Optional[bool]:
    """
    Maps ``"Y"``/``"y"`` to ``True``, ``"N"``/``"n"`` to ``False``, and
    anything else to ``None``.
    """
    if s in ("Y", "y"):
        return True
    if s in ("N", "n"):
        return False
    return None
Maps Y y to True and N n to False .
53,181
def find_line_beginning(strings: Sequence[str],
                        linestart: Optional[str]) -> int:
    """
    Finds the index of the first line in ``strings`` that begins with
    ``linestart``, or -1 if none does.

    If ``linestart`` is ``None``, finds the first empty line instead (as
    judged by ``is_empty_string``).
    """
    if linestart is None:
        for index, line in enumerate(strings):
            if is_empty_string(line):
                return index
        return -1
    for index, line in enumerate(strings):
        if line.startswith(linestart):
            return index
    return -1
Finds the index of the line in strings that begins with linestart or - 1 if none is found .
53,182
def find_line_containing(strings: Sequence[str], contents: str) -> int:
    """
    Finds the index of the first line in ``strings`` that contains
    ``contents``, or -1 if none does.
    """
    for index, line in enumerate(strings):
        if contents in line:
            return index
    return -1
Finds the index of the line in strings that contains contents or - 1 if none is found .
53,183
def django_cache_function(timeout: int = 5 * 60,
                          cache_key: str = '',
                          debug_cache: bool = False):
    """
    Decorator to add caching to a function in Django, using the Django
    default cache.

    Args:
        timeout: cache timeout in seconds
        cache_key: fixed cache key to use; if blank, a key is derived from
            the decorated function's call signature
        debug_cache: log cache hits/misses?

    When the key is derived from the call signature, the signature is stored
    alongside the cached value so that a hash collision can be detected; on
    collision the function is re-executed and the cache overwritten.

    IMPROVEMENT: applies ``functools.wraps`` so the wrapper preserves the
    decorated function's ``__name__``, ``__doc__``, etc.
    """
    from functools import wraps
    cache_key = cache_key or None
    def decorator(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            if cache_key:
                # Fixed key: no need to store/check the call signature.
                call_sig = ''
                _cache_key = cache_key
                check_stored_call_sig = False
            else:
                # Derived key: store the signature to detect collisions.
                call_sig = get_call_signature(fn, args, kwargs)
                _cache_key = make_cache_key(call_sig)
                check_stored_call_sig = True
            if debug_cache:
                log.critical("Checking cache for key: " + _cache_key)
            cache_result_tuple = cache.get(_cache_key)
            if cache_result_tuple is None:
                if debug_cache:
                    log.debug("Cache miss")
            else:
                if debug_cache:
                    log.debug("Cache hit")
                cached_call_sig, func_result = cache_result_tuple
                if (not check_stored_call_sig) or cached_call_sig == call_sig:
                    return func_result
                log.warning(
                    "... Cache hit was due to hash collision; "
                    "cached_call_sig {} != call_sig {}".format(
                        repr(cached_call_sig), repr(call_sig)))
            # Cache miss (or collision): call the function and cache result.
            func_result = fn(*args, **kwargs)
            cache_result_tuple = (call_sig, func_result)
            cache.set(key=_cache_key, value=cache_result_tuple,
                      timeout=timeout)
            return func_result
        return wrapper
    return decorator
Decorator to add caching to a function in Django . Uses the Django default cache .
53,184
def transform(v1, v2):
    """
    Create a 3x3 rotation matrix ``R`` such that ``R @ v1`` is parallel to
    ``v2`` (rotation about the axis perpendicular to both, by the angle
    between them).

    BUGFIX: the previous implementation returned ``N.exp(A * theta)``,
    which is the *element-wise* exponential of the skew-symmetric
    cross-product matrix, not the matrix exponential needed to build a
    rotation. The Rodrigues rotation formula,
    ``R = I + sin(theta) A + (1 - cos(theta)) A^2``,
    is the closed form of that matrix exponential and is used instead.

    Note: undefined (division by zero when normalizing the axis) if ``v1``
    and ``v2`` are parallel, since the rotation axis is then ambiguous
    (true of the original code too).
    """
    # Angle between the vectors (cosine clipped against float rounding).
    cos_theta = N.clip(
        N.dot(v1, v2) / (N.linalg.norm(v1) * N.linalg.norm(v2)), -1.0, 1.0)
    theta = N.arccos(cos_theta)
    # Unit rotation axis, perpendicular to both vectors.
    axis = N.cross(v1, v2)
    axis = axis / N.linalg.norm(axis)
    # Skew-symmetric cross-product matrix of the axis.
    A = N.array([[0, -axis[2], axis[1]],
                 [axis[2], 0, -axis[0]],
                 [-axis[1], axis[0], 0]])
    # Rodrigues rotation formula (matrix exponential of A * theta).
    R = N.identity(3) + N.sin(theta) * A + (1 - N.cos(theta)) * N.dot(A, A)
    return R
Create an affine transformation matrix that maps vector 1 onto vector 2
53,185
def x_11paths_authorization(app_id, secret, context, utc=None):
    """
    Calculate the value for an 11Paths-style Authorization header to be sent
    with a request to the API.

    Args:
        app_id: application ID issued by the API provider
        secret: shared secret used to sign the request
        context: request context, providing ``method``, ``url_path``,
            ``query_params``, ``body_params``, ``headers`` and ``renderer``
        utc: UTC date string to sign; defaults to the request's
            X-11Paths-Date header value

    Returns:
        the Authorization header value: method, app ID and HMAC signature,
        joined by the standard field separator
    """
    utc = utc or context.headers[X_11PATHS_DATE_HEADER_NAME]
    url_path = ensure_url_path_starts_with_slash(context.url_path)
    url_path_query = url_path
    if context.query_params:
        # Query parameters are signed sorted, appended to the path.
        url_path_query += "?%s" % (url_encode(context.query_params,
                                              sort=True))
    # String to sign: method, date, serialized X-11Paths headers and
    # path+query, newline-separated.
    string_to_sign = (context.method.upper().strip() + "\n" +
                      utc + "\n" +
                      _get_11paths_serialized_headers(context.headers) +
                      "\n" +
                      url_path_query.strip())
    if context.body_params and isinstance(context.renderer, FormRenderer):
        # Form-encoded bodies are included in the signature.
        # NOTE(review): the "&" separators are stripped out entirely here --
        # presumably as required by the 11Paths signing scheme; confirm
        # against the API specification.
        string_to_sign = string_to_sign + "\n" + \
            url_encode(context.body_params, sort=True).replace("&", "")
    authorization_header_value = (AUTHORIZATION_METHOD +
                                  AUTHORIZATION_HEADER_FIELD_SEPARATOR +
                                  app_id +
                                  AUTHORIZATION_HEADER_FIELD_SEPARATOR +
                                  _sign_data(secret, string_to_sign))
    return authorization_header_value
Calculate the authentication headers to be sent with a request to the API .
53,186
def _sign_data ( secret , data ) : sha1_hash = hmac . new ( secret . encode ( ) , data . encode ( ) , sha1 ) return binascii . b2a_base64 ( sha1_hash . digest ( ) ) [ : - 1 ] . decode ( 'utf8' )
Sign data .
53,187
def _get_11paths_serialized_headers(x_headers):
    """
    Prepares and returns a string, ready to be signed, from the
    11Paths-specific HTTP headers received.

    Args:
        x_headers: the HTTP headers (mapping or key/value sequence)

    Returns:
        the headers whose lowercased names start with the X-11Paths prefix
        (excluding the date header), serialized as space-separated
        name/value pairs, sorted by name; "" if there are none
    """
    if x_headers:
        headers = to_key_val_list(x_headers, sort=True, insensitive=True)
        serialized_headers = ""
        for key, value in headers:
            if key.lower().startswith(X_11PATHS_HEADER_PREFIX.lower()) and \
                    key.lower() != X_11PATHS_DATE_HEADER_NAME.lower():
                # Newlines would break the line-oriented signing format, so
                # they are replaced with spaces.
                serialized_headers += (key.lower() +
                                       X_11PATHS_HEADER_SEPARATOR +
                                       value.replace("\n", " ") + " ")
        return serialized_headers.strip()
    else:
        return ""
Prepares and returns a string ready to be signed from the 11 - paths specific HTTP headers received .
53,188
def rotate_2D(angle):
    """
    Returns the 2x2 matrix rotating by ``angle`` (radians,
    counterclockwise) in two dimensions.
    """
    c = N.cos(angle)
    s = N.sin(angle)
    return N.array([[c, -s],
                    [s, c]])
Returns a 2x2 transformation matrix to rotate by an angle in two dimensions
53,189
def apparent_dip_correction(axes):
    """
    Produces a two-dimensional rotation matrix that rotates a projected
    dataset to correct for apparent dip.

    Args:
        axes: sequence of vectors; only ``axes[0]`` is used here

    Returns:
        a 2x2 rotation matrix (the identity if ``axes[0]`` is already
        effectively horizontal)
    """
    # Cosine of the angle between axes[0] and its horizontal projection
    # (i.e. itself with the last component zeroed).
    a1 = axes[0].copy()
    a1[-1] = 0
    cosa = angle(axes[0], a1, cos=True)
    _ = 1 - cosa ** 2  # sin^2 of the same angle
    if _ > 1e-12:
        # Recover sin via the Pythagorean identity; flip its sign for
        # obtuse angles so the rotation goes the right way.
        sina = N.sqrt(_)
        if cosa < 0:
            sina *= -1
        # NOTE(review): this is the transpose of the usual counterclockwise
        # rotation convention -- presumably intentional for this projection;
        # confirm.
        R = N.array([[cosa, sina],
                     [-sina, cosa]])
    else:
        # Angle is effectively zero: no correction needed.
        R = N.identity(2)
    return R
Produces a two - dimensional rotation matrix that rotates a projected dataset to correct for apparent dip
53,190
def hyperbolic_errors(hyp_axes, xvals,
                      transformation=None, axes=None, means=None,
                      correct_apparent_dip=True, reverse=False):
    """
    Computes a view of a hyperbolic error bound from a specific direction.

    Args:
        hyp_axes: the hyperbola's axis lengths (2 or 3 values)
        xvals: x coordinates at which to evaluate the bounds
        transformation: 3x3 transformation applied to the (3D) hyperboloid
            before slicing; identity if None
        axes: pair of vectors defining the viewing/slicing plane
        means: 2-vector offset added to the results; origin if None
        correct_apparent_dip: NOTE(review): this parameter is never used in
            the body -- TODO confirm whether the apparent-dip correction
            below was meant to be conditional on it
        reverse: transpose (i.e. invert) the apparent-dip rotation

    Returns:
        tuple ``(nom, b, t)``: the nominal line and the two error-bound
        curves, each as a 2xN coordinate array
    """
    if means is None:
        means = N.array([0, 0])
    # Build the conic from the hyperbolic axes, in dual form.
    arr = augment_tensor(N.diag(hyp_axes))
    hyp = conic(arr).dual()
    if len(hyp_axes) == 3:
        # 3D case: transform the hyperboloid, then slice it with the
        # viewing plane to obtain a 2D hyperbola.
        if transformation is None:
            transformation = N.identity(3)
        if axes is None:
            axes = N.array([[0, 1, 0], [0, 0, 1]])
        hyp = hyp.transform(augment_tensor(transformation))
        n_ = N.cross(axes[0], axes[1])  # normal to the viewing plane
        p = plane(n_)
        h1 = hyp.slice(p, axes=axes)[0]
    else:
        h1 = hyp
    A = N.sqrt(h1.semiaxes())
    # Evaluate the hyperbola y(x).
    yvals = A[1] * N.cosh(N.arcsinh(xvals / A[0]))
    vals = N.array([xvals, yvals]).transpose()
    nom = N.array([xvals, N.zeros(xvals.shape)]).transpose()
    # Rotate into the apparent-dip-corrected frame and shift by the means.
    ax1 = apparent_dip_correction(axes)
    if reverse:
        ax1 = ax1.T
    t = dot(vals, ax1).T + means[:, N.newaxis]
    # Mirror the y values for the opposite error bound.
    vals[:, -1] *= -1
    b = dot(vals, ax1).T + means[:, N.newaxis]
    nom = dot(nom, ax1).T + means[:, N.newaxis]
    return nom, b, t[:, ::-1]
Returns coordinate arrays (the nominal line and the error bounds) giving a view of the hyperbolic error ellipse from a specific direction.
53,191
def set_metadata(self, metadata: MetaData) -> None:
    """
    Sets the SQLAlchemy metadata on both the parent and the child table.
    """
    for table_maker in (self._parent, self._child):
        table_maker.set_metadata(metadata)
Sets the metadata for the parent and child tables .
53,192
def set_metadata_if_none(self, metadata: MetaData) -> None:
    """
    Sets the SQLAlchemy metadata on the parent and child tables, except for
    any that had their metadata set already.
    """
    for table_maker in (self._parent, self._child):
        table_maker.set_metadata_if_none(metadata)
Sets the metadata for the parent and child tables unless they were set already .
53,193
def description(self) -> str:
    """
    Short human-readable description of this table's role (parent/child/
    standalone, plus a circularity note if applicable).
    """
    if self.is_parent:
        desc = "parent+child" if self.is_child else "parent"
    else:
        desc = "child" if self.is_child else "standalone"
    if self.circular:
        desc += "+CIRCULAR({})".format(self.circular_description)
    return desc
Short description .
53,194
def spherical(coordinates):
    """
    Converts Cartesian coordinates (components along axis 0) to spherical
    coordinates, returned column-stacked as (r, theta, phi).
    No error is propagated.
    """
    xyz = coordinates
    r = N.linalg.norm(xyz, axis=0)
    theta = N.arccos(xyz[2] / r)       # inclination from the z axis
    phi = N.arctan2(xyz[1], xyz[0])    # azimuth in the x-y plane
    return N.column_stack((r, theta, phi))
Converts Cartesian coordinates to spherical coordinates (r, theta, phi). No error is propagated.
53,195
def centered(coordinates):
    """
    Centers a coordinate distribution about its per-axis mean (subtracting
    the column means). Used as input to the regression model so that it can
    be converted easily into radial coordinates.
    """
    arr = N.array(coordinates)
    return arr - arr.mean(axis=0)
Centers coordinate distribution with respect to its mean on all three axes . This is used as the input to the regression model so it can be converted easily into radial coordinates .
53,196
def add_http_headers_for_attachment(response: HttpResponse,
                                    offered_filename: str = None,
                                    content_type: str = None,
                                    as_attachment: bool = False,
                                    as_inline: bool = False,
                                    content_length: int = None) -> None:
    """
    Adds HTTP headers to a Django response object.

    Args:
        response: the response to modify
        offered_filename: filename for the Content-Disposition header
        content_type: Content-Type header; defaults to a type that forces
            a download
        as_attachment: mark the Content-Disposition as "attachment"
        as_inline: mark the Content-Disposition as "inline" (ignored if
            ``as_attachment`` is set)
        content_length: value for the Content-Length header, if known
    """
    offered_filename = '' if offered_filename is None else offered_filename
    if content_type is None:
        content_type = 'application/force-download'
    response['Content-Type'] = content_type
    if as_attachment:
        disposition_prefix = 'attachment; '
    elif as_inline:
        disposition_prefix = 'inline; '
    else:
        disposition_prefix = ''
    response['Content-Disposition'] = disposition_prefix + (
        'filename=%s' % smart_str(offered_filename))
    if content_length is not None:
        response['Content-Length'] = content_length
Add HTTP headers to a Django response class object .
53,197
def add_download_filename(response: HttpResponse, filename: str) -> None:
    """
    Adds a Content-Disposition header to the HTTP response saying that
    there is an attachment with the specified filename (after setting
    default attachment headers).
    """
    add_http_headers_for_attachment(response)
    disposition = 'attachment; filename="{}"'.format(filename)
    response['Content-Disposition'] = disposition
Adds a Content - Disposition header to the HTTP response to say that there is an attachment with the specified filename .
53,198
def file_response(data: Union[bytes, str],
                  content_type: str,
                  filename: str) -> HttpResponse:
    """
    Returns an HttpResponse serving ``data`` as a downloadable attachment
    with the specified content type and filename.
    """
    response = HttpResponse(data, content_type=content_type)
    add_download_filename(response, filename)
    return response
Returns an HttpResponse with an attachment containing the specified data with the specified filename as an attachment .
53,199
def serve_concatenated_pdf_from_disk(
        filenames: Iterable[str],
        offered_filename: str = "crate_download.pdf",
        **kwargs) -> HttpResponse:
    """
    Concatenates the given PDF files from disk and serves the result inline
    under ``offered_filename``. Extra keyword arguments are passed to the
    PDF concatenation routine.
    """
    concatenated = get_concatenated_pdf_from_disk(filenames, **kwargs)
    return serve_buffer(concatenated,
                        offered_filename=offered_filename,
                        content_type=MimeType.PDF,
                        as_attachment=False,
                        as_inline=True)
Concatenates PDFs from disk and serves them .