idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,000
def validate_scan_result(results):
    """Raise ScanError if an Elasticsearch scan/scroll response reports failed shards."""
    shards = results[EsConst.SHARDS]
    failed = shards[EsConst.FAILED]
    if failed and failed > 0:
        raise ScanError(
            'Scroll request has failed on %d shards out of %d.'
            % (failed, shards[EsConst.TOTAL])
        )
Check if there is a failed shard in the scan query.
53,001
def choice_explanation(value: str, choices: Iterable[Tuple[str, str]]) -> str:
    """Return the label paired with *value* in a Django-style choices list, or ''."""
    return next((label for key, label in choices if key == value), '')
Returns the explanation associated with a Django choice tuple - list .
53,002
def tee_log(tee_file: TextIO, loglevel: int) -> Iterator[None]:
    """
    Generator-based context manager body that adds a file output stream to the
    logging system and tees stdout/stderr to it, logging any escaping exception.

    BUGFIX: the return annotation was ``None``, but this function contains
    ``yield`` and is therefore a generator (intended for use with
    ``contextlib.contextmanager``); ``Iterator[None]`` is the correct type.
    """
    handler = get_monochrome_handler(stream=tee_file)
    handler.setLevel(loglevel)
    rootlogger = logging.getLogger()
    rootlogger.addHandler(handler)
    with TeeContextManager(tee_file, capture_stdout=True):
        with TeeContextManager(tee_file, capture_stderr=True):
            try:
                yield
            except Exception:
                # Record the full traceback before re-raising, so it reaches
                # the tee'd file as well as normal handlers.
                exc_type, exc_value, exc_traceback = sys.exc_info()
                lines = traceback.format_exception(exc_type, exc_value,
                                                   exc_traceback)
                log.critical("\n" + "".join(lines))
                raise
Context manager to add a file output stream to our logging system .
53,003
def bootstrap(array, func=None, n_boot=1000, seed=None):
    """
    Bootstrap-resample *array* (rows, with replacement) and apply *func*
    to each resample, to estimate the variance of a statistic.

    BUGFIX: the original called itself recursively (infinite recursion) and
    returned the undefined names ``yhat``/``yhat_boots``. It now returns
    ``(estimate, boot_estimates)``: *func* applied to the full array, plus a
    numpy array of the per-resample statistics.

    Args:
        array: 2-D data array; rows are resampled.
        func: statistic to compute per resample; defaults to the third
            right-singular-vector component (the regression statistic the
            original code defined).
        n_boot: number of bootstrap resamples.
        seed: optional RNG seed for reproducibility.
    """
    if func is None:
        func = lambda a: N.linalg.svd(a, full_matrices=False)[2][2]
    rng = N.random.RandomState(seed)
    data = N.asarray(array)
    n = data.shape[0]
    estimate = func(data)
    boot_estimates = N.array(
        [func(data[rng.randint(0, n, n)]) for _ in range(n_boot)]
    )
    return estimate, boot_estimates
Provides a bootstrap resampling of an array . Provides another statistical method to estimate the variance of a dataset .
53,004
def rename_key(d: Dict[str, Any], old: str, new: str) -> None:
    """In place, move the value stored under *old* to key *new* (raises KeyError if absent)."""
    value = d.pop(old)
    d[new] = value
Rename a key in dictionary d from old to new in place .
53,005
def rename_keys(d: Dict[str, Any], mapping: Dict[str, str]) -> Dict[str, Any]:
    """Return a copy of *d* with keys renamed per *mapping*; unmapped keys are kept."""
    return {mapping.get(key, key): value for key, value in d.items()}
Returns a copy of the dictionary d with its keys renamed according to mapping .
53,006
def prefix_dict_keys(d: Dict[str, Any], prefix: str) -> Dict[str, Any]:
    """Return a copy of *d* with *prefix* prepended to every key."""
    return {prefix + key: value for key, value in d.items()}
Returns a dictionary that is a copy of d but with prefix prepended to its keys.
53,007
def reversedict(d: Dict[Any, Any]) -> Dict[Any, Any]:
    """Invert a k -> v mapping into a v -> k mapping (later duplicates win)."""
    inverted = {}
    for key, value in d.items():
        inverted[value] = key
    return inverted
Takes a k - > v mapping and returns a v - > k mapping .
53,008
def map_keys_to_values(l: List[Any], d: Dict[Any, Any], default: Any = None,
                       raise_if_missing: bool = False,
                       omit_if_missing: bool = False) -> List[Any]:
    """
    Map each key in *l* through the mapping *d*.

    Missing keys either raise ValueError (raise_if_missing), are skipped
    (omit_if_missing), or yield *default*.
    """
    mapped = []
    for key in l:
        if key not in d:
            if raise_if_missing:
                raise ValueError("Missing key: " + repr(key))
            if omit_if_missing:
                continue
        mapped.append(d.get(key, default))
    return mapped
The d dictionary contains a key - > value mapping .
53,009
def dict_diff(d1: Dict[Any, Any], d2: Dict[Any, Any],
              deleted_value: Any = None) -> Dict[Any, Any]:
    """
    Return the changes needed to turn *d1* into *d2*: added/changed entries
    from d2, plus keys removed from d1 mapped to *deleted_value*.
    """
    changes = {}
    for key, value in d2.items():
        if key not in d1 or d1[key] != value:
            changes[key] = value
    for key in d1:
        if key not in d2:
            changes[key] = deleted_value
    return changes
Returns a representation of the changes that need to be made to d1 to create d2 .
53,010
def delete_keys(d: Dict[Any, Any], keys_to_delete: List[Any],
                keys_to_keep: List[Any]) -> None:
    """In place, delete each key in *keys_to_delete* from *d* unless it is in *keys_to_keep*."""
    for key in keys_to_delete:
        if key not in keys_to_keep and key in d:
            del d[key]
Deletes keys from a dictionary in place .
53,011
def reset(self) -> None:
    """Clear all timing state and restart the overall clock."""
    self._overallstart = get_now_utc_pendulum()
    for collection in (self._starttimes, self._totaldurations,
                       self._count, self._stack):
        collection.clear()
Reset the timers .
53,012
def set_timing(self, timing: bool, reset: bool = False) -> None:
    """Enable or disable timing; optionally clear accumulated timers too."""
    self._timing = timing
    if not reset:
        return
    self.reset()
Manually set the timing parameter and optionally reset the timers .
53,013
def start(self, name: str, increment_count: bool = True) -> None:
    """
    Start (push) the named timer, pausing whichever timer is currently on
    top of the stack.

    Args:
        name: timer name.
        increment_count: whether this start counts as a new occurrence
            (affects the mean reported later).
    """
    if not self._timing:
        return
    now = get_now_utc_pendulum()
    # Pause the currently running timer, banking its elapsed time so far.
    if self._stack:
        last = self._stack[-1]
        self._totaldurations[last] += now - self._starttimes[last]
    # First use of this name: initialize its accumulators.
    if name not in self._starttimes:
        self._totaldurations[name] = datetime.timedelta()
        self._count[name] = 0
    self._starttimes[name] = now
    if increment_count:
        self._count[name] += 1
    self._stack.append(name)
Start a named timer .
53,014
def stop(self, name: str) -> None:
    """
    Stop (pop) the named timer, which must be on top of the stack, and
    resume the timer underneath it (if any).

    Raises:
        AssertionError: if nothing is running, or *name* is not the
            currently running (topmost) timer.
    """
    if not self._timing:
        return
    now = get_now_utc_pendulum()
    if not self._stack:
        raise AssertionError("MultiTimer.stop() when nothing running")
    if self._stack[-1] != name:
        raise AssertionError(
            "MultiTimer.stop({}) when {} is running".format(
                repr(name), repr(self._stack[-1])))
    self._totaldurations[name] += now - self._starttimes[name]
    self._stack.pop()
    # Resume the timer that was paused when this one started.
    if self._stack:
        last = self._stack[-1]
        self._starttimes[last] = now
Stop a named timer .
53,015
def report(self) -> None:
    """
    Stop any running timers and log a summary of all timings, sorted by
    total time descending, plus unmetered and total time.
    """
    # Unwind the stack so every timer's duration is finalized.
    while self._stack:
        self.stop(self._stack[-1])
    now = get_now_utc_pendulum()
    grand_total = datetime.timedelta()
    overall_duration = now - self._overallstart
    for name, duration in self._totaldurations.items():
        grand_total += duration
    log.info("Timing summary:")
    summaries = []
    for name, duration in self._totaldurations.items():
        n = self._count[name]
        total_sec = duration.total_seconds()
        # NOTE(review): if n == 0 (start() with increment_count=False only),
        # mean is None and the {:.3f} format below would raise — confirm
        # that n > 0 always holds here.
        mean = total_sec / n if n > 0 else None
        summaries.append({
            'total': total_sec,
            'description': (
                "- {}: {:.3f} s ({:.2f}%, n={}, mean={:.3f}s)".format(
                    name, total_sec,
                    (100 * total_sec / grand_total.total_seconds()),
                    n, mean)),
        })
    summaries.sort(key=lambda x: x['total'], reverse=True)
    for s in summaries:
        log.info(s["description"])
    if not self._totaldurations:
        log.info("<no timings recorded>")
    unmetered = overall_duration - grand_total
    # Brace-style logging (lazy args), consistent with the rest of this file.
    log.info("Unmetered time: {:.3f} s ({:.2f}%)",
             unmetered.total_seconds(),
             100 * unmetered.total_seconds() / overall_duration.total_seconds())
    log.info("Total time: {:.3f} s", grand_total.total_seconds())
Finish and report to the log .
53,016
def girdle_error(ax, fit, **kwargs):
    """
    Plot an attitude measurement's girdle error bounds on an mplstereonet
    axes object, as a single patch built from the upper and lower error
    sheets of the fitted plane.
    """
    vertices = []
    codes = []
    for sheet in ('upper', 'lower'):
        err = plane_errors(fit.axes, fit.covariance_matrix, sheet=sheet)
        lonlat = N.array(err)
        # NOTE(review): sign flip — presumably a hemisphere/orientation
        # convention; confirm against plane_errors.
        lonlat *= -1
        n = len(lonlat)
        if sheet == 'lower':
            # Reverse the lower sheet so the two sheets join into one ring.
            lonlat = lonlat[::-1]
        vertices += list(lonlat)
        codes.append(Path.MOVETO)
        codes += [Path.LINETO] * (n - 1)
    plot_patch(ax, vertices, codes, **kwargs)
Plot an attitude measurement on an mplstereonet axes object .
53,017
def pole_error(ax, fit, **kwargs):
    """
    Plot the error ellipse of the pole to a plane on an mplstereonet axes
    object, as a single closed path.
    """
    ell = normal_errors(fit.axes, fit.covariance_matrix)
    # NOTE(review): negation presumably matches the hemisphere convention
    # used by girdle_error — confirm.
    lonlat = -N.array(ell)
    n = len(lonlat)
    codes = [Path.MOVETO]
    codes += [Path.LINETO] * (n - 1)
    vertices = list(lonlat)
    plot_patch(ax, vertices, codes, **kwargs)
Plot the error to the pole to a plane on a mplstereonet axis object .
53,018
def ping(hostname: str, timeout_s: int = 5) -> bool:
    """
    Ping a host once using OS tools; return True if it responded.

    Args:
        hostname: host to ping.
        timeout_s: timeout in seconds.

    Raises:
        AssertionError: on unsupported platforms (only Windows/Linux known).
    """
    if sys.platform == "win32":
        timeout_ms = timeout_s * 1000  # Windows ping takes milliseconds
        args = ["ping", hostname, "-n", "1", "-w", str(timeout_ms)]
    elif sys.platform.startswith('linux'):
        args = ["ping", hostname, "-c", "1", "-w", str(timeout_s)]
    else:
        raise AssertionError("Don't know how to ping on this operating system")
    # subprocess.run waits and collects the return code; output is discarded
    # (the original used Popen + communicate only to consume the pipes).
    completed = subprocess.run(args, stdout=subprocess.DEVNULL,
                               stderr=subprocess.DEVNULL)
    return completed.returncode == 0
Pings a host using OS tools .
53,019
def download(url: str, filename: str, skip_cert_verify: bool = True) -> None:
    """
    Download *url* to *filename*.

    Args:
        url: source URL.
        filename: destination path (written in binary mode).
        skip_cert_verify: if True (the default!), disable SSL certificate
            verification. NOTE(review): insecure default — confirm this is
            deliberate for the intended use.
    """
    log.info("Downloading from {} to {}", url, filename)
    ctx = ssl.create_default_context()
    if skip_cert_verify:
        log.debug("Skipping SSL certificate check for " + url)
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
    with urllib.request.urlopen(url, context=ctx) as u, open(filename, 'wb') as f:
        f.write(u.read())
Downloads a URL to a file .
53,020
def split_string(x: str, n: int) -> List[str]:
    """Split *x* into consecutive chunks of length *n* (last chunk may be shorter)."""
    chunks = []
    for start in range(0, len(x), n):
        chunks.append(x[start:start + n])
    return chunks
Split string into chunks of length n
53,021
def mangle_unicode_to_ascii(s: Any) -> str:
    """Coerce *s* to an ASCII string, dropping accents and other non-ASCII marks."""
    if s is None:
        return ""
    text = s if isinstance(s, str) else str(s)
    decomposed = unicodedata.normalize('NFKD', text)
    return decomposed.encode('ascii', 'ignore').decode('ascii')
Mangle unicode to ASCII losing accents etc . in the process .
53,022
def aligned_residuals(pca):
    """
    Plot residuals of a PCA fit along its two cross-sections (axis 3 vs.
    axes 1 and 2) as stacked regression plots; returns the figure.
    """
    A = pca.rotated()
    fig, axes = P.subplots(2, 1, sharex=True, frameon=False)
    fig.subplots_adjust(hspace=0, wspace=0.1)
    titles = ("Long cross-section (axis 3 vs. axis 1)",
              "Short cross-section (axis 3 vs. axis 2)")
    for title, ax, (a, b) in zip(titles, axes, [(0, 2), (1, 2)]):
        seaborn.regplot(A[:, a], A[:, b], ax=ax)
        ax.text(0, 1, title, verticalalignment='top', transform=ax.transAxes)
        ax.autoscale(tight=True)
        # BUGFIX: dict.itervalues() is Python 2 only and raises
        # AttributeError on Python 3; use .values().
        # (Also removed the unused `kw` and `lengths` locals.)
        for spine in ax.spines.values():
            spine.set_visible(False)
    ax.set_xlabel("Meters")
    return fig
Plots error components along with bootstrap resampled error surface . Provides another statistical method to estimate the variance of a dataset .
53,023
def mapper_init(self):
    """Build the local spatial index of US metro areas and the word extractor."""
    self.lookup = CachedMetroLookup(precision=GEOHASH_PRECISION)
    self.extractor = WordExtractor()
build local spatial index of US metro areas
53,024
def reducer_init_output(self):
    """
    Establish the MongoDB connection; on server-selection timeout, degrade
    gracefully by setting self.mongo to None (best-effort output).
    """
    try:
        self.mongo = MongoGeo(db=DB, collection=COLLECTION,
                              timeout=MONGO_TIMEOUT)
    except ServerSelectionTimeoutError:
        self.mongo = None
establish connection to MongoDB
53,025
def add_intersecting(self, division, intersection=None, symm=True):
    """
    Add (or update) the intersect relationship from this division to
    *division*, storing *intersection*.

    With symm=True the reverse relationship is also created, with
    intersection None and symm=False (to stop the recursion).

    Returns:
        The forward IntersectRelationship.
    """
    # `created` is unused; update_or_create returns a (obj, created) pair.
    relationship, created = IntersectRelationship.objects.update_or_create(
        from_division=self,
        to_division=division,
        defaults={"intersection": intersection},
    )
    if symm:
        division.add_intersecting(self, None, False)
    return relationship
Adds paired relationships between intersecting divisions .
53,026
def remove_intersecting(self, division, symm=True):
    """
    Delete the intersect relationship from this division to *division*;
    with symm=True also delete the reverse relationship.
    """
    IntersectRelationship.objects.filter(
        from_division=self, to_division=division).delete()
    if symm:
        division.remove_intersecting(self, False)  # symm=False stops recursion
Removes paired relationships between intersecting divisions
53,027
def set_intersection(self, division, intersection):
    """
    Set the intersection percentage on the relationship from this division
    to *division* (no-op if no such relationship exists).
    """
    IntersectRelationship.objects.filter(
        from_division=self, to_division=division).update(
        intersection=intersection)
Set intersection percentage of intersecting divisions .
53,028
def get_intersection(self, division):
    """
    Return the intersection percentage of the relationship from this
    division to *division*.

    Raises:
        ValueError: if no intersecting relationship exists.
        (Previously a bare ``Exception`` was raised; ValueError is a
        subclass-compatible, more specific type for callers to catch.)
    """
    try:
        return IntersectRelationship.objects.get(
            from_division=self, to_division=division).intersection
    except ObjectDoesNotExist:
        raise ValueError("No intersecting relationship with that division.")
Get intersection percentage of intersecting divisions .
53,029
def drug_name_to_generic(drug_name: str,
                         unknown_to_default: bool = False,
                         default: str = None,
                         include_categories: bool = False) -> str:
    """
    Convert a drug name to its generic equivalent; unknown drugs map to
    *default* (if unknown_to_default) or are returned unchanged.
    """
    drug = get_drug(drug_name, include_categories=include_categories)
    if drug is None:
        return default if unknown_to_default else drug_name
    return drug.generic_name
Converts a drug name to the name of its generic equivalent .
53,030
def drug_names_to_generic(drugs: List[str],
                          unknown_to_default: bool = False,
                          default: str = None,
                          include_categories: bool = False) -> List[str]:
    """Convert each drug name in *drugs* to its generic equivalent."""
    converted = []
    for drug in drugs:
        converted.append(drug_name_to_generic(
            drug,
            unknown_to_default=unknown_to_default,
            default=default,
            include_categories=include_categories))
    return converted
Converts a list of drug names to their generic equivalents .
53,031
def regex(self) -> Pattern:
    """Lazily compile (and cache) the case-insensitive, dot-matches-all regex for this drug."""
    if self._regex is None:
        flags = re.IGNORECASE | re.DOTALL
        self._regex = re.compile(self.regex_text, flags)
    return self._regex
Returns a compiled regex for this drug .
53,032
def regex_to_sql_like(regex_text: str,
                      single_wildcard: str = "_",
                      zero_or_more_wildcard: str = "%") -> List[str]:
    """
    Convert simple regex text into approximate SQL LIKE fragments.

    Understands ".*" (-> %), "." (-> _), "[ab|cd]" character alternatives,
    and "x?" optionality; everything else is taken literally. Returns one
    fragment per combination of alternatives, each wrapped in wildcards.
    """

    def collapse_wildcards(text: str) -> str:
        # Merge runs of the zero-or-more wildcard ("%%" -> "%").
        doubled = zero_or_more_wildcard * 2
        while doubled in text:
            text = text.replace(doubled, zero_or_more_wildcard)
        return text

    fragments = [zero_or_more_wildcard]
    remaining = regex_text
    while remaining:
        if remaining.startswith(".*"):
            fragments = [f + zero_or_more_wildcard for f in fragments]
            remaining = remaining[2:]
        elif remaining.startswith("["):
            end = remaining.index("]")
            chars = [c for group in remaining[1:end].split("|") for c in group]
            # Option-major order: all fragments for the first option first.
            fragments = [f + c for c in chars for f in fragments]
            remaining = remaining[end + 1:]
        elif len(remaining) > 1 and remaining[1] == "?":
            fragments = [f + option
                         for option in ("", remaining[0])
                         for f in fragments]
            remaining = remaining[2:]
        elif remaining.startswith("."):
            fragments = [f + single_wildcard for f in fragments]
            remaining = remaining[1:]
        else:
            fragments = [f + remaining[0] for f in fragments]
            remaining = remaining[1:]
    fragments = [f + zero_or_more_wildcard for f in fragments]
    return [collapse_wildcards(f) for f in fragments]
Converts regular expression text to a reasonably close fragment for the SQL LIKE operator .
53,033
def sql_like_fragments(self) -> List[str]:
    """
    Lazily build (and cache) the SQL LIKE string literals matching this
    drug, from every distinct generic name and alternative.
    """
    if self._sql_like_fragments is None:
        fragments = []
        for pattern in list(set(self.all_generics + self.alternatives)):
            fragments.extend(self.regex_to_sql_like(pattern))
        self._sql_like_fragments = fragments
    return self._sql_like_fragments
Returns all the string literals to which a database column should be compared using the SQL LIKE operator to match this drug .
53,034
def sql_column_like_drug(self, column_name: str) -> str:
    """Build a parenthesized OR of LIKE clauses matching this drug in *column_name*."""
    clauses = []
    for fragment in self.sql_like_fragments:
        clauses.append("{col} LIKE {fragment}".format(
            col=column_name,
            fragment=sql_string_literal(fragment)))
    return "({})".format(" OR ".join(clauses))
Returns SQL like
53,035
def quaternion(vector, angle):
    """Unit quaternion for *vector* and *angle*: scalar part cos(a/2) plus vector*sin(a/2)."""
    half_angle = angle / 2
    return N.cos(half_angle) + vector * N.sin(half_angle)
Unit quaternion for a vector and an angle
53,036
def ellipse(n=1000, adaptive=False):
    """
    Parameterize the unit circle with *n* points, returned as an (n, 2)
    array of (cos, sin) pairs.

    NOTE: *adaptive* is currently unused.
    """
    theta = N.linspace(0, 2 * N.pi, n)
    return N.column_stack((N.cos(theta), N.sin(theta)))
Get a parameterized set of vectors defining ellipse for a major and minor axis length . Resulting vector bundle has major axes along axes given .
53,037
def scale_errors(cov_axes, confidence_level=0.95):
    """
    Rescale error ellipse/hyperbola axes by the chi-squared statistic at
    *confidence_level*, with degrees of freedom = number of axes.
    """
    degrees_of_freedom = len(cov_axes)
    statistic = chi2.ppf(confidence_level, degrees_of_freedom)
    return N.sqrt(statistic * cov_axes)
Returns major axes of error ellipse or hyperbola rescaled using chi2 test statistic
53,038
def normal_errors(axes, covariance_matrix, **kwargs):
    """
    Error ellipse of the pole (normal) to a plane, returned as a list of
    (lon, lat) pairs. Currently assumes the upper hemisphere of a stereonet.

    Remaining kwargs are forwarded to ellipse().
    """
    # NOTE(review): `level` is popped but never used below — confirm intent.
    level = kwargs.pop('level', 1)
    traditional_layout = kwargs.pop('traditional_layout', True)
    d = N.diagonal(covariance_matrix)
    ell = ellipse(**kwargs)
    if axes[2, 2] < 0:
        axes *= -1  # flip so the normal points into the upper hemisphere
    c1 = 2
    axis_lengths = d[:2]
    f = N.linalg.norm(ell * axis_lengths, axis=1)
    e0 = -ell.T * d[2] * c1
    e = N.vstack((e0, f))
    _ = dot(e.T, axes).T
    # Axis ordering differs between the two stereonet layout conventions.
    if traditional_layout:
        lon, lat = stereonet_math.cart2sph(_[2], _[0], -_[1])
    else:
        lon, lat = stereonet_math.cart2sph(-_[1], _[0], _[2])
    return list(zip(lon, lat))
Currently assumes upper hemisphere of stereonet
53,039
def iterative_plane_errors(axes, covariance_matrix, **kwargs):
    """
    Iterative version of pca.plane_errors: compute an error surface for a
    plane point-by-point, returning an (n, 2) array of (lon, lat) values.

    Kwargs: sheet ('upper'/'lower'/'nominal'), level (unused below —
    NOTE(review): confirm), n (number of sample points).
    """
    sheet = kwargs.pop('sheet', 'upper')
    level = kwargs.pop('level', 1)
    n = kwargs.pop('n', 100)
    cov = N.sqrt(N.diagonal(covariance_matrix))
    u = N.linspace(0, 2 * N.pi, n)
    scales = dict(upper=1, lower=-1, nominal=0)
    c1 = scales[sheet]
    # NOTE(review): double sign flip — presumably matches the convention in
    # pca.plane_errors; confirm.
    c1 *= -1
    if axes[2, 2] < 0:
        c1 *= -1

    def sdot(a, b):
        # Manual dot product of two sequences.
        return sum([i * j for i, j in zip(a, b)])

    def step_func(a):
        # Error vector at angular position `a`, rotated into data
        # coordinates and converted to spherical (lon, lat).
        e = [N.cos(a) * cov[0], N.sin(a) * cov[1], c1 * cov[2]]
        d = [sdot(e, i) for i in axes.T]
        x, y, z = d[2], d[0], d[1]
        r = N.sqrt(x ** 2 + y ** 2 + z ** 2)
        lat = N.arcsin(z / r)
        lon = N.arctan2(y, x)
        return lon, lat

    return N.array([step_func(i) for i in u])
An iterative version of pca . plane_errors which computes an error surface for a plane .
53,040
def to_topojson(self):
    """Embed this division's points into its topojson and serialize to a JSON string."""
    topo = self.topojson
    geometries = [point.to_topojson() for point in self.points.all()]
    topo["objects"]["points"] = {
        "type": "GeometryCollection",
        "geometries": geometries,
    }
    return json.dumps(topo)
Adds points and converts to topojson string .
53,041
def urlsplit(url):
    """
    Split *url* into (scheme, host, port, path, query): scheme/host/port
    via a regex, path/query via the stdlib splitter.
    """
    pattern = '((?P<scheme>.*)?.*://)?(?P<host>[^:/ ]+).?(?P<port>[0-9]*).*'
    match = re.search(pattern, url)
    _scheme, _netloc, path, query, _fragment = tuple(py_urlsplit(url))
    return (match.group('scheme'), match.group('host'),
            match.group('port'), path, query)
Split url into scheme host port path query
53,042
def generate_url(scheme=None, host=None, port=None, path=None, query=None):
    """Assemble a URL from its parts; any part may be None and is then omitted."""
    pieces = []
    if scheme is not None:
        pieces.append("%s://" % scheme)
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(":%s" % str(port))
    if path is not None:
        pieces.append(ensure_url_path_starts_with_slash(path))
    if query is not None:
        pieces.append("?%s" % (urlencode(query)))
    return "".join(pieces)
Generate URI from parameters .
53,043
def run(self):
    """
    For each state listed in the states file: build its URL, download the
    OSM extract, convert it to POI data, and store the result on S3.
    """
    states = open(self.states, 'r').read().splitlines()
    for state in states:
        url = self.build_url(state)
        log = "Downloading State < {0} > from < {1} >"
        logging.info(log.format(state, url))
        tmp = self.download(self.output, url, self.overwrite)
        # Extract POIs from the downloaded file and upload to S3.
        self.s3.store(self.extract(tmp, self.tmp2poi(tmp)))
For each state in states file build url and download file
53,044
def extract(self, pbf, output):
    """
    Extract POI nodes from an OSM PBF extract into newline-delimited JSON
    at *output*; returns the output path.
    """
    logging.info("Extracting POI nodes from {0} to {1}".format(pbf, output))
    with open(output, 'w') as f:
        def nodes_callback(nodes):
            # Keep only nodes carrying at least one recognized POI tag.
            for node in nodes:
                node_id, tags, coordinates = node
                if any([t in tags for t in POI_TAGS]):
                    f.write(json.dumps(dict(tags=tags,
                                            coordinates=coordinates)))
                    f.write('\n')
        parser = OSMParser(concurrency=4, nodes_callback=nodes_callback)
        parser.parse(pbf)
    return output
extract POI nodes from osm pbf extract
53,045
def url2tmp(self, root, url):
    """Map *url* to a local path under *root*, using its final path segment."""
    basename = url.rsplit('/', 1)[-1]
    return os.path.join(root, basename)
convert url path to filename
53,046
def ask_user(prompt: str, default: str = None) -> Optional[str]:
    """Prompt on stdin, showing *default* in brackets; empty input returns the default."""
    suffix = ": " if default is None else " [" + default + "]: "
    answer = input(prompt + suffix)
    return answer if len(answer) > 0 else default
Prompts the user with a default . Returns user input from stdin .
53,047
def positive_int(value: str) -> int:
    """
    argparse argument type enforcing a positive integer.

    BUGFIX: the check previously relied on ``assert``, which is stripped
    when Python runs with -O; it now raises explicitly.

    Raises:
        ArgumentTypeError: if *value* is not a positive integer.
    """
    try:
        ivalue = int(value)
    except (TypeError, ValueError):
        raise ArgumentTypeError(
            "{!r} is an invalid positive int".format(value))
    if ivalue <= 0:
        raise ArgumentTypeError(
            "{!r} is an invalid positive int".format(value))
    return ivalue
argparse argument type that checks that its value is a positive integer .
53,048
def abort_request(self, request):
    """Mark this checker as timed out and cancel *request*, tolerating prior cancellation."""
    self.timedout = True
    try:
        request.cancel()
    except error.AlreadyCancelled:
        return
Called to abort request on timeout
53,049
def getBody(cls, url, method='GET', headers=None, data=None, socket=None,
            timeout=120):
    """
    Make an HTTP request and return (a deferred for) the body.

    BUGFIX: *headers* previously defaulted to a mutable ``{}`` that was
    mutated inside the function — a shared-default pitfall; it now defaults
    to None and a fresh dict is created per call.
    """
    if headers is None:
        headers = {}
    if 'User-Agent' not in headers:
        headers['User-Agent'] = ['Tensor HTTP checker']
    return cls().request(url, method, headers, data, socket, timeout)
Make an HTTP request and return the body
53,050
def getJson(cls, url, method='GET', headers=None, data=None, socket=None,
            timeout=120):
    """
    Fetch a JSON document over HTTP and return (a deferred for) the decoded
    object. Generator body for Twisted's inlineCallbacks style.

    BUGFIX: *headers* previously defaulted to a mutable ``{}`` that was
    mutated inside the function — a shared-default pitfall; it now defaults
    to None and a fresh dict is created per call.
    """
    if headers is None:
        headers = {}
    if 'Content-Type' not in headers:
        headers['Content-Type'] = ['application/json']
    body = yield cls().getBody(url, method, headers, data, socket, timeout)
    defer.returnValue(json.loads(body))
Fetch a JSON result via HTTP
53,051
def aligned_covariance(fit, type='noise'):
    """
    Covariance of *fit*, normalized and rotated from PCA space into data
    coordinates via the fit's axes.
    """
    cov = fit._covariance_matrix(type)
    # Rescale so the matrix has unit (Frobenius) norm.
    cov /= N.linalg.norm(cov)
    return dot(fit.axes, cov)
Covariance rescaled so that eigenvectors sum to 1 and rotated into data coordinates from PCA space
53,052
def fit_angle(fit1, fit2, degrees=True):
    """
    Angle between the nominal normal vectors of two fits.

    BUGFIX: the *degrees* flag was previously ignored and the result was
    always converted to degrees; it is now honoured (radians when False).
    """
    a = angle(fit1.normal, fit2.normal)
    return N.degrees(a) if degrees else a
Finds the angle between the nominal vectors
53,053
def fit_similarity(fit1, fit2):
    """
    Distance between the two fits' normal vectors, expressed in standard
    deviations of their combined covariance.
    """
    cov1 = aligned_covariance(fit1)
    cov2 = aligned_covariance(fit2)
    if fit2.axes[2, 2] < 0:
        cov2 *= -1  # align hemisphere conventions before combining
    v0 = fit1.normal - fit2.normal
    cov0 = cov1 + cov2
    # Whiten the difference vector using the combined covariance's SVD.
    U, s, V = N.linalg.svd(cov0)
    rotated = dot(V, v0)
    val = rotated ** 2 / N.sqrt(s)
    return N.sqrt(val.sum())
Distance apart for vectors given in standard deviations
53,054
def png_img_html_from_pyplot_figure(fig: "Figure",
                                    dpi: int = 100,
                                    extra_html_class: str = None) -> str:
    """Render *fig* as PNG and wrap it in an HTML IMG tag ('' for a None figure)."""
    if fig is None:
        return ""
    buffer = io.BytesIO()
    fig.savefig(buffer, format="png", dpi=dpi)
    buffer.seek(0)
    blob = memoryview(buffer.read())
    return rnc_web.get_png_img_html(blob, extra_html_class)
Converts a pyplot figure to an HTML IMG tag with encapsulated PNG .
53,055
def svg_html_from_pyplot_figure(fig: "Figure") -> str:
    """Render *fig* as inline SVG text ('' for a None figure)."""
    if fig is None:
        return ""
    buffer = io.BytesIO()
    fig.savefig(buffer, format="svg")
    return buffer.getvalue().decode("utf-8")
Converts a pyplot figure to an SVG tag .
53,056
def set_matplotlib_fontsize(matplotlib: ModuleType,
                            fontsize: Union[int, float] = 12) -> None:
    """Apply *fontsize* to matplotlib's 'font' and 'legend' rc settings."""
    matplotlib.rc('font',
                  family='sans-serif',
                  style='normal',
                  variant='normal',
                  weight='normal',
                  size=fontsize)
    matplotlib.rc('legend', fontsize=fontsize)
Sets the current font size within the matplotlib library .
53,057
def encrypt_file(src, dest, csv_keys):
    """Encrypt *src* with the comma-separated GPG keys, writing to *dest*."""
    key_list = massage_keys(csv_keys.split(','))
    cryptorito.encrypt(src, dest, key_list)
Encrypt a file with the specific GPG keys and write out to the specified path
53,058
def encrypt_var(csv_keys):
    """Encrypt stdin against the comma-separated GPG keys; print base64 ciphertext."""
    key_list = massage_keys(csv_keys.split(','))
    plaintext = sys.stdin.read()
    ciphertext = cryptorito.encrypt_var(plaintext, key_list)
    print(cryptorito.portable_b64encode(ciphertext))
Encrypt what comes in from stdin and return base64 encrypted against the specified keys returning on stdout
53,059
def import_keybase(useropt):
    """
    Import a public GPG key from Keybase. *useropt* is "username" or
    "username:fingerprint". Exits with status 2 if the key is already
    present, 0 on successful import.
    """
    public_key = None
    u_bits = useropt.split(':')
    username = u_bits[0]
    if len(u_bits) == 1:
        public_key = cryptorito.key_from_keybase(username)
    else:
        fingerprint = u_bits[1]
        public_key = cryptorito.key_from_keybase(username, fingerprint)
    # Already imported? Signal with a distinct exit code.
    if cryptorito.has_gpg_key(public_key['fingerprint']):
        sys.exit(2)
    cryptorito.import_gpg_key(public_key['bundle'].encode('ascii'))
    sys.exit(0)
Imports a public GPG key from Keybase
53,060
def do_thing():
    """
    CLI dispatcher for cryptorito test actions, keyed on sys.argv length
    and the first argument. Prints usage and exits 1 on no match.
    """
    if len(sys.argv) == 5 and sys.argv[1] == "encrypt_file":
        encrypt_file(sys.argv[2], sys.argv[3], sys.argv[4])
    elif len(sys.argv) == 4 and sys.argv[1] == "decrypt_file":
        decrypt_file(sys.argv[2], sys.argv[3])
    elif len(sys.argv) == 3 and sys.argv[1] == "encrypt":
        encrypt_var(sys.argv[2])
    elif len(sys.argv) == 2 and sys.argv[1] == "decrypt":
        decrypt_var()
    elif len(sys.argv) == 3 and sys.argv[1] == "decrypt":
        # Three-arg form supplies a passphrase.
        decrypt_var(passphrase=sys.argv[2])
    elif len(sys.argv) == 3 and sys.argv[1] == "has_key":
        has_key(sys.argv[2])
    elif len(sys.argv) == 3 and sys.argv[1] == "import_keybase":
        import_keybase(sys.argv[2])
    elif len(sys.argv) == 3 and sys.argv[1] == "export":
        export_key(sys.argv[2])
    else:
        print("Cryptorito testing wrapper. Not suitable for routine use.",
              file=sys.stderr)
        sys.exit(1)
Execute command line cryptorito actions
53,061
def get_monochrome_handler(extranames: List[str] = None,
                           with_process_id: bool = False,
                           with_thread_id: bool = False,
                           stream: TextIO = None) -> logging.StreamHandler:
    """Build a StreamHandler using the project's standard monochrome log format."""
    parts = ["%(asctime)s.%(msecs)03d"]
    if with_process_id or with_thread_id:
        procinfo = []
        if with_process_id:
            procinfo.append("p%(process)d")
        if with_thread_id:
            procinfo.append("t%(thread)d")
        parts.append(" [{}]".format(".".join(procinfo)))
    extras = ":" + ":".join(extranames) if extranames else ""
    parts.append(" %(name)s{extras}:%(levelname)s: ".format(extras=extras))
    parts.append("%(message)s")
    formatter = logging.Formatter("".join(parts), datefmt=LOG_DATEFMT,
                                  style='%')
    handler = logging.StreamHandler(stream)
    handler.setFormatter(formatter)
    return handler
Gets a monochrome log handler using a standard format .
53,062
def get_colour_handler(extranames: List[str] = None,
                       with_process_id: bool = False,
                       with_thread_id: bool = False,
                       stream: TextIO = None) -> logging.StreamHandler:
    """Build a StreamHandler using the project's standard colour log format."""
    parts = ["%(white)s%(asctime)s.%(msecs)03d"]
    if with_process_id or with_thread_id:
        procinfo = []
        if with_process_id:
            procinfo.append("p%(process)d")
        if with_thread_id:
            procinfo.append("t%(thread)d")
        parts.append(" [{}]".format(".".join(procinfo)))
    extras = ":" + ":".join(extranames) if extranames else ""
    parts.append(" %(name)s{extras}:%(levelname)s: ".format(extras=extras))
    parts.append("%(reset)s%(log_color)s%(message)s")
    cf = ColoredFormatter("".join(parts),
                          datefmt=LOG_DATEFMT,
                          reset=True,
                          log_colors=LOG_COLORS,
                          secondary_log_colors={},
                          style='%')
    ch = logging.StreamHandler(stream)
    ch.setFormatter(cf)
    return ch
Gets a colour log handler using a standard format .
53,063
def main_only_quicksetup_rootlogger(level: int = logging.DEBUG,
                                    with_process_id: bool = False,
                                    with_thread_id: bool = False) -> None:
    """Quickly configure the root logger for colour output (for simple scripts)."""
    root = logging.getLogger()
    configure_logger_for_colour(root, level,
                                remove_existing=True,
                                with_process_id=with_process_id,
                                with_thread_id=with_thread_id)
Quick function to set up the root logger for colour .
53,064
def remove_all_logger_handlers(logger: logging.Logger) -> None:
    """Detach every handler currently attached to *logger*."""
    for handler in list(logger.handlers):
        logger.removeHandler(handler)
Remove all handlers from a logger .
53,065
def reset_logformat(logger: logging.Logger, fmt: str,
                    datefmt: str = '%Y-%m-%d %H:%M:%S') -> None:
    """Replace all of *logger*'s handlers with a single one using *fmt*/*datefmt*."""
    new_handler = logging.StreamHandler()
    new_handler.setFormatter(logging.Formatter(fmt=fmt, datefmt=datefmt))
    remove_all_logger_handlers(logger)
    logger.addHandler(new_handler)
    logger.propagate = False
Create a new formatter and apply it to the logger .
53,066
def reset_logformat_timestamped(logger: logging.Logger,
                                extraname: str = "",
                                level: int = logging.INFO) -> None:
    """Apply a simple timestamped format to *logger* and set its level."""
    namebit = extraname + ":" if extraname else ""
    fmt = ("%(asctime)s.%(msecs)03d:%(levelname)s:%(name)s:"
           + namebit + "%(message)s")
    reset_logformat(logger, fmt=fmt)
    logger.setLevel(level)
Apply a simple time - stamped log format to an existing logger and set its loglevel to either logging . DEBUG or logging . INFO .
53,067
def get_formatter_report(f: logging.Formatter) -> Optional[Dict[str, str]]:
    """Describe a log formatter as a dict for debugging (None passes through)."""
    if f is None:
        return None
    return {
        '_fmt': f._fmt,
        'datefmt': f.datefmt,
        '_style': str(f._style),
    }
Returns information on a log formatter as a dictionary . For debugging .
53,068
def get_handler_report(h: logging.Handler) -> Dict[str, Any]:
    """Describe a log handler as a dict for debugging."""
    return {
        'get_name()': h.get_name(),
        'level': h.level,
        'formatter': get_formatter_report(h.formatter),
        'filters': h.filters,
    }
Returns information on a log handler as a dictionary . For debugging .
53,069
def get_log_report(log: Union[logging.Logger,
                              logging.PlaceHolder]) -> Dict[str, Any]:
    """Describe a logger (or manager placeholder) as a dict for debugging."""
    if isinstance(log, logging.Logger):
        return {
            '(object)': str(log),
            'level': log.level,
            'disabled': log.disabled,
            'propagate': log.propagate,
            'parent': str(log.parent),
            'manager': str(log.manager),
            'handlers': [get_handler_report(h) for h in log.handlers],
        }
    if isinstance(log, logging.PlaceHolder):
        return {"(object)": str(log)}
    raise ValueError("Unknown object type: {!r}".format(log))
Returns information on a log as a dictionary . For debugging .
53,070
def set_level_for_logger_and_its_handlers(log: logging.Logger,
                                          level: int) -> None:
    """Set *level* on the logger itself and on every attached handler."""
    log.setLevel(level)
    for handler in log.handlers:
        handler.setLevel(level)
Set a log level for a log and all its handlers .
53,071
def get_column_names(engine: Engine, tablename: str) -> List[str]:
    """All database column names for *tablename*."""
    names = []
    for column_info in gen_columns_info(engine, tablename):
        names.append(column_info.name)
    return names
Get all the database column names for the specified table .
53,072
def get_single_int_autoincrement_colname(table_: Table) -> Optional[str]:
    """
    If *table_* has exactly one autoincrement column and that column is an
    integer, return its name; otherwise return None. Logs a warning when
    several autoincrement columns are present.
    """
    n_autoinc = 0
    int_autoinc_names = []
    for col in table_.columns:
        if col.autoincrement:
            n_autoinc += 1
            if is_sqlatype_integer(col.type):
                int_autoinc_names.append(col.name)
    if n_autoinc > 1:
        # Brace-style logging, consistent with the rest of this file.
        log.warning("Table {!r} has {} autoincrement columns",
                    table_.name, n_autoinc)
    # Exactly one autoincrement column AND it is an integer.
    if n_autoinc == 1 and len(int_autoinc_names) == 1:
        return int_autoinc_names[0]
    return None
If a table has a single integer AUTOINCREMENT column this will return its name ; otherwise None .
53,073
def index_exists(engine: Engine, tablename: str, indexname: str) -> bool:
    """Return True if *indexname* exists on *tablename*."""
    inspector = Inspector.from_engine(engine)
    for index in inspector.get_indexes(tablename):
        if index['name'] == indexname:
            return True
    return False
Does the specified index exist for the specified table?
53,074
def column_creation_ddl(sqla_column: Column, dialect: Dialect) -> str:
    """DDL fragment that creates *sqla_column* under *dialect*."""
    compiled = CreateColumn(sqla_column).compile(dialect=dialect)
    return str(compiled)
Returns DDL to create a column using the specified dialect .
53,075
def giant_text_sqltype(dialect: Dialect) -> str:
    """
    SQL type name for a very large text column in *dialect*.

    Raises:
        ValueError: for dialects other than SQL Server / MySQL.
    """
    dialect_name = dialect.name
    if dialect_name == SqlaDialectName.SQLSERVER:
        return 'NVARCHAR(MAX)'
    if dialect_name == SqlaDialectName.MYSQL:
        return 'LONGTEXT'
    raise ValueError("Unknown dialect: {}".format(dialect_name))
Returns the SQL column type used to make very large text columns for a given dialect .
53,076
def _get_sqla_coltype_class_from_str(coltype: str,
                                     dialect: Dialect) -> Type[TypeEngine]:
    """
    SQLAlchemy type class for SQL type name *coltype* in *dialect*
    (upper-case lookup first, then lower-case).
    """
    ischema_names = dialect.ischema_names
    upper_key = coltype.upper()
    if upper_key in ischema_names:
        return ischema_names[upper_key]
    return ischema_names[coltype.lower()]
Returns the SQLAlchemy class corresponding to a particular SQL column type in a given dialect .
53,077
def remove_collation(coltype: TypeEngine) -> TypeEngine:
    """Copy of *coltype* with any COLLATION stripped (returned unchanged if none)."""
    if not getattr(coltype, 'collation', None):
        return coltype
    stripped = copy.copy(coltype)
    stripped.collation = None
    return stripped
Returns a copy of the specific column type with any COLLATION removed .
53,078
def convert_sqla_type_for_dialect(
        coltype: TypeEngine,
        dialect: Dialect,
        strip_collation: bool = True,
        convert_mssql_timestamp: bool = True,
        expand_for_scrubbing: bool = False) -> TypeEngine:
    """
    Converts an SQLAlchemy column type from one SQL dialect to another.

    Args:
        coltype: source column type
        dialect: destination SQLAlchemy dialect
        strip_collation: remove any COLLATION from string types?
        convert_mssql_timestamp: convert SQL Server TIMESTAMP columns to
            BINARY(8)? (SQL Server TIMESTAMP is a rowversion, not a
            date/time.)
        expand_for_scrubbing: widen length-limited string types to
            unbounded text types (presumably so longer replacement text
            still fits -- confirm against caller)

    Returns:
        the converted column type
    """
    assert coltype is not None
    to_mysql = dialect.name == SqlaDialectName.MYSQL
    to_mssql = dialect.name == SqlaDialectName.MSSQL
    typeclass = type(coltype)
    # --- String and text types -----------------------------------------
    # Enum -> plain string of the same length.
    if isinstance(coltype, sqltypes.Enum):
        return sqltypes.String(length=coltype.length)
    # Unbounded text types: return fresh instances (dropping any
    # dialect-specific attributes such as collation).
    if isinstance(coltype, sqltypes.UnicodeText):
        return sqltypes.UnicodeText()
    if isinstance(coltype, sqltypes.Text):
        return sqltypes.Text()
    # Length-limited Unicode: widen to UnicodeText if there is no length
    # and we're targeting MySQL, or if expanding for scrubbing. NOTE: when
    # this condition fails, execution deliberately falls through to the
    # String branch below (Unicode subclasses String).
    if isinstance(coltype, sqltypes.Unicode):
        if (coltype.length is None and to_mysql) or expand_for_scrubbing:
            return sqltypes.UnicodeText()
    if isinstance(coltype, sqltypes.String):
        if (coltype.length is None and to_mysql) or expand_for_scrubbing:
            return sqltypes.Text()
        if strip_collation:
            return remove_collation(coltype)
        return coltype
    # --- Dialect-specific non-string types -----------------------------
    # SQL Server BIT -> MySQL BIT.
    if typeclass == mssql.base.BIT and to_mysql:
        return mysql.base.BIT()
    # SQL Server TIMESTAMP (rowversion) -> BINARY(8), if requested and the
    # destination is SQL Server.
    is_mssql_timestamp = isinstance(coltype, MSSQL_TIMESTAMP)
    if is_mssql_timestamp and to_mssql and convert_mssql_timestamp:
        return mssql.base.BINARY(8)
    # Anything else: pass through unchanged.
    return coltype
Converts an SQLAlchemy column type from one SQL dialect to another .
53,079
def is_sqlatype_binary(coltype: Union[TypeEngine, VisitableType]) -> bool:
    """Is the SQLAlchemy column type a binary type?"""
    # noinspection PyProtectedMember
    return isinstance(_coltype_to_typeengine(coltype), sqltypes._Binary)
Is the SQLAlchemy column type a binary type?
53,080
def is_sqlatype_date(coltype: TypeEngine) -> bool:
    """Is the SQLAlchemy column type a date or date/time type?"""
    return isinstance(_coltype_to_typeengine(coltype),
                      (sqltypes.DateTime, sqltypes.Date))
Is the SQLAlchemy column type a date type?
53,081
def is_sqlatype_integer(coltype: Union[TypeEngine, VisitableType]) -> bool:
    """Is the SQLAlchemy column type an integer type?"""
    return isinstance(_coltype_to_typeengine(coltype), sqltypes.Integer)
Is the SQLAlchemy column type an integer type?
53,082
def is_sqlatype_string(coltype: Union[TypeEngine, VisitableType]) -> bool:
    """Is the SQLAlchemy column type a string type?"""
    return isinstance(_coltype_to_typeengine(coltype), sqltypes.String)
Is the SQLAlchemy column type a string type?
53,083
def is_sqlatype_text_of_length_at_least(
        coltype: Union[TypeEngine, VisitableType],
        min_length: int = 1000) -> bool:
    """
    Is the SQLAlchemy column type a string type that's at least the
    specified length? An unbounded string type (length of None) counts as
    long enough.
    """
    coltype = _coltype_to_typeengine(coltype)
    if isinstance(coltype, sqltypes.String):
        # length of None means unlimited.
        return coltype.length is None or coltype.length >= min_length
    return False
Is the SQLAlchemy column type a string type that's at least the specified length?
53,084
def is_sqlatype_text_over_one_char(
        coltype: Union[TypeEngine, VisitableType]) -> bool:
    """
    Is the SQLAlchemy column type a string type that's more than one
    character long?
    """
    return is_sqlatype_text_of_length_at_least(
        _coltype_to_typeengine(coltype), 2)
Is the SQLAlchemy column type a string type that's more than one character long?
53,085
def does_sqlatype_require_index_len(
        coltype: Union[TypeEngine, VisitableType]) -> bool:
    """
    Is the SQLAlchemy column type one that requires its indexes to have a
    length specified (i.e. unbounded text or binary)?
    """
    coltype = _coltype_to_typeengine(coltype)
    return isinstance(coltype, (sqltypes.Text, sqltypes.LargeBinary))
Is the SQLAlchemy column type one that requires its indexes to have a length specified?
53,086
def signed_to_twos_comp(val: int, n_bits: int) -> int:
    """
    Converts a signed integer to its unsigned two's-complement
    representation in ``n_bits`` bits.

    Generalized from the original byte-round-trip implementation:
    ``n_bits`` no longer needs to be a whole number of bytes, and no
    intermediate bytes object is created.

    Args:
        val: signed integer, in the range
            ``[-2 ** (n_bits - 1), 2 ** (n_bits - 1) - 1]``
        n_bits: width of the representation, in bits (positive)

    Returns:
        the unsigned integer whose ``n_bits``-bit pattern equals that of
        ``val``

    Raises:
        OverflowError: if ``val`` does not fit in ``n_bits`` signed bits
            (the same exception class that ``int.to_bytes`` raised in the
            original implementation)
    """
    half = 1 << (n_bits - 1)
    if not -half <= val < half:
        # Preserve the original's failure mode (int.to_bytes raised
        # OverflowError for out-of-range values).
        raise OverflowError(
            "signed_to_twos_comp: value {} does not fit in {} signed "
            "bits".format(val, n_bits))
    # Masking with 2**n_bits - 1 yields the two's-complement bit pattern.
    return val & ((1 << n_bits) - 1)
Convert a signed integer to its two s complement representation .
53,087
def bytes_to_long(bytesdata: bytes) -> int:
    """
    Converts an 8-byte little-endian sequence to an (unsigned) integer.

    Args:
        bytesdata: sequence of exactly 8 bytes

    Returns:
        the corresponding integer
    """
    assert len(bytesdata) == 8
    # int.from_bytes performs the shift-and-sum at C speed; the original
    # enumerate-based sum (b << (k * 8) for each index k) is exactly a
    # little-endian interpretation.
    return int.from_bytes(bytesdata, byteorder='little')
Converts an 8 - byte sequence to a long integer .
53,088
def pymmh3_hash128(key: Union[bytes, bytearray], seed: int = 0,
                   x64arch: bool = True) -> int:
    """
    Implements the 128-bit murmur3 hash, as per pymmh3, dispatching to the
    x64 or x86 variant.

    Args:
        key: data to hash
        seed: hash seed
        x64arch: use the x64 variant (as opposed to x86)?

    Returns:
        the 128-bit hash as an unsigned integer
    """
    implementation = pymmh3_hash128_x64 if x64arch else pymmh3_hash128_x86
    return implementation(key, seed)
Implements 128bit murmur3 hash as per pymmh3 .
53,089
def pymmh3_hash64(key: Union[bytes, bytearray], seed: int = 0,
                  x64arch: bool = True) -> Tuple[int, int]:
    """
    Implements a 64-bit murmur3 hash, as per pymmh3: computes the 128-bit
    hash and splits it into two signed 64-bit halves.

    Args:
        key: data to hash
        seed: hash seed
        x64arch: use the x64 variant (as opposed to x86)?

    Returns:
        tuple ``(signed_low, signed_high)`` of the low and high 64-bit
        halves, each reinterpreted as a signed integer
    """
    mask64 = 0xFFFFFFFFFFFFFFFF
    sign_bit = 0x8000000000000000

    def reinterpret_signed(unsigned: int) -> int:
        # Two's-complement reinterpretation of a 64-bit unsigned value:
        # -((u ^ mask) + 1) == u - 2**64 when the sign bit is set.
        if unsigned & sign_bit:
            return -((unsigned ^ mask64) + 1)
        return unsigned

    hash_128 = pymmh3_hash128(key, seed, x64arch)
    signed_low = reinterpret_signed(hash_128 & mask64)
    signed_high = reinterpret_signed((hash_128 >> 64) & mask64)
    return signed_low, signed_high
Implements a 64-bit murmur3 hash, as per pymmh3. Returns a tuple of two signed 64-bit values.
53,090
def compare_python_to_reference_murmur3_32(data: Any, seed: int = 0) -> None:
    """
    Checks the pure Python implementation of 32-bit murmur3 against the
    mmh3 C-based module, printing an OK line on success.

    Args:
        data: data to hash (converted to str, then bytes, internally)
        seed: seed for the hash

    Raises:
        AssertionError: if the two implementations disagree (or if mmh3 is
            unavailable)
    """
    assert mmh3, "Need mmh3 module"
    c_data = to_str(data)
    # mmh3.hash returns a signed 32-bit integer.
    c_signed = mmh3.hash(c_data, seed=seed)
    py_data = to_bytes(c_data)
    # The pure Python implementation returns an unsigned value...
    py_unsigned = murmur3_x86_32(py_data, seed=seed)
    # ... so reinterpret it as signed for comparison.
    py_signed = twos_comp_to_signed(py_unsigned, n_bits=32)
    preamble = "Hashing {data} with MurmurHash3/32-bit/seed={seed}".format(
        data=repr(data), seed=seed)
    if c_signed == py_signed:
        print(preamble + " -> {result}: OK".format(result=c_signed))
    else:
        # NOTE: adjacent string literals below are fused at parse time
        # before .format() is applied.
        raise AssertionError(
            preamble + "; mmh3 says "
            "{c_data} -> {c_signed}, Python version says {py_data} -> "
            "{py_unsigned} = {py_signed}".format(
                c_data=repr(c_data), c_signed=c_signed,
                py_data=repr(py_data), py_unsigned=py_unsigned,
                py_signed=py_signed))
Checks the pure Python implementation of 32 - bit murmur3 against the mmh3 C - based module .
53,091
def compare_python_to_reference_murmur3_64(data: Any, seed: int = 0) -> None:
    """
    Checks the pure Python implementation of the 64-bit values (derived
    from the 128-bit murmur3 hash) against the mmh3 C-based module,
    printing an OK line on success.

    Args:
        data: data to hash (converted to str, then bytes, internally)
        seed: seed for the hash

    Raises:
        AssertionError: if the two implementations disagree (or if mmh3 is
            unavailable)
    """
    assert mmh3, "Need mmh3 module"
    c_data = to_str(data)
    # mmh3.hash64 returns a tuple of two signed 64-bit integers.
    c_signed_low, c_signed_high = mmh3.hash64(c_data, seed=seed,
                                              x64arch=IS_64_BIT)
    py_data = to_bytes(c_data)
    py_signed_low, py_signed_high = pymmh3_hash64(py_data, seed=seed)
    # Adjacent string literals fuse at parse time before .format() applies.
    preamble = "Hashing {data} with MurmurHash3/64-bit values from 128-bit " \
               "hash/seed={seed}".format(data=repr(data), seed=seed)
    if c_signed_low == py_signed_low and c_signed_high == py_signed_high:
        print(preamble + " -> (low={low}, high={high}): OK".format(
            low=c_signed_low, high=c_signed_high))
    else:
        raise AssertionError(
            preamble +
            "; mmh3 says {c_data} -> (low={c_low}, high={c_high}), Python "
            "version says {py_data} -> (low={py_low}, high={py_high})".format(
                c_data=repr(c_data), c_low=c_signed_low,
                c_high=c_signed_high, py_data=repr(py_data),
                py_low=py_signed_low, py_high=py_signed_high))
Checks the pure Python implementation of 64 - bit murmur3 against the mmh3 C - based module .
53,092
def main() -> None:
    """
    Command-line validation checks: compares the pure Python murmur3
    implementations against the mmh3 reference for some sample data.
    """
    testdata = [
        "hello",
        1,
        ["bongos", "today"],
    ]
    for item in testdata:
        compare_python_to_reference_murmur3_32(item, seed=0)
        compare_python_to_reference_murmur3_64(item, seed=0)
    print("All OK")
Command - line validation checks .
53,093
def hash ( self , raw : Any ) -> str : with MultiTimerContext ( timer , TIMING_HASH ) : raw_bytes = str ( raw ) . encode ( 'utf-8' ) hmac_obj = hmac . new ( key = self . key_bytes , msg = raw_bytes , digestmod = self . digestmod ) return hmac_obj . hexdigest ( )
Returns the hex digest of a HMAC - encoded version of the input .
53,094
def mean ( values : Sequence [ Union [ int , float , None ] ] ) -> Optional [ float ] : total = 0.0 n = 0 for x in values : if x is not None : total += x n += 1 return total / n if n > 0 else None
Returns the mean of a list of numbers .
53,095
def normal_round_float(x: float, dp: int = 0) -> float:
    """
    Rounds a float to ``dp`` decimal places using "round half away from
    zero", unlike Python's built-in banker's rounding. Non-finite values
    (inf, -inf, nan) are returned unchanged.

    Args:
        x: value to round
        dp: number of decimal places

    Returns:
        the rounded value
    """
    if not math.isfinite(x):
        return x
    scale = 10 ** dp
    scaled = x * scale
    # Shift by half a unit towards the appropriate infinity, then truncate
    # towards zero: floor for non-negatives, ceil for negatives.
    if scaled < 0:
        rounded = math.ceil(scaled - 0.5)
    else:
        rounded = math.floor(scaled + 0.5)
    return rounded / scale
Hmpf. Shouldn't need to have to implement this, but rounds half away from zero rather than using Python's default rounding.
53,096
def cmdargs(mysqldump: str,
            username: str,
            password: str,
            database: str,
            verbose: bool,
            with_drop_create_database: bool,
            max_allowed_packet: str,
            hide_password: bool = False) -> List[str]:
    """
    Returns command arguments for a mysqldump call.

    Args:
        mysqldump: path to the mysqldump executable
        username: MySQL username
        password: MySQL password
        database: name of the database to dump
        verbose: add ``--verbose``?
        with_drop_create_database: make the dump include
            DROP DATABASE / CREATE DATABASE statements?
        max_allowed_packet: value for ``--max_allowed_packet``
        hide_password: mask the password as "*****" (e.g. when the
            resulting command line is to be displayed or logged)?

    Returns:
        list of command-line arguments
    """
    ca = [
        mysqldump,
        "-u", username,
        # SECURITY NOTE: passing a password via argv is visible to other
        # local users (e.g. via ps); unchanged here for compatibility, but
        # consider a --defaults-file approach.
        "-p{}".format("*****" if hide_password else password),
        "--max_allowed_packet={}".format(max_allowed_packet),
        "--hex-blob",  # dump binary columns in hex, safe for BLOBs
    ]
    if verbose:
        ca.append("--verbose")
    if with_drop_create_database:
        ca.extend(["--add-drop-database", "--databases", database])
    else:
        ca.append(database)
    # (A stray no-op "pass" statement before the return has been removed.)
    return ca
Returns command arguments for a mysqldump call .
53,097
def get_dialect(mixed: Union[SQLCompiler, Engine, Dialect]) -> Dialect:
    """
    Finds the SQLAlchemy dialect in use, given a dialect, an engine, or a
    compiler.

    Raises:
        ValueError: if ``mixed`` is none of those types
    """
    if isinstance(mixed, Dialect):
        return mixed
    if isinstance(mixed, (Engine, SQLCompiler)):
        # Engines and compilers both expose the dialect as an attribute.
        return mixed.dialect
    raise ValueError("get_dialect: 'mixed' parameter of wrong type")
Finds the SQLAlchemy dialect in use .
53,098
def get_dialect_name(mixed: Union[SQLCompiler, Engine, Dialect]) -> str:
    """
    Finds the name of the SQLAlchemy dialect in use, given a dialect, an
    engine, or a compiler.
    """
    return get_dialect(mixed).name
Finds the name of the SQLAlchemy dialect in use .
53,099
def quote_identifier(identifier: str,
                     mixed: Union[SQLCompiler, Engine, Dialect]) -> str:
    """
    Converts an SQL identifier to a quoted version, via the SQL dialect in
    use.
    """
    preparer = get_preparer(mixed)
    return preparer.quote(identifier)
Converts an SQL identifier to a quoted version via the SQL dialect in use .