idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
52,900
def dump_orm_object_as_insert_sql(engine: Engine, obj: object, fileobj: TextIO) -> None:
    """Write an INSERT statement that would replicate an ORM object's row.

    Reflects the object's table from the database, re-reads the row by its
    primary key, and writes a literal INSERT statement to ``fileobj``.
    NOTE(review): uses ``MetaData(bind=...)``/``autoload`` — SQLAlchemy < 1.4
    API; confirm the project's pinned SQLAlchemy version.
    """
    insp = inspect(obj)
    meta = MetaData(bind=engine)
    table_name = insp.mapper.mapped_table.name
    table = Table(table_name, meta, autoload=True)
    query = select(table.columns)
    # Narrow the SELECT to this object's row via every primary-key column.
    for orm_pkcol in insp.mapper.primary_key:
        core_pkcol = table.columns.get(orm_pkcol.name)
        pkval = getattr(obj, orm_pkcol.name)
        query = query.where(core_pkcol == pkval)
    cursor = engine.execute(query)
    row = cursor.fetchone()
    row_dict = dict(row)
    statement = table.insert(values=row_dict)
    insert_str = get_literal_query(statement, bind=engine)
    writeline_nl(fileobj, insert_str)
Takes a SQLAlchemy ORM object and writes INSERT SQL to replicate it to the output file-like object .
52,901
def next(self):
    """Fetch the next page of hits from the Elasticsearch scroll API.

    Returns a Deferred firing with the extracted hits; raises StopIteration
    when neither initial results nor a scroll id remain.
    """
    d = None
    if self._first_results:
        # The first page came back with the initial search call.
        d = succeed(EsUtils.extract_hits(self._first_results))
        self._first_results = None
    elif self._scroll_id:
        d = self._scroll_next_results()
    else:
        raise StopIteration()
    return d
Fetch next page from scroll API .
52,902
def reformat_python_docstrings(top_dirs: List[str], correct_copyright_lines: List[str], show_only: bool = True, rewrite: bool = False, process_only_filenum: int = None) -> None:
    """Walk directories, find Python files, and show or rewrite each one.

    Args:
        top_dirs: directories to walk recursively
        correct_copyright_lines: canonical copyright lines passed to the processor
        show_only: display the processed result without writing
        rewrite: rewrite files in place (checked only when show_only is False)
        process_only_filenum: if set, process only the Nth Python file found
    """
    filenum = 0
    for top_dir in top_dirs:
        for dirpath, dirnames, filenames in walk(top_dir):
            for filename in filenames:
                fullname = join(dirpath, filename)
                extension = splitext(filename)[1]
                if extension != PYTHON_EXTENSION:
                    continue
                filenum += 1
                if process_only_filenum and filenum != process_only_filenum:
                    continue
                log.info("Processing file {}: {}", filenum, fullname)
                proc = PythonProcessor(
                    full_path=fullname,
                    top_dir=top_dir,
                    correct_copyright_lines=correct_copyright_lines)
                if show_only:
                    proc.show()
                elif rewrite:
                    proc.rewrite_file()
Walk a directory finding Python files and rewriting them .
52,903
def _read_source(self) -> None:
    """Read the source file into self.source_lines, stripping trailing newlines.

    Emits warnings for tab characters and for carriage returns (which suggest
    Windows CR+LF line endings).
    """
    with open(self.full_path, "rt") as f:
        for linenum, line_with_nl in enumerate(f.readlines(), start=1):
            line_without_newline = (
                line_with_nl[:-1] if line_with_nl.endswith(NL)
                else line_with_nl)
            if TAB in line_without_newline:
                self._warn("Tab character at line {}".format(linenum))
            if CR in line_without_newline:
                self._warn("Carriage return character at line {} "
                           "(Windows CR+LF endings?)".format(linenum))
            self.source_lines.append(line_without_newline)
Reads the source file .
52,904
def _debug_line(linenum: int, line: str, extramsg: str = "") -> None:
    """Log a critical-level debugging report for one source line."""
    log.critical("{}Line {}: {!r}", extramsg, linenum, line)
Writes a debugging report on a line .
52,905
def rewrite_file(self) -> None:
    """Rewrite the source file in place, if any changes are pending."""
    if not self.needs_rewriting:
        return
    self._info("Rewriting file")
    with open(self.full_path, "w") as outfile:
        self._write(outfile)
Rewrites the source file .
52,906
def _write(self, destination: TextIO) -> None:
    """Write the converted output lines to destination, one per line."""
    for line in self.dest_lines:
        destination.write(line + NL)
Writes the converted output to a destination .
52,907
def contains_duplicates(values: Iterable[Any]) -> bool:
    """Return True if the iterable contains any value more than once.

    Values must be hashable. The whole iterable is consumed.
    """
    # Counter tallies occurrences in one pass; any() short-circuits on the
    # first duplicate rather than scanning every count.
    return any(count > 1 for count in Counter(values).values())
Does the iterable contain any duplicate values?
52,908
def index_list_for_sort_order(x: List[Any], key: Callable[[Any], Any] = None, reverse: bool = False) -> List[int]:
    """Return the indexes of x in the order they would appear if x were sorted.

    Args:
        x: the list in question
        key: optional key function applied to each element
        reverse: sort in descending order?
    """
    if key is None:
        sort_key = itemgetter(1)
    else:
        def sort_key(pair: Tuple[int, Any]):
            # Apply the user's key to the value, ignoring the index.
            return key(pair[1])
    ordered_pairs = sorted(enumerate(x), key=sort_key, reverse=reverse)
    return [original_index for original_index, _unused in ordered_pairs]
Returns a list of indexes of x IF x WERE TO BE SORTED .
52,909
def sort_list_by_index_list(x: List[Any], indexes: List[int]) -> None:
    """Reorder x in place so that x[i] becomes the element originally at indexes[i]."""
    reordered = [x[position] for position in indexes]
    # Slice assignment mutates the caller's list rather than rebinding.
    x[:] = reordered
Re-orders x in place according to the given list of indexes .
52,910
def unique_list(seq: Iterable[Any]) -> List[Any]:
    """Return the unique elements of seq, preserving first-seen order.

    Elements must be hashable.
    """
    # dict keys are insertion-ordered (Python 3.7+) and deduplicate in one pass.
    return list(dict.fromkeys(seq))
Returns a list of all the unique elements in the input list .
52,911
def escape_newlines(s: str) -> str:
    """Return s with backslash, LF and CR each replaced by its two-character escape."""
    if not s:
        return s
    # Backslashes must be escaped first, or we would double-escape the
    # backslashes introduced for \n and \r.
    for raw, escaped in (("\\", r"\\"), ("\n", r"\n"), ("\r", r"\r")):
        s = s.replace(raw, escaped)
    return s
Escapes CR LF and backslashes .
52,912
def escape_tabs_newlines(s: str) -> str:
    """Return s with backslash, LF, CR and tab each replaced by its two-character escape."""
    if not s:
        return s
    # Backslash replacement must run first to avoid double-escaping the
    # backslashes the later substitutions introduce.
    for raw, escaped in (("\\", r"\\"), ("\n", r"\n"), ("\r", r"\r"), ("\t", r"\t")):
        s = s.replace(raw, escaped)
    return s
Escapes CR LF tab and backslashes .
52,913
def validate_geotweet(self, record):
    """Return True if record is a stream record carrying both a user and coordinates."""
    return bool(record
                and self._validate('user', record)
                and self._validate('coordinates', record))
Check that a stream record is an actual tweet with coordinates .
52,914
def contains(ell, p, shell_only=False):
    """Check whether point p lies inside the conic ell.

    With shell_only, instead test whether p lies (approximately) on the
    conic's surface itself.
    """
    v = augment(p)  # homogeneous coordinates
    _ = ell.solve(v)
    # On-shell: approximate equality to zero; interior: sign of the residual.
    return N.allclose(_, 0) if shell_only else _ <= 0
Check to see whether point is inside conic .
52,915
def major_axes ( ell ) : _ = ell [ : - 1 , : - 1 ] U , s , V = N . linalg . svd ( _ ) scalar = - ( ell . sum ( ) - _ . sum ( ) ) return N . sqrt ( s * scalar ) * V
Gets major axes of ellipsoids
52,916
def translate(conic, vector):
    """Return the conic translated by vector, via a homogeneous transform."""
    T = N.identity(len(conic))
    T[:-1, -1] = -vector  # translation lives in the final column
    return conic.transform(T)
Translates a conic by a vector
52,917
def pole ( conic , plane ) : v = dot ( N . linalg . inv ( conic ) , plane ) return v [ : - 1 ] / v [ - 1 ]
Calculates the pole of a polar plane for a given conic section .
52,918
def projection(self, **kwargs):
    """Return the elliptical cut of tangency points of the conic seen from a viewpoint.

    Defaults to viewing from the origin when no viewpoint is given.
    """
    viewpoint = kwargs.pop('viewpoint', None)
    if viewpoint is None:
        ndim = self.shape[0] - 1
        viewpoint = N.zeros(ndim)
    plane = self.polar_plane(viewpoint)
    return self.slice(plane, **kwargs)
The elliptical cut of an ellipsoidal conic describing all points of tangency to the conic as viewed from the origin .
52,919
def guess_file_name_stream_type_header(args):
    """Split args into (filename, data, content-type, header).

    args may be a 2/3/4-sequence of (name, stream[, type[, header]]), or a
    bare object from which the name and content type are guessed.
    """
    ftype = None
    fheader = None
    if isinstance(args, (tuple, list)):
        if len(args) == 2:
            fname, fstream = args
        elif len(args) == 3:
            fname, fstream, ftype = args
        else:
            fname, fstream, ftype, fheader = args
    else:
        fname, fstream = guess_filename_stream(args)
        ftype = guess_content_type(fname)
    # Raw text/byte payloads pass through untouched; file-likes are read().
    if isinstance(fstream, (str, bytes, bytearray)):
        fdata = fstream
    else:
        fdata = fstream.read()
    return fname, fdata, ftype, fheader
Guess filename file stream file type file header from args .
52,920
def encode_params ( self , data = None , ** kwargs ) : collection_format = kwargs . get ( "collection_format" , self . collection_format ) output_str = kwargs . get ( "output_str" , self . output_str ) sort = kwargs . get ( "sort" , self . sort ) if data is None : return "" , self . content_type elif isinstance ( data , ( str , bytes ) ) : return data , self . content_type elif hasattr ( data , 'read' ) : return data , self . content_type elif collection_format == 'multi' and hasattr ( data , '__iter__' ) : result = [ ] for k , vs in to_key_val_list ( data , sort = sort ) : if isinstance ( vs , basestring ) or not hasattr ( vs , '__iter__' ) : vs = [ vs ] for v in vs : result . append ( ( k . encode ( 'utf-8' ) if isinstance ( k , str ) else k , v . encode ( 'utf-8' ) if isinstance ( v , str ) else to_string ( v , lang = output_str ) ) ) return urlencode ( result , doseq = True ) , self . content_type elif collection_format == 'encoded' and hasattr ( data , '__iter__' ) : return urlencode ( data , doseq = False ) , self . content_type elif hasattr ( data , '__iter__' ) : results = [ ] for k , vs in to_key_val_dict ( data ) . items ( ) : if isinstance ( vs , list ) : v = self . COLLECTION_SEPARATORS [ collection_format ] . join ( quote_plus ( e ) for e in vs ) key = k + '[]' else : v = quote_plus ( vs ) key = k results . append ( "%s=%s" % ( key , v ) ) return '&' . join ( results ) , self . content_type else : return data , self . content_type
Encode parameters in a piece of data . Will successfully encode parameters when passed as a dict or a list of 2 - tuples . Order is retained if data is a list of 2 - tuples but arbitrary if parameters are supplied as a dict .
52,921
def ci(a, which=95, axis=None):
    """Return the central `which`-percent interval of the values in a."""
    # e.g. which=95 -> the 2.5th and 97.5th percentiles.
    p = 50 - which / 2, 50 + which / 2
    return percentiles(a, p, axis)
Return a percentile range from an array of values .
52,922
def get_config_string_option(parser: ConfigParser, section: str, option: str, default: str = None) -> str:
    """Fetch a string option from a config section, or default if the option is absent.

    Raises:
        ValueError: if the section itself is missing.
    """
    if parser.has_section(section):
        return parser.get(section, option, fallback=default)
    raise ValueError("config missing section: " + section)
Retrieves a string value from a parser .
52,923
def read_config_string_options(obj: Any, parser: ConfigParser, section: str, options: Iterable[str], default: str = None) -> None:
    """Read each named option from the section and set it as an attribute on obj.

    Attribute names match the option names; missing options get default.
    """
    for option_name in options:
        value = get_config_string_option(parser, section, option_name,
                                         default=default)
        setattr(obj, option_name, value)
Reads config options and writes them as attributes of obj with attribute names as per options .
52,924
def get_config_bool_option(parser: ConfigParser, section: str, option: str, default: bool = None) -> bool:
    """Fetch a boolean option from a config section, or default if the option is absent.

    Raises:
        ValueError: if the section itself is missing.
    """
    if parser.has_section(section):
        return parser.getboolean(section, option, fallback=default)
    raise ValueError("config missing section: " + section)
Retrieves a boolean value from a parser .
52,925
def get_config_parameter(config: ConfigParser, section: str, param: str, fn: Callable[[Any], Any], default: Any) -> Any:
    """Fetch a parameter from an INI-style config, converting it with fn.

    Falls back to default (also converted with fn, unless it is None) when the
    option is missing or the converter rejects the raw value; the fallback is
    logged as a warning.
    """
    try:
        return fn(config.get(section, param))
    except (TypeError, ValueError, NoOptionError):
        log.warning(
            "Configuration variable {} not found or improper in section [{}]; "
            "using default of {!r}", param, section, default)
        # None stays None; any other default goes through the converter too.
        return default if default is None else fn(default)
Fetch parameter from configparser . INI file .
52,926
def get_config_parameter_boolean(config: ConfigParser, section: str, param: str, default: bool) -> bool:
    """Fetch a boolean parameter from an INI-style config.

    Falls back to default (logging a warning) when the option is missing or
    not interpretable as a boolean.
    """
    try:
        return config.getboolean(section, param)
    except (TypeError, ValueError, NoOptionError):
        log.warning(
            "Configuration variable {} not found or improper in section [{}]; "
            "using default of {!r}", param, section, default)
        return default
Get Boolean parameter from configparser . INI file .
52,927
def is_definition(cursor):
    """Return whether the cursor points at the definition of its own entity."""
    definition = cursor.get_definition()
    if definition is None:
        return False
    # A cursor is "the" definition when it sits at the definition's location.
    return cursor.location == definition.location
Test if a cursor refers to a definition
52,928
def asymptotes(hyp, n=1000):
    """Return a cone of asymptote directions for a hyperbola.

    The hyperbola must be centered at the origin (asserted).
    """
    assert N.linalg.norm(hyp.center()) == 0
    u = N.linspace(0, 2 * N.pi, n)
    _ = N.ones(len(u))
    # Homogeneous direction vectors around the unit circle.
    angles = N.array([N.cos(u), N.sin(u), _]).T
    return dot(angles, hyp[:-1, :-1])
Gets a cone of asymptotes for hyperbola
52,929
def pca_to_mapping(pca, **extra_props):
    """Return a JSON-friendly mapping of a PCA result for a planar error surface.

    The covariance is computed by `method` (default: sampling_axes), which can
    be overridden via extra_props; remaining extra_props are merged in.
    """
    from .axes import sampling_axes
    method = extra_props.pop('method', sampling_axes)
    return dict(
        axes=pca.axes.tolist(),
        covariance=method(pca).tolist(),
        **extra_props)
A helper to return a mapping of a PCA result set suitable for reconstructing a planar error surface in other software packages
52,930
def generic_service_main(cls: Type[WindowsService], name: str) -> None:
    """Command-line entry point for managing a Windows service class.

    With no arguments: start via the service control dispatcher (printing
    usage if we weren't launched by the service controller). With "debug":
    run the service directly in the console. Otherwise: delegate to pywin32's
    command-line handler (install/remove/etc.).
    """
    argc = len(sys.argv)
    if argc == 1:
        try:
            print("Trying to start service directly...")
            evtsrc_dll = os.path.abspath(servicemanager.__file__)
            servicemanager.PrepareToHostSingle(cls)
            servicemanager.Initialize(name, evtsrc_dll)
            servicemanager.StartServiceCtrlDispatcher()
        except win32service.error as details:
            print("Failed: {}".format(details))
            errnum = details.winerror
            if errnum == winerror.ERROR_FAILED_SERVICE_CONTROLLER_CONNECT:
                # Launched from a console, not by the service manager.
                win32serviceutil.usage()
    elif argc == 2 and sys.argv[1] == 'debug':
        s = cls()
        s.run_debug()
    else:
        win32serviceutil.HandleCommandLine(cls)
Call this from your command - line entry point to manage a service .
52,931
def fullname(self) -> str:
    """Return a human-readable description of the process, with PID if running."""
    description = "Process {}/{} ({})".format(
        self.procnum, self.nprocs, self.details.name)
    if self.running:
        description += " (PID={})".format(self.process.pid)
    return description
Description of the process .
52,932
def debug(self, msg: str) -> None:
    """Write msg to the debug log, but only when verbose debugging is enabled."""
    if self.debugging:
        s = "{}: {}".format(self.fullname, msg)
        log.debug(s)
If we are being verbose write a debug message to the Python disk log .
52,933
def open_logs(self) -> None:
    """Open stdout/stderr disk logs for the subprocess.

    If stderr's path equals stdout's, stderr is merged via subprocess.STDOUT
    so both streams share a single file handle.
    """
    if self.details.logfile_out:
        self.stdout = open(self.details.logfile_out, 'a')
    else:
        self.stdout = None
    if self.details.logfile_err:
        if self.details.logfile_err == self.details.logfile_out:
            self.stderr = subprocess.STDOUT
        else:
            self.stderr = open(self.details.logfile_err, 'a')
    else:
        self.stderr = None
Open Python disk logs .
52,934
def close_logs(self) -> None:
    """Close any open stdout/stderr log handles and clear the references."""
    if self.stdout is not None:
        self.stdout.close()
        self.stdout = None
    # stderr may alias subprocess.STDOUT (merged streams); nothing to close then.
    if self.stderr is not None and self.stderr != subprocess.STDOUT:
        self.stderr.close()
        self.stderr = None
Close Python disk logs .
52,935
def start(self) -> None:
    """Start the subprocess (no-op if already running), routing output to our logs."""
    if self.running:
        return
    self.info("Starting: {} (with logs stdout={}, stderr={})".format(
        self.details.procargs, self.details.logfile_out,
        self.details.logfile_err))
    self.open_logs()
    # New process group on Windows, so we can signal the child separately.
    creationflags = CREATE_NEW_PROCESS_GROUP if WINDOWS else 0
    self.process = subprocess.Popen(
        self.details.procargs,
        stdin=None,
        stdout=self.stdout,
        stderr=self.stderr,
        creationflags=creationflags)
    self.running = True
Starts a subprocess . Optionally routes its output to our disk logs .
52,936
def stop(self) -> None:
    """Stop the subprocess, escalating through kill levels if it won't die."""
    if not self.running:
        return
    try:
        # Zero-timeout wait: has it already finished by itself?
        self.wait(timeout_s=0)
    except subprocess.TimeoutExpired:
        for kill_level in self.ALL_KILL_LEVELS:
            tried_to_kill = self._terminate(level=kill_level)
            if tried_to_kill:
                try:
                    self.wait(timeout_s=self.kill_timeout_sec)
                    break  # it stopped; no further escalation needed
                except subprocess.TimeoutExpired:
                    self.warning("Subprocess didn't stop when asked")
                    pass
    self.close_logs()
    self.running = False
Stops a subprocess .
52,937
def wait(self, timeout_s: float = None) -> int:
    """Wait up to timeout_s for the child to finish; return its exit code.

    Returns 0 immediately if the process is not running. May raise
    subprocess.TimeoutExpired if the timeout elapses first.
    """
    if not self.running:
        return 0
    retcode = self.process.wait(timeout=timeout_s)
    if retcode is None:
        # Shouldn't happen after a successful wait(); treat as failure.
        self.error("Subprocess finished, but return code was None")
        retcode = 1
    elif retcode == 0:
        self.info("Subprocess finished cleanly (return code 0).")
    else:
        self.error(
            "Subprocess finished, but FAILED (return code {}). "
            "Logs were: {} (stdout), {} (stderr)".format(
                retcode, self.details.logfile_out, self.details.logfile_err))
    self.running = False
    return retcode
Wait for up to timeout_s for the child process to finish .
52,938
def SvcStop(self) -> None:
    """Handle a service stop request: report STOP_PENDING and signal the stop event."""
    self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
    win32event.SetEvent(self.h_stop_event)
Called when the service is being shut down .
52,939
def SvcDoRun(self) -> None:
    """Service entry point: log start, run main(), then log and report the stop."""
    self.debug("Sending PYS_SERVICE_STARTED message")
    servicemanager.LogMsg(
        servicemanager.EVENTLOG_INFORMATION_TYPE,
        servicemanager.PYS_SERVICE_STARTED,
        (self._svc_name_, ''))
    self.main()
    servicemanager.LogMsg(
        servicemanager.EVENTLOG_INFORMATION_TYPE,
        servicemanager.PYS_SERVICE_STOPPED,
        (self._svc_name_, ''))
    self.ReportServiceStatus(win32service.SERVICE_STOPPED)
Called when the service is started .
52,940
def run_processes ( self , procdetails : List [ ProcessDetails ] , subproc_run_timeout_sec : float = 1 , stop_event_timeout_ms : int = 1000 , kill_timeout_sec : float = 5 ) -> None : def cleanup ( ) : self . debug ( "atexit function called: cleaning up" ) for pmgr_ in self . process_managers : pmgr_ . stop ( ) atexit . register ( cleanup ) self . process_managers = [ ] n = len ( procdetails ) for i , details in enumerate ( procdetails ) : pmgr = ProcessManager ( details , i + 1 , n , kill_timeout_sec = kill_timeout_sec , debugging = self . debugging ) self . process_managers . append ( pmgr ) for pmgr in self . process_managers : pmgr . start ( ) self . info ( "All started" ) something_running = True stop_requested = False subproc_failed = False while something_running and not stop_requested and not subproc_failed : if ( win32event . WaitForSingleObject ( self . h_stop_event , stop_event_timeout_ms ) == win32event . WAIT_OBJECT_0 ) : stop_requested = True self . info ( "Stop requested; stopping" ) else : something_running = False for pmgr in self . process_managers : if subproc_failed : break try : retcode = pmgr . wait ( timeout_s = subproc_run_timeout_sec ) if retcode != 0 : subproc_failed = True except subprocess . TimeoutExpired : something_running = True for pmgr in self . process_managers : pmgr . stop ( ) self . info ( "All stopped" )
Run multiple child processes .
52,941
def disable_bool_icon(fieldname: str, model) -> Callable[[Any], bool]:
    """Return a ModelAdmin display function showing a boolean field as text, not an icon.

    The _meta attribute is present on Django model classes and instances.
    """
    def func(self, obj):
        return getattr(obj, fieldname)
    func.boolean = False  # tells the Django admin not to render an icon
    func.admin_order_field = fieldname
    func.short_description = model._meta.get_field(fieldname).verbose_name
    return func
Disable boolean icons for a Django ModelAdmin field . The _meta attribute is present on Django model classes and instances .
52,942
def admin_view_url(admin_site: AdminSite, obj, view_type: str = "change", current_app: str = None) -> str:
    """Return a Django admin URL (e.g. the change view) for a model instance."""
    app_name = obj._meta.app_label.lower()
    model_name = obj._meta.object_name.lower()
    pk = obj.pk
    # Admin URL names follow "admin:<app>_<model>_<view>".
    viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type)
    if current_app is None:
        current_app = admin_site.name
    url = reverse(viewname, args=[pk], current_app=current_app)
    return url
Get a Django admin site URL for an object .
52,943
def admin_view_fk_link(modeladmin: ModelAdmin, obj, fkfield: str, missing: str = "(None)", use_str: bool = True, view_type: str = "change", current_app: str = None) -> str:
    """Return an HTML link to the admin view of an object reached via a foreign key.

    Returns `missing` if obj lacks the FK attribute. The label is
    str(linked object) when use_str, else "ModelName pk".
    """
    if not hasattr(obj, fkfield):
        return missing
    linked_obj = getattr(obj, fkfield)
    app_name = linked_obj._meta.app_label.lower()
    model_name = linked_obj._meta.object_name.lower()
    viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type)
    if current_app is None:
        current_app = modeladmin.admin_site.name
    url = reverse(viewname, args=[linked_obj.pk], current_app=current_app)
    if use_str:
        label = escape(str(linked_obj))
    else:
        label = "{} {}".format(escape(linked_obj._meta.object_name),
                               linked_obj.pk)
    return '<a href="{}">{}</a>'.format(url, label)
Get a Django admin site URL for an object that is found from a foreign key in our object of interest .
52,944
def lowpass_filter(data: FLOATS_TYPE, sampling_freq_hz: float, cutoff_freq_hz: float, numtaps: int) -> FLOATS_TYPE:
    """Apply an FIR low-pass filter to a time series.

    Args:
        data: the time series to filter
        sampling_freq_hz: sampling frequency of data, in Hz
        cutoff_freq_hz: filter cutoff frequency, in Hz
        numtaps: number of filter taps (filter length)
    """
    coeffs = firwin(
        numtaps=numtaps,
        cutoff=normalized_frequency(cutoff_freq_hz, sampling_freq_hz),
        pass_zero=True)  # pass_zero=True -> low-pass response
    filtered_data = lfilter(b=coeffs, a=1.0, x=data)
    return filtered_data
Apply a low - pass filter to the data .
52,945
def bandpass_filter(data: FLOATS_TYPE, sampling_freq_hz: float, lower_freq_hz: float, upper_freq_hz: float, numtaps: int) -> FLOATS_TYPE:
    """Apply an FIR band-pass filter to a time series.

    Args:
        data: the time series to filter
        sampling_freq_hz: sampling frequency of data, in Hz
        lower_freq_hz: lower passband edge, in Hz
        upper_freq_hz: upper passband edge, in Hz
        numtaps: number of filter taps (filter length)
    """
    f1 = normalized_frequency(lower_freq_hz, sampling_freq_hz)
    f2 = normalized_frequency(upper_freq_hz, sampling_freq_hz)
    coeffs = firwin(
        numtaps=numtaps,
        cutoff=[f1, f2],
        pass_zero=False)  # pass_zero=False with two edges -> band-pass
    filtered_data = lfilter(b=coeffs, a=1.0, x=data)
    return filtered_data
Apply a band - pass filter to the data .
52,946
def ellipse ( center , covariance_matrix , level = 1 , n = 1000 ) : U , s , rotation_matrix = N . linalg . svd ( covariance_matrix ) saxes = N . sqrt ( s ) * level u = N . linspace ( 0 , 2 * N . pi , n ) data = N . column_stack ( ( saxes [ 0 ] * N . cos ( u ) , saxes [ 1 ] * N . sin ( u ) ) ) return N . dot ( data , rotation_matrix ) + center
Returns error ellipse in slope - azimuth space
52,947
def to_mapping(self, **values):
    """Return a JSON-serializable dict describing this plane (for the JS frontend).

    Extra keyword values are merged into (and may override) the mapping.
    """
    strike, dip, rake = self.strike_dip_rake()
    min, max = self.angular_errors()
    try:
        disabled = self.disabled
    except AttributeError:
        disabled = False  # planes without the attribute count as enabled
    mapping = dict(
        uid=self.hash,
        axes=self.axes.tolist(),
        hyperbolic_axes=self.hyperbolic_axes.tolist(),
        max_angular_error=max,
        min_angular_error=min,
        strike=strike,
        dip=dip,
        rake=rake,
        disabled=disabled)
    for k, v in values.items():
        mapping[k] = v
    return mapping
Create a JSON - serializable representation of the plane that is usable with the javascript frontend
52,948
def run_multiple_processes(args_list: List[List[str]], die_on_failure: bool = True) -> None:
    """Start one subprocess per argument list, then wait for all of them.

    All processes are started before any waiting, so they run concurrently.
    """
    for procargs in args_list:
        start_process(procargs)
    wait_for_processes(die_on_failure=die_on_failure)
Fire up multiple processes and wait for them to finish .
52,949
def run(self) -> None:
    """Read characters from the pipe, split on line terminators, and queue fragments.

    Decoding errors are logged and (optionally) suppressed; EOF ends the loop.
    """
    fd = self._fd
    encoding = self._encoding
    line_terminators = self._line_terminators
    queue = self._queue
    buf = ""
    while True:
        try:
            c = fd.read(1).decode(encoding)
        except UnicodeDecodeError as e:
            log.warning("Decoding error from {!r}: {!r}", self._cmdargs, e)
            if self._suppress_decoding_errors:
                continue
            else:
                raise
        if not c:
            return  # EOF
        buf += c
        for t in line_terminators:
            try:
                t_idx = buf.index(t) + len(t)
                fragment = buf[:t_idx]
                buf = buf[t_idx:]
                queue.put(fragment)
            except ValueError:
                pass  # this terminator not present yet
Read lines and put them on the queue .
52,950
def create_nation_fixtures ( self ) : SHP_SLUG = "cb_{}_us_state_500k" . format ( self . YEAR ) DOWNLOAD_PATH = os . path . join ( self . DOWNLOAD_DIRECTORY , SHP_SLUG ) shape = shapefile . Reader ( os . path . join ( DOWNLOAD_PATH , "{}.shp" . format ( SHP_SLUG ) ) ) fields = shape . fields [ 1 : ] field_names = [ f [ 0 ] for f in fields ] features = [ ] for shp in shape . shapeRecords ( ) : state = dict ( zip ( field_names , shp . record ) ) geodata = { "type" : "Feature" , "geometry" : shp . shape . __geo_interface__ , "properties" : { "state" : state [ "STATEFP" ] , "name" : state [ "NAME" ] , } , } features . append ( geodata ) Geometry . objects . update_or_create ( division = self . NATION , subdivision_level = self . STATE_LEVEL , simplification = self . THRESHOLDS [ "nation" ] , source = os . path . join ( self . SHP_SOURCE_BASE . format ( self . YEAR ) , SHP_SLUG ) + ".zip" , series = self . YEAR , defaults = { "topojson" : self . toposimplify ( geojson . FeatureCollection ( features ) , self . THRESHOLDS [ "nation" ] , ) } , ) geo , created = Geometry . objects . update_or_create ( division = self . NATION , subdivision_level = self . COUNTY_LEVEL , simplification = self . THRESHOLDS [ "nation" ] , source = os . path . join ( self . SHP_SOURCE_BASE . format ( self . YEAR ) , SHP_SLUG ) + ".zip" , series = self . YEAR , defaults = { "topojson" : self . get_state_county_shps ( "00" ) } , ) tqdm . write ( "Nation\n" ) tqdm . write ( self . TQDM_PREFIX + "> FIPS {} @ ~{}kb " . format ( "00" , round ( len ( json . dumps ( geo . topojson ) ) / 1000 ) ) ) tqdm . write ( self . style . SUCCESS ( "Done.\n" ) )
Create national US and State Map
52,951
def serialize(pca, **kwargs):
    """Serialize an orientation (PCA) object to a JSON-suitable dict.

    Extra keyword arguments are merged into the result.
    """
    strike, dip, rake = pca.strike_dip_rake()
    hyp_axes = sampling_axes(pca)
    return dict(
        **kwargs,
        principal_axes=pca.axes.tolist(),
        hyperbolic_axes=hyp_axes.tolist(),
        n_samples=pca.n,
        strike=strike,
        dip=dip,
        rake=rake,
        # Angular errors are doubled and converted to degrees.
        angular_errors=[2 * N.degrees(i)
                        for i in angular_errors(hyp_axes)])
Serialize an orientation object to a dict suitable for JSON
52,952
def create_groups ( orientations , * groups , ** kwargs ) : grouped = [ ] if kwargs . pop ( 'copy' , True ) : orientations = [ copy ( o ) for o in orientations ] for o in orientations : o . member_of = None try : grouped += o . members for a in o . members : a . member_of = o except AttributeError : pass def find ( uid ) : try : val = next ( x for x in orientations if x . hash == uid ) if val in grouped : raise GroupedPlaneError ( "{} is already in a group." . format ( val . hash ) ) return val except StopIteration : raise KeyError ( "No measurement of with hash {} found" . format ( uid ) ) for uid_list in groups : vals = [ find ( uid ) for uid in uid_list ] o = GroupedOrientation ( * vals , ** kwargs ) orientations . append ( o ) return orientations
Create groups of an orientation measurement dataset
52,953
def logistic(x: Union[float, np.ndarray], k: float, theta: float) -> Optional[float]:
    r"""Standard logistic function :math:`1 / (1 + e^{-k (x - \theta)})`.

    Args:
        x: input value(s); scalar or ndarray
        k: steepness parameter
        theta: midpoint (the x value at which the result is 0.5)

    Returns:
        The logistic of x, or None if any argument is None.
    """
    # BUG FIX: the original body began with a bare name `r` (a leaked
    # raw-docstring prefix), which raised NameError at call time.
    if x is None or k is None or theta is None:
        return None
    return 1 / (1 + np.exp(-k * (x - theta)))
Standard logistic function .
52,954
def _build_from_geojson(self, src):
    """Build an RTree index over feature bounding boxes, plus a feature store.

    Returns:
        (data_store, idx): features keyed by insertion order, and an rtree
        index over each feature's geometry bounds.
    """
    geojson = json.loads(self.read(src))
    idx = index.Index()
    data_store = {}
    for i, feature in enumerate(geojson['features']):
        feature = self._build_obj(feature)
        idx.insert(i, feature['geometry'].bounds)
        data_store[i] = feature
    return data_store, idx
Build a RTree index to disk using bounding box of each feature
52,955
def get(self, point, buffer_size=0, multiple=False):
    """Look up the object containing a (lon, lat) point, with geohash caching.

    Coordinates are snapped to a geohash cell, so nearby points share cache
    entries; hit/miss counters are updated as a side effect.
    """
    lon, lat = point
    geohash = Geohash.encode(lat, lon, precision=self.precision)
    key = (geohash, buffer_size, multiple)
    if key in self.geohash_cache:
        self.hit += 1
        return self.geohash_cache[key]
    self.miss += 1
    # Recover the geohash cell's coordinates and project them before lookup.
    lat, lon = Geohash.decode(geohash)
    proj_point = project([float(lon), float(lat)])
    args = dict(buffer_size=buffer_size, multiple=multiple)
    payload = self.get_object(proj_point, **args)
    self.geohash_cache[key] = payload
    return payload
lookup state and county based on geohash of coordinates from tweet
52,956
def run(in_file_nose, out_dir_unitth):
    """Convert a nose-style test report into per-class UnitTH-style XML files."""
    suites = Converter.read_nose(in_file_nose)
    Converter.write_unitth(suites, out_dir_unitth)
Convert nose - style test reports to UnitTH - style test reports by splitting modules into separate XML files
52,957
def read_nose(in_file):
    """Parse a nose-style XML test report into ``{classname: [case, ...]}``.

    Each case dict has 'name' and 'time', plus optional 'skipped', 'failure'
    and 'error' sub-dicts, each with 'type', 'message' and 'text'.

    Args:
        in_file: filename or file-like object containing the XML report
    """
    def parse_outcome(case_xml, tag):
        # Return {'type','message','text'} for the first <tag> child, or None.
        elements = case_xml.getElementsByTagName(tag)
        if not elements:
            return None
        element = elements[0]
        if element.hasAttribute('type'):
            outcome_type = element.getAttribute('type')
        else:
            outcome_type = ''
        return {
            'type': outcome_type,
            'message': element.getAttribute('message'),
            'text': "".join(child.nodeValue
                            for child in element.childNodes),
        }

    suites = {}
    doc_xml = minidom.parse(in_file)
    suite_xml = doc_xml.getElementsByTagName("testsuite")[0]
    for case_xml in suite_xml.getElementsByTagName('testcase'):
        classname = case_xml.getAttribute('classname')
        case = {
            'name': case_xml.getAttribute('name'),
            'time': float(case_xml.getAttribute('time')),
        }
        # The three outcome elements share identical parsing logic
        # (previously three near-identical copy-pasted blocks).
        for tag in ('skipped', 'failure', 'error'):
            outcome = parse_outcome(case_xml, tag)
            if outcome is not None:
                case[tag] = outcome
        suites.setdefault(classname, []).append(case)
    return suites
Parse nose - style test reports into a dict
52,958
def write_unitth ( suites , out_dir ) : if not os . path . isdir ( out_dir ) : os . mkdir ( out_dir ) for classname , cases in suites . items ( ) : doc_xml = minidom . Document ( ) suite_xml = doc_xml . createElement ( 'testsuite' ) suite_xml . setAttribute ( 'name' , classname ) suite_xml . setAttribute ( 'tests' , str ( len ( cases ) ) ) suite_xml . setAttribute ( 'errors' , str ( sum ( 'error' in case for case in cases ) ) ) suite_xml . setAttribute ( 'failures' , str ( sum ( 'failure' in case for case in cases ) ) ) suite_xml . setAttribute ( 'skipped' , str ( sum ( 'skipped' in case for case in cases ) ) ) suite_xml . setAttribute ( 'time' , '{:.3f}' . format ( sum ( case [ 'time' ] for case in cases ) ) ) doc_xml . appendChild ( suite_xml ) for case in cases : case_xml = doc_xml . createElement ( 'testcase' ) case_xml . setAttribute ( 'classname' , classname ) case_xml . setAttribute ( 'name' , case [ 'name' ] ) case_xml . setAttribute ( 'time' , '{:.3f}' . format ( case [ 'time' ] ) ) suite_xml . appendChild ( case_xml ) if 'skipped' in case : skipped_xml = doc_xml . createElement ( 'skipped' ) skipped_xml . setAttribute ( 'type' , case [ 'skipped' ] [ 'type' ] ) skipped_xml . setAttribute ( 'message' , case [ 'skipped' ] [ 'message' ] ) case_xml . appendChild ( skipped_xml ) skipped_text_xml = doc_xml . createCDATASection ( case [ 'skipped' ] [ 'text' ] ) skipped_xml . appendChild ( skipped_text_xml ) if 'failure' in case : failure_xml = doc_xml . createElement ( 'failure' ) failure_xml . setAttribute ( 'type' , case [ 'failure' ] [ 'type' ] ) failure_xml . setAttribute ( 'message' , case [ 'failure' ] [ 'message' ] ) case_xml . appendChild ( failure_xml ) failure_text_xml = doc_xml . createCDATASection ( case [ 'failure' ] [ 'text' ] ) failure_xml . appendChild ( failure_text_xml ) if 'error' in case : error_xml = doc_xml . createElement ( 'error' ) error_xml . setAttribute ( 'type' , case [ 'error' ] [ 'type' ] ) error_xml . 
setAttribute ( 'message' , case [ 'error' ] [ 'message' ] ) case_xml . appendChild ( error_xml ) error_text_xml = doc_xml . createCDATASection ( case [ 'error' ] [ 'text' ] ) error_xml . appendChild ( error_text_xml ) with open ( os . path . join ( out_dir , '{}.xml' . format ( classname ) ) , 'w' ) as output : doc_xml . writexml ( output , encoding = 'utf-8' , addindent = '' , newl = "" ) doc_xml . unlink ( )
Write UnitTH - style test reports
52,959
def error_asymptotes(pca, **kwargs):
    """Plot the asymptotic error bounds of a hyperbolic fit on a stereonet.

    Draws the upper and lower error planes plus the nominal plane itself.
    """
    ax = kwargs.pop("ax", current_axes())
    lon, lat = pca.plane_errors('upper', n=1000)
    ax.plot(lon, lat, '-')
    lon, lat = pca.plane_errors('lower', n=1000)
    ax.plot(lon, lat, '-')
    ax.plane(*pca.strike_dip())
Plots asymptotic error bounds for hyperbola on a stereonet .
52,960
def fetch_all_first_values(session: Session, select_statement: Select) -> List[Any]:
    """Return the first column of every row produced by a SELECT statement.

    Raises:
        MultipleResultsFound: if a row cannot be unpacked as expected.
    """
    rows = session.execute(select_statement)
    try:
        return [row[0] for row in rows]
    except ValueError as e:
        raise MultipleResultsFound(str(e))
Returns a list of the first values in each row returned by a SELECT query .
52,961
def hyperbola(axes, **kwargs):
    """Return (x, y) arrays tracing a hyperbola, NaN-masked at its asymptotes.

    Opens along the y axis by default; pass opens_up=False to swap branches.
    Keyword args: opens_up, center, n (number of samples).
    """
    opens_up = kwargs.pop('opens_up', True)
    center = kwargs.pop('center', defaults['center'])
    th = N.linspace(0, 2 * N.pi, kwargs.pop('n', 500))
    # Parametric hyperbola: (tan t, sec t); swapping opens it the other way.
    vals = [N.tan(th), 1 / N.cos(th)]
    if not opens_up:
        vals = vals[::-1]
    x = axes[0] * vals[0] + center[0]
    y = axes[1] * vals[1] + center[1]
    extrema = [N.argmin(x), N.argmax(x)]

    def remove_asymptotes(arr):
        # Break the curve where the parametrization jumps across an asymptote.
        arr[extrema] = N.nan
        return arr
    xy = tuple(remove_asymptotes(i) for i in (x, y))
    return xy
Plots a hyperbola that opens along y axis
52,962
def __reverse_ellipse(axes, scalar=1):
    """Swap the ellipse axes and compute a shifted center (alternate formulation).

    NOTE: per the original author, this formulation doesn't work as well.
    """
    swapped_axes = axes.copy()[::-1] * scalar
    shifted_center = swapped_axes[1] * N.sqrt(2) * scalar
    return swapped_axes, shifted_center
This method doesn't work as well .
52,963
def if_sqlserver_disable_constraints(session: SqlASession,
                                     tablename: str) -> None:
    """
    Context-manager generator: under SQL Server, disable constraint checking
    on *tablename* while the resource is held; no-op on other dialects.
    """
    engine = get_engine_from_session(session)
    if not is_sqlserver(engine):
        # Not SQL Server: nothing to disable.
        yield
        return
    quoted = quote_identifier(tablename, engine)
    session.execute(
        "ALTER TABLE {} NOCHECK CONSTRAINT all".format(quoted))
    yield
    session.execute(
        "ALTER TABLE {} WITH CHECK CHECK CONSTRAINT all".format(quoted))
If we re running under SQL Server disable constraint checking for the specified table while the resource is held .
52,964
def if_sqlserver_disable_constraints_triggers(session: SqlASession,
                                              tablename: str) -> None:
    """
    Context-manager generator: under SQL Server, disable both constraints
    and triggers on *tablename* while the resource is held.
    """
    with if_sqlserver_disable_constraints(session, tablename), \
            if_sqlserver_disable_triggers(session, tablename):
        yield
If we re running under SQL Server disable triggers AND constraints for the specified table while the resource is held .
52,965
def get_current_revision(
        database_url: str,
        version_table: str = DEFAULT_ALEMBIC_VERSION_TABLE) -> str:
    """
    Ask the database what its current Alembic revision is.

    Args:
        database_url: SQLAlchemy URL of the database.
        version_table: name of the Alembic version table.

    Returns:
        The current revision identifier (or None if never stamped).
    """
    engine = create_engine(database_url)
    conn = engine.connect()
    try:
        opts = {'version_table': version_table}
        mig_context = MigrationContext.configure(conn, opts=opts)
        return mig_context.get_current_revision()
    finally:
        # BUG FIX: the original never closed the connection, leaking it
        # (and its pooled resources) on every call.
        conn.close()
Ask the database what its current revision is .
52,966
def upgrade_database(alembic_config_filename: str,
                     alembic_base_dir: str = None,
                     starting_revision: str = None,
                     destination_revision: str = "head",
                     version_table: str = DEFAULT_ALEMBIC_VERSION_TABLE,
                     as_sql: bool = False) -> None:
    """
    Use Alembic to upgrade the database to a given revision.

    Args:
        alembic_config_filename: path to the alembic.ini file
        alembic_base_dir: directory to chdir into first (defaults to the
            directory containing the config file)
        starting_revision: revision to start from (None = autodetect)
        destination_revision: revision to upgrade to (default "head")
        version_table: name of the Alembic version table
        as_sql: emit SQL instead of executing the migration
    """
    if alembic_base_dir is None:
        alembic_base_dir = os.path.dirname(alembic_config_filename)
    # Alembic resolves its script paths relative to the current directory.
    # NOTE(review): the working directory is changed and never restored.
    os.chdir(alembic_base_dir)
    config = Config(alembic_config_filename)
    script = ScriptDirectory.from_config(config)

    def upgrade(rev, context):
        # Callback for EnvironmentContext: compute the upgrade revision path.
        return script._upgrade_revs(destination_revision, rev)

    log.info("Upgrading database to revision {!r} using Alembic",
             destination_revision)

    with EnvironmentContext(config,
                            script,
                            fn=upgrade,
                            as_sql=as_sql,
                            starting_rev=starting_revision,
                            destination_rev=destination_revision,
                            tag=None,
                            version_table=version_table):
        script.run_env()

    log.info("Database upgrade completed")
Use Alembic to upgrade our database .
52,967
def stamp_allowing_unusual_version_table(
        config: Config,
        revision: str,
        sql: bool = False,
        tag: str = None,
        version_table: str = DEFAULT_ALEMBIC_VERSION_TABLE) -> None:
    """
    Stamp the Alembic version table with the given revision without running
    any migrations (like ``alembic.command.stamp()``, but allowing a custom
    version table name).

    Args:
        config: Alembic Config object
        revision: revision to stamp; may be "start:end" when sql=True
        sql: emit SQL rather than executing against the database
        tag: arbitrary tag passed through to the environment
        version_table: name of the Alembic version table

    Raises:
        CommandError: if a revision range is given without sql=True.
    """
    script = ScriptDirectory.from_config(config)
    starting_rev = None
    if ":" in revision:
        # A revision range is only meaningful for offline SQL output.
        if not sql:
            raise CommandError("Range revision not allowed")
        # NOTE(review): maxsplit=2 mirrors upstream Alembic's stamp();
        # an input like "a:b:c" would raise ValueError here -- confirm
        # that is the intended failure mode.
        starting_rev, revision = revision.split(':', 2)

    def do_stamp(rev: str, context):
        # Callback for EnvironmentContext: compute the stamp revisions.
        return script._stamp_revs(revision, rev)

    with EnvironmentContext(config,
                            script,
                            fn=do_stamp,
                            as_sql=sql,
                            destination_rev=revision,
                            starting_rev=starting_rev,
                            tag=tag,
                            version_table=version_table):
        script.run_env()
Stamps the Alembic version table with the given revision ; don t run any migrations .
52,968
def get_external_command_output(command: str) -> bytes:
    """
    Run a command line (split shell-style) and return its stdout as bytes.

    Raises:
        subprocess.CalledProcessError: if the command exits non-zero.
    """
    return subprocess.check_output(shlex.split(command))
Takes a command - line command executes it and returns its stdout output .
52,969
def get_pipe_series_output(commands: Sequence[str],
                           stdinput: BinaryIO = None) -> bytes:
    """
    Run a series of commands piped into one another and return the final
    command's stdout.

    Args:
        commands: command lines, each split shell-style; the stdout of each
            feeds the stdin of the next.
        stdinput: optional bytes fed to the first command's stdin.

    Returns:
        The stdout of the last command, as bytes.
    """
    processes = []
    for i, command in enumerate(commands):
        stdin = subprocess.PIPE if i == 0 else processes[i - 1].stdout
        proc = subprocess.Popen(shlex.split(command),
                                stdin=stdin,
                                stdout=subprocess.PIPE)
        if i > 0:
            # BUG FIX: close the parent's handle on the previous stage's
            # stdout so that stage receives SIGPIPE if the downstream
            # process exits early (standard subprocess pipeline idiom).
            processes[i - 1].stdout.close()
        processes.append(proc)
    return processes[-1].communicate(stdinput)[0]
Get the output from a piped series of commands .
52,970
def launch_external_file(filename: str, raise_if_fails: bool = False) -> None:
    """
    Launch a file with the operating system's standard launcher:
    ``xdg-open`` on Linux, ``os.startfile`` otherwise (Windows).

    Args:
        filename: path of the file to open
        raise_if_fails: re-raise any launch exception after logging it
    """
    log.info("Launching external file: {!r}", filename)
    try:
        if sys.platform.startswith('linux'):
            cmdargs = ["xdg-open", filename]
            subprocess.call(cmdargs)
        else:
            # os.startfile exists only on Windows.
            os.startfile(filename)
    except Exception as e:
        log.critical("Error launching {!r}: error was {}.\n\n{}",
                     filename, str(e), traceback.format_exc())
        if raise_if_fails:
            raise
Launches a file using the operating system s standard launcher .
52,971
def make_mysql_url(username: str, password: str, dbname: str,
                   driver: str = "mysqldb", host: str = "localhost",
                   port: int = 3306, charset: str = "utf8") -> str:
    """
    Build an SQLAlchemy URL for a MySQL database.

    NOTE(review): the username/password are interpolated verbatim; values
    containing URL-special characters would need escaping -- confirm callers
    never pass such values.
    """
    template = "mysql+{driver}://{u}:{p}@{host}:{port}/{db}?charset={cs}"
    return template.format(driver=driver,
                           u=username,
                           p=password,
                           host=host,
                           port=port,
                           db=dbname,
                           cs=charset)
Makes an SQLAlchemy URL for a MySQL database .
52,972
def make_sqlite_url(filename: str) -> str:
    """
    Build an SQLAlchemy URL for a SQLite database file.

    The filename is made absolute; with an empty host the result has the
    ``sqlite:////absolute/path`` form on POSIX systems.
    """
    return "sqlite://{host}/{path}".format(
        host="",
        path=os.path.abspath(filename))
Makes an SQLAlchemy URL for a SQLite database .
52,973
def atoi(text: str) -> Union[int, str]:
    """
    Convert *text* to int if it is all digits; otherwise return it unchanged.

    Useful as a natural-sort key component (so "11" sorts after "2").
    """
    if text.isdigit():
        return int(text)
    return text
Converts strings to integers if they re composed of digits ; otherwise returns the strings unchanged . One way of sorting strings with numbers ; it will mean that 11 is more than 2 .
52,974
def _get_pretty_body(headers, body):
    """
    Return a pretty-printed version of *body* based on the Content-Type
    header (XML or JSON); fall back to the raw body on any failure.
    """
    # BUG FIX: the original ended with "finally: return body", which
    # unconditionally overrode the pretty-printed return values inside the
    # try block, so the raw body was ALWAYS returned. The fallback is now a
    # plain return reached only when pretty-printing does not apply/fails.
    try:
        if CONTENT_TYPE_HEADER_NAME in headers:
            content_type = headers[CONTENT_TYPE_HEADER_NAME]
            if content_type == XMLRenderer.DEFAULT_CONTENT_TYPE:
                return parseString(body).toprettyxml()
            elif content_type == JSONRenderer.DEFAULT_CONTENT_TYPE:
                parsed = json.loads(body.decode('utf-8'))
                return json.dumps(parsed, sort_keys=True, indent=4)
    except Exception:
        # Narrowed from a bare "except:"; any parse/decode failure simply
        # falls back to the raw body.
        pass
    return body
Return a pretty printed body using the Content - Type header information .
52,975
def log_print_request(method, url, query_params=None, headers=None, body=None):
    """
    Log HTTP request data in a user-friendly representation (DEBUG level).

    Args:
        method: HTTP method
        url: request URL
        query_params: optional query-parameter mapping
        headers: optional header mapping
        body: optional request payload (pretty-printed when possible)
    """
    log_msg = '\n>>>>>>>>>>>>>>>>>>>>> Request >>>>>>>>>>>>>>>>>>> \n'
    log_msg += '\t> Method: %s\n' % method
    log_msg += '\t> Url: %s\n' % url
    if query_params is not None:
        log_msg += '\t> Query params: {}\n'.format(str(query_params))
    if headers is not None:
        log_msg += '\t> Headers:\n{}\n'.format(
            json.dumps(dict(headers), sort_keys=True, indent=4))
    if body is not None:
        try:
            log_msg += '\t> Payload sent:\n{}\n'.format(
                _get_pretty_body(headers, body))
        except Exception:
            # FIX: narrowed from bare "except:" (which also swallowed
            # SystemExit/KeyboardInterrupt); also fixed typo "could't".
            log_msg += "\t> Payload couldn't be formatted"
    logger.debug(log_msg)
Log an HTTP request data in a user - friendly representation .
52,976
def log_print_response(status_code, response, headers=None):
    """
    Log HTTP response data in a user-friendly representation (DEBUG level).

    Args:
        status_code: HTTP status code
        response: response payload (pretty-printed when possible)
        headers: optional header mapping
    """
    log_msg = '\n<<<<<<<<<<<<<<<<<<<<<< Response <<<<<<<<<<<<<<<<<<\n'
    log_msg += '\t< Response code: {}\n'.format(str(status_code))
    if headers is not None:
        log_msg += '\t< Headers:\n{}\n'.format(
            json.dumps(dict(headers), sort_keys=True, indent=4))
    try:
        log_msg += '\t< Payload received:\n{}'.format(
            _get_pretty_body(headers, response))
    except Exception:
        # FIX: narrowed from a bare "except:"; fall back to the raw payload.
        log_msg += '\t< Payload received:\n{}'.format(response)
    logger.debug(log_msg)
Log an HTTP response data in a user - friendly representation .
52,977
def args_kwargs_to_initdict(args: ArgsList, kwargs: KwargsDict) -> InitDict:
    """Package positional and keyword arguments into an InitDict."""
    initdict = dict()
    initdict[ARGS_LABEL] = args
    initdict[KWARGS_LABEL] = kwargs
    return initdict
Converts a set of args and kwargs to an InitDict .
52,978
def strip_leading_underscores_from_keys(d: Dict) -> Dict:
    """
    Return a copy of *d* with one leading underscore removed from each key.

    Raises:
        ValueError: if stripping produces a key that already exists.
    """
    result = {}
    for key, value in d.items():
        stripped = key[1:] if key.startswith('_') else key
        if stripped in result:
            raise ValueError("Attribute conflict: _{k}, {k}".format(k=stripped))
        result[stripped] = value
    return result
Clones a dictionary removing leading underscores from key names . Raises ValueError if this causes an attribute conflict .
52,979
def verify_initdict(initdict: InitDict) -> None:
    """
    Validate that *initdict* is a proper InitDict (a dict containing both
    the args and kwargs labels); raise ValueError otherwise.
    """
    if not isinstance(initdict, dict):
        raise ValueError("Not an InitDict dictionary")
    if ARGS_LABEL not in initdict or KWARGS_LABEL not in initdict:
        raise ValueError("Not an InitDict dictionary")
Ensures that its parameter is a proper InitDict or raises ValueError .
52,980
def register_class_for_json(
        cls: ClassType,
        method: str = METHOD_SIMPLE,
        obj_to_dict_fn: InstanceToDictFnType = None,
        dict_to_obj_fn: DictToInstanceFnType = initdict_to_instance,
        default_factory: DefaultFactoryFnType = None) -> None:
    """
    Register *cls* in the global JSON type map, keyed by its qualified name.

    If both conversion functions are supplied they are used directly;
    otherwise *method* selects a standard conversion pair.

    Raises:
        ValueError: if *method* is unknown and the functions are not both
            given.
    """
    typename = cls.__qualname__
    if obj_to_dict_fn and dict_to_obj_fn:
        to_dict, to_obj = obj_to_dict_fn, dict_to_obj_fn
    elif method == METHOD_SIMPLE:
        to_dict, to_obj = instance_to_initdict_simple, initdict_to_instance
    elif method == METHOD_STRIP_UNDERSCORE:
        to_dict, to_obj = (instance_to_initdict_stripping_underscores,
                           initdict_to_instance)
    else:
        raise ValueError("Unknown method, and functions not fully specified")
    TYPE_MAP[typename] = JsonDescriptor(typename=typename,
                                        obj_to_dict_fn=to_dict,
                                        dict_to_obj_fn=to_obj,
                                        cls=cls,
                                        default_factory=default_factory)
Registers the class cls for JSON serialization .
52,981
def register_for_json(*args, **kwargs) -> Any:
    """
    Class decorator registering classes with the JSON system.

    Usable both bare (``@register_for_json``) and parametrized
    (``@register_for_json(method=..., ...)``). Recognized keyword arguments:
    ``method``, ``obj_to_dict_fn``, ``dict_to_obj_fn``, ``default_factory``,
    ``check_results``.
    """
    if DEBUG:
        print("register_for_json: args = {}".format(repr(args)))
        print("register_for_json: kwargs = {}".format(repr(kwargs)))
    if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
        # Bare usage: @register_for_json -- args[0] is the decorated class.
        if DEBUG:
            print("... called as @register_for_json")
        cls = args[0]
        register_class_for_json(cls, method=METHOD_SIMPLE)
        return cls
    # Parametrized usage: @register_for_json(...) -- return a decorator.
    if DEBUG:
        print("... called as @register_for_json(*args, **kwargs)")
    method = kwargs.pop('method', METHOD_SIMPLE)
    obj_to_dict_fn = kwargs.pop('obj_to_dict_fn', None)
    dict_to_obj_fn = kwargs.pop('dict_to_obj_fn', initdict_to_instance)
    default_factory = kwargs.pop('default_factory', None)
    # NOTE(review): the kwarg is spelled 'check_results' (plural) but stored
    # as check_result -- confirm callers use the plural spelling.
    check_result = kwargs.pop('check_results', True)

    def register_json_class(cls_: ClassType) -> ClassType:
        # The actual decorator: pick conversion functions per 'method',
        # then register the class.
        odf = obj_to_dict_fn
        dof = dict_to_obj_fn
        if method == METHOD_PROVIDES_INIT_ARGS_KWARGS:
            # Class must expose a function returning its __init__ args+kwargs.
            if hasattr(cls_, INIT_ARGS_KWARGS_FN_NAME):
                odf = wrap_args_kwargs_to_initdict(
                    getattr(cls_, INIT_ARGS_KWARGS_FN_NAME),
                    typename=cls_.__qualname__,
                    check_result=check_result)
            else:
                raise ValueError(
                    "Class type {} does not provide function {}".format(
                        cls_, INIT_ARGS_KWARGS_FN_NAME))
        elif method == METHOD_PROVIDES_INIT_KWARGS:
            # Class must expose a function returning its __init__ kwargs.
            if hasattr(cls_, INIT_KWARGS_FN_NAME):
                odf = wrap_kwargs_to_initdict(
                    getattr(cls_, INIT_KWARGS_FN_NAME),
                    typename=cls_.__qualname__,
                    check_result=check_result)
            else:
                raise ValueError(
                    "Class type {} does not provide function {}".format(
                        cls_, INIT_KWARGS_FN_NAME))
        elif method == METHOD_NO_ARGS:
            # Class is constructed with no arguments at all.
            odf = obj_with_no_args_to_init_dict
        register_class_for_json(cls_,
                                method=method,
                                obj_to_dict_fn=odf,
                                dict_to_obj_fn=dof,
                                default_factory=default_factory)
        return cls_

    return register_json_class
Class decorator to register classes with our JSON system .
52,982
def dump_map(file: TextIO = sys.stdout) -> None:
    """Pretty-print the registered JSON type map to *file*."""
    print("Type map: ", file=file)
    pprint.PrettyPrinter(indent=4, stream=file).pprint(TYPE_MAP)
Prints the JSON registered types map to the specified file .
52,983
def json_class_decoder_hook(d: Dict) -> Any:
    """
    json object_hook: convert a dict to a registered Python object when its
    type label matches an entry in TYPE_MAP; otherwise return the dict.

    NOTE: pops the type label from *d* (mutates the input) on a match.
    """
    if TYPE_LABEL not in d:
        return d
    typename = d.get(TYPE_LABEL)
    if typename not in TYPE_MAP:
        return d
    if DEBUG:
        log.debug("Deserializing: {!r}", d)
    d.pop(TYPE_LABEL)
    obj = TYPE_MAP[typename].to_obj(d)
    if DEBUG:
        log.debug("... to: {!r}", obj)
    return obj
Provides a JSON decoder that converts dictionaries to Python objects if suitable methods are found in our TYPE_MAP .
52,984
def json_encode(obj: Instance, **kwargs) -> str:
    """Encode *obj* to JSON using the custom class-aware encoder."""
    encoder_class = JsonClassEncoder
    return json.dumps(obj, cls=encoder_class, **kwargs)
Encodes an object to JSON using our custom encoder .
52,985
def json_decode(s: str) -> Any:
    """
    Decode JSON using the custom class-aware decoder hook.

    Returns:
        The decoded object, or None (with a warning logged) on parse failure.
    """
    decoder = json.JSONDecoder(object_hook=json_class_decoder_hook)
    try:
        return decoder.decode(s)
    except json.JSONDecodeError:
        log.warning("Failed to decode JSON (returning None): {!r}", s)
        return None
Decodes an object from JSON using our custom decoder .
52,986
def dict_to_enum_fn(d: Dict[str, Any], enum_class: Type[Enum]) -> Enum:
    """Rebuild an Enum member from its serialized dict (looked up by 'name')."""
    member_name = d['name']
    return enum_class[member_name]
Converts an dict to a Enum .
52,987
def dict_to_pendulum(d: Dict[str, Any],
                     pendulum_class: ClassType) -> DateTime:
    """
    Rebuild a Pendulum DateTime from its serialized dict ('iso' field).

    The pendulum_class argument is unused; it exists to satisfy the
    registry's dict_to_obj_fn signature.
    """
    iso_string = d['iso']
    return pendulum.parse(iso_string)
Converts a dict object back to a Pendulum .
52,988
def dict_to_pendulumdate(d: Dict[str, Any],
                         pendulumdate_class: ClassType) -> Date:
    """
    Rebuild a pendulum.Date from its serialized dict ('iso' field).

    The pendulumdate_class argument is unused; it exists to satisfy the
    registry's dict_to_obj_fn signature.
    """
    parsed = pendulum.parse(d['iso'])
    return parsed.date()
Converts a dict object back to a pendulum . Date .
52,989
def simple_eq(one: Instance, two: Instance, attrs: List[str]) -> bool:
    """
    True if *one* and *two* have equal values for every attribute named in
    *attrs* (vacuously True for an empty attrs list).
    """
    for attr in attrs:
        if getattr(one, attr) != getattr(two, attr):
            return False
    return True
Test if two objects are equal based on a comparison of the specified attributes attrs .
52,990
def writelines_nl(fileobj: TextIO, lines: Iterable[str]) -> None:
    """Write *lines* joined by newlines, with a terminating newline."""
    joined = '\n'.join(lines)
    fileobj.write(joined + '\n')
Writes lines plus terminating newline characters to the file .
52,991
def write_text(filename: str, text: str) -> None:
    """Write *text* (plus a trailing newline, via print) to *filename*."""
    with open(filename, 'w') as outfile:
        print(text, file=outfile)
Writes text to a file .
52,992
def gen_textfiles_from_filenames(
        filenames: Iterable[str]) -> Generator[TextIO, None, None]:
    """
    Yield an open text-file object for each filename. Each file is held open
    while yielded and closed when the generator resumes or is closed.
    """
    for name in filenames:
        with open(name) as fileobj:
            yield fileobj
Generates file - like objects from a list of filenames .
52,993
def gen_lines_from_textfiles(
        files: Iterable[TextIO]) -> Generator[str, None, None]:
    """Yield every line (newline included) from each file-like object in turn."""
    for fileobj in files:
        yield from fileobj
Generates lines from file - like objects .
52,994
def gen_lines_from_binary_files(
        files: Iterable[BinaryIO],
        encoding: str = UTF8) -> Generator[str, None, None]:
    """
    Decode each line of each binary file and yield it with leading/trailing
    whitespace (including the newline) stripped.
    """
    for binfile in files:
        for raw_line in binfile:
            yield raw_line.decode(encoding).strip()
Generates lines from binary files . Strips out newlines .
52,995
def gen_part_from_line(lines: Iterable[str],
                       part_index: int,
                       splitter: str = None) -> Generator[str, None, None]:
    """
    Split each line on *splitter* (default: any whitespace) and yield the
    part at *part_index*.
    """
    for line in lines:
        yield line.split(splitter)[part_index]
Splits lines with splitter and yields a specified part by index .
52,996
def gen_rows_from_csv_binfiles(
        csv_files: Iterable[BinaryIO],
        encoding: str = UTF8,
        skip_header: bool = False,
        **csv_reader_kwargs) -> Generator[Iterable[str], None, None]:
    """
    Yield CSV rows from each binary file-like object in turn.

    Args:
        csv_files: iterable of binary file-like objects
        encoding: text encoding used to wrap each binary stream
        skip_header: skip the first row of EACH file
        **csv_reader_kwargs: passed to csv.reader; a 'dialect' entry, if
            present, is used for every file, otherwise the dialect is
            sniffed from the first 1024 characters of each file
    """
    forced_dialect = csv_reader_kwargs.pop('dialect', None)
    for binfile in csv_files:
        textfile = io.TextIOWrapper(binfile, encoding=encoding)
        dialect = forced_dialect
        if dialect is None:
            dialect = csv.Sniffer().sniff(textfile.read(1024))
            textfile.seek(0)
        reader = csv.reader(textfile, dialect=dialect, **csv_reader_kwargs)
        for rownum, row in enumerate(reader):
            if rownum == 0 and skip_header:
                continue
            yield row
Iterate through binary file - like objects that are CSV files in a specified encoding . Yield each row .
52,997
def webify_file(srcfilename: str, destfilename: str) -> None:
    """
    Copy a file from srcfilename to destfilename, HTML-escaping every line.
    """
    with open(srcfilename) as infile, open(destfilename, 'w') as outfile:
        for line in infile:
            outfile.write(escape(line))
Rewrites a file from srcfilename to destfilename HTML - escaping it in the process .
52,998
def replace_in_file(filename: str, text_from: str, text_to: str) -> None:
    """
    Replace every occurrence of text_from with text_to in *filename*
    (read whole file, substitute, rewrite in place).
    """
    log.info("Amending {}: {} -> {}",
             filename, repr(text_from), repr(text_to))
    with open(filename) as infile:
        contents = infile.read()
    with open(filename, 'w') as outfile:
        outfile.write(contents.replace(text_from, text_to))
Replaces text in a file .
52,999
def is_line_in_file(filename: str, line: str) -> bool:
    """
    Report whether *line* occurs as a whole line within the file.

    Args:
        filename: file to search
        line: line content to look for (must not contain a newline)
    """
    assert "\n" not in line
    with open(filename, "r") as file:
        for fileline in file:
            # BUG FIX: lines read from a file keep their trailing newline,
            # while *line* is asserted newline-free, so the original
            # comparison (fileline == line) could only ever match a final
            # line lacking a newline terminator. Strip it before comparing.
            if fileline.rstrip("\n") == line:
                return True
    return False
Detects whether a line is present within a file .