idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
53,300 | def validate_pair ( ob : Any ) -> bool : try : if len ( ob ) != 2 : log . warning ( "Unexpected result: {!r}" , ob ) raise ValueError ( ) except ValueError : return False return True | Does the object have length 2? |
53,301 | def clean_message ( message : Message , topmost : bool = False ) -> Message : if message . is_multipart ( ) : if message . get_content_type ( ) != 'message/external-body' : parts = message . get_payload ( ) parts [ : ] = map ( clean_message , parts ) elif message_is_binary ( message ) : if not topmost : message = gut_m... | Clean a message of all its binary parts . |
53,302 | def is_password_valid ( plaintextpw : str , storedhash : str ) -> bool : if storedhash is None : storedhash = "" storedhash = str ( storedhash ) if plaintextpw is None : plaintextpw = "" plaintextpw = str ( plaintextpw ) try : h = bcrypt . hashpw ( plaintextpw , storedhash ) except ValueError : return False return h ==... | Checks if a plaintext password matches a stored hash . |
53,303 | def version ( self ) : try : f = self . func . __call__ . __code__ except AttributeError : f = self . func . __code__ h = md5 ( ) h . update ( f . co_code ) h . update ( str ( f . co_names ) . encode ( ) ) try : closure = self . func . __closure__ except AttributeError : return h . hexdigest ( ) if closure is None or s... | Compute the version identifier for this functional node using the func code and local names . Optionally also allow closed - over variable values to affect the version number when closure_fingerprint is specified |
53,304 | def mapper_init ( self ) : self . counties = CachedCountyLookup ( precision = GEOHASH_PRECISION ) self . extractor = WordExtractor ( ) | Download counties geojson from S3 and build spatial index and cache |
53,305 | def run ( self ) : logging . info ( "Starting GeoJSON MongoDB loading process." ) mongo = dict ( uri = self . mongo , db = self . db , collection = self . collection ) self . load ( self . source , ** mongo ) logging . info ( "Finished loading {0} into MongoDB" . format ( self . source ) ) | Top level runner to load State and County GeoJSON files into Mongo DB |
53,306 | def load ( self , geojson , uri = None , db = None , collection = None ) : logging . info ( "Mongo URI: {0}" . format ( uri ) ) logging . info ( "Mongo DB: {0}" . format ( db ) ) logging . info ( "Mongo Collection: {0}" . format ( collection ) ) logging . info ( "Geojson File to be loaded: {0}" . format ( geojson ) ) m... | Load geojson file into mongodb instance |
53,307 | def get_version ( ) : if all ( [ VERSION , UPDATED , any ( [ isinstance ( UPDATED , date ) , isinstance ( UPDATED , datetime ) , ] ) , ] ) : return FORMAT_STRING . format ( ** { "version" : VERSION , "updated" : UPDATED , } ) elif VERSION : return VERSION elif UPDATED : return localize ( UPDATED ) if any ( [ isinstance... | Return formatted version string . |
53,308 | def process_file ( filename : str , filetypes : List [ str ] , move_to : str , delete_if_not_specified_file_type : bool , show_zip_output : bool ) -> None : try : reader = CorruptedOpenXmlReader ( filename , show_zip_output = show_zip_output ) if reader . file_type in filetypes : log . info ( "Found {}: {}" , reader . ... | Deals with an OpenXML including if it is potentially corrupted . |
53,309 | def should_build ( self , fpath , meta ) : if meta . get ( 'layout' , self . default_template ) in self . inc_layout : if self . prev_mtime . get ( fpath , 0 ) == os . path . getmtime ( fpath ) : return False else : return True return True | Checks if the file should be built or not Only skips layouts which are tagged as INCREMENTAL Rebuilds only those files with mtime changed since previous build |
53,310 | def clusterQueues ( self ) : servers = yield self . getClusterServers ( ) queues = { } for sname in servers : qs = yield self . get ( 'rhumba.server.%s.queues' % sname ) uuid = yield self . get ( 'rhumba.server.%s.uuid' % sname ) qs = json . loads ( qs ) for q in qs : if q not in queues : queues [ q ] = [ ] queues [ q ... | Return a dict of queues in cluster and servers running them |
53,311 | def close ( self ) : def validate_client ( client ) : host , port = client . addr parsed_url = urlparse ( self . _hostname ) return host == parsed_url . hostname and port == parsed_url . port def _check_fds ( _ ) : fds = set ( reactor . getReaders ( ) + reactor . getReaders ( ) ) if not [ fd for fd in fds if isinstance... | close all http connections . returns a deferred that fires once they re all closed . |
53,312 | def extract_line ( geom , dem , ** kwargs ) : kwargs . setdefault ( 'masked' , True ) coords_in = coords_array ( geom ) f = lambda * x : ~ dem . transform * x px = transform ( f , geom ) interval = kwargs . pop ( 'subdivide' , 1 ) if interval is not None : px = subdivide ( px , interval = interval ) f = lambda * x : de... | Extract a linear feature from a rasterio geospatial dataset . |
53,313 | def fork ( executable , args = ( ) , env = { } , path = None , timeout = 3600 ) : d = defer . Deferred ( ) p = ProcessProtocol ( d , timeout ) reactor . spawnProcess ( p , executable , ( executable , ) + tuple ( args ) , env , path ) return d | fork Provides a deferred wrapper function with a timeout function |
53,314 | def number_to_dp ( number : Optional [ float ] , dp : int , default : Optional [ str ] = "" , en_dash_for_minus : bool = True ) -> str : if number is None : return default if number == float ( "inf" ) : return u"∞" if number == float ( "-inf" ) : s = u"-∞" else : s = u"{:.{precision}f}" . format ( number , precision = ... | Format number to dp decimal places optionally using a UTF - 8 en dash for minus signs . |
53,315 | def debug_form_contents ( form : cgi . FieldStorage , to_stderr : bool = True , to_logger : bool = False ) -> None : for k in form . keys ( ) : text = "{0} = {1}" . format ( k , form . getvalue ( k ) ) if to_stderr : sys . stderr . write ( text ) if to_logger : log . info ( text ) | Writes the keys and values of a CGI form to stderr . |
53,316 | def cgi_method_is_post ( environ : Dict [ str , str ] ) -> bool : method = environ . get ( "REQUEST_METHOD" , None ) if not method : return False return method . upper ( ) == "POST" | Determines if the CGI method was POST given the CGI environment . |
53,317 | def get_cgi_parameter_str_or_none ( form : cgi . FieldStorage , key : str ) -> Optional [ str ] : s = get_cgi_parameter_str ( form , key ) if s is None or len ( s ) == 0 : return None return s | Extracts a string parameter from a CGI form or None if the key doesn t exist or the string is zero - length . |
53,318 | def get_cgi_parameter_list ( form : cgi . FieldStorage , key : str ) -> List [ str ] : return form . getlist ( key ) | Extracts a list of values all with the same key from a CGI form . |
53,319 | def get_cgi_parameter_bool ( form : cgi . FieldStorage , key : str ) -> bool : return is_1 ( get_cgi_parameter_str ( form , key ) ) | Extracts a boolean parameter from a CGI form on the assumption that 1 is True and everything else is False . |
53,320 | def get_cgi_parameter_int ( form : cgi . FieldStorage , key : str ) -> Optional [ int ] : return get_int_or_none ( get_cgi_parameter_str ( form , key ) ) | Extracts an integer parameter from a CGI form or None if the key is absent or the string value is not convertible to int . |
53,321 | def get_cgi_parameter_float ( form : cgi . FieldStorage , key : str ) -> Optional [ float ] : return get_float_or_none ( get_cgi_parameter_str ( form , key ) ) | Extracts a float parameter from a CGI form or None if the key is absent or the string value is not convertible to float . |
53,322 | def get_cgi_parameter_file ( form : cgi . FieldStorage , key : str ) -> Optional [ bytes ] : ( filename , filecontents ) = get_cgi_parameter_filename_and_file ( form , key ) return filecontents | Extracts a file s contents from a file input in a CGI form or None if no such file was uploaded . |
53,323 | def cgi_parameter_exists ( form : cgi . FieldStorage , key : str ) -> bool : s = get_cgi_parameter_str ( form , key ) return s is not None | Does a CGI form contain the key? |
53,324 | def getenv_escaped ( key : str , default : str = None ) -> Optional [ str ] : value = os . getenv ( key , default ) return cgi . escape ( value ) if value is not None else None | Returns an environment variable s value CGI - escaped or None . |
53,325 | def get_png_data_url ( blob : Optional [ bytes ] ) -> str : return BASE64_PNG_URL_PREFIX + base64 . b64encode ( blob ) . decode ( 'ascii' ) | Converts a PNG blob into a local URL encapsulating the PNG . |
53,326 | def print_result_for_plain_cgi_script_from_tuple ( contenttype_headers_content : WSGI_TUPLE_TYPE , status : str = '200 OK' ) -> None : contenttype , headers , content = contenttype_headers_content print_result_for_plain_cgi_script ( contenttype , headers , content , status ) | Writes HTTP result to stdout . |
53,327 | def print_result_for_plain_cgi_script ( contenttype : str , headers : TYPE_WSGI_RESPONSE_HEADERS , content : bytes , status : str = '200 OK' ) -> None : headers = [ ( "Status" , status ) , ( "Content-Type" , contenttype ) , ( "Content-Length" , str ( len ( content ) ) ) , ] + headers sys . stdout . write ( "\n" . join ... | Writes HTTP request result to stdout . |
53,328 | def wsgi_simple_responder ( result : Union [ str , bytes ] , handler : Callable [ [ Union [ str , bytes ] ] , WSGI_TUPLE_TYPE ] , start_response : TYPE_WSGI_START_RESPONSE , status : str = '200 OK' , extraheaders : TYPE_WSGI_RESPONSE_HEADERS = None ) -> TYPE_WSGI_APP_RESULT : extraheaders = extraheaders or [ ] ( conten... | Simple WSGI app . |
53,329 | def bold_if_not_blank ( x : Optional [ str ] ) -> str : if x is None : return u"{}" . format ( x ) return u"<b>{}</b>" . format ( x ) | HTML - emboldens content unless blank . |
53,330 | def make_urls_hyperlinks ( text : str ) -> str : find_url = r replace_url = r'<a href="\1">\1</a>' find_email = re . compile ( r'([.\w\-]+@(\w[\w\-]+\.)+[\w\-]+)' ) replace_email = r'<a href="mailto:\1">\1</a>' text = re . sub ( find_url , replace_url , text ) text = re . sub ( find_email , replace_email , text ) retur... | Adds hyperlinks to text that appears to contain URLs . |
53,331 | def rst_underline ( heading : str , underline_char : str ) -> str : assert "\n" not in heading assert len ( underline_char ) == 1 return heading + "\n" + ( underline_char * len ( heading ) ) | Underlines a heading for RST files . |
53,332 | def write_if_allowed ( filename : str , content : str , overwrite : bool = False , mock : bool = False ) -> None : if not overwrite and exists ( filename ) : fail ( "File exists, not overwriting: {!r}" . format ( filename ) ) directory = dirname ( filename ) if not mock : mkdir_p ( directory ) log . info ( "Writing to ... | Writes the contents to a file if permitted . |
53,333 | def rst_filename_rel_autodoc_index ( self , index_filename : str ) -> str : index_dir = dirname ( abspath ( expanduser ( index_filename ) ) ) return relpath ( self . target_rst_filename , start = index_dir ) | Returns the filename of the target RST file relative to a specified index file . Used to make the index refer to the RST . |
53,334 | def python_module_name ( self ) -> str : if not self . is_python : return "" filepath = self . source_filename_rel_python_root dirs_and_base = splitext ( filepath ) [ 0 ] dir_and_file_parts = dirs_and_base . split ( sep ) return "." . join ( dir_and_file_parts ) | Returns the name of the Python module that this instance refers to in dotted Python module notation or a blank string if it doesn t . |
53,335 | def write_rst ( self , prefix : str = "" , suffix : str = "" , heading_underline_char : str = "=" , method : AutodocMethod = None , overwrite : bool = False , mock : bool = False ) -> None : content = self . rst_content ( prefix = prefix , suffix = suffix , heading_underline_char = heading_underline_char , method = met... | Writes the RST file to our destination RST filename making any necessary directories . |
53,336 | def add_source_files ( self , source_filenames_or_globs : Union [ str , List [ str ] ] , method : AutodocMethod = None , recursive : bool = None , source_rst_title_style_python : bool = None , pygments_language_override : Dict [ str , str ] = None ) -> None : if not source_filenames_or_globs : return if method is None ... | Adds source files to the index . |
53,337 | def filename_matches_glob ( filename : str , globtext : str ) -> bool : if fnmatch ( filename , globtext ) : log . debug ( "{!r} matches {!r}" , filename , globtext ) return True bname = basename ( filename ) if fnmatch ( bname , globtext ) : log . debug ( "{!r} matches {!r}" , bname , globtext ) return True return Fal... | The glob . glob function doesn t do exclusion very well . We don t want to have to specify root directories for exclusion patterns . We don t want to have to trawl a massive set of files to find exclusion files . So let s implement a glob match . |
53,338 | def should_exclude ( self , filename ) -> bool : for skip_glob in self . skip_globs : if self . filename_matches_glob ( filename , skip_glob ) : return True return False | Should we exclude this file from consideration? |
53,339 | def specific_file_rst_filename ( self , source_filename : str ) -> str : highest_code_to_target = relative_filename_within_dir ( source_filename , self . highest_code_dir ) bname = basename ( source_filename ) result = join ( self . autodoc_rst_root_dir , dirname ( highest_code_to_target ) , bname + EXT_RST ) log . deb... | Gets the RST filename corresponding to a source filename . See the help for the constructor for more details . |
53,340 | def write_index_and_rst_files ( self , overwrite : bool = False , mock : bool = False ) -> None : for f in self . files_to_index : if isinstance ( f , FileToAutodocument ) : f . write_rst ( prefix = self . rst_prefix , suffix = self . rst_suffix , heading_underline_char = self . source_rst_heading_underline_char , over... | Writes both the individual RST files and the index . |
53,341 | def write_index ( self , overwrite : bool = False , mock : bool = False ) -> None : write_if_allowed ( self . index_filename , self . index_content ( ) , overwrite = overwrite , mock = mock ) | Writes the index file if permitted . |
53,342 | def basic_animation ( frames = 100 , interval = 30 ) : fig = plt . figure ( ) ax = plt . axes ( xlim = ( 0 , 10 ) , ylim = ( - 2 , 2 ) ) line , = ax . plot ( [ ] , [ ] , lw = 2 ) x = np . linspace ( 0 , 10 , 1000 ) def init ( ) : line . set_data ( [ ] , [ ] ) return line , def animate ( i ) : y = np . cos ( i * 0.02 * ... | Plot a basic sine wave with oscillating amplitude |
53,343 | def lorenz_animation ( N_trajectories = 20 , rseed = 1 , frames = 200 , interval = 30 ) : from scipy import integrate from mpl_toolkits . mplot3d import Axes3D from matplotlib . colors import cnames def lorentz_deriv ( coords , t0 , sigma = 10. , beta = 8. / 3 , rho = 28.0 ) : x , y , z = coords return [ sigma * ( y - ... | Plot a 3D visualization of the dynamics of the Lorenz system |
53,344 | def _included_frames ( frame_list , frame_format ) : return INCLUDED_FRAMES . format ( Nframes = len ( frame_list ) , frame_dir = os . path . dirname ( frame_list [ 0 ] ) , frame_format = frame_format ) | frame_list should be a list of filenames |
53,345 | def _embedded_frames ( frame_list , frame_format ) : template = ' frames[{0}] = "data:image/{1};base64,{2}"\n' embedded = "\n" for i , frame_data in enumerate ( frame_list ) : embedded += template . format ( i , frame_format , frame_data . replace ( '\n' , '\\\n' ) ) return embedded | frame_list should be a list of base64 - encoded png files |
53,346 | def remove_noise ( time , magnitude , error , error_limit = 3 , std_limit = 5 ) : data , mjd = magnitude , time data_len = len ( mjd ) error_mean = np . mean ( error ) error_tolerance = error_limit * ( error_mean or 1 ) data_mean = np . mean ( data ) data_std = np . std ( data ) mjd_out , data_out , error_out = [ ] , [... | Points within std_limit standard deviations from the mean and with errors greater than error_limit times the error mean are considered as noise and thus are eliminated . |
53,347 | def align ( time , time2 , magnitude , magnitude2 , error , error2 ) : error = np . zeros ( time . shape ) if error is None else error error2 = np . zeros ( time2 . shape ) if error2 is None else error2 sserie = pd . DataFrame ( { "mag" : magnitude , "error" : error } , index = time ) lserie = pd . DataFrame ( { "mag" ... | Synchronizes the light - curves in the two different bands . |
53,348 | def load_OGLE3_catalog ( ) : with bz2 . BZ2File ( CATALOG_PATH ) as bz2fp , warnings . catch_warnings ( ) : warnings . simplefilter ( "ignore" ) df = pd . read_table ( bz2fp , skiprows = 6 ) df . rename ( columns = { "# ID" : "ID" } , inplace = True ) return df | Return the full list of variables stars of OGLE - 3 as a DataFrame |
53,349 | def fetch_OGLE3 ( ogle3_id , data_home = None , metadata = None , download_if_missing = True ) : store_path = _get_OGLE3_data_home ( data_home ) file_path = os . path . join ( store_path , "{}.tar" . format ( ogle3_id ) ) members = { "I" : "./{}.I.dat" . format ( ogle3_id ) , "V" : "./{}.V.dat" . format ( ogle3_id ) } ... | Retrieve a light curve from OGLE - 3 database |
53,350 | def sort_by_dependencies ( exts , retry = None ) : sorted_ext , features_from_sorted = [ ] , set ( ) pending = [ ( e , 0 ) for e in exts ] retry = len ( exts ) * 100 if retry is None else retry while pending : ext , cnt = pending . pop ( 0 ) if not isinstance ( ext , Extractor ) and not issubclass ( ext , Extractor ) :... | Calculate the Feature Extractor Resolution Order . |
53,351 | def getSignificance ( wk1 , wk2 , nout , ofac ) : expy = exp ( - wk2 ) effm = 2.0 * ( nout ) / ofac sig = effm * expy ind = ( sig > 0.01 ) . nonzero ( ) sig [ ind ] = 1.0 - ( 1.0 - expy [ ind ] ) ** effm return sig | returns the peak false alarm probabilities Hence the lower is the probability and the more significant is the peak |
53,352 | def fetch ( url , dest , force = False ) : cached = True if force or not os . path . exists ( dest ) : cached = False r = requests . get ( url , stream = True ) if r . status_code == 200 : with open ( dest , 'wb' ) as f : for chunk in r . iter_content ( 1024 ) : f . write ( chunk ) return cached , dest | Retrieve data from an url and store it into dest . |
53,353 | def create_random ( magf , magf_params , errf , errf_params , timef = np . linspace , timef_params = None , size = DEFAULT_SIZE , id = None , ds_name = DS_NAME , description = DESCRIPTION , bands = BANDS , metadata = METADATA ) : timef_params = ( { "start" : 0. , "stop" : 1. } if timef_params is None else timef_params ... | Generate a data with any given random function . |
53,354 | def create_normal ( mu = 0. , sigma = 1. , mu_err = 0. , sigma_err = 1. , seed = None , ** kwargs ) : random = np . random . RandomState ( seed ) return create_random ( magf = random . normal , magf_params = { "loc" : mu , "scale" : sigma } , errf = random . normal , errf_params = { "loc" : mu_err , "scale" : sigma_err... | Generate a data with magnitudes that follows a Gaussian distribution . Also their errors are gaussian . |
53,355 | def create_uniform ( low = 0. , high = 1. , mu_err = 0. , sigma_err = 1. , seed = None , ** kwargs ) : random = np . random . RandomState ( seed ) return create_random ( magf = random . uniform , magf_params = { "low" : low , "high" : high } , errf = random . normal , errf_params = { "loc" : mu_err , "scale" : sigma_er... | Generate a data with magnitudes that follows a uniform distribution ; the error instead are gaussian . |
53,356 | def create_periodic ( mu_err = 0. , sigma_err = 1. , seed = None , ** kwargs ) : random = np . random . RandomState ( seed ) size = kwargs . get ( "size" , DEFAULT_SIZE ) times , mags , errors = [ ] , [ ] , [ ] for b in kwargs . get ( "bands" , BANDS ) : time = 100 * random . rand ( size ) error = random . normal ( siz... | Generate a data with magnitudes with periodic variability distribution ; the error instead are gaussian . |
53,357 | def pdf_single ( z , N , normalization , dH = 1 , dK = 3 ) : if dK - dH != 2 : raise NotImplementedError ( "Degrees of freedom != 2" ) Nk = N - dK if normalization == 'psd' : return np . exp ( - z ) elif normalization == 'standard' : return 0.5 * Nk * ( 1 - z ) ** ( 0.5 * Nk - 1 ) elif normalization == 'model' : return... | Probability density function for Lomb - Scargle periodogram |
53,358 | def cdf_single ( z , N , normalization , dH = 1 , dK = 3 ) : return 1 - fap_single ( z , N , normalization = normalization , dH = dH , dK = dK ) | Cumulative distribution for the Lomb - Scargle periodogram |
53,359 | def fap_simple ( Z , fmax , t , y , dy , normalization = 'standard' ) : N = len ( t ) T = max ( t ) - min ( t ) N_eff = fmax * T p_s = cdf_single ( Z , N , normalization = normalization ) return 1 - p_s ** N_eff | False Alarm Probability based on estimated number of indep frequencies |
53,360 | def fap_davies ( Z , fmax , t , y , dy , normalization = 'standard' ) : N = len ( t ) fap_s = fap_single ( Z , N , normalization = normalization ) tau = tau_davies ( Z , fmax , t , y , dy , normalization = normalization ) return fap_s + tau | Davies upper - bound to the false alarm probability |
53,361 | def fap_baluev ( Z , fmax , t , y , dy , normalization = 'standard' ) : cdf = cdf_single ( Z , len ( t ) , normalization ) tau = tau_davies ( Z , fmax , t , y , dy , normalization = normalization ) return 1 - cdf * np . exp ( - tau ) | Alias - free approximation to false alarm probability |
53,362 | def false_alarm_probability ( Z , fmax , t , y , dy , normalization , method = 'baluev' , method_kwds = None ) : if method not in METHODS : raise ValueError ( "Unrecognized method: {0}" . format ( method ) ) method = METHODS [ method ] method_kwds = method_kwds or { } return method ( Z , fmax , t , y , dy , normalizati... | Approximate the False Alarm Probability |
53,363 | def anim_to_html ( anim , fps = None , embed_frames = True , default_mode = 'loop' ) : if fps is None and hasattr ( anim , '_interval' ) : fps = 1000. / anim . _interval plt . close ( anim . _fig ) if hasattr ( anim , "_html_representation" ) : return anim . _html_representation else : with _NameOnlyTemporaryFile ( suf... | Generate HTML representation of the animation |
53,364 | def display_animation ( anim , ** kwargs ) : from IPython . display import HTML return HTML ( anim_to_html ( anim , ** kwargs ) ) | Display the animation with an IPython HTML object |
53,365 | def indent ( s , c = " " , n = 4 ) : indentation = c * n return "\n" . join ( [ indentation + l for l in s . splitlines ( ) ] ) | Indent the string s with the character c n times . |
53,366 | def generate_date_tail_boost_queries ( field , timedeltas_and_boosts , relative_to = None ) : relative_to = relative_to or datetime . datetime . now ( ) times = { } for timedelta , boost in timedeltas_and_boosts . items ( ) : date = ( relative_to - timedelta ) . date ( ) times [ date ] = boost times = sorted ( times . ... | Generate a list of RangeQueries usable to boost the scores of more recent documents . |
53,367 | def batch_iterable ( iterable , count ) : if count <= 0 : return current_batch = [ ] for item in iterable : if len ( current_batch ) == count : yield current_batch current_batch = [ ] current_batch . append ( item ) if current_batch : yield current_batch | Yield batches of count items from the given iterable . |
53,368 | def validate_nb ( nb ) : if nb [ 'nbformat' ] != 4 : return False language_name = ( nb . get ( 'metadata' , { } ) . get ( 'kernelspec' , { } ) . get ( 'language' , '' ) . lower ( ) ) return language_name == 'python' | Validate that given notebook JSON is importable |
53,369 | def filter_ast ( module_ast ) : def node_predicate ( node ) : for an in ALLOWED_NODES : if isinstance ( node , an ) : return True if isinstance ( node , ast . Assign ) : return all ( [ node_predicate ( t ) for t in node . targets if not hasattr ( t , 'id' ) ] ) and all ( [ t . id . isupper ( ) for t in node . targets i... | Filters a given module ast removing non - whitelisted nodes |
53,370 | def code_from_ipynb ( nb , markdown = False ) : code = PREAMBLE for cell in nb [ 'cells' ] : if cell [ 'cell_type' ] == 'code' : code += '' . join ( cell [ 'source' ] ) if cell [ 'cell_type' ] == 'markdown' : code += '\n# ' + '# ' . join ( cell [ 'source' ] ) code += '\n\n' return code | Get the code for a given notebook |
53,371 | def _get_paths ( self , fullname ) : real_path = os . path . join ( * fullname [ len ( self . package_prefix ) : ] . split ( '.' ) ) for base_path in sys . path : if base_path == '' : base_path = os . getcwd ( ) path = os . path . join ( base_path , real_path ) yield path + '.ipynb' yield path + '.py' yield os . path .... | Generate ordered list of paths we should look for fullname module in |
53,372 | def find_spec ( self , fullname , path , target = None ) : if fullname . startswith ( self . package_prefix ) : for path in self . _get_paths ( fullname ) : if os . path . exists ( path ) : return ModuleSpec ( name = fullname , loader = self . loader_class ( fullname , path ) , origin = path , is_package = ( path . end... | Claims modules that are under ipynb . fs |
53,373 | def coroutine ( func ) : @ wraps ( func ) def start ( * args , ** kwargs ) : g = func ( * args , ** kwargs ) next ( g ) return g return start | Decorator for priming generator - based coroutines . |
53,374 | async def ticker ( delay , to ) : for i in range ( to ) : yield i await asyncio . sleep ( delay ) | Yield numbers from 0 to to every delay seconds . |
53,375 | def rprint ( sep = '\n' , end = '\n' , file = sys . stdout , flush = False ) : try : first_item = ( yield ) file . write ( str ( first_item ) ) if flush : file . flush ( ) while True : item = ( yield ) file . write ( sep ) file . write ( str ( item ) ) if flush : file . flush ( ) except GeneratorExit : file . write ( e... | A coroutine sink which prints received items to stdout |
53,376 | def iterable_source ( iterable , target ) : it = iter ( iterable ) for item in it : try : target . send ( item ) except StopIteration : return prepend ( item , it ) return empty_iter ( ) | Convert an iterable into a stream of events . |
53,377 | def poisson_source ( rate , iterable , target ) : if rate <= 0.0 : raise ValueError ( "poisson_source rate {} is not positive" . format ( rate ) ) it = iter ( iterable ) for item in it : duration = random . expovariate ( rate ) sleep ( duration ) try : target . send ( item ) except StopIteration : return prepend ( item... | Send events at random times with uniform probability . |
53,378 | def compose ( f , * fs ) : rfs = list ( chain ( [ f ] , fs ) ) rfs . reverse ( ) def composed ( * args , ** kwargs ) : return reduce ( lambda result , fn : fn ( result ) , rfs [ 1 : ] , rfs [ 0 ] ( * args , ** kwargs ) ) return composed | Compose functions right to left . |
53,379 | def reducing ( reducer , init = UNSET ) : reducer2 = reducer def reducing_transducer ( reducer ) : return Reducing ( reducer , reducer2 , init ) return reducing_transducer | Create a reducing transducer with the given reducer . |
53,380 | def scanning ( reducer , init = UNSET ) : reducer2 = reducer def scanning_transducer ( reducer ) : return Scanning ( reducer , reducer2 , init ) return scanning_transducer | Create a scanning reducer . |
53,381 | def taking ( n ) : if n < 0 : raise ValueError ( "Cannot take fewer than zero ({}) items" . format ( n ) ) def taking_transducer ( reducer ) : return Taking ( reducer , n ) return taking_transducer | Create a transducer which takes the first n items |
53,382 | def dropping ( n ) : if n < 0 : raise ValueError ( "Cannot drop fewer than zero ({}) items" . format ( n ) ) def dropping_transducer ( reducer ) : return Dropping ( reducer , n ) return dropping_transducer | Create a transducer which drops the first n items |
53,383 | def batching ( size ) : if size < 1 : raise ValueError ( "batching() size must be at least 1" ) def batching_transducer ( reducer ) : return Batching ( reducer , size ) return batching_transducer | Create a transducer which produces non - overlapping batches . |
53,384 | def windowing ( size , padding = UNSET , window_type = tuple ) : if size < 1 : raise ValueError ( "windowing() size {} is not at least 1" . format ( size ) ) def windowing_transducer ( reducer ) : return Windowing ( reducer , size , padding , window_type ) return windowing_transducer | Create a transducer which produces a moving window over items . |
53,385 | def first ( predicate = None ) : predicate = true if predicate is None else predicate def first_transducer ( reducer ) : return First ( reducer , predicate ) return first_transducer | Create a transducer which obtains the first item then terminates . |
53,386 | def last ( predicate = None ) : predicate = true if predicate is None else predicate def last_transducer ( reducer ) : return Last ( reducer , predicate ) return last_transducer | Create a transducer which obtains the last item . |
53,387 | def element_at ( index ) : if index < 0 : raise IndexError ( "element_at used with illegal index {}" . format ( index ) ) def element_at_transducer ( reducer ) : return ElementAt ( reducer , index ) return element_at_transducer | Create a transducer which obtains the item at the specified index . |
53,388 | def compile_sources ( files , CompilerRunner_ = None , destdir = None , cwd = None , keep_dir_struct = False , per_file_kwargs = None , ** kwargs ) : _per_file_kwargs = { } if per_file_kwargs is not None : for k , v in per_file_kwargs . items ( ) : if isinstance ( k , Glob ) : for path in glob . glob ( k . pathname ) :... | Compile source code files to object files . |
53,389 | def link ( obj_files , out_file = None , shared = False , CompilerRunner_ = None , cwd = None , cplus = False , fort = False , ** kwargs ) : if out_file is None : out_file , ext = os . path . splitext ( os . path . basename ( obj_files [ - 1 ] ) ) if shared : out_file += sharedext if not CompilerRunner_ : if fort : Com... | Link object files . |
53,390 | def simple_cythonize ( src , destdir = None , cwd = None , logger = None , full_module_name = None , only_update = False , ** cy_kwargs ) : from Cython . Compiler . Main import ( default_options , CompilationOptions ) from Cython . Compiler . Main import compile as cy_compile assert src . lower ( ) . endswith ( '.pyx' ... | Generates a C file from a Cython source file . |
53,391 | def src2obj ( srcpath , CompilerRunner_ = None , objpath = None , only_update = False , cwd = None , out_ext = None , inc_py = False , ** kwargs ) : name , ext = os . path . splitext ( os . path . basename ( srcpath ) ) if objpath is None : if os . path . isabs ( srcpath ) : objpath = '.' else : objpath = os . path . d... | Compiles a source code file to an object file . Files ending with . pyx assumed to be cython files and are dispatched to pyx2obj . |
53,392 | def compile_link_import_strings ( codes , build_dir = None , ** kwargs ) : build_dir = build_dir or tempfile . mkdtemp ( ) if not os . path . isdir ( build_dir ) : raise OSError ( "Non-existent directory: " , build_dir ) source_files = [ ] if kwargs . get ( 'logger' , False ) is True : import logging logging . basicCon... | Creates a temporary directory and dumps compiles and links provided source code . |
53,393 | def arguments ( self ) : if 'arguments' in self . attributes : LOGGER . warning ( "WARNING: 'arguments' use in OSU yaml configuration file is deprecated. Please use 'options'!" ) arguments = self . attributes [ 'arguments' ] if isinstance ( arguments , dict ) : return arguments else : return { k : arguments for k in se... | Dictionary providing the list of arguments for every benchmark |
53,394 | def _ ( obj ) : tz_offset = obj . utcoffset ( ) if not tz_offset or tz_offset == UTC_ZERO : iso_datetime = obj . strftime ( '%Y-%m-%dT%H:%M:%S.%fZ' ) else : iso_datetime = obj . isoformat ( ) return iso_datetime | ISO 8601 format . Interprets naive datetime as UTC with zulu suffix . |
53,395 | def get_row_generator ( self , ref , cache = None ) : from inspect import isgenerator from rowgenerators import get_generator g = get_generator ( ref ) if not g : raise GenerateError ( "Cant figure out how to generate rows from {} ref: {}" . format ( type ( ref ) , ref ) ) else : return g | Return a row generator for a reference |
53,396 | def create_key_filter ( properties : Dict [ str , list ] ) -> List [ Tuple ] : combinations = ( product ( [ k ] , v ) for k , v in properties . items ( ) ) return chain . from_iterable ( combinations ) | Generate combinations of key value pairs for each key in properties . |
53,397 | def create_indexer ( indexes : list ) : if len ( indexes ) == 1 : index = indexes [ 0 ] return lambda x : ( x [ index ] , ) else : return itemgetter ( * indexes ) | Create indexer function to pluck values from list . |
53,398 | def including ( self , sequence ) -> Generator : return ( element for element in sequence if self . indexer ( element ) in self . predicates ) | Include the sequence elements matching the filter set . |
53,399 | def excluding ( self , sequence ) -> Generator : return ( element for element in sequence if self . indexer ( element ) not in self . predicates ) | Exclude the sequence elements matching the filter set . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.