idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
48,500
def tagged_item_key(self, key):
    """Get a fully qualified key for a tagged item.

    The key is prefixed with the SHA-1 of the current tag namespace so
    flushing a tag invalidates every key stored under it.
    """
    namespace_hash = hashlib.sha1(encode(self._tags.get_namespace())).hexdigest()
    return '%s:%s' % (namespace_hash, key)
Get a fully qualified key for a tagged item .
48,501
def _get_minutes(self, duration):
    """Calculate the number of minutes for the given duration.

    A ``datetime`` is converted to whole minutes from now (``None`` when it
    is already in the past); a plain number is returned unchanged.
    """
    if isinstance(duration, datetime.datetime):
        from_now = (duration - datetime.datetime.now()).total_seconds()
        from_now = math.ceil(from_now / 60)
        if from_now > 0:
            return from_now
        # Expirations in the past mean "do not store".
        return None
    return duration
Calculate the number of minutes with the given duration .
48,502
async def main():
    """Get the data from a Netdata instance and print it.

    Demonstrates the data, alarms and allmetrics endpoints.
    """
    # BUG FIX: aiohttp.ClientSession is an *async* context manager; using a
    # plain ``with`` raises a TypeError at runtime on modern aiohttp.
    async with aiohttp.ClientSession() as session:
        data = Netdata('localhost', loop, session, data='data')
        await data.get_data('system.cpu')
        print(json.dumps(data.values, indent=4, sort_keys=True))
        print("CPU System:", round(data.values['system'], 2))
    async with aiohttp.ClientSession() as session:
        data = Netdata('localhost', loop, session, data='alarms')
        await data.get_alarms()
        print(data.alarms)
    async with aiohttp.ClientSession() as session:
        data = Netdata('localhost', loop, session)
        await data.get_allmetrics()
        print(data.metrics)
        print("CPU System:", round(
            data.metrics['system.cpu']['dimensions']['system']['value'], 2))
Get the data from a Netdata instance .
48,503
def install_handler(self, app):
    """Install the console logging handler onto the Flask app."""
    if app.config['LOGGING_CONSOLE_PYWARNINGS']:
        self.capture_pywarnings(logging.StreamHandler())
    level = app.config['LOGGING_CONSOLE_LEVEL']
    if level is not None:
        for handler in app.logger.handlers:
            handler.setLevel(level)
    # Tag every record with the current request id.
    app.logger.addFilter(add_request_id_filter)
Install logging handler .
48,504
def execute(cmd, shell=False, poll_period=1.0, catch_out=False):
    """Execute a UNIX command and wait for its completion.

    Returns ``(returncode, stdout, stderr)``; output is captured only when
    ``catch_out`` is true.
    """
    log = logging.getLogger(__name__)
    log.debug("Starting: %s", cmd)
    if not shell and isinstance(cmd, string_types):
        # Popen without a shell needs an argv list.
        cmd = shlex.split(cmd)
    popen_kwargs = {'shell': shell, 'close_fds': True}
    if catch_out:
        popen_kwargs.update(stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    process = subprocess.Popen(cmd, **popen_kwargs)
    stdout, stderr = process.communicate()
    if stderr:
        log.error("There were errors:\n%s", stderr)
    if stdout:
        log.debug("Process output:\n%s", stdout)
    returncode = process.returncode
    log.debug("Process exit code: %s", returncode)
    return returncode, stdout, stderr
Execute UNIX command and wait for its completion
48,505
def flush(self):
    """Remove all items from the cache directory."""
    if not os.path.isdir(self._directory):
        return
    # Walk bottom-up so every directory is empty by the time we rmdir it.
    for root, dirs, files in os.walk(self._directory, topdown=False):
        for filename in files:
            os.remove(os.path.join(root, filename))
        for dirname in dirs:
            os.rmdir(os.path.join(root, dirname))
Remove all items from the cache .
48,506
def _path(self, key):
    """Get the full filesystem path for the given cache key."""
    hash_fn, parts_count = self._HASHES[self._hash_type]
    digest = hash_fn(encode(key)).hexdigest()
    # Shard into two-character directories to avoid one huge flat folder.
    shards = [digest[i:i + 2] for i in range(0, len(digest), 2)][:parts_count]
    return os.path.join(self._directory, os.path.sep.join(shards), digest)
Get the full path for the given cache key .
48,507
def tags(self, *names):
    """Begin executing a new tags operation."""
    if len(names) == 1 and isinstance(names[0], list):
        # Accept a single list argument as well as varargs.
        (names,) = names
    return TaggedCache(self, TagSet(self, names))
Begin executing a new tags operation .
48,508
def get_user_info(self, request):
    """Collect info about the current user for Sentry reporting.

    Returns an empty dict for anonymous users; otherwise the user id plus
    any attributes listed in ``SENTRY_USER_ATTRS``.
    """
    if not current_user.is_authenticated:
        return {}
    user_info = {'id': current_user.get_id()}
    for attr in current_app.config.get('SENTRY_USER_ATTRS', []):
        if hasattr(current_user, attr):
            user_info[attr] = getattr(current_user, attr)
    return user_info
Implement custom getter .
48,509
def create_job(self):
    """Create a public Luna job and return its id.

    Raises ValueError when Luna responds without a job id (and sets the
    ``failed`` event so other workers can bail out).
    """
    try:
        my_user_agent = pkg_resources.require('netort')[0].version
    except pkg_resources.DistributionNotFound:
        my_user_agent = 'DistributionNotFound'
    headers = {
        "User-Agent": "Uploader/{uploader_ua}, {upward_ua}".format(
            upward_ua=self.meta.get('user_agent', ''),
            uploader_ua=my_user_agent)
    }
    req = requests.Request(
        'POST',
        "{api_address}{path}".format(
            api_address=self.api_address, path=self.create_job_path),
        headers=headers)
    req.data = {'test_start': self.job.test_start}
    prepared_req = req.prepare()
    logger.debug('Prepared create_job request:\n%s', pretty_print(prepared_req))
    response = send_chunk(self.session, prepared_req)
    logger.debug('Luna create job status: %s', response.status_code)
    logger.debug('Answ data: %s', response.content)
    job_id = (response.content.decode('utf-8')
              if isinstance(response.content, bytes) else response.content)
    if not job_id:
        self.failed.set()
        # BUG FIX: ValueError does not %-format its arguments the way a
        # logger call does; build the message string explicitly.
        raise ValueError(
            'Luna returned answer without jobid: %s' % response.content)
    logger.info('Luna job created: %s', job_id)
    return job_id
Create public Luna job
48,510
def __filter(filterable, filter_, logic_operation='and'):
    """Filter a DataFrame with exact key/value conditions.

    Rows must strictly match every condition in ``filter_``, joined with
    ``logic_operation``; a ``{'type': '__ANY__'}`` filter matches everything.
    """
    if not filter_:
        return filterable
    if filter_.get('type') == '__ANY__':
        return filterable
    conditions = ['{key} == "{value}"'.format(key=k, value=v)
                  for k, v in filter_.items()]
    joiner = ' {operation} '.format(operation=logic_operation)
    try:
        return filterable.query(joiner.join(conditions))
    except pd.core.computation.ops.UndefinedVariableError:
        # The filter referenced an unknown column: nothing can match.
        return pd.DataFrame()
filtering DataFrame using filter_ key - value conditions applying logic_operation only find rows strictly fitting the filter_ criterion
48,511
def install_handler(self, app):
    """Install the Sentry log handler onto the Flask app.

    Optionally wires python warnings and Celery signals into Sentry, and
    adds a console handler for werkzeug in debug mode.
    """
    from raven.contrib.celery import register_logger_signal, register_signal
    from raven.contrib.flask import Sentry, make_client
    from raven.handlers.logging import SentryHandler

    level = getattr(logging, app.config['LOGGING_SENTRY_LEVEL'])
    cls = app.config['LOGGING_SENTRY_CLASS']
    if cls:
        # Allow the extension class to be given as an import path.
        if isinstance(cls, six.string_types):
            cls = import_string(cls)
    else:
        cls = Sentry
    sentry = cls(app, logging=True, level=level)
    app.logger.addHandler(SentryHandler(client=sentry.client, level=level))
    if app.config['LOGGING_SENTRY_PYWARNINGS']:
        self.capture_pywarnings(SentryHandler(sentry.client))
    if app.config['LOGGING_SENTRY_CELERY']:
        try:
            register_logger_signal(sentry.client, loglevel=level)
        except TypeError:
            # Older raven versions have no ``loglevel`` parameter.
            register_logger_signal(sentry.client)
        register_signal(sentry.client)
    if app.debug:
        logger = logging.getLogger('werkzeug')
        logger.setLevel(logging.INFO)
        logger.addHandler(logging.StreamHandler())
Install log handler .
48,512
def process(self, data, **kwargs):
    """Process event data, attaching the current request id to its tags."""
    data = super(RequestIdProcessor, self).process(data, **kwargs)
    if g and hasattr(g, 'request_id'):
        data.setdefault('tags', {})['request_id'] = g.request_id
    return data
Process event data .
48,513
def put(self, key, val, minutes):
    """Store an item in the cache for ``minutes`` minutes.

    Durations that resolve to ``None`` (e.g. datetimes in the past) are
    silently ignored.
    """
    resolved = self._get_minutes(minutes)
    if resolved is not None:
        self._store.put(key, val, resolved)
Store an item in the cache .
48,514
def _get_key(self, fn, args, kwargs):
    """Calculate a cache key for a call to ``fn`` with args and kwargs."""
    # kwargs are serialized in sorted order so the key is deterministic.
    serialized_kwargs = self._store.serialize(
        [(k, kwargs[k]) for k in sorted(kwargs.keys())])
    if args:
        serialized_arguments = self._store.serialize(args[1:]) + serialized_kwargs
    else:
        serialized_arguments = serialized_kwargs
    if isinstance(fn, types.MethodType):
        # Bound method: include the owning class and args[0] (the function).
        return self._hash('%s.%s.%s' % (fn.__self__.__class__.__name__,
                                        args[0].__name__,
                                        serialized_arguments))
    if isinstance(fn, types.FunctionType):
        return self._hash('%s.%s' % (fn.__name__, serialized_arguments))
    # Plain string key prefix.
    return '%s:' % fn + self._hash(serialized_arguments)
Calculate a cache key given a function args and kwargs .
48,515
def flush_main(self):
    """Flush the implicit main task if no explicit main is defined."""
    if self.has_main:
        return
    self.in_main = True
    self.write('task main()')
    self.INDENT()
    if self.debug:
        # FIX: Python 2 print statement modernized to the print() function.
        print('Flushing main:', self.fv.main)
    for node in self.fv.main:
        self.v(node)
        self.NEWLINE()
    self.DEDENT()
    self.in_main = False
Flushes the implicit main function if there is no main function defined .
48,516
def get_nowait_from_queue(queue):
    """Collect all immediately available items from a queue."""
    items = []
    for _ in range(queue.qsize()):
        try:
            items.append(queue.get_nowait())
        except q.Empty:
            # Another consumer drained the queue between qsize() and get.
            break
    return items
Collect all immediately available items from a queue
48,517
def _push_forever_keys(self, namespace, key):
    """Store a copy of the full key under each namespace segment's list."""
    full_key = '%s%s:%s' % (
        self.get_prefix(),
        hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(),
        key)
    for segment in namespace.split('|'):
        self._store.connection().lpush(self._forever_key(segment), full_key)
Store a copy of the full key for each namespace segment .
48,518
def _delete_forever_keys(self):
    """Delete all items that were stored forever under this namespace."""
    for segment in self._tags.get_namespace().split('|'):
        forever_key = self._forever_key(segment)
        # Remove the stored values first, then the tracking list itself.
        self._delete_forever_values(forever_key)
        self._store.connection().delete(forever_key)
Delete all of the items that were stored forever .
48,519
def _delete_forever_values(self, forever_key):
    """Delete every key recorded in the given forever list."""
    forever = self._store.connection().lrange(forever_key, 0, -1)
    if forever:
        self._store.connection().delete(*forever)
Delete all of the keys that have been stored forever .
48,520
def capture_pywarnings(handler):
    """Route Python system warnings into the given logging handler."""
    logger = logging.getLogger('py.warnings')
    # Skip if a handler of this class is already attached (avoid duplicates).
    if any(isinstance(h, handler.__class__) for h in logger.handlers):
        return
    logger.addHandler(handler)
    logger.setLevel(logging.WARNING)
Log python system warnings .
48,521
async def get_data(self, resource):
    """Get detail for a resource from the Netdata data endpoint."""
    url = '{}{}'.format(self.base_url, self.endpoint.format(resource=resource))
    try:
        with async_timeout.timeout(5, loop=self._loop):
            response = await self._session.get(url)
            _LOGGER.info("Response from Netdata: %s", response.status)
            data = await response.json()
            _LOGGER.debug(data)
            # Pair each label with its value from the latest data row.
            self.values = dict(zip(data['labels'], data['data'][0]))
    except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror):
        _LOGGER.error("Can not load data from Netdata")
        raise exceptions.NetdataConnectionError()
Get detail for a resource from the data endpoint .
48,522
async def get_alarms(self):
    """Get the alarms for a Netdata instance."""
    url = '{}{}'.format(self.base_url, self.endpoint)
    try:
        with async_timeout.timeout(5, loop=self._loop):
            response = await self._session.get(url)
            _LOGGER.debug("Response from Netdata: %s", response.status)
            data = await response.text()
            _LOGGER.debug(data)
            self.alarms = data
    except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror):
        _LOGGER.error("Can not load data from Netdata")
        raise exceptions.NetdataConnectionError()
Get alarms for a Netdata instance .
48,523
def filter(self, record):
    """Copy flask.g's request_id onto the log record when available."""
    if g and hasattr(g, 'request_id'):
        record.request_id = g.request_id
    # Never suppress the record itself.
    return True
If request_id is set in flask . g add it to log record .
48,524
def store(self, name=None):
    """Get a cache store instance by name (the default driver when None)."""
    driver_name = name if name is not None else self.get_default_driver()
    self._stores[driver_name] = self._get(driver_name)
    return self._stores[driver_name]
Get a cache store instance by name .
48,525
def _get(self, name):
    """Get the store from the local cache, resolving it when absent.

    FIX: the previous ``self._stores.get(name, self._resolve(name))``
    evaluated the default eagerly, so ``_resolve`` (which constructs a new
    repository) ran on every call even when the store was already cached.
    """
    try:
        return self._stores[name]
    except KeyError:
        return self._resolve(name)
Attempt to get the store from the local cache .
48,526
def _resolve(self, name):
    """Resolve the given store name to a configured Repository."""
    config = self._get_config(name)
    if not config:
        raise RuntimeError('Cache store [%s] is not defined.' % name)
    driver = config['driver']
    if driver in self._custom_creators:
        repository = self._call_custom_creator(config)
    else:
        # Dispatch to the builtin driver factory, e.g. _create_file_driver.
        creator = getattr(self, '_create_%s_driver' % driver)
        repository = creator(config)
    if 'serializer' in config:
        serializer = self._resolve_serializer(config['serializer'])
    else:
        serializer = self._serializer
    repository.get_store().set_serializer(serializer)
    return repository
Resolve the given store
48,527
def _call_custom_creator(self, config):
    """Call a user-registered driver creator and normalize its result."""
    creator = self._custom_creators[config['driver']](config)
    if isinstance(creator, Store):
        # Bare stores get wrapped in a Repository.
        creator = self.repository(creator)
    if not isinstance(creator, Repository):
        raise RuntimeError('Custom creator should return a Repository instance.')
    return creator
Call a custom driver creator .
48,528
def _create_file_driver(self, config):
    """Create an instance of the file cache driver from ``config``."""
    store_kwargs = {'directory': config['path']}
    if 'hash_type' in config:
        store_kwargs['hash_type'] = config['hash_type']
    return self.repository(FileStore(**store_kwargs))
Create an instance of the file cache driver .
48,529
def get_default_driver(self):
    """Get the default cache driver name.

    Falls back to the only configured store; raises RuntimeError when the
    choice is ambiguous.
    """
    if 'default' in self._config:
        return self._config['default']
    stores = self._config['stores']
    if len(stores) == 1:
        return next(iter(stores))
    raise RuntimeError('Missing "default" cache in configuration.')
Get the default cache driver name .
48,530
def _resolve_serializer(self, serializer):
    """Resolve a serializer name or instance to a Serializer object."""
    if isinstance(serializer, Serializer):
        return serializer
    if serializer not in self._serializers:
        raise RuntimeError('Unsupported serializer')
    return self._serializers[serializer]
Resolve the given serializer .
48,531
def multiple_choice(question, choices, answers):
    """Generate a multiple choice question with clickable answer buttons.

    ``answers`` is the index (or iterable of indices) of the correct
    choice(s); buttons turn green/red when clicked.
    """
    # FIX: ``collections.Iterable`` was removed in Python 3.10;
    # use the ``collections.abc`` equivalent.
    if not isinstance(answers, (int, collections.abc.Iterable)):
        raise TypeError('The `answers` arg is expected to be of type '
                        '(int | iterable int) but got {} instead.'
                        .format(type(answers)))

    @curry
    def check_answer(index, button):
        is_correct = (index == answers if isinstance(answers, int)
                      else index in answers)
        button.style.button_color = GREEN if is_correct else RED

    answer_choices = []
    for index, choice in enumerate(choices):
        button = widgets.Button(layout=widgets.Layout(
            width='20px', height='20px', padding='0'))
        button.on_click(check_answer(index))
        button_and_question = widgets.HBox(
            [button, widgets.HTML(TEXT_STYLE.format(choice))],
            layout=widgets.Layout(align_items='center'))
        answer_choices.append(button_and_question)
    question_html = [widgets.HTML(TEXT_STYLE.format(question))]
    display(widgets.VBox(question_html + answer_choices))
Generates a multiple choice question that allows the user to select an answer choice and shows whether choice was correct .
48,532
def short_answer(question, answers, explanation=None):
    """Generate a short answer question with a textbox and submit button.

    ``answers`` may be a string, an iterable of strings, or a predicate
    function; ``explanation`` is shown after a correct answer.
    """
    textbox = widgets.Text(placeholder='Write your answer here')
    submit_button = widgets.Button(description='Submit')
    visual_correct = widgets.HTML()
    error_space = widgets.HTML()
    explain_space = widgets.HTML()

    def check_answer(_):
        response = textbox.value
        # FIX: ``collections.Callable`` / ``collections.Iterable`` were
        # removed in Python 3.10; use the ``collections.abc`` equivalents.
        if isinstance(answers, collections.abc.Callable):
            try:
                error_space.value = ''
                correct = answers(response)
            except Exception as e:
                correct = False
                error_space.value = 'Error in checking answer: {}'.format(e)
        elif isinstance(answers, str):
            correct = response == answers
        elif isinstance(answers, collections.abc.Iterable):
            correct = response in answers
        else:
            raise TypeError('The `answers` arg is an incorrect type.')
        visual_correct.value = CHECK_ICON if correct else X_ICON
        if correct and explanation:
            explain_space.value = explanation

    submit_button.on_click(check_answer)
    question_tag = widgets.HTML(TEXT_STYLE.format(question))
    user_input_line = widgets.HBox([textbox, submit_button, visual_correct])
    display(widgets.VBox(
        [question_tag, user_input_line, error_space, explain_space]))
Generates a short answer question that allows user to input an answer in a textbox and a submit button to check the answer .
48,533
def publish(spec, nb_name, template='full', save_first=True):
    """Convert ``nb_name`` to an HTML file, preserving widget functionality.

    Saves the notebook first (unless ``save_first`` is False), then shells
    out to the ``nbinteract`` converter and reports the result.
    """
    if not os.path.isfile(nb_name):
        raise ValueError("{} isn't a path to a file. Double check your "
                         "filename and try again.".format(nb_name))
    if save_first:
        _save_nb(nb_name)
    print('Converting notebook...')
    try:
        check_output(
            ['nbinteract', '--template', template, '--spec', spec, nb_name],
            stderr=STDOUT)
    except CalledProcessError as err:
        logging.warning(ERROR_MESSAGE.format(
            filename=nb_name, error=str(err.output, 'utf-8')))
        return
    html_filename = os.path.splitext(nb_name)[0] + '.html'
    display(Markdown(CONVERT_SUCCESS_MD.format(url=html_filename)))
Converts nb_name to an HTML file . Preserves widget functionality .
48,534
def _save_nb(nb_name):
    """Attempt to save the notebook via frontend JS; warn if it times out."""
    display(Javascript('IPython.notebook.save_checkpoint();'))
    display(Javascript('IPython.notebook.save_notebook();'))
    print('Saving notebook...', end=' ')
    saved = _wait_for_save(nb_name)
    if saved:
        print("Saved '{}'.".format(nb_name))
    else:
        logging.warning("Could not save your notebook (timed out waiting for "
                        "IPython save). Make sure your notebook is saved "
                        "and export again.")
Attempts to save notebook . If unsuccessful shows a warning .
48,535
def _wait_for_save(nb_name, timeout=5):
    """Wait up to ``timeout`` seconds for ``nb_name`` to be updated.

    Returns True if a save was detected and False otherwise.
    """
    original_mtime = os.path.getmtime(nb_name)
    deadline = time.time() + timeout
    while time.time() < deadline:
        # A save bumps the mtime and leaves a non-empty file.
        if (os.path.getmtime(nb_name) > original_mtime
                and os.path.getsize(nb_name) > 0):
            return True
        time.sleep(0.2)
    return False
Waits for nb_name to update waiting up to TIMEOUT seconds . Returns True if a save was detected and False otherwise .
48,536
def maybe_call(maybe_fn, kwargs: dict, prefix: str = None) -> 'Any':
    """Call ``maybe_fn`` with its matching kwargs if it is callable.

    Non-callables are returned unchanged; prefixed kwargs are also matched
    when ``prefix`` is given.
    """
    if not callable(maybe_fn):
        return maybe_fn
    selected = get_fn_args(maybe_fn, kwargs, prefix=prefix)
    return maybe_fn(**selected)
If maybe_fn is a function get its arguments from kwargs and call it also searching for prefixed kwargs if prefix is specified . Otherwise return maybe_fn .
48,537
def maybe_curry(maybe_fn, first_arg) -> 'Function | Any':
    """Curry ``maybe_fn`` with ``first_arg`` when callable, else return it."""
    if callable(maybe_fn):
        return tz.curry(maybe_fn)(first_arg)
    return maybe_fn
If maybe_fn is a function curries it and passes in first_arg . Otherwise returns maybe_fn .
48,538
def get_fn_args(fn, kwargs: dict, prefix: str = None):
    """Return the subset of ``kwargs`` actually used by ``fn``.

    Raises ValueError when any required argument of ``fn`` is missing.
    """
    fn_kwargs = pick_kwargs(kwargs, get_all_args(fn), prefix)
    missing = [arg for arg in get_required_args(fn) if arg not in fn_kwargs]
    if missing:
        raise ValueError('The following args are missing for the function '
                         '{}: {}.'.format(fn.__name__, missing))
    return fn_kwargs
Given function and a dict of kwargs return a dict containing only the args used by the function .
48,539
def get_all_args(fn) -> list:
    """Return a list of all parameter names of the function ``fn``."""
    signature = inspect.signature(fn)
    return [name for name in signature.parameters]
Returns a list of all arguments for the function fn .
48,540
def get_required_args(fn) -> list:
    """Return the names of ``fn``'s required (no-default) arguments.

    Parameters whose kind is listed in ``VAR_ARGS`` (*args/**kwargs style)
    are excluded.
    """
    sig = inspect.signature(fn)
    # FIX: compare against the public ``inspect.Parameter.empty`` sentinel
    # with ``is`` — ``inspect._empty`` is a private name, and sentinel
    # checks should use identity, not equality.
    return [name for name, param in sig.parameters.items()
            if param.default is inspect.Parameter.empty
            and param.kind not in VAR_ARGS]
Returns a list of required arguments for the function fn .
48,541
def pick_kwargs(kwargs: dict, required_args: list, prefix: str = None):
    """Return only the entries of ``kwargs`` named in ``required_args``.

    When ``prefix`` is given, ``prefix__name`` entries are also picked (with
    the prefix stripped). Supplying both forms for a name raises ValueError.
    """
    picked = {k: v for k, v in kwargs.items() if k in required_args}
    prefixed = {}
    if prefix:
        full_prefix = prefix + '__'
        prefixed = {
            _remove_prefix(k, full_prefix): v
            for k, v in kwargs.items()
            if k.startswith(full_prefix)
            and _remove_prefix(k, full_prefix) in required_args
        }
    conflicting_args = [k for k in picked if k in prefixed]
    if conflicting_args:
        raise ValueError('Both prefixed and unprefixed args were specified '
                         'for the following parameters: {}'
                         .format(conflicting_args))
    return {**picked, **prefixed}
Given a dict of kwargs and a list of required_args return a dict containing only the args in required_args .
48,542
def use_options(allowed):
    """Decorator that warns when unsupported options are passed in.

    Options beginning with ``_`` are internal and always permitted.
    """
    def update_docstring(f):
        _update_option_docstring(f, allowed)

        @functools.wraps(f)
        def check_options(*args, **kwargs):
            options = kwargs.get('options', {})
            unsupported = [opt for opt in options
                           if opt not in allowed and not opt.startswith('_')]
            if unsupported:
                logging.warning('The following options are not supported by '
                                'this function and will likely result in '
                                'undefined behavior: {}.'.format(unsupported))
            return f(*args, **kwargs)

        return check_options

    return update_docstring
Decorator that logs warnings when unpermitted options are passed into its wrapped function .
48,543
def hist(hist_function, *, options={}, **interact_params):
    """Interactive histogram driven by ``hist_function``'s parameters."""
    params = {
        'marks': [{
            'sample': _array_or_placeholder(hist_function),
            'bins': _get_option('bins'),
            'normalized': _get_option('normalized'),
            'scales': (lambda opts: {'sample': opts['x_sc'],
                                     'count': opts['y_sc']}),
        }],
    }
    fig = options.get('_fig', False) or _create_fig(options=options)
    # Renamed local to avoid shadowing this function's own name.
    [hist_mark] = _create_marks(fig=fig, marks=[bq.Hist],
                                options=options, params=params)
    _add_marks(fig, [hist_mark])

    def wrapped(**interact_params):
        hist_mark.sample = util.maybe_call(hist_function, interact_params)

    controls = widgets.interactive(wrapped, **interact_params)
    return widgets.VBox([controls, fig])
Generates an interactive histogram that allows users to change the parameters of the input hist_function .
48,544
def bar(x_fn, y_fn, *, options={}, **interact_params):
    """Interactive bar chart driven by ``x_fn``/``y_fn`` parameters."""
    params = {
        'marks': [{
            'x': _array_or_placeholder(x_fn, PLACEHOLDER_RANGE),
            'y': _array_or_placeholder(y_fn),
        }]
    }
    fig = options.get('_fig', False) or _create_fig(x_sc=bq.OrdinalScale,
                                                    options=options)
    # Renamed local to avoid shadowing this function's own name.
    [bar_mark] = _create_marks(fig=fig, marks=[bq.Bars],
                               options=options, params=params)
    _add_marks(fig, [bar_mark])

    def wrapped(**interact_params):
        x_data = util.maybe_call(x_fn, interact_params, prefix='x')
        bar_mark.x = x_data
        # y_fn may depend on the freshly computed x data.
        y_bound = util.maybe_curry(y_fn, x_data)
        bar_mark.y = util.maybe_call(y_bound, interact_params, prefix='y')

    controls = widgets.interactive(wrapped, **interact_params)
    return widgets.VBox([controls, fig])
Generates an interactive bar chart that allows users to change the parameters of the inputs x_fn and y_fn .
48,545
def scatter_drag(x_points: 'Array', y_points: 'Array', *,
                 fig=None, show_eqn=True, options={}):
    """Draggable scatter plot with a live least-squares fit line.

    The fit line (and optionally its equation label) updates whenever a
    point is moved.
    """
    params = {
        'marks': [
            {'x': x_points, 'y': y_points, 'enable_move': True},
            {'colors': [GOLDENROD]},
        ]
    }
    fig = options.get('_fig', False) or _create_fig(options=options)
    [scat, lin] = _create_marks(fig=fig, marks=[bq.Scatter, bq.Lines],
                                options=options, params=params)
    _add_marks(fig, [scat, lin])
    equation = widgets.Label()

    def update_line(change=None):
        x_sc = scat.scales['x']
        # Stretch the line across the visible x-range (fall back to data).
        lin.x = [
            x_sc.min if x_sc.min is not None else np.min(scat.x),
            x_sc.max if x_sc.max is not None else np.max(scat.x),
        ]
        poly = np.polyfit(scat.x, scat.y, deg=1)
        lin.y = np.polyval(poly, lin.x)
        if show_eqn:
            equation.value = 'y = {:.2f}x + {:.2f}'.format(poly[0], poly[1])

    update_line()
    scat.observe(update_line, names=['x', 'y'])
    return widgets.VBox([equation, fig])
Generates an interactive scatter plot with the best fit line plotted over the points . The points can be dragged by the user and the line will automatically update .
48,546
def scatter(x_fn, y_fn, *, options={}, **interact_params):
    """Interactive scatter chart driven by ``x_fn``/``y_fn`` parameters."""
    params = {
        'marks': [{
            'x': _array_or_placeholder(x_fn),
            'y': _array_or_placeholder(y_fn),
            'marker': _get_option('marker'),
        }]
    }
    fig = options.get('_fig', False) or _create_fig(options=options)
    [scatter_mark] = _create_marks(fig=fig, marks=[bq.Scatter],
                                   options=options, params=params)
    _add_marks(fig, [scatter_mark])

    def wrapped(**interact_params):
        x_data = util.maybe_call(x_fn, interact_params, prefix='x')
        scatter_mark.x = x_data
        # y_fn may depend on the freshly computed x data.
        y_bound = util.maybe_curry(y_fn, x_data)
        scatter_mark.y = util.maybe_call(y_bound, interact_params, prefix='y')

    controls = widgets.interactive(wrapped, **interact_params)
    return widgets.VBox([controls, fig])
Generates an interactive scatter chart that allows users to change the parameters of the inputs x_fn and y_fn .
48,547
def line(x_fn, y_fn, *, options={}, **interact_params):
    """Interactive line chart driven by ``x_fn``/``y_fn`` parameters."""
    fig = options.get('_fig', False) or _create_fig(options=options)
    # Renamed local to avoid shadowing this function's own name.
    [line_mark] = _create_marks(fig=fig, marks=[bq.Lines], options=options)
    _add_marks(fig, [line_mark])

    def wrapped(**interact_params):
        x_data = util.maybe_call(x_fn, interact_params, prefix='x')
        line_mark.x = x_data
        # y_fn may depend on the freshly computed x data.
        y_bound = util.maybe_curry(y_fn, x_data)
        line_mark.y = util.maybe_call(y_bound, interact_params, prefix='y')

    controls = widgets.interactive(wrapped, **interact_params)
    return widgets.VBox([controls, fig])
Generates an interactive line chart that allows users to change the parameters of the inputs x_fn and y_fn .
48,548
def _merge_with_defaults(params):
    """Two-level deep merge of ``params`` into ``_default_params``.

    Each entry in ``params['marks']`` is merged against the default mark
    params individually.
    """
    default_marks = _default_params['marks']
    if 'marks' in params:
        marks_params = [tz.merge(default, mark)
                        for default, mark in zip(itertools.repeat(default_marks),
                                                 params['marks'])]
    else:
        marks_params = [default_marks]
    merged_without_marks = tz.merge_with(tz.merge,
                                         tz.dissoc(_default_params, 'marks'),
                                         tz.dissoc(params, 'marks'))
    return tz.merge(merged_without_marks, {'marks': marks_params})
Performs a 2 - level deep merge of params with _default_params with corrent merging of params for each mark .
48,549
def _create_fig(*, x_sc=bq.LinearScale, y_sc=bq.LinearScale,
                x_ax=bq.Axis, y_ax=bq.Axis, fig=bq.Figure,
                options={}, params={}):
    """Initialize scales and axes and return a blank bqplot figure.

    Each plot component is passed in as a class; plot options go in
    ``options``.
    """
    params = _merge_with_defaults(params)
    x_scale = x_sc(**_call_params(params['x_sc'], options))
    y_scale = y_sc(**_call_params(params['y_sc'], options))
    # Axes need the freshly created scales available in options.
    options = tz.merge(options, {'x_sc': x_scale, 'y_sc': y_scale})
    x_axis = x_ax(**_call_params(params['x_ax'], options))
    y_axis = y_ax(**_call_params(params['y_ax'], options))
    options = tz.merge(options, {'x_ax': x_axis, 'y_ax': y_axis, 'marks': []})
    return fig(**_call_params(params['fig'], options))
Initializes scales and axes for a bqplot figure and returns the resulting blank figure . Each plot component is passed in as a class . The plot options should be passed into options .
48,550
def _create_marks(fig, marks=[bq.Mark], options={}, params={}):
    """Instantiate mark classes against the figure's existing scales.

    Returns the marks as a list, one per class in ``marks``.
    """
    params = _merge_with_defaults(params)
    x_ax, y_ax = fig.axes
    options = tz.merge(options, {'x_sc': x_ax.scale, 'y_sc': y_ax.scale})
    return [mark_cls(**_call_params(mark_params, options))
            for mark_cls, mark_params in zip(marks, params['marks'])]
Initializes and returns marks for a figure as a list . Each mark is passed in as a class . The plot options should be passed into options .
48,551
def _array_or_placeholder(maybe_iterable,
                          placeholder=PLACEHOLDER_ZEROS) -> np.array:
    """Return ``maybe_iterable``'s contents as an array, or ``placeholder``.

    FIX: uses ``collections.abc.Iterable`` — the bare ``collections.Iterable``
    alias was removed in Python 3.10.
    """
    if isinstance(maybe_iterable, collections.abc.Iterable):
        return np.array(list(maybe_iterable))
    return placeholder
Return maybe_iterable s contents or a placeholder array .
48,552
def binder_spec_from_github_url(github_url):
    """Convert a GitHub origin URL into a Binder spec (``user/repo/master``).

    Works for both SSH (``git@github.com:user/repo.git``) and HTTPS forms.
    """
    tokens = re.split(r'/|:', github_url.replace('.git', ''))
    user, repo = tokens[-2], tokens[-1]
    return '{}/{}/master'.format(user, repo)
Converts GitHub origin into a Binder spec .
48,553
def main():
    """Parse command line options and run nbinteract."""
    arguments = docopt(__doc__)
    if arguments['init']:
        # `nbinteract init` is standalone; exit with its return code.
        sys.exit(init())
    run_converter(arguments)
Parses command line options and runs nbinteract .
48,554
def run_converter(arguments):
    """Convert notebooks to HTML files; return the list of output paths."""
    if os.path.isfile(CONFIG_FILE):
        with open(CONFIG_FILE, encoding='utf-8') as f:
            config = json.load(f)
        # The CLI flag wins over the config file value.
        arguments['--spec'] = arguments['--spec'] or config['spec']
    check_arguments(arguments)
    notebooks = flatmap(expand_folder, arguments['NOTEBOOKS'],
                        recursive=arguments['--recursive'])
    exporter = init_exporter(
        extract_images=arguments['--images'],
        spec=arguments['--spec'],
        template_file=arguments['--template'],
        button_at_top=(not arguments['--no-top-button']),
        execute=arguments['--execute'],
    )
    log('Converting notebooks to HTML...')
    output_files = []
    for notebook in notebooks:
        output_file = convert(notebook,
                              exporter=exporter,
                              output_folder=arguments['--output'],
                              images_folder=arguments['--images'])
        output_files.append(output_file)
        log('Converted {} to {}'.format(notebook, output_file))
    log('Done!')
    if arguments['--images']:
        log('Resulting images located in {}'.format(arguments['--images']))
    return output_files
Converts notebooks to HTML files . Returns list of output file paths
48,555
def expand_folder(notebook_or_folder, recursive=False):
    """Return all notebooks in a folder, or ``[path]`` when given a file.

    Raises ValueError for paths that are neither files nor folders; hidden
    folders are skipped in recursive mode.
    """
    if os.path.isfile(notebook_or_folder):
        return [notebook_or_folder]
    if os.path.isdir(notebook_or_folder):
        if not recursive:
            return glob('{}/*.ipynb'.format(notebook_or_folder))
        return [
            os.path.join(folder, filename)
            for folder, _, filenames in os.walk(notebook_or_folder)
            if not os.path.basename(folder).startswith('.')
            for filename in fnmatch.filter(filenames, '*.ipynb')
        ]
    raise ValueError('{} is neither an existing file nor a folder.'
                     .format(notebook_or_folder))
If notebook_or_folder is a folder returns a list containing all notebooks in the folder . Otherwise returns a list containing the notebook name .
48,556
def init_exporter(extract_images, execute, **exporter_config):
    """Return an InteractExporter configured with optional preprocessors."""
    config = Config(InteractExporter=exporter_config)
    preprocessors = []
    if extract_images:
        preprocessors.append('nbconvert.preprocessors.ExtractOutputPreprocessor')
    if execute:
        preprocessors.append('nbinteract.preprocessors.NbiExecutePreprocessor')
    config.InteractExporter.preprocessors = preprocessors
    return InteractExporter(config=config)
Returns an initialized exporter .
48,557
def make_exporter_resources(nb_name, out_folder, images_folder=None):
    """Create the resources dict consumed by the nbconvert exporter."""
    resources = defaultdict(str)
    resources['metadata'] = defaultdict(str, name=nb_name, path=out_folder)
    # unique_key prefixes extracted image filenames.
    resources['unique_key'] = nb_name
    resources['output_files_dir'] = images_folder
    return resources
Creates resources dict for the exporter
48,558
def convert(notebook_path, exporter, output_folder=None, images_folder=None):
    """Convert one notebook to HTML; return the output file path.

    HTML goes into ``output_folder`` (defaulting to the notebook's own
    folder); extracted images go into ``images_folder``.
    """
    if output_folder:
        os.makedirs(output_folder, exist_ok=True)
    if images_folder:
        os.makedirs(images_folder, exist_ok=True)
    path, filename = os.path.split(notebook_path)
    basename, _ = os.path.splitext(filename)
    out_folder = output_folder or path
    outfile_path = os.path.join(out_folder, basename + '.html')
    notebook = nbformat.read(notebook_path, as_version=4)
    html, resources = exporter.from_notebook_node(
        notebook,
        resources=make_exporter_resources(basename, out_folder, images_folder),
    )
    with open(outfile_path, 'w', encoding='utf-8') as outfile:
        outfile.write(html)
    for image_path, image_data in resources.get('outputs', {}).items():
        with open(image_path, 'wb') as outimage:
            outimage.write(image_data)
    return outfile_path
Converts notebook into an HTML file outputting notebooks into output_folder if set and images into images_folder if set .
48,559
def convert_notebooks_to_html_partial(notebook_paths, url_map):
    """Convert each notebook in ``notebook_paths`` to an HTML partial.

    Each page is prefixed with a preamble cell, wrapped in the site template
    (``wrapper``) with prev/next navigation from ``url_map``, and written
    under its chapter folder. Extracted images land in NOTEBOOK_IMAGE_DIR.
    """
    for notebook_path in notebook_paths:
        path, filename = os.path.split(notebook_path)
        # Chapter folder is the notebook's parent directory name, if any.
        chapter = os.path.split(path)[1] if os.sep in path else ''
        basename, _ = os.path.splitext(filename)
        outfile_name = basename + '.html'
        outfile_path = os.path.join(chapter, outfile_name)
        if chapter:
            os.makedirs(chapter, exist_ok=True)

        extract_output_config = {
            'unique_key': basename,
            'output_files_dir': '/' + NOTEBOOK_IMAGE_DIR,
        }

        notebook = nbformat.read(notebook_path, 4)
        notebook.cells.insert(0, _preamble_cell(path))
        html, resources = html_exporter.from_notebook_node(
            notebook,
            resources=extract_output_config,
        )

        if outfile_path not in url_map:
            print('[Warning]: {} not found in _data/toc.yml. This page will '
                  'not appear in the textbook table of contents.'
                  .format(outfile_path))
        prev_page = url_map.get(outfile_path, {}).get('prev', 'false')
        next_page = url_map.get(outfile_path, {}).get('next', 'false')

        final_output = wrapper.format(
            html=html,
            prev_page=prev_page,
            next_page=next_page,
        )
        with open(outfile_path, 'w', encoding='utf-8') as outfile:
            outfile.write(final_output)

        # Copy extracted images next to the site's image directory.
        for relative_path, image_data in resources['outputs'].items():
            image_name = os.path.basename(relative_path)
            final_image_path = os.path.join(NOTEBOOK_IMAGE_DIR, image_name)
            with open(final_image_path, 'wb') as outimage:
                outimage.write(image_data)

        print(outfile_path + " written.")
Converts notebooks in notebook_paths to HTML partials
48,560
def find_element_class(self, element_name):
    """Return the registered element class whose ``element`` name matches
    ``element_name``, falling back to the generic ``Element`` class."""
    for candidate in self.elements:
        if candidate.element == element_name:
            return candidate
    return Element
Finds an element class for the given element name contained within the registry .
48,561
def _transform(transformer_chain: Sequence[Tuple[DataTransformer, Type]], data: S, context: PipelineContext = None) -> T:
    """Run ``data`` through every (transformer, target_type) step in order,
    feeding each step's output into the next, and return the final value."""
    result = data
    for transformer, target_type in transformer_chain:
        result = transformer.transform(target_type, result, context)
    return result
Transform data to a new type .
48,562
def put(self, item: T, context: PipelineContext = None) -> None:
    """Convert ``item`` to the sink's stored type, then insert it."""
    LOGGER.info('Converting item "{item}" for sink "{sink}"'.format(item=item, sink=self._sink))
    converted = self._transform(data=item, context=context)
    # NOTE: "Puting" typo below is preserved — it is the existing log text.
    LOGGER.info('Puting item "{item}" into sink "{sink}"'.format(item=converted, sink=self._sink))
    self._sink.put(self._store_type, converted, context)
Puts an object into the data sink. The object may be transformed into a new type for insertion if necessary.
48,563
def get_many(self, query: Mapping[str, Any], context: PipelineContext = None, streaming: bool = False) -> Iterable[T]:
    """Execute a multi-result query against the wrapped source.

    Streaming requests return a lazy generator; non-streaming requests
    materialise the results, feed them to the before/after-transform sinks,
    and return the converted list.
    """
    result = self._source.get_many(self._source_type, deepcopy(query), context)
    LOGGER.info('Got results "{result}" from query "{query}" of source "{source}"'.format(result=result, query=query, source=self._source))

    if streaming:
        LOGGER.info('Streaming get_many request. Returning result generator for results "{result}"'.format(result=result))
        return self._get_many_generator(result)

    LOGGER.info('Non-streaming get_many request. Ensuring results "{result}" are a Iterable'.format(result=result))
    result = list(result)
    LOGGER.info('Sending results "{result}" to sinks before converting'.format(result=result))
    for sink in self._before_transform:
        sink.put_many(result, context)
    LOGGER.info('Converting results "{result}" to request type'.format(result=result))
    result = [self._transform(data=item, context=context) for item in result]
    LOGGER.info('Sending results "{result}" to sinks after converting'.format(result=result))
    for sink in self._after_transform:
        sink.put_many(result, context)
    return result
Gets a query from the data source where the query contains multiple elements to be extracted .
48,564
def get(self, type: Type[T], query: Mapping[str, Any]) -> T:
    """Run ``query`` against each source handler able to provide ``type``.

    Handler chains are cached per type (including the negative ``None``
    result). Raises NoConversionError when no source can provide the type
    and NotFoundError when every handler comes up empty.
    """
    LOGGER.info('Getting SourceHandlers for "{type}"'.format(type=type.__name__))
    try:
        handlers = self._get_types[type]
    except KeyError:
        LOGGER.info('Building new SourceHandlers for "{type}"'.format(type=type.__name__))
        try:
            handlers = self._get_handlers(type)
        except NoConversionError:
            handlers = None  # cache the failure so we don't rebuild every call
        self._get_types[type] = handlers

    if handlers is None:
        raise NoConversionError('No source can provide "{type}"'.format(type=type.__name__))

    LOGGER.info('Creating new PipelineContext')
    context = self._new_context()

    LOGGER.info('Querying SourceHandlers for "{type}"'.format(type=type.__name__))
    for handler in handlers:
        try:
            return handler.get(query, context)
        except NotFoundError:
            continue
    raise NotFoundError('No source returned a query result!')
Gets a query from the data pipeline .
48,565
def get_many(self, type: Type[T], query: Mapping[str, Any], streaming: bool = False) -> Iterable[T]:
    """Run a multi-object ``query`` against each handler providing ``type``.

    Mirrors :meth:`get` (including the per-type handler cache) but delegates
    to ``handler.get_many`` and forwards the ``streaming`` flag.
    """
    LOGGER.info('Getting SourceHandlers for "{type}"'.format(type=type.__name__))
    try:
        handlers = self._get_types[type]
    except KeyError:
        LOGGER.info('Building new SourceHandlers for "{type}"'.format(type=type.__name__))
        try:
            handlers = self._get_handlers(type)
        except NoConversionError:
            handlers = None  # cache the failure so we don't rebuild every call
        self._get_types[type] = handlers

    if handlers is None:
        raise NoConversionError('No source can provide "{type}"'.format(type=type.__name__))

    LOGGER.info('Creating new PipelineContext')
    context = self._new_context()

    LOGGER.info('Querying SourceHandlers for "{type}"'.format(type=type.__name__))
    for handler in handlers:
        try:
            return handler.get_many(query, context, streaming)
        except NotFoundError:
            continue
    raise NotFoundError('No source returned a query result!')
Gets a query from the data pipeline which contains a request for multiple objects .
48,566
def put(self, type: Type[T], item: T) -> None:
    """Put ``item`` into every sink handler that accepts ``type``.

    Handler chains are cached per type (including the negative ``None``
    result). Unlike :meth:`get`, a type that no sink accepts is silently
    ignored rather than raising.
    """
    LOGGER.info('Getting SinkHandlers for "{type}"'.format(type=type.__name__))
    try:
        handlers = self._put_types[type]
    except KeyError:
        LOGGER.info('Building new SinkHandlers for "{type}"'.format(type=type.__name__))
        try:
            handlers = self._put_handlers(type)
        except NoConversionError:
            handlers = None  # cache the failure so we don't rebuild every call
        # Bug fix: this previously cached into ``self._get_types``, which both
        # discarded the put-handler cache (rebuilt on every call) and poisoned
        # the source-handler cache consulted by ``get``/``get_many``.
        self._put_types[type] = handlers

    LOGGER.info('Creating new PipelineContext')
    context = self._new_context()

    LOGGER.info('Sending item "{item}" to SourceHandlers'.format(item=item))
    if handlers is not None:
        for handler in handlers:
            handler.put(item, context)
Puts an object into the data pipeline. The object may be transformed into a new type for insertion if necessary.
48,567
def print_error(self, wrapper):
    """Crude error printer: walks up ``wrapper``'s parent chain to print the
    failing hierarchy, then the test's args, error lines and expectations."""
    depth = 0
    parent = wrapper.parent
    while parent:
        print_test_msg(parent.name, depth, TestStatus.FAIL, self.use_color)
        depth += 1
        parent = parent.parent

    print_test_msg(wrapper.name, depth, TestStatus.FAIL, self.use_color)
    print_test_args(wrapper.execute_kwargs, depth, TestStatus.FAIL, self.use_color)
    if wrapper.error:
        for line in wrapper.error:
            print_test_msg(line, depth + 2, TestStatus.FAIL, self.use_color)
    print_expects(wrapper, depth, use_color=self.use_color)
A crude way of outputting the errors for now. This needs to be cleaned up into something better.
48,568
def refract(structure) -> Element:
    """Convert a plain Python value into its Refract element equivalent.

    Elements pass through untouched; str/bool/number/list/tuple/dict/None
    map to their element classes (containers are refracted recursively).
    Raises ValueError for unsupported types. Note ``bool`` is checked
    before ``int`` since bool is an int subclass.
    """
    if isinstance(structure, Element):
        return structure
    if isinstance(structure, str):
        return String(content=structure)
    if isinstance(structure, bool):
        return Boolean(content=structure)
    if isinstance(structure, (int, float)):
        return Number(content=structure)
    if isinstance(structure, (list, tuple)):
        return Array(content=[refract(item) for item in structure])
    if isinstance(structure, dict):
        return Object(content=[
            Member(key=refract(key), value=refract(value))
            for (key, value) in structure.items()
        ])
    if structure is None:
        return Null()
    raise ValueError('Unsupported Value Type')
Refracts the given value .
48,569
def serialise(self, element: Element, **kwargs) -> str:
    """Serialise ``element`` to a JSON string; ``kwargs`` are forwarded to
    ``json.dumps`` (e.g. ``indent``)."""
    structure = self.serialise_dict(element)
    return json.dumps(structure, **kwargs)
Serialises the given element into JSON .
48,570
def deserialise(self, element_json: str) -> Element:
    """Parse ``element_json`` and build the corresponding element."""
    structure = json.loads(element_json)
    return self.deserialise_dict(structure)
Deserialises the given JSON into an element .
48,571
def serialise(self, element: Element) -> str:
    """Serialise ``element`` into the compact JSON refract form."""
    compact = self.serialise_element(element)
    return json.dumps(compact)
Serialises the given element into Compact JSON .
48,572
def deserialise(self, content) -> Element:
    """Deserialise compact JSON refract into an element.

    Raises ValueError when the top-level JSON value is not a list.
    """
    parsed = json.loads(content)
    if not isinstance(parsed, list):
        raise ValueError('Given content was not compact JSON refract')
    return self.deserialise_element(parsed)
Deserialises the given compact JSON into an element .
48,573
def combine_coverage_reports(self, omit, parallel):
    """Force combination of parallel coverage data files into one report."""
    combined = coverage.coverage(omit=omit, data_suffix=parallel)
    combined.load()
    combined.combine()
    combined.save()
Method to force the combination of parallel coverage reports .
48,574
def transforms(self) -> Mapping[Type, Iterable[Type]]:
    """Mapping of conversions registered on this class's ``transform``
    dispatch, or an empty dict when no dispatch metadata exists."""
    transform_method = getattr(self.__class__, "transform", None)
    try:
        return transform_method._transforms
    except AttributeError:
        return {}
The available data transformers .
48,575
def transform(self, target_type: Type[T], value: F, context: PipelineContext = None) -> T:
    """Transform ``value`` into an instance of ``target_type``.

    Abstract dispatch target: concrete transformers provide the
    implementation; this base version does nothing and returns ``None``.
    """
Transforms an object to a new type .
48,576
def serialize(self):
    """Serialise this case wrapper (and its expects) into a plain dict,
    with empty entries stripped, for result collection."""
    data = {
        'id': self.id,
        'name': self.pretty_name,
        'raw_name': self.name,
        'doc': self.doc,
        'error': self.error,
        'skipped': self.skipped,
        'skip_reason': self.skip_reason,
        'execute_kwargs': self.safe_execute_kwargs,
        'metadata': self.metadata,
        'start': self.start_time,
        'end': self.end_time,
        'expects': [expect.serialize() for expect in self.expects],
        'success': self.success,
    }
    return remove_empty_entries_from_dict(data)
Serializes the CaseWrapper object for collection .
48,577
def _run_hooks ( self ) : for hook in self . hooks : getattr ( self , hook ) ( self . _state )
Calls any registered hooks providing the current state .
48,578
def append(self, element):
    """Refract ``element`` (when it is a plain value) and append it."""
    from refract.refraction import refract
    refracted = refract(element)
    self.content.append(refracted)
Append an element onto the array .
48,579
def insert(self, index: int, element):
    """Refract ``element`` (when it is a plain value) and insert it at
    position ``index``."""
    from refract.refraction import refract
    refracted = refract(element)
    self.content.insert(index, refracted)
Insert an element at a given position .
48,580
def index(self, element: Element) -> int:
    """Return the index of the first item equal to ``element`` (which is
    refracted first); raises if no such item exists."""
    from refract.refraction import refract
    refracted = refract(element)
    return self.content.index(refracted)
Return the index in the array of the first item whose value is element . It is an error if there is no such item .
48,581
def provides(self):
    """Types this store's ``get``/``get_many`` dispatch can provide, or
    ``TYPE_WILDCARD`` when neither method declares dispatch targets."""
    provided = set()
    found_dispatch = False
    for method_name in ("get", "get_many"):
        try:
            provided.update(getattr(self.__class__, method_name)._provides)
            found_dispatch = True
        except AttributeError:
            pass  # method missing or undecorated — not an error
    return provided if found_dispatch else TYPE_WILDCARD
The types of objects the data store provides .
48,582
def get_many(self, type: Type[T], query: Mapping[str, Any], context: PipelineContext = None) -> Iterable[T]:
    """Execute ``query`` and return every matching object of ``type``.

    Abstract dispatch target: concrete sources provide the implementation;
    this base version does nothing and returns ``None``.
    """
Gets a query from the data source which contains a request for multiple objects .
48,583
def subscribe_all_to_spec(self, spec):
    """Subscribe every usable reporter to ``spec``; reporters rejected by
    ``can_use_reporter`` for the current parallel mode are skipped."""
    usable = (reporter for reporter in self.reporters
              if self.can_use_reporter(reporter, self.parallel))
    for reporter in usable:
        reporter.subscribe_to_spec(spec)
Automatically skips subscribing reporters whose mode (parallel or serial) does not match the current run mode.
48,584
def output(self, msg, indent, status=None):
    """print_indent_msg with the colour derived from ``status`` when colour
    output is enabled (otherwise no colour)."""
    color = get_color_from_status(status) if self.use_color else None
    print_indent_msg(msg, indent, color)
Alias for print_indent_msg with color determined by status .
48,585
def get_real_last_traceback(exception):
    """Build a numbered, human-readable traceback listing for ``exception``.

    Walks the current exception's traceback frames (skipping the first and
    any already captured in CAPTURED_TRACEBACKS), renders each frame's
    source with line numbers, and appends a final error summary line.
    """
    traceback_blocks = []
    _n, _n, exc_traceback = sys.exc_info()
    # Skip the outermost frame; drop frames we've already reported.
    tb_list = get_all_tracebacks(exc_traceback)[1:]
    tb_list = [tb for tb in tb_list if tb not in CAPTURED_TRACEBACKS]
    CAPTURED_TRACEBACKS.extend(tb_list)

    for traceback in tb_list:
        lines, path, line_num = get_source_from_frame(traceback.tb_frame)
        block = get_numbered_source(lines, traceback.tb_lineno, line_num)
        block.insert(0, ' - {0}'.format(path))
        block.insert(1, ' ------------------')
        block.append(' ------------------')
        traceback_blocks.append(block)

    traced_lines = ['Error Traceback:']
    traced_lines.extend(itertools.chain.from_iterable(traceback_blocks))
    traced_lines.append(' - Error | {0}: {1}'.format(type(exception).__name__, exception))
    return traced_lines
An unfortunate evil... all because Python's traceback machinery cannot determine where the executed code is coming from.
48,586
def assets(self) -> List[Asset]:
    """All ``Asset`` children of this transaction's content."""
    # Build the predicate once, matching the original single call to
    # is_element(Asset).
    is_asset = is_element(Asset)
    return [item for item in self.content if is_asset(item)]
Returns the assets in the transaction .
48,587
def decompile(ast, indentation=4, line_length=100, starting_indentation=0):
    """Decompile an AST back into Python source text using a Decompiler
    configured with the given formatting options."""
    return Decompiler(
        indentation=indentation,
        line_length=line_length,
        starting_indentation=starting_indentation,
    ).run(ast)
Decompiles an AST into Python code .
48,588
def write_expression_list(self, nodes, separator=', ', allow_newlines=True, need_parens=True, final_separator_if_multiline=True):
    """Write ``nodes`` joined by ``separator``.

    First attempts a single-line rendering. If ``allow_newlines`` is set and
    the line overflows ``self.max_line_length`` (or a node itself emitted a
    newline), the attempt is rolled back and the list is re-written one node
    per line, optionally wrapped in parentheses, with the separator trailing
    each line (the last one controlled by ``final_separator_if_multiline``).
    """
    saved_line_count = len(self.lines)
    saved_current_line = list(self.current_line)

    # --- single-line attempt ---
    is_first = True
    needs_multiline = False
    for node in nodes:
        if not is_first:
            self.write(separator)
        is_first = False
        self.visit(node)
        overflowed = self.current_line_length() > self.max_line_length
        spilled = saved_line_count != len(self.lines)
        if allow_newlines and (overflowed or spilled):
            needs_multiline = True
            break
    if not needs_multiline:
        return

    # --- roll back the attempt and re-write multiline ---
    del self.lines[saved_line_count:]
    self.current_line = saved_current_line
    separator = separator.rstrip()
    if need_parens:
        self.write('(')
    self.write_newline()
    with self.add_indentation():
        num_nodes = len(nodes)
        for position, node in enumerate(nodes):
            self.write_indentation()
            self.visit(node)
            if final_separator_if_multiline or position < num_nodes - 1:
                self.write(separator)
            self.write_newline()
    self.write_indentation()
    if need_parens:
        self.write(')')
Writes a list of nodes separated by separator .
48,589
def children(self):
    """Child elements of this element: the content list itself, a single
    wrapped element, or an empty list for scalar content."""
    content = self.content
    if isinstance(content, list):
        return content
    if isinstance(content, Element):
        return [content]
    return []
Returns all of the children elements .
48,590
def recursive_children(self):
    """Yield every descendant element, depth-first (each child before its
    own descendants)."""
    for child in self.children:
        yield child
        yield from child.recursive_children
Generator returning all recursive children elements .
48,591
def skip(reason):
    """Decorator factory that unconditionally bypasses a test.

    The wrapped test raises TestSkippedException with ``reason`` instead of
    running. Classes pass through untouched; already-called decorator
    results (``DECORATOR_ONCALL``) are unwrapped to recover the real
    function and its metadata.
    """
    def decorator(test_func):
        if isinstance(test_func, (type, ClassObjType)):
            return test_func

        func_data = None
        if test_func.__name__ == 'DECORATOR_ONCALL':
            func_data = test_func()

        @functools.wraps(test_func)
        def skip_wrapper(*args, **kwargs):
            other_data = {
                'real_func': func_data[0] if func_data else test_func,
                'metadata': func_data[1] if func_data else None,
            }
            raise TestSkippedException(test_func, reason, other_data)

        return skip_wrapper
    return decorator
The skip decorator allows you to unconditionally bypass a test.
48,592
def skip_if(condition, reason=None):
    """Conditionally skip a test: when ``condition`` is truthy return the
    ``skip`` decorator, otherwise return an identity decorator."""
    if condition:
        return skip(reason)

    def identity(func):
        return func
    return identity
The skip_if decorator allows for you to bypass a test on conditions
48,593
def incomplete(test_func):
    """Mark a test as incomplete.

    Behaves like ``skip`` but incomplete tests are tracked under a separate
    metric, letting you skeleton out specifications and see which tests are
    still outstanding. The wrapped test raises TestIncompleteException
    instead of running.
    """
    if not isinstance(test_func, (type, ClassObjType)):
        @functools.wraps(test_func)
        def skip_wrapper(*args, **kwargs):
            raise TestIncompleteException(test_func, _('Test is incomplete'))
        return skip_wrapper
    # Bug fix: a decorated class previously fell through and the decorator
    # implicitly returned None, replacing the class. Mirror ``skip`` and
    # return the class unchanged.
    return test_func
The incomplete decorator behaves much like a normal skip; however, tests that are marked as incomplete are tracked under a different metric. This allows you to create a skeleton around all of your features and specifications and to track which tests have been written and which are still outstanding.
48,594
def serialize(self):
    """Serialise this assertion into a plain dict for result collection."""
    return {
        'success': self.success,
        'assertion': str(self),
        'required': self.required,
    }
Serializes the ExpectAssert object for collection .
48,595
def accepts(self):
    """Types this sink's ``put``/``put_many`` dispatch can store, or
    ``TYPE_WILDCARD`` when neither method declares dispatch targets."""
    accepted = set()
    found_dispatch = False
    for method_name in ("put", "put_many"):
        try:
            accepted.update(getattr(self.__class__, method_name)._accepts)
            found_dispatch = True
        except AttributeError:
            pass  # method missing or undecorated — not an error
    return accepted if found_dispatch else TYPE_WILDCARD
The types of objects the data sink can store .
48,596
def put_many(self, type: Type[T], items: Iterable[T], context: PipelineContext = None) -> None:
    """Store every object in ``items`` (all of type ``type``) in the sink.

    Abstract dispatch target: concrete sinks provide the implementation;
    this base version does nothing.
    """
Puts multiple objects of the same type into the data sink .
48,597
def init_defaults(self):
    """Initialise important headers to default values when not already set.

    * WARC-Record-ID: a freshly generated UUID URN.
    * WARC-Date: the current UTC time in ISO-8601 'Z' form.
    * Content-Type: looked up from the record's WARC-Type, defaulting to
      application/octet-stream.
    """
    if "WARC-Record-ID" not in self:
        self["WARC-Record-ID"] = "<urn:uuid:%s>" % uuid.uuid1()
    if "WARC-Date" not in self:
        now = datetime.datetime.utcnow()
        self["WARC-Date"] = now.strftime('%Y-%m-%dT%H:%M:%SZ')
    if "Content-Type" not in self:
        default_type = WARCHeader.CONTENT_TYPES.get(self.type, "application/octet-stream")
        self["Content-Type"] = default_type
Initializes important headers to default values if not already specified . The WARC - Record - ID header is set to a newly generated UUID . The WARC - Date header is set to the current datetime . The Content - Type is set based on the WARC - Type header . The Content - Length is initialized to 0 .
48,598
def write_to(self, f):
    """Write this header to ``f`` in the on-the-wire WARC format: version
    line, CRLF-terminated 'Name: value' pairs, then a blank line."""
    f.write(self.version + "\r\n")
    for name, value in self.items():
        # Title-case the name, then restore canonical WARC acronym casing.
        name = name.title()
        name = (name.replace("Warc-", "WARC-")
                    .replace("-Ip-", "-IP-")
                    .replace("-Id", "-ID")
                    .replace("-Uri", "-URI"))
        f.write(name + ": " + value + "\r\n")
    f.write("\r\n")
Writes this header to a file in the format specified by WARC .
48,599
def from_response(response):
    """Build a WARC 'response' record from a requests-style response object.

    Reconstructs the raw HTTP payload (status line, headers, body) from the
    underlying http response, and rewinds ``response.raw`` so the body can
    still be consumed by the caller.
    NOTE(review): the StringIO + str/bytes concatenation here looks like
    Python 2 era code — confirm before running under Python 3.
    """
    http_response = response.raw._original_response
    status_line = "HTTP/1.1 %d %s" % (http_response.status, http_response.reason)
    headers = str(http_response.msg)
    body = http_response.read()
    # Put the body back so callers can still read the response content.
    response.raw._fp = StringIO(body)

    payload = status_line + "\r\n" + headers + "\r\n" + body
    record_headers = {
        "WARC-Type": "response",
        "WARC-Target-URI": response.request.full_url.encode('utf-8'),
    }
    return WARCRecord(payload=payload, headers=record_headers)
Creates a WARCRecord from given response object .