idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,800
def list(self, **kwargs):
    """Display all projects stored in the database, ordered by name.

    Prints a yellow warning when no projects exist; otherwise prints one
    '- <name/path>' row per project.
    """
    projects = Project.select().order_by(Project.name)
    if len(projects) == 0:
        self._print('No projects available', 'yellow')
        return
    for project in projects:
        # Format each project exactly once -- the original formatted it
        # twice and discarded the first result (dead `project_repr`).
        project_repr = self._PROJECT_ITEM.format(project.name, project.path)
        six.print_('- {}'.format(project_repr))
displays all projects on database
56,801
def parent_tags ( self ) : tags = set ( ) for addr in self . _addresses : if addr . attr == 'text' : tags . add ( addr . element . tag ) tags . update ( el . tag for el in addr . element . iterancestors ( ) ) tags . discard ( HTMLFragment . _root_tag ) return frozenset ( tags )
Provides tags of all parent HTML elements .
56,802
def involved_tags(self):
    """Provides all HTML tags directly involved in this string.

    Computes the ancestor elements shared by every character of the
    string, then reports the tags that wrap only *part* of the string.
    """
    # A single-address string cannot have a partial wrapper.
    if len(self._addresses) < 2:
        return frozenset()
    parent_sets = []
    common_parents = set()
    for addr in self._addresses:
        parents = set()
        if addr.attr == 'text':
            # 'text' lives inside the element itself, so the element counts
            # as a parent; 'tail' text belongs to the element's parent.
            parents.add(addr.element)
        parents.update(addr.element.iterancestors())
        parent_sets.append((addr, parents))
        # Running intersection of every character's ancestor set.
        if not common_parents:
            common_parents = parents
        else:
            common_parents &= parents
    involved_tags = set()
    prev_addr = None
    for addr, parents in parent_sets:
        # Ancestors shared by the whole string are not "involved".
        parents = parents - common_parents
        involved_tags.update(p.tag for p in parents)
        # A 'tail' following a different element means that element sits
        # entirely inside this string.
        is_tail_of_hidden = (prev_addr and addr.attr == 'tail'
                             and prev_addr.element != addr.element)
        if is_tail_of_hidden:
            # NOTE(review): this adds the element *object*, while the rest
            # of the set holds tag name strings -- possibly should be
            # addr.element.tag; confirm against callers before changing.
            involved_tags.add(addr.element)
        prev_addr = addr
    return frozenset(involved_tags)
Provides all HTML tags directly involved in this string .
56,803
def _parse ( self , html ) : if self . _has_body_re . search ( html ) : tree = lxml . html . document_fromstring ( html ) . find ( './/body' ) self . has_body = True else : tree = lxml . html . fragment_fromstring ( html , create_parent = self . _root_tag ) if tree . tag != self . _root_tag : root = lxml . html . HtmlElement ( ) root . tag = self . _root_tag root . append ( tree ) return root return tree
Parse given string as HTML and return its etree representation .
56,804
def _iter_texts ( self , tree ) : skip = ( not isinstance ( tree , lxml . html . HtmlElement ) or tree . tag in self . skipped_tags ) if not skip : if tree . text : yield Text ( tree . text , tree , 'text' ) for child in tree : for text in self . _iter_texts ( child ) : yield text if tree . tail : yield Text ( tree . tail , tree , 'tail' )
Iterates over texts in given HTML tree .
56,805
def _analyze_tree ( self , tree ) : addresses = [ ] for text in self . _iter_texts ( tree ) : for i , char in enumerate ( text . content ) : if char in whitespace : char = ' ' addresses . append ( CharAddress ( char , text . element , text . attr , i ) ) while addresses and addresses [ 0 ] . char == ' ' : del addresses [ 0 ] while addresses and addresses [ - 1 ] . char == ' ' : del addresses [ - 1 ] return addresses
Analyze given tree and create mapping of indexes to character addresses .
56,806
def _validate_index ( self , index ) : if isinstance ( index , slice ) : if index . step and index . step != 1 : raise IndexError ( 'Step is not allowed.' ) indexes = ( index . start , index . stop ) else : indexes = ( index , ) for index in indexes : if index is not None and index < 0 : raise IndexError ( 'Negative indexes are not allowed.' )
Validates given index eventually raises errors .
56,807
def _find_pivot_addr ( self , index ) : if not self . addresses or index . start == 0 : return CharAddress ( '' , self . tree , 'text' , - 1 ) if index . start > len ( self . addresses ) : return self . addresses [ - 1 ] return self . addresses [ index . start ]
Inserting by slicing can lead into situation where no addresses are selected . In that case a pivot address has to be chosen so we know where to add characters .
56,808
def check_api_key(email, api_key):
    """Check the API key of the user.

    Looks the user up by e-mail in the DynamoDB table named by the
    ``people`` environment variable and compares the stored ``api_key``.

    Returns the user item (dict) on success, ``False`` otherwise.
    """
    table = boto3.resource("dynamodb").Table(os.environ['people'])
    response = table.get_item(Key={'email': email})
    # get_item() always returns a response dict, so the original
    # `if not user` could never trigger; the item is only present under
    # the "Item" key on a hit, and a miss crashed on None.get().
    user = response.get("Item")
    if not user:
        return False
    if api_key != user.get('api_key'):
        return False
    return user
Check the API key of the user .
56,809
def replace ( html , replacements = None ) : if not replacements : return html html = HTMLFragment ( html ) for r in replacements : r . replace ( html ) return unicode ( html )
Performs replacements on given HTML string .
56,810
def _is_replacement_allowed ( self , s ) : if any ( tag in s . parent_tags for tag in self . skipped_tags ) : return False if any ( tag not in self . textflow_tags for tag in s . involved_tags ) : return False return True
Tests whether replacement is allowed on given piece of HTML text .
56,811
def replace(self, html):
    """Perform replacements on given HTML fragment.

    Repeatedly applies self.pattern to the fragment's text one match at a
    time, substituting self.replacement wherever the surrounding markup
    permits it.
    """
    self.html = html
    text = html.text()
    # End offsets of matches already processed; their sum translates match
    # positions in the shrinking `text` back to fragment coordinates.
    positions = []

    def perform_replacement(match):
        # Map the match span onto the full fragment.
        offset = sum(positions)
        start, stop = match.start() + offset, match.end() + offset
        s = self.html[start:stop]
        if self._is_replacement_allowed(s):
            repl = match.expand(self.replacement)
            self.html[start:stop] = repl
        else:
            # Disallowed location: emit the match unchanged.
            repl = match.group()
        positions.append(match.end())
        return repl

    while True:
        if positions:
            # Resume scanning just after the previous match.
            text = text[positions[-1]:]
        text, n = self.pattern.subn(perform_replacement, text, count=1)
        if not n:
            break
Perform replacements on given HTML fragment .
56,812
def read_relative_file(filename, relative_to=None):
    """Return the contents of *filename* resolved relative to
    *relative_to* (typically a caller's ``__file__``).

    NOTE(review): when *relative_to* is omitted it defaults to
    os.path.dirname(__file__) and then dirname() is applied AGAIN below,
    so the default resolves against this package's *parent* directory --
    confirm this is intended before relying on the default.
    """
    if relative_to is None:
        relative_to = os.path.dirname(__file__)
    with open(os.path.join(os.path.dirname(relative_to), filename)) as f:
        return f.read()
Returns contents of the given file which path is supposed relative to this package .
56,813
def get_events ( self ) : to_send = { 'limit' : 50 } response = self . _send_data ( 'POST' , 'admin' , 'get-events' , to_send ) output = { 'message' : "" } for event in response [ 'events' ] : desc = "Source IP: {ip}\n" desc += "Datetime: {time}\n" desc += "Indicator: {match}\n" desc += "Method: {method}\n" desc += "URL: {url}\n" desc += "Request Type: {type}\n" desc += "User-Agent: {userAgent}\n" desc += "Contact: {contact}\n" desc += "\n" output [ 'message' ] += desc . format ( ** event ) return output
Get events from the cloud node .
56,814
def flush_events(self):
    """Flush events from the cloud node and report the outcome as a
    {'message': ...} dict."""
    response = self._send_data('DELETE', 'admin', 'flush-events', {})
    msg = "Events flushed" if response['success'] else "Flushing of events failed"
    return {'message': msg}
Flush events from the cloud node .
56,815
def put ( self ) : try : self . cloudwatch . put_metric_data ( Namespace = self . namespace , MetricData = [ { 'MetricName' : self . name , 'Value' : self . value , 'Timestamp' : self . timestamp } ] ) except Exception : logging . exception ( "Error pushing {0} to CloudWatch." . format ( str ( self ) ) )
Push the info represented by this Metric to CloudWatch .
56,816
def log ( self , message , level = logging . INFO , * args , ** kwargs ) : msg = "{}.{}: {}[{}]: {}" . format ( self . __class__ . __name__ , self . status , self . __class__ . path , self . uuid , message ) extra = kwargs . pop ( "extra" , dict ( ) ) extra . update ( dict ( kmsg = Message ( self . uuid , entrypoint = self . __class__ . path , params = self . params , metadata = self . metadata ) . dump ( ) ) ) return logger . log ( level = level , msg = msg , extra = extra , * args , ** kwargs )
Send log entry
56,817
def _connect ( self ) : logger . info ( "Connecting to rabbit" ) for url in self . _urls : try : self . _connection = pika . BlockingConnection ( pika . URLParameters ( url ) ) self . _channel = self . _connection . channel ( ) self . _declare ( ) if self . _confirm_delivery : self . _channel . confirm_delivery ( ) logger . info ( "Enabled delivery confirmation" ) logger . debug ( "Connected to rabbit" ) return True except pika . exceptions . AMQPConnectionError : logger . exception ( "Unable to connect to rabbit" ) continue except Exception : logger . exception ( "Unexpected exception connecting to rabbit" ) continue raise pika . exceptions . AMQPConnectionError
Connect to a RabbitMQ instance
56,818
def _disconnect ( self ) : try : self . _connection . close ( ) logger . debug ( "Disconnected from rabbit" ) except Exception : logger . exception ( "Unable to close connection" )
Cleanly close a RabbitMQ connection .
56,819
def publish_message ( self , message , content_type = None , headers = None , mandatory = False , immediate = False ) : logger . debug ( "Publishing message" ) try : self . _connect ( ) return self . _do_publish ( mandatory = mandatory , immediate = immediate , content_type = content_type , headers = headers , message = message ) except pika . exceptions . AMQPConnectionError : logger . error ( "AMQPConnectionError occurred. Message not published." ) raise PublishMessageError except NackError : logger . error ( "NackError occurred. Message not published." ) raise PublishMessageError except UnroutableError : logger . error ( "UnroutableError occurred. Message not published." ) raise PublishMessageError except Exception : logger . exception ( "Unknown exception occurred. Message not published." ) raise PublishMessageError
Publish a response message to a RabbitMQ instance .
56,820
def visit ( folder , provenance_id , step_name , previous_step_id = None , config = None , db_url = None , is_organised = True ) : config = config if config else [ ] logging . info ( "Visiting %s" , folder ) logging . info ( "-> is_organised=%s" , str ( is_organised ) ) logging . info ( "-> config=%s" , str ( config ) ) logging . info ( "Connecting to database..." ) db_conn = connection . Connection ( db_url ) step_id = _create_step ( db_conn , step_name , provenance_id , previous_step_id ) previous_files_hash = _get_files_hash_from_step ( db_conn , previous_step_id ) checked = dict ( ) def process_file ( file_path ) : logging . debug ( "Processing '%s'" % file_path ) file_type = _find_type ( file_path ) if "DICOM" == file_type : is_copy = _hash_file ( file_path ) in previous_files_hash leaf_folder = os . path . split ( file_path ) [ 0 ] if leaf_folder not in checked or 'boost' not in config : ret = dicom_import . dicom2db ( file_path , file_type , is_copy , step_id , db_conn , 'session_id_by_patient' in config , 'visit_id_in_patient_id' in config , 'visit_id_in_patient_id' in config , 'repetition_from_path' in config ) try : checked [ leaf_folder ] = ret [ 'repetition_id' ] except KeyError : logging . warning ( "Cannot find repetition ID !" ) else : dicom_import . extract_dicom ( file_path , file_type , is_copy , checked [ leaf_folder ] , step_id ) elif "NIFTI" == file_type and is_organised : is_copy = _hash_file ( file_path ) in previous_files_hash nifti_import . nifti2db ( file_path , file_type , is_copy , step_id , db_conn , 'session_id_by_patient' in config , 'visit_id_in_patient_id' in config ) elif file_type : is_copy = _hash_file ( file_path ) in previous_files_hash others_import . others2db ( file_path , file_type , is_copy , step_id , db_conn ) if sys . version_info . major == 3 and sys . version_info . minor < 5 : matches = [ ] for root , dirnames , filenames in os . walk ( folder ) : for filename in fnmatch . filter ( filenames , '*' ) : matches . 
append ( os . path . join ( root , filename ) ) for file_path in matches : process_file ( file_path ) else : for file_path in glob . iglob ( os . path . join ( folder , "**/*" ) , recursive = True ) : process_file ( file_path ) logging . info ( "Closing database connection..." ) db_conn . close ( ) return step_id
Record all files from a folder into the database .
56,821
def check_sockets ( self ) : try : msg_frames = ( self . command_socket . recv_multipart ( zmq . NOBLOCK ) ) except zmq . Again : pass else : self . on_command_recv ( msg_frames ) try : msg_frames = ( self . subscribe_socket . recv_multipart ( zmq . NOBLOCK ) ) source , target , msg_type , msg_json = msg_frames if ( ( source == 'microdrop.device_info_plugin' ) and ( msg_type == 'execute_reply' ) ) : msg = json . loads ( msg_json ) if msg [ 'content' ] [ 'command' ] == 'get_device' : data = decode_content_data ( msg ) if data is not None : self . parent . on_device_loaded ( data ) elif ( ( source == 'microdrop.electrode_controller_plugin' ) and ( msg_type == 'execute_reply' ) ) : msg = json . loads ( msg_json ) if msg [ 'content' ] [ 'command' ] in ( 'set_electrode_state' , 'set_electrode_states' ) : data = decode_content_data ( msg ) if data is None : print msg else : self . parent . on_electrode_states_updated ( data ) elif msg [ 'content' ] [ 'command' ] == 'get_channel_states' : data = decode_content_data ( msg ) if data is None : print msg else : self . parent . on_electrode_states_set ( data ) elif ( ( source == 'droplet_planning_plugin' ) and ( msg_type == 'execute_reply' ) ) : msg = json . loads ( msg_json ) if msg [ 'content' ] [ 'command' ] in ( 'add_route' , ) : self . execute_async ( 'droplet_planning_plugin' , 'get_routes' ) elif msg [ 'content' ] [ 'command' ] in ( 'get_routes' , ) : data = decode_content_data ( msg ) self . parent . canvas_slave . df_routes = data elif ( ( source == 'microdrop.command_plugin' ) and ( msg_type == 'execute_reply' ) ) : msg = json . loads ( msg_json ) if msg [ 'content' ] [ 'command' ] in ( 'get_commands' , 'unregister_command' , 'register_command' ) : df_commands = decode_content_data ( msg ) . set_index ( 'namespace' ) for group_i , df_i in df_commands . groupby ( 'namespace' ) : register = getattr ( self . parent . 
canvas_slave , 'register_%s_command' % group_i , None ) if register is None : continue else : for j , command_ij in df_i . iterrows ( ) : register ( command_ij . command_name , title = command_ij . title , group = command_ij . plugin_name ) _L ( ) . debug ( 'registered %s command: `%s`' , group_i , command_ij ) else : self . most_recent = msg_json except zmq . Again : pass except : logger . error ( 'Error processing message from subscription ' 'socket.' , exc_info = True ) return True
Check for new messages on sockets and respond accordingly .
56,822
def follow_cf(save, Uspan, target_cf, nup, n_tot=5.0, slsp=None):
    """Calculate the quasiparticle weight of the single-site spin
    Hamiltonian with N degenerate half-filled orbitals while holding the
    crystal-field splitting at *target_cf* along the interaction span.

    Results (z, lambda, mu, populations, mean fields) are written into a
    new 'cf=<target_cf>' group of the *save* dataset.
    """
    # Identity check per PEP 8; '== None' also misbehaves for objects
    # that override __eq__.
    if slsp is None:
        slsp = Spinon(slaves=6, orbitals=3, avg_particles=n_tot,
                      hopping=[0.5] * 6,
                      populations=np.asarray([n_tot] * 6) / 6)
    zet, lam, mu, mean_f = [], [], [], []
    for co in Uspan:
        print('U=', co, 'del=', target_cf)
        res = root(targetpop, nup[-1], (co, target_cf, slsp, n_tot))
        print(res.x)
        # NOTE(review): iteration stops once the solved density exceeds
        # the previous one -- presumably past the transition; confirm.
        if res.x > nup[-1]:
            break
        nup.append(res.x)
        slsp.param['populations'] = population_distri(nup[-1])
        mean_f.append(slsp.mean_field())
        zet.append(slsp.quasiparticle_weight())
        lam.append(slsp.param['lambda'])
        mu.append(orbital_energies(slsp.param, zet[-1]))
    case = save.createGroup('cf={}'.format(target_cf))
    varis = st.setgroup(case)
    st.storegroup(varis, Uspan[:len(zet)], zet, lam, mu, nup[1:],
                  target_cf, mean_f)
Calculates the quasiparticle weight in single site spin hamiltonian under with N degenerate half - filled orbitals
56,823
def targetpop ( upper_density , coul , target_cf , slsp , n_tot ) : if upper_density < 0.503 : return 0. trypops = population_distri ( upper_density , n_tot ) slsp . set_filling ( trypops ) slsp . selfconsistency ( coul , 0 ) efm_free = dos_bethe_find_crystalfield ( trypops , slsp . param [ 'hopping' ] ) orb_ener = slsp . param [ 'lambda' ] + slsp . quasiparticle_weight ( ) * efm_free obtained_cf = orb_ener [ 5 ] - orb_ener [ 0 ] return target_cf - obtained_cf
restriction on finding the right populations that leave the crystal field same
56,824
def load ( self , filename = None ) : DataFile . load ( self , filename ) self . spectrum . filename = filename
Method was overridden to set spectrum . filename as well
56,825
def _do_save_as ( self , filename ) : if len ( self . spectrum . x ) < 2 : raise RuntimeError ( "Spectrum must have at least two points" ) if os . path . isfile ( filename ) : os . unlink ( filename ) hdu = self . spectrum . to_hdu ( ) overwrite_fits ( hdu , filename )
Saves spectrum back to FITS file .
56,826
def matches ( ) : wvw_matches = get_cached ( "wvw/matches.json" , False ) . get ( "wvw_matches" ) for match in wvw_matches : match [ "start_time" ] = parse_datetime ( match [ "start_time" ] ) match [ "end_time" ] = parse_datetime ( match [ "end_time" ] ) return wvw_matches
This resource returns a list of the currently running WvW matches with the participating worlds included in the result . Further details about a match can be requested using the match_details function .
56,827
def objective_names ( lang = "en" ) : params = { "lang" : lang } cache_name = "objective_names.%(lang)s.json" % params data = get_cached ( "wvw/objective_names.json" , cache_name , params = params ) return dict ( [ ( objective [ "id" ] , objective [ "name" ] ) for objective in data ] )
This resource returns a list of the localized WvW objective names for the specified language .
56,828
def _parse_data ( self , data , charset ) : builder = TreeBuilder ( numbermode = self . _numbermode ) if isinstance ( data , basestring ) : xml . sax . parseString ( data , builder ) else : xml . sax . parse ( data , builder ) return builder . root [ self . _root_element_name ( ) ]
Parse the xml data into dictionary .
56,829
def _format_data ( self , data , charset ) : if data is None or data == '' : return u'' stream = StringIO . StringIO ( ) xml = SimplerXMLGenerator ( stream , charset ) xml . startDocument ( ) xml . startElement ( self . _root_element_name ( ) , { } ) self . _to_xml ( xml , data ) xml . endElement ( self . _root_element_name ( ) ) xml . endDocument ( ) return stream . getvalue ( )
Format data into XML .
56,830
def _to_xml ( self , xml , data , key = None ) : if isinstance ( data , ( list , tuple ) ) : for item in data : elemname = self . _list_item_element_name ( key ) xml . startElement ( elemname , { } ) self . _to_xml ( xml , item ) xml . endElement ( elemname ) elif isinstance ( data , dict ) : for key , value in data . iteritems ( ) : xml . startElement ( key , { } ) self . _to_xml ( xml , value , key ) xml . endElement ( key ) else : xml . characters ( smart_unicode ( data ) )
Recursively convert the data into xml .
56,831
def startElement(self, name, attrs):
    """Begin a new XML node: push the node under construction (and its
    character data) onto the stack and start collecting afresh."""
    self.stack.append((self.current, self.chardata))
    self.current, self.chardata = {}, []
Initialize new node and store current node into stack .
56,832
def endElement(self, name):
    """Finish the current element: convert collected children or
    character data into a node value and attach it to the parent
    popped from the stack."""
    if self.current:
        obj = self.current
    else:
        # Leaf element: parse the accumulated character data.
        text = ''.join(self.chardata).strip()
        obj = self._parse_node_data(text)
    parent, self.chardata = self.stack.pop()
    self.current = self._element_to_node(parent, name, obj)
End current xml element parse and add it to the parent node .
56,833
def _parse_node_data ( self , data ) : data = data or '' if self . numbermode == 'basic' : return self . _try_parse_basic_number ( data ) elif self . numbermode == 'decimal' : return self . _try_parse_decimal ( data ) else : return data
Parse the value of a node . Override to provide your own parsing .
56,834
def _try_parse_basic_number ( self , data ) : try : return int ( data ) except ValueError : pass try : return float ( data ) except ValueError : pass return data
Try to convert the data into int or float .
56,835
def apize_raw ( url , method = 'GET' ) : def decorator ( func ) : def wrapper ( * args , ** kwargs ) : elem = func ( * args , ** kwargs ) if type ( elem ) is not dict : raise BadReturnVarType ( func . __name__ ) response = send_request ( url , method , elem . get ( 'data' , { } ) , elem . get ( 'args' , { } ) , elem . get ( 'params' , { } ) , elem . get ( 'headers' , { } ) , elem . get ( 'cookies' , { } ) , elem . get ( 'timeout' , 8 ) , elem . get ( 'is_json' , False ) , elem . get ( 'verify_cert' , True ) ) return response return wrapper return decorator
Convert data and params dict - > json .
56,836
def extract_version(path):
    """Read ``__init__.py`` under *path* and return its ``__version__``.

    *path* is a directory prefix (it must end with a path separator,
    since the filename is appended by string concatenation).  Returns the
    version as a string, or None when the file is empty or contains no
    ``__version__`` assignment.
    """
    _version_re = re.compile(r'__version__\s+=\s+(.*)')
    with open(path + '__init__.py', 'r', encoding='utf-8') as f:
        source = f.read()
    # Guard clauses replace the original's nested if/else ladder.
    if not source:
        return None
    match = _version_re.search(source)
    if not match:
        return None
    # literal_eval strips the quoting around the assigned value.
    return str(ast.literal_eval(match.group(1).rstrip()))
Reads the file at the specified path and returns the version contained in it .
56,837
def _make_connect(module, args, kwargs):
    """Return a zero-argument callable that opens a new connection with
    the given DB-API *module* and the supplied credentials.

    The credentials are pre-bound with functools.partial so the pool can
    create connections on demand without re-supplying them.
    """
    return functools.partial(module.connect, *args, **kwargs)
Returns a function capable of making connections with a particular driver given the supplied credentials .
56,838
def create_pool ( module , max_conns , * args , ** kwargs ) : if not hasattr ( module , 'threadsafety' ) : raise NotSupported ( "Cannot determine driver threadsafety." ) if max_conns < 1 : raise ValueError ( "Minimum number of connections is 1." ) if module . threadsafety >= 2 : return Pool ( module , max_conns , * args , ** kwargs ) if module . threadsafety >= 1 : return DummyPool ( module , * args , ** kwargs ) raise ValueError ( "Bad threadsafety level: %d" % module . threadsafety )
Create a connection pool appropriate to the driver module s capabilities .
56,839
def transactional ( wrapped ) : def wrapper ( * args , ** kwargs ) : with Context . current ( ) . transaction ( ) : return wrapped ( * args , ** kwargs ) return functools . update_wrapper ( wrapper , wrapped )
A decorator to denote that the content of the decorated function or method is to be ran in a transaction .
56,840
def execute ( stmt , args = ( ) ) : ctx = Context . current ( ) with ctx . mdr : cursor = ctx . execute ( stmt , args ) row_count = cursor . rowcount _safe_close ( cursor ) return row_count
Execute an SQL statement . Returns the number of affected rows .
56,841
def query ( stmt , args = ( ) , factory = None ) : ctx = Context . current ( ) factory = ctx . default_factory if factory is None else factory with ctx . mdr : return factory ( ctx . execute ( stmt , args ) , ctx . mdr )
Execute a query . This returns an iterator of the result set .
56,842
def query_row(stmt, args=(), factory=None):
    """Execute a query and return only the first row of the result set,
    or None when the query yields no rows."""
    return next(iter(query(stmt, args, factory)), None)
Execute a query . Returns the first row of the result set or None .
56,843
def query_value(stmt, args=(), default=None):
    """Execute a query and return the first value of the first row, or
    *default* when the query produces no rows."""
    _missing = object()
    row = next(iter(query(stmt, args, TupleFactory)), _missing)
    return default if row is _missing else row[0]
Execute a query returning the first value in the first row of the result set . If the query returns no result set a default value is returned which is None by default .
56,844
def execute_proc ( procname , args = ( ) ) : ctx = Context . current ( ) with ctx . mdr : cursor = ctx . execute_proc ( procname , args ) row_count = cursor . rowcount _safe_close ( cursor ) return row_count
Execute a stored procedure . Returns the number of affected rows .
56,845
def query_proc ( procname , args = ( ) , factory = None ) : ctx = Context . current ( ) factory = ctx . default_factory if factory is None else factory with ctx . mdr : return factory ( ctx . execute_proc ( procname , args ) , ctx . mdr )
Execute a stored procedure . This returns an iterator of the result set .
56,846
def query_proc_row(procname, args=(), factory=None):
    """Execute a stored procedure and return only the first row of the
    result set, or None when it yields no rows."""
    return next(iter(query_proc(procname, args, factory)), None)
Execute a stored procedure . Returns the first row of the result set or None .
56,847
def query_proc_value(procname, args=(), default=None):
    """Execute a stored procedure and return the first value of the first
    row of its result set, or *default* when it yields no rows."""
    _missing = object()
    row = next(iter(query_proc(procname, args, TupleFactory)), _missing)
    return default if row is _missing else row[0]
Execute a stored procedure returning the first value in the first row of the result set . If it returns no result set a default value is returned which is None by default .
56,848
def make_placeholders ( seq , start = 1 ) : if len ( seq ) == 0 : raise ValueError ( 'Sequence must have at least one element.' ) param_style = Context . current ( ) . param_style placeholders = None if isinstance ( seq , dict ) : if param_style in ( 'named' , 'pyformat' ) : template = ':%s' if param_style == 'named' else '%%(%s)s' placeholders = ( template % key for key in six . iterkeys ( seq ) ) elif isinstance ( seq , ( list , tuple ) ) : if param_style == 'numeric' : placeholders = ( ':%d' % i for i in xrange ( start , start + len ( seq ) ) ) elif param_style in ( 'qmark' , 'format' , 'pyformat' ) : placeholders = itertools . repeat ( '?' if param_style == 'qmark' else '%s' , len ( seq ) ) if placeholders is None : raise NotSupported ( "Param style '%s' does not support sequence type '%s'" % ( param_style , seq . __class__ . __name__ ) ) return ', ' . join ( placeholders )
Generate placeholders for the given sequence .
56,849
def make_file_object_logger ( fh ) : def logger_func ( stmt , args , fh = fh ) : now = datetime . datetime . now ( ) six . print_ ( "Executing (%s):" % now . isoformat ( ) , file = fh ) six . print_ ( textwrap . dedent ( stmt ) , file = fh ) six . print_ ( "Arguments:" , file = fh ) pprint . pprint ( args , fh ) return logger_func
Make a logger that logs to the given file object .
56,850
def current ( cls , with_exception = True ) : if with_exception and len ( cls . stack ) == 0 : raise NoContext ( ) return cls . stack . top ( )
Returns the current database context .
56,851
def transaction ( self ) : with self . mdr : try : self . _depth += 1 yield self self . _depth -= 1 except self . mdr . OperationalError : self . _depth -= 1 raise except : self . _depth -= 1 if self . _depth == 0 : self . mdr . rollback ( ) raise if self . _depth == 0 : self . mdr . commit ( )
Sets up a context where all the statements within it are ran within a single database transaction . For internal use only .
56,852
def cursor ( self ) : cursor = self . mdr . cursor ( ) with self . transaction ( ) : try : yield cursor if cursor . rowcount != - 1 : self . last_row_count = cursor . rowcount self . last_row_id = getattr ( cursor , 'lastrowid' , None ) except : self . last_row_count = None self . last_row_id = None _safe_close ( cursor ) raise
Get a cursor for the current connection . For internal use only .
56,853
def execute ( self , stmt , args ) : self . logger ( stmt , args ) with self . cursor ( ) as cursor : cursor . execute ( stmt , args ) return cursor
Execute a statement returning a cursor . For internal use only .
56,854
def execute_proc ( self , procname , args ) : self . logger ( procname , args ) with self . cursor ( ) as cursor : cursor . callproc ( procname , args ) return cursor
Execute a stored procedure returning a cursor . For internal use only .
56,855
def close ( self ) : self . logger = None for exc in _EXCEPTIONS : setattr ( self , exc , None ) try : self . mdr . close ( ) finally : self . mdr = None
Close the connection this context wraps .
56,856
def connect ( self ) : ctx = Context ( self . module , self . create_mediator ( ) ) ctx . logger = self . logger ctx . default_factory = self . default_factory return ctx
Returns a context that uses this pool as a connection source .
56,857
def close ( self ) : if self . mdr is None : return exc = ( None , None , None ) try : self . cursor . close ( ) except : exc = sys . exc_info ( ) try : if self . mdr . __exit__ ( * exc ) : exc = ( None , None , None ) except : exc = sys . exc_info ( ) self . mdr = None self . cursor = None if exc != ( None , None , None ) : six . reraise ( * exc )
Release all resources associated with this factory .
56,858
def add_item(cls, item, **kwargs):
    """Add *item* to the cart.

    Synchronous by default; callers may pass ``async=True`` through to
    the underlying HTTP helper.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async and sync branches of the original issued the
    # identical call, so dispatch once.
    return cls._add_item_with_http_info(item, **kwargs)
Add item .
56,859
def checkout(cls, order, **kwargs):
    """Check out the cart with the given *order* (sync by default;
    ``async=True`` passes through to the HTTP helper)."""
    kwargs['_return_http_data_only'] = True
    # Async and sync branches issued the identical call; dispatch once.
    return cls._checkout_with_http_info(order, **kwargs)
Checkout cart .
56,860
def delete_item(cls, item_id, **kwargs):
    """Remove the item identified by *item_id* from the cart."""
    kwargs['_return_http_data_only'] = True
    # Async and sync branches issued the identical call; dispatch once.
    return cls._delete_item_with_http_info(item_id, **kwargs)
Remove item .
56,861
def empty(cls, **kwargs):
    """Empty the cart."""
    kwargs['_return_http_data_only'] = True
    # Async and sync branches issued the identical call; dispatch once.
    return cls._empty_with_http_info(**kwargs)
Empty cart .
56,862
def get(cls, **kwargs):
    """Retrieve the current cart."""
    kwargs['_return_http_data_only'] = True
    # Async and sync branches issued the identical call; dispatch once.
    return cls._get_with_http_info(**kwargs)
Get cart .
56,863
def update_item(cls, item_id, item, **kwargs):
    """Update cart item *item_id* with the new *item* payload."""
    kwargs['_return_http_data_only'] = True
    # Async and sync branches issued the identical call; dispatch once.
    return cls._update_item_with_http_info(item_id, item, **kwargs)
Update cart .
56,864
def get_perm_names(cls, resource):
    """Return the permission names for every HTTP method the resource
    supports."""
    names = []
    for method in cls.METHODS:
        names.append(cls.get_perm_name(resource, method))
    return names
Return all permissions supported by the resource .
56,865
def get_perm_name(cls, resource, method):
    """Compose a permission name of the form
    '<prefix>_<resource-name>_<lowercased-method>'."""
    resource_name = cls._get_resource_name(resource)
    return '{}_{}_{}'.format(cls.PREFIX, resource_name, method.lower())
Compose permission name
56,866
def _has_perm ( self , user , permission ) : if user . is_superuser : return True if user . is_active : perms = [ perm . split ( '.' ) [ 1 ] for perm in user . get_all_permissions ( ) ] return permission in perms return False
Check whether the user has the given permission
56,867
def is_local_url ( target ) : ref_url = urlparse ( cfg . get ( 'CFG_SITE_SECURE_URL' ) ) test_url = urlparse ( urljoin ( cfg . get ( 'CFG_SITE_SECURE_URL' ) , target ) ) return test_url . scheme in ( 'http' , 'https' ) and ref_url . netloc == test_url . netloc
Determine if URL is a local .
56,868
def rewrite_to_secure_url(url, secure_base=None):
    """Rewrite *url* so that its scheme and host are taken from
    *secure_base* (defaults to the configured CFG_SITE_SECURE_URL)."""
    if secure_base is None:
        secure_base = cfg.get('CFG_SITE_SECURE_URL')
    secure_parts = urlparse(secure_base)
    parts = list(urlparse(url))
    parts[0] = secure_parts[0]  # scheme
    parts[1] = secure_parts[1]  # netloc
    return urlunparse(parts)
Rewrite URL to a Secure URL
56,869
def create_html_link ( urlbase , urlargd , link_label , linkattrd = None , escape_urlargd = True , escape_linkattrd = True , urlhash = None ) : attributes_separator = ' ' output = '<a href="' + create_url ( urlbase , urlargd , escape_urlargd , urlhash ) + '"' if linkattrd : output += ' ' if escape_linkattrd : attributes = [ escape ( str ( key ) , quote = True ) + '="' + escape ( str ( linkattrd [ key ] ) , quote = True ) + '"' for key in linkattrd . keys ( ) ] else : attributes = [ str ( key ) + '="' + str ( linkattrd [ key ] ) + '"' for key in linkattrd . keys ( ) ] output += attributes_separator . join ( attributes ) output = wash_for_utf8 ( output ) output += '>' + wash_for_utf8 ( link_label ) + '</a>' return output
Creates a W3C compliant link .
56,870
# Return (canonical_url, {lang: alternate_url}) for an Invenio URL.
# The canonical URL keeps the original path/query on the CFG_SITE_URL host,
# with the ln= argument stripped when drop_ln is True; each alternate URL is
# the same URL with ln=<lang> appended for every configured site language.
# NOTE(review): quote_path uses py2 urllib.quote — confirm before porting.
def get_canonical_and_alternates_urls ( url , drop_ln = True , washed_argd = None , quote_path = False ) : dummy_scheme , dummy_netloc , path , dummy_params , query , fragment = urlparse ( url ) canonical_scheme , canonical_netloc = urlparse ( cfg . get ( 'CFG_SITE_URL' ) ) [ 0 : 2 ] parsed_query = washed_argd or parse_qsl ( query ) no_ln_parsed_query = [ ( key , value ) for ( key , value ) in parsed_query if key != 'ln' ] if drop_ln : canonical_parsed_query = no_ln_parsed_query else : canonical_parsed_query = parsed_query if quote_path : path = urllib . quote ( path ) canonical_query = urlencode ( canonical_parsed_query ) canonical_url = urlunparse ( ( canonical_scheme , canonical_netloc , path , dummy_params , canonical_query , fragment ) ) alternate_urls = { } for ln in cfg . get ( 'CFG_SITE_LANGS' ) : alternate_query = urlencode ( no_ln_parsed_query + [ ( 'ln' , ln ) ] ) alternate_url = urlunparse ( ( canonical_scheme , canonical_netloc , path , dummy_params , alternate_query , fragment ) ) alternate_urls [ ln ] = alternate_url return canonical_url , alternate_urls
Given an Invenio URL, returns a tuple of two elements. The first is the canonical URL, that is the original URL with the CFG_SITE_URL prefix and the ln= argument stripped. The second element is a mapping from language code to the corresponding alternate URL.
56,871
def same_urls_p(a, b):
    """Compare two URLs for equality, ignoring the order of query arguments."""
    parsed_a = list(urlparse(a))
    parsed_b = list(urlparse(b))
    # Index 4 is the query string; compare it as a parsed multidict, not text.
    parsed_a[4] = parse_qs(parsed_a[4])
    parsed_b[4] = parse_qs(parsed_b[4])
    return parsed_a == parsed_b
Compare two URLs ignoring reorganizing of query arguments
56,872
def make_user_agent_string(component=None):
    """Return the uniform user-agent string Invenio uses as an HTTP client."""
    agent = "Invenio-%s (+%s; \"%s\")" % (cfg.get('CFG_VERSION'),
                                          cfg.get('CFG_SITE_URL'),
                                          cfg.get('CFG_SITE_NAME'))
    if component:
        agent += " %s" % component
    return agent
Return a nice and uniform user - agent string to be used when Invenio act as a client in HTTP requests .
56,873
def make_invenio_opener(component=None):
    """Return a urllib2 opener with the Invenio User-agent header preset."""
    user_agent = make_user_agent_string(component)
    opener = urllib2.build_opener()
    opener.addheaders = [('User-agent', user_agent)]
    return opener
Return an urllib2 opener with the useragent already set in the appropriate way .
56,874
# Build a (optionally HMAC-SHA1-signed) URL for the Indico HTTP Export API.
# The signature is computed over the path plus the sorted, urlencoded params
# (timestamp included), so parameter order matters only before sorting.
# NOTE(review): py2-era code — sys.version_info < (2, 5) branch and the
# _MySHA1 wrapper exist for ancient hmac compatibility; confirm before porting.
def create_Indico_request_url ( base_url , indico_what , indico_loc , indico_id , indico_type , indico_params , indico_key , indico_sig , _timestamp = None ) : url = '/export/' + indico_what + '/' if indico_loc : url += indico_loc + '/' if type ( indico_id ) in ( list , tuple ) : indico_id = '-' . join ( [ str ( x ) for x in indico_id ] ) url += indico_id + '.' + str ( indico_type ) if hasattr ( indico_params , 'items' ) : items = indico_params . items ( ) else : items = list ( indico_params ) if indico_key : items . append ( ( 'apikey' , indico_key ) ) if indico_sig and HASHLIB_IMPORTED : if _timestamp : items . append ( ( 'timestamp' , str ( _timestamp ) ) ) else : items . append ( ( 'timestamp' , str ( int ( time . time ( ) ) ) ) ) items = sorted ( items , key = lambda x : x [ 0 ] . lower ( ) ) url_to_sign = '%s?%s' % ( url , urlencode ( items ) ) if sys . version_info < ( 2 , 5 ) : my_digest_algo = _MySHA1 ( sha1 ( ) ) else : my_digest_algo = sha1 signature = hmac . new ( indico_sig , url_to_sign , my_digest_algo ) . hexdigest ( ) items . append ( ( 'signature' , signature ) ) elif not HASHLIB_IMPORTED : current_app . logger . warning ( "Module hashlib not installed. Please install it." ) if not items : return url url = '%s%s?%s' % ( base_url . strip ( '/' ) , url , urlencode ( items ) ) return url
Create a signed Indico request URL to access Indico HTTP Export APIs .
56,875
def auto_version_url(file_path):
    """Append an MD5 digest of the file's content to *file_path* as a query
    string, so browsers refresh their cache whenever the file changes.

    Returns ``file_path + "?<md5>"``; the digest is empty when the file is
    unreadable.
    """
    file_md5 = ""
    try:
        # BUG FIX: the original leaked the file handle (open() without close);
        # a context manager guarantees it is released.
        with open(cfg.get('CFG_WEBDIR') + os.sep + file_path) as static_file:
            file_md5 = md5(static_file.read()).hexdigest()
    except IOError:
        pass
    return file_path + "?%s" % file_md5
Appends the MD5 hash of the file content to the request URL so the browser refreshes its cache whenever the file changes.
56,876
def function_arg_count(fn):
    """Return how many positional arguments *fn* declares.

    Callables without introspectable code objects (e.g. builtins) are
    assumed to take a single argument.
    """
    assert callable(fn), 'function_arg_count needed a callable function, not {0}'.format(repr(fn))
    code = getattr(fn, '__code__', None)
    if code is not None and hasattr(code, 'co_argcount'):
        return code.co_argcount
    return 1
returns how many arguments a function has
56,877
def merge(left, right, how='inner', key=None, left_key=None, right_key=None,
          left_as='left', right_as='right'):
    """Join *left* and *right*, combining matched rows with a union join."""
    union = make_union_join(left_as, right_as)
    return join(left, right, how, key, left_key, right_key, join_fn=union)
Performs a join using the union join function .
56,878
def _inner_join(left, right, left_key_fn, right_key_fn, join_fn=union_join):
    """Inner join: combine each left element with every right element sharing its key."""
    # Index the right side by key once, then probe it per left element.
    by_key = defaultdict(list)
    for right_ele in right:
        by_key[right_key_fn(right_ele)].append(right_ele)
    return [join_fn(left_ele, match)
            for left_ele in left
            for match in by_key[left_key_fn(left_ele)]]
Inner join using left and right key functions
56,879
def group(iterable, key=lambda ele: ele):
    """Group *iterable* by an attribute name or a key-access function."""
    key_fn = key if callable(key) else make_key_fn(key)
    return _group(iterable, key_fn)
Groups an iterable by a specified attribute or using a specified key access function . Returns tuples of grouped elements .
56,880
def trigger_keyphrases ( text = None , keyphrases = None , response = None , function = None , kwargs = None , confirm = False , confirmation_prompt = "Do you want to continue? (y/n)" , confirmation_feedback_confirm = "confirm" , confirmation_feedback_deny = "deny" ) : if any ( pattern in text for pattern in keyphrases ) : if confirm : return confirmation ( prompt = confirmation_prompt , feedback_confirm = confirmation_feedback_confirm , feedback_deny = confirmation_feedback_deny , function = function , kwargs = kwargs ) if function and not kwargs : result = function ( ) elif function and kwargs : result = function ( ** kwargs ) else : result = None if response : return response elif not response and result : return str ( result ) else : return True else : return False
Parse input text for keyphrases . If any keyphrases are found respond with text or by seeking confirmation or by engaging a function with optional keyword arguments . Return text or True if triggered and return False if not triggered . If confirmation is required a confirmation object is returned encapsulating a function and its optional arguments .
56,881
def parse(text=None, humour=75):
    """Parse *text* against a battery of keyphrase triggers.

    Returns False when nothing triggers, the triggered value when exactly one
    trigger fires, and a list of values when several fire.
    """
    triggers = []
    if humour >= 75:
        triggers.extend([
            trigger_keyphrases(text=text, keyphrases=["image"], response="http://i.imgur.com/MiqrlTh.jpg"),
            trigger_keyphrases(text=text, keyphrases=["sup", "hi"], response="sup home bean"),
            trigger_keyphrases(text=text, keyphrases=["thanks", "thank you"], response="you're welcome, boo ;)")
        ])
    triggers.extend([
        trigger_keyphrases(text=text, keyphrases=["where are you", "IP", "I.P.", "IP address", "I.P. address", "ip address"], function=report_IP),
        trigger_keyphrases(text=text, keyphrases=["how are you", "are you well", "status"], function=report_system_status, kwargs={"humour": humour}),
        trigger_keyphrases(text=text, keyphrases=["heartbeat"], function=heartbeat_message),
        trigger_keyphrases(text=text, keyphrases=["METAR"], function=report_METAR, kwargs={"text": text}),
        # BUG FIX: report_TAF and report_rain_times were passed as ``response``,
        # which made trigger_keyphrases return the bare function object and
        # silently drop ``kwargs`` (kwargs are only forwarded to ``function``).
        trigger_keyphrases(text=text, keyphrases=["TAF"], function=report_TAF, kwargs={"text": text}),
        trigger_keyphrases(text=text, keyphrases=["rain"], function=report_rain_times, kwargs={"text": text})
    ])
    triggers.extend([
        # NOTE(review): command() runs eagerly here even when the keyphrase
        # never matches — possibly intended as function=command; left as-is
        # to preserve behavior. TODO confirm with the author.
        trigger_keyphrases(text=text, keyphrases=["command", "run command", "engage command", "execute command"], response=command()),
        trigger_keyphrases(text=text, keyphrases=["restart"], function=restart, confirm=True, confirmation_prompt="Do you want to restart this program? (y/n)", confirmation_feedback_confirm="confirm restart", confirmation_feedback_deny="deny restart")
    ])
    if not any(triggers):
        return False
    responses = [response for response in triggers if response]
    return responses if len(responses) > 1 else responses[0]
Parse input text using various triggers some returning text and some for engaging functions . If triggered a trigger returns text or True if and if not triggered returns False . If no triggers are triggered return False if one trigger is triggered return the value returned by that trigger and if multiple triggers are triggered return a list of the values returned by those triggers .
56,882
def multiparse(text=None, parsers=[parse], help_message=None):
    """Run *text* through every parser in *parsers*.

    Returns the single triggered value, a list when several parsers trigger,
    and False (or *help_message*, if given) when none trigger.

    NOTE(review): the mutable default ``[parse]`` is shared across calls;
    kept for interface compatibility — callers should pass their own list.
    """
    responses = []
    for _parser in parsers:
        response = _parser(text=text)
        if response is not False:
            # BUG FIX: the original tested ``response is list`` — an identity
            # comparison against the *type* ``list`` that is always False —
            # so list results were nested instead of flattened.
            responses.extend(response if isinstance(response, list) else [response])
    if not any(responses):
        return help_message if help_message else False
    return responses if len(responses) > 1 else responses[0]
Parse input text by looping over a list of multiple parsers . If one trigger is triggered return the value returned by that trigger if multiple triggers are triggered return a list of the values returned by those triggers . If no triggers are triggered return False or an optional help message .
56,883
def run(self):
    """Invoke the stored function, forwarding stored keyword arguments if any."""
    if not self._function:
        return None
    if self._kwargs:
        return self._function(**self._kwargs)
    return self._function()
Engage contained function with optional keyword arguments .
56,884
def tax_class_based_on(self, tax_class_based_on):
    """Set which address drives tax-class resolution on this TaxSettings.

    :raises ValueError: if the value is neither None nor an allowed choice.
    """
    allowed_values = ["shippingAddress", "billingAddress"]
    valid = tax_class_based_on is None or tax_class_based_on in allowed_values
    if not valid:
        raise ValueError(
            "Invalid value for `tax_class_based_on` ({0}), must be one of {1}"
            .format(tax_class_based_on, allowed_values)
        )
    self._tax_class_based_on = tax_class_based_on
Sets the tax_class_based_on of this TaxSettings .
56,885
def recurse_up(directory, filename):
    """Walk *directory* upwards until one of its ancestors contains *filename*.

    Returns the containing directory, or False if the filesystem root is
    reached without finding *filename*.
    """
    directory = osp.abspath(directory)
    while True:
        if osp.isfile(osp.join(directory, filename)):
            return directory
        if directory == '/':
            return False
        directory = osp.dirname(directory)
Recursive walk a directory up to root until it contains filename
56,886
def etree_to_dict(tree):
    """Translate an lxml element into a nested dictionary.

    The key is the tag with its ``{namespace}`` prefix stripped; the value is
    the list of translated children, or the element's text for leaves.
    """
    children = list(tree.iterchildren())
    # BUG FIX: under Python 3 ``map(...)`` yields a (truthy) iterator even for
    # an empty element, so leaves never fell back to ``tree.text``; force the
    # list so the emptiness test works on both Python 2 and 3.
    value = [etree_to_dict(child) for child in children] if children else tree.text
    # NOTE(review): assumes the tag is namespaced ("{ns}tag"); a plain tag
    # would raise IndexError on split('}')[1] — TODO confirm inputs.
    return {tree.tag.split('}')[1]: value}
Translate etree into dictionary .
56,887
# Stream the body of an already-opened *request* into *download_to_file*,
# validating the Content-Type first and the resulting file size afterwards.
# Raises InvenioFileDownloadError on type mismatch, read/write failure, or an
# empty result. Returns the destination path on success.
# NOTE(review): the file is opened in text mode ('w') — presumably fine on
# the py2 platform this targets; confirm binary mode before porting.
def finalize_download ( url , download_to_file , content_type , request ) : if content_type and content_type not in request . headers [ 'content-type' ] : msg = 'The downloaded file is not of the desired format' raise InvenioFileDownloadError ( msg ) to_file = open ( download_to_file , 'w' ) try : try : while True : block = request . read ( CFG_FILEUTILS_BLOCK_SIZE ) if not block : break to_file . write ( block ) except Exception as e : msg = "Error when downloading %s into %s: %s" % ( url , download_to_file , e ) raise InvenioFileDownloadError ( msg ) finally : to_file . close ( ) filesize = os . path . getsize ( download_to_file ) if filesize == 0 : raise InvenioFileDownloadError ( "%s seems to be empty" % ( url , ) ) return download_to_file
Finalizes the download operation by doing various checks such as format type size check etc .
56,888
def download_local_file(filename, download_to_file):
    """Copy a local file (given as a file URL/path) into Invenio's temp area.

    The source path must be normalized and lie under one of the configured
    allowed paths. Raises InvenioFileCopyError on any failure; returns the
    destination path on success.
    """
    try:
        path = urllib2.urlparse.urlsplit(urllib.unquote(filename))[2]
        if os.path.abspath(path) != path:
            msg = "%s is not a normalized path (would be %s)." % (path, os.path.normpath(path))
            raise InvenioFileCopyError(msg)
        # NOTE(review): append() mutates the list returned by config.get —
        # presumably a fresh list each call; confirm the config accessor.
        allowed_path_list = current_app.config.get('CFG_BIBUPLOAD_FFT_ALLOWED_LOCAL_PATHS', [])
        allowed_path_list.append(current_app.config['CFG_TMPSHAREDDIR'])
        for allowed_path in allowed_path_list:
            if path.startswith(allowed_path):
                shutil.copy(path, download_to_file)
                if os.path.getsize(download_to_file) == 0:
                    os.remove(download_to_file)
                    msg = "%s seems to be empty" % (filename,)
                    raise InvenioFileCopyError(msg)
                break
        else:
            msg = "%s is not in one of the allowed paths." % (path,)
            # BUG FIX: the original raised InvenioFileCopyError() without the
            # message it had just built, losing the diagnostic.
            raise InvenioFileCopyError(msg)
    except Exception as e:
        msg = "Impossible to copy the local file '%s' to %s: %s" % (filename, download_to_file, str(e))
        raise InvenioFileCopyError(msg)
    return download_to_file
Copies a local file to Invenio s temporary directory .
56,889
# Create a temp file in CFG_TMPSHAREDDIR whose basename contains no '.'
# except the one introduced by *suffix*; retries mkstemp until the random
# part is dot-free, deleting each rejected attempt. Returns the path.
def safe_mkstemp ( suffix , prefix = 'filedownloadutils_' ) : tmpfd , tmppath = tempfile . mkstemp ( suffix = suffix , prefix = prefix , dir = current_app . config [ 'CFG_TMPSHAREDDIR' ] ) os . close ( tmpfd ) if '.' not in suffix : return tmppath while '.' in os . path . basename ( tmppath ) [ : - len ( suffix ) ] : os . remove ( tmppath ) tmpfd , tmppath = tempfile . mkstemp ( suffix = suffix , prefix = prefix , dir = current_app . config [ 'CFG_TMPSHAREDDIR' ] ) os . close ( tmpfd ) return tmppath
Create a temporary filename that does not contain any '.' apart from the one introduced by the suffix.
56,890
def open_url(url, headers=None):
    """Open *url* with the shared opener, attaching *headers* without checks."""
    request = urllib2.Request(url)
    for key, value in (headers or {}).items():
        request.add_header(key, value)
    return URL_OPENER.open(request)
Opens a URL . If headers are passed as argument no check is performed and the URL will be opened .
56,891
# GET *url* on the shared aiohttp session and parse the body as JSON.
# Returns the parsed object, the raw text when astext=True, or None on a
# non-200 status, timeout/client error (unless exceptions=True, which
# re-raises), or an empty body; logs undecodable non-empty bodies.
async def get_json ( self , url , timeout = 30 , astext = False , exceptions = False ) : try : with async_timeout . timeout ( timeout ) : res = await self . _aio_session . get ( url ) if res . status != 200 : _LOGGER . error ( "QSUSB returned %s [%s]" , res . status , url ) return None res_text = await res . text ( ) except ( aiohttp . client_exceptions . ClientError , asyncio . TimeoutError ) as exc : if exceptions : raise exc return None if astext : return res_text try : return json . loads ( res_text ) except json . decoder . JSONDecodeError : if res_text . strip ( " " ) == "" : return None _LOGGER . error ( "Could not decode %s [%s]" , res_text , url )
Get URL and parse JSON from text .
56,892
def stop(self):
    """Stop listening: clear the running flag and cancel any pending sleep task."""
    self._running = False
    pending = self._sleep_task
    if pending:
        pending.cancel()
        self._sleep_task = None
Stop listening .
56,893
# Long-poll URL_LISTEN forever while self._running is set, dispatching each
# received QS_CMD packet to *callback*. Timeouts retry immediately; client
# errors back off 30s via a cancellable sleep task; callback exceptions are
# logged but do not stop the loop.
async def _async_listen ( self , callback = None ) : while True : if not self . _running : return try : packet = await self . get_json ( URL_LISTEN . format ( self . _url ) , timeout = 30 , exceptions = True ) except asyncio . TimeoutError : continue except aiohttp . client_exceptions . ClientError as exc : _LOGGER . warning ( "ClientError: %s" , exc ) self . _sleep_task = self . loop . create_task ( asyncio . sleep ( 30 ) ) try : await self . _sleep_task except asyncio . CancelledError : pass self . _sleep_task = None continue if isinstance ( packet , dict ) and QS_CMD in packet : _LOGGER . debug ( "callback( %s )" , packet ) try : callback ( packet ) except Exception as err : _LOGGER . error ( "Exception in callback\nType: %s: %s" , type ( err ) , err ) else : _LOGGER . debug ( "unknown packet? %s" , packet )
Listen loop .
56,894
def attowiki_distro_path():
    """Return the absolute directory in which attowiki is installed."""
    location = os.path.abspath(__file__)
    if location.endswith('/'):
        # Defensive: drop a trailing slash before finding the parent.
        cut = location[:-1].rfind('/')
    else:
        cut = location.rfind('/')
    return location[:cut]
return the absolute complete path where attowiki is located
56,895
def build_command(self):
    """Build the shell pipeline that appends this job to the user's crontab."""
    crontab_line = (
        "crontab -l | {{ cat; echo \"{} {} {} {} {} CJOBID='{}' MAILTO='' {}\"; }} "
        "| crontab - > /dev/null"
    ).format(
        self._minute,
        self._hour,
        self._day_of_month,
        self._month_of_year,
        self._day_of_week,
        self._jobid,
        self._command,
    )
    return cron_utils.cronify(crontab_line)
Build out the crontab command
56,896
def infer_format(filename: str) -> str:
    """Return the extension (including the leading dot) of *filename*."""
    return os.path.splitext(filename)[1]
Return extension identifying format of given filename
56,897
def reversed_graph(graph: dict) -> dict:
    """Return *graph* with every edge reversed, as a plain dict of sets."""
    reversed_edges = defaultdict(set)
    for source, successors in graph.items():
        for successor in successors:
            reversed_edges[successor].add(source)
    return dict(reversed_edges)
Return given graph reversed
56,898
def have_cycle(graph: dict) -> frozenset:
    """Detect cycles by repeatedly peeling nodes with no un-walked predecessors.

    Returns the frozenset of nodes that could not be topologically removed
    (nodes on a cycle, or reachable only through one); empty when acyclic.
    """
    nodes = frozenset(it.chain(it.chain.from_iterable(graph.values()),
                               graph.keys()))
    # Predecessor map (edge reversal inlined from reversed_graph).
    preds = defaultdict(set)
    for node, succs in graph.items():
        for succ in succs:
            preds[succ].add(node)
    walked = set()
    progressed = True
    while progressed:
        progressed = False
        for node in nodes - walked:
            if not (preds.get(node, set()) - walked):
                walked.add(node)
                progressed = True
    return frozenset(nodes - walked)
Perform a topologic sort to detect any cycle .
56,899
def file_lines(bblfile: str) -> iter:
    """Yield the non-empty lines of *bblfile*, right-stripped of whitespace."""
    with open(bblfile) as stream:
        for raw_line in stream:
            line = raw_line.rstrip()
            if line:
                yield line
Yield lines found in given file