idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
7,000
def getsockopt(self, level, optname, *args, **kwargs):
    """Return the value of the given socket option (delegates to the wrapped socket)."""
    sock = self._sock
    return sock.getsockopt(level, optname, *args, **kwargs)
get the value of a given socket option
7,001
def makefile(self, mode='r', bufsize=-1):
    """Wrap the underlying socket in a file-like SocketFile, copying this socket's timeout."""
    wrapper = SocketFile(self._sock, mode)
    wrapper._sock.settimeout(self.gettimeout())
    return wrapper
create a file - like object that wraps the socket
7,002
def recvfrom(self, bufsize, flags=0):
    """Receive data on a socket that isn't necessarily a 1-to-1 connection.

    Cooperatively blocks (via the scheduler's readable event) until data
    arrives, the timeout elapses, or the scheduler is interrupted.
    Returns (data, address) as from socket.recvfrom.
    """
    # NOTE: this is Python 2 code (comma except-clause, sys.exc_clear).
    with self._registered('re'):
        while 1:
            if self._closed:
                raise socket.error(errno.EBADF, "Bad file descriptor")
            try:
                return self._sock.recvfrom(bufsize, flags)
            except socket.error, exc:
                # Re-raise unless non-blocking mode masked a would-block errno.
                if not self._blocking or exc[0] not in _BLOCKING_OP:
                    raise
                sys.exc_clear()
            # wait() returning truthy here signals the timeout elapsed —
            # TODO(review): confirm against the event implementation.
            if self._readable.wait(self.gettimeout()):
                raise socket.timeout("timed out")
            if scheduler.state.interrupted:
                raise IOError(errno.EINTR, "interrupted system call")
receive data on a socket that isn't necessarily a 1-to-1 connection
7,003
def send ( self , data , flags = 0 ) : with self . _registered ( 'we' ) : while 1 : try : return self . _sock . send ( data ) except socket . error , exc : if exc [ 0 ] not in _CANT_SEND or not self . _blocking : raise if self . _writable . wait ( self . gettimeout ( ) ) : raise socket . timeout ( "timed out" ) if scheduler . state . interrupted : raise IOError ( errno . EINTR , "interrupted system call" )
send data over the socket connection
7,004
def sendall(self, data, flags=0):
    """Keep calling send() until every byte of `data` has gone out."""
    total = self.send(data, flags)
    while total < len(data):
        total += self.send(data[total:], flags)
send data over the connection and keep sending until it all goes
7,005
def setsockopt(self, level, optname, value):
    """Set the given socket option on the wrapped socket."""
    underlying = self._sock
    return underlying.setsockopt(level, optname, value)
set the value of a given socket option
7,006
def settimeout(self, timeout):
    """Set this socket's timeout in seconds; None means block forever."""
    self._timeout = None if timeout is None else float(timeout)
set the timeout for this specific socket
7,007
def get_version():
    """Return the package version: read version.txt pre-install, else the installed metadata."""
    if INSTALLED:
        return p_version.get_version(HERE)
    try:
        with open('version.txt', 'r') as handle:
            return handle.read()
    except Exception:
        warnings.warn('Unable to resolve package version until installed', UserWarning)
        return '0.0.0'
find current version information
7,008
def add_span(self, term_span):
    """Create a new span from the given term ids and append it to this element's node."""
    span = Cspan()
    span.create_from_ids(term_span)
    self.node.append(span.get_node())
Adds a list of term ids as a new span in the references
7,009
def remove_span(self, span):
    """Detach the given span's underlying node from this element."""
    self.node.remove(span.get_node())
Removes a specific span from the coref object
7,010
def to_kaf(self):
    """Convert the coreference layer from NAF to KAF by renaming 'id' attributes to 'coid'."""
    if self.type != 'NAF':
        return
    for coref_node in self.__get_corefs_nodes():
        coref_node.set('coid', coref_node.get('id'))
        del coref_node.attrib['id']
Converts the coreference layer to KAF
7,011
def to_naf(self):
    """Convert the coreference layer from KAF to NAF by renaming 'coid' attributes to 'id'."""
    if self.type != 'KAF':
        return
    for coref_node in self.__get_corefs_nodes():
        coref_node.set('id', coref_node.get('coid'))
        del coref_node.attrib['coid']
Converts the coreference layer to NAF
7,012
def remove_coreference(self, coid):
    """Remove the first coref child element whose id equals `coid`."""
    for coref in self.node.findall('coref'):
        if coref.get('id') == coid:
            self.node.remove(coref)
            return
Removes the coreference with specific identifier
7,013
def vt2esofspy(vesseltree, outputfilename="tracer.txt", axisorder=[0, 1, 2]):
    """Export a vessel tree to the esofspy tracer text format.

    vesseltree may be a path to a serialized tree (loaded via io3d) or an
    already-loaded dict. axisorder permutes the Z/Y/X axes on output.
    NOTE(review): mutable default argument `axisorder=[0,1,2]` — safe only
    because it is never mutated here.
    """
    if (type(vesseltree) == str) and os.path.isfile(vesseltree):
        import io3d
        vt = io3d.misc.obj_from_file(vesseltree)
    else:
        vt = vesseltree
    # Debug prints left in by the author.
    print(vt['general'])
    print(vt.keys())
    vtgm = vt['graph']['microstructure']
    lines = []
    vs = vt['general']['voxel_size_mm']
    sh = vt['general']['shape_px']
    ax = axisorder
    lines.append("#Tracer+\n")
    lines.append("#voxelsize mm %f %f %f\n" % (vs[ax[0]], vs[ax[1]], vs[ax[2]]))
    lines.append("#shape %i %i %i\n" % (sh[ax[0]], sh[ax[1]], sh[ax[2]]))
    # Placeholder segment count; overwritten below once the real count is known.
    lines.append(str(len(vtgm) * 2) + "\n")
    i = 1
    for id in vtgm:
        try:
            nda = vtgm[id]['nodeA_ZYX']
            ndb = vtgm[id]['nodeB_ZYX']
            lines.append("%i\t%i\t%i\t%i\n" % (nda[ax[0]], nda[ax[1]], nda[ax[2]], i))
            lines.append("%i\t%i\t%i\t%i\n" % (ndb[ax[0]], ndb[ax[1]], ndb[ax[2]], i))
            i += 1
        except:
            # NOTE(review): bare except silently skips malformed segments —
            # consider narrowing to KeyError.
            pass
    lines.append("%i\t%i\t%i\t%i" % (0, 0, 0, 0))
    # Rewrite the count header (index 3) with the number of segments written.
    lines[3] = str(i - 1) + "\n"
    with open(outputfilename, 'wt') as f:
        f.writelines(lines)
exports vesseltree to esofspy format
7,014
def constq_grun(v, v0, gamma0, q):
    """Gruneisen parameter under the constant-q assumption: gamma0 * (v/v0)**q."""
    return gamma0 * np.power(v / v0, q)
calculate Gruneisen parameter for constant q
7,015
def constq_debyetemp(v, v0, gamma0, q, theta0):
    """Debye temperature for constant q; uses uncertainty-aware exp when inputs carry errors."""
    gamma = constq_grun(v, v0, gamma0, q)
    exp_fn = unp.exp if isuncertainties([v, v0, gamma0, q, theta0]) else np.exp
    return theta0 * exp_fn((gamma0 - gamma) / q)
calculate Debye temperature for constant q
7,016
def constq_pth(v, temp, v0, gamma0, q, theta0, n, z, t_ref=300., three_r=3. * constants.R):
    """Thermal pressure (GPa) in the constant-q Debye model, referenced to t_ref."""
    v_mol = vol_uc2mol(v, z)
    gamma = constq_grun(v, v0, gamma0, q)
    theta = constq_debyetemp(v, v0, gamma0, q, theta0)
    energy_at_t = three_r * n * temp * debye_E(theta / temp)
    # At a zero reference temperature the reference thermal energy vanishes.
    if t_ref == 0.:
        energy_at_ref = 0.
    else:
        energy_at_ref = three_r * n * t_ref * debye_E(theta / t_ref)
    delta_energy = energy_at_t - energy_at_ref
    return (gamma / v_mol * delta_energy) * 1.e-9
calculate thermal pressure for constant q
7,017
def getValidReff(self, urn, inventory=None, level=None):
    """Retrieve valid URN references for a text."""
    params = {
        "inv": inventory,
        "urn": urn,
        "level": level,
        "request": "GetValidReff",
    }
    return self.call(params)
Retrieve valid urn - references for a text
7,018
def getPassage(self, urn, inventory=None, context=None):
    """Retrieve a passage identified by its URN."""
    params = {
        "inv": inventory,
        "urn": urn,
        "context": context,
        "request": "GetPassage",
    }
    return self.call(params)
Retrieve a passage
7,019
def getPassagePlus(self, urn, inventory=None, context=None):
    """Retrieve a passage together with its metadata."""
    params = {
        "inv": inventory,
        "urn": urn,
        "context": context,
        "request": "GetPassagePlus",
    }
    return self.call(params)
Retrieve a passage and information about it
7,020
def parse_intervals(diff_report):
    """Yield a ContextInterval for every added or removed line in the diff report."""
    for patch in diff_report.patch_set:
        try:
            old_pf = diff_report.old_file(patch.source_file)
            new_pf = diff_report.new_file(patch.target_file)
        except InvalidPythonFile:
            continue  # non-Python files contribute no intervals
        for hunk in patch:
            for line in hunk:
                kind = line.line_type
                if kind == LINE_TYPE_ADDED:
                    yield ContextInterval(new_pf.filename, new_pf.context(line.target_line_no))
                elif kind == LINE_TYPE_REMOVED:
                    yield ContextInterval(old_pf.filename, old_pf.context(line.source_line_no))
                elif kind not in (LINE_TYPE_EMPTY, LINE_TYPE_CONTEXT):
                    raise AssertionError("Unexpected line type: %s" % line)
Parse a diff into an iterator of Intervals .
7,021
def string_literal(content):
    """Wrap content as an XPath string literal, picking a quote style not used inside it."""
    has_double = '"' in content
    if has_double and "'" in content:
        raise ValueError("Cannot represent this string in XPath")
    if has_double:
        return "'%s'" % content
    return '"%s"' % content
Choose a string literal that can wrap our string .
7,022
def element_id_by_label(browser, label):
    """Return the `for` attribute of the visible label with the given text, or False."""
    xpath = '//label[contains(., %s)]' % string_literal(label)
    found = ElementSelector(browser, str(xpath))
    if not found:
        return False
    return found.get_attribute('for')
The ID of an element referenced by a label's `for` attribute. The label must be visible.
7,023
def find_button(browser, value):
    """Find a button of any recognised type with the given value."""
    kinds = ('submit', 'reset', 'button-element', 'button', 'image', 'button-role')
    per_kind = (find_field_with_value(browser, kind, value) for kind in kinds)
    return reduce(operator.add, per_kind)
Find a button with the given value .
7,024
def find_field(browser, field_type, value):
    """Locate an input field by id, name, or label text."""
    by_id = find_field_by_id(browser, field_type, value)
    by_name = find_field_by_name(browser, field_type, value)
    by_label = find_field_by_label(browser, field_type, value)
    return by_id + by_name + by_label
Locate an input field .
7,025
def find_field_by_id(browser, field_type, id):
    """Locate the displayed control input with the given id."""
    xpath = field_xpath(field_type, 'id') % string_literal(id)
    return ElementSelector(browser, xpath=xpath, filter_displayed=True)
Locate the control input with the given id .
7,026
def find_field_by_name(browser, field_type, name):
    """Locate the displayed control input with the given name.

    Consistency fix: pass the XPath via the ``xpath`` keyword, matching the
    sibling helpers find_field_by_id / find_field_by_label instead of relying
    on ElementSelector's positional argument order.
    """
    return ElementSelector(
        browser,
        xpath=field_xpath(field_type, 'name') % string_literal(name),
        filter_displayed=True,
    )
Locate the control input with the given name .
7,027
def find_field_by_value(browser, field_type, name):
    """Locate enabled, displayed inputs with the given value, keeping only the shortest match."""
    xpath = field_xpath(field_type, 'value') % string_literal(name)
    candidates = ElementSelector(
        browser,
        xpath=str(xpath),
        filter_displayed=True,
        filter_enabled=True,
    )
    # Buttons compare by visible text; other inputs by their value attribute.
    if field_type in ('button-element', 'button-role'):
        sort_key = lambda elem: len(elem.text)
    else:
        sort_key = lambda elem: len(elem.get_attribute('value'))
    best = sorted(candidates, key=sort_key)[:1]
    return ElementSelector(browser, elements=best)
Locate the control input with the given value . Useful for buttons .
7,028
def find_field_by_label(browser, field_type, label):
    """Locate the control input referenced by a label's `for` attribute."""
    label_ref = '//label[contains(., {0})]/@for'.format(string_literal(label))
    return ElementSelector(
        browser,
        xpath=field_xpath(field_type, 'id') % label_ref,
        filter_displayed=True,
    )
Locate the control input that has a label pointing to it .
7,029
def wait_for(func):
    """Decorator: retry `func` on AssertionError until it passes or `timeout` elapses.

    The wrapped function gains an optional `timeout` keyword (default TIMEOUT,
    seconds); between attempts it sleeps CHECK_EVERY seconds.
    """
    @wraps(func)
    def wrapped(*args, **kwargs):
        timeout = kwargs.pop('timeout', TIMEOUT)
        start = None
        while True:
            try:
                return func(*args, **kwargs)
            except AssertionError:
                # Fix: explicit None check; the original `if not start` would
                # restart the deadline if time() ever returned a falsy value.
                if start is None:
                    start = time()
                if time() - start < timeout:
                    sleep(CHECK_EVERY)
                    continue
                raise
    return wrapped
A decorator to invoke a function retrying on assertion errors for a specified time interval .
7,030
def filter(self, displayed=False, enabled=False):
    """Return a selector filtered by visibility/enabled state.

    If this selector is not yet evaluated, the filters are recorded lazily on
    a copy; otherwise the elements are filtered immediately.
    """
    if not self.evaluated:
        pending = copy(self)
        if displayed:
            pending.displayed = True
        if enabled:
            pending.enabled = True
        return pending
    result = self
    if displayed:
        result = ElementSelector(result.browser, elements=[e for e in result if e.is_displayed()])
    if enabled:
        result = ElementSelector(result.browser, elements=[e for e in result if e.is_enabled()])
    return result
Filter elements by visibility and enabled status .
7,031
def _select ( self ) : for element in self . browser . find_elements_by_xpath ( self . xpath ) : if self . filter_displayed : if not element . is_displayed ( ) : continue if self . filter_enabled : if not element . is_enabled ( ) : continue yield element
Fetch the elements from the browser .
7,032
def authenticate(self) -> bool:
    """Authenticate with the controller and re-enable any registered notifications."""
    with IHCController._mutex:
        ok = self.client.authenticate(self._username, self._password)
        if ok and self._ihcevents:
            self.client.enable_runtime_notifications(self._ihcevents.keys())
        return bool(ok)
Authenticate and enable the registered notifications
7,033
def get_runtime_value(self, ihcid: int):
    """Get a runtime value, re-authenticating once if the first attempt fails.

    BUG FIX: the original returned the literal True instead of the value when
    the first lookup succeeded. Control flow is preserved: a falsy result still
    triggers re-authentication and a single retry (mirroring the setters).
    """
    value = self.client.get_runtime_value(ihcid)
    if value:
        return value
    self.re_authenticate()
    return self.client.get_runtime_value(ihcid)
Get runtime value with re - authenticate if needed
7,034
def set_runtime_value_bool(self, ihcid: int, value: bool) -> bool:
    """Set a boolean runtime value, re-authenticating once on failure."""
    if not self.client.set_runtime_value_bool(ihcid, value):
        self.re_authenticate()
        return self.client.set_runtime_value_bool(ihcid, value)
    return True
Set bool runtime value with re - authenticate if needed
7,035
def set_runtime_value_int(self, ihcid: int, value: int) -> bool:
    """Set an integer runtime value, re-authenticating once on failure."""
    if not self.client.set_runtime_value_int(ihcid, value):
        self.re_authenticate()
        return self.client.set_runtime_value_int(ihcid, value)
    return True
Set integer runtime value with re - authenticate if needed
7,036
def set_runtime_value_float(self, ihcid: int, value: float) -> bool:
    """Set a float runtime value, re-authenticating once on failure."""
    if not self.client.set_runtime_value_float(ihcid, value):
        self.re_authenticate()
        return self.client.set_runtime_value_float(ihcid, value)
    return True
Set float runtime value with re - authenticate if needed
7,037
def get_project(self) -> str:
    """Return the IHC project, fetching it once the controller reports ready.

    Returns None if the controller does not become ready within 10 seconds.
    """
    with IHCController._mutex:
        if self._project is None:
            if self.client.get_state() != IHCSTATE_READY:
                ready = self.client.wait_for_state_change(IHCSTATE_READY, 10)
                if ready != IHCSTATE_READY:
                    return None
            self._project = self.client.get_project()
        return self._project
Get the ihc project and make sure controller is ready before
7,038
def add_notify_event(self, resourceid: int, callback, delayed=False):
    """Register a callback for change events on a resource id.

    With delayed=True the enable request is queued for the notify thread;
    otherwise it is sent immediately (returning False on failure). The notify
    thread is started lazily on the first registration.
    """
    with IHCController._mutex:
        self._ihcevents.setdefault(resourceid, []).append(callback)
        if delayed:
            self._newnotifyids.append(resourceid)
        elif not self.client.enable_runtime_notification(resourceid):
            return False
        if not self._notifyrunning:
            self._notifythread.start()
        return True
Add a notify callback for a specified resource id. If delayed is set to true, the enable request will be sent from the notification thread.
7,039
def _notify_fn(self):
    """Notification-thread main loop.

    Enables any pending (delayed) subscriptions, long-polls the controller
    for resource value changes, and dispatches each change to the callbacks
    registered for that resource id. Runs until _notifyrunning is cleared.
    """
    self._notifyrunning = True
    while self._notifyrunning:
        try:
            with IHCController._mutex:
                # Flush subscriptions queued by add_notify_event(delayed=True).
                if self._newnotifyids:
                    self.client.enable_runtime_notifications(self._newnotifyids)
                    self._newnotifyids = []
            # Long poll outside the mutex so registrations are not blocked —
            # TODO(review): confirm the original indentation placed this
            # outside the `with` block.
            changes = self.client.wait_for_resource_value_changes()
            if changes is False:
                # Poll failure — assume the session expired and recover.
                self.re_authenticate(True)
                continue
            for ihcid in changes:
                value = changes[ihcid]
                if ihcid in self._ihcevents:
                    for callback in self._ihcevents[ihcid]:
                        callback(ihcid, value)
        except Exception as exp:
            # Broad catch keeps the notify thread alive on any error;
            # recover by re-authenticating and looping again.
            self.re_authenticate(True)
The notify thread function .
7,040
def re_authenticate(self, notify: bool = False) -> bool:
    """Retry authentication after a failure.

    From the notify thread (notify=True) there is no deadline — we retry while
    the thread is alive. Otherwise we give up after `reauthenticatetimeout`
    seconds. Sleeps `retryinterval` seconds between attempts. Returns True on
    success, False on giving up.
    """
    deadline = datetime.now() + timedelta(seconds=self.reauthenticatetimeout)
    while not self.authenticate():
        if notify:
            if not self._notifyrunning:
                return False
        elif deadline and datetime.now() > deadline:
            return False
        time.sleep(self.retryinterval)
    return True
Authenticate again after failure. Keep trying with a 10-second interval. If called from the notify thread we will not have a timeout, but will end if the notify thread has been cancelled. Will return True if authentication was successful.
7,041
def get_texts(self, metadata=None):
    """Iterate over the lines of the gzipped corpus, yielding tokenized volumes.

    Yields token lists, or (tokens, (lineno,)) tuples when metadata is truthy.
    Lines outside the volume ranges in book_meta are skipped.
    """
    if metadata is None:
        metadata = self.metadata
    self.input_file = gzip.GzipFile(self.input_file_path)
    volume_num = 0
    with self.input_file as lines:
        for lineno, line in enumerate(lines):
            if volume_num >= len(self.book_meta['volumes']):
                # BUG FIX (PEP 479): raising StopIteration inside a generator
                # is a RuntimeError on Python 3.7+; `return` ends it cleanly.
                return
            volume = self.book_meta['volumes'][volume_num]
            if lineno < volume['start']:
                continue
            if lineno < volume['stop']:
                toks = self.tokenize(line, lowercase=self.lowercase)
                if metadata:
                    yield (toks, (lineno,))
                else:
                    yield toks
            else:
                volume_num += 1
Iterate over the lines of The Complete Works of William Shakespeare .
7,042
def encode(df, encoding='utf8', verbosity=1):
    """Vectorised byte-encoding of every string column in a DataFrame.

    Columns whose dtype is object/str and which contain at least one string
    are encoded in bulk via numpy; rows that resist are retried element-wise.
    Returns the (mutated) DataFrame.
    NOTE(review): uses py2 `basestring` — must be shimmed on Python 3.
    NOTE(review): `pbar` is only created when verbosity > 0 but used when
    verbosity is truthy; a negative verbosity would hit a NameError.
    """
    if verbosity > 0:
        pbar = progressbar.ProgressBar(maxval=df.shape[1])
        pbar.start()
    for colnum, col in enumerate(df.columns):
        if isinstance(df[col], pd.Series):
            if verbosity:
                pbar.update(colnum)
            if df[col].dtype in (np.dtype('object'), np.dtype('U'), np.dtype('S')) and any(
                    isinstance(obj, basestring) for obj in df[col]):
                # Mask of the rows that actually hold strings.
                strmask = np.array([isinstance(obj, basestring) for obj in df[col]])
                series = df[col].copy()
                try:
                    # Fast path: bulk-encode all string cells at once.
                    series[strmask] = np.char.encode(series[strmask].values.astype('U'))
                except TypeError:
                    print("Unable to convert {} elements starting at position {} in column {}".format(
                        sum(strmask), [i for i, b in enumerate(strmask) if b][:1], col))
                    raise
                except (UnicodeDecodeError, UnicodeEncodeError):
                    try:
                        # SECURITY(review): eval() on cell contents — even with
                        # empty globals/locals this is dangerous on untrusted data.
                        series[strmask] = np.array([eval(s, {}, {}) for s in series[strmask]])
                    except (SyntaxError, UnicodeDecodeError, UnicodeEncodeError):
                        # Last resort: encode one element at a time.
                        newseries = []
                        for s in series[strmask]:
                            try:
                                newseries += [s.encode('utf8')]
                            except:
                                print(u'Had trouble encoding {} so used repr to turn it into {}'.format(
                                    s, repr(transcode_unicode(s))))
                                newseries += [transcode_unicode(s)]
                        series[strmask] = np.array(newseries).astype('O')
                df[col] = series
    if verbosity:
        pbar.finish()
    return df
If you try to encode each element individually with python this would take days!
7,043
def run(verbosity=1):
    """Load all_tweets.csv, clean it (normalize -> dropna -> encode ->
    clean_labels) and dump it to cleaned_tweets.csv.gz. Returns the DataFrame."""
    filepath = os.path.join(DATA_PATH, 'all_tweets.csv')
    print('Loading tweets from {} (could take a minute or so)...'.format(filepath))
    df = pd.read_csv(filepath, encoding='utf-8', engine='python')
    if 'id' in df.columns:
        df = df.set_index('id')
    df = normalize(df)
    df = dropna(df)
    df = encode(df, verbosity=verbosity)
    df = clean_labels(df)
    # Write, re-read, and write again — presumably to force consistent
    # dtypes/quoting through a CSV round trip; TODO(review): confirm the
    # second write is actually required.
    df.to_csv(os.path.join(DATA_PATH, 'cleaned_tweets.csv.gz'), compression='gzip',
              quotechar='"', quoting=pd.io.common.csv.QUOTE_NONNUMERIC)
    df = pd.read_csv(os.path.join(DATA_PATH, 'cleaned_tweets.csv.gz'), index_col='id',
                     compression='gzip', quotechar='"',
                     quoting=pd.io.common.csv.QUOTE_NONNUMERIC, low_memory=False)
    df.to_csv(os.path.join(DATA_PATH, 'cleaned_tweets.csv.gz'), compression='gzip',
              quotechar='"', quoting=pd.io.common.csv.QUOTE_NONNUMERIC)
    return df
Load all_tweets . csv and run normalize dropna encode before dumping to cleaned_tweets . csv . gz
7,044
def data_worker(**kwargs):
    """Consume data keys from the input queue and push resulting dataframes
    to the output map.

    Required kwargs: ``function``, ``input`` (queue), ``output`` (map).
    Optional: ``token`` (Quandl), or ``source`` [+ ``begin``/``end``] for
    pandas data readers.

    BUG FIXES: the original built Exception instances without raising them
    (so bad arguments were silently ignored), and its membership tests like
    ``("source" and "begin" and "end") in kwargs`` only checked ``"end"``
    due to `and` short-circuiting on non-empty strings.
    """
    if "function" in kwargs:
        function = kwargs["function"]
    else:
        raise Exception("Invalid arguments, no function specified")
    if "input" in kwargs:
        input_queue = kwargs["input"]
    else:
        raise Exception("Invalid Arguments, no input queue")
    if "output" in kwargs:
        output_map = kwargs["output"]
    else:
        raise Exception("Invalid Arguments, no output map")

    argsdict = None
    if "token" in kwargs:
        argsdict = {"quandl_token": kwargs["token"]}
    elif "Quandl" in function.__module__:
        raise Exception("Invalid Arguments, no Quandl token")

    if all(key in kwargs for key in ("source", "begin", "end")):
        argsdict = {"data_source": kwargs["source"],
                    "begin": kwargs["begin"],
                    "end": kwargs["end"]}
    elif "source" in kwargs:
        argsdict = {"data_source": kwargs["source"]}
    elif argsdict is None and "pandas.io.data" in function.__module__:
        raise Exception("Invalid Arguments, no pandas data source specified")

    retries = 5
    while not input_queue.empty():
        data_key = input_queue.get()
        get_data(function, data_key, output_map, retries, argsdict)
Function to be spawned concurrently consume data keys from input queue and push the resulting dataframes to output map
7,045
def consume_keys(self):
    """Look up every queued key sequentially using the configured worker."""
    pending = self.input_queue.qsize()
    print("\nLooking up " + str(pending) + " keys from " + self.source_name + "\n")
    self.data_worker(**self.worker_args)
Work through the keys to look up sequentially
7,046
def consume_keys_asynchronous_processes(self):
    """Look up queued keys concurrently in a pool of worker processes.

    BUG FIX: Pool.apply blocks until each worker finishes, serialising the
    entire lookup; apply_async actually dispatches the workers in parallel.
    """
    print("\nLooking up " + str(self.input_queue.qsize()) + " keys from " + self.source_name + "\n")
    capacity = multiprocessing.cpu_count() * 4
    jobs = capacity if capacity < self.input_queue.qsize() else self.input_queue.qsize()
    pool = multiprocessing.Pool(processes=jobs, maxtasksperchild=10)
    for _ in range(jobs):
        pool.apply_async(self.data_worker, [], self.worker_args)
    pool.close()
    pool.join()
Work through the keys to look up asynchronously using multiple processes
7,047
def consume_keys_asynchronous_threads(self):
    """Look up queued keys concurrently in a pool of worker threads.

    BUG FIX: Pool.apply blocks until each worker finishes, serialising the
    entire lookup; apply_async actually dispatches the workers in parallel.
    """
    print("\nLooking up " + str(self.input_queue.qsize()) + " keys from " + self.source_name + "\n")
    capacity = multiprocessing.cpu_count() * 4
    jobs = capacity if capacity < self.input_queue.qsize() else self.input_queue.qsize()
    pool = ThreadPool(jobs)
    for _ in range(jobs):
        pool.apply_async(self.data_worker, [], self.worker_args)
    pool.close()
    pool.join()
Work through the keys to look up asynchronously using multiple threads
7,048
def unpack(self, to_unpack):
    """Recursively flatten `to_unpack` into the input queue.

    Strings are enqueued directly; containers are descended into; anything
    else raises. Works on both Python 2 and 3.
    """
    # Py2/py3 shim: assigning `basestring` anywhere in this function makes it
    # local, so the first use below raises UnboundLocalError (a NameError
    # subclass) and we alias it to str. NOTE(review): this means the local is
    # always str — on Python 2 `unicode` values would not match; confirm
    # intended behaviour if py2 support still matters.
    try:
        isinstance(to_unpack, basestring)
    except NameError:
        basestring = str
    if isinstance(to_unpack, basestring):
        self.input_queue.put(to_unpack)
        return
    for possible_key in to_unpack:
        if isinstance(possible_key, basestring):
            self.input_queue.put(possible_key)
        elif sys.version_info >= (3, 0):
            # Python 3: Container lives in collections.abc.
            if isinstance(possible_key, collections.abc.Container) and not isinstance(possible_key, basestring):
                self.unpack(possible_key)
            else:
                raise Exception("A type that is neither a string or a container was passed to unpack. "
                                "Aborting!")
        else:
            # Python 2: Container lives directly in collections.
            if isinstance(possible_key, collections.Container) and not isinstance(possible_key, basestring):
                self.unpack(possible_key)
            else:
                raise Exception("A type that is neither a string or a container was passed to unpack. "
                                "Aborting!")
Unpack is a recursive function that will unpack anything that inherits from abstract base class Container provided it is not also inheriting from Python basestring .
7,049
def set_source_quandl(self, quandl_token):
    """Configure the worker to pull data from Quandl."""
    self.source_name = "Quandl"
    self.data_worker = data_worker
    self.worker_args = {
        "function": Quandl.get,
        "input": self.input_queue,
        "output": self.output_map,
        "token": quandl_token,
    }
Set data source to Quandl
7,050
def set_source_google_finance(self):
    """Configure the worker to pull data from Google Finance."""
    self.source_name = "Google Finance"
    self.data_worker = data_worker
    self.worker_args = {
        "function": pandas.io.data.DataReader,
        "input": self.input_queue,
        "output": self.output_map,
        "source": 'google',
    }
Set data source to Google Finance
7,051
def set_source_yahoo_options(self):
    """Configure the worker to download financial options data from Yahoo Finance."""
    self.source_name = "Yahoo Finance Options"
    self.data_worker = data_worker
    self.worker_args = {
        "function": Options,
        "input": self.input_queue,
        "output": self.output_map,
        "source": 'yahoo',
    }
Set data source to yahoo finance specifically to download financial options data
7,052
def load_jquery(func):
    """Decorator ensuring jQuery is available in the page before running `func`.

    Calls `func` immediately; if it fails specifically because `$` is not
    defined, injects the JQUERY script, waits for it to load, then retries
    `func` once. Any other WebDriverException propagates.
    """
    @wraps(func)
    def wrapped(browser, *args, **kwargs):
        try:
            return func(browser, *args, **kwargs)
        except WebDriverException as ex:
            # Only the "jQuery is not defined" failure is recoverable here.
            if not is_jquery_not_defined_error(ex.msg):
                raise
            load_script(browser, JQUERY)

            @wait_for
            def jquery_available():
                # Polled (via wait_for) until the injected script has loaded.
                try:
                    return browser.execute_script('return $')
                except WebDriverException:
                    raise AssertionError("jQuery is not loaded")
            jquery_available()
            return func(browser, *args, **kwargs)
    return wrapped
A decorator to ensure a function is run with jQuery available .
7,053
def check_element_by_selector(self, selector):
    """Assert at least one element matches the given jQuery selector."""
    if not find_elements_by_jquery(world.browser, selector):
        raise AssertionError("Expected matching elements, none found.")
Assert an element exists matching the given selector .
7,054
def check_no_element_by_selector(self, selector):
    """Assert no element matches the given jQuery selector."""
    matches = find_elements_by_jquery(world.browser, selector)
    if matches:
        raise AssertionError("Expected no matching elements, found {}.".format(len(matches)))
Assert an element does not exist matching the given selector .
7,055
def wait_for_element_by_selector(self, selector, seconds):
    """Assert an element matching the selector appears within `seconds`."""
    def element_present():
        if not find_elements_by_jquery(world.browser, selector):
            raise AssertionError("Expected a matching element.")
    wait_for(element_present)(timeout=int(seconds))
Assert an element exists matching the given selector within the given time period .
7,056
def count_elements_exactly_by_selector(self, number, selector):
    """Assert exactly `number` elements match the given selector."""
    found = len(find_elements_by_jquery(world.browser, selector))
    expected = int(number)
    if found != expected:
        raise AssertionError("Expected {} elements, found {}".format(expected, found))
Assert n elements exist matching the given selector .
7,057
def fill_in_by_selector(self, selector, value):
    """Clear and type `value` into the form element matching the CSS selector."""
    field = find_element_by_jquery(world.browser, selector)
    field.clear()
    field.send_keys(value)
Fill in the form element matching the CSS selector .
7,058
def submit_by_selector(self, selector):
    """Submit the form matching the CSS selector."""
    find_element_by_jquery(world.browser, selector).submit()
Submit the form matching the CSS selector .
7,059
def check_by_selector(self, selector):
    """Tick the checkbox matching the CSS selector (no-op if already checked)."""
    box = find_element_by_jquery(world.browser, selector)
    if not box.is_selected():
        box.click()
Check the checkbox matching the CSS selector .
7,060
def click_by_selector(self, selector):
    """Click the element matching the CSS selector."""
    find_element_by_jquery(world.browser, selector).click()
Click the element matching the CSS selector .
7,061
def follow_link_by_selector(self, selector):
    """Navigate the browser to the href of the element matching the CSS selector."""
    target = find_element_by_jquery(world.browser, selector).get_attribute('href')
    world.browser.get(target)
Navigate to the href of the element matching the CSS selector .
7,062
def is_selected_by_selector(self, selector):
    """Assert the option matching the CSS selector is selected."""
    if not find_element_by_jquery(world.browser, selector).is_selected():
        raise AssertionError("Element expected to be selected.")
Assert the option matching the CSS selector is selected .
7,063
def select_by_selector(self, selector):
    """Open the parent <select> and click the option matching the CSS selector."""
    option = find_element_by_jquery(world.browser, selector)
    parents = find_parents_by_jquery(world.browser, selector)
    if not parents:
        raise AssertionError("No parent element found for the option.")
    dropdown = parents[0]
    dropdown.click()
    sleep(0.3)  # give the dropdown time to open before clicking the option
    option.click()
    if not option.is_selected():
        raise AssertionError("Option should have become selected after clicking it.")
Select the option matching the CSS selector .
7,064
def run_filter_calculation(self):
    """Submit a CifFilterCalculation on the input CifData and await it in context."""
    calc_inputs = {
        'cif': self.inputs.cif,
        'code': self.inputs.cif_filter,
        'parameters': self.inputs.cif_filter_parameters,
        'metadata': {'options': self.inputs.options.get_dict()},
    }
    calc = self.submit(CifFilterCalculation, **calc_inputs)
    self.report('submitted {}<{}>'.format(CifFilterCalculation.__name__, calc.uuid))
    return ToContext(cif_filter=calc)
Run the CifFilterCalculation on the CifData input node .
7,065
def inspect_filter_calculation(self):
    """Verify the CifFilterCalculation produced a `cif` output and store it in context.

    BUG FIX: `node` is bound before the try block; previously, if the first
    statement raised, the except handler's report line hit a NameError on
    the unbound `node`.
    """
    node = self.ctx.cif_filter
    try:
        self.ctx.cif = node.outputs.cif
    except exceptions.NotExistent:
        self.report('aborting: CifFilterCalculation<{}> did not return the required cif output'.format(node.uuid))
        return self.exit_codes.ERROR_CIF_FILTER_FAILED
Inspect the result of the CifFilterCalculation verifying that it produced a CifData output node .
7,066
def run_select_calculation(self):
    """Submit a CifSelectCalculation on the filtered CifData and await it in context."""
    calc_inputs = {
        'cif': self.ctx.cif,
        'code': self.inputs.cif_select,
        'parameters': self.inputs.cif_select_parameters,
        'metadata': {'options': self.inputs.options.get_dict()},
    }
    calc = self.submit(CifSelectCalculation, **calc_inputs)
    self.report('submitted {}<{}>'.format(CifSelectCalculation.__name__, calc.uuid))
    return ToContext(cif_select=calc)
Run the CifSelectCalculation on the CifData output node of the CifFilterCalculation .
7,067
def inspect_select_calculation(self):
    """Verify the CifSelectCalculation produced a `cif` output and store it in context.

    BUG FIX: `node` is bound before the try block; previously, if the first
    statement raised, the except handler's report line hit a NameError on
    the unbound `node`.
    """
    node = self.ctx.cif_select
    try:
        self.ctx.cif = node.outputs.cif
    except exceptions.NotExistent:
        self.report('aborting: CifSelectCalculation<{}> did not return the required cif output'.format(node.uuid))
        return self.exit_codes.ERROR_CIF_SELECT_FAILED
Inspect the result of the CifSelectCalculation verifying that it produced a CifData output node .
7,068
def parse_cif_structure(self):
    """Parse a StructureData from the cleaned CifData.

    Guards against CIFs that cannot be parsed (unknown species, undefined
    atomic sites, attached hydrogens), recording the matching exit code in
    the context instead of raising. On success stores the primitive
    structure in self.ctx.structure.
    """
    from aiida_codtools.workflows.functions.primitive_structure_from_cif import primitive_structure_from_cif
    # Pre-flight checks: each unparseable condition short-circuits with its
    # own exit code recorded for the `results` step.
    if self.ctx.cif.has_unknown_species:
        self.ctx.exit_code = self.exit_codes.ERROR_CIF_HAS_UNKNOWN_SPECIES
        self.report(self.ctx.exit_code.message)
        return
    if self.ctx.cif.has_undefined_atomic_sites:
        self.ctx.exit_code = self.exit_codes.ERROR_CIF_HAS_UNDEFINED_ATOMIC_SITES
        self.report(self.ctx.exit_code.message)
        return
    if self.ctx.cif.has_attached_hydrogens:
        self.ctx.exit_code = self.exit_codes.ERROR_CIF_HAS_ATTACHED_HYDROGENS
        self.report(self.ctx.exit_code.message)
        return
    parse_inputs = {
        'cif': self.ctx.cif,
        'parse_engine': self.inputs.parse_engine,
        'site_tolerance': self.inputs.site_tolerance,
        'symprec': self.inputs.symprec,
    }
    try:
        structure, node = primitive_structure_from_cif.run_get_node(**parse_inputs)
    except Exception:
        # Any parser failure maps to a single generic exit code.
        self.ctx.exit_code = self.exit_codes.ERROR_CIF_STRUCTURE_PARSING_FAILED
        self.report(self.ctx.exit_code.message)
        return
    if node.is_failed:
        # Translate the calcfunction's exit status into this workchain's code.
        self.ctx.exit_code = self.exit_codes(node.exit_status)
        self.report(self.ctx.exit_code.message)
    else:
        self.ctx.structure = structure
Parse a StructureData from the cleaned CifData returned by the CifSelectCalculation .
7,069
def results(self):
    """Attach the cleaned CifData (and the StructureData, if parsed) as outputs."""
    self.out('cif', self.ctx.cif)
    if 'group_cif' in self.inputs:
        self.inputs.group_cif.add_nodes([self.ctx.cif])
    if 'group_structure' in self.inputs:
        try:
            structure = self.ctx.structure
        except AttributeError:
            # Structure parsing failed earlier; propagate the recorded exit code.
            return self.ctx.exit_code
        self.inputs.group_structure.add_nodes([structure])
        self.out('structure', structure)
    self.report('workchain finished successfully')
If successfully created add the cleaned CifData and StructureData as output nodes to the workchain .
7,070
def get_input_node(cls, value):
    """Return a stored node of the given orm class and value, reusing an existing one."""
    from aiida import orm
    if cls in (orm.Bool, orm.Float, orm.Int, orm.Str):
        existing = orm.QueryBuilder().append(cls, filters={'attributes.value': value}).first()
        return cls(value).store() if existing is None else existing[0]
    if cls is orm.Dict:
        existing = orm.QueryBuilder().append(cls, filters={'attributes': {'==': value}}).first()
        return cls(dict=value).store() if existing is None else existing[0]
    raise NotImplementedError
Return a Node of a given class and given value .
7,071
def bind(self, form):
    """Instantiate this filter's field and attach it to the given form."""
    unbound = self.field(default=self.default, **self.field_kwargs)
    form._fields[self.name] = unbound.bind(form, self.name, prefix=form._prefix)
Bind to filters form .
7,072
def get_config_path():
    """Return the default config path: %APPDATA%/.vtjp on Windows, ~/.vtjp elsewhere."""
    if os.name == "nt":
        base_dir = os.getenv('APPDATA')
    else:
        base_dir = os.path.expanduser('~')
    return os.path.join(base_dir, '.vtjp')
Put together the default configuration path based on OS .
7,073
def print_table(document, *columns):
    """Print a JSON document as a table; each column is an (item_key, header) pair."""
    headers = [header for _, header in columns]
    rows = []
    for element in document:
        rows.append([element[key] if key in element else None for key, _ in columns])
    print(tabulate.tabulate(rows, headers))
Print json document as table
7,074
def print_trip_table(document):
    """Print a trip document as a table, one row per journey leg.

    The alternative number is shown only on the first leg of each alternative.
    """
    headers = ['Alt.', 'Name', 'Time', 'Track',
               'Direction', 'Dest.', 'Track', 'Arrival']
    rows = []
    for alt_no, alternative in enumerate(document, 1):
        # The API returns a bare dict when there is a single leg; normalize to a list.
        if not isinstance(alternative['Leg'], list):
            alternative['Leg'] = [alternative['Leg']]
        first_leg = True
        for leg in alternative['Leg']:
            origin = leg['Origin']
            dest = leg['Destination']
            rows.append([
                alt_no if first_leg else None,
                leg['name'],
                origin['rtTime'] if 'rtTime' in origin else origin['time'],
                origin['track'],
                leg['direction'] if 'direction' in leg else None,
                dest['name'],
                dest['track'],
                dest['rtTime'] if 'rtTime' in dest else dest['time'],
            ])
            first_leg = False
    print(tabulate.tabulate(rows, headers))
Print trip table
7,075
def makeproperty(ns, cls=None, name=None, docstring='', descendant=True):
    """Make a property on an instance of an SLDNode.

    If ``cls`` is omitted, the property is assumed to be a text node named
    ``name`` with no corresponding class object.  If ``name`` is omitted, the
    property is assumed to be a complex node whose element name equals
    ``cls.__name__`` and which is wrapped by ``cls``.
    Returns a ``property`` object with getter, setter and deleter.
    """

    def get_property(self):
        # Resolve the child element either by explicit name or by class name.
        if cls is None:
            xpath = '%s:%s' % (ns, name)
        else:
            xpath = '%s:%s' % (ns, cls.__name__)
        xpath = self._node.xpath(xpath, namespaces=SLDNode._nsmap)

        if len(xpath) == 1:
            if cls is None:
                # Text property: return the element's text content.
                return xpath[0].text
            else:
                # Complex property: wrap the existing child node.
                # NOTE(review): cls.__init__ is invoked with the parent
                # wrapper, presumably attaching to the existing child --
                # confirm against the SLDNode subclass constructors.
                elem = cls.__new__(cls)
                cls.__init__(elem, self, descendant=descendant)
                return elem
        else:
            return None

    def set_property(self, value):
        if cls is None:
            xpath = '%s:%s' % (ns, name)
        else:
            xpath = '%s:%s' % (ns, cls.__name__)
        xpath = self._node.xpath(xpath, namespaces=SLDNode._nsmap)

        if len(xpath) == 1:
            # Element exists: overwrite its text, or replace the node in place.
            if cls is None:
                xpath[0].text = value
            else:
                xpath[0] = value._node
        else:
            # Element missing: create a text node, or append the wrapped node.
            if cls is None:
                elem = self._node.makeelement('{%s}%s' % (SLDNode._nsmap[ns], name), nsmap=SLDNode._nsmap)
                elem.text = value
                self._node.append(elem)
            else:
                self._node.append(value._node)

    def del_property(self):
        if cls is None:
            xpath = '%s:%s' % (ns, name)
        else:
            xpath = '%s:%s' % (ns, cls.__name__)
        xpath = self._node.xpath(xpath, namespaces=SLDNode._nsmap)

        if len(xpath) == 1:
            self._node.remove(xpath[0])

    return property(get_property, set_property, del_property, docstring)
Make a property on an instance of an SLDNode . If cls is omitted the property is assumed to be a text node with no corresponding class object . If name is omitted the property is assumed to be a complex node with a corresponding class wrapper .
7,076
def get_or_create_element(self, ns, name):
    """Return the wrapper for the only ``ns:name`` child, creating the element first if absent."""
    matches = self._node.xpath('%s:%s' % (ns, name), namespaces=SLDNode._nsmap)
    if len(matches) == 1:
        return getattr(self, name)
    return self.create_element(ns, name)
Attempt to get the only child element from this SLDNode. If the node does not exist, create the element, attach it to the DOM, and return the class object that wraps the node.
7,077
def create_element(self, ns, name):
    """Create an ``ns:name`` child of this SLDNode and return its wrapper."""
    child = self._node.makeelement(
        '{%s}%s' % (SLDNode._nsmap[ns], name), nsmap=SLDNode._nsmap)
    self._node.append(child)
    return getattr(self, name)
Create an element as a child of this SLDNode .
7,078
def normalize(self):
    """Normalize this node and all rules contained within, modifying the SLD model in place."""
    for i, rnode in enumerate(self._nodes):
        # NOTE(review): the wrapper is built with index `i - 1`, so the first
        # iteration passes -1 -- this looks like it depends on how Rule
        # resolves negative/offset indexes; confirm against the Rule
        # constructor before changing.
        rule = Rule(self, i - 1, descendant=False)
        rule.normalize()
Normalize this node and all rules contained within . The SLD model is modified in place .
7,079
def validate(self):
    """Normalize this document, then validate it against the SLD schema.

    Any schema validation error messages are logged at the INFO level.
    Returns True when the document validates, False otherwise.
    """
    self.normalize()

    # An empty tree can never validate.
    if self._node is None:
        logging.debug('The node is empty, and cannot be validated.')
        return False

    # Compile the schema lazily and cache it on the instance.
    if self._schema is None:
        self._schema = XMLSchema(self._schemadoc)

    valid = self._schema.validate(self._node)

    for entry in self._schema.error_log:
        logging.info('Line:%d, Column:%d -- %s', entry.line, entry.column, entry.message)

    return valid
Validate the current file against the SLD schema . This first normalizes the SLD document then validates it . Any schema validation error messages are logged at the INFO level .
7,080
def helper(path):
    """Hard-link the platform-specific helper scripts into *path*.

    Existing destination files are removed first; individual link failures
    are reported and skipped.
    """
    if sys.platform.startswith("win"):
        helper_dir = os.path.join(PHLB_BASE_DIR, "helper_cmd")
    elif sys.platform.startswith("linux"):
        helper_dir = os.path.join(PHLB_BASE_DIR, "helper_sh")
    else:
        print("TODO: %s" % sys.platform)
        return

    if not os.path.isdir(helper_dir):
        raise RuntimeError("Helper script path not found here: '%s'" % helper_dir)

    for entry in scandir(helper_dir):
        print("_" * 79)
        print("Link file: '%s'" % entry.name)

        src = entry.path
        dst = os.path.join(path, entry.name)

        # Replace any stale copy; skip the file if it cannot be removed.
        if os.path.exists(dst):
            print("Remove old file '%s'" % dst)
            try:
                os.remove(dst)
            except OSError as err:
                print("\nERROR:\n%s\n" % err)
                continue

        print("source.....: '%s'" % src)
        print("destination: '%s'" % dst)
        try:
            os.link(src, dst)
        except OSError as err:
            print("\nERROR:\n%s\n" % err)
            continue
link helper files to given path
7,081
def backup(path, name=None):
    """Start a backup run for *path*, optionally under the given *name*."""
    # Imported lazily (and aliased to avoid shadowing this command) so the
    # CLI stays fast when this subcommand is not used.
    from PyHardLinkBackup.phlb.phlb_main import backup as run_backup
    run_backup(path, name)
Start a Backup run
7,082
def verify(backup_path, fast):
    """Verify an existing backup at *backup_path* (optionally in fast mode)."""
    # Imported lazily so the CLI stays fast when this subcommand is not used.
    from PyHardLinkBackup.phlb.verify import verify_backup as run_verify
    run_verify(backup_path, fast)
Verify an existing backup
7,083
def setup_package():
    """Run the setuptools setup, taking metadata from setup.json and README.md."""
    import json
    from setuptools import setup, find_packages

    # Package metadata lives in a JSON file next to this script.
    with open('setup.json', 'r') as handle:
        metadata = json.load(handle)

    # The README is reused verbatim as the PyPI long description.
    with open('README.md', 'r') as handle:
        readme = handle.read()

    setup(
        include_package_data=True,
        packages=find_packages(),
        setup_requires=['reentry'],
        reentry_register=True,
        long_description=readme,
        long_description_content_type='text/markdown',
        **metadata)
Setup procedure .
7,084
def literal_to_dict(value):
    """Transform an RDF term into a JSON-LD-style readable value.

    Language-tagged literals become ``{"@value": ..., "@language": ...}``
    dicts, plain literals are converted to native Python values, URIs become
    ``{"@id": ...}``, None passes through, and anything else is stringified.
    """
    if isinstance(value, Literal):
        if value.language is not None:
            return {"@value": str(value), "@language": value.language}
        return value.toPython()
    if isinstance(value, URIRef):
        return {"@id": str(value)}
    if value is None:
        return None
    return str(value)
Transform an object value into a dict readable value
7,085
def dict_to_literal(dict_container: dict):
    """Expand a JSON-LD (PyLD) value dict into RDFLib Literal constructor arguments.

    Integers yield a 1-tuple ``(value,)``; everything else yields
    ``(value, language-or-None)``.
    """
    payload = dict_container["@value"]
    if isinstance(payload, int):
        # Numeric values carry no language tag.
        return (payload,)
    return payload, dict_container.get("@language", None)
Transforms a JSON + LD PyLD dictionary into an RDFLib object
7,086
def set_src_filepath(self, src_dir_path):
    """Compute and store all backup-relevant paths for one source file.

    Called for every file in the source directory.  Fills ``abs_src_filepath``,
    ``sub_filepath``, ``sub_path``, ``filename``, ``abs_dst_path``,
    ``abs_dst_filepath`` and ``abs_dst_hash_filepath``.  If the source path
    cannot be resolved, ``abs_src_filepath`` stays None and the remaining
    attributes are left untouched.
    """
    log.debug("set_src_filepath() with: '%s'", src_dir_path)
    self.abs_src_filepath = src_dir_path.resolved_path
    log.debug(" * abs_src_filepath: %s" % self.abs_src_filepath)

    if self.abs_src_filepath is None:
        # e.g. a broken symlink: nothing more can be derived from it.
        log.info("Can't resolve source path: %s", src_dir_path)
        return

    # Path of this file relative to the backup source root.
    self.sub_filepath = self.abs_src_filepath.relative_to(self.abs_src_root)
    log.debug(" * sub_filepath: %s" % self.sub_filepath)

    self.sub_path = self.sub_filepath.parent
    log.debug(" * sub_path: %s" % self.sub_path)

    self.filename = self.sub_filepath.name
    log.debug(" * filename: %s" % self.filename)

    # Mirror the relative layout below the destination root.
    self.abs_dst_path = Path2(self.abs_dst_root, self.sub_path)
    log.debug(" * abs_dst_path: %s" % self.abs_dst_path)

    self.abs_dst_filepath = Path2(self.abs_dst_root, self.sub_filepath)
    log.debug(" * abs_dst_filepath: %s" % self.abs_dst_filepath)

    # Sidecar file storing the content hash next to the backup copy,
    # e.g. "<file>.sha512" depending on the configured hash name.
    self.abs_dst_hash_filepath = Path2("%s%s%s" % (self.abs_dst_filepath, os.extsep, phlb_config.hash_name))
    log.debug(" * abs_dst_hash_filepath: %s" % self.abs_dst_hash_filepath)
Set one filepath to back up this file. Called for every file in the source directory.
7,087
def _cryptodome_encrypt ( cipher_factory , plaintext , key , iv ) : encryptor = cipher_factory ( key , iv ) return encryptor . encrypt ( plaintext )
Use a Pycryptodome cipher factory to encrypt data .
7,088
def _cryptodome_decrypt ( cipher_factory , ciphertext , key , iv ) : decryptor = cipher_factory ( key , iv ) return decryptor . decrypt ( ciphertext )
Use a Pycryptodome cipher factory to decrypt data .
7,089
def _cryptography_encrypt ( cipher_factory , plaintext , key , iv ) : encryptor = cipher_factory ( key , iv ) . encryptor ( ) return encryptor . update ( plaintext ) + encryptor . finalize ( )
Use a cryptography cipher factory to encrypt data .
7,090
def _cryptography_decrypt ( cipher_factory , ciphertext , key , iv ) : decryptor = cipher_factory ( key , iv ) . decryptor ( ) return decryptor . update ( ciphertext ) + decryptor . finalize ( )
Use a cryptography cipher factory to decrypt data .
7,091
def generic_encrypt(cipher_factory_map, plaintext, key, iv):
    """Encrypt *plaintext* with whichever crypto backend is available.

    Raises PysnmpCryptoError when neither backend could be imported.
    """
    if backend is None:
        raise PysnmpCryptoError('Crypto backend not available')
    encrypt_func = _ENCRYPT_MAP[backend]
    return encrypt_func(cipher_factory_map[backend], plaintext, key, iv)
Encrypt data using the available backend .
7,092
def generic_decrypt(cipher_factory_map, ciphertext, key, iv):
    """Decrypt *ciphertext* with whichever crypto backend is available.

    Raises PysnmpCryptoError when neither backend could be imported.
    """
    if backend is None:
        raise PysnmpCryptoError('Crypto backend not available')
    decrypt_func = _DECRYPT_MAP[backend]
    return decrypt_func(cipher_factory_map[backend], ciphertext, key, iv)
Decrypt data using the available backend .
7,093
def _prepare_disks(self, disks_name):
    """Format the comma-separated disks as XFS, add them to fstab and mount them."""
    fstab = '/etc/fstab'
    for device in tqdm(disks_name.split(',')):
        sudo('umount /dev/{0}'.format(device), warn_only=True)
        if sudo('mkfs.xfs -f /dev/{0}'.format(device), warn_only=True).failed:
            # mkfs.xfs is missing -- install xfsprogs and retry the format.
            sudo('apt-get update')
            sudo('apt-get -y install xfsprogs')
            sudo('mkfs.xfs -f /dev/{0}'.format(device))
        sudo('mkdir -p /srv/node/{0}'.format(device))
        files.append(
            fstab,
            '/dev/{0} /srv/node/{1} xfs noatime,nodiratime,nobarrier,logbufs=8 0 2'.format(device, device),
            use_sudo=True)
        sudo('mount /srv/node/{0}'.format(device))
Format disks as XFS and mount them
7,094
def load_file(cls, file_path):
    """Load a Sudoku from a file.

    The file's content is stripped and handed to the class constructor.
    Fix: construct via ``cls`` instead of the hard-coded ``Sudoku`` name,
    so subclasses calling this alternate constructor get instances of
    themselves.
    """
    with open(os.path.abspath(file_path), 'rt') as f:
        return cls(f.read().strip())
Load a Sudoku from file .
7,095
def _parse_from_string(string_input):
    """Parse a Sudoku instance from string input.

    Accepts either one puzzle row per line or the whole puzzle flattened on a
    single line; an optional first line starting with '#' is kept as a comment.
    Returns ``(order, comment, matrix)`` where ``matrix`` is an
    (order**2 x order**2) list of lists with 0 marking empty cells.
    """
    # Drop empty lines.
    read_lines = list(filter(None, string_input.split('\n')))

    # A leading '#' line is a comment, not puzzle data.
    if read_lines[0].startswith('#'):
        comment = read_lines.pop(0)
    else:
        comment = ''

    if len(read_lines) > 1:
        # One row per line: the side length equals the line count, so
        # order = sqrt(side).
        order = int(math.sqrt(len(read_lines)))
    else:
        # Single flattened line of side*side characters: order = len ** (1/4).
        order = int(math.sqrt(math.sqrt(len(read_lines[0]))))
        # Re-split the single line into row-sized chunks (order**2 chars each),
        # keeping only full-length rows.
        read_lines = filter(lambda x: len(x) == (order ** 2), [read_lines[0][i:(i + order ** 2)] for i in utils.range_(len(read_lines[0])) if i % (order ** 2) == 0])

    # Pre-sized zero matrix; 0 marks an empty cell.
    matrix = utils.get_list_of_lists(order ** 2, order ** 2, fill_with=0)

    for i, line in enumerate(read_lines):
        line = line.strip()
        for j, value in enumerate(line):
            # Any non-digit placeholder (or an explicit '0') is an empty cell.
            if value.isdigit() and int(value):
                matrix[i][j] = int(value)
            else:
                matrix[i][j] = 0

    return order, comment, matrix
Parses a Sudoku instance from string input .
7,096
def row_iter(self):
    """Yield every row of the Sudoku in order."""
    yield from map(self.row, utils.range_(self.side))
Get an iterator over all rows in the Sudoku
7,097
def col_iter(self):
    """Yield every column of the Sudoku in order."""
    yield from map(self.col, utils.range_(self.side))
Get an iterator over all columns in the Sudoku
7,098
def box(self, row, col):
    """Return the values of the box (sub-grid) containing cell (row, col)."""
    # Top-left corner of the enclosing order x order box.
    top = (row // self.order) * self.order
    left = (col // self.order) * self.order
    return [self[i][j]
            for i in utils.range_(top, top + self.order)
            for j in utils.range_(left, left + self.order)]
Get the values of the box pertaining to the specified row and column of the Sudoku
7,099
def box_iter(self):
    """Yield every box of the Sudoku, left-to-right, top-to-bottom.

    Fix: the box origin must be scaled by ``self.order``, not a hard-coded 3.
    With ``i * 3`` an order-4 puzzle yielded box (0, 0) twice and never
    reached the lower box rows; the factor only coincided with correct
    behavior for the standard order-3 grid.
    """
    for i in utils.range_(self.order):
        for j in utils.range_(self.order):
            yield self.box(i * self.order, j * self.order)
Get an iterator over all boxes in the Sudoku