idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
8,400
def gen_schlumberger(self, M, N, a=None):
    """Generate one Schlumberger sounding for the potential dipole M-N.

    Current electrodes A, B are moved symmetrically outwards in steps of
    ``a`` electrodes; the resulting configurations are appended to the
    stored configurations.

    Parameters
    ----------
    M, N : int
        Electrode numbers of the potential dipole.
    a : int, optional
        Spacing step; defaults to ``abs(M - N)``.

    Returns
    -------
    numpy.ndarray
        Nx4 array of (A, B, M, N) configurations.
    """
    if a is None:
        a = np.abs(M - N)
    # BUG FIX: the original computed int(min(M, N) - 1 / a), i.e. divided
    # only the literal 1 by a. Parenthesize so the whole distance to the
    # leftmost electrode is divided by the step width.
    nr_of_steps_left = int((min(M, N) - 1) / a)
    nr_of_steps_right = int((self.nr_electrodes - max(M, N)) / a)
    configs = []
    for i in range(0, min(nr_of_steps_left, nr_of_steps_right)):
        A = min(M, N) - (i + 1) * a
        B = max(M, N) + (i + 1) * a
        configs.append((A, B, M, N))
    configs = np.array(configs)
    self.add_to_configs(configs)
    return configs
Generate one Schlumberger sounding configuration, i.e., one set of configurations for one potential dipole MN.
8,401
def add_to_configs(self, configs):
    """Append one or more (A, B, M, N) configurations to ``self.configs``.

    Parameters
    ----------
    configs : array_like
        Configurations to add; an empty sequence is ignored.

    Returns
    -------
    numpy.ndarray or None
        The updated configuration array, or None if nothing was added.
    """
    if len(configs) == 0:
        return None
    new_rows = np.atleast_2d(configs)
    if self.configs is None:
        # first configurations ever stored
        self.configs = new_rows
    else:
        self.configs = np.vstack((self.configs, new_rows))
    return self.configs
Add one or more measurement configurations to the stored configurations
8,402
def split_into_normal_and_reciprocal(self, pad=False, return_indices=False):
    """Split the stored configurations into normal and reciprocal measurements.

    A configuration is 'normal' when min(A, B) < min(M, N); its reciprocal
    is the configuration with current and voltage dipoles swapped.

    Parameters
    ----------
    pad : bool, optional
        If True, pad missing partners with NaN rows so both arrays align.
    return_indices : bool, optional
        If True, also return the row indices of the normal and reciprocal
        measurements.

    Returns
    -------
    normals, reciprocals [, normal_indices, reciprocal_indices]
    """
    # normalize: sort electrodes within each dipole for comparisons
    configs = np.hstack((
        np.sort(self.configs[:, 0:2], axis=1),
        np.sort(self.configs[:, 2:4], axis=1),
    ))
    ab_min = configs[:, 0]
    mn_min = configs[:, 2]
    indices_normal = np.where(ab_min < mn_min)[0]
    indices_used = []
    normal = []
    normal_indices = []
    reciprocal_indices = []
    reciprocal = []
    duplicates = []
    for index in indices_normal:
        indices_used.append(index)
        normal.append(self.configs[index, :])
        normal_indices.append(index)
        # find the configuration with swapped current/voltage dipoles
        index_rec = np.where(
            (configs[:, 0] == configs[index, 2]) &
            (configs[:, 1] == configs[index, 3]) &
            (configs[:, 2] == configs[index, 0]) &
            (configs[:, 3] == configs[index, 1]))[0]
        if len(index_rec) == 0 and pad:
            reciprocal.append(np.ones(4) * np.nan)
        elif len(index_rec) == 1:
            reciprocal.append(self.configs[index_rec[0], :])
            indices_used.append(index_rec[0])
            reciprocal_indices.append(index_rec[0])
        # BUG FIX: was 'len(index_rec > 1)' (the length of a boolean array),
        # which only worked by coincidence; compare the length itself.
        elif len(index_rec) > 1:
            # multiple reciprocals: keep the first, record the rest
            reciprocal.append(self.configs[index_rec[0], :])
            reciprocal_indices.append(index_rec[0])
            duplicates += list(index_rec[1:])
            indices_used += list(index_rec)
    # configurations never matched above are reciprocal-only
    set_all_indices = set(list(range(0, configs.shape[0])))
    set_used_indices = set(indices_used)
    reciprocal_only_indices = set_all_indices - set_used_indices
    for index in reciprocal_only_indices:
        if pad:
            normal.append(np.ones(4) * np.nan)
        reciprocal.append(self.configs[index, :])
    normals = np.array(normal)
    reciprocals = np.array(reciprocal)
    if return_indices:
        return normals, reciprocals, normal_indices, reciprocal_indices
    else:
        return normals, reciprocals
Split the stored configurations into normal and reciprocal measurements
8,403
def gen_reciprocals(self, append=False):
    """Generate reciprocal configurations (AB and MN dipoles swapped).

    The reciprocals are sorted within each dipole and then ordered by
    (A, B, M, N); optionally they are appended to the stored
    configurations.
    """
    rec = self.configs.copy()[:, ::-1]
    for cols in (slice(0, 2), slice(2, 4)):
        rec[:, cols] = np.sort(rec[:, cols], axis=1)
    order = np.lexsort((rec[:, 3], rec[:, 2], rec[:, 1], rec[:, 0]))
    rec = rec[order]
    if append:
        self.configs = np.vstack((self.configs, rec))
    return rec
Generate reciprocal configurations, sorted by AB, and optionally append them to the stored configurations.
8,404
def gen_configs_permutate(self, injections_raw, only_same_dipole_length=False,
                          ignore_crossed_dipoles=False, silent=False):
    """Create four-point configurations from a pool of current injections.

    Each injection dipole is combined with every other dipole of the pool
    acting as potential dipole, which guarantees that reciprocals are
    contained in the result. Configurations re-using an electrode are
    dropped.
    """
    injections = np.atleast_2d(injections_raw).astype(int)
    nr_injections = injections.shape[0]
    measurements = []
    for inj in range(0, nr_injections):
        dipole_length = np.abs(injections[inj][1] - injections[inj][0])
        for other in set(range(0, nr_injections)) - set([inj]):
            other_length = np.abs(injections[other, :][1] - injections[other, :][0])
            if only_same_dipole_length and other_length != dipole_length:
                continue
            quadpole = np.array([injections[inj, :], injections[other, :]]).flatten()
            if ignore_crossed_dipoles is True:
                # drop potential electrodes lying inside the current dipole
                if quadpole[0] < quadpole[2] < quadpole[1]:
                    if not silent:
                        print('A - ignoring', quadpole)
                elif quadpole[0] < quadpole[3] < quadpole[1]:
                    if not silent:
                        print('B - ignoring', quadpole)
                else:
                    measurements.append(quadpole)
            else:
                measurements.append(quadpole)
    filtered = []
    for quadpole in measurements:
        if not set(quadpole[0:2]).isdisjoint(set(quadpole[2:4])):
            if not silent:
                print('Ignoring quadrupole because of ',
                      'repeated electrode use:', quadpole)
        else:
            filtered.append(quadpole)
    self.add_to_configs(filtered)
    return np.array(filtered)
Create measurement configurations out of a pool of current injections. Use only the provided dipoles for potential dipole selection. This means that reciprocal measurements are always included.
8,405
def remove_max_dipole_sep(self, maxsep=10):
    """Drop configurations whose dipole separation exceeds ``maxsep``.

    NOTE(review): the separation is computed between columns 1 (B) and
    2 (M) — confirm this matches the intended separation measure.
    """
    separations = np.abs(self.configs[:, 1] - self.configs[:, 2])
    keep = separations <= maxsep
    self.configs = self.configs[keep]
Remove configurations with dipole separations higher than maxsep .
8,406
def to_pg_scheme(self, container=None, positions=None):
    """Convert the stored configurations to a pygimli/BERT measurement scheme.

    Exactly one of ``container`` or ``positions`` must supply electrode
    positions. Electrode numbering is converted from 1-based to the
    0-based convention used by pybert.

    Parameters
    ----------
    container : object, optional
        Data container with an ``electrodes`` attribute whose ``values``
        are (x, y, z) rows.
    positions : array_like, optional
        Explicit electrode positions as (x, y, z) rows.

    Returns
    -------
    pybert.DataContainerERT
        The filled measurement scheme.
    """
    if container is None and positions is None:
        raise Exception('electrode positions are required for BERT export')
    if container is not None and container.electrodes is None:
        raise Exception('container does not contain electrode positions')
    if container is not None and positions is not None:
        raise Exception('only one of container OR positions must be provided')
    if container is not None:
        elec_positions = container.electrodes.values
    elif positions is not None:
        elec_positions = positions
    # make sure the optional pybert dependency is available before importing
    opt_import("pybert", requiredFor="")
    import pybert
    data = pybert.DataContainerERT()
    for nr, (x, y, z) in enumerate(elec_positions):
        data.createSensor((x, y, z))
    data.resize(self.configs.shape[0])
    for index, token in enumerate("abmn"):
        data.set(token, self.configs[:, index].tolist())
    # shift electrode numbers to 0-based indexing expected by pybert
    for token in "abmn":
        data.set(token, data(token) - 1)
    return data
Convert the configuration to a pygimli measurement scheme
8,407
def to_iris_syscal(self, filename):
    """Export the configurations to an IRIS Instruments Syscal config file.

    Electrode positions are written on a regular 1 m grid along x.
    """
    lines = ['#\t X\t Y\t Z\n']
    # electrode positions: one electrode per meter along a line
    for nr in range(0, self.configs.max()):
        lines.append('{} {} 0 0\n'.format(nr + 1, nr))
    lines.append('#\t A\t B\t M\t N\n')
    for nr, config in enumerate(self.configs):
        lines.append('{} {} {} {} {}\n'.format(nr + 1, *config))
    with open(filename, 'w') as fid:
        fid.writelines(lines)
Export to IRIS Instrument configuration file
8,408
def create_plan(self, *, plan_code, description, interval, interval_count,
                max_payments_allowed, payment_attempts_delay, plan_value,
                plan_tax, plan_tax_return_base, currency,
                max_payment_attempts=None, max_pending_payments=None,
                trial_days=None):
    """Create a new subscription plan for the merchant.

    All monetary values share the given ``currency``. Optional retry and
    trial settings are forwarded as-is (``None`` when unset).
    """
    additional_values = [
        {"name": "PLAN_VALUE", "value": plan_value, "currency": currency},
        {"name": "PLAN_TAX", "value": plan_tax, "currency": currency},
        {"name": "PLAN_TAX_RETURN_BASE", "value": plan_tax_return_base,
         "currency": currency},
    ]
    payload = {
        "accountId": self.client.account_id,
        "planCode": plan_code,
        "description": description,
        "interval": interval,
        "intervalCount": interval_count,
        "maxPaymentsAllowed": max_payments_allowed,
        "paymentAttemptsDelay": payment_attempts_delay,
        "additionalValues": additional_values,
        "maxPaymentAttempts": max_payment_attempts,
        "maxPendingPayments": max_pending_payments,
        "trialDays": trial_days,
    }
    return self.client._post(self.url + 'plans', json=payload,
                             headers=self.get_headers())
Creating a new plan for subscriptions associated with the merchant .
8,409
def get_plan(self, plan_code):
    """Fetch the full information of a subscription plan."""
    endpoint = self.url + 'plans/{}'.format(plan_code)
    return self.client._get(endpoint, headers=self.get_headers())
Check all the information of a plan for subscriptions associated with the merchant .
8,410
def delete_plan(self, plan_code):
    """Delete an entire subscription plan of the merchant."""
    endpoint = self.url + 'plans/{}'.format(plan_code)
    return self.client._delete(endpoint, headers=self.get_headers())
Delete an entire subscription plan associated with the merchant .
8,411
def create_customer(self, *, full_name, email):
    """Register a new customer in the system."""
    payload = {"fullName": full_name, "email": email}
    return self.client._post(self.url + 'customers', json=payload,
                             headers=self.get_headers())
Creation of a customer in the system .
8,412
def get_customer(self, customer_id):
    """Query the information related to the customer."""
    endpoint = self.url + 'customers/{}'.format(customer_id)
    return self.client._get(endpoint, headers=self.get_headers())
Queries the information related to the customer .
8,413
def delete_customer(self, customer_id):
    """Remove a customer from the system."""
    endpoint = self.url + 'customers/{}'.format(customer_id)
    return self.client._delete(endpoint, headers=self.get_headers())
Removes a user from the system .
8,414
def create_subscription(self, *, customer_id, credit_card_token, plan_code,
                        quantity=None, installments=None, trial_days=None,
                        immediate_payment=None, extra1=None, extra2=None,
                        delivery_address=None, notify_url=None,
                        recurring_bill_items=None):
    """Subscribe a customer (paying with the given tokenized card) to a plan."""
    customer = {
        "id": customer_id,
        "creditCards": [{"token": credit_card_token}],
    }
    payload = {
        "quantity": quantity,
        "installments": installments,
        "trialDays": trial_days,
        "immediatePayment": immediate_payment,
        "extra1": extra1,
        "extra2": extra2,
        "customer": customer,
        "plan": {"planCode": plan_code},
        "deliveryAddress": delivery_address,
        "notifyUrl": notify_url,
        "recurringBillItems": recurring_bill_items,
    }
    return self.client._post(self.url + 'subscriptions', json=payload,
                             headers=self.get_headers())
Creating a new subscription of a client to a plan .
8,415
def get_subscription(self, subscription_id):
    """Query the basic information of the given subscription.

    BUG FIX: this read-only query was issued with ``_put``; a GET request
    is the correct verb, matching the sibling getters
    (``get_plan``/``get_customer``).
    """
    endpoint = self.url + 'subscriptions/{}'.format(subscription_id)
    return self.client._get(endpoint, headers=self.get_headers())
Check the basic information associated with the specified subscription .
8,416
def update_subscription(self, *, subscription_id, credit_card_token):
    """Update the credit-card token used to charge the subscription.

    Only the card token can currently be changed.
    """
    payload = {"creditCardToken": credit_card_token}
    endpoint = self.url + 'subscriptions/{}'.format(subscription_id)
    return self.client._put(endpoint, json=payload, headers=self.get_headers())
Update information associated with the specified subscription . At the moment it is only possible to update the token of the credit card to which the charge of the subscription is made .
8,417
def delete_subscription(self, subscription_id):
    """Unsubscribe: delete the relationship between customer and plan."""
    endpoint = self.url + 'subscriptions/{}'.format(subscription_id)
    return self.client._delete(endpoint, headers=self.get_headers())
Unsubscribe, deleting the relationship of the customer with the plan.
8,418
def create_additional_charge(self, *, subscription_id, description, plan_value,
                             plan_tax, plan_tax_return_base, currency):
    """Add an extra charge to the subscription's invoice for the current period."""
    payload = {
        "description": description,
        "additionalValues": [
            {"name": "ITEM_VALUE", "value": plan_value, "currency": currency},
            {"name": "ITEM_TAX", "value": plan_tax, "currency": currency},
            {"name": "ITEM_TAX_RETURN_BASE", "value": plan_tax_return_base,
             "currency": currency},
        ],
    }
    endpoint = self.url + 'subscriptions/{}/recurringBillItems'.format(subscription_id)
    return self.client._post(endpoint, json=payload, headers=self.get_headers())
Adds extra charges to the respective invoice for the current period .
8,419
def get_additional_charge_by_identifier(self, recurring_billing_id):
    """Query an invoice's extra-charge information by its identifier."""
    endpoint = self.url + 'recurringBillItems/{}'.format(recurring_billing_id)
    return self.client._get(endpoint, headers=self.get_headers())
Query extra charge information of an invoice from its identifier .
8,420
def update_additional_charge(self, *, recurring_billing_id, description,
                             plan_value, plan_tax, plan_tax_return_base,
                             currency):
    """Update the information of an additional charge in an invoice.

    BUG FIX: the request body was passed as ``payload=``, a keyword the
    HTTP client does not expect; every sibling method sends the body as
    ``json=``.
    """
    payload = {
        "description": description,
        "additionalValues": [
            {"name": "ITEM_VALUE", "value": plan_value, "currency": currency},
            {"name": "ITEM_TAX", "value": plan_tax, "currency": currency},
            {"name": "ITEM_TAX_RETURN_BASE", "value": plan_tax_return_base,
             "currency": currency},
        ],
    }
    endpoint = self.url + 'recurringBillItems/{}'.format(recurring_billing_id)
    return self.client._put(endpoint, json=payload, headers=self.get_headers())
Updates the information from an additional charge in an invoice .
8,421
def delete_additional_charge(self, recurring_billing_id):
    """Remove an extra charge from an invoice."""
    endpoint = self.url + 'recurringBillItems/{}'.format(recurring_billing_id)
    return self.client._delete(endpoint, headers=self.get_headers())
Remove an extra charge from an invoice .
8,422
def thumbnail(parser, token):
    """Parse the ``{% thumbnail %}`` template tag for an ImageField.

    Expected syntax::

        {% thumbnail source size [option1 option2 ...] %}
        {% thumbnail source size [option1 option2 ...] as variable %}

    Returns a ThumbnailNode; raises TemplateSyntaxError on bad syntax or
    unknown options.
    """
    args = token.split_contents()
    tag = args[0]
    # detect the trailing 'as variable' form
    if len(args) > 4 and args[-2] == 'as':
        context_name = args[-1]
        args = args[:-2]
    else:
        context_name = None
    if len(args) < 3:
        raise TemplateSyntaxError(
            "Invalid syntax. Expected "
            "'{%% %s source size [option1 option2 ...] %%}' or "
            "'{%% %s source size [option1 option2 ...] as variable %%}'" %
            (tag, tag))
    source_var = args[1]
    # a bare WIDTHxHEIGHT size is quoted so it compiles as a string literal
    m = REGEXP_THUMB_SIZES.match(args[2])
    if m:
        args[2] = '"%s"' % args[2]
    size_var = args[2]
    args_list = split_args(args[3:]).items()
    opts = {}  # NOTE(review): never filled here — presumably kept for ThumbnailNode's interface; confirm
    kwargs = {}
    for arg, value in args_list:
        value = value and parser.compile_filter(value)
        if arg in TAG_SETTINGS and value is not None:
            kwargs[str(arg)] = value
            continue
        else:
            raise TemplateSyntaxError("'%s' tag received a bad argument: "
                                      "'%s'" % (tag, arg))
    return ThumbnailNode(source_var, size_var, opts=opts,
                         context_name=context_name, **kwargs)
Creates a thumbnail for an ImageField.
8,423
def printStats(self):
    """Print the statistics values requested via ``self.fields``."""
    print("--- Imagestats Results ---")
    # (field name, label, attribute) triples in output order
    rows = [
        ('npix', "Number of pixels : ", 'npix'),
        ('min', "Minimum value : ", 'min'),
        ('max', "Maximum value : ", 'max'),
        ('stddev', "Standard Deviation: ", 'stddev'),
        ('mean', "Mean : ", 'mean'),
        ('mode', "Mode : ", 'mode'),
        ('median', "Median : ", 'median'),
        ('midpt', "Midpt : ", 'midpt'),
    ]
    for field, label, attr in rows:
        # self.fields is a string listing the requested statistics
        if self.fields.find(field) != -1:
            print(label, getattr(self, attr))
Print the requested statistics values for those fields specified on input .
8,424
def raw_request(self, method, uri, **kwargs):
    """Perform a WVA web-services request and return the raw response object.

    urllib3 insecure-connection warnings are suppressed for the duration
    of the request (the WVA presumably uses a self-signed certificate —
    confirm). Transport-level errors are re-raised as
    WVAHttpRequestError, chained to the original exception.
    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
        warnings.simplefilter("ignore", urllib3.exceptions.InsecurePlatformWarning)
        try:
            response = self._get_session().request(method, self._get_ws_url(uri), **kwargs)
        except requests.RequestException as e:
            # wrap and chain so callers only need to catch WVA errors
            six.raise_from(WVAHttpRequestError(e), e)
        else:
            return response
Perform a WVA web services request and return the raw response object
8,425
def request(self, method, uri, **kwargs):
    """Perform a WVA web-services request and return the decoded body.

    Raises the exception class mapped to the HTTP status code (falling
    back to WVAHttpError) for any non-200 response. JSON responses are
    decoded into Python objects; anything else is returned as text.
    """
    response = self.raw_request(method, uri, **kwargs)
    if response.status_code != 200:
        exception_class = HTTP_STATUS_EXCEPTION_MAP.get(response.status_code, WVAHttpError)
        raise exception_class(response)
    if response.headers.get("content-type") == "application/json":
        return json.loads(response.text)
    else:
        return response.text
Perform a WVA web services request and return the decoded value if successful
8,426
def post(self, uri, data, **kwargs):
    """POST the provided data to the specified path."""
    kwargs["data"] = data
    return self.request("POST", uri, **kwargs)
POST the provided data to the specified path
8,427
def post_json(self, uri, data, **kwargs):
    """Serialize ``data`` as JSON and POST it to the specified path."""
    headers = kwargs.setdefault("headers", {})
    headers.update({"Content-Type": "application/json"})
    return self.post(uri, data=json.dumps(data), **kwargs)
POST the provided data as json to the specified path
8,428
def put(self, uri, data, **kwargs):
    """PUT the provided data to the specified path."""
    kwargs["data"] = data
    return self.request("PUT", uri, **kwargs)
PUT the provided data to the specified path
8,429
def put_json(self, uri, data, **kwargs):
    """Serialize ``data`` as JSON and PUT it to the specified path."""
    headers = kwargs.setdefault("headers", {})
    headers.update({"Content-Type": "application/json"})
    return self.put(uri, data=json.dumps(data), **kwargs)
PUT the provided data as json to the specified path
8,430
def mac_to_ipv6_linklocal(mac, prefix="fe80::"):
    """Translate a MAC address into an EUI-64 IPv6 address in ``prefix``.

    The MAC may use ``:``, ``-``, ``.`` or spaces as separators. The
    universal/local bit is flipped as required by RFC 4291.

    BUG FIX: the original prepended an extra ':' to the host part, which
    with the default prefix produced an invalid 'fe80:::...' address.
    """
    # strip separators and parse the MAC as one 48-bit integer
    mac_value = int(
        mac.translate(str.maketrans(dict([(x, None) for x in [" ", ".", ":", "-"]]))),
        16)
    high2 = mac_value >> 32 & 0xffff ^ 0x0200  # flip the universal/local bit
    high1 = mac_value >> 24 & 0xff
    low1 = mac_value >> 16 & 0xff
    low2 = mac_value & 0xffff
    return prefix + '{:04x}:{:02x}ff:fe{:02x}:{:04x}'.format(high2, high1, low1, low2)
Translate a MAC address into an IPv6 address in the prefixed network .
8,431
def datagram_received(self, data, addr):
    """Handle a datagram received from the device.

    Decodes the LIFX message and, when its sequence number matches a
    pending request of the expected type and source, dispatches it:
    runs the matching ``resp_set_*`` state handler, fires the per-request
    callback, wakes the waiting coroutine and drops the bookkeeping
    entry. An Acknowledgement for a request still expecting a full
    response keeps the entry alive; other mismatched types drop it.
    """
    self.register()
    response = unpack_lifx_message(data)
    self.lastmsg = datetime.datetime.now()
    if response.seq_num in self.message:
        response_type, myevent, callb = self.message[response.seq_num]
        if type(response) == response_type:
            if response.source_id == self.source_id:
                if "State" in response.__class__.__name__:
                    # e.g. StateLabel -> resp_set_label
                    setmethod = "resp_set_" + response.__class__.__name__.replace("State", "").lower()
                    if setmethod in dir(self) and callable(getattr(self, setmethod)):
                        getattr(self, setmethod)(response)
                if callb:
                    callb(self, response)
                myevent.set()
                del (self.message[response.seq_num])
        elif type(response) == Acknowledgement:
            # ack for a request that expects a full response: keep waiting
            pass
        else:
            del (self.message[response.seq_num])
    elif self.default_callb:
        # unsolicited message: hand it to the default callback
        self.default_callb(response)
Method run when data is received from the device
8,432
def register(self):
    """Mark this device as registered, notifying the parent once."""
    if self.registered:
        return
    self.registered = True
    if self.parent:
        self.parent.register(self)
Proxy method to register the device with the parent .
8,433
def unregister(self):
    """Unregister from the parent once no message has arrived for ``unregister_timeout`` seconds."""
    if not self.registered:
        return
    cutoff = datetime.datetime.now() - datetime.timedelta(seconds=self.unregister_timeout)
    if cutoff > self.lastmsg:
        self.registered = False
        if self.parent:
            self.parent.unregister(self)
Proxy method to unregister the device with the parent .
8,434
async def fire_sending(self, msg, num_repeats):
    """Send ``msg`` ``num_repeats`` times (default ``self.retry_count``) without awaiting a response."""
    if num_repeats is None:
        num_repeats = self.retry_count
    sleep_interval = 0.05
    for _ in range(num_repeats):
        if self.transport:
            self.transport.sendto(msg.packed_message)
        # brief pause between repeats so the device is not flooded
        await aio.sleep(sleep_interval)
Coroutine used to send message to the device when no response is needed .
8,435
async def try_sending(self, msg, timeout_secs, max_attempts):
    """Send ``msg`` and wait for the matching response or ack.

    Retries up to ``max_attempts`` times (default ``self.retry_count``),
    waiting ``timeout_secs`` (default ``self.timeout``) per attempt.
    If every attempt times out, the pending callback is fired with
    ``None``, the bookkeeping entry is dropped and the device is
    unregistered.
    """
    if timeout_secs is None:
        timeout_secs = self.timeout
    if max_attempts is None:
        max_attempts = self.retry_count
    attempts = 0
    while attempts < max_attempts:
        # the entry disappears once datagram_received handled the response
        if msg.seq_num not in self.message:
            return
        event = aio.Event()
        self.message[msg.seq_num][1] = event
        attempts += 1
        if self.transport:
            self.transport.sendto(msg.packed_message)
        try:
            myresult = await aio.wait_for(event.wait(), timeout_secs)
            break
        except Exception as inst:
            if attempts >= max_attempts:
                if msg.seq_num in self.message:
                    callb = self.message[msg.seq_num][2]
                    if callb:
                        # signal the failure to the caller-supplied callback
                        callb(self, None)
                    del (self.message[msg.seq_num])
                # device did not respond at all: consider it gone
                self.unregister()
Coroutine used to send message to the device when a response or ack is needed .
8,436
def req_with_ack(self, msg_type, payload, callb=None, timeout_secs=None, max_attempts=None):
    """Send a message that requests an ACK (but no state response).

    Registers the pending request under its sequence number and schedules
    the retry coroutine; ``callb`` is invoked with the Acknowledgement
    (or ``None`` on timeout).

    Always returns True — the send itself happens asynchronously.
    """
    msg = msg_type(self.mac_addr, self.source_id, seq_num=self.seq_next(),
                   payload=payload, ack_requested=True, response_requested=False)
    self.message[msg.seq_num] = [Acknowledgement, None, callb]
    xx = self.loop.create_task(self.try_sending(msg, timeout_secs, max_attempts))
    return True
Method to send a message expecting to receive an ACK .
8,437
def get_label(self, callb=None):
    """Request the label from the device; returns the cached value immediately."""
    if self.label is None:
        setter = partial(self.resp_set_label)
        if callb:
            wrapped = lambda sender, resp: (setter(resp), callb(sender, resp))
        else:
            wrapped = lambda sender, resp: setter(resp)
        self.req_with_resp(GetLabel, StateLabel, callb=wrapped)
    return self.label
Convenience method to request the label from the device
8,438
def set_label(self, value, callb=None):
    """Set the device label (truncated to 32 characters)."""
    value = value[:32] if len(value) > 32 else value
    setter = partial(self.resp_set_label, label=value)
    if callb:
        self.req_with_ack(SetLabel, {"label": value},
                          lambda sender, resp: (setter(resp), callb(sender, resp)))
    else:
        self.req_with_ack(SetLabel, {"label": value},
                          lambda sender, resp: setter(resp))
Convenience method to set the label of the device
8,439
def get_location(self, callb=None):
    """Request the location from the device; returns the cached value immediately."""
    if self.location is None:
        setter = partial(self.resp_set_location)
        if callb:
            wrapped = lambda sender, resp: (setter(resp), callb(sender, resp))
        else:
            wrapped = lambda sender, resp: setter(resp)
        self.req_with_resp(GetLocation, StateLocation, callb=wrapped)
    return self.location
Convenience method to request the location from the device
8,440
def get_group(self, callb=None):
    """Request the group from the device; returns the cached value immediately.

    BUG FIX: the locally built callback (which caches the result via
    resp_set_group) was constructed but never used; the request was sent
    with the raw ``callb``, so the group never got cached. Pass the
    wrapper instead, as the sibling getters (get_label/get_location) do.
    """
    if self.group is None:
        mypartial = partial(self.resp_set_group)
        if callb:
            mycallb = lambda x, y: (mypartial(y), callb(x, y))
        else:
            mycallb = lambda x, y: mypartial(y)
        response = self.req_with_resp(GetGroup, StateGroup, callb=mycallb)
    return self.group
Convenience method to request the group from the device
8,441
def get_wififirmware(self, callb=None):
    """Request the wifi firmware info; returns the cached (version, build timestamp)."""
    if self.wifi_firmware_version is None:
        setter = partial(self.resp_set_wififirmware)
        if callb:
            wrapped = lambda sender, resp: (setter(resp), callb(sender, resp))
        else:
            wrapped = lambda sender, resp: setter(resp)
        self.req_with_resp(GetWifiFirmware, StateWifiFirmware, wrapped)
    return (self.wifi_firmware_version, self.wifi_firmware_build_timestamp)
Convenience method to request the wifi firmware info from the device
8,442
def resp_set_wififirmware(self, resp):
    """Default callback for get_wififirmware: cache wifi firmware version and build."""
    if not resp:
        return
    # version is packed as major << 16 | minor
    major = resp.version >> 16
    minor = resp.version & 0xff
    self.wifi_firmware_version = float("{}.{}".format(major, minor))
    self.wifi_firmware_build_timestamp = resp.build
Default callback for get_wififirmware
8,443
def get_wifiinfo(self, callb=None):
    """Request wifi info from the device; the result arrives only via ``callb``."""
    self.req_with_resp(GetWifiInfo, StateWifiInfo, callb=callb)
    return None
Convenience method to request the wifi info from the device
8,444
def get_hostfirmware(self, callb=None):
    """Request the device firmware info; returns the cached (version, build timestamp)."""
    if self.host_firmware_version is None:
        setter = partial(self.resp_set_hostfirmware)
        if callb:
            wrapped = lambda sender, resp: (setter(resp), callb(sender, resp))
        else:
            wrapped = lambda sender, resp: setter(resp)
        self.req_with_resp(GetHostFirmware, StateHostFirmware, wrapped)
    return (self.host_firmware_version, self.host_firmware_build_timestamp)
Convenience method to request the device firmware info from the device
8,445
def resp_set_hostfirmware(self, resp):
    """Default callback for get_hostfirmware: cache host firmware version and build."""
    if not resp:
        return
    # version is packed as major << 16 | minor
    major = resp.version >> 16
    minor = resp.version & 0xff
    self.host_firmware_version = float("{}.{}".format(major, minor))
    self.host_firmware_build_timestamp = resp.build
Default callback for get_hostfirmware
8,446
def get_hostinfo(self, callb=None):
    """Request device info; the result arrives only via ``callb``."""
    self.req_with_resp(GetInfo, StateInfo, callb=callb)
    return None
Convenience method to request the device info from the device
8,447
def get_version(self, callb=None):
    """Convenience method to request the version from the device.

    NOTE(review): this returns the host-firmware fields rather than the
    vendor/product/version values cached by resp_set_version — looks like
    a copy-paste from get_hostfirmware; confirm before relying on the
    return value. The version data itself IS cached on the instance by
    the callback.
    """
    if self.vendor is None:
        mypartial = partial(self.resp_set_version)
        if callb:
            mycallb = lambda x, y: (mypartial(y), callb(x, y))
        else:
            mycallb = lambda x, y: mypartial(y)
        response = self.req_with_resp(GetVersion, StateVersion, callb=mycallb)
    return (self.host_firmware_version, self.host_firmware_build_timestamp)
Convenience method to request the version from the device
8,448
def resp_set_version(self, resp):
    """Default callback for get_version: cache vendor, product and version."""
    if not resp:
        return
    self.vendor = resp.vendor
    self.product = resp.product
    self.version = resp.version
Default callback for get_version
8,449
def resp_set_lightpower(self, resp, power_level=None):
    """Default callback for set_power: cache the device power level.

    An explicitly passed ``power_level`` wins over the response value.
    """
    if power_level is not None:
        self.power_level = power_level
        return
    if resp:
        self.power_level = resp.power_level
Default callback for set_power
8,450
def get_color(self, callb=None):
    """Request the colour status from the device; returns the cached colour."""
    self.req_with_resp(LightGet, LightState, callb=callb)
    return self.color
Convenience method to request the colour status from the device
8,451
def set_color(self, value, callb=None, duration=0, rapid=False):
    """Set the device colour (HSBK 4-tuple); ``rapid`` skips waiting for an ACK."""
    if len(value) != 4:
        return
    setter = partial(self.resp_set_light, color=value)
    if callb:
        wrapped = lambda sender, resp: (setter(resp), callb(sender, resp))
    else:
        wrapped = lambda sender, resp: setter(resp)
    payload = {"color": value, "duration": duration}
    if rapid:
        self.fire_and_forget(LightSetColor, payload, num_repeats=1)
        # no response will come back: update the cached state locally
        self.resp_set_light(None, color=value)
        if callb:
            callb(self, None)
    else:
        self.req_with_ack(LightSetColor, payload, callb=wrapped)
Convenience method to set the colour status of the device
8,452
def resp_set_light(self, resp, color=None):
    """Default callback for set_color: cache colour (and, from a state response, power and label)."""
    if color:
        self.color = color
        return
    if resp:
        self.power_level = resp.power_level
        self.color = resp.color
        # labels are NUL-padded fixed-width bytes
        self.label = resp.label.decode().replace("\x00", "")
Default callback for set_color
8,453
def get_color_zones(self, start_index, end_index=None, callb=None):
    """Request the colour state of zones [start_index, end_index] (default span: 8 zones)."""
    if end_index is None:
        end_index = start_index + 7
    payload = {"start_index": start_index, "end_index": end_index}
    self.req_with_resp(MultiZoneGetColorZones, MultiZoneStateMultiZone,
                       payload=payload, callb=callb)
Convenience method to request the state of colour by zones from the device
8,454
def set_color_zones(self, start_index, end_index, color, duration=0, apply=1,
                    callb=None, rapid=False):
    """Set the colour of the zones [start_index, end_index]; ``rapid`` skips the ACK."""
    if len(color) != 4:
        return
    args = {
        "start_index": start_index,
        "end_index": end_index,
        "color": color,
        "duration": duration,
        "apply": apply,
    }
    setter = partial(self.resp_set_multizonemultizone, args=args)
    if callb:
        wrapped = lambda sender, resp: (setter(resp), callb(sender, resp))
    else:
        wrapped = lambda sender, resp: setter(resp)
    if rapid:
        self.fire_and_forget(MultiZoneSetColorZones, args, num_repeats=1)
        # no ack will come back: run the callback immediately
        wrapped(self, None)
    else:
        self.req_with_ack(MultiZoneSetColorZones, args, callb=wrapped)
Convenience method to set the colour status zone of the device
8,455
def get_infrared(self, callb=None):
    """Request the infrared brightness; returns the cached value immediately."""
    self.req_with_resp(LightGetInfrared, LightStateInfrared, callb=callb)
    return self.infrared_brightness
Convenience method to request the infrared brightness from the device
8,456
def set_infrared(self, infrared_brightness, callb=None, rapid=False):
    """Set the infrared brightness; ``rapid`` skips waiting for an ACK."""
    setter = partial(self.resp_set_infrared, infrared_brightness=infrared_brightness)
    if callb:
        wrapped = lambda sender, resp: (setter(resp), callb(sender, resp))
    else:
        wrapped = lambda sender, resp: setter(resp)
    payload = {"infrared_brightness": infrared_brightness}
    if rapid:
        self.fire_and_forget(LightSetInfrared, payload, num_repeats=1)
        # no response will come back: update the cached state locally
        self.resp_set_infrared(None, infrared_brightness=infrared_brightness)
        if callb:
            callb(self, None)
    else:
        self.req_with_ack(LightSetInfrared, payload, callb=wrapped)
Convenience method to set the infrared status of the device
8,457
def start(self, listen_ip=LISTEN_IP, listen_port=0):
    """Start the discovery task listening on the given address."""
    endpoint = self.loop.create_datagram_endpoint(
        lambda: self, local_addr=(listen_ip, listen_port))
    self.task = self.loop.create_task(endpoint)
    return self.task
Start discovery task .
8,458
def connection_made(self, transport):
    """Callback run when the UDP broadcast endpoint is ready: enable broadcast and start discovery."""
    self.transport = transport
    sock = self.transport.get_extra_info("socket")
    # allow rebinding and broadcasting on the discovery socket
    for opt in (socket.SO_REUSEADDR, socket.SO_BROADCAST):
        sock.setsockopt(socket.SOL_SOCKET, opt, 1)
    self.loop.call_soon(self.discover)
Method run when the UDP broadcast server is started
8,459
def datagram_received(self, data, addr):
    """Handle a discovery response from a device.

    Decodes the message, ignores broadcasts, derives the device's
    address/port (IPv6 link-local when an ipv6 prefix is configured),
    and creates or re-connects a Light endpoint for the device's MAC.
    Already-registered lights are left untouched.
    """
    response = unpack_lifx_message(data)
    response.ip_addr = addr[0]
    mac_addr = response.target_addr
    if mac_addr == BROADCAST_MAC:
        return
    # service == 1 presumably means UDP — confirm against the LIFX LAN protocol
    if type(response) == StateService and response.service == 1:
        remote_port = response.port
    elif type(response) == LightState:
        remote_port = UDP_BROADCAST_PORT
    else:
        return
    if self.ipv6prefix:
        family = socket.AF_INET6
        remote_ip = mac_to_ipv6_linklocal(mac_addr, self.ipv6prefix)
    else:
        family = socket.AF_INET
        remote_ip = response.ip_addr
    if mac_addr in self.lights:
        light = self.lights[mac_addr]
        if light.registered:
            # already connected and alive: nothing to do
            return
        # stale entry: tear down and reconnect with fresh address info
        light.cleanup()
        light.ip_addr = remote_ip
        light.port = remote_port
    else:
        light = Light(self.loop, mac_addr, remote_ip, remote_port, parent=self)
        self.lights[mac_addr] = light
    coro = self.loop.create_datagram_endpoint(
        lambda: light, family=family, remote_addr=(remote_ip, remote_port))
    light.task = self.loop.create_task(coro)
Method run when data is received from the devices
8,460
def discover(self):
    """Periodically broadcast a GetService discovery message.

    Re-schedules itself every ``discovery_step`` seconds and only sends
    the broadcast once the countdown has elapsed.
    """
    if self.transport:
        if self.discovery_countdown <= 0:
            self.discovery_countdown = self.discovery_interval
            msg = GetService(BROADCAST_MAC, self.source_id, seq_num=0,
                             payload={}, ack_requested=False,
                             response_requested=True)
            self.transport.sendto(msg.generate_packed_message(),
                                  (self.broadcast_ip, UDP_BROADCAST_PORT))
        else:
            self.discovery_countdown -= self.discovery_step
        self.loop.call_later(self.discovery_step, self.discover)
Method to send a discovery message
8,461
async def scan(self, timeout=1):
    """Return a list of local IP addresses on interfaces with LIFX bulbs.

    Runs a short discovery on every local IPv4 address and collects the
    addresses whose discovery completed within ``timeout`` seconds.

    BUG FIX: the adapter list fetched off-thread via run_in_executor was
    discarded and ifaddr.get_adapters() was called a second time,
    synchronously, blocking the event loop. Reuse the first result.
    """
    adapters = await self.loop.run_in_executor(None, ifaddr.get_adapters)
    ips = [ip.ip for adapter in adapters for ip in adapter.ips if ip.is_IPv4]
    if not ips:
        return []
    tasks = []
    discoveries = []
    for ip in ips:
        manager = ScanManager(ip)
        lifx_discovery = LifxDiscovery(self.loop, manager)
        discoveries.append(lifx_discovery)
        lifx_discovery.start(listen_ip=ip)
        tasks.append(self.loop.create_task(manager.lifx_ip()))
    (done, pending) = await aio.wait(tasks, timeout=timeout)
    for discovery in discoveries:
        discovery.cleanup()
    for task in pending:
        task.cancel()
    return [task.result() for task in done]
Return a list of local IP addresses on interfaces with LIFX bulbs .
8,462
def _get_file_version ( filename ) : mat = sio . loadmat ( filename , squeeze_me = True ) version = mat [ 'MP' ] [ 'Version' ] . item ( ) del ( mat ) return version
High level import function that tries to determine the specific version of the data format used .
8,463
def MD_ConfigsPermutate(df_md):
    """Return an Nx4 configuration array permuting the current-injection dipoles of an MD DataFrame."""
    injections = np.array(list(df_md.groupby(['a', 'b']).groups.keys()))
    manager = ConfigManager(nr_of_electrodes=injections.max())
    manager.gen_configs_permutate(injections, silent=True)
    return manager.configs
Given an MD DataFrame, return an Nx4 array which permutes the current injection dipoles.
8,464
def apply_correction_factors(df, correction_file):
    """Apply correction factors for a pseudo-2D measurement setup.

    See Weigand and Kemna (2017), Biogeosciences, for detailed information.

    Parameters
    ----------
    df : pandas.DataFrame
        sEIT data with columns 'a', 'b', 'm', 'n' and 'frequency'.
    correction_file : str or list/tuple of str
        Path(s) to text file(s) with correction factors, either three
        columns (AB, MN, factor; electrodes packed as P * 1e4 + Q) or
        five columns (a, b, m, n, factor).

    Returns
    -------
    df : pandas.DataFrame
        Data with 'r', 'Zt', 'Vmn', 'rho_a' (where present) multiplied by
        the matching factor, plus a new 'corr_fac' column.
    corr_data : numpy.ndarray
        The normalized (sorted-dipole) Nx5 correction table.
    """
    if isinstance(correction_file, (list, tuple)):
        corr_data_raw = np.vstack([np.loadtxt(x) for x in correction_file])
    else:
        # BUG FIX: a single-row file is returned 1-D by np.loadtxt and the
        # .shape[1] check below would raise IndexError; force 2-D.
        corr_data_raw = np.atleast_2d(np.loadtxt(correction_file))
    if corr_data_raw.shape[1] == 3:
        # decode electrode pairs packed as P * 1e4 + Q
        A = (corr_data_raw[:, 0] / 1e4).astype(int)
        B = (corr_data_raw[:, 0] % 1e4).astype(int)
        M = (corr_data_raw[:, 1] / 1e4).astype(int)
        N = (corr_data_raw[:, 1] % 1e4).astype(int)
        corr_data = np.vstack((A, B, M, N, corr_data_raw[:, 2])).T
    elif corr_data_raw.shape[1] == 5:
        corr_data = corr_data_raw
    else:
        raise Exception('error')

    # normalize dipole ordering so matching is order-insensitive
    corr_data[:, 0:2] = np.sort(corr_data[:, 0:2], axis=1)
    corr_data[:, 2:4] = np.sort(corr_data[:, 2:4], axis=1)

    if 'frequency' not in df.columns:
        raise Exception(
            'No frequency data found. Are you sure this is a seit data set?')

    # positional groupby indices are valid .loc labels after reset_index()
    df = df.reset_index()
    gf = df.groupby(['a', 'b', 'm', 'n'])
    for key, item in gf.indices.items():
        item_norm = np.hstack((np.sort(key[0:2]), np.sort(key[2:4])))
        index = np.where(
            (corr_data[:, 0] == item_norm[0]) &
            (corr_data[:, 1] == item_norm[1]) &
            (corr_data[:, 2] == item_norm[2]) &
            (corr_data[:, 3] == item_norm[3])
        )[0]
        if len(index) == 0:
            # BUG FIX: this used to drop into an interactive IPython shell
            # (debug leftover); fail loudly instead.
            raise Exception(
                'No correction factor found for this configuration')
        factor = corr_data[index[0], 4]
        # BUG FIX: df.ix was removed from pandas; use .loc.
        for col in ('r', 'Zt', 'Vmn', 'rho_a'):
            if col in df.columns:
                df.loc[item, col] *= factor
        df.loc[item, 'corr_fac'] = factor
    return df, corr_data
Apply correction factors for a pseudo-2D measurement setup. See Weigand and Kemna (2017), Biogeosciences, for detailed information.
8,465
def get_pij_method(model=F81, frequencies=None, kappa=None):
    """Return a function computing the substitution probability matrix P(t).

    The returned callable maps a time t to the matrix of probabilities of
    substitutions i -> j over t, for the requested model (F81-like, JTT
    or HKY). Returns None for an unrecognized model.
    """
    if is_f81_like(model):
        mu = get_mu(frequencies)

        def f81_pij(t):
            return get_f81_pij(t, frequencies, mu)

        return f81_pij
    if model == JTT:
        return get_jtt_pij
    if model == HKY:

        def hky_pij(t):
            return get_hky_pij(t, frequencies, kappa)

        return hky_pij
Returns a function for calculation of probability matrix of substitutions i - > j over time t .
8,466
def initialize_allowed_states(tree, feature, states):
    """Initialize the allowed-state arrays for all tree nodes.

    A node with no state assigned under ``feature`` (e.g. an internal
    node or an unannotated tip) is allowed every state; otherwise only
    the assigned states are allowed (marked 1 in the indicator array).

    :param tree: tree whose nodes are annotated (must support traverse())
    :param feature: str, name of the feature holding node states
    :param states: iterable of all possible states
    """
    allowed_states_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
    state2index = dict(zip(states, range(len(states))))

    for node in tree.traverse():
        node_states = getattr(node, feature, set())
        if not node_states:
            # unannotated node: every state is allowed
            # BUG FIX: np.int was removed in NumPy >= 1.24; use builtin int
            allowed_states = np.ones(len(state2index), dtype=int)
        else:
            allowed_states = np.zeros(len(state2index), dtype=int)
            for state in node_states:
                allowed_states[state2index[state]] = 1
        node.add_feature(allowed_states_feature, allowed_states)
Initializes the allowed state arrays for tips based on their states given by the feature .
8,467
def alter_zero_tip_allowed_states(tree, feature):
    """Reconcile allowed-state arrays of zero-branch-length tips.

    Zero-distance tips under the same parent must not contradict each
    other: if no state is allowed by all of them, every such tip is given
    the union of their allowed states.
    """
    zero_parent2tips = defaultdict(list)

    allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)

    # group annotated zero-distance tips by their parent
    for tip in tree:
        if tip.dist == 0:
            state = getattr(tip, feature, None)
            if state is not None and state != '':
                zero_parent2tips[tip.up].append(tip)

    for parent, zero_tips in zero_parent2tips.items():
        # sum the allowed-state indicator arrays over all zero tips
        counts = None
        for tip in zero_tips:
            if counts is None:
                counts = getattr(tip, allowed_state_feature).copy()
            else:
                counts += getattr(tip, allowed_state_feature)
        # if some state is allowed by every zero tip, they already agree
        if counts.max() == len(zero_tips):
            continue
        # otherwise merge: OR the indicator arrays together; note that all
        # tips end up sharing the SAME array object, so by the end of the
        # loop each tip holds the fully merged union
        allowed_states = None
        for tip in zero_tips:
            if allowed_states is None:
                allowed_states = getattr(tip, allowed_state_feature).copy()
            else:
                tip_allowed_states = getattr(tip, allowed_state_feature)
                allowed_states[np.nonzero(tip_allowed_states)] = 1
            tip.add_feature(allowed_state_feature, allowed_states)
Alters the bottom - up likelihood arrays for zero - distance tips to make sure they do not contradict with other zero - distance tip siblings .
8,468
def unalter_zero_tip_allowed_states(tree, feature, state2index):
    """Restore zero-distance tips' allowed states to their own states.

    For each zero-branch-length tip with annotated states, the allowed
    states are intersected with the tip's own states; if the intersection
    is empty, the tip's own states are used as-is.

    :param tree: annotated tree (iterating it yields the tips)
    :param feature: str, name of the feature holding node states
    :param state2index: dict mapping each state to its array index
    """
    allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
    for tip in tree:
        if tip.dist > 0:
            continue
        state = getattr(tip, feature, set())
        if state:
            # BUG FIX: np.int was removed in NumPy >= 1.24; use builtin int
            initial_allowed_states = np.zeros(len(state2index), int)
            for _ in state:
                initial_allowed_states[state2index[_]] = 1
            allowed_states = getattr(tip, allowed_state_feature) & initial_allowed_states
            # fall back to the tip's own states if the intersection is empty
            tip.add_feature(allowed_state_feature,
                            (allowed_states if np.any(allowed_states > 0)
                             else initial_allowed_states))
Unalters the bottom - up likelihood arrays for zero - distance tips to contain ones only in their states .
8,469
def unalter_zero_tip_joint_states(tree, feature, state2index):
    """Restore joint tip states of zero-distance tips to their own states.

    Multi-state tips have any joint state outside their allowed set
    replaced by one of the allowed indices; single-state tips get an
    array filled with their sole state index.

    :param tree: annotated tree (iterating it yields the tips)
    :param feature: str, name of the feature holding node states
    :param state2index: dict mapping each state to its array index
    """
    lh_joint_state_feature = get_personalized_feature_name(feature, BU_LH_JOINT_STATES)
    for tip in tree:
        if tip.dist > 0:
            continue
        state = getattr(tip, feature, set())
        if len(state) > 1:
            allowed_indices = {state2index[_] for _ in state}
            allowed_index = next(iter(allowed_indices))
            joint_states = getattr(tip, lh_joint_state_feature)
            for i in range(len(state2index)):
                if joint_states[i] not in allowed_indices:
                    joint_states[i] = allowed_index
        elif len(state) == 1:
            # BUG FIX: np.int was removed in NumPy >= 1.24; use builtin int
            tip.add_feature(lh_joint_state_feature,
                            np.ones(len(state2index), int)
                            * state2index[next(iter(state))])
Unalters the joint tip states for zero - distance tips to contain only their states .
8,470
def calculate_marginal_likelihoods(tree, feature, frequencies):
    """Combine bottom-up and top-down likelihoods into marginals.

    For every node (preorder) the marginal likelihood is the elementwise
    product of its bottom-up likelihood, its top-down likelihood, the
    state frequencies and its allowed-state mask; the two scaling factors
    are summed. The intermediate bottom-up/top-down features are removed.
    """
    bu_lh = get_personalized_feature_name(feature, BU_LH)
    bu_sf = get_personalized_feature_name(feature, BU_LH_SF)
    td_lh = get_personalized_feature_name(feature, TD_LH)
    td_sf = get_personalized_feature_name(feature, TD_LH_SF)
    lh = get_personalized_feature_name(feature, LH)
    lh_sf = get_personalized_feature_name(feature, LH_SF)
    allowed = get_personalized_feature_name(feature, ALLOWED_STATES)

    for node in tree.traverse('preorder'):
        combined = (getattr(node, bu_lh) * getattr(node, td_lh)
                    * frequencies * getattr(node, allowed))
        node.add_feature(lh, combined)
        node.add_feature(lh_sf, getattr(node, td_sf) + getattr(node, bu_sf))
        # drop the now-redundant intermediate features
        for tmp in (bu_lh, bu_sf, td_lh, td_sf):
            node.del_feature(tmp)
Calculates marginal likelihoods for each tree node by multiplying state frequencies with their bottom - up and top - down likelihoods .
8,471
def convert_likelihoods_to_probabilities(tree, feature, states):
    """Normalize per-node marginal likelihoods into probabilities.

    Each node's likelihood vector is divided by its sum, producing
    marginal probabilities, and the result is returned as a DataFrame
    indexed by node name with one column per state.
    """
    lh_feature = get_personalized_feature_name(feature, LH)
    name2probs = {}
    for node in tree.traverse():
        likelihoods = getattr(node, lh_feature)
        name2probs[node.name] = likelihoods / likelihoods.sum()
    return pd.DataFrame.from_dict(name2probs, orient='index', columns=states)
Normalizes each node marginal likelihoods to convert them to marginal probabilities .
8,472
def choose_ancestral_states_mppa(tree, feature, states, force_joint=True):
    """Choose node ancestral states by marginal probabilities (MPPA).

    For every node, the subset size k (1 <= k <= n) minimizing the
    squared distance between the sorted probability vector and the flat
    vector (1/k over the top k states) is selected; the k most likely
    states become the node's allowed states. With ``force_joint`` the
    node's joint-reconstruction state is always kept in the selection.

    :return: tuple (number of state scenarios, number of unresolved
             nodes, total number of selected states)
    """
    lh_feature = get_personalized_feature_name(feature, LH)
    allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
    joint_state_feature = get_personalized_feature_name(feature, JOINT_STATE)

    n = len(states)
    _, state2array = get_state2allowed_states(states, False)

    num_scenarios = 1
    unresolved_nodes = 0
    num_states = 0
    for node in tree.traverse():
        marginal_likelihoods = getattr(node, lh_feature)
        marginal_probs = marginal_likelihoods / marginal_likelihoods.sum()
        if force_joint:
            # move the joint state's probability to the end so that it is
            # always inside the top-k window considered below
            joint_index = getattr(node, joint_state_feature)
            joint_prob = marginal_probs[joint_index]
            marginal_probs = np.hstack(
                (np.sort(np.delete(marginal_probs, joint_index)), [joint_prob]))
        else:
            marginal_probs = np.sort(marginal_probs)

        best_k = n
        best_correction = np.inf
        for k in range(1, n + 1):
            correction = np.hstack((np.zeros(n - k), np.ones(k) / k)) - marginal_probs
            correction = correction.dot(correction)
            if correction < best_correction:
                best_correction = correction
                best_k = k

        num_scenarios *= best_k
        num_states += best_k
        if force_joint:
            # BUG FIX: the sort key used to compare n (the state count) to
            # joint_index, which never matched since indices are < n;
            # compare the state index itself so the joint state is ranked
            # first and is always among the best_k selected.
            indices_selected = sorted(
                range(n),
                key=lambda _: (0 if _ == joint_index else 1,
                               -marginal_likelihoods[_]))[:best_k]
        else:
            indices_selected = sorted(
                range(n), key=lambda _: -marginal_likelihoods[_])[:best_k]
        if best_k == 1:
            allowed_states = state2array[indices_selected[0]]
        else:
            # BUG FIX: np.int was removed in NumPy >= 1.24; use builtin int
            allowed_states = np.zeros(len(states), dtype=int)
            allowed_states[indices_selected] = 1
            unresolved_nodes += 1
        node.add_feature(allowed_state_feature, allowed_states)

    return num_scenarios, unresolved_nodes, num_states
Chooses node ancestral states based on their marginal probabilities using MPPA method .
8,473
def choose_ancestral_states_map(tree, feature, states):
    """Assign every node the single most likely state (MAP).

    Each node's allowed-state array becomes the indicator of the state
    with maximal marginal likelihood.
    """
    lh_feature = get_personalized_feature_name(feature, LH)
    allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
    _, state2array = get_state2allowed_states(states, False)
    for node in tree.traverse():
        best_index = getattr(node, lh_feature).argmax()
        node.add_feature(allowed_state_feature, state2array[best_index])
Chooses node ancestral states based on their marginal probabilities using MAP method .
8,474
def choose_ancestral_states_joint(tree, feature, states, frequencies):
    """Assign node states from the joint reconstruction.

    Picks the most likely root state (bottom-up likelihood times state
    frequencies) and propagates downwards: each child takes the state
    recorded as optimal given its parent's chosen state.
    """
    lh_feature = get_personalized_feature_name(feature, BU_LH)
    lh_state_feature = get_personalized_feature_name(feature, BU_LH_JOINT_STATES)
    allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
    joint_state_feature = get_personalized_feature_name(feature, JOINT_STATE)
    _, state2array = get_state2allowed_states(states, False)

    def assign_down(node, state_index):
        # record the chosen state, then recurse: each child picks the
        # state consistent with its parent's choice
        node.add_feature(joint_state_feature, state_index)
        node.add_feature(allowed_state_feature, state2array[state_index])
        for child in node.children:
            assign_down(child, getattr(child, lh_state_feature)[state_index])

    root_index = (getattr(tree, lh_feature) * frequencies).argmax()
    assign_down(tree, root_index)
Chooses node ancestral states based on their marginal probabilities using joint method .
8,475
def col_name2cat(column):
    """Sanitize a column name for use as a category identifier.

    Spaces become underscores; every other character that is not
    alphanumeric or an underscore is dropped.

    :param column: str, the original column name
    :return: str, the sanitized name
    """
    underscored = column.replace(' ', '_')
    return ''.join(ch for ch in underscored if ch.isalnum() or ch == '_')
Reformats the column string to make sure it contains only alphanumeric characters or underscores.
8,476
def get_user_config_filename(appname='notify'):
    """Get the per-user config filename for the current platform.

    Windows: %APPDATA%\\<app>\\<app>.cfg; Linux: the XDG config directory
    (falling back to a ~/.<app>.cfg dotfile when no XDG directory exists
    and none is configured); macOS and anything else: ~/.<app>.cfg.
    """
    import platform
    system = platform.system()
    if system == 'Windows':
        rootname = os.path.join(os.environ['APPDATA'], appname)
        filename = appname + ".cfg"
        prefix = ''
    elif system == 'Linux':
        XDG_CONFIG_HOME = os.environ.get('XDG_CONFIG_HOME', None)
        rootname = XDG_CONFIG_HOME or os.path.join('~', '.config')
        rootname = os.path.expanduser(rootname)
        # NOTE(review): nesting reconstructed from a flattened view — the
        # dotfile fallback is assumed to apply only when the XDG dir is
        # missing AND unset; confirm against the original layout.
        if not os.path.exists(rootname) and XDG_CONFIG_HOME is None:
            rootname = os.path.expanduser('~')
            filename = appname + ".cfg"
            prefix = '.'
        else:
            rootname = os.path.join(rootname, appname)
            filename = appname + ".cfg"
            prefix = ''
    elif system == 'Darwin':
        rootname = os.path.expanduser('~')
        filename = appname + ".cfg"
        prefix = '.'
    else:
        # unknown platform: treat like a generic Unix home dotfile
        rootname = os.path.expanduser('~')
        filename = appname + ".cfg"
        prefix = '.'
    return os.path.join(rootname, prefix + filename)
Get user config filename .
8,477
def config_to_options(config):
    """Convert a ConfigParser instance to an options namespace.

    Reads the smtp/mail/auth sections into attributes of a simple
    namespace object and interpolates ``%(host)s`` / ``%(prog)s``
    placeholders in the from/to addresses.

    :param config: a ConfigParser-like object
    :return: namespace with host, port, to_addr, from_addr, subject,
             encoding and username attributes
    """
    class Options:
        host = config.get('smtp', 'host', raw=True)
        port = config.getint('smtp', 'port')
        to_addr = config.get('mail', 'to_addr', raw=True)
        from_addr = config.get('mail', 'from_addr', raw=True)
        subject = config.get('mail', 'subject', raw=True)
        encoding = config.get('mail', 'encoding', raw=True)
        username = config.get('auth', 'username')

    opts = Options()
    # BUG FIX: the results of the % interpolation were previously
    # computed and discarded; assign them back to the options.
    substitutions = {'host': opts.host, 'prog': 'notify'}
    opts.from_addr = opts.from_addr % substitutions
    opts.to_addr = opts.to_addr % substitutions
    return opts
Convert ConfigParser instance to argparse . Namespace
8,478
def create_default_config():
    """Create the default ConfigParser instance.

    Loads the built-in DEFAULT_CONFIG, then either runs the first-time
    setup wizard (when no user config file exists yet) or overlays the
    user's config file on top of the defaults.

    :return: the populated ConfigParser instance
    """
    import codecs
    config = ConfigParser.SafeConfigParser()
    config.readfp(StringIO(DEFAULT_CONFIG))
    filename = get_user_config_filename()
    if not os.path.exists(filename):
        from wizard import setup_wizard
        setup_wizard(config)
    else:
        # BUG FIX: the handle was opened OUTSIDE the try block but closed
        # in `finally`, so a failing open() raised UnboundLocalError on
        # `fi`; a context manager closes it safely in every case.
        with codecs.open(filename, 'r', encoding='utf-8') as fi:
            config.readfp(fi)
    return config
Create default ConfigParser instance
8,479
def has_multiple_timesteps(data):
    """Return True if the data container has multiple timesteps.

    A container qualifies when it has a "timestep" column with more than
    one distinct value. (Falls through — i.e. returns None — otherwise.)
    """
    if "timestep" in data.keys() and np.unique(data["timestep"]).size > 1:
        return True
Return True if data container has multiple timesteps .
8,480
def split_timesteps(data, consistent_abmn=False):
    """Split data into one frame per timestep.

    Returns a list of per-timestep groups when several timesteps are
    present, otherwise the input unchanged. ``consistent_abmn`` is kept
    for interface compatibility but is currently unused.
    """
    if not has_multiple_timesteps(data):
        return data
    return [frame for _, frame in data.groupby("timestep")]
Split data into multiple timesteps .
8,481
def parse(text):
    """Parse input text, matching and replacing using the Avro dict.

    Walks the text character by character, skipping positions already
    consumed by a previous multi-character match (tracked via cur_end),
    trying non-rule patterns first, then rule patterns, and falling back
    to the raw character when nothing matches.
    """
    fixed_text = validate.fix_string_case(utf(text))
    output = []
    cur_end = 0
    for cur, i in enumerate(fixed_text):
        # characters that cannot be encoded are passed through verbatim
        try:
            i.encode('utf-8')
        except UnicodeDecodeError:
            uni_pass = False
        else:
            uni_pass = True
        match = {'matched': False}
        if not uni_pass:
            cur_end = cur + 1
            output.append(i)
        elif cur >= cur_end and uni_pass:
            # only look for a new match past the end of the previous one
            match = match_non_rule_patterns(fixed_text, cur)
            if match["matched"]:
                output.append(match["replaced"])
                cur_end = cur + len(match["found"])
            else:
                match = match_rule_patterns(fixed_text, cur)
                if match["matched"]:
                    cur_end = cur + len(match["found"])
                    replaced = process_rules(rules=match["rules"],
                                             fixed_text=fixed_text,
                                             cur=cur, cur_end=cur_end)
                    # rules may refine the default replacement
                    if replaced is not None:
                        output.append(replaced)
                    else:
                        output.append(match["replaced"])
            # NOTE(review): indentation was lost in this view; this
            # fallback is assumed to sit inside the elif branch so
            # unmatched characters are emitted once — confirm.
            if not match["matched"]:
                cur_end = cur + 1
                output.append(i)
    return ''.join(output)
Parses the input text, matching and replacing patterns using the Avro dictionary.
8,482
def match_non_rule_patterns(fixed_text, cur=0):
    """Match the text at the cursor against the non-rule patterns.

    :return: dict with 'matched' flag, the 'found' source string and its
             'replaced' value (the raw character when nothing matched)
    """
    hits = exact_find_in_pattern(fixed_text, cur, NON_RULE_PATTERNS)
    if hits:
        first = hits[0]
        return {"matched": True,
                "found": first['find'],
                "replaced": first['replace']}
    return {"matched": False,
            "found": None,
            "replaced": fixed_text[cur]}
Matches given text at cursor position with non rule patterns
8,483
def match_rule_patterns(fixed_text, cur=0):
    """Match the text at the cursor against the rule patterns.

    :return: dict with 'matched' flag, the 'found' source string, its
             default 'replaced' value and the pattern's 'rules' (None
             when nothing matched)
    """
    hits = exact_find_in_pattern(fixed_text, cur, RULE_PATTERNS)
    if hits:
        first = hits[0]
        return {"matched": True,
                "found": first['find'],
                "replaced": first['replace'],
                "rules": first['rules']}
    return {"matched": False,
            "found": None,
            "replaced": fixed_text[cur],
            "rules": None}
Matches given text at cursor position with rule patterns
8,484
def exact_find_in_pattern(fixed_text, cur=0, patterns=PATTERNS):
    """Return pattern entries whose 'find' string occurs exactly at cur.

    :param fixed_text: the text being scanned
    :param cur: cursor position in fixed_text
    :param patterns: iterable of dicts each holding a 'find' key
    :return: list of matching pattern dicts
    """
    matches = []
    for pattern in patterns:
        end = cur + len(pattern['find'])
        if end <= len(fixed_text) and fixed_text[cur:end] == pattern['find']:
            matches.append(pattern)
    return matches
Returns the pattern items whose 'find' string matches the given text at the cursor position.
8,485
def process_rules(rules, fixed_text, cur=0, cur_end=1):
    """Return the replacement of the first rule whose matches all pass.

    A rule applies only when every one of its match conditions succeeds;
    rules with an empty match list never apply. Returns None when no
    rule applies.
    """
    for rule in rules:
        matched = False
        for condition in rule['matches']:
            matched = process_match(condition, fixed_text, cur, cur_end)
            if not matched:
                break
        if matched:
            return rule['replace']
    return None
Process rules matched in pattern and returns suitable replacement
8,486
def process_match(match, fixed_text, cur, cur_end):
    """Evaluate a single prefix/suffix match condition of a rule.

    Inspects the character immediately before the match (prefix) or
    immediately after it (suffix) and tests it against the condition's
    scope (punctuation / vowel / consonant / exact). A leading '!' in
    the scope negates the test (applied via XOR below).

    :return: True if the condition holds, False otherwise
    """
    replace = True
    # position to inspect: one before the match for prefix conditions,
    # the first position after it for suffix conditions
    if match['type'] == 'prefix':
        chk = cur - 1
    else:
        chk = cur_end
    # strip the negation marker, remembering it for the XOR tests below
    if match['scope'].startswith('!'):
        scope = match['scope'][1:]
        negative = True
    else:
        scope = match['scope']
        negative = False
    if scope == 'punctuation':
        # out-of-bounds positions (string boundary) count as punctuation;
        # ^ negative inverts the result for a '!punctuation' scope
        if (not ((chk < 0 and match['type'] == 'prefix') or
                 (chk >= len(fixed_text) and match['type'] == 'suffix') or
                 validate.is_punctuation(fixed_text[chk])) ^ negative):
            replace = False
    elif scope == 'vowel':
        # in-bounds check first, then the vowel test, XORed with negation
        if (not (((chk >= 0 and match['type'] == 'prefix') or
                  (chk < len(fixed_text) and match['type'] == 'suffix')) and
                 validate.is_vowel(fixed_text[chk])) ^ negative):
            replace = False
    elif scope == 'consonant':
        if (not (((chk >= 0 and match['type'] == 'prefix') or
                  (chk < len(fixed_text) and match['type'] == 'suffix')) and
                 validate.is_consonant(fixed_text[chk])) ^ negative):
            replace = False
    elif scope == 'exact':
        # compare an exact substring directly before/after the match
        if match['type'] == 'prefix':
            exact_start = cur - len(match['value'])
            exact_end = cur
        else:
            exact_start = cur_end
            exact_end = cur_end + len(match['value'])
        if not validate.is_exact(match['value'], fixed_text, exact_start,
                                 exact_end, negative):
            replace = False
    return replace
Processes a single match in rules
8,487
def cli(ctx, hostname, username, password, config_dir, https):
    """Command-line interface for interacting with a WVA device.

    Seeds the shared context with connection parameters and the loaded
    configuration; the WVA client itself is created lazily.
    """
    # mark this as the root context before anything else reads it
    ctx.is_root = True
    ctx.user_values_entered = False
    # resolve and load the configuration directory first, since
    # load_config needs config_dir on the context
    ctx.config_dir = os.path.abspath(os.path.expanduser(config_dir))
    ctx.config = load_config(ctx)
    # connection parameters
    ctx.hostname = hostname
    ctx.username = username
    ctx.password = password
    ctx.https = https
    # the device client is constructed on first use
    ctx.wva = None
Command - line interface for interacting with a WVA device
8,488
def get(ctx, uri):
    """Perform an HTTP GET of the provided URI and pretty-print the result."""
    client = get_wva(ctx).get_http_client()
    response = client.get(uri)
    cli_pprint(response)
Perform an HTTP GET of the provided URI
8,489
def delete(ctx, uri):
    """DELETE the specified URI and pretty-print the device's response."""
    client = get_wva(ctx).get_http_client()
    response = client.delete(uri)
    cli_pprint(response)
DELETE the specified URI
8,490
def post(ctx, uri, input_file):
    """POST the contents of input_file to the given URI and print the result."""
    client = get_wva(ctx).get_http_client()
    payload = input_file.read()
    cli_pprint(client.post(uri, payload))
POST file data to a specific URI
8,491
def sample(ctx, element, timestamp, repeat, delay):
    """Sample the value of a vehicle data element.

    Prints ``repeat`` samples of the element, optionally with their
    timestamps, sleeping ``delay`` seconds between consecutive samples
    (but not after the last one).
    """
    element = get_wva(ctx).get_vehicle_data_element(element)
    # BUG FIX: xrange does not exist on Python 3; range is equivalent here
    for i in range(repeat):
        curval = element.sample()
        if timestamp:
            print("{} at {}".format(curval.value, curval.timestamp.ctime()))
        else:
            print("{}".format(curval.value))
        if i + 1 < repeat:
            time.sleep(delay)
Sample the value of a vehicle data element
8,492
def list(ctx):
    """Print the short name of every current subscription."""
    wva = get_wva(ctx)
    for sub in wva.get_subscriptions():
        print(sub.short_name)
List short name of all current subscriptions
8,493
def delete(ctx, short_name):
    """Delete a specific subscription by its short name."""
    get_wva(ctx).get_subscription(short_name).delete()
Delete a specific subscription by short name
8,494
def clear(ctx):
    """Remove all registered subscriptions, reporting progress per item."""
    wva = get_wva(ctx)
    for sub in wva.get_subscriptions():
        # flush so the progress text appears before the (possibly slow) delete
        sys.stdout.write("Deleting {}... ".format(sub.short_name))
        sys.stdout.flush()
        sub.delete()
        print("Done")
Remove all registered subscriptions
8,495
def show(ctx, short_name):
    """Pretty-print the stored metadata for one subscription."""
    subscription = get_wva(ctx).get_subscription(short_name)
    cli_pprint(subscription.get_metadata())
Show metadata for a specific subscription
8,496
def add(ctx, short_name, uri, interval, buffer):
    """Create a subscription named short_name for the given URI."""
    get_wva(ctx).get_subscription(short_name).create(uri, buffer, interval)
Add a subscription with a given short_name for a given uri
8,497
def listen(ctx):
    """Output the contents of the WVA event stream indefinitely."""
    stream = get_wva(ctx).get_event_stream()

    def on_event(event):
        cli_pprint(event)

    stream.add_event_listener(on_event)
    stream.enable()
    # keep the main thread alive while the stream delivers events
    while True:
        time.sleep(5)
Output the contents of the WVA event stream
8,498
def graph(ctx, items, seconds, ylim):
    """Present a live graph of incoming streaming data (needs matplotlib)."""
    wva = get_wva(ctx)
    stream = wva.get_event_stream()
    try:
        from wva import grapher
    except ImportError:
        print("Unable to graph... you must have matplotlib installed")
    else:
        plotter = grapher.WVAStreamGrapher(wva, items, seconds=seconds, ylim=ylim)
        stream.enable()
        plotter.run()
Present a live graph of the incoming streaming data
8,499
def authorize(ctx, public_key, append):
    """Enable ssh login as the Python user for the current user.

    Writes (or, with ``append``, extends) the python user's
    authorized_keys file on the device with the given public key.
    """
    http_client = get_wva(ctx).get_http_client()
    authorized_keys_uri = "/files/userfs/WEB/python/.ssh/authorized_keys"
    contents = public_key
    if append:
        try:
            existing = http_client.get(authorized_keys_uri)
            contents = "{}\n{}".format(existing, public_key)
        except WVAHttpNotFoundError:
            # no existing file: just write the new key
            pass
    http_client.put(authorized_keys_uri, contents)
    print("Public key written to authorized_keys for python user.")
    print("You should now be able to ssh to the device by doing the following:")
    print("")
    print(" $ ssh python@{}".format(get_root_ctx(ctx).hostname))
Enable ssh login as the Python user for the current user