idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
4,300
def user(self, username=None, pk=None, **kwargs):
    """Return exactly one user of KE-chain matching the criteria.

    :raises NotFoundError: when no user matches.
    :raises MultipleFoundError: when more than one user matches.
    """
    matches = self.users(username=username, pk=pk, **kwargs)
    if not matches:
        raise NotFoundError("No user criteria matches")
    if len(matches) > 1:
        raise MultipleFoundError("Multiple users fit criteria")
    return matches[0]
User of KE - chain .
4,301
def team(self, name=None, id=None, is_hidden=False, **kwargs):
    """Return exactly one team of KE-chain matching the criteria.

    Bug fix: the ``is_hidden`` argument was accepted but never forwarded
    to :meth:`teams`; it is now passed through so hidden teams are
    filtered as the signature promises.

    :raises NotFoundError: when no team matches.
    :raises MultipleFoundError: when more than one team matches.
    """
    _teams = self.teams(name=name, id=id, is_hidden=is_hidden, **kwargs)
    if len(_teams) == 0:
        raise NotFoundError("No team criteria matches")
    if len(_teams) != 1:
        raise MultipleFoundError("Multiple teams fit criteria")
    return _teams[0]
Team of KE - chain .
4,302
def teams(self, name=None, id=None, is_hidden=False, **kwargs):
    """Retrieve the teams of KE-chain matching the given filters.

    :return: list of `Team` objects
    :raises NotFoundError: when the backend request fails
    """
    request_params = {'name': name, 'id': id, 'is_hidden': is_hidden}
    request_params.update(kwargs)
    response = self._request('GET', self._build_url('teams'), params=request_params)
    if response.status_code != requests.codes.ok:
        raise NotFoundError("Could not find teams: '{}'".format(response.json()))
    payload = response.json()
    return [Team(team, client=self) for team in payload['results']]
Teams of KE - chain .
4,303
def _create_part(self, action, data, **kwargs):
    """Create a part: internal core function POSTing to /parts.

    :param action: backend ``select_action`` value
    :param data: payload dict; ``suppress_kevents`` is moved here from kwargs
    :raises APIError: when the backend does not answer 201 Created
    """
    if 'suppress_kevents' in kwargs:
        data['suppress_kevents'] = kwargs.pop('suppress_kevents')
    query_params = dict(kwargs, select_action=action)
    response = self._request('POST', self._build_url('parts'),
                             params=query_params, data=data)
    if response.status_code != requests.codes.created:
        raise APIError("Could not create part, {}: {}".format(str(response), response.content))
    return Part(response.json()['results'][0], client=self)
Create a part internal core function .
4,304
def create_part(self, parent, model, name=None, **kwargs):
    """Create a new part instance from a given model under a given parent.

    :param parent: parent `Part` of category INSTANCE
    :param model: `Part` model of category MODEL
    :param name: optional name; defaults to the model's name
    :raises IllegalArgumentError: on wrong categories
    """
    if parent.category != Category.INSTANCE:
        raise IllegalArgumentError("The parent should be an category 'INSTANCE'")
    if model.category != Category.MODEL:
        raise IllegalArgumentError("The models should be of category 'MODEL'")
    payload = {
        "name": name or model.name,
        "parent": parent.id,
        "model": model.id,
    }
    return self._create_part(action="new_instance", data=payload, **kwargs)
Create a new part instance from a given model under a given parent .
4,305
def create_model(self, parent, name, multiplicity='ZERO_MANY', **kwargs):
    """Create a new child model under a given parent model.

    :raises IllegalArgumentError: when *parent* is not of category MODEL
    """
    if parent.category != Category.MODEL:
        raise IllegalArgumentError("The parent should be of category 'MODEL'")
    payload = {"name": name, "parent": parent.id, "multiplicity": multiplicity}
    return self._create_part(action="create_child_model", data=payload, **kwargs)
Create a new child model under a given parent .
4,306
def _create_clone(self, parent, part, **kwargs):
    """Create a new Part clone (model or instance) under *parent*.

    :raises APIError: when the backend does not answer 201 Created
    """
    if part.category == Category.MODEL:
        select_action = 'clone_model'
    else:
        select_action = 'clone_instance'
    data = {
        "part": part.id,
        "parent": parent.id,
        "suppress_kevents": kwargs.pop('suppress_kevents', None),
    }
    query_params = dict(kwargs, select_action=select_action)
    response = self._request('POST', self._build_url('parts'), params=query_params, data=data)
    if response.status_code != requests.codes.created:
        raise APIError("Could not clone part, {}: {}".format(str(response), response.content))
    return Part(response.json()['results'][0], client=self)
Create a new Part clone under the Parent .
4,307
def create_property(self, model, name, description=None, property_type=PropertyType.CHAR_VALUE,
                    default_value=None, unit=None, options=None):
    """Create a new property model under a given part model.

    :param model: part model (category MODEL) to attach the property to
    :param name: name of the new property
    :param description: optional description (defaults to '')
    :param property_type: one of `enums.PropertyType`; bare legacy names such
        as 'CHAR' are upgraded to 'CHAR_VALUE' with a warning
    :param default_value: initial value; for (multi-)reference properties a
        non-empty list/tuple is collapsed to its first element
    :param unit: optional unit string
    :param options: optional options dict
    :return: the created `Property`, also appended to ``model.properties``
    :raises IllegalArgumentError: on wrong category or invalid property type
    :raises APIError: when the backend refuses the creation
    """
    if model.category != Category.MODEL:
        raise IllegalArgumentError("The model should be of category MODEL")
    if not property_type.endswith('_VALUE'):
        # tolerate legacy bare type names, but nudge users towards the enum
        warnings.warn("Please use the `PropertyType` enumeration to ensure providing correct "
                      "values to the backend.", UserWarning)
        property_type = '{}_VALUE'.format(property_type.upper())
    if property_type not in PropertyType.values():
        raise IllegalArgumentError("Please provide a valid propertytype, please use one of `enums.PropertyType`. "
                                   "Got: '{}'".format(property_type))
    # reference properties are created single-valued: only keep the first target
    if property_type in (PropertyType.REFERENCE_VALUE, PropertyType.REFERENCES_VALUE) and \
            isinstance(default_value, (list, tuple)) and default_value:
        default_value = default_value[0]
    data = {
        "name": name,
        "part": model.id,
        "description": description or '',
        "property_type": property_type.upper(),
        "value": default_value,
        "unit": unit or '',
        "options": options or {}
    }
    response = self._request('POST', self._build_url('properties'), json=data)
    if response.status_code != requests.codes.created:
        raise APIError("Could not create property")
    prop = Property.create(response.json()['results'][0], client=self)
    # keep the model's cached property list in sync with the backend
    model.properties.append(prop)
    return prop
Create a new property model under a given model .
4,308
def create_service(self, name, scope, description=None, version=None,
                   service_type=ServiceType.PYTHON_SCRIPT,
                   environment_version=ServiceEnvironmentVersion.PYTHON_3_5,
                   pkg_path=None):
    """Create a Service.

    :param name: name of the service
    :param scope: scope (id) under which the service is created
    :param description: optional description
    :param version: optional script version string
    :param service_type: one of `enums.ServiceType`
    :param environment_version: one of `enums.ServiceEnvironmentVersion`
    :param pkg_path: optional path to a package to upload immediately
    :return: the created `Service`
    :raises IllegalArgumentError: for invalid type/environment values
    :raises APIError: when the backend refuses the creation
    """
    if service_type not in ServiceType.values():
        raise IllegalArgumentError("The type should be of one of {}".format(ServiceType.values()))
    if environment_version not in ServiceEnvironmentVersion.values():
        raise IllegalArgumentError("The environment version should be of one of {}".format(
            ServiceEnvironmentVersion.values()))
    data = {
        "name": name,
        "scope": scope,
        "description": description,
        "script_type": service_type,
        "script_version": version,
        "env_version": environment_version,
    }
    response = self._request('POST', self._build_url('services'), data=data)
    if response.status_code != requests.codes.created:
        raise APIError("Could not create service ({})".format((response, response.json())))
    service = Service(response.json().get('results')[0], client=self)
    if pkg_path:
        # upload the package and refresh to pick up backend-side changes
        service.upload(pkg_path)
        service.refresh()
    return service
Create a Service .
4,309
def delete_scope(self, scope):
    """Delete a scope (project) from KE-chain.

    Uses an explicit type check instead of ``assert`` so the validation is
    not stripped when Python runs with ``-O``; the old message also
    accessed ``scope.name`` which itself fails for non-Scope objects.

    :param scope: `Scope` object to delete
    :raises IllegalArgumentError: when *scope* is not a `Scope`
    :raises APIError: when the backend refuses the deletion
    """
    if not isinstance(scope, Scope):
        raise IllegalArgumentError('Scope "{}" is not a scope!'.format(scope))
    response = self._request('DELETE', self._build_url('scope', scope_id=str(scope.id)))
    if response.status_code != requests.codes.no_content:
        raise APIError("Could not delete scope, {}: {}".format(str(response), response.content))
Delete a scope .
4,310
def set_sort(self, request):
    """Take the sort parameter from the GET parameters and split it into
    the field and the order prefix.

    Bug fix: the field was previously extracted with ``split('-')[1]``,
    which truncates field names that themselves contain a dash
    (``-my-field`` became ``my``); the leading ``-`` is now stripped
    instead.

    :param request: request object exposing ``GET``
    :return: tuple ``(sort_order, sort_field)``; falls back to the
        configured defaults when the field is not allowed
    """
    sort_request = request.GET.get(self.sort_parameter, self.default_sort)
    if sort_request.startswith('-'):
        sort_order = '-'
        sort_field = sort_request[1:]
    else:
        sort_order = ''
        sort_field = sort_request
    if sort_field not in self._allowed_sort_fields:
        sort_order = self.default_sort_order
        sort_field = self.default_sort_field
    return (sort_order, sort_field)
Take the sort parameter from the get parameters and split it into the field and the prefix
4,311
def get_next_sort_string(self, field):
    """Return the querystring that sorts on *field*.

    If *field* is already the active sort field the order is toggled;
    otherwise the field's configured default direction is used.
    """
    if field == self.sort_field:
        prefix = self.toggle_sort_order()
    else:
        prefix = self._allowed_sort_fields[field]['default_direction']
    return self.get_sort_string(prefix + field)
If we're already sorted by the field, then the sort query returned reverses the sort order.
4,312
def get_sort_indicator(self, field):
    """Return the CSS indicator class for *field*.

    Only the active sort field gets an indicator; every other field
    yields the empty string.
    """
    if field != self.sort_field:
        return ''
    return 'sort-desc' if self.sort_order == '-' else 'sort-asc'
Returns a sort class for the active sort only. That is, if field is not sort_field then nothing will be returned because the sort is not active.
4,313
def get_basic_sort_link(self, request, field):
    """Build the sort link for *field*, preserving the interesting GET
    parameters (via ``get_querystring``) and dropping the others."""
    query_string = self.get_querystring()
    sort_string = self.get_next_sort_string(field)
    if sort_string:
        pieces = [request.path, '?', sort_string]
        if query_string:
            pieces += ['&', query_string]
    else:
        pieces = [request.path]
        if query_string:
            pieces += ['?', query_string]
    return ''.join(pieces)
Thanks to del_query_parameters and get_querystring we build the link, preserving the interesting GET parameters and removing the others.
4,314
def build_thumb_path(self, image):
    """Build the absolute path of the to-be-saved thumbnail.

    The result is ``<storage.location>/<upload_to>/<name><THUMB_EXT><ext>``.

    :param image: an image field file with ``file``, ``name`` and ``field``
    """
    image_file = image.file
    image_name_w_ext = split(image.name)[-1]
    image_name, ext = splitext(image_name_w_ext)
    if not self.in_memory(image_file):
        # on-disk files may still carry directory components; keep the basename
        image_name = image_name.split('/')[-1]
    upload_to = image.field.upload_to
    if not upload_to.endswith('/'):
        upload_to = f'{upload_to}/'
    path_upload_to = f'{upload_to}{image_name}'
    return f'{self.storage.location}/{path_upload_to}{THUMB_EXT}{ext}'
Build the absolute path of the to - be - saved thumbnail .
4,315
def run(self, **options):
    """Override runserver's entry point to bring Gunicorn on.

    Performs Django system and migration checks, prints a timestamp, then
    hands control to a `GunicornRunner`.  Ctrl-C shuts the runner down
    gracefully (optionally printing ``shutdown_message``); any other
    exception shuts the runner down and re-raises.
    """
    shutdown_message = options.get('shutdown_message', '')
    self.stdout.write("Performing system checks...\n\n")
    self.check(display_num_errors=True)
    self.check_migrations()
    now = datetime.datetime.now().strftime(r'%B %d, %Y - %X')
    if six.PY2:
        # strftime returns bytes on Python 2; decode with the system encoding
        now = now.decode(get_system_encoding())
    self.stdout.write(now)
    addr, port = self.addr, self.port
    # IPv6 addresses must be bracketed in the bind string
    addr = '[{}]'.format(addr) if self._raw_ipv6 else addr
    runner = GunicornRunner(addr, port, options)
    try:
        runner.run()
    except KeyboardInterrupt:
        runner.shutdown()
        if shutdown_message:
            self.stdout.write(shutdown_message)
        sys.exit(0)
    except:  # noqa: E722 -- deliberately broad: always shut the runner down, then re-raise
        runner.shutdown()
        raise
Override runserver s entry point to bring Gunicorn on .
4,316
def _plot(x, mph, mpd, threshold, edge, valley, ax, ind):
    """Plot results of the detect_peaks function; see its help.

    :param x: 1-D signal that was analysed
    :param ind: indices of the detected peaks/valleys
    :param ax: matplotlib axis to draw on (a new figure is created when None)
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        print('matplotlib is not available.')
    else:
        if ax is None:
            _, ax = plt.subplots(1, 1, figsize=(8, 4))
        ax.plot(x, 'b', lw=1)
        if ind.size:
            label = 'valley' if valley else 'peak'
            label = label + 's' if ind.size > 1 else label
            # NOTE(review): mfc=None keeps the default marker face; the string
            # 'none' would make the markers hollow -- confirm intent.
            ax.plot(ind, x[ind], '+', mfc=None, mec='r', mew=2, ms=8,
                    label='%d %s' % (ind.size, label))
            ax.legend(loc='best', framealpha=.5, numpoints=1)
        ax.set_xlim(-.02 * x.size, x.size * 1.02 - 1)
        # scale the y-axis with a 10% margin around the finite data range
        ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max()
        yrange = ymax - ymin if ymax > ymin else 1
        ax.set_ylim(ymin - 0.1 * yrange, ymax + 0.1 * yrange)
        ax.set_xlabel('Data #', fontsize=14)
        ax.set_ylabel('Amplitude', fontsize=14)
        mode = 'Valley detection' if valley else 'Peak detection'
        ax.set_title("%s (mph=%s, mpd=%d, threshold=%s, edge='%s')"
                     % (mode, str(mph), mpd, str(threshold), edge))
Plot results of the detect_peaks function see its help .
4,317
def assignees(self):
    """List of assignees to the activity.

    Returns an empty list when the assignee id list is explicitly empty,
    the resolved users when ids are present, and None otherwise.
    """
    json_data = self._json_data
    if 'assignees' not in json_data:
        return None
    ids = json_data.get('assignees_ids')
    if ids == list():
        return []
    if ids:
        id_csv = ','.join(str(pk) for pk in ids)
        return self._client.users(id__in=id_csv, is_hidden=False)
    return None
List of assignees to the activity .
4,318
def is_rootlevel(self):
    """Determine if the Activity is at the root level of a project.

    The root itself is not considered root-level; otherwise the parent's
    name is resolved (from cached data or via the client) and compared
    against the known root names.
    """
    if self.is_root():
        return False
    parent_name = None
    parent_info = self._json_data.get('parent_id_name')
    if parent_info and 'name' in parent_info:
        parent_name = parent_info.get('name')
    elif not parent_info:
        # no cached parent info: fetch the parent activity to learn its name
        parent_name = self._client.activity(id=self._json_data.get('parent_id')).name
    return parent_name in ActivityRootNames.values()
Determine if the Activity is at the root level of a project .
4,319
def parent(self):
    """Retrieve the parent activity in which this activity is defined.

    :raises NotFoundError: when this task lives at the top level.
    """
    parent_id = self._json_data.get('parent_id')
    if parent_id is not None:
        return self._client.activity(pk=parent_id, scope=self.scope_id)
    raise NotFoundError("Cannot find subprocess for this task '{}', "
                        "as this task exist on top level.".format(self.name))
Retrieve the parent in which this activity is defined .
4,320
def siblings(self, **kwargs):
    """Retrieve the other activities that also belong to this activity's parent.

    :raises NotFoundError: when this task lives at the top level.
    """
    parent_id = self._json_data.get('parent_id')
    if parent_id is not None:
        return self._client.activities(parent_id=parent_id, scope=self.scope_id, **kwargs)
    raise NotFoundError("Cannot find subprocess for this task '{}', "
                        "as this task exist on top level.".format(self.name))
Retrieve the other activities that also belong to the parent .
4,321
def download_as_pdf(self, target_dir=None, pdf_filename=None, paper_size=PaperSize.A4,
                    paper_orientation=PaperOrientation.PORTRAIT, include_appendices=False):
    """Retrieve the PDF of the Activity.

    :param target_dir: directory to save into (defaults to the cwd)
    :param pdf_filename: filename; '.pdf' is appended when missing and it
        defaults to the activity name
    :param paper_size: one of `PaperSize`
    :param paper_orientation: one of `PaperOrientation`
    :param include_appendices: when True the export is generated
        asynchronously and polled until ready
    :raises APIError: when the download fails or exceeds the time-out
    """
    if not pdf_filename:
        pdf_filename = self.name + '.pdf'
    if not pdf_filename.endswith('.pdf'):
        pdf_filename += '.pdf'
    full_path = os.path.join(target_dir or os.getcwd(), pdf_filename)
    request_params = {'papersize': paper_size, 'orientation': paper_orientation,
                      'appendices': include_appendices}
    url = self._client._build_url('activity_export', activity_id=self.id)
    response = self._client._request('GET', url, params=request_params)
    if response.status_code != requests.codes.ok:
        raise APIError("Could not download PDF of activity {}".format(self.name))
    if include_appendices:
        # async export: poll the provided download url until the file is
        # ready or the time-out limit is exceeded
        data = response.json()
        url = urljoin(self._client.api_root, data['download_url'])
        count = 0
        while count <= ASYNC_TIMEOUT_LIMIT:
            response = self._client._request('GET', url=url)
            if response.status_code == requests.codes.ok:
                with open(full_path, 'wb') as f:
                    for chunk in response.iter_content(1024):
                        f.write(chunk)
                return
            count += ASYNC_REFRESH_INTERVAL
            time.sleep(ASYNC_REFRESH_INTERVAL)
        raise APIError("Could not download PDF of activity {} within the time-out limit of {} "
                       "seconds".format(self.name, ASYNC_TIMEOUT_LIMIT))
    # synchronous export: the first response already carries the PDF
    with open(full_path, 'wb') as f:
        for chunk in response.iter_content(1024):
            f.write(chunk)
Retrieve the PDF of the Activity .
4,322
def parse(argv):
    """Parse cli args and dispatch the requested command.

    NOTE(review): the docopt result is built from *argv*, but the command
    dispatched is taken from ``sys.argv[2]`` -- presumably the subcommand
    name; confirm this stays consistent when *argv* is not ``sys.argv``.
    """
    args = docopt(__doc__, argv=argv)
    try:
        call(sys.argv[2], args)
    except KytosException as exception:
        print("Error parsing args: {}".format(exception))
        exit()
Parse cli args .
4,323
def run(self):
    """Clean build, dist, pyc and egg artefacts from the package and docs."""
    super().run()
    cleanup_commands = (
        'rm -vrf ./build ./dist ./*.egg-info',
        'find . -name __pycache__ -type d | xargs rm -rf',
        'test -d docs && make -C docs/ clean',
    )
    for command in cleanup_commands:
        call(command, shell=True)
Clean build dist pyc and egg from package and docs .
4,324
def run(self):
    """Run the yala linters and exit non-zero on failure."""
    print('Yala is running. It may take several seconds...')
    try:
        check_call('yala setup.py tests kytos', shell=True)
    except CalledProcessError:
        print('Linter check failed. Fix the error(s) above and try again.')
        sys.exit(-1)
    else:
        print('No linter error found.')
Run yala .
4,325
def allow(self):
    """Allow the add-on to be installed by clicking the primary button."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        button = self.find_primary_button()
        button.click()
Allow the add - on to be installed .
4,326
def addon_name(self):
    """Provide access to the add-on name from the description element."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        description = self.find_description()
        return description.find_element(By.CSS_SELECTOR, "b").text
Provide access to the add - on name .
4,327
def cancel(self):
    """Cancel the add-on install by clicking the secondary button."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        button = self.find_secondary_button()
        button.click()
Cancel add - on install .
4,328
def install(self):
    """Confirm the add-on install by clicking the primary button."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        button = self.find_primary_button()
        button.click()
Confirm add - on install .
4,329
def _load_txt(file, devices, channels, header, **kwargs):
    """Read .txt files generated by OpenSignals.

    Cleanup: the original built a ``columns`` list per device that was
    never used; it has been removed.

    :param file: path of the .txt acquisition file
    :param devices: device names, in the same order as *channels*
    :param channels: per-device list of channel numbers
    :param header: parsed OpenSignals header with the column labels
    :return: dict ``{device: {"CH<n>": ndarray}}``
    """
    kwargs_txt = _filter_keywords(numpy.loadtxt, kwargs)
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        out_dict[device] = {}
        for chn in channels[dev_nbr]:
            out_dict[device]["CH" + str(chn)] = numpy.loadtxt(
                fname=file,
                usecols=header[device]["column labels"][chn],
                **kwargs_txt)
    return out_dict
Function used for reading . txt files generated by OpenSignals .
4,330
def _load_h5(file, devices, channels):
    """Read .h5 files generated by OpenSignals.

    :return: dict ``{device: {"CH<n>": ndarray}}``
    """
    h5_object = h5py.File(file)
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        channel_data = {}
        for chn in channels[dev_nbr]:
            raw = h5_object.get(device).get("raw").get("channel_" + str(chn))
            channel_data["CH" + str(chn)] = numpy.concatenate(list(raw))
        out_dict[device] = channel_data
    return out_dict
Function used for reading . h5 files generated by OpenSignals .
4,331
def _check_chn_type ( channels , available_channels ) : chn_list_standardized = [ ] devices = list ( available_channels . keys ( ) ) for dev_nbr , device in enumerate ( devices ) : if channels is not None : sub_unit = channels [ dev_nbr ] for channel in sub_unit : if channel in available_channels [ devices [ dev_nbr ] ] : continue else : raise RuntimeError ( "At least one of the specified channels is not available in " "the acquisition file." ) chn_list_standardized . append ( sub_unit ) else : chn_list_standardized . append ( available_channels [ device ] ) return chn_list_standardized
Function used for checking whether the elements in the channels input are coincident with the available channels.
4,332
def _available_channels ( devices , header ) : chn_dict = { } for dev in devices : chn_dict [ dev ] = header [ dev ] [ "column labels" ] . keys ( ) return chn_dict
Function used for the determination of the available channels in each device .
4,333
def _check_dev_type ( devices , dev_list ) : if devices is not None : for device in devices : if device in dev_list : continue else : raise RuntimeError ( "At least one of the specified devices is not available in the " "acquisition file." ) out = devices else : out = dev_list return out
Function used for checking whether the devices field only contains devices used during the acquisition.
4,334
def _file_type ( file ) : if "." in file : file_type = file . split ( "." ) [ - 1 ] else : file_type = magic . from_file ( file , mime = True ) . split ( "/" ) [ - 1 ] return file_type
Function intended for identification of the file type .
4,335
def team(self):
    """Team to which the scope is assigned, or None when unassigned."""
    team_info = self._json_data.get('team')
    if not (team_info and team_info.get('id')):
        return None
    return self._client.team(id=team_info.get('id'))
Team to which the scope is assigned .
4,336
def parts(self, *args, **kwargs):
    """Retrieve parts belonging to this scope (scoped to its bucket)."""
    bucket_id = self.bucket.get('id')
    return self._client.parts(*args, bucket=bucket_id, **kwargs)
Retrieve parts belonging to this scope .
4,337
def part(self, *args, **kwargs):
    """Retrieve a single part belonging to this scope (scoped to its bucket)."""
    bucket_id = self.bucket.get('id')
    return self._client.part(*args, bucket=bucket_id, **kwargs)
Retrieve a single part belonging to this scope .
4,338
def create_model(self, parent, name, multiplicity=Multiplicity.ZERO_MANY):
    """Create a single part model in this scope, delegating to the client."""
    client = self._client
    return client.create_model(parent, name, multiplicity=multiplicity)
Create a single part model in this scope .
4,339
def model(self, *args, **kwargs):
    """Retrieve a single model belonging to this scope (scoped to its bucket)."""
    bucket_id = self.bucket.get('id')
    return self._client.model(*args, bucket=bucket_id, **kwargs)
Retrieve a single model belonging to this scope .
4,340
def activities(self, *args, **kwargs):
    """Retrieve activities belonging to this scope.

    WIM 1 filters on ``scope``; WIM 2+ filters on ``scope_id``.
    """
    on_wim1 = self._client.match_app_version(label='wim', version='<2.0.0', default=True)
    if on_wim1:
        return self._client.activities(*args, scope=self.id, **kwargs)
    return self._client.activities(*args, scope_id=self.id, **kwargs)
Retrieve activities belonging to this scope .
4,341
def create_activity(self, *args, **kwargs):
    """Create a new activity belonging to this scope.

    WIM 1 attaches to the scope's process; WIM 2+ to its workflow root.
    """
    on_wim1 = self._client.match_app_version(label='wim', version='<2.0.0', default=True)
    root = self.process if on_wim1 else self.workflow_root
    return self._client.create_activity(root, *args, **kwargs)
Create a new activity belonging to this scope .
4,342
def create_service(self, *args, **kwargs):
    """Create a service attached to the current scope."""
    kwargs_with_scope = dict(kwargs, scope=self.id)
    return self._client.create_service(*args, **kwargs_with_scope)
Create a service to current scope .
4,343
def service(self, *args, **kwargs):
    """Retrieve a single service belonging to this scope."""
    kwargs_with_scope = dict(kwargs, scope=self.id)
    return self._client.service(*args, **kwargs_with_scope)
Retrieve a single service belonging to this scope .
4,344
def service_execution(self, *args, **kwargs):
    """Retrieve a single service execution belonging to this scope."""
    kwargs_with_scope = dict(kwargs, scope=self.id)
    return self._client.service_execution(*args, **kwargs_with_scope)
Retrieve a single service execution belonging to this scope .
4,345
def members(self, is_manager=None):
    """Retrieve the active members of the scope.

    Consistency fix: both branches now use ``dict.get('is_active', False)``,
    so members lacking an ``is_active`` key no longer raise ``KeyError``
    (previously only the manager branch was tolerant).

    :param is_manager: when truthy, restrict to active managers;
        otherwise return all active members.
    :return: list of member dicts
    """
    all_members = self._json_data['members']
    if not is_manager:
        return [member for member in all_members if member.get('is_active', False)]
    return [member for member in all_members
            if member.get('is_active', False) and member.get('is_manager', False)]
Retrieve members of the scope .
4,346
def add_member(self, member):
    """Add a single member to the scope."""
    self._update_scope_project_team(select_action='add_member',
                                    user=member, user_type='member')
Add a single member to the scope .
4,347
def remove_member(self, member):
    """Remove a single member from the scope."""
    self._update_scope_project_team(select_action='remove_member',
                                    user=member, user_type='member')
Remove a single member from the scope.
4,348
def add_manager(self, manager):
    """Add a single manager to the scope."""
    self._update_scope_project_team(select_action='add_manager',
                                    user=manager, user_type='manager')
Add a single manager to the scope .
4,349
def remove_manager(self, manager):
    """Remove a single manager from the scope."""
    self._update_scope_project_team(select_action='remove_manager',
                                    user=manager, user_type='manager')
Remove a single manager from the scope.
4,350
def _update_scope_project_team(self, select_action, user, user_type):
    """Update the Project Team of the Scope: add or remove members/managers.

    :param select_action: backend action, e.g. 'add_member', 'remove_manager'
    :param user: username (string) of the user to add or remove
    :param user_type: 'member' or 'manager'; used in error messages only
    :raises APIError: when the backend rejects the update
    :raises NotFoundError: when the username does not exist
    :raises TypeError: when *user* is not a string
    """
    if isinstance(user, str):
        users = self._client._retrieve_users()
        # resolve the username to the backend user record (its pk is needed below)
        manager_object = next((item for item in users['results'] if item["username"] == user), None)
        if manager_object:
            url = self._client._build_url('scope', scope_id=self.id)
            r = self._client._request('PUT', url, params={'select_action': select_action},
                                      data={'user_id': manager_object['pk']})
            if r.status_code != requests.codes.ok:
                raise APIError("Could not {} {} in Scope".format(select_action.split('_')[0], user_type))
        else:
            raise NotFoundError("User {} does not exist".format(user))
    else:
        raise TypeError("User {} should be defined as a string".format(user))
Update the Project Team of the Scope . Updates include addition or removing of managers or members .
4,351
def clone(self, *args, **kwargs):
    """Clone the current scope via the client."""
    kwargs_with_source = dict(kwargs, source_scope=self)
    return self._client.clone_scope(*args, **kwargs_with_source)
Clone current scope .
4,352
def name(self) -> str:
    """Friendly name for the stop place or platform.

    Platforms get a " Platform <code>" suffix, using the public code when
    present, otherwise the trailing segment of the place id.
    """
    base = self._data['name']
    if not self.is_platform:
        return base
    code = self._data["publicCode"]
    if code:
        return base + " Platform " + code
    return base + " Platform " + self.place_id.split(':')[-1]
Friendly name for the stop place or platform
4,353
def remove(self, value, _sa_initiator=None):
    """Remove an item by value, consulting the keyfunc for the key.

    :raises sa_exc.InvalidRequestError: when the collection does not hold
        *value* under the computed key (e.g. the key was derived from a
        mutable or flush-populated property).
    """
    key = self.keyfunc(value)
    if not self.__contains__(key) or value not in self[key]:
        raise sa_exc.InvalidRequestError(
            "Can not remove '%s': collection holds '%s' for key '%s'. "
            "Possible cause: is the MappedCollection key function "
            "based on mutable properties or properties that only obtain "
            "values after flush?" % (value, self[key], key))
    # go through __getitem__ with the SQLAlchemy initiator so events fire
    self.__getitem__(key, _sa_initiator).remove(value)
Remove an item by value consulting the keyfunc for the key .
4,354
def progressive(image_field, alt_text=''):
    """Used as a Jinja2 filter; returns a safe HTML chunk for the field.

    Walks the configured Django template engines looking for a Jinja2
    environment and renders the progressive-image template with it.
    Returns '' when no Jinja2 engine is configured.

    :raises ValueError: when *image_field* is not an ImageField file
    """
    if not isinstance(image_field, ImageFieldFile):
        raise ValueError('"image_field" argument must be an ImageField.')
    for engine in engines.all():
        # only Jinja2-backed engines expose an `env` Environment attribute
        if isinstance(engine, BaseEngine) and hasattr(engine, 'env'):
            env = engine.env
            if isinstance(env, Environment):
                context = render_progressive_field(image_field, alt_text)
                template = env.get_template('progressiveimagefield/render_field.html')
                rendered = template.render(**context)
                return Markup(rendered)
    return ''
Used as a Jinja2 filter this function returns a safe HTML chunk .
4,355
def get_form(self, form_class=None):
    """Return the form; when the task is only being saved, relax the
    required constraint on all of its fields."""
    form = super().get_form(form_class)
    if not self._save:
        return form
    make_form_or_formset_fields_not_required(form)
    return form
If the task was only saved treat all form fields as not required .
4,356
def save_task(self):
    """Transition the task back to the ASSIGNED state and persist it."""
    active_task = self.request.activation.task
    active_task.status = STATUS.ASSIGNED
    active_task.save()
Transition to save the task and return to ASSIGNED state .
4,357
def activation_done(self, *args, **kwargs):
    """Complete the activation, or only save, depending on the form submit."""
    if not self._save:
        super().activation_done(*args, **kwargs)
        return
    self.save_task()
Complete the activation or save only depending on form submit .
4,358
def niplot():
    """Extend the native matplotlib keyboard bindings.

    Connects handlers so the arrow keys pan the view, +/- zoom, and the
    scroll wheel zooms under the cursor.

    Cleanup: the connection id was previously stored in a repeatedly
    overwritten, unused local ``cid``; the return values are now dropped.
    """
    fig = gcf()
    fig.canvas.mpl_connect('key_press_event', on_key_press)
    fig.canvas.mpl_connect('key_release_event', on_key_release)
    fig.canvas.mpl_connect('scroll_event', zoom)
This script extends the native matplotlib keyboard bindings. It allows using the up, down, left and right keys to move the visualization window. Zooming can be performed using the + and - keys. Finally, the scroll wheel can be used to zoom under the cursor.
4,359
def acquire_subsamples_gp1(input_data, file_name=None):
    """Build a 3x2 grid of Bokeh figures showing how the sampling frequency
    affects an ECG signal.

    The 4000 Hz input is linearly interpolated down to lower sampling rates
    and roughly one second of each is drawn.

    NOTE(review): *file_name* is unused, and ``list_figures`` is built but
    never returned or shown from this function -- confirm the caller (or a
    missing gridplot/show call) consumes it.
    """
    fs_orig = 4000
    nbr_samples_orig = len(input_data)
    data_interp = {"4000": {}}
    data_interp["4000"]["data"] = input_data
    data_interp["4000"]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_orig)
    time_orig = data_interp["4000"]["time"]
    data_orig = data_interp["4000"]["data"]
    # resample to each target rate by linear interpolation on the time axis
    for sample_rate in [3000, 1000, 500, 200, 100, 50, 20]:
        fs_str = str(sample_rate)
        nbr_samples_interp = int((nbr_samples_orig * sample_rate) / fs_orig)
        data_interp[fs_str] = {}
        data_interp[fs_str]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig, nbr_samples_interp)
        data_interp[fs_str]["data"] = numpy.interp(data_interp[fs_str]["time"], time_orig, data_orig)
    list_figures = []
    # two figures per row; a new row starts on every even iteration
    for iter_nbr, sample_rate in enumerate(["4000", "3000", "1000", "500", "200", "100"]):
        if iter_nbr == 0 or iter_nbr % 2 == 0:
            list_figures.append([])
        list_figures[-1].append(figure(x_axis_label='Time (s)', y_axis_label='Raw Data',
                                       title="Sampling Frequency: " + sample_rate + " Hz",
                                       **opensignals_kwargs("figure")))
        # draw only the first second of data (sample_rate samples)
        list_figures[-1][-1].line(data_interp[sample_rate]["time"][:int(sample_rate)],
                                  data_interp[sample_rate]["data"][:int(sample_rate)],
                                  **opensignals_kwargs("line"))
Function invoked for plotting a grid - plot with 3x2 format showing the differences in ECG signals accordingly to the chosen sampling frequency .
4,360
def download(link, out):
    """Download *link* and write the raw response body to the file *out*."""
    response = requests.get(link)
    with open(out, 'wb') as outfile:
        outfile.write(response.content)
Downloading data from websites, such as previously acquired physiological signals, is an extremely relevant task, taking into consideration that without data, processing cannot take place.
4,361
def argrelmin(data, axis=0, order=1, mode='clip'):
    """Calculate the relative minima of *data*.

    Thin wrapper around ``argrelextrema`` with ``np.less`` as comparator.
    """
    return argrelextrema(data, np.less, axis=axis, order=order, mode=mode)
Calculate the relative minima of data .
4,362
def argrelmax(data, axis=0, order=1, mode='clip'):
    """Calculate the relative maxima of *data*.

    Thin wrapper around ``argrelextrema`` with ``np.greater`` as comparator.
    """
    return argrelextrema(data, np.greater, axis=axis, order=order, mode=mode)
Calculate the relative maxima of data .
4,363
def peaks(signal, tol=None):
    """Detect the time positions of all peaks of *signal* above *tol*.

    :param tol: threshold below which peaks are ignored; defaults to the
        signal minimum (i.e. no filtering)
    :return: array of peak indices
    """
    if tol is None:
        tol = min(signal)
    clipped = clip(signal, tol, signal.max())
    return argrelmax(clipped)[0]
This function detects all the peaks of a signal and returns those time positions . To reduce the amount of peaks detected a threshold is introduced so only the peaks above that value are considered .
4,364
def get_project(url=None, username=None, password=None, token=None, scope=None,
                scope_id=None, env_filename=None, status=ScopeStatus.ACTIVE):
    """Retrieve and return the KE-chain project (scope) to be used throughout an app.

    Credentials come either from the environment (when KECHAIN_FORCE_ENV_USE
    is set, or when no explicit arguments are given) or from the arguments.

    Bug fix: the environment pre-check previously tested KECHAIN_PASSWORD
    twice; it now checks KECHAIN_USERNAME *and* KECHAIN_PASSWORD as the
    error message promises.

    :raises ClientError: when insufficient connection information is available
    """
    force_env = env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False)
    if force_env:
        if not os.getenv(kecenv.KECHAIN_URL):
            raise ClientError(
                "Error: KECHAIN_URL should be provided as environment variable (use of env vars is enforced)")
        if not (os.getenv(kecenv.KECHAIN_TOKEN) or
                (os.getenv(kecenv.KECHAIN_USERNAME) and os.getenv(kecenv.KECHAIN_PASSWORD))):
            raise ClientError("Error: KECHAIN_TOKEN or KECHAIN_USERNAME and KECHAIN_PASSWORD should be provided as "
                              "environment variable(s) (use of env vars is enforced)")
        if not (os.getenv(kecenv.KECHAIN_SCOPE) or os.getenv(kecenv.KECHAIN_SCOPE_ID)):
            raise ClientError("Error: KECHAIN_SCOPE or KECHAIN_SCOPE_ID should be provided as environment variable "
                              "(use of env vars is enforced)")
    if force_env or not any((url, username, password, token, scope, scope_id)):
        client = Client.from_env(env_filename=env_filename)
        scope_id = env(kecenv.KECHAIN_SCOPE_ID, default=None)
        scope = env(kecenv.KECHAIN_SCOPE, default=None)
        status = env(kecenv.KECHAIN_SCOPE_STATUS, default=None)
    elif url and ((username and password) or token) and (scope or scope_id):
        client = Client(url=url)
        client.login(username=username, password=password, token=token)
    else:
        raise ClientError("Error: insufficient arguments to connect to KE-chain. "
                          "See documentation of `pykechain.get_project()`")
    if scope_id:
        return client.scope(pk=scope_id, status=status)
    return client.scope(name=scope, status=status)
Retrieve and return the KE - chain project to be used throughout an app .
4,365
def _rebuild_key_ids ( self ) : self . _key_ids = collections . defaultdict ( list ) for i , x in enumerate ( self . _pairs ) : self . _key_ids [ x [ 0 ] ] . append ( i )
Rebuild the internal key to index mapping .
4,366
def iteritems(self):
    """Iterate over all non-duplicate keys with their values.

    Only the first occurrence of each duplicated key is yielded.
    """
    seen = set()
    for key, value in self._pairs:
        if key in seen:
            continue
        seen.add(key)
        yield key, value
Iterator across all the non - duplicate keys and their values . Only yields the first key of duplicates .
4,367
def _update(self, resource, update_dict=None, params=None, **kwargs):
    """PUT an update to *resource* and refresh this object on success.

    :raises APIError: when the backend does not answer 200 OK.
    """
    url = self._client._build_url(resource, **kwargs)
    response = self._client._request('PUT', url, json=update_dict, params=params)
    if response.status_code == requests.codes.ok:
        self.refresh()
        return
    raise APIError("Could not update {} ({})".format(
        self.__class__.__name__, response.json().get('results')))
Update the object .
4,368
def members(self, role=None):
    """Members of the team, optionally filtered by role.

    :raises IllegalArgumentError: for a role outside `TeamRoles`.
    """
    if role and role not in TeamRoles.values():
        raise IllegalArgumentError(
            "role should be one of `TeamRoles` {}, got '{}'".format(TeamRoles.values(), role))
    member_list = self._json_data.get('members')
    if not role:
        return member_list
    return [teammember for teammember in member_list if teammember.get('role') == role]
Members of the team .
4,369
def scopes(self, **kwargs):
    """Scopes associated to this team."""
    kwargs_with_team = dict(kwargs, team=self.id)
    return self._client.scopes(**kwargs_with_team)
Scopes associated to the team .
4,370
def insert_hash(path: Path, content: Union[str, bytes], *,
                hash_length=7, hash_algorithm=hashlib.md5):
    """Return *path* with a content digest spliced in after the first dot of its name.

    :param path: path whose file name is to be decorated
    :param content: data (str or bytes) to hash; str is UTF-8 encoded first
    :param hash_length: number of hex digits of the digest to keep
    :param hash_algorithm: hashlib constructor used to compute the digest
    :return: a new Path with the digest inserted (appended when the name has no dot)
    """
    data = content.encode() if isinstance(content, str) else content
    digest = hash_algorithm(data).hexdigest()[:hash_length]
    name = before = path.name
    if '.' in name:
        before, _, after = name.partition('.')
        renamed = f'{before}.{digest}.{after}'
    else:
        renamed = f'{name}.{digest}'
    return path.with_name(renamed)
Insert a hash based on the content into the path after the first dot .
4,371
def options(cls):
    """Return a sorted list of ``(value, name)`` tuples for every non-dunder attribute defined on the class."""
    pairs = [(value, name) for name, value in cls.__dict__.items()
             if not name.startswith('__')]
    pairs.sort()
    return pairs
Provide a sorted list of options .
4,372
def navbar(self):
    """Provide access to the Navigation Bar of the current browser window."""
    window = BaseWindow(self.selenium, self.selenium.current_window_handle)
    # The nav bar element only exists in the browser chrome, not in content.
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        element = self.selenium.find_element(*self._nav_bar_locator)
    return NavBar(window, element)
Provide access to the Navigation Bar .
4,373
def notification(self):
    """Return the currently displayed notification, or None when nothing is shown."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        # First look for a directly anchored notification popup.
        try:
            anchor = self.selenium.find_element(*self._notification_locator)
            return BaseNotification.create(self, anchor)
        except NoSuchElementException:
            pass
        # Otherwise scan the app-menu for the first visible notification entry.
        try:
            candidates = self.selenium.find_elements(*self._app_menu_notification_locator)
            visible = next(c for c in candidates if c.is_displayed())
            return BaseNotification.create(self, visible)
        except StopIteration:
            pass
        return None
Provide access to the currently displayed notification .
4,374
def wait_for_notification(self, notification_class=BaseNotification):
    """Wait for the specified notification type to be displayed.

    :param notification_class: expected notification class; pass a falsy value
        to instead wait until no notification is shown
    :return: the displayed notification, or None when waiting for absence
    """
    if not notification_class:
        self.wait.until(
            lambda _: self.notification is None,
            message="Unexpected notification shown.",
        )
        return None
    message = ("No notification was shown."
               if notification_class is BaseNotification
               else "{0} was not shown.".format(notification_class.__name__))
    self.wait.until(
        lambda _: isinstance(self.notification, notification_class),
        message=message,
    )
    return self.notification
Wait for the specified notification to be displayed .
4,375
def open_window(self, private=False):
    """Open a new browser window via the File menu.

    :param private: open a private-browsing window when True
    :return: the newly opened browser window (waits for it to appear)
    """
    handles_before = self.selenium.window_handles
    self.switch_to()
    if private:
        item_locator = self._file_menu_private_window_locator
    else:
        item_locator = self._file_menu_new_window_button_locator
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.selenium.find_element(*self._file_menu_button_locator).click()
        self.selenium.find_element(*item_locator).click()
    return self.wait.until(
        expected.new_browser_window_is_opened(self.selenium, handles_before),
        message="No new browser window opened",
    )
Open a new browser window .
4,376
def to_serializable_dict(self, attrs_to_serialize=None, rels_to_expand=None,
                         rels_to_serialize=None, key_modifications=None):
    """Alias for ``todict``; forwards all serialization options unchanged."""
    forwarded = dict(
        attrs_to_serialize=attrs_to_serialize,
        rels_to_expand=rels_to_expand,
        rels_to_serialize=rels_to_serialize,
        key_modifications=key_modifications,
    )
    return self.todict(**forwarded)
An alias for todict
4,377
def serialize_attrs(self, *args):
    """Convert this instance to a dict containing only the requested attributes.

    Attributes that do not exist on the class or are forbidden for
    serialization are skipped; list-like values are copied into plain lists.
    """
    cls = type(self)
    result = {}
    for name in args:
        # NOTE(review): forbidden list is re-queried per attribute, matching original behavior.
        if not hasattr(cls, name) or name in cls.attrs_forbidden_for_serialization():
            continue
        value = getattr(self, name)
        result[name] = list(value) if is_list_like(value) else value
    return result
Converts an instance to a dictionary with only the specified attributes as keys.
4,378
def fundamental_frequency(s, FS):
    """Estimate the fundamental frequency of signal *s* sampled at *FS* Hz.

    The signal is de-meaned, its spectrum is computed, and the lowest
    "big peak" above 0.5 Hz is taken as the fundamental; 0 is returned
    when no peak is found.
    NOTE(review): relies on module-level helpers `mean`, `plotfft`,
    `find` and `bigPeaks` — confirm they accept array-like input.
    """
    centered = s - mean(s)
    freqs, spectrum = plotfft(centered, FS, doplot=False)
    # Keep only the positive-frequency half, dropping the DC bin.
    spectrum = spectrum[1:int(len(spectrum) / 2)]
    freqs = freqs[1:int(len(freqs) / 2)]
    cond = find(freqs > 0.5)[0]
    peaks = bigPeaks(spectrum[cond:], 0)
    if peaks == []:
        return 0
    peaks = peaks + cond
    return freqs[min(peaks)]
Compute fundamental frequency along the specified axes .
4,379
def max_frequency(sig, FS):
    """Return the frequency below which 95% of the cumulative spectral magnitude lies.

    NOTE(review): relies on module-level helpers `plotfft`, `cumsum` and `find`.
    """
    freqs, spectrum = plotfft(sig, FS, doplot=False)
    cumulative = cumsum(spectrum)
    idx = find(cumulative > cumulative[-1] * 0.95)[0]
    return freqs[idx]
Compute max frequency along the specified axes .
4,380
def median_frequency(sig, FS):
    """Return the frequency below which 50% of the cumulative spectral magnitude lies.

    NOTE(review): relies on module-level helpers `plotfft`, `cumsum` and `find`.
    """
    freqs, spectrum = plotfft(sig, FS, doplot=False)
    cumulative = cumsum(spectrum)
    idx = find(cumulative > cumulative[-1] * 0.50)[0]
    return freqs[idx]
Compute median frequency along the specified axes .
4,381
def call(subcommand, args):
    """Dispatch *args* to the `NAppsAPI` method named by *subcommand*, normalizing the NApp ids first."""
    args['<napp>'] = parse_napps(args['<napp>'])
    handler = getattr(NAppsAPI, subcommand)
    handler(args)
Call a subcommand passing the args .
4,382
def parse_napp(napp_id):
    """Split a NApp id of the form ``username/napp_name[:version]`` into its parts.

    :return: tuple ``(username, napp_name, version)``; version is None when absent
    :raises KytosException: when *napp_id* does not match the expected form
    """
    pattern = re.compile(
        r'([a-zA-Z][a-zA-Z0-9_]{2,})/([a-zA-Z][a-zA-Z0-9_]{2,}):?(.+)?')
    match = pattern.fullmatch(napp_id)
    if match is None:
        raise KytosException(
            '"{}" NApp has not the form username/napp_name[:version].'.format(napp_id))
    return match.groups()
Convert a napp_id into a tuple of username, napp name and version.
4,383
def _generate_notebook_header(notebook_object, notebook_type, notebook_title="Notebook Title", tags="tags", difficulty_stars=1, notebook_description="Notebook Description"):
    """Generate the generic header cells of a notebook and append them to *notebook_object*.

    Fills the category header template, the tags/difficulty shield row, the
    description cell, a separator and the example cells.

    :param notebook_object: nbformat notebook object whose "cells" list is appended to
    :param notebook_type: key into NOTEBOOK_KEYS selecting the header image/color
    :param notebook_title: title text substituted into the header template
    :param tags: iterable of tag strings shown in the shield row
    :param difficulty_stars: number of filled stars (1-5) in the difficulty widget
    :param notebook_description: markdown text of the description cell
    """
    # Substitute the category-specific image/color placeholders and the title.
    header_temp = HEADER_ALL_CATEGORIES.replace("header_image_color_i", "header_image_color_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace("header_image_i", "header_image_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace("Notebook Title", notebook_title)
    notebook_object["cells"].append(nb.v4.new_markdown_cell(header_temp, **{"metadata": {"tags": ["intro_info_title"]}}))
    # Build the tags cell: tags are joined with the cloud entity as separator.
    tags_and_diff = HEADER_TAGS.replace('<td class="shield_right" id="tags">tags</td>', '<td class="shield_right" id="tags">' + "&#9729;".join(tags) + '</td>')
    # Mark the first `difficulty_stars` star icons as checked, the rest unchecked.
    for star in range(1, 6):
        if star <= difficulty_stars:
            tags_and_diff = tags_and_diff.replace("fa fa-star " + str(star), "fa fa-star " "checked")
        else:
            tags_and_diff = tags_and_diff.replace("fa fa-star " + str(star), "fa fa-star")
    notebook_object["cells"].append(nb.v4.new_markdown_cell(tags_and_diff, **{"metadata": {"tags": ["intro_info_tags"]}}))
    notebook_object["cells"].append(nb.v4.new_markdown_cell(notebook_description, **{"metadata": {"tags": ["test"]}}))
    # Trailing boilerplate: separator plus markdown and code example cells.
    notebook_object["cells"].append(nb.v4.new_markdown_cell(SEPARATOR))
    notebook_object["cells"].append(nb.v4.new_markdown_cell(MD_EXAMPLES))
    notebook_object["cells"].append(nb.v4.new_code_cell(CODE_EXAMPLES))
Internal function that is used for generation of the generic notebooks header .
4,384
def _request(self, method, path, params=None):
    """Perform an HTTP request against the device and return the parsed JSON response.

    Returns ``{'status': 'error'}`` on HTTP errors or empty responses and
    ``{'status': 'offline'}`` on timeouts or connection problems.
    NOTE(review): an unsupported *method* leaves `response` unbound and raises
    UnboundLocalError — callers only pass GET/POST/PUT/DELETE.
    """
    url = self._base_url + path
    try:
        if method == 'GET':
            response = requests.get(url, timeout=TIMEOUT)
        elif method == "POST":
            response = requests.post(url, params, timeout=TIMEOUT)
        elif method == "PUT":
            response = requests.put(url, params, timeout=TIMEOUT)
        elif method == "DELETE":
            response = requests.delete(url, timeout=TIMEOUT)
        if response:
            return response.json()
        return {'status': 'error'}
    except requests.exceptions.HTTPError:
        return {'status': 'error'}
    except requests.exceptions.Timeout:
        return {'status': 'offline'}
    except requests.exceptions.RequestException:
        return {'status': 'offline'}
Make the actual request and returns the parsed response .
4,385
def post_worker_init(worker):
    """Gunicorn hook: print a runserver-style startup banner after a worker boots."""
    quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
    urls = ', '.join('http://{0}/'.format(bind) for bind in worker.cfg.bind)
    banner = (
        "Django version {djangover}, Gunicorn version {gunicornver}, "
        "using settings {settings!r}\n"
        "Starting development server at {urls}\n"
        "Quit the server with {quit_command}.\n"
    ).format(
        djangover=django.get_version(),
        gunicornver=gunicorn.__version__,
        settings=os.environ.get('DJANGO_SETTINGS_MODULE'),
        urls=urls,
        quit_command=quit_command,
    )
    sys.stdout.write(banner)
Hook into Gunicorn to display message after launching .
4,386
def value(self):
    """Return a short ``[Attachment: <filename>]`` description of the stored value, or None when unset."""
    raw = self._json_data.get('value')
    if not raw:
        return None
    return "[Attachment: {}]".format(raw.split('/')[-1])
Retrieve the data value of this attachment .
4,387
def filename(self):
    """Return only the file name of the attachment (no path), or None when no attachment is set."""
    if not self.value:
        return None
    raw = self._json_data.get('value')
    return raw.split('/')[-1] if raw else None
Filename of the attachment without the full attachment path .
4,388
def upload ( self , data , ** kwargs ) : try : import matplotlib . figure if isinstance ( data , matplotlib . figure . Figure ) : self . _upload_plot ( data , ** kwargs ) return except ImportError : pass if isinstance ( data , str ) : with open ( data , 'rb' ) as fp : self . _upload ( fp ) else : self . _upload_json ( data , ** kwargs )
Upload a file to the attachment property .
4,389
def save_as ( self , filename ) : with open ( filename , 'w+b' ) as f : for chunk in self . _download ( ) : f . write ( chunk )
Download the attachment to a file .
4,390
def devpiserver_cmdline_run(xom):
    """devpi-server hook: resolve the 'semantic-ui' theme name to its installed package path."""
    if xom.config.args.theme != 'semantic-ui':
        return
    xom.config.args.theme = resource_filename('devpi_semantic_ui', '')
    xom.log.info("Semantic UI Theme loaded")
Load theme when theme parameter is semantic - ui .
4,391
def is_on(self, channel):
    """Return the cached on/off state for *channel*; unknown channels report False."""
    return self._is_on.get(channel, False)
Check if a switch is turned on
4,392
def turn_on(self, channel, callback=None):
    """Send a relay-on message for *channel*; *callback* (optional) fires when the message is sent."""
    def _noop():
        pass
    if callback is None:
        callback = _noop
    message = velbus.SwitchRelayOnMessage(self._address)
    message.relay_channels = [channel]
    self._controller.send(message, callback)
Turn on switch .
4,393
def turn_off(self, channel, callback=None):
    """Send a relay-off message for *channel*; *callback* (optional) fires when the message is sent."""
    def _noop():
        pass
    if callback is None:
        callback = _noop
    message = velbus.SwitchRelayOffMessage(self._address)
    message.relay_channels = [channel]
    self._controller.send(message, callback)
Turn off switch .
4,394
def read_dew_point(self, t=None, rh=None):
    """Compute the dew point via the Magnus formula; with t and rh provided, does not access the hardware.

    :param t: (optional) temperature in degrees C; read from the sensor when None
    :param rh: (optional) relative humidity in percent; read from the sensor when None
    :return: dew point temperature in degrees C
    """
    if t is None:
        t, rh = self.read_t(), None
    if rh is None:
        rh = self.read_rh(t)
    # Magnus coefficients differ over water (t >= 0) and over ice.
    phase = 'water' if t >= 0 else 'ice'
    tn = self.c.tn[phase]
    m = self.c.m[phase]
    log_term = math.log(rh / 100.0)
    return (tn * (log_term + (m * t) / (tn + t)) /
            (m - log_term - m * t / (tn + t)))
With t and rh provided does not access the hardware .
4,395
def _put_options(self, options_list):
    """Persist a new list of value choices for this property to KE-chain.

    Validates the merged options against the JSON schema before sending;
    the local cache is only updated after the server accepts the change.

    :raises APIError: when the server rejects the update
    """
    new_options = self._options.copy()
    new_options["value_choices"] = options_list
    validate(new_options, options_json_schema)
    url = self._client._build_url('property', property_id=self.id)
    response = self._client._request('PUT', url, json={'options': new_options})
    if response.status_code == 200:
        self._options = new_options
    else:
        raise APIError("Could not update property value. Response: {}".format(str(response)))
Save the options to KE - chain .
4,396
def make_form_or_formset_fields_not_required(form_or_formset):
    """Mark every field on a Form — or on each form of a FormSet — as not required."""
    if isinstance(form_or_formset, BaseFormSet):
        target_forms = list(form_or_formset)
    else:
        target_forms = [form_or_formset]
    for single in target_forms:
        make_form_fields_not_required(single)
Take a Form or FormSet and set all fields to not required .
4,397
def scope_id(self):
    """Return the id of the scope this activity belongs to, fetching it lazily when not cached.

    :raises NotFoundError: when the activity has no scope even after re-fetching
    """
    if self.scope:
        return self.scope.get('id')
    # Scope not cached locally: re-retrieve this activity with only the needed fields.
    fetched = self._client.activity(pk=self.id, fields="id,scope")
    if fetched.scope and fetched.scope.get('id'):
        self.scope = fetched.scope
        return self.scope.get('id')
    raise NotFoundError(
        "This activity '{}'({}) does not belong to a scope, something is weird!".format(self.name, self.id))
ID of the scope this Activity belongs to .
4,398
def is_rootlevel(self):
    """Return True when this activity's container equals the project's root container."""
    data = self._json_data
    container = data.get('container')
    return bool(container) and container == data.get('root_container')
Determine if Activity is at the root level of a project .
4,399
def is_configured(self):
    """Return True when at least one part model is associated with this activity."""
    return bool(self.parts(category=Category.MODEL, limit=1))
Determine if the Activity is configured with input and output properties .