idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
14,400
def create_tags(self, entry):
    """Create (or fetch) a Tag for every comma-separated name in entry.tag_string
    and attach each one to the entry's tags relation.  Names are lowercased
    and stripped before lookup.
    """
    names = (piece.lower().strip() for piece in entry.tag_string.split(','))
    for name in names:
        tag, _created = self.get_or_create(name=name)
        entry.tags.add(tag)
Inspects an Entry instance and builds associated Tag objects based on the values in the Entry's tag_string.
14,401
def _create_date_slug ( self ) : if not self . pk : d = utc_now ( ) elif self . published and self . published_on : d = self . published_on elif self . updated_on : d = self . updated_on self . date_slug = u"{0}/{1}" . format ( d . strftime ( "%Y/%m/%d" ) , self . slug )
Prefixes the slug with the published_on date .
14,402
def _render_content ( self ) : if self . content_format == "rst" and docutils_publish is not None : doc_parts = docutils_publish ( source = self . raw_content , writer_name = "html4css1" ) self . rendered_content = doc_parts [ 'fragment' ] elif self . content_format == "rs" and docutils_publish is None : raise RuntimeError ( "Install docutils to pubilsh reStructuredText" ) elif self . content_format == "md" and markdown is not None : self . rendered_content = markdown ( self . raw_content ) elif self . content_format == "md" and markdown is None : raise RuntimeError ( "Install Markdown to pubilsh markdown" ) else : self . rendered_content = self . raw_content
Renders the content according to the content_format .
14,403
# Persist the Entry: regenerate slug and date-slug, re-render the content,
# and -- on the first save where published is True but published_on is not
# yet set -- fire the entry_published signal AFTER the row has been written,
# so receivers see the saved state.  _set_published's return value decides
# whether the signal is actually sent.
def save ( self , * args , ** kwargs ) : self . _create_slug ( ) self . _create_date_slug ( ) self . _render_content ( ) send_published_signal = False if self . published and self . published_on is None : send_published_signal = self . _set_published ( ) super ( Entry , self ) . save ( * args , ** kwargs ) if send_published_signal : entry_published . send ( sender = self , entry = self )
Auto-generate a slug from the name.
14,404
def get_absolute_url_with_date(self):
    """Build the entry's detail URL from its publication date and slug.

    Published entries produce /YYYY/MM/DD/slug arguments; unpublished ones
    get only the slug.  When USE_TZ is on, the stored timestamp (treated as
    UTC) is converted to the project's TIME_ZONE first so the URL date
    matches local time.
    """
    pub_date = self.published_on
    if pub_date and settings.USE_TZ:
        naive = make_naive(pub_date, pytz.utc)
        pub_date = pytz.timezone(settings.TIME_ZONE).localize(naive)
    if pub_date:
        args = [pub_date.strftime(fmt) for fmt in ("%Y", "%m", "%d")]
        args.append(self.slug)
    else:
        args = [self.slug]
    return reverse('blargg:entry_detail', args=args)
URL based on the entry's date & slug.
14,405
def tag_list(self):
    """Return this Entry's tags as a sorted list, dropping empty names."""
    pieces = (piece.strip() for piece in self.tag_string.split(","))
    return sorted(piece for piece in pieces if piece)
Return a plain python list containing all of this Entry's tags.
14,406
def heartbeat(self):
    """Return True when the API's /heartbeat endpoint reports it is up."""
    response = self.session.get(urljoin(self.base_url, 'heartbeat'))
    return response.json()['ok']
Check The API Is Up .
14,407
def venue_healthcheck(self):
    """Return True when the TESTEX venue heartbeat reports the venue is up."""
    endpoint = urljoin(self.base_url, 'venues/TESTEX/heartbeat')
    return self.session.get(endpoint).json()['ok']
Check A Venue Is Up .
14,408
def venue_stocks(self):
    """Return the JSON listing of stocks tradable on this venue."""
    endpoint = urljoin(self.base_url,
                       'venues/{0}/stocks'.format(self.venue))
    return self.session.get(endpoint).json()
List the stocks available for trading on the venue .
14,409
def orderbook_for_stock(self, stock):
    """Fetch the current orderbook JSON for *stock* on this venue."""
    fragment = 'venues/{venue}/stocks/{stock}'.format(
        venue=self.venue, stock=stock)
    return self.session.get(urljoin(self.base_url, fragment)).json()
Get the orderbook for a particular stock .
14,410
def place_new_order(self, stock, price, qty, direction, order_type):
    """POST an order for *stock* and return the venue's JSON response.

    order_type is forwarded as the API's camelCase "orderType" field.
    NOTE(review): valid direction/order_type values are defined by the
    venue API -- confirm against its documentation.
    """
    fragment = 'venues/{venue}/stocks/{stock}/orders'.format(
        venue=self.venue, stock=stock)
    payload = {
        "stock": stock,
        "price": price,
        "venue": self.venue,
        "account": self.account,
        "qty": qty,
        "direction": direction,
        "orderType": order_type,
    }
    response = self.session.post(urljoin(self.base_url, fragment),
                                 json=payload)
    return response.json()
Place an order for a stock .
14,411
def status_for_order(self, order_id, stock):
    """Fetch the status JSON of an existing order."""
    fragment = 'venues/{venue}/stocks/{stock}/orders/{order_id}'.format(
        venue=self.venue, stock=stock, order_id=order_id)
    return self.session.get(urljoin(self.base_url, fragment)).json()
Status For An Existing Order
14,412
def cancel_order(self, order_id, stock):
    """Cancel an existing order via HTTP DELETE and return the response JSON."""
    fragment = 'venues/{venue}/stocks/{stock}/orders/{order_id}'.format(
        venue=self.venue, stock=stock, order_id=order_id)
    return self.session.delete(urljoin(self.base_url, fragment)).json()
Cancel An Order
14,413
def status_for_all_orders(self):
    """Fetch the status of every order for this account on the venue."""
    fragment = 'venues/{venue}/accounts/{account}/orders'.format(
        venue=self.venue, account=self.account)
    return self.session.get(urljoin(self.base_url, fragment)).json()
Status for all orders
14,414
def status_for_all_orders_in_a_stock(self, stock):
    """Fetch the status of this account's orders restricted to one stock."""
    fragment = ('venues/{venue}/accounts/{account}/stocks/{stock}/orders'
                .format(stock=stock, venue=self.venue, account=self.account))
    return self.session.get(urljoin(self.base_url, fragment)).json()
Status for all orders in a stock
14,415
def scan_django_settings(values, imports):
    """Recursively collect import-path strings from a Django settings value.

    Strings that look like import paths (per utils.is_import_str), modules
    (anything exposing a truthy __file__), and the keys/values/elements of
    dicts and iterables are all scanned.  Results accumulate into the
    *imports* set in place.
    """
    if isinstance(values, (str, bytes)):
        if utils.is_import_str(values):
            imports.add(values)
        return
    if isinstance(values, dict):
        for key, val in values.items():
            scan_django_settings(key, imports)
            scan_django_settings(val, imports)
        return
    if getattr(values, '__file__', None):
        imp, _ = utils.import_path_from_file(values.__file__)
        imports.add(imp)
        return
    if hasattr(values, '__iter__'):
        for item in values:
            scan_django_settings(item, imports)
Recursively scans Django settings for values that appear to be imported modules .
14,416
# Attempt to bootstrap a Django project from a settings.py path and yield its
# package dependencies as ('django', import_path, scan_path_or_None) tuples.
# Temporarily mutates sys.path, the cwd and DJANGO_SETTINGS_MODULE, loads
# LazySettings, scans every UPPER_CASE setting for import-like values, then
# resolves INSTALLED_APPS through the app registry (retrying for up to ~10s
# while the registry becomes ready).  scan_path is set for items that appear
# to live inside the project and therefore need file scanning.
# NOTE(review): django.setup is monkeypatched to a no-op before settings are
# loaded -- presumably to suppress full-setup side effects; confirm.
def handle_django_settings ( filename ) : old_sys_path = sys . path [ : ] dirpath = os . path . dirname ( filename ) project = os . path . basename ( dirpath ) cwd = os . getcwd ( ) project_path = os . path . normpath ( os . path . join ( dirpath , '..' ) ) if project_path not in sys . path : sys . path . insert ( 0 , project_path ) os . chdir ( project_path ) project_settings = '{}.settings' . format ( project ) os . environ [ 'DJANGO_SETTINGS_MODULE' ] = project_settings try : import django django . setup = lambda : False except ImportError : log . error ( 'Found Django settings, but Django is not installed.' ) return log . warn ( 'Loading Django Settings (Using {}): {}' . format ( django . get_version ( ) , filename ) ) from django . conf import LazySettings installed_apps = set ( ) settings_imports = set ( ) try : settings = LazySettings ( ) settings . _setup ( ) for k , v in vars ( settings . _wrapped ) . items ( ) : if k not in _excluded_settings and re . match ( r'^[A-Z_]+$' , k ) : scan_django_settings ( v , settings_imports ) for app in getattr ( settings , 'INSTALLED_APPS' , [ ] ) : if hasattr ( app , '__file__' ) and getattr ( app , '__file__' ) : imp , _ = utils . import_path_from_file ( getattr ( app , '__file__' ) ) installed_apps . add ( imp ) else : installed_apps . add ( app ) except Exception as e : log . error ( 'Could not load Django settings: %s' , e ) log . debug ( '' , exc_info = True ) return if not installed_apps or not settings_imports : log . error ( 'Got empty settings values from Django settings.' ) try : from django . apps . registry import apps , Apps , AppRegistryNotReady if not apps . apps_ready : apps . populate ( installed_apps ) else : apps = Apps ( installed_apps ) start = time . time ( ) while True : try : for app in apps . get_app_configs ( ) : installed_apps . add ( app . name ) except AppRegistryNotReady : if time . time ( ) - start > 10 : raise Exception ( 'Bail out of waiting for Django' ) log . 
debug ( 'Waiting for apps to load...' ) continue break except Exception as e : log . debug ( 'Could not use AppConfig: {}' . format ( e ) ) sys . path [ : ] = old_sys_path os . chdir ( cwd ) for item in settings_imports : need_scan = item . startswith ( _filescan_modules ) yield ( 'django' , item , project_path if need_scan else None ) for app in installed_apps : need_scan = app . startswith ( project ) yield ( 'django' , app , project_path if need_scan else None )
Attempts to load a Django project and get package dependencies from settings .
14,417
def _url(endpoint: str, sandbox: bool = False) -> str:
    """Join *endpoint* onto the sandbox or production API base URL.

    Note: only the literal True selects the sandbox (identity check),
    mirroring the original behavior.
    """
    base = BASE_URL_SANDBOX if sandbox is True else BASE_URL
    return "{0}{1}".format(base, endpoint)
Build a URL from the API's base URLs.
14,418
def update(self, goal: Dict = None) -> None:
    """Refresh this savings goal's fields from *goal* or from the API.

    When *goal* is None the latest representation is first fetched from the
    savings-goals endpoint (HTTP errors raise via raise_for_status); the
    uid, name, target and totalSaved fields are then copied onto self.
    """
    if goal is None:
        endpoint = "/account/{0}/savings-goals/{1}".format(
            self._account_uid, self.uid)
        response = get(_url(endpoint, self._sandbox),
                       headers=self._auth_headers)
        response.raise_for_status()
        goal = response.json()
    self.uid = goal.get('savingsGoalUid')
    self.name = goal.get('name')
    target = goal.get('target', {})
    self.target_currency = target.get('currency')
    self.target_minor_units = target.get('minorUnits')
    saved = goal.get('totalSaved', {})
    self.total_saved_currency = saved.get('currency')
    self.total_saved_minor_units = saved.get('minorUnits')
Update a single savings goal's data.
14,419
def deposit(self, deposit_minor_units: int) -> None:
    """Move *deposit_minor_units* into this savings goal.

    A fresh UUID is used as the transfer identifier; local goal state is
    refreshed via update() after a successful PUT.
    """
    endpoint = "/account/{0}/savings-goals/{1}/add-money/{2}".format(
        self._account_uid, self.uid, uuid4())
    body = {"amount": {"currency": self.total_saved_currency,
                       "minorUnits": deposit_minor_units}}
    response = put(_url(endpoint, self._sandbox),
                   headers=self._auth_headers, data=json_dumps(body))
    response.raise_for_status()
    self.update()
Add funds to a savings goal .
14,420
def get_image(self, filename: str = None) -> None:
    """Download the photo associated with this savings goal to *filename*.

    Defaults to "<goal name>.png" in the current working directory.
    """
    if filename is None:
        filename = "{0}.png".format(self.name)
    endpoint = "/account/{0}/savings-goals/{1}/photo".format(
        self._account_uid, self.uid)
    response = get(_url(endpoint, self._sandbox), headers=self._auth_headers)
    response.raise_for_status()
    encoded = response.json()['base64EncodedPhoto']
    with open(filename, 'wb') as handle:
        handle.write(b64decode(encoded))
Download the photo associated with a Savings Goal .
14,421
def update_account_data(self) -> None:
    """Refresh this account's identifiers (account/bank ids, IBAN, BIC)."""
    endpoint = "/accounts/{0}/identifiers".format(self._account_uid)
    response = get(_url(endpoint, self._sandbox), headers=self._auth_headers)
    response.raise_for_status()
    payload = response.json()
    self.account_identifier = payload.get('accountIdentifier')
    self.bank_identifier = payload.get('bankIdentifier')
    self.iban = payload.get('iban')
    self.bic = payload.get('bic')
Get basic information for the account .
14,422
def update_balance_data(self) -> None:
    """Refresh the account's balance figures (all stored in minor units)."""
    endpoint = "/accounts/{0}/balance".format(self._account_uid)
    response = get(_url(endpoint, self._sandbox), headers=self._auth_headers)
    response.raise_for_status()
    payload = response.json()
    self.cleared_balance = payload['clearedBalance']['minorUnits']
    self.effective_balance = payload['effectiveBalance']['minorUnits']
    self.pending_transactions = payload['pendingTransactions']['minorUnits']
    self.available_to_spend = payload['availableToSpend']['minorUnits']
    self.accepted_overdraft = payload['acceptedOverdraft']['minorUnits']
Get the latest balance information for the account .
14,423
def update_savings_goal_data(self) -> None:
    """Synchronise self.savings_goals with the goals reported by the API.

    New goals get a fresh SavingsGoal instance, existing ones are updated
    in place, and goals no longer returned are dropped from the mapping.
    """
    endpoint = "/account/{0}/savings-goals".format(self._account_uid)
    response = get(_url(endpoint, self._sandbox), headers=self._auth_headers)
    response.raise_for_status()
    listed = response.json().get('savingsGoalList', {})
    seen_uids = []
    for goal in listed:
        uid = goal.get('savingsGoalUid')
        seen_uids.append(uid)
        if uid not in self.savings_goals:
            self.savings_goals[uid] = SavingsGoal(
                self._auth_headers, self._sandbox, self._account_uid)
        self.savings_goals[uid].update(goal)
    for uid in list(self.savings_goals):
        if uid not in seen_uids:
            self.savings_goals.pop(uid)
Get the latest savings goal information for the account .
14,424
def process_inlines(parser, token):
    """Template-tag compiler for applying inlines to a content variable.

    Accepts 1, 3 or 5 arguments after the tag name: the variable, plus
    optional 'in <template_directory>' and 'as <context_var>' pairs.
    Returns an InlinesNode configured accordingly.
    """
    args = token.split_contents()
    if len(args) not in (2, 4, 6):
        raise template.TemplateSyntaxError(
            "%r tag requires either 1, 3 or 5 arguments." % args[0])
    var_name = args[1]
    ALLOWED_ARGS = ['as', 'in']
    kwargs = {'template_directory': None}
    options = args[2:]
    for key, value in zip(options[0::2], options[1::2]):
        if key not in ALLOWED_ARGS:
            raise template.TemplateSyntaxError(
                "%r tag options arguments must be one of %s."
                % (args[0], ', '.join(ALLOWED_ARGS)))
        if key == 'in':
            kwargs['template_directory'] = value
        elif key == 'as':
            kwargs['asvar'] = value
    return InlinesNode(var_name, **kwargs)
Searches through the provided content and applies inlines wherever they are found.
14,425
def build_current_graph():
    """Build an SQLStateGraph from every installed app's SQL config module.

    Apps whose config module is missing or lacks a sql_items attribute are
    skipped; each item becomes a node keyed by (app_name, item.name) with
    its declared dependencies registered lazily before the graph is built.
    """
    graph = SQLStateGraph()
    for app_name, config in apps.app_configs.items():
        try:
            module_path = '.'.join(
                (config.module.__name__, SQL_CONFIG_MODULE))
            sql_items = import_module(module_path).sql_items
        except (ImportError, AttributeError):
            continue
        for item in sql_items:
            node_key = (app_name, item.name)
            graph.add_node(node_key, item)
            for dep in item.dependencies:
                graph.add_lazy_dependency(node_key, dep)
    graph.build_graph()
    return graph
Read current state of SQL items from the current project state .
14,426
# Resolve the lazily-registered dependency map into parent/child links on the
# node map, then verify the resulting graph is acyclic.  Raises
# NodeNotFoundError when a dependency references a key never added as a node
# (reported separately for the child and the parent side of the edge).
def build_graph ( self ) : for child , parents in self . dependencies . items ( ) : if child not in self . nodes : raise NodeNotFoundError ( "App %s SQL item dependencies reference nonexistent child node %r" % ( child [ 0 ] , child ) , child ) for parent in parents : if parent not in self . nodes : raise NodeNotFoundError ( "App %s SQL item dependencies reference nonexistent parent node %r" % ( child [ 0 ] , parent ) , parent ) self . node_map [ child ] . add_parent ( self . node_map [ parent ] ) self . node_map [ parent ] . add_child ( self . node_map [ child ] ) for node in self . nodes : self . ensure_not_cyclic ( node , lambda x : ( parent . key for parent in self . node_map [ x ] . parents ) )
Read lazy dependency list and build graph .
14,427
def sample(self, n_to_sample, **kwargs):
    """Draw n_to_sample items from the pool, one _iterate call per item.

    Raises when no iterations remain; clips n_to_sample (with a warning)
    when fewer iterations remain than requested.
    """
    n_to_sample = verify_positive(int(n_to_sample))
    n_remaining = self._max_iter - self.t_
    if n_remaining == 0:
        if (not self.replace) and (self._n_items == self._max_iter):
            raise Exception("All items have already been sampled")
        raise Exception("No more space available to continue sampling. "
                        "Consider re-initialising with a larger value "
                        "of max_iter.")
    if n_to_sample > n_remaining:
        warnings.warn("Space only remains for {} more iteration(s). "
                      "Setting n_to_sample = {}.".format(n_remaining,
                                                         n_remaining))
        n_to_sample = n_remaining
    for _ in range(n_to_sample):
        self._iterate(**kwargs)
Sample a sequence of items from the pool
14,428
def sample_distinct(self, n_to_sample, **kwargs):
    """Sample one item at a time until n_to_sample *distinct* items have
    been queried from the oracle.

    n_to_sample is clipped (with a warning) to the number of items whose
    cached label is still NaN; raises once every item has been sampled.
    """
    n_notsampled = np.sum(np.isnan(self.cached_labels_))
    if n_notsampled == 0:
        raise Exception("All distinct items have already been sampled.")
    if n_to_sample > n_notsampled:
        warnings.warn("Only {} distinct item(s) have not yet been sampled."
                      " Setting n_to_sample = {}.".format(n_notsampled,
                                                          n_notsampled))
        n_to_sample = n_notsampled
    n_sampled = 0
    while n_sampled < n_to_sample:
        self.sample(1, **kwargs)
        # _queried_oracle records whether the last iteration hit the oracle.
        n_sampled += self._queried_oracle[self.t_ - 1] * 1
Sample a sequence of items from the pool until a minimum number of distinct items are queried
14,429
def _sample_item ( self , ** kwargs ) : if self . replace : loc = np . random . choice ( self . _n_items ) else : not_seen_ids = np . where ( np . isnan ( self . cached_labels_ ) ) [ 0 ] loc = np . random . choice ( not_seen_ids ) return loc , 1 , { }
Sample an item from the pool
14,430
def _query_label ( self , loc ) : ell = self . cached_labels_ [ loc ] if np . isnan ( ell ) : oracle_arg = self . identifiers [ loc ] ell = self . oracle ( oracle_arg ) if ell not in [ 0 , 1 ] : raise Exception ( "Oracle provided an invalid label." ) self . _queried_oracle [ self . t_ ] = True self . cached_labels_ [ loc ] = ell return ell
Query the label for the item with index loc . Preferentially queries the label from the cache but if not yet cached queries the oracle .
14,431
def _F_measure ( self , alpha , TP , FP , FN , return_num_den = False ) : num = np . float64 ( TP ) den = np . float64 ( alpha * ( TP + FP ) + ( 1 - alpha ) * ( TP + FN ) ) with np . errstate ( divide = 'ignore' , invalid = 'ignore' ) : F_measure = num / den if return_num_den : return F_measure , num , den else : return F_measure
Calculate the weighted F - measure
14,432
def key_occurrence(self, key, update=True):
    """Map each stored value of *key* (stringified) to the uuid holding it.

    Entries lacking *key* are skipped.  update=True refreshes the database
    before scanning.
    """
    if update:
        self.update()
    found = {}
    for uuid, entry in self.database.items():
        if key in entry:
            found[str(entry[key])] = uuid
    return found
Return a dict mapping each stored value of the provided key to the uuid of the entry that contains it.
14,433
def zmq_address(self, key):
    """Build the tcp:// address of module *key*'s REQ port from the database."""
    entry = self.database[key]
    return "tcp://{0}:{1}".format(entry['node'], entry['ports']['REQ'])
Return a ZeroMQ address to the module with the provided key .
14,434
def _get_json ( location ) : location = os . path . expanduser ( location ) try : if os . path . isfile ( location ) : with io . open ( location , encoding = "utf-8" ) as json_data : return json . load ( json_data , object_pairs_hook = OrderedDict ) . get ( "tests" ) elif "http" in location : json_data = requests . get ( location ) if not json_data : raise Dump2PolarionException ( "Failed to download" ) return json . loads ( json_data . text , object_pairs_hook = OrderedDict ) . get ( "tests" ) else : raise Dump2PolarionException ( "Invalid location" ) except Exception as err : raise Dump2PolarionException ( "Failed to parse JSON from {}: {}" . format ( location , err ) )
Reads JSON data from file or URL .
14,435
def _calculate_duration ( start_time , finish_time ) : if not ( start_time and finish_time ) : return 0 start = datetime . datetime . fromtimestamp ( start_time ) finish = datetime . datetime . fromtimestamp ( finish_time ) duration = finish - start decimals = float ( ( "0." + str ( duration . microseconds ) ) ) return duration . seconds + decimals
Calculates how long it took to execute the testcase .
14,436
def _filter_parameters ( parameters ) : if not parameters : return None return OrderedDict ( ( param , value ) for param , value in six . iteritems ( parameters ) if param not in IGNORED_PARAMS )
Filters the ignored parameters out .
14,437
def _append_record(test_data, results, test_path):
    """Append one testcase's result record (an OrderedDict) to *results*.

    Pulls title, verdict, source, Jenkins job/build, filtered params and
    duration out of *test_data*.  A "polarion" id (first element if it is a
    list) is appended as "test_id" when present.
    """
    statuses = test_data.get("statuses")
    jenkins_data = test_data.get("jenkins") or {}
    duration = _calculate_duration(
        test_data.get("start_time"), test_data.get("finish_time")) or 0
    record = [
        ("title", test_data.get("test_name") or _get_testname(test_path)),
        ("verdict", statuses.get("overall")),
        ("source", test_data.get("source")),
        ("job_name", jenkins_data.get("job_name")),
        ("run", jenkins_data.get("build_number")),
        ("params", _filter_parameters(test_data.get("params"))),
        ("time", duration),
    ]
    test_id = test_data.get("polarion")
    if test_id:
        if isinstance(test_id, list):
            test_id = test_id[0]
        record.append(("test_id", test_id))
    results.append(OrderedDict(record))
Adds data of single testcase results to results database .
14,438
def _parse_ostriz(ostriz_data):
    """Convert Ostriz JSON content into ImportedData(results, testrun).

    Only records that match the first build encountered and carry statuses
    are kept; the latest finish time seen is logged and the testrun id is
    derived from that build.
    """
    if not ostriz_data:
        raise NothingToDoException("No data to import")
    results = []
    found_build = None
    last_finish_time = [0]
    for test_path, test_data in six.iteritems(ostriz_data):
        curr_build = test_data.get("build")
        if not curr_build:
            continue
        if not found_build:
            found_build = curr_build
        if curr_build != found_build:
            continue
        if not test_data.get("statuses"):
            continue
        _append_record(test_data, results, test_path)
        _comp_finish_time(test_data, last_finish_time)
    if last_finish_time[0]:
        logger.info("Last result finished at %s", last_finish_time[0])
    testrun_id = _get_testrun_id(found_build)
    return xunit_exporter.ImportedData(results=results, testrun=testrun_id)
Reads the content of the input JSON and returns testcases results .
14,439
def send_array(socket, A=None, metadata=None, flags=0, copy=False,
               track=False, compress=None, chunksize=50 * 1000 * 1000):
    """Send a numpy array with JSON metadata over a zmq socket.

    The metadata message always carries a timestamp plus any caller
    metadata; with an array it also carries dtype, shape and the number of
    binary parts that follow (0 when A is None — metadata-only message).
    Masked arrays are filled, with their fill_value recorded so the
    receiver can re-mask.  Arrays larger than *chunksize* elements are
    split into multiple SNDMORE-chained messages.

    Fix: np.asscalar() was removed in NumPy 1.23; ``.item()`` on an
    ndarray-wrapped value is the documented, behaviorally identical
    replacement.
    """
    md = {'timestamp': datetime.datetime.now().isoformat()}
    if metadata:
        md.update(metadata)
    if A is None:
        # Metadata-only message: no binary parts follow.
        md['parts'] = 0
        socket.send_json(md, flags)
        return
    if isinstance(A, float) or isinstance(A, int):
        A = np.asarray(A)
    md['dtype'] = str(A.dtype)
    md['shape'] = A.shape
    md['parts'] = int(np.prod(A.shape) // chunksize + 1)
    try:
        # Masked array: record the fill value and send the filled data.
        md['fill_value'] = np.asarray(A.fill_value).item()
        A = A.filled()
    except AttributeError:
        pass
    socket.send_json(md, flags | zmq.SNDMORE)
    if md['parts'] == 1:
        msg = memoryview(np.ascontiguousarray(A))
        socket.send(msg, flags, copy=copy, track=track)
        return
    for i, chunk in enumerate(np.array_split(A, md['parts'])):
        chunk_flags = flags
        if i != md['parts'] - 1:
            chunk_flags |= zmq.SNDMORE
        msg = memoryview(np.ascontiguousarray(chunk))
        socket.send(msg, chunk_flags, copy=copy, track=track)
Send a numpy array with metadata over zmq.
14,440
def recv_array(socket, flags=0, copy=False, track=False, poll=None,
               poll_timeout=10000):
    """Receive metadata and an optional numpy array sent by send_array.

    Returns (A, md); A is None when the sender passed no array.  When
    *poll* is given, every receive waits at most *poll_timeout* ms and
    raises NoResponseException on timeout.  Multi-part arrays are
    reassembled in order, and a recorded fill_value re-creates the masked
    array.

    Fix: the original called the Python-2-only builtin ``buffer(msg)``,
    which is a NameError on Python 3; np.frombuffer accepts a memoryview
    of the message directly.
    """
    def _recv_json():
        # One JSON receive, honouring the optional poller.
        if poll is None:
            return socket.recv_json(flags=flags)
        socks = dict(poll.poll(poll_timeout))
        if socks.get(socket) == zmq.POLLIN:
            return socket.recv_json(flags=flags)
        raise NoResponseException(
            "Recv_array got no response within timeout (1)")

    def _recv_part(check_empty=False):
        # One binary receive, honouring the optional poller.
        if poll is None:
            return socket.recv(flags=flags, copy=copy, track=track)
        socks = dict(poll.poll(poll_timeout))
        if socks.get(socket) == zmq.POLLIN:
            reply = socket.recv(flags=flags, copy=copy, track=track)
            if check_empty and not reply:
                raise EmptyResponseException(
                    "Recv_array got an empty response (2)")
            return reply
        raise NoResponseException(
            "Recv_array got no response within timeout (2)")

    md = _recv_json()
    if md['parts'] == 0:
        return None, md
    if md['parts'] == 1:
        msg = _recv_part()
        A = np.frombuffer(memoryview(msg), dtype=md['dtype'])
        A = A.reshape(md['shape'])
        if 'fill_value' in md:
            A = np.ma.masked_equal(A, md['fill_value'])
        return A, md
    A = np.zeros(np.prod(md['shape']), dtype=md['dtype'])
    arr_position = 0
    for _ in range(md['parts']):
        msg = _recv_part(check_empty=True)
        part = np.frombuffer(memoryview(msg), dtype=md['dtype'])
        A[arr_position:arr_position + part.shape[0]] = part[:]
        arr_position += part.shape[0]
    A = A.reshape(md['shape'])
    if 'fill_value' in md:
        A = np.ma.masked_equal(A, md['fill_value'])
    return A, md
Receive metadata and an optional numpy array from a zmq socket.
14,441
def is_sql_equal(sqls1, sqls2):
    """Compare two SQL items (single statements or sequences) for equality.

    Bare statements are wrapped as one-element tuples; the items are equal
    only when the sequences have the same length and every pair matches on
    both SQL text and parameters (via _sql_params).
    """
    if not isinstance(sqls1, (list, tuple)):
        sqls1 = (sqls1,)
    if not isinstance(sqls2, (list, tuple)):
        sqls2 = (sqls2,)
    if len(sqls1) != len(sqls2):
        return False
    for item1, item2 in zip(sqls1, sqls2):
        sql1, params1 = _sql_params(item1)
        sql2, params2 = _sql_params(item2)
        if sql1 != sql2 or params1 != params2:
            return False
    return True
Find out equality of two SQL items .
14,442
def add_sql_operation(self, app_label, sql_name, operation, dependencies):
    """Register *operation* under (app_label, sql_name) and add it to the
    migration, translating each dependency key into the internal
    (app, SQL_BLOB, name, operation) form so later operations can depend
    on it."""
    deps = [(dep[0], SQL_BLOB, dep[1], self._sql_operations.get(dep))
            for dep in dependencies]
    self.add_operation(app_label, operation, dependencies=deps)
    self._sql_operations[(app_label, sql_name)] = operation
Add SQL operation and register it to be used as dependency for further sequential operations .
14,443
# For every changed key, emit a ReverseAlterSQL operation that rolls the old
# item back (its reverse_sql forward, its sql as the reverse).  Items with no
# reverse SQL, a noop reverse, or a new item flagged "replace" are skipped.
# Each operation depends on the old node's children plus the item itself, so
# rollback happens before the replacement is created.
def _generate_reversed_sql ( self , keys , changed_keys ) : for key in keys : if key not in changed_keys : continue app_label , sql_name = key old_item = self . from_sql_graph . nodes [ key ] new_item = self . to_sql_graph . nodes [ key ] if not old_item . reverse_sql or old_item . reverse_sql == RunSQL . noop or new_item . replace : continue operation = ReverseAlterSQL ( sql_name , old_item . reverse_sql , reverse_sql = old_item . sql ) sql_deps = [ n . key for n in self . from_sql_graph . node_map [ key ] . children ] sql_deps . append ( key ) self . add_sql_operation ( app_label , sql_name , operation , sql_deps )
Generate reversed operations for changes that require full rollback and creation .
14,444
def _generate_delete_sql(self, delete_keys):
    """Emit a DeleteSQL operation (running the old item's reverse SQL) for
    every key scheduled for deletion, depending on the item's children in
    the old graph plus the item itself."""
    for key in delete_keys:
        app_label, sql_name = key
        old_node = self.from_sql_graph.nodes[key]
        operation = DeleteSQL(sql_name, old_node.reverse_sql,
                              reverse_sql=old_node.sql)
        sql_deps = [child.key
                    for child in self.from_sql_graph.node_map[key].children]
        sql_deps.append(key)
        self.add_sql_operation(app_label, sql_name, operation, sql_deps)
Generate forward delete operations for SQL items .
14,445
# Entry point of the autodetector: diff the old and new SQL graphs into new,
# deleted and changed key sets (changed = same key, different SQL per
# is_sql_equal; dependency-set changes are tracked separately), expand them
# via assemble_changes, then generate operations in a fixed order --
# reversals first, then forward SQL, then deletions, then dependency-only
# updates.  The order matters: later generators consult _sql_operations
# populated by earlier ones.
def generate_sql_changes ( self ) : from_keys = set ( self . from_sql_graph . nodes . keys ( ) ) to_keys = set ( self . to_sql_graph . nodes . keys ( ) ) new_keys = to_keys - from_keys delete_keys = from_keys - to_keys changed_keys = set ( ) dep_changed_keys = [ ] for key in from_keys & to_keys : old_node = self . from_sql_graph . nodes [ key ] new_node = self . to_sql_graph . nodes [ key ] if not is_sql_equal ( old_node . sql , new_node . sql ) : changed_keys . add ( key ) old_deps = self . from_sql_graph . dependencies [ key ] new_deps = self . to_sql_graph . dependencies [ key ] removed_deps = old_deps - new_deps added_deps = new_deps - old_deps if removed_deps or added_deps : dep_changed_keys . append ( ( key , removed_deps , added_deps ) ) keys = self . assemble_changes ( new_keys , changed_keys , self . to_sql_graph ) delete_keys = self . assemble_changes ( delete_keys , set ( ) , self . from_sql_graph ) self . _sql_operations = { } self . _generate_reversed_sql ( keys , changed_keys ) self . _generate_sql ( keys , changed_keys ) self . _generate_delete_sql ( delete_keys ) self . _generate_altered_sql_dependencies ( dep_changed_keys )
Starting point of this tool which identifies changes and generates respective operations .
14,446
def check_dependency(self, operation, dependency):
    """Treat SQLBlob dependencies as satisfied only by their exact stored
    operation; defer every other dependency to the default autodetector
    logic."""
    if isinstance(dependency[1], SQLBlob):
        return dependency[3] == operation
    return super(MigrationAutodetector, self).check_dependency(
        operation, dependency)
Enhances default behavior of method by checking dependency for matching operation .
14,447
def reactivate(credit_card_id: str) -> None:
    """Reactivate the given credit card inside a single DB transaction."""
    logger.info('reactivating-credit-card', credit_card_id=credit_card_id)
    with transaction.atomic():
        card = CreditCard.objects.get(pk=credit_card_id)
        card.reactivate()
        card.save()
Reactivates a credit card .
14,448
# Synchronise subtitle times against a sorted list of SyncPoints.  Works on a
# clone of the subtitle list; for each consecutive pair of sync points it
# compares the old start/end deltas with the new ones and rescales every
# subtitle between the two anchors via _calculateTime, writing the results
# back through changeSubStart/changeSubEnd.  A lowest sync point is prepended
# when the given list does not already start at it, so the leading subtitles
# are rescaled too.  Sync point subNo values must index into the subtitle
# list (asserted up front).
def sync ( self , syncPointList ) : if len ( syncPointList ) == 0 : return subsCopy = self . _subs . clone ( ) syncPointList . sort ( ) SubAssert ( syncPointList [ 0 ] . subNo >= 0 ) SubAssert ( syncPointList [ 0 ] . subNo < subsCopy . size ( ) ) SubAssert ( syncPointList [ - 1 ] . subNo < subsCopy . size ( ) ) firstSyncPoint = self . _getLowestSyncPoint ( syncPointList , subsCopy ) if firstSyncPoint != syncPointList [ 0 ] : syncPointList . insert ( 0 , firstSyncPoint ) for i , syncPoint in enumerate ( syncPointList ) : firstSyncPoint = syncPointList [ i ] secondSyncPoint = self . _getSyncPointOrEnd ( i + 1 , syncPointList , subsCopy ) log . debug ( _ ( "Syncing times for sync points:" ) ) log . debug ( " %s" % firstSyncPoint ) log . debug ( " %s" % secondSyncPoint ) if firstSyncPoint == secondSyncPoint : continue secondSubNo = secondSyncPoint . subNo firstSubNo = firstSyncPoint . subNo firstOldSub = subsCopy [ firstSubNo ] secondOldSub = subsCopy [ secondSubNo ] oldStartDelta , oldEndDelta = self . _getDeltas ( firstOldSub , secondOldSub ) newStartDelta , newEndDelta = self . _getDeltas ( firstSyncPoint , secondSyncPoint ) for subNo in range ( firstSubNo , secondSubNo + 1 ) : sub = subsCopy [ subNo ] newStartTime = self . _calculateTime ( sub . start , firstOldSub . start , firstSyncPoint . start , oldStartDelta , newStartDelta ) newEndTime = self . _calculateTime ( sub . end , firstOldSub . end , firstSyncPoint . end , oldEndDelta , newEndDelta ) self . _subs . changeSubStart ( subNo , newStartTime ) self . _subs . changeSubEnd ( subNo , newEndTime )
Synchronise subtitles using a given list of SyncPoints .
14,449
def _getDeltas ( self , firstSub , secondSub ) : startDelta = max ( firstSub . start , secondSub . start ) - min ( firstSub . start , secondSub . start ) endDelta = max ( firstSub . end , secondSub . end ) - min ( firstSub . end , secondSub . end ) return ( startDelta , endDelta )
Arguments must have start and end properties which are FrameTimes .
14,450
async def scan_for_units(self, iprange):
    """Scan *iprange* (CIDR notation) for GH units answering on PORT.

    A short (20 ms) TCP connect probe filters live hosts; each hit is then
    queried via DeviceInfo for its name, model and assistant capability.
    Returns a list of dicts with host, name, model and assistant_supported.
    """
    units = []
    for ip_address in ipaddress.IPv4Network(iprange):
        host = str(ip_address)
        sock = socket.socket()
        sock.settimeout(0.02)
        try:
            # connect() returns None on success.
            probe = sock.connect((host, PORT))
        except socket.error:
            probe = 1
        _LOGGER.debug('Checking port connectivity on %s:%s',
                      host, (str(PORT)))
        if probe is None:
            ghlocalapi = DeviceInfo(self._loop, self._session, host)
            await ghlocalapi.get_device_info()
            data = ghlocalapi.device_info
            if data is not None:
                cap = data['device_info']['capabilities']
                units.append({
                    'host': host,
                    'name': data['name'],
                    'model': data['device_info']['model_name'],
                    'assistant_supported': cap.get('assistant_supported',
                                                   False),
                })
        sock.close()
    return units
Scan local network for GH units .
14,451
async def bluetooth_scan():
    """Scan for nearby bluetooth devices through a GH unit and print them."""
    async with aiohttp.ClientSession() as session:
        bt = Bluetooth(LOOP, session, IPADDRESS)
        await bt.scan_for_devices()
        await bt.get_scan_result()
        print("Device info:", bt.devices)
Get nearby bluetooth devices .
14,452
def reduce_fit(interface, state, label, inp):
    """Disco reduce stage for Naive Bayes fitting.

    Aggregates the mapper output: continuous features (2-part keys) have
    their partial (count, mean, variance) triples merged; discrete
    features (3-part keys) have their co-occurrence counts summed; plain
    label keys are summed into class counts used for prior probabilities.
    Emits 'mean', 'var', 'var_log', 'prior', 'prior_log', 'iv' and
    'y_labels' entries of the fitted model.
    """
    from disco.util import kvgroup
    import numpy as np
    out = interface.output(0)
    fit_model = {"y_labels": [], "y_sum": 0, "iv": set()}
    combiner = {}
    means, variances = [], []
    k_prev = ""
    for key, value in kvgroup(inp):
        k_split = key.split(state["delimiter"])
        if len(k_split) == 3:
            # Discrete feature: (label, feature_index, feature_value).
            fit_model["iv"].add(tuple(k_split[1:]))
            out.add(tuple(k_split), sum(value))
        elif len(k_split) == 2:
            # Continuous feature: (label, feature_index). When the label
            # changes, flush the per-feature stats accumulated so far.
            if k_split[0] != k_prev and k_prev != "":
                mean, var = zip(*[combiner[key] for key in sorted(combiner.keys())])
                means.append(mean)
                variances.append(var)
            # Pairwise merge of partial (n, mean, var) statistics
            # (parallel-variance update formula).
            n_a = mean_a = var_a = 0
            for n_b, mean_b, var_b in value:
                n_ab = n_a + n_b
                var_a = ((n_a * var_a + n_b * var_b) / float(n_ab)) + (n_a * n_b * ((mean_b - mean_a) / float(n_ab)) ** 2)
                mean_a = (n_a * mean_a + n_b * mean_b) / float(n_ab)
                n_a = n_ab
            # Small epsilon keeps the variance strictly positive.
            combiner[int(k_split[1])] = (mean_a, var_a + 1e-9)
            k_prev = k_split[0]
        else:
            # Plain label key: accumulate class occurrence counts.
            fit_model[key] = np.sum(value)
            fit_model["y_sum"] += fit_model[key]
            fit_model["y_labels"].append(key)
    if len(means) > 0:
        # Flush stats for the final label.
        mean, var = zip(*[combiner[key] for key in sorted(combiner.keys())])
        out.add("mean", np.array(means + [mean], dtype=np.float32))
        variances = np.array(variances + [var], dtype=np.float32)
        out.add("var", variances)
        out.add("var_log", np.log(np.pi * variances))
    prior = [fit_model[y_label] / float(fit_model["y_sum"]) for y_label in fit_model["y_labels"]]
    out.add("prior", np.array(prior, dtype=np.float32))
    out.add("prior_log", np.log(prior))
    out.add("iv", list(fit_model["iv"]))
    out.add("y_labels", fit_model["y_labels"])
Function separates aggregation of continuous and discrete features . For continuous features it aggregates partially calculated means and variances and returns them . For discrete features it aggregates pairs and returns them . Pairs with label occurrences are used to calculate prior probabilities
14,453
def map_predict(interface, state, label, inp):
    """Disco map stage for Naive Bayes prediction.

    Scores each input row against the fitted model in ``state``:
    Gaussian log-likelihoods for continuous features, multinomial
    log-probabilities for discrete features. Emits
    (sample_id, (predicted_label, posterior_probabilities)).
    """
    import numpy as np
    out = interface.output(0)
    # Split feature indices by their declared type ('c'ontinuous / 'd'iscrete).
    continuous = [j for i, j in enumerate(state["X_indices"]) if state["X_meta"][i] == "c"]
    discrete = [j for i, j in enumerate(state["X_indices"]) if state["X_meta"][i] == "d"]
    cont = True if len(continuous) > 0 else False
    disc = True if len(discrete) > 0 else False
    for row in inp:
        row = row.strip().split(state["delimiter"])
        if len(row) > 1:
            x_id = "" if state["id_index"] == -1 else row[state["id_index"]]
            # Start from the class log-priors.
            probs = state["fit_model"]["prior_log"]
            if cont:
                # Missing values are imputed as 0 before Gaussian scoring.
                x = np.array([(0 if row[j] in state["missing_vals"] else float(row[j])) for j in continuous])
                probs = probs - 0.5 * np.sum(np.true_divide((x - state["fit_model"]["mean"]) ** 2, state["fit_model"]["var"]) + state["fit_model"]["var_log"], axis=1)
            if disc:
                # Unseen (feature, value) pairs contribute a zero vector.
                probs = probs + np.sum([(0 if row[i] in state["missing_vals"] else state["fit_model"].get((str(i), row[i]), np.zeros(1))) for i in discrete], axis=0)
            # Normalize in log space, then convert to posteriors.
            log_prob_x = np.log(np.sum(np.exp(probs)))
            probs = np.exp(np.array(probs) - log_prob_x)
            # Highest posterior wins; ties broken by label ordering.
            y_predicted = max(zip(probs, state["fit_model"]["y_labels"]))[1]
            out.add(x_id, (y_predicted, probs.tolist()))
Function makes predictions for samples with a given model. It calculates probabilities using multinomial and Gaussian distributions.
14,454
def predict(dataset, fitmodel_url, m=1, save_results=True, show=False):
    """Start a Disco job that predicts labels for ``dataset`` with a fitted model.

    :param dataset: dataset descriptor whose ``params`` carry the input chain,
        feature metadata and data tag.
    :param fitmodel_url: dict with key 'naivebayes_fitmodel' pointing at the
        fitted-model result tag.
    :param m: numeric smoothing parameter for discrete feature probabilities.
    :param save_results: passed to the Disco Worker.
    :param show: passed to ``job.wait``.
    :return: job result iterator-compatible results, or [] if the training
        data contained a single class.
    :raises Exception: if ``m`` is not numeric or the fit model is missing.
    """
    from disco.worker.pipeline.worker import Worker, Stage
    from disco.core import Job, result_iterator
    import numpy as np
    try:
        m = float(m)
    except ValueError:
        raise Exception("Parameter m should be numerical.")
    if "naivebayes_fitmodel" in fitmodel_url:
        # Load fitted model parameters.
        fit_model = dict((k, v) for k, v in result_iterator(fitmodel_url["naivebayes_fitmodel"]))
        if len(fit_model["y_labels"]) < 2:
            # Fix: the original used the Python-2-only print statement;
            # the parenthesized form behaves identically on both 2 and 3.
            print("There is only one class in training data.")
            return []
    else:
        raise Exception("Incorrect fit model.")
    if dataset.params["X_meta"].count("d") > 0:
        # Convert discrete co-occurrence counts to smoothed conditional
        # log-probabilities; log(0) is silenced and mapped via nan_to_num.
        np.seterr(divide='ignore')
        for iv in fit_model["iv"]:
            dist = [fit_model.pop((y,) + iv, 0) for y in fit_model["y_labels"]]
            fit_model[iv] = np.nan_to_num(
                np.log(np.true_divide(np.array(dist) + m * fit_model["prior"], np.sum(dist) + m))
            ) - fit_model["prior_log"]
        del fit_model["iv"]
    job = Job(worker=Worker(save_results=save_results))
    # Single map stage: score every row with map_predict.
    job.pipeline = [
        ("split", Stage("map", input_chain=dataset.params["input_chain"], init=simple_init, process=map_predict))
    ]
    job.params = dataset.params
    job.params["fit_model"] = fit_model
    job.run(name="naivebayes_predict", input=dataset.params["data_tag"])
    results = job.wait(show=show)
    return results
Function starts a job that makes predictions on the input data with a given model.
14,455
def data(self):
    """Return the merged request data as a single dictionary (cached).

    Merges all sources returned by ``get_request_data`` (path params,
    query string, body); earlier sources take precedence for duplicate
    keys. The merged dict is cached on ``self._data``.
    """
    if self._data:
        return self._data
    merged = {}
    for subdata in self.get_request_data():
        # Fix: the original used dict.iteritems(), which is Python-2-only;
        # .items() behaves the same on both 2 and 3.
        for key, value in subdata.items():
            # First source wins — do not overwrite existing keys.
            if key not in merged:
                merged[key] = value
    self._data = merged
    return merged
Returns the request data as a dictionary .
14,456
def get_resource(self, resource, **kwargs):
    """Instantiate ``resource`` with this handler's request context.

    Forwards request, response, path_params and application plus any
    extra keyword arguments, so new constructor arguments can be added
    without touching call sites.
    """
    return resource(
        request=self.request,
        response=self.response,
        path_params=self.path_params,
        application=self.application,
        **kwargs
    )
Returns a new instance of the resource class passed in as resource . This is a helper to make future - compatibility easier when new arguments get added to the constructor .
14,457
def assert_conditions(self):
    """Check HTTP conditional-request headers, aborting via HTTP errors.

    Verifies the Content-MD5 header, computes the resource's ETag and
    last-modified date, and then asserts the If-Match/If-Modified-Since
    style conditions against them.
    """
    self.assert_condition_md5()
    # clean_etag is invoked for its side effects; the original bound its
    # return value to a local that was never read, so the binding is dropped.
    self.clean_etag(self.call_method('get_etag'))
    self.response.last_modified = self.call_method('get_last_modified')
    self.assert_condition_etag()
    self.assert_condition_last_modified()
Handles various HTTP conditions and raises HTTP exceptions to abort the request .
14,458
def assert_condition_md5(self):
    """Verify the optional Content-MD5 request header.

    When present, the header must equal the MD5 hex digest of the request
    body; otherwise a 400 response is raised.
    """
    headers = self.request.headers
    if 'Content-MD5' not in headers:
        return
    expected = headers['Content-MD5']
    actual = hashlib.md5(self.request.body).hexdigest()
    if actual != expected:
        raise_400(self, msg='Invalid Content-MD5 request header.')
If the Content - MD5 request header is present in the request it s verified against the MD5 hash of the request body . If they don t match a 400 HTTP response is returned .
14,459
def get_allowed_methods(self):
    """Return a comma-separated list of HTTP method names this instance supports.

    A method qualifies when its attribute name is entirely upper-case and
    the attribute is callable. Useful for the ``Allow`` response header.
    """
    methods = []
    for name in dir(self):
        if name != name.upper():
            continue
        if callable(getattr(self, name)):
            methods.append(name)
    return ", ".join(methods)
Returns a comma-separated list of method names that are allowed on this instance. Useful to set the Allow response header.
14,460
def convert_param(self, method, param, value):
    """Convert ``value`` using the 'convert' callable from the validation rules.

    Returns the value unchanged when no rules or no converter exist.
    Raises ValidationException when conversion fails with ValueError.
    """
    rules = self._get_validation(method, param)
    if not rules or not rules.get('convert'):
        return value
    converter = rules['convert']
    try:
        return converter(value)
    except ValueError:
        raise ValidationException(
            "{0} value {1} does not validate.".format(param, value))
Converts the parameter using the function convert function of the validation rules . Same parameters as the validate_param method so it might have just been added there . But lumping together the two functionalities would make overwriting harder .
14,461
def _get_validation ( self , method , param ) : if hasattr ( method , '_validations' ) and param in method . _validations : return method . _validations [ param ] elif ( hasattr ( method . im_class , '_validations' ) and param in method . im_class . _validations ) : return method . im_class . _validations [ param ] else : return None
Return the correct validations dictionary for this parameter . First checks the method itself and then its class . If no validation is defined for this parameter None is returned .
14,462
def convert_response(self):
    """Finalize the response body for the client.

    If a raw body was set, convert it with the ``to_<type>`` method
    matching the requested content type (non-identifier characters in the
    type are mapped to underscores), then remove the raw body attribute.
    """
    if not hasattr(self.response, 'body_raw'):
        return
    raw = self.response.body_raw
    if raw is not None:
        sanitized = re.sub('[^a-zA-Z_]', '_', self.type)
        converter = getattr(self, 'to_' + sanitized, None)
        if converter is not None:
            self.response.body = converter(raw)
    del self.response.body_raw
Finish filling the instance s response object so it s ready to be served to the client . This includes converting the body_raw property to the content type requested by the user if necessary .
14,463
def handle_exception(self, e, status=500):
    """Handle an exception: log it, set the status and an error body."""
    logger.exception("An exception occurred while handling the request: %s", e)
    self.response.status = status
    self.response.body_raw = {'error': str(e)}
Handle the given exception . Log sets the response code and output the exception message as an error message .
14,464
def handle_exception_404(self, e):
    """Handle a 404: log at debug level and emit a 'Not Found' error body."""
    logger.debug("A 404 Not Found exception occurred while handling "
                 "the request.")
    self.response.status = 404
    self.response.body_raw = {'error': 'Not Found'}
Handle the given exception . Log sets the response code to 404 and output the exception message as an error message .
14,465
def set_response_content_md5(self):
    """Set the Content-MD5 response header from the response body's MD5 hash."""
    digest = hashlib.md5(self.response.body)
    self.response.content_md5 = digest.hexdigest()
Set the Content-MD5 response header, calculated as the MD5 hash of the response body.
14,466
def get_request_data(self):
    """Collect the request's input sources in precedence order.

    Returns a list of dicts: path parameters, query-string parameters and
    either the parsed JSON body (when the content type is JSON and the
    body parses to a dict) or the form-encoded POST data. Invalid JSON
    aborts with a 400 response.
    """
    sources = [self.path_params, self.request.GET]
    is_json = self.request.headers.get('Content-Type') == 'application/json'
    if is_json and self.request.body:
        try:
            payload = json.loads(self.request.body)
        except ValueError:
            raise_400(self, msg='Invalid JSON content data')
        if isinstance(payload, dict):
            sources.append(payload)
    else:
        sources.append(self.request.POST)
    return sources
Read the input values .
14,467
def _merge_defaults ( self , data , method_params , defaults ) : if defaults : optional_args = method_params [ - len ( defaults ) : ] for key , value in zip ( optional_args , defaults ) : if not key in data : data [ key ] = value return data
Helper method for adding default values to the data dictionary .
14,468
def _get_columns(self, X, cols):
    """Select columns from table ``X``.

    :param X: a DataSet, pandas DataFrame or list of row mappings.
    :param cols: a column name or list of column names.
    :return: a Series-like vector when ``cols`` is a single name,
        otherwise a numpy matrix of the selected columns.
    """
    if isinstance(X, DataSet):
        X = X[cols]
    # A single column name means the caller gets a vector back.
    return_vector = False
    # NOTE(review): ``basestring`` is Python-2-only; this module appears
    # to target Python 2 — confirm before porting.
    if isinstance(cols, basestring):
        return_vector = True
        cols = [cols]
    if isinstance(X, list):
        # List of row mappings: project each row, then build a DataFrame.
        X = [x[cols] for x in X]
        X = pd.DataFrame(X)
    if return_vector:
        t = X[cols[0]]
    else:
        # NOTE(review): DataFrame.as_matrix was deprecated and removed in
        # modern pandas (use .to_numpy()/.values) — verify pandas version.
        t = X.as_matrix(cols)
    return t
Get a subset of columns from the given table X . X a Pandas dataframe ; the table to select columns from cols a string or list of strings representing the columns to select Returns a numpy array with the data from the selected columns
14,469
def _requirement_element(self, parent_element, req_data):
    """Append a <requirement> XML element built from ``req_data``.

    Skips (with a warning) requirements that have no title or that lack
    the data needed by the selected lookup method. Attributes and custom
    fields are sorted for deterministic output.
    """
    req_data = self._transform_result(req_data)
    if not req_data:
        return
    title = req_data.get("title")
    if not title:
        logger.warning("Skipping requirement, title is missing")
        return
    req_id = req_data.get("id")
    if not self._check_lookup_prop(req_id):
        logger.warning("Skipping requirement `%s`, data missing for selected lookup method", title)
        return
    # Split data into element attributes vs. custom fields, apply defaults,
    # and sort both for stable, diff-friendly XML.
    attrs, custom_fields = self._classify_data(req_data)
    attrs, custom_fields = self._fill_defaults(attrs, custom_fields)
    attrs = OrderedDict(sorted(attrs.items()))
    custom_fields = OrderedDict(sorted(custom_fields.items()))
    requirement = etree.SubElement(parent_element, "requirement", attrs)
    title_el = etree.SubElement(requirement, "title")
    title_el.text = title
    description = req_data.get("description")
    if description:
        description_el = etree.SubElement(requirement, "description")
        description_el.text = description
    self._fill_custom_fields(requirement, custom_fields)
Adds requirement XML element .
14,470
def export(self):
    """Build and return the requirements XML as a pretty-printed string."""
    root = self._top_element()
    properties = self._properties_element(root)
    self._fill_requirements(root)
    self._fill_lookup_prop(properties)
    return utils.prettify_xml(root)
Returns requirements XML .
14,471
def write_xml(xml, output_file=None):
    """Write the XML content to ``output_file`` (or a timestamped default name)."""
    timestamp = datetime.datetime.now()
    gen_filename = "requirements-{:%Y%m%d%H%M%S}.xml".format(timestamp)
    utils.write_xml(xml, output_loc=output_file, filename=gen_filename)
Outputs the XML content into a file .
14,472
def _client ( self , id , secret ) : url = self . api_url + self . auth_token_url auth_string = '%s:%s' % ( id , secret ) authorization = base64 . b64encode ( auth_string . encode ( ) ) . decode ( ) headers = { 'Authorization' : "Basic " + authorization , 'Content-Type' : "application/x-www-form-urlencoded" } params = { 'grant_type' : 'client_credentials' , 'response_type' : 'token' } return self . session . post ( url , params = params , headers = headers )
Performs client login with the provided credentials
14,473
def aggregate_cap_val(self, conn, **kwargs):
    """Return the normalised feed-in profile and installed capacity for a region.

    :param conn: database connection passed through to ``get_timeseries``.
    :param kwargs: must contain 'region' (object with a ``geom`` attribute);
        optional 'store' triggers persisting the full per-site frames.
    :return: (feedin_df, cap) where feedin_df has 'pv_pwr' and 'wind_pwr'
        columns normalised by total installed capacity.
    """
    region = kwargs['region']
    [pv_df, wind_df, cap] = self.get_timeseries(conn, geometry=region.geom, **kwargs)
    if kwargs.get('store', False):
        self.store_full_df(pv_df, wind_df, **kwargs)
    # Total installed capacity across all sites in the region.
    cap = cap.sum()
    # Sum site profiles and normalise by the matching capacity totals.
    df = pd.concat([pv_df.sum(axis=1) / cap['pv_pwr'], wind_df.sum(axis=1) / cap['wind_pwr']], axis=1)
    feedin_df = df.rename(columns={0: 'pv_pwr', 1: 'wind_pwr'})
    return feedin_df, cap
Returns the normalised feedin profile and installed capacity for a given region .
14,474
def save_assets(self, dest_path):
    """Save each subplot's assets alongside ``dest_path``, suffixed by index."""
    for index, subplot in enumerate(self.subplots):
        subplot.save_assets(dest_path, suffix='_%d' % index)
Save plot assets alongside dest_path .
14,475
def set_empty(self, row, column):
    """Keep the subplot at (row, column) completely empty."""
    self.get_subplot_at(row, column).set_empty()
Keep one of the subplots completely empty .
14,476
def set_empty_for_all(self, row_column_list):
    """Keep every subplot in ``row_column_list`` completely empty."""
    for position in row_column_list:
        self.set_empty(*position)
Keep all specified subplots completely empty .
14,477
def set_title(self, row, column, text):
    """Set the title text of the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_title(text)
Set a title text .
14,478
def set_label(self, row, column, text, location='upper right', style=None):
    """Set a label on the subplot at (row, column) at the given location."""
    self.get_subplot_at(row, column).set_label(text, location, style)
Set a label for the subplot .
14,479
def show_xticklabels(self, row, column):
    """Show the x-axis tick labels for the subplot at (row, column)."""
    self.get_subplot_at(row, column).show_xticklabels()
Show the x - axis tick labels for a subplot .
14,480
def show_xticklabels_for_all(self, row_column_list=None):
    """Show x-axis tick labels for the given subplots (all when None)."""
    if row_column_list is not None:
        for row, column in row_column_list:
            self.show_xticklabels(row, column)
    else:
        for subplot in self.subplots:
            subplot.show_xticklabels()
Show the x - axis tick labels for all specified subplots .
14,481
def show_yticklabels(self, row, column):
    """Show the y-axis tick labels for the subplot at (row, column)."""
    self.get_subplot_at(row, column).show_yticklabels()
Show the y - axis tick labels for a subplot .
14,482
def show_yticklabels_for_all(self, row_column_list=None):
    """Show y-axis tick labels for the given subplots (all when None)."""
    if row_column_list is not None:
        for row, column in row_column_list:
            self.show_yticklabels(row, column)
    else:
        for subplot in self.subplots:
            subplot.show_yticklabels()
Show the y - axis tick labels for all specified subplots .
14,483
def set_xlimits(self, row, column, min=None, max=None):
    """Set the x-axis limits of the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_xlimits(min, max)
Set x - axis limits of a subplot .
14,484
def set_xlimits_for_all(self, row_column_list=None, min=None, max=None):
    """Set x-axis limits of the given subplots, or globally when None."""
    if row_column_list is not None:
        for row, column in row_column_list:
            self.set_xlimits(row, column, min, max)
    else:
        # No explicit subplots: record global limits instead.
        self.limits['xmin'] = min
        self.limits['xmax'] = max
Set x - axis limits of specified subplots .
14,485
def set_ylimits(self, row, column, min=None, max=None):
    """Set the y-axis limits of the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_ylimits(min, max)
Set y - axis limits of a subplot .
14,486
def set_ylimits_for_all(self, row_column_list=None, min=None, max=None):
    """Set y-axis limits of the given subplots, or globally when None."""
    if row_column_list is not None:
        for row, column in row_column_list:
            self.set_ylimits(row, column, min, max)
    else:
        # No explicit subplots: record global limits instead.
        self.limits['ymin'] = min
        self.limits['ymax'] = max
Set y - axis limits of specified subplots .
14,487
def set_slimits(self, row, column, min, max):
    """Set the point-size limits of the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_slimits(min, max)
Set limits for the point sizes .
14,488
def set_ytick_labels(self, row, column, labels):
    """Manually set the y-axis tick labels of the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_ytick_labels(labels)
Manually specify the y - axis tick labels .
14,489
def get_subplot_at(self, row, column):
    """Return the subplot stored at (row, column) in the row-major grid."""
    return self.subplots[row * self.columns + column]
Return the subplot at row column position .
14,490
def set_subplot_xlabel(self, row, column, text):
    """Set the x-axis label of the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_xlabel(text)
Set a label for the x - axis of a subplot .
14,491
def set_subplot_ylabel(self, row, column, text):
    """Set the y-axis label of the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_ylabel(text)
Set a label for the y - axis of a subplot .
14,492
def set_scalebar_for_all(self, row_column_list=None, location='lower right'):
    """Show a marker-area scale bar on the given subplots (all when None)."""
    if row_column_list is None:
        targets = list(self.subplots)
    else:
        targets = [self.get_subplot_at(row, column)
                   for row, column in row_column_list]
    for subplot in targets:
        subplot.set_scalebar(location)
Show marker area scale for subplots .
14,493
def set_colorbar(self, label='', horizontal=False):
    """Enable the colorbar (attached to the last plot).

    Warns when the global point-meta limits are not both set, since the
    colorbar scale is only guaranteed correct with global limits.
    """
    limits_missing = (self.limits['mmin'] is None
                      or self.limits['mmax'] is None)
    if limits_missing:
        warnings.warn('Set (only) global point meta limits to ensure the '
                      'colorbar is correct for all subplots.')
    self.colorbar = {'label': label, 'horizontal': horizontal}
Show the colorbar it will be attached to the last plot .
14,494
def set_axis_options(self, row, column, text):
    """Set additional plain-text axis options on the subplot at (row, column)."""
    self.get_subplot_at(row, column).set_axis_options(text)
Set additionnal options as plain text .
14,495
def initialize(self, configfile=None):
    """Initialize the remote module, optionally passing a config file path.

    Sends an "initialize" request over the ZMQ socket and blocks until
    the acknowledgement arrives (the reply payload is discarded).
    """
    request = {"initialize": configfile}
    send_array(self.socket, None, request)
    recv_array(self.socket, poll=self.poll, poll_timeout=self.poll_timeout,
               flags=self.zmq_flags)
Initialize the module
14,496
def finalize(self):
    """Finalize the remote module.

    Sends a "finalize" request (payload -1) over the ZMQ socket and
    blocks until the acknowledgement arrives.
    """
    request = {"finalize": -1}
    send_array(self.socket, None, request)
    recv_array(self.socket, poll=self.poll, poll_timeout=self.poll_timeout,
               flags=self.zmq_flags)
Finalize the module
14,497
def set_current_time(self, t):
    """Set the remote simulation's current time to ``t``.

    Sends a "set_current_time" request over the ZMQ socket and blocks
    until the acknowledgement arrives.
    """
    request = {"set_current_time": t}
    send_array(self.socket, None, request)
    recv_array(self.socket, poll=self.poll, poll_timeout=self.poll_timeout,
               flags=self.zmq_flags)
Set current time of simulation
14,498
def set_var_slice(self, name, start, count, var):
    """Set a slice of the remote variable ``name`` to the values in ``var``.

    ``start`` and ``count`` describe the slice; ``var`` is sent as the
    array payload. Blocks until the acknowledgement arrives.
    """
    request = {"set_var_slice": name, "start": start, "count": count}
    send_array(self.socket, var, request)
    recv_array(self.socket, poll=self.poll, poll_timeout=self.poll_timeout,
               flags=self.zmq_flags)
Set the variable name with the values of var
14,499
def update(self, dt):
    """Advance the remote module by timestep ``dt``.

    Sends an "update" request over the ZMQ socket and blocks until the
    acknowledgement arrives.
    """
    request = {"update": dt}
    send_array(self.socket, None, request)
    recv_array(self.socket, poll=self.poll, poll_timeout=self.poll_timeout,
               flags=self.zmq_flags)
Advance the module with timestep dt