idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
14,300
def write(self):
    """Create or update this Wiki Page on Assembla.

    Requires a ``space`` attribute to be set first; PUTs when the page
    already has an ``id``, otherwise POSTs a new page.
    """
    if not hasattr(self, 'space'):
        raise AttributeError(
            "A WikiPage must have a 'space' attribute before you can "
            "write it to Assembla.")
    self.api = self.space.api
    rel_path = self.space._build_rel_path('wiki_pages')
    if self.get('id'):
        # Existing page -> update in place.
        return self.api._put_json(
            self, space=self.space, rel_path=rel_path, id_field='id')
    # New page -> create it.
    return self.api._post_json(self, space=self.space, rel_path=rel_path)
Create or update a Wiki Page on Assembla
14,301
def add(self, spec):
    """Merge the limitations of *spec* into self's ``limit_to`` list.

    Only limitations not already present are appended; first-seen order
    is preserved.
    """
    known = self.limit_to
    for limitation in spec.limit_to:
        if limitation not in known:
            known.append(limitation)
Add the limitations of the given spec to this spec's limitations.
14,302
def combine(specs):
    """Combine package specification limitations.

    Specs that compare equal are merged via ``spec.add``; the first
    occurrence acts as the representative. Returns the deduplicated
    specs as a list.
    """
    merged = {}
    for candidate in specs:
        representative = merged.get(candidate)
        if representative is None:
            merged[candidate] = candidate
        else:
            representative.add(candidate)
    return list(merged.values())
Combine package specifications limitations .
14,303
def find(self, package, **kwargs):
    """Find *package* by querying each registered finder in order.

    Returns the first truthy spec produced, or None when no finder
    matches.
    """
    for candidate_finder in self.finders:
        spec = candidate_finder.find(package, **kwargs)
        if spec:
            return spec
    return None
Find a package using package finders .
14,304
def _lazy_turbo_mapping(initial, pre_size):
    """Build a LazyPMap from *initial* key/value pairs.

    A blatant copy of ``pyrsistent._pmap._turbo_mapping`` adapted for
    lazy maps; this seems to be the only way to fully overload PMap.
    """
    size = pre_size or (2 * len(initial)) or 8
    buckets = [None] * size
    if not isinstance(initial, colls.Mapping):
        initial = dict(initial)
    for key, value in six.iteritems(initial):
        slot = hash(key) % size
        entries = buckets[slot]
        if entries:
            entries.append((key, value))
        else:
            buckets[slot] = [(key, value)]
    return LazyPMap(len(initial), ps.pvector().extend(buckets))
_lazy_turbo_mapping is a blatant copy of the pyrsistent . _pmap . _turbo_mapping function except it works for lazy maps ; this seems like the only way to fully overload PMap .
14,305
def lazy_map(initial=None, pre_size=0):
    """Create a lazy map (a blatant copy of ``pyrsistent.pmap``).

    Parameters: initial -- mapping or iterable of pairs (an already-lazy
    map is returned unchanged); pre_size -- pre-allocated bucket count,
    0 lets the size be derived from the data.
    """
    # Bug fix: the previous mutable default argument (``initial={}``)
    # shared one dict across all calls; use None as the sentinel.
    if initial is None:
        initial = {}
    if is_lazy_map(initial):
        return initial
    if not initial:
        return _EMPTY_LMAP
    return _lazy_turbo_mapping(initial, pre_size)
lazy_map is a blatant copy of the pyrsistent . pmap function and is used to create lazy maps .
14,306
def _examine_val(self, k, val):
    """Should only be called internally.

    Non-callable values pass through untouched.  A function/partial that
    takes no positional arguments is called once and its result cached
    (keyed by the callable's id) in the immutable ``_memoized`` map;
    callables that expect arguments are returned as-is.
    """
    if not isinstance(val, (types.FunctionType, partial)):
        return val
    vid = id(val)
    if vid in self._memoized:
        # Already evaluated once -- return the cached result.
        return self._memoized[vid]
    elif [] != getargspec_py27like(val)[0]:
        # Callable wants positional args: not a lazy thunk, leave it.
        return val
    else:
        val = val()
        # object.__setattr__ bypasses a frozen/overridden __setattr__.
        object.__setattr__(self, '_memoized', self._memoized.set(vid, val))
        return val
should only be called internally
14,307
def psh_fire_msg_action_if_new(sender, instance, created, **kwargs):
    """Post-save hook that queues the send task for newly created messages."""
    if not created:
        return
    from message_sender.tasks import send_message
    send_message.apply_async(kwargs={"message_id": str(instance.id)})
Post save hook to fire message send task
14,308
def update_default_channels(sender, instance, created, **kwargs):
    """Post-save hook to ensure that only one channel is the default."""
    if instance.default:
        other_defaults = Channel.objects.filter(default=True).exclude(
            channel_id=instance.channel_id)
        other_defaults.update(default=False)
Post save hook to ensure that there is only one default
14,309
def map_fit(interface, state, label, inp):
    """Calculate the partial matrices ete (x xT sums) and etde (x*y sums)
    for every sample and output them for the reduce phase.

    NOTE(review): when a label matches neither entry of state["y_map"],
    y becomes the string "Error" and the arithmetic below would raise --
    presumably inputs are pre-validated; confirm.
    """
    import numpy as np
    ete, etde = 0, 0
    out = interface.output(0)
    for row in inp:
        row = row.strip().split(state["delimiter"])
        if len(row) > 1:
            # Feature vector: selected columns with missing values mapped
            # to 0, plus a trailing -1 bias term.
            x = np.array([(0 if v in state["missing_vals"] else float(v))
                          for i, v in enumerate(row) if i in state["X_indices"]] + [-1])
            # Map the label to +/-1 via y_map.
            y = 1 if state["y_map"][0] == row[state["y_index"]] else \
                -1 if state["y_map"][1] == row[state["y_index"]] else "Error"
            ete += np.outer(x, x)
            etde += x * y
    out.add("etde", etde)
    # Emit ete row by row, keyed by row index, so reduce can re-assemble.
    for i, row in enumerate(ete):
        out.add(i, row)
Function calculates matrices ete and etde for every sample, aggregates them and outputs the result.
14,310
def reduce_fit(interface, state, label, inp):
    """Join the partially calculated matrices ETE and ETDe, aggregate
    them, and compute the final model parameters.

    Emits a single ("params", vector) pair on output 0.
    """
    import numpy as np
    out = interface.output(0)
    sum_etde = 0
    sum_ete = [0 for _ in range(len(state["X_indices"]) + 1)]
    for key, value in inp:
        if key == "etde":
            sum_etde += value
        else:
            sum_ete[key] += value
    # Bug fix: `sum_ete += np.true_divide(...)` on a *list* extended the
    # list with the rows of the regularization matrix (list.__iadd__ is
    # extend) instead of adding element-wise; convert to ndarray first.
    sum_ete = np.array(sum_ete) + np.true_divide(
        np.eye(len(sum_ete)), state["nu"])
    out.add("params", np.linalg.lstsq(sum_ete, sum_etde)[0])
Function joins all partially calculated matrices ETE and ETDe aggregates them and it calculates final parameters .
14,311
def fit(dataset, nu=0.1, save_results=True, show=False):
    """Start a Disco job that calculates linear proximal SVM parameters.

    dataset must carry a ``params`` dict describing input and labels;
    nu is the regularization parameter (must be > 0). Returns a dict
    with the fit-model URL under "linsvm_fitmodel". Raises Exception for
    a missing y_map or an invalid nu.
    """
    from disco.worker.pipeline.worker import Worker, Stage
    from disco.core import Job
    if dataset.params["y_map"] == []:
        raise Exception("Linear proximal SVM requires a target label mapping parameter.")
    try:
        nu = float(nu)
        if nu <= 0:
            # Not a ValueError, so it propagates out of this try.
            raise Exception("Parameter nu should be greater than 0")
    except ValueError:
        raise Exception("Parameter should be numerical.")
    job = Job(worker=Worker(save_results=save_results))
    # map: per-sample partial ETE/ETDe; reduce: aggregate and solve.
    job.pipeline = [
        ("split", Stage("map", input_chain=dataset.params["input_chain"],
                        init=simple_init, process=map_fit)),
        ('group_all', Stage("reduce", init=simple_init, process=reduce_fit,
                            combine=True))]
    job.params = dataset.params
    job.params["nu"] = nu
    job.run(name="linearsvm_fit", input=dataset.params["data_tag"])
    fitmodel_url = job.wait(show=show)
    return {"linsvm_fitmodel": fitmodel_url}
Function starts a job for calculation of model parameters
14,312
def predict(dataset, fitmodel_url, save_results=True, show=False):
    """Start a Disco job that makes predictions on the input data with
    a previously fitted linear SVM model.

    fitmodel_url must be the mapping returned by ``fit`` (it must
    contain the "linsvm_fitmodel" key). Returns the results URL from
    ``job.wait``.
    """
    from disco.worker.pipeline.worker import Worker, Stage
    from disco.core import Job, result_iterator
    if "linsvm_fitmodel" not in fitmodel_url:
        raise Exception("Incorrect fit model.")
    job = Job(worker=Worker(save_results=save_results))
    job.pipeline = [
        ("split", Stage("map", input_chain=dataset.params["input_chain"],
                        init=simple_init, process=map_predict))]
    job.params = dataset.params
    # reduce_fit emits exactly one "params" record; take its value.
    job.params["fit_params"] = [
        v for _, v in result_iterator(fitmodel_url["linsvm_fitmodel"])][0]
    job.run(name="linsvm_predict", input=dataset.params["data_tag"])
    return job.wait(show=show)
Function starts a job that makes predictions to input data with a given model .
14,313
def validate_redirect_url(next_url):
    """Return the path portion of *next_url* when its host is allowed.

    Relative URLs pass through unchanged; absolute URLs are accepted
    only when their domain validates against ALLOWED_HOSTS (any host in
    DEBUG). Returns None for empty or disallowed URLs, otherwise the URL
    with scheme and netloc stripped.
    """
    if not next_url:
        return None
    parts = urlparse(next_url)
    if parts.netloc:
        domain, _ = split_domain_port(parts.netloc)
        if django_settings.DEBUG:
            allowed_hosts = ['*']
        else:
            allowed_hosts = django_settings.ALLOWED_HOSTS
        if not (domain and validate_host(domain, allowed_hosts)):
            return None
    return urlunparse(
        ("", "", parts.path, parts.params, parts.query, parts.fragment))
Returns the next_url path if next_url matches allowed hosts .
14,314
def convert_currency(amount, from_currency, to_currency):
    """Convert *amount* between currencies using the latest stored rate.

    Returns the translated 'n/a' string when no rate, or no rate
    history, exists for the currency pair.
    """
    try:
        rate = CurrencyRate.objects.get(
            from_currency__iso_code=from_currency,
            to_currency__iso_code=to_currency)
    except CurrencyRate.DoesNotExist:
        return _('n/a')
    try:
        latest = rate.history.all()[0]
    except IndexError:
        return _('n/a')
    return amount * latest.value
Converts currencies .
14,315
def url_prefixed(regex, view, name=None):
    """Return a urlpattern with APP_PREFIX prepended to *regex*."""
    pattern = r'^%(app_prefix)s%(regex)s' % {
        'app_prefix': APP_PREFIX, 'regex': regex}
    return url(pattern, view, name=name)
Returns a urlpattern prefixed with the APP_NAME in debug mode .
14,316
def createDataFromFile(self, filePath, inputEncoding=None, defaultFps=None):
    """Fetch *filePath*, parse its contents and return a SubtitleData.

    When inputEncoding is None it is auto-detected from the file; when
    defaultFps is given it overrides FPS detection from the video.
    """
    file_ = File(filePath)
    if inputEncoding is None:
        inputEncoding = file_.detectEncoding()
    inputEncoding = inputEncoding.lower()
    if defaultFps is not None:
        videoInfo = VideoInfo(defaultFps)
    else:
        videoInfo = file_.detectFps()
    subtitles = self._parseFile(file_, inputEncoding, videoInfo.fps)
    data = SubtitleData()
    data.subtitles = subtitles
    data.fps = videoInfo.fps
    data.inputEncoding = inputEncoding
    # Output settings mirror the input until the user changes them.
    data.outputEncoding = inputEncoding
    data.outputFormat = self._parser.parsedFormat()
    data.videoPath = videoInfo.videoPath
    return data
Fetch a given filePath and parse its contents .
14,317
def _set_property(xml_root, name, value, properties=None):
    """Set property *name* to *value*, creating the element when missing."""
    if properties is None:
        properties = xml_root.find("properties")
    for prop in properties:
        if prop.get("name") == name:
            prop.set("value", utils.get_unicode_str(value))
            break
    else:
        # No matching property found: append a fresh one.
        etree.SubElement(
            properties, "property",
            {"name": name, "value": utils.get_unicode_str(value)})
Sets property to specified value .
14,318
def generate_response_property(name=None, value=None):
    """Generate a (name, value) response property tuple.

    Defaults: name "dump2polarion"; value a random string of 12 distinct
    lowercase letters.
    """
    if not name:
        name = "dump2polarion"
    if not value:
        value = "".join(random.sample(string.ascii_lowercase, 12))
    return (name, value)
Generates response property .
14,319
def fill_response_property(xml_root, name=None, value=None):
    """Return the response property, filling it in when missing."""
    name, value = generate_response_property(name, value)
    if xml_root.tag == "testsuites":
        return _fill_testsuites_response_property(xml_root, name, value)
    if xml_root.tag in ("testcases", "requirements"):
        return _fill_non_testsuites_response_property(xml_root, name, value)
    raise Dump2PolarionException(_NOT_EXPECTED_FORMAT_MSG)
Returns response property and fills it if missing .
14,320
def remove_response_property(xml_root):
    """Remove response properties from *xml_root* if they exist."""
    if xml_root.tag == "testsuites":
        properties = xml_root.find("properties")
        # Collect first; removing while iterating skips elements.
        stale = [prop for prop in properties
                 if "polarion-response-" in prop.get("name", "")]
        for prop in stale:
            properties.remove(prop)
    elif xml_root.tag in ("testcases", "requirements"):
        resp_properties = xml_root.find("response-properties")
        if resp_properties is not None:
            xml_root.remove(resp_properties)
    else:
        raise Dump2PolarionException(_NOT_EXPECTED_FORMAT_MSG)
Removes response properties if exist .
14,321
def remove_property(xml_root, partial_name):
    """Remove all properties whose name contains *partial_name*."""
    if xml_root.tag not in ("testsuites", "testcases", "requirements"):
        raise Dump2PolarionException(_NOT_EXPECTED_FORMAT_MSG)
    properties = xml_root.find("properties")
    doomed = [prop for prop in properties
              if partial_name in prop.get("name", "")]
    for prop in doomed:
        properties.remove(prop)
Removes properties if exist .
14,322
def set_lookup_method(xml_root, value):
    """Change the lookup-method property of *xml_root* to *value*."""
    property_names = {"testsuites": "polarion-lookup-method",
                      "testcases": "lookup-method",
                      "requirements": "lookup-method"}
    try:
        prop_name = property_names[xml_root.tag]
    except KeyError:
        raise Dump2PolarionException(_NOT_EXPECTED_FORMAT_MSG)
    _set_property(xml_root, prop_name, value)
Changes lookup method .
14,323
def set_dry_run(xml_root, value=True):
    """Set dry-run so records are not updated and only a log is produced.

    Raises ValueError for values that do not map to "true"/"false"
    (previously an ``assert``, which silently vanishes under ``-O``).
    """
    value_str = str(value).lower()
    if value_str not in ("true", "false"):
        raise ValueError("Invalid dry-run value: {!r}".format(value))
    if xml_root.tag == "testsuites":
        _set_property(xml_root, "polarion-dry-run", value_str)
    elif xml_root.tag in ("testcases", "requirements"):
        _set_property(xml_root, "dry-run", value_str)
    else:
        raise Dump2PolarionException(_NOT_EXPECTED_FORMAT_MSG)
Sets dry-run so records are not updated and only a log file is produced.
14,324
def get_environ(cls, prefix):
    """Yield (name, value) pairs for env vars in the *prefix* namespace.

    The "PREFIX_" part is stripped from each returned name.
    """
    marker = '%s_' % prefix
    return ((key[len(marker):], value)
            for key, value in os.environ.items()
            if key.startswith(marker))
Retrieves environment variables from a namespace .
14,325
def get_bool(self, name, default=None):
    """Retrieve an environment variable value as a bool.

    Falls back to *default* when the variable is missing; raises via
    EnvironmentError.not_found when missing and no default was given.
    """
    if name in self:
        return bool(self.get_int(name))
    if default is None:
        raise EnvironmentError.not_found(self._prefix, name)
    return default
Retrieves an environment variable value as bool .
14,326
def get_dict(self, name, default=None):
    """Retrieve an environment variable value as a dictionary.

    Falls back to *default* when missing; raises via
    EnvironmentError.not_found when missing and no default was given.
    """
    if name in self:
        return dict(**self.get(name))
    if default is None:
        raise EnvironmentError.not_found(self._prefix, name)
    return default
Retrieves an environment variable value as a dictionary .
14,327
def get_int(self, name, default=None):
    """Retrieve an environment variable as an integer.

    Falls back to *default* when missing; raises via
    EnvironmentError.not_found when missing and no default was given.
    """
    if name in self:
        return int(self[name])
    if default is None:
        raise EnvironmentError.not_found(self._prefix, name)
    return default
Retrieves an environment variable as an integer .
14,328
def get_list(self, name, default=None):
    """Retrieve an environment variable as a list.

    Falls back to *default* when missing; raises via
    EnvironmentError.not_found when missing and no default was given.
    """
    if name in self:
        return list(self[name])
    if default is None:
        raise EnvironmentError.not_found(self._prefix, name)
    return default
Retrieves an environment variable as a list .
14,329
def get_path(self, name, default=None):
    """Retrieve an environment variable as a filesystem path.

    Falls back to *default* when missing; raises via
    EnvironmentError.not_found when missing and no default was given.
    """
    if name in self:
        return pathlib.Path(self[name])
    if default is None:
        raise EnvironmentError.not_found(self._prefix, name)
    return default
Retrieves an environment variable as a filesystem path .
14,330
def refresh(self):
    """Update all environment variables from os.environ.

    Re-reads every variable under this instance's prefix and merges the
    (name, value) pairs into the mapping via the parent class update.
    """
    super(Habitat, self).update(self.get_environ(self._prefix))
Update all environment variables from os . environ .
14,331
def _transform_result ( self , result ) : if self . _transform_func : result = self . _transform_func ( result ) return result or None
Calls transform function on result .
14,332
def _get_verdict(result):
    """Return the normalized verdict of the testcase, or None.

    Verdicts not listed in PASS/FAIL/SKIP/WAIT are treated as missing.
    """
    raw = result.get("verdict")
    if not raw:
        return None
    normalized = raw.strip().lower()
    known = Verdicts.PASS + Verdicts.FAIL + Verdicts.SKIP + Verdicts.WAIT
    if normalized in known:
        return normalized
    return None
Gets verdict of the testcase .
14,333
def _set_lookup_prop(self, result_data):
    """Set the lookup property from processed testcases if not configured.

    Prefers "id" over "name" (derived from title); no-op when already
    set or when neither key is usable.
    """
    if self._lookup_prop:
        return
    for key, prop in (("id", "id"), ("title", "name")):
        if result_data.get(key):
            self._lookup_prop = prop
            logger.debug("Setting lookup method for xunit to `%s`",
                         self._lookup_prop)
            return
Set lookup property based on processed testcases if not configured .
14,334
def _fill_out_err(result, testcase):
    """Attach system-out / system-err children when stdout/stderr exist."""
    for key, tag in (("stdout", "system-out"), ("stderr", "system-err")):
        text = result.get(key)
        if text:
            element = etree.SubElement(testcase, tag)
            element.text = utils.get_unicode_str(text)
Adds stdout and stderr if present .
14,335
def _fill_properties(verdict, result, testcase, testcase_id, testcase_title):
    """Add the polarion property elements to a testcase element.

    Always writes polarion-testcase-id (falling back to the title), adds
    the comment property only for passing results, and emits one
    polarion-parameter-* property per recorded parameter.
    """
    properties = etree.SubElement(testcase, "properties")
    etree.SubElement(
        properties,
        "property",
        {"name": "polarion-testcase-id", "value": testcase_id or testcase_title},
    )
    # Comments are attached only for passed tests that carry one.
    if verdict in Verdicts.PASS and result.get("comment"):
        etree.SubElement(
            properties,
            "property",
            {
                "name": "polarion-testcase-comment",
                "value": utils.get_unicode_str(result["comment"]),
            },
        )
    for param, value in six.iteritems(result.get("params") or {}):
        etree.SubElement(
            properties,
            "property",
            {
                "name": "polarion-parameter-{}".format(param),
                "value": utils.get_unicode_str(value),
            },
        )
Adds properties into testcase element .
14,336
def export(self):
    """Assemble and return the XUnit XML as a prettified string."""
    root = self._top_element()
    properties = self._properties_element(root)
    testsuite = self._testsuite_element(root)
    self._fill_tests_results(testsuite)
    # The lookup prop is only known after results were processed.
    self._fill_lookup_prop(properties)
    return utils.prettify_xml(root)
Returns XUnit XML .
14,337
def parse(log_file):
    """Parse a log file, dispatching to the matching parser class.

    Scans the file until a line identifies the import type, then hands
    the *same open file object* (positioned just after the marker line)
    to the chosen parser, so parsing continues where detection stopped.
    Raises Dump2PolarionException when no marker line is found.
    """
    with io.open(os.path.expanduser(log_file), encoding="utf-8") as input_file:
        for line in input_file:
            if "Starting import of XUnit results" in line:
                obj = XUnitParser
                break
            elif "Starting import of test cases" in line:
                obj = TestcasesParser
                break
            elif "Starting import of requirements" in line:
                obj = RequirementsParser
                break
        else:
            # for/else: no break happened, i.e. no marker line matched.
            raise Dump2PolarionException(
                "No valid data found in the log file '{}'".format(log_file))
        return obj(input_file, log_file).parse()
Parse log file .
14,338
def get_result(self, line):
    """Extract a LogItem (work item name, id, custom id) from *line*.

    Returns None when the line does not match RESULT_SEARCH.
    """
    match = self.RESULT_SEARCH.search(line)
    try:
        name, ids = match.group(1), match.group(2)
    except (AttributeError, IndexError):
        return None
    id_parts = ids.split("/")
    tc_id = id_parts[0]
    custom_id = id_parts[1] if len(id_parts) > 1 else None
    return LogItem(name, tc_id, custom_id)
Gets work item name and id .
14,339
def get_result_warn(self, line):
    """Return a LogItem for a work item that failed to import, or None.

    Tries the plain warning pattern first, then the custom-id variant.
    """
    match = self.RESULT_WARN_SEARCH.search(line)
    try:
        return LogItem(match.group(1), None, None)
    except (AttributeError, IndexError):
        pass
    match = self.RESULT_WARN_SEARCH_CUSTOM.search(line)
    try:
        return LogItem(match.group(1), None, match.group(2))
    except (AttributeError, IndexError):
        return None
Gets work item name of item that was not successfully imported .
14,340
def get_requirement(self, line):
    """Extract the requirement name and id from *line*, or None."""
    match = self.REQ_SEARCH.search(line)
    if match is None:
        return None
    try:
        return LogItem(match.group(1), match.group(2), None)
    except IndexError:
        return None
Gets requirement name and id .
14,341
def get_requirement_warn(self, line):
    """Return the name of a requirement that failed to import, or None."""
    match = self.REQ_WARN_SEARCH.search(line)
    if match is None:
        return None
    try:
        return LogItem(match.group(1), None, None)
    except IndexError:
        return None
Gets name of test case that was not successfully imported .
14,342
def load_config(app_name, *args, **kwargs):
    """Load the config files named in *args* and return them as one dict.

    Each file holds ini-like ``NAME = value`` lines. Files are fetched
    from S3 when the settings location is an s3:// URL (best effort),
    otherwise from the local filesystem, optionally decrypted with a
    passphrase from <APP>_SETTINGS_CRYPT_KEY / SETTINGS_CRYPT_KEY.

    Keyword args: prefix (default 'etc'), verbose, location, passphrase.

    SECURITY NOTE(review): values are evaluated with ``eval`` -- the
    config files must be trusted input.
    """
    configure_logging()
    prefix = kwargs.get('prefix', 'etc')
    verbose = kwargs.get('verbose', False)
    location = kwargs.get('location', None)
    passphrase = kwargs.get(
        'passphrase',
        os.getenv("%s_SETTINGS_CRYPT_KEY" % app_name.upper(),
                  os.getenv("SETTINGS_CRYPT_KEY", None)))
    confnames = args
    # Location falls back to the app-specific, then generic, env var.
    if not location:
        location = os.getenv("%s_SETTINGS_LOCATION" % app_name.upper(), None)
    if not location:
        location = os.getenv("SETTINGS_LOCATION", None)
    if location:
        location = "%s/%s" % (location, app_name)
    config = {}
    for confname in confnames:
        content = None
        if location and location.startswith('s3://'):
            try:
                import boto
                _, bucket_name, prefix = urlparse(location)[:3]
                try:
                    conn = boto.connect_s3()
                    bucket = conn.get_bucket(bucket_name)
                    key_name = '%s/%s' % (prefix, confname)
                    key = bucket.get_key(key_name)
                    content = key.get_contents_as_string()
                    if verbose:
                        LOGGER.info("config loaded from 's3://%s/%s'",
                                    bucket_name, key_name)
                except (boto.exception.NoAuthHandlerFound,
                        boto.exception.S3ResponseError) as _:
                    # Best effort: fall through to filesystem lookup.
                    pass
            except ImportError:
                pass
        if not content:
            confpath = locate_config(confname, app_name, location=location,
                                     prefix=prefix, verbose=verbose)
            if confpath:
                with open(confpath, 'rb') as conffile:
                    content = conffile.read()
        if content:
            if passphrase:
                content = crypt.decrypt(content, passphrase)
            if hasattr(content, 'decode'):
                content = content.decode('utf-8')
            for line in content.split('\n'):
                if not line.startswith('#'):
                    look = re.match(r'(\w+)\s*=\s*(.*)', line)
                    if look:
                        try:
                            # eval of trusted config values only.
                            config.update({look.group(1).upper():
                                           eval(look.group(2), {}, {})})
                        except Exception:
                            raise
    return config
Given a path to a file parse its lines in ini - like format and then set them in the current namespace .
14,343
def close(account_id: str) -> None:
    """Close the account with primary key *account_id* atomically."""
    logger.info('closing-account', account_id=account_id)
    with transaction.atomic():
        closing_account = Account.objects.get(pk=account_id)
        closing_account.close()
        closing_account.save()
Closes the account .
14,344
def create_invoices(account_id: str, due_date: date) -> Sequence[Invoice]:
    """Create the invoices for any due positive charges in the account.

    If there are due positive charges in different currencies, one
    invoice is created per currency; the matching uninvoiced charges are
    attached inside a single transaction. Sends invoice_ready for each
    created invoice and returns the list of invoices.
    """
    invoices = []
    with transaction.atomic():
        due_charges = Charge.objects.uninvoiced(account_id=account_id).charges()
        total = total_amount(due_charges)
        for amount_due in total.monies():
            if amount_due.amount > 0:
                invoice = Invoice.objects.create(account_id=account_id,
                                                 due_date=due_date)
                # Re-query so only still-uninvoiced charges in this
                # currency get linked to the new invoice.
                Charge.objects.uninvoiced(account_id=account_id).charges().in_currency(currency=amount_due.currency).update(invoice=invoice)
                invoices.append(invoice)
    logger.info('created-invoices', account_id=str(account_id),
                invoice_ids=[i.pk for i in invoices])
    # Signal listeners only after the transaction has committed.
    for invoice in invoices:
        invoice_ready.send(sender=create_invoices, invoice=invoice)
    return invoices
Creates the invoices for any due positive charges in the account . If there are due positive charges in different currencies one invoice is created for each currency .
14,345
def add_charge(account_id: str, amount: Money,
               reverses_id: Optional[str] = None,
               product_code: Optional[str] = None,
               product_properties: Optional[Dict[str, str]] = None) -> Charge:
    """Add a charge to the account.

    Validates and inserts the Charge (and any ProductProperty rows)
    inside one transaction and returns the saved Charge.
    """
    logger.info('adding-charge', account_id=account_id, amount=amount,
                product_code=product_code,
                product_properties=product_properties)
    with transaction.atomic():
        charge = Charge(account_id=account_id, amount=amount)
        if reverses_id:
            charge.reverses_id = reverses_id
        if product_code:
            charge.product_code = product_code
        # id/account excluded: id is unset until save; account is
        # supplied via account_id.
        charge.full_clean(exclude=['id', 'account'])
        charge.save(force_insert=True)
        if product_properties:
            objs = [ProductProperty(charge=charge, name=k, value=v)
                    for k, v in product_properties.items()]
            # bulk_create skips model validation, so clean each first.
            for o in objs:
                o.full_clean(exclude=['id', 'charge'])
            ProductProperty.objects.bulk_create(objs)
    return charge
Add a charge to the account .
14,346
def build_tree(self):
    """Build the Python packages tree from the collected specs."""
    for node_spec in self.specs:
        if node_spec.ismodule:
            self.modules.append(
                Module(node_spec.name, node_spec.path, dsm=self))
        else:
            self.packages.append(Package(
                node_spec.name, node_spec.path, dsm=self,
                limit_to=node_spec.limit_to, build_tree=True,
                build_dependencies=False, enforce_init=self.enforce_init))
Build the Python packages tree .
14,347
def split_limits_heads(self):
    """Split dotted limit strings into first parts and remainders.

    Returns (heads, new_limit_to): heads holds the first dotted part of
    every limit; new_limit_to holds the remainders of limits that
    contained a dot.
    """
    heads = []
    tails = []
    for limit in self.limit_to:
        head, dot, rest = limit.partition('.')
        heads.append(head)
        if dot:
            tails.append(rest)
    return heads, tails
Return first parts of dot - separated strings and rest of strings .
14,348
def build_tree(self):
    """Build the tree of modules and sub-packages for this package.

    Scans the package directory: *.py files become Modules (honoring
    limit_to), and subdirectories become sub-Packages when they look
    like packages (contain __init__.py, unless enforce_init is off) and
    pass the heads filter derived from the dotted limits.
    """
    for m in listdir(self.path):
        abs_m = join(self.path, m)
        if isfile(abs_m) and m.endswith('.py'):
            name = splitext(m)[0]
            # Only keep modules allowed by limit_to (empty = keep all).
            if not self.limit_to or name in self.limit_to:
                self.modules.append(Module(name, abs_m, self.dsm, self))
        elif isdir(abs_m):
            if isfile(join(abs_m, '__init__.py')) or not self.enforce_init:
                heads, new_limit_to = self.split_limits_heads()
                if not heads or m in heads:
                    self.packages.append(
                        Package(m, abs_m, self.dsm, self, new_limit_to,
                                build_tree=True, build_dependencies=False,
                                enforce_init=self.enforce_init))
Build the tree for this package .
14,349
def cardinal(self, to):
    """Return the number of dependencies of this package to *to*,
    summed over all submodules.
    """
    total = 0
    for submodule in self.submodules:
        total += submodule.cardinal(to)
    return total
Return the number of dependencies of this package to the given node .
14,350
def build_dependencies(self):
    """Build the dependencies for this module.

    Resolves each parsed import against the highest known node (the DSM
    or the root package); when this module *is* the highest node a
    LeafNode stub is used so lookups simply fail. When an import
    resolves, records the imported member as 'what' if the target turned
    out to be its parent; a Dependency is appended for every resolved
    import.
    """
    highest = self.dsm or self.root
    if self is highest:
        highest = LeafNode()
    for _import in self.parse_code():
        target = highest.get_target(_import['target'])
        if target:
            what = _import['target'].split('.')[-1]
            if what != target.name:
                _import['what'] = what
            _import['target'] = target
            self.dependencies.append(Dependency(source=self, **_import))
Build the dependencies for this module .
14,351
def parse_code(self):
    """Read the module source and return its import statements.

    Returns [] when the source cannot be parsed even after an encode
    round-trip.
    """
    # Bug fix: the file handle was opened without ever being closed;
    # use a context manager.
    with open(self.path, encoding='utf-8') as source_file:
        code = source_file.read()
    try:
        body = ast.parse(code).body
    except SyntaxError:
        try:
            # Retry with bytes; ast.parse accepts encoded source.
            code = code.encode('utf-8')
            body = ast.parse(code).body
        except SyntaxError:
            return []
    return self.get_imports(body)
Read the source code and return all the import statements .
14,352
def cardinal(self, to):
    """Count this module's internal dependencies whose target is in *to*."""
    return len([dep for dep in self.dependencies
                if not dep.external and dep.target in to])
Return the number of dependencies of this module to the given node .
14,353
def generate_urls(self, first_url, last_url):
    """Generate URLs in split-command fashion between two endpoints.

    Given e.g. ...xaaaaa and ...xaaaac, produces every URL from the
    all-'a' suffix up to and including the last URL's suffix. Both URLs
    must be http, share the suffix length and carry a single extension
    dot.
    """
    first_parts = first_url.split("/")
    last_parts = last_url.split("/")
    if first_parts[0].lower() != "http:" or last_parts[0].lower() != "http:":
        raise Exception("URLs should be accessible via HTTP.")
    url_base = "/".join(first_parts[:-1])
    # The generated suffix starts at the first 'a' in the filename.
    start_index = first_parts[-1].index("a")
    url_base += "/" + first_parts[-1][:start_index]
    start = first_parts[-1][start_index:]
    finish = last_parts[-1][start_index:]
    if start.count(".") != 1 or finish.count(".") != 1:
        raise Exception("URLs does not have the same pattern.")
    start, extension = start.split(".")
    finish = finish.split(".")[0]
    if len(start) != len(finish):
        raise Exception("Filenames in url should have the same length.")
    file_extension = "." + extension
    alphabet = "abcdefghijklmnopqrstuvwxyz"
    urls = []
    for combo in itertools.product(alphabet, repeat=len(start)):
        suffix = "".join(combo)
        urls.append([url_base + suffix + file_extension])
        if suffix == finish:
            break
    return urls
Function generates URLs in split command fashion . If first_url is xaaaaa and last_url is xaaaac it will automatically generate xaaaab .
14,354
def fetch_raw_data(sql, connection, geometry):
    """Fetch coastdat2 data, adapt it to the local time zone and index it.

    Runs *sql* on *connection*, converts the point geometry WKT to
    shapely objects, rolls each hourly series by the UTC offset of the
    local timezone (derived from *geometry*) and attaches a tz-aware
    hourly DatetimeIndex for the series' year. Returns the DataFrame.
    """
    tmp_dc = {}
    weather_df = pd.DataFrame(
        connection.execute(sql).fetchall(),
        columns=['gid', 'geom_point', 'geom_polygon', 'data_id',
                 'time_series', 'dat_id', 'type_id', 'type', 'height',
                 'year', 'leap_year']).drop('dat_id', 1)
    tz = tools.tz_from_geom(connection, geometry)
    for ix in weather_df.index:
        # WKT string -> shapely geometry.
        weather_df.loc[ix, 'geom_point'] = wkt_loads(
            weather_df['geom_point'][ix])
        utc = timezone('utc')
        # Whole-hour offset of the local zone, sampled at 2002-01-01.
        offset = int(utc.localize(datetime(2002, 1, 1)).astimezone(
            timezone(tz)).strftime("%z")[:-2])
        db_year = weather_df.loc[ix, 'year']
        db_len = len(weather_df['time_series'][ix])
        # Shift the series so it lines up with local wall-clock time.
        tmp_dc[ix] = pd.Series(
            np.roll(np.array(weather_df['time_series'][ix]), offset),
            index=pd.date_range(pd.datetime(db_year, 1, 1, 0),
                                periods=db_len, freq='H', tz=tz))
    weather_df['time_series'] = pd.Series(tmp_dc)
    return weather_df
Fetch the coastdat2 from the database adapt it to the local time zone and create a time index .
14,355
def create_single_weather(df, rename_dc):
    """Create one oemof FeedinWeather object for the given geometry.

    Each row of *df* holds one parameter time series; rename_dc maps the
    database type names to weather-attribute column names. NaN heights
    are stored as 0. Location metadata is taken from the first row's
    point geometry.
    """
    my_weather = weather.FeedinWeather()
    data_height = {}
    name = None
    # All series share the index of the first one.
    weather_df = pd.DataFrame(index=df.time_series.iloc[0].index)
    for row in df.iterrows():
        key = rename_dc[row[1].type]
        weather_df[key] = row[1].time_series
        data_height[key] = row[1].height if not np.isnan(row[1].height) else 0
        name = row[1].gid
    my_weather.data = weather_df
    my_weather.timezone = weather_df.index.tz
    my_weather.longitude = df.geom_point.iloc[0].x
    my_weather.latitude = df.geom_point.iloc[0].y
    my_weather.geometry = df.geom_point.iloc[0]
    my_weather.data_height = data_height
    my_weather.name = name
    return my_weather
Create an oemof weather object for the given geometry
14,356
def create_multi_weather(df, rename_dc):
    """Create one oemof weather object per unique gid in *df*.

    Used when the given geometry is a polygon covering several points.
    """
    return [create_single_weather(df[df.gid == gid], rename_dc)
            for gid in df.gid.unique()]
Create a list of oemof weather objects if the given geometry is a polygon
14,357
def predict(tree, x, y=[], dist=False):
    """Make a prediction for one sample *x* with a tree model.

    Walks the tree from the root (node 1): "c" nodes split on a numeric
    threshold, other nodes on set membership (falling back to the merged
    class distribution of both children when neither side matches).

    Returns: the class distribution (Counter) when dist is True; the
    predicted label when no y label is given; otherwise (node_id, margin)
    for the leaf that produced the prediction.
    Note: the y=[] default is safe here -- it is never mutated.
    """
    node_id = 1
    while 1:
        nodes = tree[node_id]
        if nodes[0][5] == "c":
            # Continuous split on feature nodes[0][1].
            if x[nodes[0][1]] <= nodes[0][2]:
                index, node_id = 0, nodes[0][0]
            else:
                index, node_id = 1, nodes[1][0]
        else:
            # Discrete split: membership test against each branch.
            if x[nodes[0][1]] in nodes[0][2]:
                index, node_id = 0, nodes[0][0]
            elif x[nodes[1][1]] in nodes[1][2]:
                index, node_id = 1, nodes[1][0]
            else:
                # Unseen value: merge both children's distributions.
                node_id = str(nodes[0][0]) + "," + str(nodes[1][0])
                index, nodes = 0, [[0, 0, 0, {
                    k: nodes[0][3].get(k, 0) + nodes[1][3].get(k, 0)
                    for k in set(nodes[0][3]) | set(nodes[1][3])}]]
        if node_id in tree.keys():
            continue
        counts = nodes[index][3]
        if dist:
            suma = sum(counts.values())
            # Bug fix: dict.iteritems() is Python 2 only.
            return Counter({k: v / float(suma) for k, v in counts.items()})
        prediction = max(counts, key=counts.get)
        if y == []:
            return prediction
        # Bug fix: materialize dict views for numpy (Python 3 compat).
        values = list(counts.values())
        probs = sorted(zip(list(counts.keys()),
                           np.true_divide(values, np.sum(values))),
                       key=itemgetter(1), reverse=True)
        if prediction == y:
            margin = probs[0][1] - probs[1][1] if len(probs) > 1 else 1
        else:
            margin = dict(probs).get(y, 0) - probs[0][1]
        return node_id, margin
Function makes a prediction of one sample with a tree model . If y label is defined it returns node identifier and margin .
14,358
def __addTab(self, filePath):
    """Return an existing tab index for *filePath*, or create a new tab.

    Reuses an already-open (non-static) editor tab when one exists for
    the path; otherwise creates a SubtitleEditor tab, wires its
    clean-state signal to the tab title and returns the new index.
    """
    for i in range(self.tabBar.count()):
        widget = self.pages.widget(i)
        if not widget.isStatic and filePath == widget.filePath:
            return i
    tab = SubtitleEditor(filePath, self._subtitleData, self)
    newIndex = self.tabBar.addTab(
        self._createTabName(tab.name, tab.history.isClean()))
    # Keep the tab title in sync with the editor's undo-clean state.
    tab.history.cleanChanged.connect(
        lambda clean: self._cleanStateForFileChanged(filePath, clean))
    self.pages.addWidget(tab)
    return newIndex
Returns existing tab index . Creates a new one if it isn t opened and returns its index otherwise .
14,359
def find_and_parse_config(config, default_config='default.yaml'):
    """Find the service configuration file and parse it.

    Also loads a default configuration whose values are overwritten by
    the actual configuration found on the given path. Raises ValueError
    when neither file yields data.

    SECURITY NOTE(review): yaml.load without an explicit Loader is
    unsafe on untrusted files (and deprecated in PyYAML >= 5.1);
    consider yaml.safe_load.
    """
    def load_config(path):
        # Returns None (implicitly) when path is not a regular file.
        if os.path.isfile(path):
            with open(path, 'r') as f:
                config_dict_ = yaml.load(f)
            return config_dict_
    config_path = find_file(config)
    default_path = find_file(default_config)
    config = load_config(config_path)
    default_config = load_config(default_path)
    if config is None and default_config is None:
        raise ValueError('Both config and default_config return None')
    if config is None:
        config_dict = default_config
    elif default_config is None:
        config_dict = config
    else:
        # Actual config values override the defaults.
        config_dict = merge(default_config, config)
    return config_dict
Finds the service configuration file and parses it . Checks also a directory called default to check for default configuration values that will be overwritten by the actual configuration found on given path .
14,360
def parse(s, subs):
    """Parse *s* and create a list of SyncPoints for *subs*.

    A lone OFFSET request shifts all subtitles; anything else is a full
    synchronization. Returns [] when there are no subtitles.
    """
    if len(subs) == 0:
        return []
    requests = _tokenize_request(s)
    is_plain_offset = (len(requests) == 1
                       and requests[0].type_ == _Request.Type.OFFSET)
    if is_plain_offset:
        return _offset_subtitles(requests[0], subs)
    return _sync_subtitles(requests, subs)
Parses a given string and creates a list of SyncPoints .
14,361
def full_name_natural_split(full_name):
    """Split a full name into (first_name, middle_initials, last_name).

    A leading "El" is folded into the first name; a trailing roman
    numeral suffix (I, II, III) is folded into the last name; middle
    names are reduced to their initials.
    """
    parts = full_name.strip().split(' ')
    first_name = ""
    if parts:
        first_name = parts.pop(0)
        if first_name.lower() == "el" and parts:
            first_name += " " + parts.pop(0)
    last_name = ""
    if parts:
        last_name = parts.pop()
        # Bug fix: the old condition `a or b or (c and parts)` could pop
        # from an empty list for "I"/"II" suffixes (IndexError); check
        # membership and non-emptiness explicitly.
        if last_name.lower() in ('i', 'ii', 'iii') and parts:
            last_name = parts.pop() + " " + last_name
    middle_initials = ""
    for middle_name in parts:
        if middle_name:
            middle_initials += middle_name[0]
    return first_name, middle_initials, last_name
This function splits a full name into a natural first name last name and middle initials .
14,362
def _get_xml_value ( value ) : retval = [ ] if isinstance ( value , dict ) : for key , value in value . iteritems ( ) : retval . append ( '<' + xml_escape ( str ( key ) ) + '>' ) retval . append ( _get_xml_value ( value ) ) retval . append ( '</' + xml_escape ( str ( key ) ) + '>' ) elif isinstance ( value , list ) : for key , value in enumerate ( value ) : retval . append ( '<child order="' + xml_escape ( str ( key ) ) + '">' ) retval . append ( _get_xml_value ( value ) ) retval . append ( '</child>' ) elif isinstance ( value , bool ) : retval . append ( xml_escape ( str ( value ) . lower ( ) ) ) elif isinstance ( value , unicode ) : retval . append ( xml_escape ( value . encode ( 'utf-8' ) ) ) else : retval . append ( xml_escape ( str ( value ) ) ) return "" . join ( retval )
Convert an individual value to an XML string . Calls itself recursively for dictionaries and lists .
14,363
def fetch_changes ( repo_path , up_commit = 'master' ) : last_up_commit = None prevcwd = os . getcwd ( ) try : gitexe = 'git' os . chdir ( repo_path ) old_sources_timestamp = sources_latest_timestamp ( '.' ) shell_command ( [ gitexe , 'pull' ] ) last_up_commit = subprocess . check_output ( [ 'git' , 'rev-parse' , 'HEAD' ] ) shell_command ( [ gitexe , 'checkout' , up_commit ] ) up_commit = subprocess . check_output ( [ 'git' , 'rev-parse' , 'HEAD' ] ) new_sources_timestamp = sources_latest_timestamp ( '.' ) if old_sources_timestamp < new_sources_timestamp : with open ( '.timestamp' , 'w' ) as up_commit_file : up_commit_file . write ( up_commit ) finally : os . chdir ( prevcwd ) return last_up_commit , up_commit
Fetch latest changes from stage and touch . timestamp if any python sources have been modified .
14,364
def migrate_all ( ) : if 'south' in settings . INSTALLED_APPS : return _south_migrate_all ( ) from django . core . management . commands import makemigrations , migrate schema_args = [ sys . executable , 'makemigrations' ] for app in settings . INSTALLED_APPS : if not app . startswith ( 'django' ) : schema_args += [ app ] schema_cmd = makemigrations . Command ( ) schema_cmd . run_from_argv ( schema_args ) migrate_cmd = migrate . Command ( ) sys . stderr . write ( "MIGRATE ALL!\n" ) return migrate_cmd . run_from_argv ( [ sys . executable , 'migrate' ] )
Create schema migrations for all apps specified in INSTALLED_APPS then run a migrate command .
14,365
def add ( self , username , user_api , filename = None ) : keys = API . __get_keys ( filename ) user = user_api . find ( username ) [ 0 ] distinguished_name = user . entry_dn if 'ldapPublicKey' not in user . objectClass : raise ldap3 . core . exceptions . LDAPNoSuchAttributeResult ( 'LDAP Public Key Object Class not found. ' + 'Please ensure user was created correctly.' ) else : for key in list ( set ( keys ) ) : print ( key ) try : SSHKey ( key ) . parse ( ) except Exception as err : raise err from None else : operation = { 'sshPublicKey' : [ ( ldap3 . MODIFY_ADD , [ key ] ) ] } self . client . modify ( distinguished_name , operation )
Add SSH public key to a user s profile .
14,366
def remove ( self , username , user_api , filename = None , force = False ) : self . keys = API . __get_keys ( filename ) self . username = username user = user_api . find ( username ) [ 0 ] if not force : self . __confirm ( ) for key in self . __delete_keys ( ) : operation = { 'sshPublicKey' : [ ( ldap3 . MODIFY_DELETE , [ key ] ) ] } self . client . modify ( user . entry_dn , operation )
Remove specified SSH public key from specified user .
14,367
def get_keys_from_ldap ( self , username = None ) : result_dict = { } filter = [ '(sshPublicKey=*)' ] if username is not None : filter . append ( '(uid={})' . format ( username ) ) attributes = [ 'uid' , 'sshPublicKey' ] results = self . client . search ( filter , attributes ) for result in results : result_dict [ result . uid . value ] = result . sshPublicKey . values return result_dict
Fetch keys from ldap .
14,368
def add ( config , username , filename ) : try : client = Client ( ) client . prepare_connection ( ) user_api = UserApi ( client ) key_api = API ( client ) key_api . add ( username , user_api , filename ) except ( ldap3 . core . exceptions . LDAPNoSuchAttributeResult , ldap_tools . exceptions . InvalidResult , ldap3 . core . exceptions . LDAPAttributeOrValueExistsResult ) as err : print ( '{}: {}' . format ( type ( err ) , err . args [ 0 ] ) ) except Exception as err : raise err from None
Add user s SSH public key to their LDAP entry .
14,369
def remove ( config , username , filename , force ) : client = Client ( ) client . prepare_connection ( ) user_api = UserApi ( client ) key_api = API ( client ) key_api . remove ( username , user_api , filename , force )
Remove user s SSH public key from their LDAP entry .
14,370
def install ( config ) : client = Client ( ) client . prepare_connection ( ) key_api = API ( client ) key_api . install ( )
Install user s SSH public key to the local system .
14,371
def show ( config , username ) : client = Client ( ) client . prepare_connection ( ) key_api = API ( client ) for key , value in key_api . get_keys_from_ldap ( username ) . items ( ) : print ( value )
Show a user s SSH public key from their LDAP entry .
14,372
def main ( ) : logging . basicConfig ( ) logger . info ( "mmi-runner" ) warnings . warn ( "You are using the mmi-runner script, please switch to `mmi runner`" , DeprecationWarning ) arguments = docopt . docopt ( __doc__ ) kwargs = parse_args ( arguments ) runner = mmi . runner . Runner ( ** kwargs ) runner . run ( )
run mmi runner
14,373
def load_requires_from_file ( filepath ) : with open ( filepath ) as fp : return [ pkg_name . strip ( ) for pkg_name in fp . readlines ( ) ]
Read a package list from a given file path .
14,374
def get_tour_list ( self ) : resp = json . loads ( urlopen ( self . tour_list_url . format ( 1 ) ) . read ( ) . decode ( 'utf-8' ) ) total_count = resp [ 'response' ] [ 'body' ] [ 'totalCount' ] resp = json . loads ( urlopen ( self . tour_list_url . format ( total_count ) ) . read ( ) . decode ( 'utf-8' ) ) data = resp [ 'response' ] [ 'body' ] [ 'items' ] [ 'item' ] keychain = { 'contentid' : ( 'content_id' , None ) , 'contenttypeid' : ( 'content_type_id' , None ) , 'title' : ( 'title' , None ) , 'addr1' : ( 'address' , None ) , 'zipcode' : ( 'zipcode' , None ) , 'sigungucode' : ( 'municipality' , None ) , 'mapx' : ( 'x' , None ) , 'mapy' : ( 'y' , None ) , 'cat1' : ( 'main_category' , None ) , 'cat2' : ( 'middle_category' , None ) , 'cat3' : ( 'small_category' , None ) , 'readcount' : ( 'views' , 0 ) , 'tel' : ( 'tel' , None ) , 'firstimage' : ( 'image' , None ) , } for tour in data : _dict_key_changer ( tour , keychain ) tour [ 'creation_date' ] = str ( tour . pop ( 'createdtime' ) ) [ : 8 ] if 'createdtime' in tour else None tour [ 'modified_date' ] = str ( tour . pop ( 'modifiedtime' ) ) [ : 8 ] if 'modifiedtime' in tour else None tour . pop ( 'areacode' , None ) tour . pop ( 'addr2' , None ) tour . pop ( 'mlevel' , None ) return data
Inquire all tour list
14,375
def get_detail_common ( self , content_id ) : resp = json . loads ( urlopen ( self . detail_common_url . format ( str ( content_id ) ) ) . read ( ) . decode ( 'utf-8' ) ) data = resp [ 'response' ] [ 'body' ] [ 'items' ] [ 'item' ] keychain = { 'contenttypeid' : ( 'content_type_id' , None ) , 'overview' : ( 'overview' , None ) , 'tel' : ( 'tel' , None ) , 'telname' : ( 'tel_owner' , None ) , 'booktour' : ( 'in_book' , 0 ) } _dict_key_changer ( data , keychain ) try : data [ 'homepage' ] = re . findall ( 'http\w?://[\w|.]+' , data . pop ( 'homepage' ) ) [ 0 ] if 'homepage' in data else None except IndexError : data [ 'homepage' ] = None data . pop ( 'contentid' , None ) data . pop ( 'title' , None ) data . pop ( 'createdtime' , None ) data . pop ( 'modifiedtime' , None ) return data
Inquire common detail data
14,376
def get_detail_images ( self , content_id ) : resp = json . loads ( urlopen ( self . additional_images_url . format ( content_id , 1 ) ) . read ( ) . decode ( 'utf-8' ) ) total_count = resp [ 'response' ] [ 'body' ] [ 'totalCount' ] resp = json . loads ( urlopen ( self . additional_images_url . format ( content_id , total_count ) ) . read ( ) . decode ( 'utf-8' ) ) try : data = resp [ 'response' ] [ 'body' ] [ 'items' ] [ 'item' ] if type ( data ) is dict : data . pop ( 'contentid' , None ) data . pop ( 'serialnum' , None ) data [ 'origin' ] = data . pop ( 'originimgurl' , None ) data [ 'small' ] = data . pop ( 'smallimageurl' , None ) else : for img in data : if type ( img ) is dict : img . pop ( 'contentid' , None ) img . pop ( 'serialnum' , None ) img [ 'origin' ] = img . pop ( 'originimgurl' , None ) img [ 'small' ] = img . pop ( 'smallimageurl' , None ) else : del img return data if type ( data ) is list else [ data ] except TypeError : return None
Inquire detail images
14,377
def _writeFile ( cls , filePath , content , encoding = None ) : filePath = os . path . realpath ( filePath ) log . debug ( _ ( "Real file path to write: %s" % filePath ) ) if encoding is None : encoding = File . DEFAULT_ENCODING try : encodedContent = '' . join ( content ) . encode ( encoding ) except LookupError as msg : raise SubFileError ( _ ( "Unknown encoding name: '%s'." ) % encoding ) except UnicodeEncodeError : raise SubFileError ( _ ( "There are some characters in '%(file)s' that cannot be encoded to '%(enc)s'." ) % { "file" : filePath , "enc" : encoding } ) tmpFilePath = "%s.tmp" % filePath bakFilePath = "%s.bak" % filePath with open ( tmpFilePath , 'wb' ) as f : f . write ( encodedContent ) f . flush ( ) try : os . rename ( filePath , bakFilePath ) except FileNotFoundError : pass os . rename ( tmpFilePath , filePath ) try : os . unlink ( bakFilePath ) except FileNotFoundError : pass
Safe file writing . Most common mistakes are checked against and reported before write operation . After that if anything unexpected happens user won t be left without data or with corrupted one as this method writes to a temporary file and then simply renames it ( which should be atomic operation according to POSIX but who knows how Ext4 really works .
14,378
def pay_with_account_credit_cards ( invoice_id ) -> Optional [ Transaction ] : logger . debug ( 'invoice-payment-started' , invoice_id = invoice_id ) with transaction . atomic ( ) : invoice = Invoice . objects . select_for_update ( ) . get ( pk = invoice_id ) if not invoice . in_payable_state : raise PreconditionError ( 'Cannot pay invoice with status {}.' . format ( invoice . status ) ) due = invoice . due ( ) . monies ( ) if len ( due ) == 0 : raise PreconditionError ( 'Cannot pay empty invoice.' ) if len ( due ) > 1 : raise PreconditionError ( 'Cannot pay invoice with more than one currency.' ) amount = due [ 0 ] if amount . amount <= 0 : raise PreconditionError ( 'Cannot pay invoice with non-positive amount.' ) valid_credit_cards = CreditCard . objects . valid ( ) . filter ( account = invoice . account ) . order_by ( 'status' ) if not valid_credit_cards : raise PreconditionError ( 'No valid credit card on account.' ) for credit_card in valid_credit_cards : try : success , payment_psp_object = psp . charge_credit_card ( credit_card_psp_object = credit_card . psp_object , amount = amount , client_ref = str ( invoice_id ) ) payment = Transaction . objects . create ( account = invoice . account , invoice = invoice , amount = amount , success = success , payment_method = credit_card . type , credit_card_number = credit_card . number , psp_object = payment_psp_object ) if success : invoice . pay ( ) invoice . save ( ) logger . info ( 'invoice-payment-success' , invoice = invoice_id , payment = payment ) return payment else : logger . info ( 'invoice-payment-failure' , invoice = invoice_id , payment = payment ) except Exception as e : logger . error ( 'invoice-payment-error' , invoice_id = invoice_id , credit_card = credit_card , exc_info = e ) return None
Get paid for the invoice trying the valid credit cards on record for the account .
14,379
def setContentFor ( self , widget ) : for i in range ( self . count ( ) ) : item = self . widget ( i ) if widget . isStatic : item . setStaticContent ( widget ) else : item . setContent ( widget )
Updates toolbox contents with a data corresponding to a given tab .
14,380
def clear ( self ) : layout = self . layout ( ) for index in reversed ( range ( layout . count ( ) ) ) : item = layout . takeAt ( index ) try : item . widget ( ) . deleteLater ( ) except AttributeError : item = None
Removes all child widgets .
14,381
def _request ( self , service , ** kw ) : fb_request = { 'service' : service , } for key in [ 'limit' , 'offset' , 'filter' , 'data' ] : fb_request [ key ] = kw . pop ( key , None ) if kw : raise _exc . FastbillRequestError ( "Unknown arguments: %s" % ", " . join ( kw . keys ( ) ) ) data = _jsonencoder . dumps ( fb_request ) _logger . debug ( "Sending data: %r" , data ) self . _pre_request_callback ( service , fb_request ) http_resp = self . session . post ( self . SERVICE_URL , auth = self . auth , headers = self . headers , timeout = self . timeout , data = data ) self . _post_request_callback ( service , fb_request , http_resp ) try : json_resp = http_resp . json ( ) except ValueError : _logger . debug ( "Got data: %r" , http_resp . content ) _abort_http ( service , http_resp ) return else : _logger . debug ( "Got data: %r" , json_resp ) errors = json_resp [ 'RESPONSE' ] . get ( 'ERRORS' ) if errors : _abort_api ( service , json_resp , errors ) if json_resp [ 'REQUEST' ] [ 'SERVICE' ] != service : raise _exc . FastbillError ( "API Error: Got response from wrong service." ) return _response . FastbillResponse ( json_resp [ 'RESPONSE' ] , self )
Do the actual request to Fastbill s API server .
14,382
def set_parent ( self , node ) : self . _parent = node if node is None : self . _depth = 0 else : self . _depth = node . get_depth ( ) + 1
Attach node to its parent .
14,383
def generate_child_leaf_nodes ( self ) : def _yield_child_leaf_nodes ( node ) : if not node . has_children ( ) : yield node else : for child_node in node . generate_child_nodes ( ) : for child in _yield_child_leaf_nodes ( child_node ) : yield child return _yield_child_leaf_nodes ( self )
Generate leaf nodes of this node .
14,384
def detach_children ( self ) : for node in self . get_child_nodes ( ) : node . set_parent ( None ) self . _nodes = dict ( )
Erase references to children without deleting them .
14,385
def process_expt ( h5_path , inmemory = True , ignorenan = False ) : logging . info ( "Reading file at {}" . format ( h5_path ) ) h5_file = tables . open_file ( h5_path , mode = 'r' ) F = h5_file . root . F_measure n_expt , n_labels , n_class = F . shape mean_n_iterations = np . sum ( h5_file . root . n_iterations ) / n_expt if hasattr ( h5_file . root , 'CPU_time' ) : CPU_time = h5_file . root . CPU_time mean_CPU_time = np . mean ( CPU_time ) var_CPU_time = np . var ( CPU_time ) else : mean_CPU_time = None var_CPU_time = None mean_CPU_time_per_iteration = None F_mean = np . empty ( [ n_labels , n_class ] , dtype = 'float' ) F_var = np . empty ( [ n_labels , n_class ] , dtype = 'float' ) F_stderr = np . empty ( [ n_labels , n_class ] , dtype = 'float' ) n_sample = np . empty ( n_labels , dtype = 'int' ) if inmemory : F_mem = F [ : , : , : ] logging . info ( "Beginning processing" . format ( ) ) for t in range ( n_labels ) : if t % np . ceil ( n_labels / 10 ) . astype ( int ) == 0 : logging . info ( "Processed {} of {} experiments" . format ( t , n_labels ) ) if inmemory : temp = F_mem [ : , t , : ] else : temp = F [ : , t , : ] if ignorenan : n_sample [ t ] = np . sum ( ~ np . isnan ( temp ) ) with warnings . catch_warnings ( ) : warnings . simplefilter ( "ignore" , category = RuntimeWarning ) F_mean [ t ] = np . nanmean ( temp , axis = 0 ) F_var [ t ] = np . nanvar ( temp , axis = 0 ) F_stderr [ t ] = np . sqrt ( F_var [ t ] / n_sample [ t ] ) else : n_sample [ t ] = len ( temp ) F_mean [ t ] = np . mean ( temp , axis = 0 ) F_var [ t ] = np . var ( temp , axis = 0 ) F_stderr [ t ] = np . sqrt ( F_var [ t ] / n_sample [ t ] ) logging . info ( "Processing complete" . format ( ) ) h5_file . close ( ) return { 'mean' : F_mean , 'variance' : F_var , 'std_error' : F_stderr , 'n_samples' : n_sample , 'n_expts' : n_expt , 'n_labels' : n_labels , 'mean_CPU_time' : mean_CPU_time , 'var_CPU_time' : var_CPU_time , 'mean_n_iterations' : mean_n_iterations , 'h5_path' : h5_path }
Assumes h5 file has table called F_measure
14,386
def calc_confusion_matrix ( self , printout = False ) : if self . labels is None : raise DataError ( "Cannot calculate confusion matrix before data " "has been read." ) if self . preds is None : raise DataError ( "Predictions not available. Please run " "`scores_to_preds` before calculating confusion " "matrix" ) self . TP = np . sum ( np . logical_and ( self . preds == 1 , self . labels == 1 ) ) self . TN = np . sum ( np . logical_and ( self . preds == 0 , self . labels == 0 ) ) self . FP = np . sum ( np . logical_and ( self . preds == 1 , self . labels == 0 ) ) self . FN = np . sum ( np . logical_and ( self . preds == 0 , self . labels == 1 ) ) if printout : print ( "Contingency matrix is:" ) print ( "----------------------" ) print ( "TP: {} \t FN: {}" . format ( self . TP , self . FN ) ) print ( "FP: {} \t TN: {}" . format ( self . FP , self . TN ) ) print ( "\n" )
Calculates number of TP FP TN FN
14,387
def calc_true_performance ( self , printout = False ) : try : self . calc_confusion_matrix ( printout = False ) except DataError as e : print ( e . msg ) raise if self . TP + self . FP == 0 : self . precision = np . nan else : self . precision = self . TP / ( self . TP + self . FP ) if self . TP + self . FN == 0 : self . recall = np . nan else : self . recall = self . TP / ( self . TP + self . FN ) if self . precision + self . recall == 0 : self . F1_measure = np . nan else : self . F1_measure = ( 2 * self . precision * self . recall / ( self . precision + self . recall ) ) if printout : print ( "True performance is:" ) print ( "--------------------" ) print ( "Precision: {} \t Recall: {} \t F1 measure: {}" . format ( self . precision , self . recall , self . F1_measure ) )
Evaluate precision recall and balanced F - measure
14,388
def modify_fk_constraint ( apps , schema_editor ) : model = apps . get_model ( "message_sender" , "OutboundSendFailure" ) table = model . _meta . db_table with schema_editor . connection . cursor ( ) as cursor : constraints = schema_editor . connection . introspection . get_constraints ( cursor , table ) [ constraint ] = filter ( lambda c : c [ 1 ] [ "foreign_key" ] , constraints . items ( ) ) [ name , _ ] = constraint sql_delete_fk = ( "SET CONSTRAINTS {name} IMMEDIATE; " "ALTER TABLE {table} DROP CONSTRAINT {name}" ) . format ( table = schema_editor . quote_name ( table ) , name = schema_editor . quote_name ( name ) ) schema_editor . execute ( sql_delete_fk ) field = model . outbound . field to_table = field . remote_field . model . _meta . db_table to_column = field . remote_field . model . _meta . get_field ( field . remote_field . field_name ) . column sql_create_fk = ( "ALTER TABLE {table} ADD CONSTRAINT {name} FOREIGN KEY " "({column}) REFERENCES {to_table} ({to_column}) " "ON DELETE CASCADE {deferrable};" ) . format ( table = schema_editor . quote_name ( table ) , name = schema_editor . quote_name ( name ) , column = schema_editor . quote_name ( field . column ) , to_table = schema_editor . quote_name ( to_table ) , to_column = schema_editor . quote_name ( to_column ) , deferrable = schema_editor . connection . ops . deferrable_sql ( ) , ) schema_editor . execute ( sql_create_fk )
Delete s the current foreign key contraint on the outbound field and adds it again but this time with an ON DELETE clause
14,389
def log_warning ( self , msg ) : if self . __logger : self . __logger . warning ( msg ) if self . __raise_exception_on_warning : raise RuntimeError ( msg )
Log a warning if logger exists .
14,390
def log_error ( self , msg ) : if self . __logger : self . __logger . error ( msg ) raise RuntimeError ( msg )
Log an error and raise an exception .
14,391
def __add_action ( self , relative_directory , action ) : generator_action_container = self . __actions . retrieve_element_or_default ( relative_directory , None ) if generator_action_container is None : generator_action_container = GeneratorActionContainer ( ) generator_action_container . add_generator_action ( action ) self . __actions . add_element ( location = relative_directory , element = generator_action_container ) else : generator_action_container . add_generator_action ( action )
Add action into the dictionary of actions .
14,392
def __is_function_action ( self , action_function ) : is_function_action = True if not hasattr ( action_function , '__call__' ) : return False try : for end_string , context in action_function ( ) : if not isinstance ( end_string , basestring ) : self . log_error ( "Action function must return end of filename as a string as first argument" ) if not isinstance ( context , dict ) : self . log_error ( "Action function must return context as a dict as second argument" ) break except Exception : is_function_action = False return is_function_action
Detect if given function is really an action function .
14,393
def register_default_action ( self , file_pattern , action_function ) : if self . __default_action is not None : self . log_error ( 'Default action function already exist.' ) if not self . __is_function_action ( action_function ) : self . log_error ( 'Attached default function is not an action function.' ) self . __default_action = GeneratorAction ( file_pattern = file_pattern , action_function = action_function )
Default action used if no compatible action is found .
14,394
def prepare_page ( self , * args , ** kwargs ) : super ( BaseBackend , self ) . prepare_page ( * args , ** kwargs )
This is called after the page has been loaded good time to do extra polishing
14,395
def set_wrapped ( self , wrapped ) : self . wrapped = wrapped functools . update_wrapper ( self , self . wrapped , updated = ( ) ) self . wrapped_func = False self . wrapped_class = False if inspect . isroutine ( wrapped ) : self . wrapped_func = True elif isinstance ( wrapped , type ) : self . wrapped_class = True
This will decide what wrapped is and set . wrapped_func or . wrapped_class accordingly
14,396
def decorate_class ( self , klass , * decorator_args , ** decorator_kwargs ) : raise RuntimeError ( "decorator {} does not support class decoration" . format ( self . __class__ . __name__ ) ) return klass
override this in a child class with your own logic it must return a function that returns klass or the like
14,397
def decorate_class ( self , klass , * decorator_args , ** decorator_kwargs ) : class ChildClass ( klass ) : def __init__ ( slf , * args , ** kwargs ) : super ( ChildClass , slf ) . __init__ ( * args , ** kwargs ) self . decorate ( slf , * decorator_args , ** decorator_kwargs ) decorate_klass = ChildClass decorate_klass . __name__ = klass . __name__ decorate_klass . __module__ = klass . __module__ return decorate_klass
where the magic happens this wraps a class to call our decorate method in the init of the class
14,398
def generate_entry_tags ( sender , instance , created , raw , using , ** kwargs ) : Tag . objects . create_tags ( instance )
Generate the M2M Tag s for an Entry right after it has been saved .
14,399
def entry_stats ( entries , top_n = 10 ) : wc = Counter ( ) for content in entries . values_list ( "rendered_content" , flat = True ) : content = strip_tags ( content ) content = re . sub ( '\s+' , ' ' , content ) content = re . sub ( '[^A-Za-z ]+' , '' , content ) words = [ w . lower ( ) for w in content . split ( ) ] wc . update ( [ w for w in words if w not in IGNORE_WORDS ] ) return { "total_words" : len ( wc . values ( ) ) , "most_common" : wc . most_common ( top_n ) , }
Calculates stats for the given QuerySet of Entry s .