idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
57,000
def generate_http_manifest(self):
    """Return the dataset's HTTP manifest as UTF-8 encoded JSON bytes.

    The manifest bundles the dataset's admin metadata together with URLs
    for the manifest, README, overlays and items, so an HTTP client can
    discover the whole dataset from a single document.
    """
    # The dataset root is the directory containing the requested path.
    base_path = os.path.dirname(self.translate_path(self.path))
    self.dataset = dtoolcore.DataSet.from_uri(base_path)
    admin_metadata_fpath = os.path.join(base_path, ".dtool", "dtool")
    with open(admin_metadata_fpath) as fh:
        admin_metadata = json.load(fh)
    http_manifest = {
        "admin_metadata": admin_metadata,
        "manifest_url": self.generate_url(".dtool/manifest.json"),
        "readme_url": self.generate_url("README.yml"),
        "overlays": self.generate_overlay_urls(),
        "item_urls": self.generate_item_urls()
    }
    return bytes(json.dumps(http_manifest), "utf-8")
Return http manifest .
57,001
def do_GET(self):
    """Handle GET requests, overriding the inherited do_GET.

    Requests for paths ending in "http_manifest.json" get a generated
    manifest (200), or a 400 when the path is not a valid dtool dataset;
    every other path falls through to the parent handler.
    """
    if self.path.endswith("http_manifest.json"):
        try:
            manifest = self.generate_http_manifest()
            self.send_response(200)
            self.end_headers()
            self.wfile.write(manifest)
        except dtoolcore.DtoolCoreTypeError:
            # Raised when the base path is not a dtool dataset.
            self.send_response(400)
            self.end_headers()
    else:
        super(DtoolHTTPRequestHandler, self).do_GET()
Override inherited do_GET method .
57,002
def indent(self, code, level=1):
    """Indent every line of *code* by ``level`` units of ``self.indent_space``.

    :param code: source text, possibly spanning multiple lines
    :param level: number of indent units to prepend to each line
    :returns: the re-joined, indented text
    """
    prefix = self.indent_space * level
    return '\n'.join(prefix + line for line in code.split('\n'))
Python's famous indentation helper.
57,003
def retrieve_authorization_code(self, redirect_func=None):
    """Retrieve an OAuth authorization code, a prerequisite for an access token.

    In local mode a browser tab is opened and the code is read from stdin;
    otherwise the caller-supplied ``redirect_func`` is invoked with the
    provider's redirect URL and its result returned.
    """
    request_param = {
        "client_id": self.client_id,
        "redirect_uri": self.redirect_uri,
    }
    if self.scope:
        request_param['scope'] = self.scope
    if self._extra_auth_params:
        request_param.update(self._extra_auth_params)
    # Ask the provider for its redirect target without following it.
    r = requests.get(self.auth_uri, params=request_param, allow_redirects=False)
    url = r.headers.get('location')
    if self.local:
        # Interactive flow: user pastes the code shown by the provider.
        # NOTE(review): raw_input is Python 2 only — confirm target runtime.
        webbrowser.open_new_tab(url)
        authorization_code = raw_input("Code: ")
        if self.validate_code(authorization_code):
            self.authorization_code = authorization_code
    else:
        return redirect_func(url)
retrieve authorization code to get access token
57,004
def retrieve_token(self):
    """Exchange the stored authorization code for an access token.

    Returns the decoded JSON token payload, or ``None`` (after printing a
    hint) when ``retrieve_authorization_code`` has not been called yet.
    """
    if self.authorization_code:
        request_param = {
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "redirect_uri": self.redirect_uri,
            "code": self.authorization_code
        }
        if self._extra_token_params:
            request_param.update(self._extra_token_params)
        # Some providers require an explicit Content-Length on the form post.
        content_length = len(urlencode(request_param))
        headers = {
            'Content-Length': str(content_length),
            'Content-Type': 'application/x-www-form-urlencoded'
        }
        r = requests.post(self.token_uri, data=request_param, headers=headers)
        jsondata = json.loads(r.text)
        # The full JSON payload is stored, not just the token string.
        self.access_token = jsondata
        return self.access_token
    else:
        print("authorization code is required before getting accesss token")
        print("Please call retrieve_authorization_code() beforehand")
retrieve access token with code fetched via retrieve_authorization_code method .
57,005
def to_dict(self):
    """Return the setup section as a mapping of public property names to values.

    Only ``property`` attributes whose names do not start with an underscore
    are included. A property raising :class:`AttributeError` is skipped when
    listed in ``self._OPTIONAL_ATTRS``; otherwise the error is re-raised.
    """
    result = dict()
    for name in dir(self):
        if name.startswith('_'):
            continue
        if not isinstance(getattr(self.__class__, name), property):
            continue
        try:
            result[name] = getattr(self, name)
        except AttributeError:
            if name not in self._OPTIONAL_ATTRS:
                raise AttributeError(
                    'Required attribute "{0}" does not exist on '
                    'instance of type "{1}.'.format(name, self.__class__.__name__))
    return result
Represents the setup section in form of key - value pairs .
57,006
def mtime(path):
    """Return the modification time of *path*, or -1 when it does not exist."""
    if os.path.exists(path):
        return os.stat(path).st_mtime
    return -1
Get the modification time of a file or - 1 if the file does not exist .
57,007
def encode_coin_link(copper, silver=0, gold=0):
    """Encode a chat link for an amount of coins.

    Thin wrapper around :func:`encode_chat_link` with the coin type tag.
    """
    amounts = {"copper": copper, "silver": silver, "gold": gold}
    return encode_chat_link(gw2api.TYPE_COIN, **amounts)
Encode a chat link for an amount of coins .
57,008
def status(self, status):
    """Set the status of this StoreCreditPayment.

    ``None`` and the four known workflow states are accepted; any other
    value raises :class:`ValueError`.
    """
    allowed_values = ["pending", "awaitingRetry", "successful", "failed"]
    if status is None or status in allowed_values:
        self._status = status
        return
    raise ValueError(
        "Invalid value for `status` ({0}), must be one of {1}"
        .format(status, allowed_values)
    )
Sets the status of this StoreCreditPayment .
57,009
def nmtoken_from_string(text):
    """Return an XML Nmtoken built from *text*.

    Useful to produce XHTML-valid values for an anchor's name attribute:
    hyphens are doubled first, then every character that is neither
    alphanumeric nor one of ``. - _ :`` is replaced by its ordinal.
    """
    safe = {'.', '-', '_', ':'}
    pieces = []
    for ch in text.replace('-', '--'):
        if ch.isalnum() or ch in safe:
            pieces.append(ch)
        else:
            pieces.append(str(ord(ch)))
    return ''.join(pieces)
Returns a Nmtoken from a string . It is useful to produce XHTML valid values for the name attribute of an anchor .
57,010
def tidy_html(html_buffer, cleaning_lib='utidylib'):
    """Tidy up the input HTML using one of the installed cleaning libraries.

    :param html_buffer: the HTML to clean
    :param cleaning_lib: 'utidylib' or 'beautifulsoup'
    :returns: the cleaned HTML, or the original buffer unchanged when the
        requested library is unavailable or fails
    """
    if CFG_TIDY_INSTALLED and cleaning_lib == 'utidylib':
        options = dict(output_xhtml=1, show_body_only=1, merge_divs=0, wrap=0)
        try:
            output = str(tidy.parseString(html_buffer, **options))
        except:
            # Best-effort: any tidy failure falls back to the raw buffer.
            output = html_buffer
    elif CFG_BEAUTIFULSOUP_INSTALLED and cleaning_lib == 'beautifulsoup':
        try:
            output = str(BeautifulSoup(html_buffer).prettify())
        except:
            # Same best-effort fallback for BeautifulSoup.
            output = html_buffer
    else:
        output = html_buffer
    return output
Tidy up the input HTML using one of the installed cleaning libraries .
57,011
def remove_html_markup(text, replacechar=' ', remove_escaped_chars_p=True):
    """Remove HTML markup from *text*, replacing matches with *replacechar*.

    When ``remove_escaped_chars_p`` is false, escaped character entities
    are preserved by using the alternative pattern.
    """
    pattern = RE_HTML if remove_escaped_chars_p else RE_HTML_WITHOUT_ESCAPED_CHARS
    return pattern.sub(replacechar, text)
Remove HTML markup from text .
57,012
def create_html_select(options, name=None, selected=None, disabled=None,
                       multiple=False, attrs=None, **other_attrs):
    """Create an HTML select box.

    :param options: a dict (sorted by value) or a list/tuple of strings or
        (value, label) pairs
    :param name: select name; '[]' is appended for multi-selects (PHP style)
    :param selected: value or list of values to mark selected
    :param disabled: value or list of values to mark disabled
    :param multiple: whether multiple options may be selected
    :param attrs: extra attributes for the <select> tag
    :raises ValueError: on options/items of an unsupported type

    NOTE(review): uses ``unicode`` and ``list.sort(cmp)`` — Python 2 only.
    """
    body = []
    # Normalise selected/disabled to lists so membership tests work below.
    if selected is None:
        selected = []
    elif isinstance(selected, (str, unicode)):
        selected = [selected]
    if disabled is None:
        disabled = []
    elif isinstance(disabled, (str, unicode)):
        disabled = [disabled]
    if name is not None and multiple and not name.endswith('[]'):
        name += "[]"
    if isinstance(options, dict):
        items = options.items()
        # Sort dict options by their displayed label.
        items.sort(lambda item1, item2: cmp(item1[1], item2[1]))
    elif isinstance(options, (list, tuple)):
        options = list(options)
        items = []
        for item in options:
            if isinstance(item, (str, unicode)):
                # Bare string: value and label are the same.
                items.append((item, item))
            elif isinstance(item, (tuple, list)) and len(item) == 2:
                items.append(tuple(item))
            else:
                raise ValueError('Item "%s" of incompatible type: %s' % (item, type(item)))
    else:
        raise ValueError('Options of incompatible type: %s' % type(options))
    for key, value in items:
        option_attrs = {}
        if key in selected:
            option_attrs['selected'] = 'selected'
        if key in disabled:
            option_attrs['disabled'] = 'disabled'
        body.append(create_tag("option", body=value, escape_body=True,
                               value=key, attrs=option_attrs))
    if attrs is None:
        attrs = {}
    if name is not None:
        attrs['name'] = name
    if multiple:
        attrs['multiple'] = 'multiple'
    return create_tag("select", body='\n'.join(body), attrs=attrs, **other_attrs)
Create an HTML select box .
57,013
def wash(self, html_buffer, render_unallowed_tags=False,
         allowed_tag_whitelist=CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST,
         automatic_link_transformation=False,
         allowed_attribute_whitelist=CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST):
    """Wash the HTML buffer, escaping XSS attacks.

    Resets the parser state, configures the tag/attribute whitelists, feeds
    the buffer through the parser and returns the washed result string.
    """
    self.reset()
    # Per-run state; must be cleared before every feed.
    self.result = ''
    self.nb = 0
    self.previous_nbs = []
    self.previous_type_lists = []
    self.url = ''
    self.render_unallowed_tags = render_unallowed_tags
    self.automatic_link_transformation = automatic_link_transformation
    self.allowed_tag_whitelist = allowed_tag_whitelist
    self.allowed_attribute_whitelist = allowed_attribute_whitelist
    self.feed(html_buffer)
    self.close()
    return self.result
Wash HTML buffer escaping XSS attacks .
57,014
def template_from_filename(filename):
    """Return the template name matching *filename*'s extension.

    :raises ValueError: when the extension has no registered template
    """
    extension = filename.split(os.path.extsep)[-1]
    if extension in TEMPLATES_MAP:
        return TEMPLATES_MAP[extension]
    raise ValueError("No template for file extension {}".format(extension))
Returns the appropriate template name based on the given file name .
57,015
def dt2ts(dt):
    """Return float seconds since 1970-01-01 for a date or datetime.

    Uses ``time.mktime`` (local time); microseconds are added only for
    full datetime objects.
    """
    assert isinstance(dt, (datetime.datetime, datetime.date))
    seconds = time.mktime(dt.timetuple())
    if isinstance(dt, datetime.datetime):
        seconds += dt.microsecond * 1e-6
    return seconds
Converts to float representing number of seconds since 1970 - 01 - 01 GMT .
57,016
def dt2str(dt, flagSeconds=True):
    """Convert a datetime object to str; strings pass through unchanged.

    ``flagSeconds`` selects the seconds-bearing format ``_FMTS`` over ``_FMT``.
    """
    if isinstance(dt, str):
        return dt
    fmt = _FMTS if flagSeconds else _FMT
    return dt.strftime(fmt)
Converts datetime object to str if not yet an str .
57,017
def time2seconds(t):
    """Return float seconds elapsed since 00:00 for time *t*."""
    whole_seconds = (t.hour * 60 + t.minute) * 60 + t.second
    return whole_seconds + t.microsecond / 1e6
Returns seconds since 0h00 .
57,018
def Fit(self, zxq):
    """Fit a quadratic to (z, transverse) points and return the result dict.

    :param zxq: iterable of (z, transverse, Q) triples
    :returns: dict with 'params' (the three fit coefficients) and 'gof'
        (reduced sum of squared residuals), or 'FAIL'/[0,0,0] on failure
    """
    z, trans, Q = zip(*zxq)
    assert len(trans) == len(z)
    # Degrees of freedom: number of points minus the 3 fit parameters.
    ndf = len(z) - 3
    z = np.array(z)
    trans = np.array(trans)

    def dbexpl(t, p):
        # Quadratic model: p0 - p1*t + p2*t^2
        return (p[0] - p[1] * t + p[2] * t ** 2)

    def residuals(p, data, t):
        err = data - dbexpl(t, p)
        return err

    doc = {}
    try:
        assert ndf > 0
        p0 = [1, 0, 0]  # initial parameter guess
        pbest = leastsq(residuals, p0, args=(trans, z), full_output=1)
        bestparams = pbest[0]
        # pbest[2] is leastsq's infodict; 'fvec' holds the final residuals.
        good_of_fit = sum(pbest[2]['fvec'] ** 2)
        good_of_fit = float(good_of_fit / ndf)
        doc['params'] = list(bestparams)
        doc['gof'] = good_of_fit
    except:
        # Best-effort: any failure (too few points, solver error) is
        # reported in-band rather than raised.
        doc['gof'] = 'FAIL'
        doc['params'] = [0, 0, 0]
    return doc
Perform a 2D fit on 2D points then return parameters
57,019
def _get_last_transverse_over_list ( self , zxq ) : z_max = None x_of_interest = None for z , x , q in zxq : if z == None or z > z_max : x_of_interest = x return x_of_interest
Get transverse coord at highest z
57,020
def item_details(item_id, lang="en"):
    """Return details about a single item, served through the local cache.

    :param item_id: the item identifier
    :param lang: response language code
    """
    params = {"item_id": item_id, "lang": lang}
    cache_name = "item_details.{item_id}.{lang}.json".format(**params)
    return get_cached("item_details.json", cache_name, params=params)
This resource returns details about a single item.
57,021
def recipe_details(recipe_id, lang="en"):
    """Return details about a single recipe, served through the local cache.

    :param recipe_id: the recipe identifier
    :param lang: response language code
    """
    params = {"recipe_id": recipe_id, "lang": lang}
    cache_name = "recipe_details.{recipe_id}.{lang}.json".format(**params)
    return get_cached("recipe_details.json", cache_name, params=params)
This resource returns details about a single recipe.
57,022
def requires_indieauth(f):
    """Wrap a Flask handler so it requires a valid IndieAuth access token.

    When token validation produces a Response (a denial) it is returned
    directly; otherwise the wrapped handler runs normally.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        auth_result = check_auth(get_access_token())
        if isinstance(auth_result, Response):
            return auth_result
        return f(*args, **kwargs)
    return decorated
Wraps a Flask handler to require a valid IndieAuth access token .
57,023
def check_auth(access_token):
    """Validate *access_token* against the configured IndieAuth token endpoint.

    On success, stores the token's me/client_id/scope on ``g.user`` and
    returns ``None``; on any failure returns a deny() response.
    """
    if not access_token:
        current_app.logger.error('No access token.')
        return deny('No access token found.')
    request = Request(current_app.config['TOKEN_ENDPOINT'],
                      headers={"Authorization": ("Bearer %s" % access_token)})
    contents = urlopen(request).read().decode('utf-8')
    # The token endpoint answers with a urlencoded body.
    token_data = parse_qs(contents)
    # NOTE(review): direct indexing raises KeyError when the endpoint omits
    # 'me'/'client_id' — confirm the endpoint always returns both on success.
    me = token_data['me'][0]
    client_id = token_data['client_id'][0]
    if me is None or client_id is None:
        current_app.logger.error("Invalid token [%s]" % contents)
        return deny('Invalid token')
    me, me_error = check_me(me)
    if me is None:
        current_app.logger.error("Invalid `me` value [%s]" % me_error)
        return deny(me_error)
    scope = token_data['scope']
    # parse_qs yields lists; unwrap to the first value when needed.
    if not isinstance(scope, str):
        scope = scope[0]
    valid_scopes = ('post', 'create',)
    scope_ = scope.split()
    scope_valid = any((val in scope_) for val in valid_scopes)
    if not scope_valid:
        current_app.logger.error("Scope '%s' does not contain 'post' or 'create'." % scope)
        return deny("Scope '%s' does not contain 'post' or 'create'." % scope)
    g.user = {
        'me': me,
        'client_id': client_id,
        'scope': scope,
        'access_token': access_token
    }
This function contacts the configured IndieAuth Token Endpoint to see if the given token is a valid token and for whom .
57,024
def connect_db(config):
    """Open a connection to the configured sqlite database.

    Rows are returned as :class:`sqlite3.Row` for name-based access.
    """
    connection = sqlite3.connect(config["database"]["uri"])
    connection.row_factory = sqlite3.Row
    return connection
Connects to the specific database .
57,025
def harvest_repo(root_url, archive_path, tag=None, archive_format='tar.gz'):
    """Clone a Git repository and archive a tag (or HEAD) to *archive_path*.

    :param root_url: URL of the repository to clone
    :param archive_path: where `git archive` writes its output
    :param tag: optional ref to archive instead of HEAD
    :param archive_format: format passed to `git archive --format`
    :raises Exception: when no git executable is available
    """
    if not git_exists():
        raise Exception("Git not found. It probably needs installing.")
    clone_path = mkdtemp(dir=cfg['CFG_TMPDIR'])
    git = get_which_git()
    call([git, 'clone', root_url, clone_path])
    # git archive must run from inside the clone.
    chdir(clone_path)
    if tag:
        call([git, 'archive', '--format=' + archive_format, '-o', archive_path, tag])
    else:
        call([git, 'archive', '--format=' + archive_format, '-o', archive_path, 'HEAD'])
    # Best-effort cleanup of the temporary clone.
    try:
        rmtree(clone_path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
Archives a specific tag in a specific Git repository .
57,026
def gregorian_to_julian(day):
    """Convert a ``datetime.date`` to its corresponding Julian day number.

    Uses the standard month-shifted algorithm where the year starts in
    March, so leap days fall at the end of the counting year.
    """
    before_march = int(day.month < MARCH)
    month_index = day.month + MONTHS_PER_YEAR * before_march - MARCH
    years_elapsed = day.year - JULIAN_START_YEAR - before_march
    days_from_months = (153 * month_index + 2) // 5
    days_from_years = 365 * years_elapsed
    leap_days = (years_elapsed // 4
                 - years_elapsed // 100
                 + years_elapsed // 400)
    return day.day + days_from_months + days_from_years + leap_days - 32045
Convert a datetime . date object to its corresponding Julian day .
57,027
def sun_declination(day):
    """Compute the sun's declination angle, in degrees, for the given date.

    Uses the Spencer Fourier-series approximation on the fractional year.
    """
    day_of_year = day.toordinal() - date(day.year, 1, 1).toordinal()
    gamma = 2 * pi * day_of_year / 365
    terms = [
        0.006918,
        0.001480 * sin(3 * gamma),
        0.070257 * sin(gamma),
        0.000907 * sin(2 * gamma),
        -0.399912 * cos(gamma),
        -0.006758 * cos(2 * gamma),
        -0.002697 * cos(3 * gamma),
    ]
    return degrees(sum(terms))
Compute the declination angle of the sun for the given date .
57,028
def equation_of_time(day):
    """Compute the equation of time, in radians, for the given date.

    Based on the mean anomaly corrected for orbital eccentricity and the
    projection of solar movement onto the equatorial plane.
    """
    day_of_year = day.toordinal() - date(day.year, 1, 1).toordinal()
    mean_angle = EARTH_ORIBITAL_VELOCITY * (day_of_year + 10)
    corrected = mean_angle + 1.914 * sin(
        radians(EARTH_ORIBITAL_VELOCITY * (day_of_year - 2)))
    movement_on_equatorial_plane = degrees(
        atan2(tan(radians(corrected)), cos(EARTH_AXIS_TILT)))
    eot_half_turns = (mean_angle - movement_on_equatorial_plane) / 180
    minutes = 720 * (eot_half_turns - int(eot_half_turns + 0.5))
    return radians(minutes)
Compute the equation of time for the given date .
57,029
def compute_zuhr_utc(day, longitude):
    """Compute the UTC floating-point time for Zuhr at *longitude* on *day*.

    Solar noon is 12:00 offset by longitude (15 degrees per hour) and the
    equation of time; the result is normalised into [0, 24).
    """
    hours = 12 + abs(longitude) / 15 - equation_of_time(day)
    return abs(hours) % 24
Compute the UTC floating point time for Zuhr given date and longitude .
57,030
def compute_time_at_sun_angle(day, latitude, angle):
    """Compute the signed hour offset from mid-day at which the sun reaches *angle*.

    The sign of *angle* selects before/after noon; its magnitude is the
    elevation below the horizon.
    """
    positive_angle_rad = radians(abs(angle))
    # Preserves the original behavior: angle == 0 raises ZeroDivisionError.
    sign = abs(angle) / angle
    latitude_rad = radians(latitude)
    declination_rad = radians(sun_declination(day))
    cos_hour_angle = (
        (-sin(positive_angle_rad) - sin(latitude_rad) * sin(declination_rad))
        / (cos(latitude_rad) * cos(declination_rad))
    )
    return sign * degrees(acos(cos_hour_angle)) / 15
Compute the floating point time difference between mid - day and an angle .
57,031
def time_at_shadow_length(day, latitude, multiplier):
    """Compute the hour offset from noon when an object's shadow is
    *multiplier* times its height on *day* at *latitude*.
    """
    latitude_rad = radians(latitude)
    declination_rad = radians(sun_declination(day))
    elevation = arccot(multiplier + tan(abs(latitude_rad - declination_rad)))
    cos_hour_angle = (
        (sin(elevation) - sin(latitude_rad) * sin(declination_rad))
        / (cos(latitude_rad) * cos(declination_rad))
    )
    return degrees(acos(cos_hour_angle)) / 15
Compute the time at which an object's shadow is a multiple of its length.
57,032
def parse_range_header(range):
    """Parse a Range header of the form 'items=a-b' (dojo JsonRest store).

    :returns: a dict with 'start', 'finish' (clamped to >= start) and
        'number' of items, or ``False`` when the header does not match
    """
    match = re.match(r'^items=([0-9]+)-([0-9]+)$', range)
    if not match:
        return False
    start = int(match.group(1))
    finish = max(int(match.group(2)), start)
    return {'start': start, 'finish': finish, 'number': finish - start + 1}
Parse a range header as used by the dojo Json Rest store .
57,033
def on(self, event):
    """Return the registration wrapper for *event*.

    :raises ValueError: when the event has not been registered
    """
    handler = self._handlers.get(event)
    if handler:
        return handler.register
    raise ValueError("Unknown event '{}'".format(event))
Returns a wrapper for the given event .
57,034
def register(self, event, keys):
    """Register a new event with its available keys.

    :raises RuntimeError: when called while the dispatcher is running
    :raises ValueError: when the event has already been registered
    """
    if self.running:
        raise RuntimeError("Can't register while running")
    if self._handlers.get(event) is not None:
        raise ValueError("Event {} already registered".format(event))
    self._handlers[event] = EventHandler(event, keys, loop=self.loop)
Register a new event with available keys . Raises ValueError when the event has already been registered .
57,035
def unregister(self, event):
    """Remove all registered handlers for *event*.

    Returns silently when the event was never registered.

    :raises RuntimeError: when called while the dispatcher is running
    """
    if self.running:
        raise RuntimeError("Can't unregister while running")
    try:
        del self._handlers[event]
    except KeyError:
        pass
Remove all registered handlers for an event . Silent return when event was not registered .
57,036
async def trigger(self, event, kwargs):
    """Enqueue *event* with its payload for processing.

    Also wakes the processing task in case it is idle-waiting.
    """
    await self._queue.put((event, kwargs))
    self._resume_processing.set()
Enqueue an event for processing
57,037
async def _task(self):
    """Main queue processor: start handlers, drain events, then shut down.

    Runs until ``self.running`` goes false; events without a registered
    handler are silently dropped. When the queue is empty the task parks
    on ``_resume_processing`` until :meth:`trigger` wakes it.
    """
    if self._handlers.values():
        start_tasks = [h.start() for h in self._handlers.values()]
        await asyncio.wait(start_tasks, loop=self.loop)
    while self.running:
        if self.events:
            event, kwargs = await self._queue.get()
            handler = self._handlers.get(event, None)
            if handler:
                handler(kwargs)
        else:
            # Nothing queued: sleep until a trigger arrives.
            await self._resume_processing.wait()
            self._resume_processing.clear()
    # Stop every handler before completing shutdown.
    tasks = [handler.stop() for handler in self._handlers.values()]
    if tasks:
        await asyncio.wait(tasks, loop=self.loop)
    await self._complete_shutdown()
Main queue processor
57,038
def restriction(lam, mu, orbitals, U, beta):
    """Equation determining the restriction on the Lagrange multiplier.

    Returns the difference between the free-particle filling at shifted
    chemical potential and the interacting expected filling; its root in
    ``lam`` satisfies the constraint.
    """
    free_filling = 2 * orbitals * fermi_dist(-(mu + lam), beta)
    return free_filling - expected_filling(-lam, orbitals, U, beta)
Equation that determines the restriction on the Lagrange multiplier.
57,039
def pressision_try(orbitals, U, beta, step):
    """Try a better initial guess of lambda (no improvement observed).

    Solves the restriction equation along a chemical-potential grid,
    seeding each solve with the previous solution, and plots the result.

    NOTE(review): the loop is hard-coded to range(99) while the arrays
    have length ``step`` — presumably step == 100; confirm before reuse.
    """
    mu, lam = main(orbitals, U, beta, step)
    mu2, lam2 = linspace(0, U * orbitals, step), zeros(step)
    for i in range(99):
        lam2[i + 1] = fsolve(restriction, lam2[i], (mu2[i + 1], orbitals, U, beta))
    plot(mu2, 2 * orbitals * fermi_dist(-(mu2 + lam2), beta), label='Test guess')
    legend(loc=0)
Perform a better initial guess of lambda; no improvement observed.
57,040
def spin_z(particles, index, /):
    """Generate the S_z projection operator for spin *index* of *particles* spins.

    Diagonal matrix of +1/2 for basis states where the bit at *index* is
    set and -1/2 otherwise.
    """
    dim = 2 ** particles
    mat = np.zeros((dim, dim))
    for state in range(dim):
        mat[state, state] = 1 if btest(state, index) == 1 else -1
    return mat / 2.
Generates the spin_z projection operator for a system of N = particles and for the selected spin index name . where index = 0 .. N - 1
57,041
def spin_gen(particles, index, gauge=1):
    """Generate the generic spin-flip operator in the z basis.

    Flips the bit at *index* of each basis state; the entry is 1 when the
    bit was set and *gauge* otherwise (controls behavior away from
    half-filling).
    """
    dim = 2 ** particles
    flipper = 2 ** index
    mat = np.zeros((dim, dim))
    for state in range(dim):
        if btest(state, index) == 1:
            mat[state ^ flipper, state] = 1
        else:
            mat[state ^ flipper, state] = gauge
    return mat
Generates the generic spin operator in z basis for a system of N = particles and for the selected spin index name . where index = 0 .. N - 1 The gauge term sets the behavoir for a system away from half - filling
57,042
def revoke(self, token, pipe=None):
    """Revoke the key associated with the given revocation token.

    Watches the token key, then atomically deletes both the stored key and
    the token. When we own the pipeline (``pipe is None``) the transaction
    is executed and reset here; otherwise the caller executes it.

    :raises RevokeError: when the token was not found (own-pipeline case)
    :raises WatchError: when the watched token changed concurrently
    """
    p = self.redis.pipeline() if pipe is None else pipe
    formatted_token = self.format_token(token)
    try:
        p.watch(formatted_token)
        # Read happens in immediate mode while watching, before multi().
        key = p.get(formatted_token)
        formatted_key = self.format_key(key)
        p.multi()
        p.delete(formatted_key, formatted_token)
        if pipe is None:
            # NOTE(review): the last reply is the delete count — presumably
            # falsy only when nothing was deleted; confirm semantics.
            if not p.execute()[-1]:
                raise RevokeError(token, 'token not found')
    except WatchError:
        raise
    finally:
        if pipe is None:
            p.reset()
\ Revokes the key associated with the given revokation token .
57,043
async def oauth(request):
    """OAuth example handler: log in via the URL's provider and show the user."""
    provider = request.match_info.get('provider')
    client, _ = await app.ps.oauth.login(provider, request)
    user, data = await client.user_info()
    response = (
        "<a href='/'>back</a><br/><br/>"
        "<ul>"
        "<li>ID: {u.id}</li>"
        "<li>Username: {u.username}</li>"
        "<li>First, last name: {u.first_name}, {u.last_name}</li>"
        "<li>Email: {u.email}</li>"
        "<li>Link: {u.link}</li>"
        "<li>Picture: {u.picture}</li>"
        "<li>Country, city: {u.country}, {u.city}</li>"
        "</ul>"
    ).format(u=user)
    # Raw provider payload, escaped for safe display.
    response += "<code>%s</code>" % html.escape(repr(data))
    return response
Oauth example .
57,044
def BeginOfEventAction(self, event):
    """Event-start hook: log and pass the event number to the sensitive detector."""
    self.log.info("Simulating event %s", event.GetEventID())
    self.sd.setEventNumber(event.GetEventID())
Save event number
57,045
def EndOfEventAction(self, event):
    """Event-end hook: collect sensitive-detector hits and run the processor chain.

    Each processor receives the previous processor's output; a processor
    returning nothing is logged as a warning (and ends the chain's data).
    """
    self.log.debug('Processesing simulated event %d', event.GetEventID())
    docs = self.sd.getDocs()
    self.sd.clearDocs()
    for processor in self.processors:
        docs = processor.process(docs)
        if not docs:
            self.log.warning('%s did not return documents in process()!',
                             processor.__class__.__name__)
At the end of an event grab sensitive detector hits then run processor loop
57,046
def generate_handler():
    """Create the Blockade IAM user, role, group and policies and wire them up.

    Each create step tolerates pre-existing entities; policies are attached
    to both the role and the group, then the user joins the group.

    :returns: True on completion
    """
    logger.debug("[#] Setting up user, group and permissions")
    client = boto3.client("iam", region_name=PRIMARY_REGION)
    try:
        response = client.create_user(UserName=BLOCKADE_USER)
    except client.exceptions.EntityAlreadyExistsException:
        logger.debug("[!] Blockade user already exists")
    logger.info("[#] %s user successfully created" % (BLOCKADE_USER))
    try:
        logger.debug("[#] Creating %s role" % (BLOCKADE_ROLE))
        response = client.create_role(
            RoleName=BLOCKADE_ROLE,
            AssumeRolePolicyDocument=BLOCKADE_ROLE_POLICY,
            Description="Allow a user to manage the administration of Blockade."
        )
    except client.exceptions.EntityAlreadyExistsException:
        logger.debug("[!] Blockade role already exists")
    logger.info("[#] %s role successfully created" % (BLOCKADE_ROLE))
    try:
        logger.debug("[#] Creating %s group" % (BLOCKADE_GROUP))
        response = client.create_group(GroupName=BLOCKADE_GROUP, )
    except client.exceptions.EntityAlreadyExistsException:
        logger.debug("[!] Blockade group already exists")
    logger.info("[#] %s group successfully created" % (BLOCKADE_GROUP))
    logger.debug("[#] Creating Blockade IAM policies")
    for label in BLOCKADE_POLICIES:
        logger.debug("[#] Creating %s policy" % (label))
        try:
            response = client.create_policy(
                PolicyName=label,
                PolicyDocument=POLICIES[label],
                Description="Generated policy from Blockade bootstrap tool"
            )
        except client.exceptions.EntityAlreadyExistsException:
            logger.debug("[!] Blockade policy %s already exists" % (label))
        logger.info("[#] Blockade %s policy successfully created" % (label))
    logger.info("[#] Blockade policies successfully created")
    iam = boto3.resource('iam')
    # Derive the account id from the current user's ARN.
    account_id = iam.CurrentUser().arn.split(':')[4]
    for label in BLOCKADE_POLICIES + ['PushToCloud', 'APIGatewayAdmin']:
        logger.debug("[#] Attaching %s policy" % (label))
        arn = 'arn:aws:iam::{id}:policy/{policy}'.format(id=account_id, policy=label)
        # The two AWS-managed policies use fixed, account-independent ARNs.
        if label == 'PushToCloud':
            arn = "arn:aws:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
        if label == 'APIGatewayAdmin':
            arn = "arn:aws:iam::aws:policy/AmazonAPIGatewayAdministrator"
        client.attach_role_policy(RoleName=BLOCKADE_ROLE, PolicyArn=arn)
        client.attach_group_policy(GroupName=BLOCKADE_GROUP, PolicyArn=arn)
    logger.info("[#] Blockade policies successfully attached")
    logger.debug("[#] Adding %s to %s group" % (BLOCKADE_USER, BLOCKADE_GROUP))
    response = client.add_user_to_group(GroupName=BLOCKADE_GROUP,
                                        UserName=BLOCKADE_USER)
    logger.info("[#] %s user is part of %s group" % (BLOCKADE_USER, BLOCKADE_GROUP))
    return True
Create the Blockade user and give them permissions .
57,047
def remove_handler():
    """Remove the Blockade user, group, role and policies (reverse of setup).

    Every step is best-effort: already-deleted entities are logged and
    skipped so the teardown is idempotent.

    :returns: True on completion
    """
    logger.debug("[#] Removing user, group and permissions for Blockade")
    client = boto3.client("iam", region_name=PRIMARY_REGION)
    iam = boto3.resource('iam')
    account_id = iam.CurrentUser().arn.split(':')[4]
    try:
        logger.debug("[#] Removing %s from %s group" % (BLOCKADE_USER, BLOCKADE_GROUP))
        response = client.remove_user_from_group(GroupName=BLOCKADE_GROUP,
                                                 UserName=BLOCKADE_USER)
    except client.exceptions.NoSuchEntityException:
        logger.debug("[!] Blockade user already removed from group")
    for label in BLOCKADE_POLICIES + ['PushToCloud', 'APIGatewayAdmin']:
        logger.debug("[#] Removing %s policy" % (label))
        arn = 'arn:aws:iam::{id}:policy/{policy}'.format(id=account_id, policy=label)
        # AWS-managed policies have fixed ARNs (and cannot be deleted below).
        if label == 'PushToCloud':
            arn = "arn:aws:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
        if label == 'APIGatewayAdmin':
            arn = "arn:aws:iam::aws:policy/AmazonAPIGatewayAdministrator"
        try:
            response = client.detach_group_policy(GroupName=BLOCKADE_GROUP, PolicyArn=arn)
        except:
            pass
        try:
            response = client.detach_role_policy(RoleName=BLOCKADE_ROLE, PolicyArn=arn)
        except:
            pass
        try:
            response = client.delete_policy(PolicyArn=arn)
        except Exception as e:
            print(e)
            pass
    logger.debug("[#] Removed all policies")
    try:
        logger.debug("[#] Deleting %s user" % (BLOCKADE_USER))
        response = client.delete_user(UserName=BLOCKADE_USER)
    except client.exceptions.NoSuchEntityException:
        logger.debug("[!] %s user already deleted" % (BLOCKADE_USER))
    try:
        logger.debug("[#] Removing %s group" % (BLOCKADE_GROUP))
        response = client.delete_group(GroupName=BLOCKADE_GROUP)
    except:
        logger.debug("[!] Group already removed")
    try:
        logger.debug("[#] Removing %s role" % (BLOCKADE_ROLE))
        response = client.delete_role(RoleName=BLOCKADE_ROLE)
    except:
        logger.debug("[!] Role already removed")
    return True
Remove the user group and policies for Blockade .
57,048
def generate_s3_bucket():
    """Create the Blockade S3 bucket unless one with the prefix already exists.

    :returns: the existing bucket record, or the create_bucket response
    """
    logger.debug("[#] Setting up S3 bucket")
    client = boto3.client("s3", region_name=PRIMARY_REGION)
    buckets = client.list_buckets()
    # NOTE(review): creation matches S3_BUCKET while remove_s3_bucket
    # matches S3_BUCKET_NAME — confirm the two constants agree.
    matches = [x for x in buckets.get('Buckets', list())
               if x['Name'].startswith(S3_BUCKET)]
    if len(matches) > 0:
        logger.debug("[*] Bucket already exists")
        return matches.pop()
    response = client.create_bucket(
        Bucket=S3_BUCKET,
        CreateBucketConfiguration={'LocationConstraint': PRIMARY_REGION}
    )
    logger.info("[#] Successfully setup the S3 bucket")
    return response
Create the blockade bucket if not already there .
57,049
def remove_s3_bucket():
    """Empty and delete the Blockade S3 bucket.

    Objects are deleted in pages until the bucket is empty, then the
    bucket itself is removed. Returns early when no matching bucket exists.
    """
    logger.debug("[#] Removing S3 bucket")
    client = boto3.client("s3", region_name=PRIMARY_REGION)
    buckets = client.list_buckets()
    matches = [x for x in buckets.get('Buckets', list())
               if x['Name'].startswith(S3_BUCKET_NAME)]
    if len(matches) == 0:
        return
    match = matches.pop()['Name']
    try:
        response = client.list_objects_v2(Bucket=match, )
    except client.exceptions.NoSuchBucket:
        logger.info("[!] S3 bucket already deleted")
        return True
    # Drain the bucket one listing page at a time.
    while response['KeyCount'] > 0:
        logger.debug('[*] Deleting %d objects from bucket %s'
                     % (len(response['Contents']), match))
        response = client.delete_objects(
            Bucket=match,
            Delete={'Objects': [{'Key': obj['Key']} for obj in response['Contents']]}
        )
        response = client.list_objects_v2(Bucket=match, )
    logger.debug('[#] Deleting bucket %s' % match)
    response = client.delete_bucket(Bucket=match)
    logger.info("[#] Successfully deleted the S3 bucket")
    return response
Remove the Blockade bucket .
57,050
def generate_dynamodb_tables():
    """Create the Blockade DynamoDB tables that do not already exist.

    :returns: list of create_table responses for newly created tables
    """
    logger.debug("[#] Setting up DynamoDB tables")
    client = boto3.client('dynamodb', region_name=PRIMARY_REGION)
    existing_tables = client.list_tables()['TableNames']
    responses = list()
    for label in DYNAMODB_TABLES:
        if label in existing_tables:
            logger.debug("[*] Table %s already exists" % (label))
            continue
        # Common throughput settings; per-table schema comes from config.
        kwargs = {
            'TableName': label,
            'ProvisionedThroughput': {
                'ReadCapacityUnits': 5,
                'WriteCapacityUnits': 5
            }
        }
        kwargs.update(DYNAMODB_SCHEMAS[label])
        response = client.create_table(**kwargs)
        responses.append(response)
        logger.debug("[#] Successfully setup DynamoDB table %s" % (label))
    logger.info("[#] Successfully setup DynamoDB tables")
    return responses
Create the Blockade DynamoDB tables .
57,051
def remove_dynamodb_tables():
    """Remove the Blockade DynamoDB tables, skipping ones already gone.

    :returns: list of delete_table responses for tables actually removed
    """
    logger.debug("[#] Removing DynamoDB tables")
    client = boto3.client('dynamodb', region_name=PRIMARY_REGION)
    responses = list()
    for label in DYNAMODB_TABLES:
        logger.debug("[*] Removing %s table" % (label))
        try:
            response = client.delete_table(TableName=label)
        except client.exceptions.ResourceNotFoundException:
            logger.info("[!] Table %s already removed" % (label))
            continue
        responses.append(response)
        logger.debug("[*] Removed %s table" % (label))
    logger.info("[#] Successfully removed DynamoDB tables")
    return responses
Remove the Blockade DynamoDB tables .
57,052
def generate_lambda_functions():
    """Create the Blockade Lambda functions that do not already exist.

    Deployment zips are read from the sibling ``aws/lambda-zips``
    directory; per-function settings come from ``LAMBDA_SCHEMA``.

    :returns: list of create_function responses for new functions
    """
    logger.debug("[#] Setting up the Lambda functions")
    aws_lambda = boto3.client('lambda', region_name=PRIMARY_REGION)
    functions = aws_lambda.list_functions().get('Functions')
    existing_funcs = [x['FunctionName'] for x in functions]
    iam = boto3.resource('iam')
    # Derive the account id from the current user's ARN.
    account_id = iam.CurrentUser().arn.split(':')[4]
    responses = list()
    for label in LAMBDA_FUNCTIONS:
        if label in existing_funcs:
            logger.debug("[*] Lambda function %s already exists" % (label))
            continue
        dir_path = os.path.dirname(os.path.realpath(__file__))
        dir_path = dir_path.replace('/cli', '/aws')
        # Fix: read the deployment zip via a context manager so the file
        # handle is closed deterministically (the original leaked it).
        zip_path = "{0}/lambda-zips/{1}.zip".format(dir_path, label)
        with open(zip_path, 'rb') as zip_fh:
            zip_bytes = zip_fh.read()
        kwargs = {
            'Runtime': 'python2.7',
            'Role': 'arn:aws:iam::{0}:role/{1}'.format(account_id, BLOCKADE_ROLE),
            'Timeout': 3,
            'MemorySize': 128,
            'Publish': True,
            'Code': {'ZipFile': zip_bytes}
        }
        kwargs.update(LAMBDA_SCHEMA[label])
        logger.debug("[#] Setting up the %s Lambda function" % (label))
        response = aws_lambda.create_function(**kwargs)
        responses.append(response)
        logger.debug("[#] Successfully setup Lambda function %s" % (label))
    logger.info("[#] Successfully setup Lambda functions")
    return responses
Create the Blockade lambda functions .
57,053
def remove_lambda_functions():
    """Remove the Blockade Lambda functions, skipping ones already gone.

    :returns: list of delete_function responses for functions removed
    """
    logger.debug("[#] Removing the Lambda functions")
    client = boto3.client('lambda', region_name=PRIMARY_REGION)
    responses = list()
    for label in LAMBDA_FUNCTIONS:
        try:
            response = client.delete_function(FunctionName=label, )
        except client.exceptions.ResourceNotFoundException:
            logger.info("[!] Function %s already removed" % (label))
            continue
        responses.append(response)
        logger.debug("[*] Removed %s function" % (label))
    logger.info("[#] Successfully removed Lambda functions")
    return responses
Remove the Blockade Lambda functions .
57,054
def generate_api_gateway():
    """Create the Blockade API Gateway REST service unless it already exists.

    :returns: the existing API record, or the create_rest_api response
    """
    logger.debug("[#] Setting up the API Gateway")
    client = boto3.client('apigateway', region_name=PRIMARY_REGION)
    matches = [x for x in client.get_rest_apis().get('items', list())
               if x['name'] == API_GATEWAY]
    if len(matches) > 0:
        logger.debug("[#] API Gateway already setup")
        return matches.pop()
    response = client.create_rest_api(
        name=API_GATEWAY,
        description='REST-API to power the Blockade service'
    )
    logger.info("[#] Successfully setup the API Gateway")
    return response
Create the Blockade API Gateway REST service .
57,055
def generate_admin_resource ( ) : logger . debug ( "[#] Setting up the admin resource" ) client = boto3 . client ( 'apigateway' , region_name = PRIMARY_REGION ) existing = get_api_gateway_resource ( "admin" ) if existing : logger . debug ( "[#] API admin resource already created" ) return True matches = [ x for x in client . get_rest_apis ( ) . get ( 'items' , list ( ) ) if x [ 'name' ] == API_GATEWAY ] match = matches . pop ( ) resource_id = get_api_gateway_resource ( '/' ) response = client . create_resource ( restApiId = match . get ( 'id' ) , parentId = resource_id , pathPart = 'admin' ) logger . info ( "[#] Successfully setup the admin resource" ) return response
Create the Blockade admin resource for the REST services .
57,056
def get_api_gateway_resource ( name ) : client = boto3 . client ( 'apigateway' , region_name = PRIMARY_REGION ) matches = [ x for x in client . get_rest_apis ( ) . get ( 'items' , list ( ) ) if x [ 'name' ] == API_GATEWAY ] match = matches . pop ( ) resources = client . get_resources ( restApiId = match . get ( 'id' ) ) resource_id = None for item in resources . get ( 'items' , list ( ) ) : if item . get ( 'pathPart' , '/' ) != name : continue resource_id = item [ 'id' ] return resource_id
Get the resource associated with our gateway .
57,057
def remove_api_gateway ( ) : logger . debug ( "[#] Removing API Gateway" ) client = boto3 . client ( 'apigateway' , region_name = PRIMARY_REGION ) matches = [ x for x in client . get_rest_apis ( ) . get ( 'items' , list ( ) ) if x [ 'name' ] == API_GATEWAY ] if len ( matches ) == 0 : logger . info ( "[!] API Gateway already removed" ) return True match = matches . pop ( ) response = client . delete_rest_api ( restApiId = match . get ( 'id' ) ) logger . info ( "[#] Removed API Gateway" ) return response
Remove the Blockade REST API service .
57,058
def method_delegate ( ** methods ) : methods = { k . upper ( ) : v for k , v in iteritems ( methods ) } if PY3 : methods = { k . encode ( "utf-8" ) : v for k , v in iteritems ( methods ) } def render ( request ) : renderer = methods . get ( request . method ) if renderer is None : return Response ( code = 405 ) return renderer ( request ) return render
Construct a renderer that delegates based on the request s HTTP method .
57,059
def traverse ( path , request , resource ) : path = path . lstrip ( b"/" ) for component in path and path . split ( b"/" ) : if getattr ( resource , "is_leaf" , False ) : break resource = resource . get_child ( name = component , request = request ) return resource
Traverse a root resource retrieving the appropriate child for the request .
57,060
def escape_shell_arg ( shell_arg ) : if isinstance ( shell_arg , six . text_type ) : msg = "ERROR: escape_shell_arg() expected string argument but " "got '%s' of type '%s'." % ( repr ( shell_arg ) , type ( shell_arg ) ) raise TypeError ( msg ) return "'%s'" % shell_arg . replace ( "'" , r"'\''" )
Escape shell argument shell_arg by placing it within single - quotes . Any single quotes found within the shell argument string will be escaped .
57,061
def retry_mkstemp ( suffix = '' , prefix = 'tmp' , directory = None , max_retries = 3 ) : if directory is None : directory = current_app . config [ 'CFG_TMPSHAREDDIR' ] for retry_count in range ( 1 , max_retries + 1 ) : try : tmp_file_fd , tmp_file_name = tempfile . mkstemp ( suffix = suffix , prefix = prefix , dir = directory ) except OSError as e : if e . errno == 19 and retry_count <= max_retries : time . sleep ( 10 ) else : raise else : break return tmp_file_fd , tmp_file_name
Make mkstemp more robust against AFS glitches .
57,062
def declarative_fields ( cls_filter , meta_base = type , extra_attr_name = 'base_fields' ) : def __new__ ( cls , name , bases , attrs ) : attrs [ extra_attr_name ] = fields = get_declared_fields ( bases , attrs , cls_filter , extra_attr_name = extra_attr_name ) attrs [ extra_attr_name + '_names' ] = set ( fields . keys ( ) ) new_class = meta_base . __new__ ( cls , name , bases , attrs ) return new_class return type ( '' , ( meta_base , ) , { '__new__' : __new__ } )
Metaclass that converts Field attributes to a dictionary called base_fields taking into account parent class cls_filter .
57,063
def started ( generator_function ) : @ wraps ( generator_function ) def wrapper ( * args , ** kwargs ) : g = generator_function ( * args , ** kwargs ) next ( g ) return g return wrapper
starts a generator when created
57,064
def add_log_error ( self , x , flag_also_show = False , E = None ) : if len ( x ) == 0 : x = "(empty error)" tb . print_stack ( ) x_ = x if E is not None : a99 . get_python_logger ( ) . exception ( x_ ) else : a99 . get_python_logger ( ) . info ( "ERROR: {}" . format ( x_ ) ) x = '<span style="color: {0!s}">{1!s}</span>' . format ( a99 . COLOR_ERROR , x ) self . _add_log_no_logger ( x , False ) if flag_also_show : a99 . show_error ( x_ )
Sets text of labelError .
57,065
def check_user ( user , password ) : return ( ( user == attowiki . user or attowiki . user is None ) and ( password == attowiki . password or attowiki . password is None ) )
check the auth for user and password .
57,066
def view_meta_index ( ) : rst_files = [ filename [ 2 : - 4 ] for filename in sorted ( glob . glob ( "./*.rst" ) ) ] rst_files . reverse ( ) return template ( 'index' , type = "view" , filelist = rst_files , name = "__index__" , extended_name = None , history = [ ] , gitref = None , is_repo = check_repo ( ) )
List all the available . rst files in the directory .
57,067
def view_cancel_edit ( name = None ) : if name is None : return redirect ( '/' ) else : files = glob . glob ( "{0}.rst" . format ( name ) ) if len ( files ) > 0 : reset_to_last_commit ( ) return redirect ( '/' + name ) else : return abort ( 404 )
Cancel the edition of an existing page .
57,068
def view_edit ( name = None ) : response . set_header ( 'Cache-control' , 'no-cache' ) response . set_header ( 'Pragma' , 'no-cache' ) if name is None : return template ( 'edit' , type = "edit" , name = name , extended_name = None , is_repo = check_repo ( ) , history = [ ] , gitref = None , today = datetime . datetime . now ( ) . strftime ( "%Y%m%d" ) , content = "" ) else : files = glob . glob ( "{0}.rst" . format ( name ) ) if len ( files ) > 0 : file_handle = open ( files [ 0 ] , 'r' ) return template ( 'edit' , type = "edit" , name = name , extended_name = None , is_repo = check_repo ( ) , history = [ ] , gitref = None , today = datetime . datetime . now ( ) . strftime ( "%Y%m%d" ) , content = file_handle . read ( ) ) else : return abort ( 404 )
Edit or creates a new page .
57,069
def view_pdf ( name = None ) : if name is None : return view_meta_index ( ) files = glob . glob ( "{0}.rst" . format ( name ) ) if len ( files ) > 0 : file_handle = open ( files [ 0 ] , 'r' ) dest_filename = name + '.pdf' doctree = publish_doctree ( file_handle . read ( ) ) try : produce_pdf ( doctree_content = doctree , filename = dest_filename ) except : raise else : return static_file ( dest_filename , root = '' , download = True ) else : return abort ( 404 )
Render a pdf file based on the given page .
57,070
def view_page ( name = None ) : if request . method == 'POST' : if name is None : if len ( request . forms . filename ) > 0 : name = request . forms . filename if name is not None : filename = "{0}.rst" . format ( name ) file_handle = open ( filename , 'w' ) file_handle . write ( request . forms . content . encode ( 'utf-8' ) ) file_handle . close ( ) add_file_to_repo ( filename ) commit ( filename ) response . set_header ( 'Cache-control' , 'no-cache' ) response . set_header ( 'Pragma' , 'no-cache' ) if name is None : index_files = glob . glob ( "./[Ii][Nn][Dd][Ee][Xx].rst" ) if len ( index_files ) == 0 : return view_meta_index ( ) else : name = index_files [ 0 ] [ 2 : - 4 ] files = glob . glob ( "{0}.rst" . format ( name ) ) if len ( files ) > 0 : file_handle = open ( files [ 0 ] , 'r' ) html_body = publish_parts ( file_handle . read ( ) , writer = AttowikiWriter ( ) , settings = None , settings_overrides = None ) [ 'html_body' ] history = commit_history ( "{0}.rst" . format ( name ) ) return template ( 'page' , type = "view" , name = name , extended_name = None , is_repo = check_repo ( ) , history = history , gitref = None , content = html_body ) else : return static_file ( name , '' )
Serve a page name .
57,071
def view_quick_save_page ( name = None ) : response . set_header ( 'Cache-control' , 'no-cache' ) response . set_header ( 'Pragma' , 'no-cache' ) if request . method == 'PUT' : if name is None : if len ( request . forms . filename ) > 0 : name = request . forms . filename if name is not None : filename = "{0}.rst" . format ( name ) file_handle = open ( filename , 'w' ) content = request . body . read ( ) content = content . decode ( 'utf-8' ) file_handle . write ( content . encode ( 'utf-8' ) ) file_handle . close ( ) return "OK" else : return abort ( 404 )
Quick save a page .
57,072
def getHelp ( arg = None ) : if arg == None : print ( '--------------------------------------------------------------' ) print ( 'Hello, this is an interactive help system of HITRANonline API.' ) print ( '--------------------------------------------------------------' ) print ( 'Run getHelp(.) with one of the following arguments:' ) print ( ' tutorial - interactive tutorials on HAPI' ) print ( ' units - units used in calculations' ) print ( ' index - index of available HAPI functions' ) elif arg == 'tutorial' : print ( '-----------------------------------' ) print ( 'This is a tutorial section of help.' ) print ( '-----------------------------------' ) print ( 'Please choose the subject of tutorial:' ) print ( ' data - downloading the data and working with it' ) print ( ' spectra - calculating spectral functions' ) print ( ' plotting - visualizing data with matplotlib' ) print ( ' python - Python quick start guide' ) elif arg == 'python' : print_python_tutorial ( ) elif arg == 'data' : print_data_tutorial ( ) elif arg == 'spectra' : print_spectra_tutorial ( ) elif arg == 'plotting' : print_plotting_tutorial ( ) elif arg == 'index' : print ( '------------------------------' ) print ( 'FETCHING DATA:' ) print ( '------------------------------' ) print ( ' fetch' ) print ( ' fetch_by_ids' ) print ( '' ) print ( '------------------------------' ) print ( 'WORKING WITH DATA:' ) print ( '------------------------------' ) print ( ' db_begin' ) print ( ' db_commit' ) print ( ' tableList' ) print ( ' describe' ) print ( ' select' ) print ( ' sort' ) print ( ' extractColumns' ) print ( ' getColumn' ) print ( ' getColumns' ) print ( ' dropTable' ) print ( '' ) print ( '------------------------------' ) print ( 'CALCULATING SPECTRA:' ) print ( '------------------------------' ) print ( ' profiles' ) print ( ' partitionSum' ) print ( ' absorptionCoefficient_HT' ) print ( ' absorptionCoefficient_Voigt' ) print ( ' absorptionCoefficient_SDVoigt' ) print ( ' 
absorptionCoefficient_Lorentz' ) print ( ' absorptionCoefficient_Doppler' ) print ( ' transmittanceSpectrum' ) print ( ' absorptionSpectrum' ) print ( ' radianceSpectrum' ) print ( '' ) print ( '------------------------------' ) print ( 'CONVOLVING SPECTRA:' ) print ( '------------------------------' ) print ( ' convolveSpectrum' ) print ( ' slit_functions' ) print ( '' ) print ( '------------------------------' ) print ( 'INFO ON ISOTOPOLOGUES:' ) print ( '------------------------------' ) print ( ' ISO_ID' ) print ( ' abundance' ) print ( ' molecularMass' ) print ( ' moleculeName' ) print ( ' isotopologueName' ) print ( '' ) print ( '------------------------------' ) print ( 'MISCELLANEOUS:' ) print ( '------------------------------' ) print ( ' getStickXY' ) print ( ' read_hotw' ) elif arg == ISO : print_iso ( ) elif arg == ISO_ID : print_iso_id ( ) elif arg == profiles : print_profiles ( ) elif arg == slit_functions : print_slit_functions ( ) else : help ( arg )
This function provides interactive manuals and tutorials .
57,073
def convolveSpectrumSame ( Omega , CrossSection , Resolution = 0.1 , AF_wing = 10. , SlitFunction = SLIT_RECTANGULAR ) : step = Omega [ 1 ] - Omega [ 0 ] x = arange ( - AF_wing , AF_wing + step , step ) slit = SlitFunction ( x , Resolution ) print ( 'step=' ) print ( step ) print ( 'x=' ) print ( x ) print ( 'slitfunc=' ) print ( SlitFunction ) CrossSectionLowRes = convolve ( CrossSection , slit , mode = 'same' ) * step return Omega , CrossSectionLowRes , None , None , slit
Convolves cross section with a slit function with given parameters .
57,074
def setup_db ( self , couch , dbname ) : my_db = None self . log . debug ( 'Setting up DB: %s' % dbname ) if dbname not in couch : self . log . info ( "DB doesn't exist so creating DB: %s" , dbname ) try : my_db = couch . create ( dbname ) except : self . log . critical ( "Race condition caught" ) raise RuntimeError ( "Race condition caught when creating DB" ) try : auth_doc = { } auth_doc [ '_id' ] = '_design/auth' auth_doc [ 'language' ] = 'javascript' auth_doc [ 'validate_doc_update' ] = my_db . save ( auth_doc ) except : self . log . error ( 'Could not set permissions of %s' % dbname ) else : my_db = couch [ dbname ] return my_db
Setup and configure DB
57,075
def commit ( self , force = False ) : self . log . debug ( 'Bulk commit requested' ) size = sys . getsizeof ( self . docs ) self . log . debug ( 'Size of docs in KB: %d' , size ) if size > self . commit_threshold or force : self . log . info ( 'Commiting %d KB to CouchDB' % size ) self . my_db . update ( self . docs ) self . docs = [ ]
Commit data to couchdb
57,076
def save ( self , doc ) : self . log . debug ( 'save()' ) self . docs . append ( doc ) self . commit ( )
Save a doc to cache
57,077
def start ( address , channel , key , loop = None ) : if loop is None : loop = asyncio . get_event_loop ( ) socket = yield from websockets . connect ( address + "/robot" , loop = loop ) conn = Connection ( socket , loop ) yield from conn . send ( _create_handshake ( channel , key ) ) return conn
Starts a new Interactive client .
57,078
def _create_handshake ( channel , key ) : hsk = Handshake ( ) hsk . channel = channel hsk . streamKey = key return hsk
Creates and returns a Handshake packet that authenticates on the channel with the given stream key .
57,079
def shell_source ( script ) : pipe = subprocess . Popen ( ". %s; env" % script , stdout = subprocess . PIPE , shell = True ) output = pipe . communicate ( ) [ 0 ] . decode ( ) env = { } for line in output . splitlines ( ) : try : keyval = line . split ( "=" , 1 ) env [ keyval [ 0 ] ] = keyval [ 1 ] except : pass os . environ . update ( env )
Sometime you want to emulate the action of source in bash settings some environment variables . Here is a way to do it .
57,080
def nll ( data , model ) : try : log_lik_vals = model . logpmf ( data ) except : log_lik_vals = model . logpdf ( data ) return - np . sum ( log_lik_vals )
Negative log likelihood given data and a model
57,081
def lrt ( data , model_full , model_reduced , df = None ) : ll_full = nll ( data , model_full ) * - 1 ll_reduced = nll ( data , model_reduced ) * - 1 test_stat = 2 * ( ll_full - ll_reduced ) if not df : df = ( len ( model_full . args ) + len ( model_full . kwds ) - len ( model_reduced . args ) - len ( model_reduced . kwds ) ) return test_stat , stats . chisqprob ( test_stat , df )
Compare two nested models using a likelihood ratio test
57,082
def AIC ( data , model , params = None , corrected = True ) : n = len ( data ) L = nll ( data , model ) if not params : k = len ( model . kwds ) + len ( model . args ) else : k = params if corrected : aic_value = 2 * k + 2 * L + ( 2 * k * ( k + 1 ) ) / ( n - k - 1 ) else : aic_value = 2 * k + 2 * L return aic_value
Akaike Information Criteria given data and a model
57,083
def AIC_compare ( aic_list ) : aic_values = np . array ( aic_list ) minimum = np . min ( aic_values ) delta = aic_values - minimum values = np . exp ( - delta / 2 ) weights = values / np . sum ( values ) return delta , weights
Calculates delta AIC and AIC weights from a list of AIC values
57,084
def sum_of_squares ( obs , pred ) : return np . sum ( ( np . array ( obs ) - np . array ( pred ) ) ** 2 )
Sum of squares between observed and predicted data
57,085
def r_squared ( obs , pred , one_to_one = False , log_trans = False ) : if log_trans : obs = np . log ( obs ) pred = np . log ( pred ) if one_to_one : r_sq = 1 - ( sum_of_squares ( obs , pred ) / sum_of_squares ( obs , np . mean ( obs ) ) ) else : b0 , b1 , r , p_value , se = stats . linregress ( obs , pred ) r_sq = r ** 2 return r_sq
R^2 value for a regression of observed and predicted data
57,086
def preston_bin ( data , max_num ) : log_ub = np . ceil ( np . log2 ( max_num ) ) if log_ub == 0 : boundaries = np . array ( [ 0 , 1 ] ) elif log_ub == 1 : boundaries = np . arange ( 1 , 4 ) else : boundaries = 2 ** np . arange ( 0 , log_ub + 1 ) boundaries = np . insert ( boundaries , 2 , 3 ) boundaries [ 3 : ] = boundaries [ 3 : ] + 1 hist_data = np . histogram ( data , bins = boundaries ) return hist_data
Bins data on base 2 using Preston s method
57,087
def url_to_parts ( url ) : if not url : return None scheme , netloc , path , query , fragment = _urlsplit ( url ) if not path or path == '/' : path = [ ] else : path = path . strip ( '/' ) . split ( '/' ) if not query : query = { } else : query = _parse_qs ( query ) return _urllib_parse . SplitResult ( scheme , netloc , path , query , fragment )
Split url urlsplit style but return path as a list and query as a dict
57,088
def __visit_index_model_instance ( self , models , p , k , v ) : cp = p + ( k , ) for model in models : try : if model . validator ( v ) : if cp in self . path_index : self . path_index [ cp ] . add_model ( model , v ) else : self . path_index [ cp ] = PathCacheObject ( val = v , path = cp , regs = [ model ] ) except : pass
Called during model research on merged data
57,089
def compute_edge_reduction ( self ) -> float : nb_init_edge = self . init_edge_number ( ) nb_poweredge = self . edge_number ( ) return ( nb_init_edge - nb_poweredge ) / ( nb_init_edge )
Compute the edge reduction . Costly computation
57,090
def init_edge_number ( self ) -> int : return len ( frozenset ( frozenset ( edge ) for edge in self . initial_edges ( ) ) )
Return the number of edges present in the non - compressed graph
57,091
def assert_powernode ( self , name : str ) -> None or ValueError : if name not in self . inclusions : raise ValueError ( "Powernode '{}' does not exists." . format ( name ) ) if self . is_node ( name ) : raise ValueError ( "Given name '{}' is a node." . format ( name ) )
Do nothing if given name refers to a powernode in given graph . Raise a ValueError in any other case .
57,092
def powernode_data ( self , name : str ) -> Powernode : self . assert_powernode ( name ) contained_nodes = frozenset ( self . nodes_in ( name ) ) return Powernode ( size = len ( contained_nodes ) , contained = frozenset ( self . all_in ( name ) ) , contained_pnodes = frozenset ( self . powernodes_in ( name ) ) , contained_nodes = contained_nodes , )
Return a Powernode object describing the given powernode
57,093
def node_number ( self , * , count_pnode = True ) -> int : return ( sum ( 1 for n in self . nodes ( ) ) + ( sum ( 1 for n in self . powernodes ( ) ) if count_pnode else 0 ) )
Return the number of node
57,094
def write_bubble ( self , filename : str ) : from bubbletools import converter converter . tree_to_bubble ( self , filename )
Write in given filename the lines of bubble describing this instance
57,095
def from_bubble_file ( bblfile : str , oriented : bool = False , symmetric_edges : bool = True ) -> 'BubbleTree' : return BubbleTree . from_bubble_data ( utils . data_from_bubble ( bblfile ) , oriented = bool ( oriented ) , symmetric_edges = symmetric_edges )
Extract data from given bubble file then call from_bubble_data method
57,096
def set_from_tree ( root : str , graph : dict ) -> frozenset : Node = namedtuple ( 'Node' , 'id succs' ) succs = graph [ root ] if succs : return ( len ( succs ) , sorted ( tuple ( set_from_tree ( succ , graph ) for succ in succs ) ) ) else : return 0 , ( )
Return a recursive structure describing given tree
57,097
def get_suitable_vis_classes ( obj ) : ret = [ ] for class_ in classes_vis ( ) : if isinstance ( obj , class_ . input_classes ) : ret . append ( class_ ) return ret
Retuns a list of Vis classes that can handle obj .
57,098
def get_suitable_vis_list_classes ( objs ) : from f311 import explorer as ex ret = [ ] for class_ in classes_vis ( ) : if isinstance ( class_ , ex . VisList ) : flag_can = True for obj in objs : if not isinstance ( obj , class_ . item_input_classes ) : flag_can = False break if flag_can : ret . append ( class_ ) return ret
Retuns a list of VisList classes that can handle a list of objects .
57,099
def _get_programs_dict ( ) : global __programs_dict if __programs_dict is not None : return __programs_dict d = __programs_dict = OrderedDict ( ) for pkgname in COLLABORATORS_S : try : package = importlib . import_module ( pkgname ) except ImportError : continue path_ = os . path . join ( os . path . split ( package . __file__ ) [ 0 ] , "scripts" ) bulk = a99 . get_exe_info ( path_ , flag_protected = True ) d [ pkgname ] = { "description" : a99 . get_obj_doc0 ( package ) , "exeinfo" : bulk } return __programs_dict
Builds and returns programs dictionary