idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
58,000
def _get_docushare_url(handle, validate=True):
    """Return the ls.st shortlink URL for a DocuShare document handle.

    When ``validate`` is True, issue a HEAD request and raise DocuShareError
    if the request fails, returns non-200, or redirects somewhere other than
    docushare.lsst.org.
    """
    logger = structlog.get_logger(__name__)
    logger.debug('Using Configuration._get_docushare_url')
    # Trailing '*' asks ls.st to redirect to the latest version of the document.
    url = 'https://ls.st/{handle}*'.format(handle=handle.lower())
    if validate:
        logger.debug('Validating {0}'.format(url))
        try:
            response = requests.head(url, allow_redirects=True, timeout=30)
        except requests.exceptions.RequestException as e:
            raise DocuShareError(str(e))
        error_message = 'URL {0} does not resolve to DocuShare'.format(url)
        if response.status_code != 200:
            logger.warning('HEAD {0} status: {1:d}'.format(url, response.status_code))
            raise DocuShareError(error_message)
        # response.url is the final URL after redirects; it must land on DocuShare.
        redirect_url_parts = urllib.parse.urlsplit(response.url)
        if redirect_url_parts.netloc != 'docushare.lsst.org':
            logger.warning('{0} resolved to {1}'.format(url, response.url))
            raise DocuShareError(error_message)
    return url
Get a DocuShare URL given a document's handle.
58,001
def _init_defaults(self):
    """Return a fresh dict of default configuration values."""
    return {
        'build_dir': None,
        # Build timestamp is captured now, in UTC.
        'build_datetime': datetime.datetime.now(dateutil.tz.tzutc()),
        'pdf_path': None,
        'extra_downloads': [],
        'environment': None,
        'lsstdoc_tex_path': None,
        'title': None,
        'title_plain': "",
        'authors': None,
        'authors_json': [],
        'doc_handle': None,
        'series': None,
        'series_name': None,
        'abstract': None,
        'abstract_plain': "",
        'ltd_product': None,
        'docushare_url': None,
        'github_slug': None,
        'git_branch': 'master',
        'git_commit': None,
        'git_tag': None,
        'travis_job_number': None,
        'is_travis_pull_request': False,
        'is_draft_branch': True,
        'aws_id': None,
        'aws_secret': None,
        'keeper_url': 'https://keeper.lsst.codes',
        'keeper_user': None,
        'keeper_password': None,
        'upload': False,
    }
Create a dict of default configurations .
58,002
def create_permissions_from_tuples(model, codename_tpls):
    """Create custom Permissions on `model` from (app_label, codename, name) tuples.

    No-op when codename_tpls is empty/None.
    """
    if codename_tpls:
        model_cls = django_apps.get_model(model)
        content_type = ContentType.objects.get_for_model(model_cls)
        for codename_tpl in codename_tpls:
            app_label, codename, name = get_from_codename_tuple(codename_tpl, model_cls._meta.app_label)
            try:
                Permission.objects.get(codename=codename, content_type=content_type)
            except ObjectDoesNotExist:
                # EAFP: create only when the permission does not already exist.
                Permission.objects.create(name=name, codename=codename, content_type=content_type)
            verify_codename_exists(f"{app_label}.{codename}")
Creates custom permissions on model model .
58,003
def remove_historical_group_permissions(group=None, allowed_permissions=None):
    """Remove `group` permissions on historical models, keeping allowed actions.

    allowed_permissions is a list of codename prefixes to keep (default ["view"]).
    NOTE(review): with more than one allowed action, each pass removes
    permissions not starting with *that* action, so a permission allowed by one
    action may be removed by another pass — confirm intended for multi-action use.
    """
    allowed_permissions = allowed_permissions or ["view"]
    for action in allowed_permissions:
        for permission in group.permissions.filter(codename__contains="historical").exclude(codename__startswith=action):
            group.permissions.remove(permission)
Removes group permissions for historical models except those whose codename prefix is in allowed_permissions (default: "view").
58,004
def traversal(root):
    """Yield (node, stack) pairs for every node of an ast tree, deepest-first.

    Each node is expanded lazily: on first visit its children are recorded in a
    ``children`` set attribute; a node is yielded only once that set is empty
    (i.e. all of its descendants have already been yielded). The root is
    therefore yielded last.
    """
    stack = [root]
    while stack:
        node = stack.pop()
        if hasattr(node, 'children'):
            if node.children == set():
                # Fully expanded: detach from the parent's pending-children set.
                # BUGFIX: the original used a bare `except: pass`; narrowed to
                # the failures that can actually occur here (empty stack, or a
                # stack top that has no/other children).
                try:
                    stack[-1].children.remove(node)
                except (IndexError, AttributeError, KeyError):
                    pass
                yield (node, stack)
            else:
                # Re-push the node followed by one pending child.
                childnode = node.children.pop()
                stack += [node, childnode]
        else:
            # First visit: record the children still to be emitted.
            children = [x for x in ast.iter_child_nodes(node)]
            node.children = set(children)
            stack.append(node)
Tree traversal function that generates nodes . For each subtree the deepest node is evaluated first . Then the next - deepest nodes are evaluated until all the nodes in the subtree are generated .
58,005
def firstPass(ASTs, verbose):
    """Collect function defs, class defs and imports from each parsed module.

    ASTs is an iterable of (root, path) pairs. Returns
    (fdefs, imp_obj_strs, imp_mods, cdefs), all dicts keyed by source path.
    """
    fdefs = dict()
    cdefs = dict()
    imp_obj_strs = dict()
    imp_mods = dict()
    for (root, path) in ASTs:
        fdefs[path] = []
        # The module body itself is recorded first as a synthetic 'body' node.
        fdefs[path].append(formatBodyNode(root, path))
        imp_obj_strs[path] = []
        imp_mods[path] = []
        cdefs[path] = []
        for (node, stack) in traversal(root):
            if isinstance(node, ast.FunctionDef):
                fdefs[path].append(formatFunctionNode(node, path, stack))
            elif isinstance(node, ast.ImportFrom):
                module = ia.getImportFromModule(node, path, verbose)
                if module:
                    # Record (module, object) pairs for `from module import obj`.
                    fn_names = ia.getImportFromObjects(node)
                    for fn_name in fn_names:
                        imp_obj_strs[path].append((module, fn_name))
                else:
                    if verbose:
                        print("No module found " + ast.dump(node))
            elif isinstance(node, ast.Import):
                module = ia.getImportModule(node, path, verbose)
                imp_mods[path].append(module)
            elif isinstance(node, ast.ClassDef):
                node.path = path
                cdefs[path].append(node)
    return fdefs, imp_obj_strs, imp_mods, cdefs
Return a dictionary of function definition nodes a dictionary of imported object names and a dictionary of imported module names . All three dictionaries use source file paths as keys .
58,006
def formatBodyNode(root, path):
    """Decorate the module root so it can act as the synthetic 'body' function node."""
    root.name = "body"
    root.weight = calcFnWeight(root)
    root.path = path
    root.pclass = None
    return root
Format the root node for use as the body node .
58,007
def formatFunctionNode(node, path, stack):
    """Attach helper attributes (weight, source path, enclosing class) to `node`."""
    node.weight, node.path, node.pclass = calcFnWeight(node), path, getCurrentClass(stack)
    return node
Add some helpful attributes to node .
58,008
def getSourceFnDef(stack, fdefs, path):
    # Find the FunctionDef in fdefs[path] matching the innermost function on
    # the traversal stack; fall back to the synthetic module 'body' node.
    # NOTE(review): VERY VERY SLOW — compares ast.dump() of stack nodes against
    # every known def.
    found = False
    for x in stack:
        if isinstance(x, ast.FunctionDef):
            for y in fdefs[path]:
                if ast.dump(x) == ast.dump(y):
                    found = True
                    return y
            # NOTE(review): bare `raise` with no active exception raises a
            # RuntimeError here — presumably an intentional "should never
            # happen" failure; confirm.
            raise
    if not found:
        for y in fdefs[path]:
            if y.name == 'body':
                return y
        raise
Find the function definition enclosing the current traversal position. Known to be very, very slow.
58,009
def delete_database(mongo_uri, database_name):
    """Drop an entire MongoDB database via pymongo (assumes a reachable mongod)."""
    pymongo.MongoClient(mongo_uri).drop_database(database_name)
Delete a mongo database using pymongo . Mongo daemon assumed to be running .
58,010
def delete_collection(mongo_uri, database_name, collection_name):
    """Drop one collection from a MongoDB database (assumes a reachable mongod)."""
    client = pymongo.MongoClient(mongo_uri)
    client[database_name].drop_collection(collection_name)
Delete a mongo document collection using pymongo . Mongo daemon assumed to be running .
58,011
def create_staging_collection(resource):
    """Build a staging collection for the given registered resource."""
    entity_cls = get_entity_class(resource)
    collection_cls = get_collection_class(resource)
    return collection_cls.create_from_aggregate(StagingAggregate(entity_cls))
Helper function to create a staging collection for the given registered resource .
58,012
def parse(self):
    """Parse the whole source string; on PartpyError mark failure and print it."""
    try:
        result = self.parse_top_level()
    except PartpyError as ex:
        self.error = True
        print(ex.pretty_print())
    else:
        return result
Run the parser over the entire source string and return the results.
58,013
def parse_top_level(self):
    """Repeatedly parse contacts (eating trailing whitespace) until input ends,
    then return them as a name -> email dict."""
    contacts = []
    while not self.eos:
        entry = self.parse_contact()
        if not entry:
            break
        contacts.append(entry)
        self.parse_whitespace()
    return dict(contacts)
The top level parser will do a loop where it looks for a single contact parse and then eats all whitespace until there is no more input left or another contact is found to be parsed and stores them .
58,014
def parse_contact(self):
    """Parse one contact: a name, a ':' or '-' separator, then an email.

    Raises PartpyError when any of the three pieces is missing.
    """
    def require(value, message):
        # Raise a parse error when an expected piece is absent.
        if not value:
            raise PartpyError(self, message)
        return value

    self.parse_whitespace()
    name = require(self.parse_name(), 'Expecting a name')
    self.parse_whitespace()
    require(self.match_any_char(':-'), 'Expecting : or -')
    self.eat_length(1)
    self.parse_whitespace()
    email = require(self.parse_email(), 'Expecting an email address')
    return (name, email)
Parse a top level contact expression these consist of a name expression a special char and an email expression .
58,015
def parse_name(self):
    """Match a title-cased name, consuming space-separated parts (e.g. surnames)
    until no more match; raise PartpyError when nothing matched."""
    parts = []
    while True:
        part = self.match_string_pattern(spat.alphau, spat.alphal)
        if part == '':
            break
        self.eat_string(part)
        parts.append(part)
        # A single space continues the name (first name / surname / etc.).
        if self.get_char() == ' ':
            self.eat_length(1)
    if not parts:
        raise PartpyError(self, 'Expecting a title cased name')
    return ' '.join(parts)
This function uses string patterns to match a title cased name . This is done in a loop until there are no more names to match so as to be able to include surnames etc . in the output .
58,016
def get_development_container_name(self):
    """Return the development image name: '<repository>:[prefix-]<branch>-dev'."""
    if not self.__prefix:
        return "{0}:{1}-dev".format(self.__repository, self.__branch)
    return "{0}:{1}-{2}-dev".format(self.__repository, self.__prefix, self.__branch)
Returns the development container name
58,017
def get_build_container_tag(self):
    """Return the build container tag: '[prefix-]<branch>-<version>'."""
    if not self.__prefix:
        return "{0}-{1}".format(self.__branch, self.__version)
    return "{0}-{1}-{2}".format(self.__prefix, self.__branch, self.__version)
Return the build container tag
58,018
def get_branch_container_tag(self):
    """Return the branch container tag: '[prefix-]<branch>'."""
    if not self.__prefix:
        return "{0}".format(self.__branch)
    return "{0}-{1}".format(self.__prefix, self.__branch)
Returns the branch container tag
58,019
def custom_server_error(request, template_name='500.html', admin_template_name='500A.html'):
    """500 error handler: full traceback for superusers, last line for staff.

    Admin-site requests get the admin template instead.
    """
    trace = None
    # NOTE: is_authenticated() called as a method — pre-Django-1.10 API.
    if request.user.is_authenticated() and (request.user.is_staff or request.user.is_superuser):
        try:
            import traceback, sys
            trace = traceback.format_exception(*(sys.exc_info()))
            if not request.user.is_superuser and trace:
                # Staff members only see the final line of the traceback.
                trace = trace[-1:]
            trace = '\n'.join(trace)
        except:
            # Best effort only: the error page itself must never fail.
            pass
    if request.path.startswith('/%s' % admin.site.name):
        template_name = admin_template_name
    t = loader.get_template(template_name)
    return http.HttpResponseServerError(t.render(Context({'trace': trace})))
500 error handler . Displays a full trackback for superusers and the first line of the traceback for staff members .
58,020
def parse_n_jobs(s):
    """Parse a job-count spec as a function of CPU count.

    Accepts an int, a float (truncated), or a string like "4", "0.5", "n",
    "2n", "2 * n" where n is the CPU count. Non-positive results are clamped
    to 1 with a warning. Raises ValueError for unparseable strings and
    TypeError for other types.
    """
    total_cpus = cpu_count()
    if isinstance(s, int):
        jobs = s
    elif isinstance(s, float):
        jobs = int(s)
    elif isinstance(s, str):
        m = re.match(r'(\d*(?:\.\d*)?)?(\s*\*?\s*n)?$', s.strip())
        if m is None:
            raise ValueError('Unable to parse n_jobs="{}"'.format(s))
        k = float(m.group(1)) if m.group(1) else 1
        # A trailing 'n' (or a fraction < 1) scales by the CPU count.
        if m.group(2) or k < 1:
            jobs = k * total_cpus
        else:
            jobs = int(k)
    else:
        raise TypeError('n_jobs argument must be of type str, int, or float.')
    jobs = int(jobs)
    if jobs <= 0:
        warnings.warn('n_jobs={} is invalid. Setting n_jobs=1.'.format(jobs))
        jobs = 1
    return int(jobs)
This function parses a math - like string as a function of CPU count . It is useful for specifying the number of jobs .
58,021
def _load_settings_from_source(self, source):
    """Yield (name, settings_dict) pairs loaded from `source`.

    `source` may be: falsy (ignored), the sentinels 'env_settings_uri'/'env',
    a ParseResult URI, a string (module name or file path/URI), a file-like
    object, a dict-like object, or an arbitrary object whose __dict__ is used.
    """
    if not source:
        pass
    elif source == 'env_settings_uri':
        # Indirect: environment variables that themselves name a settings URI.
        for env_settings_uri_key in self.env_settings_uri_keys:
            env_settings_uri = self._search_environ(env_settings_uri_key)
            if env_settings_uri:
                logger.debug('Found {} in the environment.'.format(env_settings_uri_key))
                yield env_settings_uri, self._load_settings_from_uri(env_settings_uri)
    elif source == 'env':
        logger.debug('Loaded {} settings from the environment.'.format(len(os.environ)))
        yield source, dict(os.environ.items())
    elif isinstance(source, ParseResult):
        settings = self._load_settings_from_uri(source)
        yield source, settings
    elif isinstance(source, str):
        # Try the string as an importable module first; fall back to a file/URI.
        try:
            spec = importlib.util.find_spec(source)
        except (AttributeError, ImportError):
            spec = None
        settings = self._load_settings_from_spec(spec, name=source)
        if settings is None:
            _, ext = os.path.splitext(source)
            with uri_open(source, 'rb') as f:
                yield source, self._load_settings_from_file(f, ext=ext)
        else:
            yield source, settings
    elif hasattr(source, 'read'):
        # File-like object.
        yield source.name, self._load_settings_from_file(source)
    elif hasattr(source, 'items'):
        # Dict-like object: nested URI keys are followed recursively first.
        source_type = type(source).__name__
        for dict_settings_uri_key in self.dict_settings_uri_keys:
            if dict_settings_uri_key and dict_settings_uri_key in source and source[dict_settings_uri_key]:
                logger.debug('Found {} in the dict-like object <{}>.'.format(dict_settings_uri_key, source_type))
                yield from self._load_settings_from_source(source[dict_settings_uri_key])
        logger.debug('Loaded {} settings from dict-like object <{}>.'.format(len(source), source_type))
        yield self._get_unique_name(source_type), source
    else:
        # Arbitrary object: follow URI-bearing attributes, then use __dict__.
        source_type = type(source).__name__
        for object_settings_uri_key in self.object_settings_uri_keys:
            if object_settings_uri_key and hasattr(source, object_settings_uri_key) and getattr(source, object_settings_uri_key):
                logger.debug('Found {} in the object <{}>.'.format(object_settings_uri_key, source_type))
                yield from self._load_settings_from_source(getattr(source, object_settings_uri_key))
        settings = dict((k, v) for k, v in source.__dict__.items() if not k.startswith('__'))
        logger.debug('Loaded {} settings from object <{}>.'.format(len(settings), source_type))
        yield self._get_unique_name(source_type), settings
Loads the relevant settings from the specified source .
58,022
def get(self, key, *, default=None, cast_func=None, case_sensitive=None, raise_exception=None, warn_missing=None, use_cache=True, additional_sources=[]):
    """Get the setting `key`, consulting the cache first, then every source.

    Per-call keyword overrides fall back to the instance-wide defaults.
    NOTE(review): additional_sources=[] is a mutable default (never mutated
    here, but fragile); also, map(self._load_settings_from_source, ...) yields
    generator objects, which look like they cannot unpack into
    (source, settings) below — confirm additional_sources actually works.
    """
    case_sensitive = self.case_sensitive if case_sensitive is None else case_sensitive
    raise_exception = self.raise_exception if raise_exception is None else raise_exception
    warn_missing = self.warn_missing if warn_missing is None else warn_missing
    if not case_sensitive:
        key = key.lower()
    # Cache stores the raw value; cast_func is re-applied on every hit.
    if use_cache and key in self._cache:
        return cast_func(self._cache[key]) if cast_func else self._cache[key]
    found, value = False, None
    for source, settings in chain(self._settings.items(), map(self._load_settings_from_source, additional_sources)):
        if case_sensitive:
            if key in settings:
                found = True
                value = settings[key]
            else:
                continue
        else:
            # Case-insensitive lookup may match several keys; first wins.
            possible_keys = [k for k in settings.keys() if k.lower() == key]
            if not possible_keys:
                continue
            else:
                if len(possible_keys) > 1:
                    warnings.warn('There are more than one possible value for "{}" in <{}> settings due to case insensitivity.'.format(key, source))
                found = True
                value = settings[possible_keys[0]]
        if found:
            break
    if not found:
        if raise_exception:
            raise MissingSettingException('The "{}" setting is missing.'.format(key))
        if warn_missing:
            warnings.warn('The "{}" setting is missing.'.format(key))
        return default
    if use_cache:
        self._cache[key] = value
    if cast_func:
        value = cast_func(value)
    return value
Gets the setting specified by key . For efficiency we cache the retrieval of settings to avoid multiple searches through the sources list .
58,023
def _in_list(self, original_list, item):
    """Membership test by object identity (``is``), not equality."""
    return any(candidate is item for candidate in original_list)
Check that an item as contained in a list .
58,024
def _sort_results(self, results):
    """Group results by parent (identity), then order each group by the
    element's position among its parent's contents."""
    parents = []
    groups = []
    for result in results:
        if self._in_list(parents, result.parent):
            groups[parents.index(result.parent)].append(result)
        else:
            parents.append(result.parent)
            groups.append([result])
    ordered = []
    for group in groups:
        ordered += sorted(group, key=lambda element: element.parent.contents.index(element))
    return ordered
Order the results .
58,025
def _fix_data_select(self):
    # Work around a selector limitation: hyphens inside data-* attribute names
    # break searches, so rename the 'data-' prefix to 'dataaaaaa' everywhere.
    elements = self.document.select('*')
    for element in elements:
        attributes = element.attrs.keys()
        data_attributes = list()
        for attribute in attributes:
            # Only attributes that start with 'data-' are rewritten.
            if bool(re.findall('^data-', attribute)):
                data_attributes.append({'original': attribute, 'modified': re.sub('data-', 'dataaaaaa', attribute), 'value': element[attribute]})
        if data_attributes:
            # Mutate through the wrapper API rather than the raw element.
            auxiliar_element = BeautifulSoupHTMLDOMElement(element)
            for data_attribute in data_attributes:
                auxiliar_element.remove_attribute(data_attribute['original'])
                auxiliar_element.set_attribute(data_attribute['modified'], data_attribute['value'])
Replace all hyphens of data attributes for aaaaa to avoid error in search .
58,026
def render_activity(activity, grouped_activity=None, *args, **kwargs):
    """Render the per-model template snippet for an activity, or None when the
    matching template does not exist."""
    # Template path is derived from the activity's content type:
    # activity_monitor/includes/models/<app_label>_<model>.html
    template_name = 'activity_monitor/includes/models/{0.app_label}_{0.model}.html'.format(activity.content_type)
    try:
        tmpl = loader.get_template(template_name)
    except template.TemplateDoesNotExist:
        return None
    content_object = activity.content_object
    return tmpl.render(Context({'activity': activity, 'obj': content_object, 'grouped_activity': grouped_activity}))
Given an activity will attempt to render the matching template snippet for that activity s content object or will return a simple representation of the activity .
58,027
def show_activity_count(date=None):
    """Count Activity records since `date`; defaults to the last 24 hours."""
    since = date or (datetime.datetime.now() - datetime.timedelta(hours=24))
    return Activity.objects.filter(timestamp__gte=since).count()
Simple filter to get activity count for a given day . Defaults to today .
58,028
def __root_path(self):
    # Validate the configured root path and add it to sys.path.
    if self.root_path is not None:
        if os.path.isdir(self.root_path):
            sys.path.append(self.root_path)
            return
        # NOTE(review): structure reconstructed — assumed the error applies
        # only when a root_path was set but is not a directory ("checks the
        # root path if set"); confirm the raise is not also meant for None.
        raise RuntimeError('EverNode requires a valid root path.'
                           ' Directory: %s does not exist' % (self.root_path))
Just checks the root path if set
58,029
def write_metadata(self, output_path):
    """Build a JSON-LD dataset for LSST Projectmeta and write it to output_path.

    Skips (with an info log) when no LaTeX source was configured.
    """
    if self._config.lsstdoc is None:
        self._logger.info('No known LSST LaTeX source (--tex argument). '
                          'Not writing a metadata.jsonld file.')
        return
    product_data = ltdclient.get_product(self._config)
    metadata = self._config.lsstdoc.build_jsonld(
        url=product_data['published_url'],
        code_url=product_data['doc_repo'],
        ci_url='https://travis-ci.org/' + self._config['github_slug'],
        readme_url=None,
        license_id=None)
    # Compact separators keep the file small; non-ASCII written as-is.
    json_text = encode_jsonld(metadata, separators=(',', ':'), ensure_ascii=False)
    with open(output_path, 'w') as f:
        f.write(json_text)
Build a JSON - LD dataset for LSST Projectmeta .
58,030
def upload_site(self):
    """Upload a previously built site to LSST the Docs; raise if it is not built."""
    build_dir = self._config['build_dir']
    if not os.path.isdir(build_dir):
        message = 'Site not built at {0}'.format(build_dir)
        self._logger.error(message)
        raise RuntimeError(message)
    ltdclient.upload(self._config)
Upload a previously - built site to LSST the Docs .
58,031
def libraries():
    """Return the installed library names, sorted alphabetically."""
    return sorted(str(entry.name) for entry in libraries_dir().dirs())
return installed library names .
58,032
def lib_examples(lib):
    """Return the sorted example names for `lib`, or [] when it has none."""
    examples_dir = lib_examples_dir(lib)
    if not examples_dir.exists():
        return []
    return sorted(entry.name for entry in examples_dir.dirs())
return library examples .
58,033
def safe_eval(source, *args, **kwargs):
    # SECURITY NOTE(review): stripping the substring 'import' does NOT make
    # eval safe — builtins, attribute access (object subclass walks), and
    # reconstructed strings remain exploitable. It also mangles innocent
    # words containing 'import'. Never use this on untrusted input.
    source = source.replace('import', '')
    return eval(source, *args, **kwargs)
eval without import
58,034
def intersect(self, **kwargs):
    """Intersect a line or point query with the shoreline geometries.

    Accepts exactly one of:
      * linestring=LineString
      * start_point=Point and end_point=Point
      * single_point=Point
    Returns {'point': ..., 'feature': ...} describing the first intersection
    (feature is the shoreline segment hit, or None), or None when nothing
    intersects. Raises TypeError when no usable geometry kwargs are given and
    Exception when a two-point query starts on land.
    """
    ls = None
    if "linestring" in kwargs:
        ls = kwargs.pop('linestring')
        spoint = Point(ls.coords[0])
        epoint = Point(ls.coords[-1])
    # BUGFIX: original tested `"start_point" and "end_point" in kwargs`, which
    # only checked end_point (a non-empty string literal is always truthy).
    elif "start_point" in kwargs and "end_point" in kwargs:
        spoint = kwargs.get('start_point')
        epoint = kwargs.get('end_point')
        ls = LineString(list(spoint.coords) + list(epoint.coords))
    elif "single_point" in kwargs:
        spoint = kwargs.get('single_point')
        epoint = None
        ls = LineString(list(spoint.coords) + list(spoint.coords))
    else:
        raise TypeError("must provide a LineString geometry object, (2) Point geometry objects, or (1) Point geometry object")
    inter = False
    # Re-index the shoreline cache when the query leaves the cached window.
    if self._spatial_query_object is None or (self._spatial_query_object and not ls.within(self._spatial_query_object)):
        self.index(point=spoint)
    for element in self._geoms:
        prepped_element = prep(element)
        if prepped_element.contains(spoint):
            if epoint is None:
                # Single-point query starting on land.
                return {'point': spoint, 'feature': None}
            else:
                raise Exception('Starting point on land: %s %s %s' % (spoint.envelope, epoint.envelope, element.envelope))
        else:
            if epoint is None:
                continue
            inter = ls.intersection(element)
            if inter:
                if isinstance(inter, MultiLineString):
                    inter = inter.geoms[0]
                inter = Point(inter.coords[0])
                # Search a small buffer around the hit for the shoreline edge.
                smaller_int = inter.buffer(self._spatialbuffer)
                shorelines = element.exterior.intersection(smaller_int)
                if isinstance(shorelines, LineString):
                    shorelines = [shorelines]
                else:
                    shorelines = list(shorelines)
                # ROBUSTNESS: initialize so an empty `shorelines` cannot raise
                # NameError on the return below.
                shore_segment = None
                for shore_segment in shorelines:
                    if ls.touches(shore_segment):
                        break
                return {'point': Point(inter.x, inter.y, 0), 'feature': shore_segment or None}
    return None
Intersect a Line or Point Collection and the Shoreline
58,035
def __bounce(self, **kwargs):
    # Reflect a particle's track off the shoreline feature it hit, continuing
    # for the remaining travel distance along the bounce azimuth.
    start_point = kwargs.pop('start_point')
    hit_point = kwargs.pop('hit_point')
    end_point = kwargs.pop('end_point')
    feature = kwargs.pop('feature')
    distance = kwargs.pop('distance')
    angle = kwargs.pop('angle')
    # Approximate the shoreline's orientation from its westmost/eastmost points.
    points_in_shore = map(lambda x: Point(x), list(feature.coords))
    points_in_shore = sorted(points_in_shore, key=lambda x: x.x)
    first_shore = points_in_shore[0]
    last_shore = points_in_shore[-1]
    shoreline_x = abs(abs(first_shore.x) - abs(last_shore.x))
    shoreline_y = abs(abs(first_shore.y) - abs(last_shore.y))
    beta = math.degrees(math.atan(shoreline_x / shoreline_y))
    theta = 90 - angle - beta
    # Mirror the incoming angle across the shoreline to get the exit azimuth.
    bounce_azimuth = AsaMath.math_angle_to_azimuth(angle=2 * theta + angle)
    # NOTE: Python 2 print statements — this module predates Python 3.
    print "Beta: " + str(beta)
    print "Incoming Angle: " + str(angle)
    print "ShorelineAngle: " + str(theta + angle)
    print "Bounce Azimuth: " + str(bounce_azimuth)
    print "Bounce Angle: " + str(AsaMath.azimuth_to_math_angle(azimuth=bounce_azimuth))
    # Travel the remaining (pre-hit) distance from the hit point outward.
    after_distance = distance - AsaGreatCircle.great_distance(start_point=start_point, end_point=hit_point)['distance']
    new_point = AsaGreatCircle.great_circle(distance=after_distance, azimuth=bounce_azimuth, start_point=hit_point)
    return Location4D(latitude=new_point['latitude'], longitude=new_point['longitude'], depth=start_point.depth)
Bounce off of the shoreline .
58,036
def __reverse(self, **kwargs):
    # Back a particle off the shoreline roughly along its reverse azimuth
    # (with a small random perturbation), retrying with adjusted distances.
    start_point = kwargs.pop('start_point')
    hit_point = kwargs.pop('hit_point')
    distance = kwargs.pop('distance')
    azimuth = kwargs.pop('azimuth')
    reverse_azimuth = kwargs.pop('reverse_azimuth')
    reverse_distance = kwargs.get('reverse_distance', None)
    if reverse_distance is None:
        reverse_distance = 100
    # Perturb the reversal azimuth by up to 5 units of AsaRandom.random().
    random_azimuth = reverse_azimuth + AsaRandom.random() * 5
    count = 0
    nudge_distance = 0.01
    nudge_point = AsaGreatCircle.great_circle(distance=nudge_distance, azimuth=reverse_azimuth, start_point=hit_point)
    nudge_loc = Location4D(latitude=nudge_point['latitude'], longitude=nudge_point['longitude'], depth=start_point.depth)
    # Double the nudge distance until the nudged point is in water (max 16 tries).
    while self.intersect(single_point=nudge_loc.point) and count < 16:
        nudge_distance *= 2
        nudge_point = AsaGreatCircle.great_circle(distance=nudge_distance, azimuth=reverse_azimuth, start_point=hit_point)
        nudge_loc = Location4D(latitude=nudge_point['latitude'], longitude=nudge_point['longitude'], depth=start_point.depth)
        count += 1
    if count == 16:
        logger.debug("WOW. Could not find location in water to do shoreline calculation with. Assuming particle did not move from original location")
        return start_point
    count = 0
    changing_distance = reverse_distance
    new_point = AsaGreatCircle.great_circle(distance=reverse_distance, azimuth=random_azimuth, start_point=hit_point)
    new_loc = Location4D(latitude=new_point['latitude'], longitude=new_point['longitude'], depth=start_point.depth)
    # Halve the retreat distance until the path back to water no longer
    # crosses the shoreline (max 12 tries).
    while self.intersect(start_point=nudge_loc.point, end_point=new_loc.point) and count < 12:
        changing_distance /= 2
        new_point = AsaGreatCircle.great_circle(distance=changing_distance, azimuth=random_azimuth, start_point=hit_point)
        new_loc = Location4D(latitude=new_point['latitude'], longitude=new_point['longitude'], depth=start_point.depth)
        count += 1
    if count == 12:
        logger.debug("Could not react particle with shoreline. Assuming particle did not move from original location")
        return start_point
    return new_loc
Reverse particle just off of the shore in the direction that it came in . Adds a slight random factor to the distance and angle it is reversed in .
58,037
def get_feature_type_info(self):
    """Return this WFS FeatureType's capabilities entry as a dict, or None."""
    caps = self.get_capabilities()
    if caps is None:
        return None
    el = caps.find('{http://www.opengis.net/wfs}FeatureTypeList')
    for e in el.findall('{http://www.opengis.net/wfs}FeatureType'):
        if e.find('{http://www.opengis.net/wfs}Name').text == self._feature_name:
            # tag[28:] strips the '{http://www.opengis.net/wfs}' namespace prefix.
            d = {sube.tag[28:]: sube.text or sube.attrib or None for sube in e.getchildren()}
            # NOTE: Python 2 / legacy ElementTree APIs (iteritems, getchildren).
            # Bounding-box coordinates are rounded to 4 decimal places.
            llbb = {k: round(float(v), 4) for k, v in d['LatLongBoundingBox'].iteritems()}
            d['LatLongBoundingBox'] = box(llbb['minx'], llbb['miny'], llbb['maxx'], llbb['maxy'])
            return d
    return None
Gets FeatureType as a python dict .
58,038
def extract_edges_from_callable(fn):
    """Call `fn` with a collector and return the string values it was given.

    The collector returns its positional arguments followed by its keyword
    argument values. Raises ValueError when any collected value is not a str.
    """
    def _collect(*args, **kwargs):
        return [*args, *kwargs.values()]

    edges = fn(_collect)
    non_strings = [edge for edge in edges if not isinstance(edge, str)]
    if non_strings:
        raise ValueError('Provided edge "{}" is not a string'.format(non_strings[0]))
    return list(edges)
This takes args and kwargs provided and returns the names of the strings assigned . If a string is not provided for a value an exception is raised .
58,039
def parse_and_bind(self, string):
    u'''Parse and execute a single line of a readline init file.'''
    try:
        log(u'parse_and_bind("%s")' % string)
        # Comment lines are ignored.
        if string.startswith(u'#'):
            return
        # 'set <var> <value>' lines assign onto the active mode object.
        if string.startswith(u'set'):
            m = re.compile(ur'set\s+([-a-zA-Z0-9]+)\s+(.+)\s*$').match(string)
            if m:
                var_name = m.group(1)
                val = m.group(2)
                try:
                    setattr(self.mode, var_name.replace(u'-', u'_'), val)
                except AttributeError:
                    log(u'unknown var="%s" val="%s"' % (var_name, val))
            else:
                log(u'bad set "%s"' % string)
            return
        # '<key>: <function-name>' lines bind a key to a mode method.
        m = re.compile(ur'\s*(.+)\s*:\s*([-a-zA-Z]+)\s*$').match(string)
        if m:
            key = m.group(1)
            func_name = m.group(2)
            py_name = func_name.replace(u'-', u'_')
            try:
                func = getattr(self.mode, py_name)
            except AttributeError:
                log(u'unknown func key="%s" func="%s"' % (key, func_name))
                if self.debug:
                    # NOTE: Python 2 print statement.
                    print u'pyreadline parse_and_bind error, unknown function to bind: "%s"' % func_name
                return
            self.mode._bind_key(key, func)
    except:
        # Log, then re-raise: the bare except does not swallow the error.
        log(u'error')
        raise
u Parse and execute single line of a readline init file .
58,040
def _bell(self):
    u'''Ring the bell according to the configured bell_style.'''
    if self.bell_style == u'none':
        # Bell disabled.
        pass
    elif self.bell_style == u'visible':
        raise NotImplementedError(u"Bellstyle visible is not implemented yet.")
    elif self.bell_style == u'audible':
        self.console.bell()
    else:
        raise ReadlineError(u"Bellstyle %s unknown." % self.bell_style)
u ring the bell if requested .
58,041
def set_list_attributes(element1, element2, attributes):
    """Copy each listed attribute that exists on element1 onto element2."""
    present = (attr for attr in attributes if element1.has_attribute(attr))
    for attr in present:
        element2.set_attribute(attr, element1.get_attribute(attr))
Copy a list of attributes of a element for other element .
58,042
def increase_in_list(list_to_increase, string_to_increase):
    """Append an item to a space-separated HTML token list, avoiding duplicates."""
    if not list_to_increase:
        # Empty list: the result is just the (possibly empty) item.
        return string_to_increase
    if not string_to_increase:
        return list_to_increase
    if CommonFunctions.in_list(list_to_increase, string_to_increase):
        return list_to_increase
    return list_to_increase + ' ' + string_to_increase
Add an item to a space-separated HTML token list if it is not already present.
58,043
def in_list(list_to_search, string_to_search):
    """Return True if the whitespace-separated token list contains the item.

    ROBUSTNESS: the original implicitly returned None when either argument was
    empty; this now returns an explicit False (equivalent in boolean context).
    """
    if (not list_to_search) or (not string_to_search):
        return False
    # Tokens are separated by spaces, newlines, tabs or carriage returns.
    return string_to_search in re.split('[ \n\t\r]+', list_to_search)
Verify if the list contains the item .
58,044
def is_valid_element(element):
    """Return False when the element or any ancestor below BODY/HTML carries
    the data-ignore attribute; True otherwise."""
    if element.has_attribute(CommonFunctions.DATA_IGNORE):
        return False
    else:
        parent_element = element.get_parent_element()
        if parent_element is not None:
            tag_name = parent_element.get_tag_name()
            # Recurse upward, stopping at BODY/HTML (those never opt out).
            if (tag_name != 'BODY') and (tag_name != 'HTML'):
                return CommonFunctions.is_valid_element(parent_element)
            return True
        return True
Check that the element can be manipulated by HaTeMiLe .
58,045
def get_joke():
    """Return a random joke string from the chucknorris.io API, or None on failure."""
    response = requests.get("https://api.chucknorris.io/jokes/random")
    if response.status_code != 200:
        return None
    payload = json.loads(response.content.decode("UTF-8"))
    return payload["value"]
Returns a joke from the chucknorris.io API. Returns None if unable to retrieve a joke.
58,046
def burnin(self, n):
    """Discard the first n ensemble members (burn-in) from the MCMC output."""
    # sediment_rate is 2-D (chains are columns); the rest are 1-D sequences.
    self.sediment_rate = self.sediment_rate[:, n:]
    for attr in ('headage', 'sediment_memory', 'objective'):
        setattr(self, attr, getattr(self, attr)[n:])
Remove the earliest n ensemble members from the MCMC output
58,047
def update_current_time(loop):
    """Refresh the cached wall-clock time and reschedule itself in one second.

    Caching avoids a time() syscall at the end of every keep-alive request.
    """
    global current_time
    current_time = time()
    loop.call_later(1, lambda: update_current_time(loop))
Cache the current time since it is needed at the end of every keep - alive request to update the request timeout time
58,048
def options(self, parser, env=None):
    """Add this plugin's command-line option (nose plugin hook)."""
    if env is None:
        env = os.environ
    # The option default can be supplied via NOSE_<DEST_OPT_NAME> in the env.
    env_opt_name = 'NOSE_%s' % self.__dest_opt_name.upper()
    parser.add_option("--%s" % self.__opt_name,
                      dest=self.__dest_opt_name,
                      type="string",
                      default=env.get(env_opt_name),
                      help=".ini file providing the environment for the "
                           "test web application.")
Adds command - line options for this plugin .
58,049
def configure(self, options, conf):
    """Configure the plugin (nose hook): enable it when the option was given."""
    super(EverestNosePlugin, self).configure(options, conf)
    opt_val = getattr(options, self.__dest_opt_name, None)
    if opt_val:
        self.enabled = True
        # Record the ini path globally for the test environment to pick up.
        EverestIni.ini_file_path = opt_val
Configures the plugin .
58,050
def create(self, ami, count, config=None):
    """Launch `count` instances of `ami` through a configured Launcher."""
    launcher = self.Launcher(config=config)
    return launcher.launch(ami, count)
Create an instance using the launcher .
58,051
def Launcher(self, config=None):
    """Return a configurable EC2 instance launcher.

    Attributes assigned to the returned launcher after construction are
    forwarded as extra RunInstances parameters.
    """
    class _launcher(EC2ApiClient):
        def __init__(self, aws, config):
            super(_launcher, self).__init__(aws)
            self.config = config
            # Snapshot of the attribute names present at construction, so
            # later ad-hoc attributes can be detected in launch().
            self._attr = list(self.__dict__.keys()) + ['_attr']

        def launch(self, ami, min_count, max_count=0):
            # NOTE(review): reads the enclosing function's `config` via
            # closure, not self.config — the same object today, but confirm
            # before refactoring.
            params = config.copy()
            params.update(dict([i for i in self.__dict__.items() if i[0] not in self._attr]))
            return self.call("RunInstances",
                             ImageId=ami,
                             MinCount=min_count,
                             MaxCount=max_count or min_count,
                             response_data_key="Instances",
                             **params)

    if not config:
        config = {}
    return _launcher(self._aws, config)
Provides a configurable launcher for EC2 instances .
58,052
def events(self, all_instances=None, instance_ids=None, filters=None):
    """Return all scheduled events across instance statuses, tagging each
    event dict with its InstanceId."""
    params = {}
    if filters:
        params["filters"] = make_filters(filters)
    if instance_ids:
        params['InstanceIds'] = instance_ids
    event_list = []
    for status in self.status(all_instances, **params):
        for event in status.get("Events") or []:
            event[u"InstanceId"] = status.get('InstanceId')
            event_list.append(event)
    return event_list
a list of tuples containing instance Id s and event information
58,053
def get(self, volume_ids=None, filters=None):
    """List EBS volume info via DescribeVolumes.

    A single volume id may be passed as a bare string.
    """
    params = {}
    if filters:
        params["filters"] = make_filters(filters)
    if isinstance(volume_ids, str):
        volume_ids = [volume_ids]
    return self.call("DescribeVolumes", VolumeIds=volume_ids,
                     response_data_key="Volumes", **params)
List EBS Volume info .
58,054
def attach(self, volume_id, instance_id, device_path):
    """Attach a volume to an instance, exposing it with a device name."""
    return self.call("AttachVolume",
                     VolumeId=volume_id,
                     InstanceId=instance_id,
                     Device=device_path)
Attach a volume to an instance exposing it with a device name .
58,055
def detach(self, volume_id, instance_id='', device_path='', force=False):
    """Detach a volume from an instance; *force* forces the detach."""
    return self.call("DetachVolume",
                     VolumeId=volume_id,
                     InstanceId=instance_id,
                     Device=device_path,
                     force=force)
Detach a volume from an instance .
58,056
def save(self, *args, **kwargs):
    """Fill in the slug before delegating to the parent ``save``."""
    self.slug = self.create_slug()
    super(Slugable, self).save(*args, **kwargs)
Overrides the save method
58,057
def create_slug(self):
    """Build a slug from ``self.slug_source``; append a counter until unique."""
    name = self.slug_source
    counter = 0
    while True:
        if counter == 0:
            slug = slugify(name)
        else:
            slug = slugify('{0} {1}'.format(name, str(counter)))
        try:
            # If another object already owns this slug, try the next counter.
            self.__class__.objects.exclude(pk=self.pk).get(slug=slug)
            counter += 1
        except ObjectDoesNotExist:
            break
    return slug
Creates slug checks if slug is unique and loop if not
58,058
def get_html(url, headers=None, timeout=None, errors="strict", wait_time=None,
             driver=None, zillow_only=False, cache_only=False,
             zillow_first=False, cache_first=False, random=False, **kwargs):
    """Fetch the HTML for *url*, optionally via the Google cached copy.

    The ``*_only`` / ``*_first`` flags control whether and in what order
    the live URL and the two cached-URL variants are tried; the first
    successful fetch is returned.

    Raises:
        ValueError: when more than one mutually exclusive flag is True.
        Exception: the last fetch error when every candidate URL fails.
    """
    if wait_time is None:
        wait_time = Config.Crawler.wait_time
    cache_url1 = prefix + url + "/"
    cache_url2 = prefix + url
    zillow_url = url
    only_flags = [zillow_only, cache_only]
    if sum(only_flags) == 0:
        first_flags = [zillow_first, cache_first]
        if sum(first_flags) == 0:
            if random:
                # Randomly prefer either the live site or the cache.
                if randint(0, 1):
                    all_url = [zillow_url, cache_url1, cache_url2]
                else:
                    all_url = [cache_url1, cache_url2, zillow_url]
            else:
                all_url = [zillow_url, cache_url1, cache_url2]
        elif sum(first_flags) == 1:
            if zillow_first:
                all_url = [zillow_url, cache_url1, cache_url2]
            elif cache_first:
                all_url = [cache_url1, cache_url2, zillow_url]
        else:
            raise ValueError(
                "Only zero or one `xxx_first` argument could be `True`!")
    elif sum(only_flags) == 1:
        if zillow_only:
            all_url = [zillow_url, ]
        elif cache_only:
            all_url = [cache_url1, cache_url2]
    else:
        raise ValueError(
            "Only zero or one `xxx_only` argument could be `True`!")

    # BUG FIX: the original did `raise e` after the loop, but Python 3
    # deletes the except-clause variable when the handler exits, so that
    # line raised NameError instead of the real error.  Keep the last
    # exception explicitly and re-raise it after all candidates fail.
    last_exc = None
    for url in all_url:
        try:
            html = _get_html(url, headers, timeout, errors, wait_time,
                             driver, **kwargs)
            return html
        except Exception as e:
            last_exc = e
    raise last_exc
Use Google Cached Url .
58,059
def get_paths(folder, ignore_endswith=ignore_endswith):
    """Return the sorted hologram file paths under *folder*.

    Files whose names end with any suffix in *ignore_endswith* are
    excluded.  The default is a module-level list defined elsewhere.
    """
    folder = pathlib.Path(folder).resolve()
    files = folder.rglob("*")
    for ie in ignore_endswith:
        # Each pass filters the (initially lazy) rglob result into a list.
        files = [ff for ff in files if not ff.name.endswith(ie)]
    return sorted(files)
Return hologram file paths
58,060
def call(func, args):
    """Call *func* with *args* normalized and cast to the annotated types.

    Keys in *args* are normalized into valid identifiers, values are cast
    according to the callable's type hints, and ``*args`` / ``**kwargs``
    parameters get special handling.

    Raises:
        exc.InvalidCliValueError: when a value cannot be cast to its type.
    """
    assert hasattr(func, '__call__'), 'Cannot call func: {}'.format(
        func.__name__)
    # Non-function callables are introspected via their __call__ method.
    raw_func = (
        func if isinstance(func, FunctionType) else func.__class__.__call__)
    hints = collections.defaultdict(lambda: Any, get_type_hints(raw_func))
    argspec = _getargspec(raw_func)
    named_args = {}
    varargs = ()
    for k, nk, v in _normalize(args):
        if nk == argspec.varargs:
            # The *args parameter takes a tuple of its hinted type.
            hints[nk] = Tuple[hints[nk], ...]
        elif nk not in argspec.args and argspec.varkw in hints:
            # Unknown keywords inherit the **kwargs hint.
            hints[nk] = hints[argspec.varkw]
        try:
            value = cast(hints[nk], v)
        except TypeError as e:
            _LOGGER.exception(e)
            six.raise_from(exc.InvalidCliValueError(k, v), e)
        if nk == argspec.varargs:
            varargs = value
        elif (nk in argspec.args or argspec.varkw) and (
                nk not in named_args or named_args[nk] is None):
            named_args[nk] = value
    return func(*varargs, **named_args)
Call the function with args normalized and cast to the correct types .
58,061
def get_callable(subcommand):
    """Return a callable object for *subcommand*.

    Modules must define a ``Command`` class; classes are instantiated,
    other callables are returned unchanged.
    """
    _LOGGER.debug('Creating callable from subcommand "%s".',
                  subcommand.__name__)
    if isinstance(subcommand, ModuleType):
        _LOGGER.debug('Subcommand is a module.')
        assert hasattr(subcommand, 'Command'), (
            'Module subcommand must have callable "Command" class definition.')
        callable_ = subcommand.Command
    else:
        callable_ = subcommand
    if any(isinstance(callable_, t) for t in six.class_types):
        return callable_()
    return callable_
Return a callable object from the subcommand .
58,062
def _getargspec(func):
    """Return a Python-3-like argspec triple ``(args, varargs, varkw)``.

    NOTE(review): index ``[2]`` of the underlying spec is presumably the
    ``**kwargs`` name — confirm against ``_getspec``'s return type.
    """
    argspec = _getspec(func)
    args = list(argspec.args)
    if argspec.varargs:
        args += [argspec.varargs]
    if argspec[2]:
        args += [argspec[2]]
    return _ArgSpec(args, argspec.varargs, argspec[2])
Return a Python 3 - like argspec object .
58,063
def _normalize(args):
    """Yield ``(key, normalized_key, value)`` for each item in *args*.

    Keys are munged into valid Python identifiers; names that would shadow
    a keyword or a builtin get a trailing underscore.
    """
    for k, v in six.iteritems(args):
        nk = re.sub(r'\W|^(?=\d)', '_', k).strip('_').lower()
        do_not_shadow = dir(six.moves.builtins)
        if keyword.iskeyword(nk) or nk in do_not_shadow:
            nk += '_'
        _LOGGER.debug('Normalized "%s" to "%s".', k, nk)
        yield k, nk, v
Yield a 3 - tuple containing the key a normalized key and the value .
58,064
def copy(self, filename=None):
    """Copy *filename* to the destination via a temp file, then rename.

    Uploading to ``dst_tmp`` first means the final name only appears once
    the transfer has completed.
    """
    dst = os.path.join(self.dst_path, filename)
    src = os.path.join(self.src_path, filename)
    dst_tmp = os.path.join(self.dst_tmp, filename)
    self.put(src=src, dst=dst_tmp, callback=self.update_progress,
             confirm=True)
    self.rename(src=dst_tmp, dst=dst)
Puts on destination as a temp file renames on the destination .
58,065
def __create(self):
    """Serialize the email parts into the JSON payload ``self.__data``."""
    self.__data = json.dumps({
        'config_path': self.encode(self.config_path),
        'subject': self.encode(self.__subject),
        'text': self.encode(self.__text),
        'html': self.encode(self.__html),
        'files': self.__files,
        'send_as_one': self.send_as_one,
        'addresses': self.__addresses,
        'ccs': self.__ccs,
    })
Construct the email
58,066
async def async_get_camera_image(self, image_name, username=None,
                                 password=None):
    """Grab a single image from the Xeoma web server.

    Raises:
        XeomaError: on authentication failure, timeout, or client error.
    """
    try:
        data = await self.async_fetch_image_data(image_name, username,
                                                 password)
        if data is None:
            raise XeomaError('Unable to authenticate with Xeoma web '
                             'server')
        return data
    except asyncio.TimeoutError:
        raise XeomaError('Connection timeout while fetching camera image.')
    except aiohttp.ClientError as e:
        raise XeomaError('Unable to fetch image: {}'.format(e))
Grab a single image from the Xeoma web server
58,067
async def async_fetch_image_data(self, image_name, username, password):
    """Fetch raw JPEG bytes for *image_name* from the Xeoma web server.

    Returns None when the response is not ``image/jpeg``.
    """
    params = {}
    cookies = self.get_session_cookie()
    if username is not None and password is not None:
        params['user'] = self.encode_user(username, password)
    else:
        params['user'] = ''
    async with aiohttp.ClientSession(cookies=cookies) as session:
        resp = await session.get(
            '{}/{}.jpg'.format(self._base_url, image_name), params=params)
        if resp.headers['Content-Type'] == 'image/jpeg':
            data = await resp.read()
        else:
            data = None
        return data
Fetch image data from the Xeoma web server
58,068
async def async_get_image_names(self):
    """Parse the web server camera view for camera image names.

    Returns a list of ``(image_name, username, password)`` tuples; the
    credentials are None when none are embedded in the page.

    Raises:
        XeomaError: when no names are found, credentials cannot be
            parsed, or the server is unreachable.
    """
    cookies = self.get_session_cookie()
    try:
        async with aiohttp.ClientSession(cookies=cookies) as session:
            resp = await session.get(self._base_url)
            t = await resp.text()
            match = re.findall('(?:\w|\d|")/(.*?).(?:mjpg|jpg)', t)
            if len(match) == 0:
                raise XeomaError('Unable to find any camera image names')
            image_names = set(match)
            results = []
            for image_name in image_names:
                match = re.search(
                    image_name + '\.(?:mjpg|jpg).*?user=(.*?)&', t)
                if match and len(match.group(1)) > 0:
                    # The user param is url-quoted base64 "login:password".
                    d = base64.b64decode(
                        unquote(match.group(1))).decode('ASCII')
                    creds = d.split(':')
                    if len(creds) < 2:
                        raise XeomaError('Error parsing image credentials')
                    results.append((image_name, creds[0], creds[1]))
                else:
                    results.append((image_name, None, None))
            return results
    except asyncio.TimeoutError as e:
        raise XeomaError("Unable to connect to Xeoma web server")
Parse web server camera view for camera image names
58,069
def get_session_cookie(self):
    """Return a session-cookie dict for aiohttp, or None without credentials."""
    if self._login is not None and self._password is not None:
        session_key = self.encode_user(self._login, self._password)
        return {'sessionkey': session_key}
    else:
        return None
Create a session cookie object for use by aiohttp
58,070
def _get_sha256_digest(self, content):
    """Return the SHA256 digest of *content* as a ``SHA256=<base64>`` header.

    NOTE(review): ``base64.b64encode`` returns bytes on Python 3, so the
    str + bytes concatenation below presumably targets Python 2 — confirm.
    """
    content_sha256 = base64.b64encode(SHA256.new(content).digest())
    return 'SHA256=' + content_sha256
Return the sha256 digest of the content in the header format the Merchant API expects .
58,071
def _sha256_sign(self, method, url, headers, body):
    """Sign the request with RSA-SHA256.

    The signed string is ``METHOD|url|`` followed by the sorted
    ``X-Mcash-*`` headers joined with ``&``.  *body* is accepted but not
    used in the signature here.
    """
    d = ''
    sign_headers = method.upper() + '|' + url + '|'
    for key, value in sorted(headers.items()):
        if key.startswith('X-Mcash-'):
            sign_headers += d + key.upper() + '=' + value
            # Switch to '&' as separator after the first header.
            d = '&'
    rsa_signature = base64.b64encode(
        self.signer.sign(SHA256.new(sign_headers)))
    return 'RSA-SHA256 ' + rsa_signature
Sign the request with SHA256 .
58,072
def create_toolbox(self, filename):
    """Create a Python toolbox (.pyt) in which each task becomes a GP tool.

    An XML help file is written per task.  Returns the base filename
    (without extension).
    """
    filename = os.path.splitext(filename)[0]
    label = os.path.basename(filename)
    tool_list = []
    for task in self.tasks:
        tool_list.append(task.name)
    # O_EXCL: fail instead of silently overwriting an existing toolbox.
    file_descriptor = os.open(filename + '.pyt',
                              os.O_WRONLY | os.O_CREAT | os.O_EXCL)
    with os.fdopen(file_descriptor, 'w') as self.toolbox_file:
        self.toolbox_file.write(self._imports_template.substitute({}))
        toolbox_class = self._toolbox_class_template.substitute(
            {'label': label,
             'alias': self.alias,
             'toolList': param_builder.convert_list(tool_list)})
        self.toolbox_file.write(toolbox_class)
        for task in self.tasks:
            gp_tool = self.create_tool(task)
            self.toolbox_file.write(gp_tool)
            toolbox_help_filename = '.'.join(
                (filename, task.name, 'pyt', 'xml'))
            help_builder.create(toolbox_help_filename, task, self.alias)
    return filename
Creates a new Python toolbox where each task name is a GPTool in the toolbox .
58,073
def create_tool(self, task):
    """Render the GP tool source for *task* from the tool template."""
    gp_tool = dict(taskName=task.name,
                   taskDisplayName=task.display_name,
                   taskDescription=task.description,
                   canRunInBackground=True,
                   taskUri=task.uri)
    gp_tool['execute'] = self._execute_template.substitute(gp_tool)
    gp_tool['parameterInfo'] = param_builder.create_param_info(
        task.parameters, self.parameter_map)
    gp_tool['updateParameter'] = param_builder.create_update_parameter(
        task.parameters, self.parameter_map)
    gp_tool['preExecute'] = param_builder.create_pre_execute(
        task.parameters, self.parameter_map)
    gp_tool['postExecute'] = param_builder.create_post_execute(
        task.parameters, self.parameter_map)
    return self._tool_template.substitute(gp_tool)
Creates a new GPTool for the toolbox .
58,074
def import_script(self, script_name):
    """Copy the contents of *script_name* into the open toolbox file."""
    filename = os.path.abspath(script_name)
    with open(filename, 'r') as script_file:
        self.toolbox_file.write(script_file.read())
Finds the script file and copies it into the toolbox
58,075
def make_relationship(self, relator,
                      direction=RELATIONSHIP_DIRECTIONS.BIDIRECTIONAL):
    """Create a relationship object for this attribute.

    *relator* must provide ``IEntity`` or ``IResource``; a ``ValueError``
    is raised otherwise.
    """
    if IEntity.providedBy(relator):
        rel = DomainRelationship(relator, self, direction=direction)
    elif IResource.providedBy(relator):
        rel = ResourceRelationship(relator, self, direction=direction)
    else:
        raise ValueError('Invalid relator argument "%s" for '
                         'relationship; must provide IEntity or '
                         'IResource.' % relator)
    return rel
Create a relationship object for this attribute from the given relator and relationship direction .
58,076
def register(self, app, options):
    """Register this blueprint's routes, middleware, exception handlers,
    static files and listeners on the mach9 *app*.

    ``options['url_prefix']`` overrides the blueprint's own prefix.
    """
    url_prefix = options.get('url_prefix', self.url_prefix)
    for future in self.routes:
        future.handler.__blueprintname__ = self.name
        uri = url_prefix + future.uri if url_prefix else future.uri
        # Collapse an accidental leading double slash from prefix joining.
        app.route(uri=uri[1:] if uri.startswith('//') else uri,
                  methods=future.methods,
                  host=future.host or self.host,
                  strict_slashes=future.strict_slashes,
                  stream=future.stream)(future.handler)
    for future in self.middlewares:
        if future.args or future.kwargs:
            app.middleware(*future.args,
                           **future.kwargs)(future.middleware)
        else:
            app.middleware(future.middleware)
    for future in self.exceptions:
        app.exception(*future.args, **future.kwargs)(future.handler)
    for future in self.statics:
        uri = url_prefix + future.uri if url_prefix else future.uri
        app.static(uri, future.file_or_directory,
                   *future.args, **future.kwargs)
    for event, listeners in self.listeners.items():
        for listener in listeners:
            app.listener(event)(listener)
Register the blueprint to the mach9 app .
58,077
def add_route(self, handler, uri, methods=frozenset({'GET'}), host=None,
              strict_slashes=False):
    """Create a blueprint route from a function or view class.

    For class-based views, the HTTP methods are derived from the handler
    methods the view class implements.
    """
    if hasattr(handler, 'view_class'):
        http_methods = ('GET', 'POST', 'PUT', 'HEAD', 'OPTIONS', 'PATCH',
                        'DELETE')
        methods = set()
        for method in http_methods:
            if getattr(handler.view_class, method.lower(), None):
                methods.add(method)
    if isinstance(handler, self._composition_view_class):
        methods = handler.handlers.keys()
    self.route(uri=uri, methods=methods, host=host,
               strict_slashes=strict_slashes)(handler)
    return handler
Create a blueprint route from a function .
58,078
def remove_boards_gui(hwpack=''):
    """Interactively choose and remove boards from a hardware package.

    When *hwpack* is empty the user is asked to pick one (unless only one
    package exists).
    """
    if not hwpack:
        if len(hwpack_names()) > 1:
            hwpack = psidialogs.choice(
                hwpack_names(),
                'select hardware package to select board from!',
                title='select')
        else:
            hwpack = hwpack_names()[0]
        print('%s selected' % hwpack)
    if hwpack:
        sel = psidialogs.multi_choice(
            board_names(hwpack),
            'select boards to remove from %s!' % boards_txt(hwpack),
            title='remove boards')
        print('%s selected' % sel)
        if sel:
            for x in sel:
                remove_board(x)
                print('%s was removed' % x)
remove boards by GUI .
58,079
def single(C, namespace=None):
    """Build an element maker from factory *C* with a single namespace.

    When *namespace* is given it is used both as the default namespace
    and as the ``_`` namespace; the maker's ``_`` attribute is returned.
    """
    if namespace is None:
        return C()._
    return C(default=namespace, _=namespace)._
An element maker with a single namespace that uses that namespace as the default
58,080
def intersect(self, **kwargs):
    """Intersect a point with the bathymetry; return True on a hit.

    NOTE(review): compares the bathymetry depth at ``end_point``'s
    location against the point's own depth — presumably both use the same
    sign convention (negative down); confirm against Location4D.
    """
    end_point = kwargs.pop('end_point')
    depth = self.get_depth(location=end_point)
    if depth < 0 and depth > end_point.depth:
        inter = True
    else:
        inter = False
    return inter
Intersect Point and Bathymetry returns bool
58,081
def react(self, **kwargs):
    """Dispatch to the configured bathymetry interaction behaviour.

    Supported types: 'hover', 'stick' (a no-op here) and 'reverse';
    anything else raises ``ValueError``.  The reaction time is ignored
    here and must be handled by the caller.
    """
    react_type = kwargs.get("type", self._type)
    if react_type == 'hover':
        return self.__hover(**kwargs)
    elif react_type == 'stick':
        pass
    elif react_type == 'reverse':
        return self.__reverse(**kwargs)
    else:
        raise ValueError("Bathymetry interaction type not supported")
The time of recation is ignored hereTime is ignored here and should be handled by whatever called this function .
58,082
def __hover(self, **kwargs):
    """Hover the particle 1 m above the bathymetry at its end location.

    Known limitation (per the original author): this uses where the
    particle WOULD have ended up, not the actual impact point on the
    bathymetry.
    """
    end_point = kwargs.pop('end_point')
    depth = self.get_depth(location=end_point)
    return Location4D(latitude=end_point.latitude,
                      longitude=end_point.longitude,
                      depth=(depth + 1.))
This hovers the particle 1m above the bathymetry WHERE IT WOULD HAVE ENDED UP . This is WRONG and we need to compute the location that it actually hit the bathymetry and hover 1m above THAT .
58,083
def __reverse(self, **kwargs):
    """On bathymetry impact, send the particle back to its start location."""
    start_point = kwargs.pop('start_point')
    return Location4D(latitude=start_point.latitude,
                      longitude=start_point.longitude,
                      depth=start_point.depth)
If we hit the bathymetry set the location to where we came from .
58,084
def main():
    """Parse the command line options and launch the requested command.

    Returns the subcommand's exit status, the help text for ``help``, or
    an error string for an invalid CLI value.
    """
    colorama.init(wrap=six.PY3)
    doc = usage.get_primary_command_usage()
    allow_subcommands = '<command>' in doc
    args = docopt(doc, version=settings.version,
                  options_first=allow_subcommands)
    # Install the custom excepthook only if nothing else has replaced it.
    if sys.excepthook is sys.__excepthook__:
        sys.excepthook = log.excepthook
    try:
        log.enable_logging(log.get_log_level(args))
        default_args = sys.argv[2 if args.get('<command>') else 1:]
        if (args.get('<command>') == 'help' and
                None not in settings.subcommands):
            subcommand = next(iter(args.get('<args>', default_args)), None)
            return usage.get_help_usage(subcommand)
        argv = [args.get('<command>')] + args.get('<args>', default_args)
        return _run_command(argv)
    except exc.InvalidCliValueError as e:
        return str(e)
Parse the command line options and launch the requested command .
58,085
def _get_subcommand(name):
    """Return the registered subcommand *name*; raise ValueError if unknown."""
    _LOGGER.debug('Accessing subcommand "%s".', name)
    if name not in settings.subcommands:
        raise ValueError(
            '"{subcommand}" is not a {command} command. \'{command} help -a\' '
            'lists all available subcommands.'.format(
                command=settings.command, subcommand=name))
    return settings.subcommands[name]
Return the function for the specified subcommand .
58,086
def _run_command(argv):
    """Run the subcommand named in *argv* and return its exit status."""
    command_name, argv = _get_command_and_argv(argv)
    _LOGGER.info('Running command "%s %s" with args: %s',
                 settings.command, command_name, argv)
    subcommand = _get_subcommand(command_name)
    func = call.get_callable(subcommand)
    doc = usage.format_usage(subcommand.__doc__)
    args = _get_parsed_args(command_name, doc, argv)
    # A falsy return from the command maps to exit status 0.
    return call.call(func, args) or 0
Run the command with the given CLI options and exit .
58,087
def _get_command_and_argv ( argv ) : command_name = argv [ 0 ] if not command_name : argv = argv [ 1 : ] elif command_name == settings . command : argv . remove ( command_name ) return command_name , argv
Extract the command name and arguments to pass to docopt .
58,088
def _get_parsed_args(command_name, doc, argv):
    """Parse *argv* against the docopt usage string *doc*.

    When the top-level command itself is being parsed, mark it as seen in
    the resulting argument dict so downstream dispatch works.
    """
    # BUG FIX: the original format string had a single %s but two lazy
    # arguments (doc, argv), which makes the logging module emit a
    # formatting error instead of the message; log both values.
    _LOGGER.debug('Parsing docstring: %s with arguments %s.', doc, argv)
    args = docopt(doc, argv=argv)
    if command_name == settings.command:
        args[command_name] = True
    return args
Parse the docstring with docopt .
58,089
def trace(msg):
    """Emit a TRACE message on stderr when JARN_TRACE=1 is set."""
    enabled = os.environ.get('JARN_TRACE') == '1'
    if enabled:
        print('TRACE:', msg, file=sys.stderr)
Print a trace message to stderr if environment variable is set .
58,090
def Efn(Ms, eccs):
    """Vectorized wrapper around ``EFN``.

    Per the original note: works for -2*pi < Ms < 2*pi and e <= 0.97.
    Full turns are removed from Ms before the call and added back to the
    result afterwards.
    """
    Ms = np.atleast_1d(Ms)
    eccs = np.atleast_1d(eccs)
    unit = np.floor(Ms / (2 * np.pi))
    Es = EFN((Ms % (2 * np.pi)), eccs)
    Es += unit * (2 * np.pi)
    return Es
Works for -2*pi < Ms < 2*pi and e <= 0.97.
58,091
def enable_modules_from_last_session(seashcommanddict):
    """Enable every module not marked as disabled in the modules folder.

    Meant to be called only while seash is initializing.  A module is
    marked as disabled by a ``modulename.disabled`` file.  (Python 2
    syntax throughout.)
    """
    successfully_enabled_modules = []
    modules_to_enable = get_enabled_modules()
    for modulename in modules_to_enable:
        # Disable first so that enable() starts from a clean state.
        disable(seashcommanddict, modulename)
        try:
            enable(seashcommanddict, modulename)
            successfully_enabled_modules.append(modulename)
        except seash_exceptions.ModuleConflictError, e:
            print "Failed to enable the '" + modulename + "' module due to the following conflicting command:"
            print str(e)
            # Mark the module as disabled so it stays off next session.
            open(MODULES_FOLDER_PATH + os.sep + modulename + ".disabled", 'w')
        except seash_exceptions.InitializeError, e:
            print "Failed to enable the '" + modulename + "' module."
            disable(seashcommanddict, modulename)
    successfully_enabled_modules.sort()
    print 'Enabled modules:', ', '.join(successfully_enabled_modules), '\n'
Enable every module that isn t marked as disabled in the modules folder . This function is meant to be called when seash is initializing and nowhere else . A module is marked as disabled when there is a modulename . disabled file .
58,092
def _ensure_module_folder_exists():
    """Create the modules folder if it is missing.

    Raises RuntimeError when a non-directory file already has that name.
    (Python 2 syntax.)
    """
    if not os.path.isdir(MODULES_FOLDER_PATH):
        try:
            os.mkdir(MODULES_FOLDER_PATH)
        except OSError, e:
            if "file already exists" in str(e):
                raise RuntimeError("Could not create modules folder: file exists with the same name")
Checks to see if the module folder exists . If it does not create it . If there is an existing file with the same name we raise a RuntimeError .
58,093
def get_configuration_from_settings(self, setting_info):
    """Map configuration names to values from this configurator's settings.

    *setting_info* is an iterable of ``(name, key)`` pairs; keys whose
    setting value is missing or None are skipped.
    """
    settings = self.get_settings()
    config = {}
    for name, key in setting_info:
        value = settings.get(key)
        if value is not None:
            config[name] = value
    return config
Returns a dictionary with configuration names as keys and setting values extracted from this configurator s settings as values .
58,094
def add_repository(self, name, repository_type, repository_class,
                   aggregate_class, make_default, configuration):
    """Generic method for adding a repository to the repository manager.

    A None *name* falls back to the root repository domain.
    """
    repo_mgr = self.get_registered_utility(IRepositoryManager)
    if name is None:
        name = REPOSITORY_DOMAINS.ROOT
    repo = repo_mgr.new(repository_type, name=name,
                        make_default=make_default,
                        repository_class=repository_class,
                        aggregate_class=aggregate_class,
                        configuration=configuration)
    repo_mgr.set(repo)
Generic method for adding a repository .
58,095
def encrypt_email(email):
    """Encrypt an email address with AES using the Flask app's AES_KEY."""
    aes = SimpleAES(flask.current_app.config["AES_KEY"])
    return aes.encrypt(email)
The default encryption function for storing emails in the database . This uses AES and the encryption key defined in the applications configuration .
58,096
def shortlink_scanned(self, data):
    """Handle a shortlink_scanned event by creating a payment request.

    Stores the resulting payment request id in ``self._tid``.
    """
    self.logger.info("Received shortlink_scanned event")
    data = json.loads(data)
    customer_token = str(data['object']['id'])
    response = self.mapiclient.create_payment_request(
        customer=customer_token,
        currency="NOK",
        amount="20.00",
        allow_credit=True,
        pos_id=self._pos_id,
        pos_tid=str(uuid.uuid4()),
        action='auth',
        expires_in=90,
        callback_uri="pusher:m-winterwarming-pos_callback_chan",
        text='Have some hot chocolate!')
    self._tid = response['id']
    print(str(self._tid))
Called when a shortlink_scanned event is received
58,097
def pusher_connected(self, data):
    """Bind the payment callbacks once the pusher client has connected."""
    self.logger.info("Pusherclient connected")
    self.callback_client.bind("payment_authorized",
                              self.payment_authorized)
    self.callback_client.bind("shortlink_scanned",
                              self.shortlink_scanned)
Called when the pusherclient is connected
58,098
def get(self, *args, **kwargs):
    """Serve a plain pk lookup from the cache before hitting the database.

    Filtered querysets and non-pk lookups fall through to the parent
    ``get``.  Cached hits are flagged with ``obj.from_cache = True``.

    NOTE(review): ``kwargs.keys()[0]`` / ``kwargs.values()[0]`` only work
    on Python 2, where these return lists — confirm the target version.
    """
    if self.query.where:
        # An already-filtered queryset cannot be served from the pk cache.
        return super(CachingQuerySet, self).get(*args, **kwargs)
    if len(kwargs) == 1:
        k = kwargs.keys()[0]
        if k in ('pk', 'pk__exact',
                 '%s' % self.model._meta.pk.attname,
                 '%s__exact' % self.model._meta.pk.attname):
            obj = cache.get(self.model._cache_key(pk=kwargs.values()[0]))
            if obj is not None:
                obj.from_cache = True
                return obj
    return super(CachingQuerySet, self).get(*args, **kwargs)
Checks the cache to see if there s a cached entry for this pk . If not fetches using super then stores the result in cache . Most of the logic here was gathered from a careful reading of django . db . models . sql . query . add_filter
58,099
def fetch_path(self, name):
    """Return the UTF-8 decoded contents of the path from ``lookup_path``."""
    with codecs.open(self.lookup_path(name), encoding='utf-8') as fd:
        return fd.read()
Fetch contents from the path retrieved via lookup_path .