idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
12,500
def start_txn ( self , txn_name = None ) : if not txn_name : txn_name = uuid . uuid4 ( ) . hex txn_response = self . api . http_request ( 'POST' , '%s/fcr:tx' % self . root , data = None , headers = None ) if txn_response . status_code == 201 : txn_uri = txn_response . headers [ 'Location' ] logger . debug ( "spawning ...
Request new transaction from repository init new Transaction store in self . txns
12,501
def get_txn ( self , txn_name , txn_uri ) : txn_uri = self . parse_uri ( txn_uri ) txn_response = self . api . http_request ( 'GET' , txn_uri , data = None , headers = None ) if txn_response . status_code == 200 : logger . debug ( "transactoin found: %s" % txn_uri ) txn = Transaction ( self , txn_name , txn_uri , expir...
Retrieves known transaction and adds to self . txns .
12,502
def keep_alive ( self ) : txn_response = self . api . http_request ( 'POST' , '%sfcr:tx' % self . root , data = None , headers = None ) if txn_response . status_code == 204 : logger . debug ( "continuing transaction: %s" % self . root ) self . active = True self . expires = txn_response . headers [ 'Expires' ] return T...
Keep current transaction alive updates self . expires
12,503
def _close ( self , close_type ) : txn_response = self . api . http_request ( 'POST' , '%sfcr:tx/fcr:%s' % ( self . root , close_type ) , data = None , headers = None ) if txn_response . status_code == 204 : logger . debug ( "%s for transaction: %s, successful" % ( close_type , self . root ) ) self . active = False ret...
Ends transaction by committing or rolling back all changes during transaction .
12,504
def http_request ( self , verb , uri , data = None , headers = None , files = None , response_format = None , is_rdf = True , stream = False ) : if is_rdf : if verb == 'GET' : if not response_format : response_format = self . repo . default_serialization if headers and 'Accept' not in headers . keys ( ) : headers [ 'Ac...
Primary route for all HTTP requests to repository . Ability to set most parameters for requests library with some additional convenience parameters as well .
12,505
def parse_rdf_payload ( self , data , headers ) : if headers [ 'Content-Type' ] . startswith ( 'text/plain' ) : logger . debug ( 'text/plain Content-Type detected, using application/n-triples for parser' ) parse_format = 'application/n-triples' else : parse_format = headers [ 'Content-Type' ] if ';charset' in parse_for...
small function to parse RDF payloads from various repository endpoints
12,506
def _derive_namespaces ( self ) : for graph in [ self . diffs . overlap , self . diffs . removed , self . diffs . added ] : for s , p , o in graph : try : ns_prefix , ns_uri , predicate = graph . compute_qname ( p ) self . update_namespaces . add ( ns_uri ) except : logger . debug ( 'could not parse Object URI: %s' % n...
Small method to loop through three graphs in self . diffs identify unique namespace URIs . Then loop through provided dictionary of prefixes and pin one to another .
12,507
def check_exists ( self ) : response = self . repo . api . http_request ( 'HEAD' , self . uri ) self . status_code = response . status_code if self . status_code == 200 : self . exists = True elif self . status_code == 410 : self . exists = False elif self . status_code == 404 : self . exists = False return self . exis...
Check if the resource exists , update self . exists accordingly , and return it
12,508
def create ( self , specify_uri = False , ignore_tombstone = False , serialization_format = None , stream = False , auto_refresh = None ) : if self . exists : raise Exception ( 'resource exists attribute True, aborting' ) else : if specify_uri : verb = 'PUT' else : verb = 'POST' logger . debug ( 'creating resource %s w...
Primary method to create resources .
12,509
def options(self):
    """Issue an OPTIONS request against self.uri and return the response headers.

    The headers advertise the HTTP methods and interaction models the
    repository supports for this resource.
    """
    options_response = self.repo.api.http_request('OPTIONS', self.uri)
    return options_response.headers
Small method to return headers of an OPTIONS request to self . uri
12,510
def copy ( self , destination ) : destination_uri = self . repo . parse_uri ( destination ) response = self . repo . api . http_request ( 'COPY' , self . uri , data = None , headers = { 'Destination' : destination_uri . toPython ( ) } ) if response . status_code == 201 : return destination_uri else : raise Exception ( ...
Method to copy resource to another location
12,511
def delete(self, remove_tombstone=True):
    """Delete this resource from the repository.

    :param remove_tombstone: when True, also DELETE the fcr:tombstone the
        repository leaves behind, so the URI can be reused later.
    :return: True when the repository answered 204; None otherwise.
    """
    response = self.repo.api.http_request('DELETE', self.uri)
    if response.status_code != 204:
        return None
    # resource is gone; clear cached attributes on this instance
    self._empty_resource_attributes()
    if remove_tombstone:
        tombstone_uri = '%s/fcr:tombstone' % self.uri
        self.repo.api.http_request('DELETE', tombstone_uri)
    return True
Method to delete resources .
12,512
def refresh ( self , refresh_binary = True ) : updated_self = self . repo . get_resource ( self . uri ) if not isinstance ( self , type ( updated_self ) ) : raise Exception ( 'Instantiated %s, but repository reports this resource is %s' % ( type ( updated_self ) , type ( self ) ) ) if updated_self : self . status_code ...
Performs GET request and refreshes RDF information for resource .
12,513
def _build_rdf ( self , data = None ) : self . rdf = SimpleNamespace ( ) self . rdf . data = data self . rdf . prefixes = SimpleNamespace ( ) self . rdf . uris = SimpleNamespace ( ) for prefix , uri in self . repo . context . items ( ) : setattr ( self . rdf . prefixes , prefix , rdflib . Namespace ( uri ) ) self . _pa...
Parse incoming rdf as self . rdf . orig_graph create copy at self . rdf . graph
12,514
def _parse_graph ( self ) : if self . exists : self . rdf . graph = self . repo . api . parse_rdf_payload ( self . rdf . data , self . headers ) else : self . rdf . graph = rdflib . Graph ( ) self . rdf . namespace_manager = rdflib . namespace . NamespaceManager ( self . rdf . graph ) for ns_prefix , ns_uri in self . r...
use Content - Type from headers to determine parsing method
12,515
def parse_object_like_triples ( self ) : self . rdf . triples = SimpleNamespace ( ) for s , p , o in self . rdf . graph : ns_prefix , ns_uri , predicate = self . rdf . graph . compute_qname ( p ) if not hasattr ( self . rdf . triples , ns_prefix ) : setattr ( self . rdf . triples , ns_prefix , SimpleNamespace ( ) ) if ...
method to parse triples from self . rdf . graph for object - like access
12,516
def _empty_resource_attributes(self):
    """Reset local attributes after the resource is deleted or found absent."""
    self.status_code = 404
    self.headers = {}
    self.exists = False
    # NOTE(review): _build_rdf appears to mutate self.rdf elsewhere and may
    # return None; this assignment mirrors the original call -- confirm intent.
    self.rdf = self._build_rdf()
    # binary payloads need their own cleanup (exact type check on purpose)
    if type(self) == NonRDFSource:
        self.binary.empty()
small method to empty values if resource is removed or absent
12,517
def add_triple(self, p, o, auto_refresh=True):
    """Add the triple (self.uri, p, o) to the local graph.

    :param p: predicate term
    :param o: object; coerced to an rdflib term via _handle_object
    :param auto_refresh: when True, re-parse the object-like triple accessors
    """
    triple = (self.uri, p, self._handle_object(o))
    self.rdf.graph.add(triple)
    self._handle_triple_refresh(auto_refresh)
add triple by providing p o assumes s = subject
12,518
def set_triple(self, p, o, auto_refresh=True):
    """Overwrite the object of the (assumed unique) triple with predicate p.

    rdflib's graph.set replaces the existing object for (self.uri, p).
    """
    replacement = (self.uri, p, self._handle_object(o))
    self.rdf.graph.set(replacement)
    self._handle_triple_refresh(auto_refresh)
Assuming the predicate or object matches a single triple sets the other for that triple .
12,519
def remove_triple(self, p, o, auto_refresh=True):
    """Remove the triple (self.uri, p, o) from the local graph."""
    target = (self.uri, p, self._handle_object(o))
    self.rdf.graph.remove(target)
    self._handle_triple_refresh(auto_refresh)
remove triple by supplying p o
12,520
def _handle_triple_refresh(self, auto_refresh):
    """Re-parse self.rdf.triples when a refresh is requested.

    :param auto_refresh: True forces a refresh, None defers to
        self.repo.default_auto_refresh, any other falsy value skips it.
    """
    if auto_refresh:
        self.parse_object_like_triples()
    # None means "use the repository-wide default"; identity check is the
    # correct idiom for the None sentinel (was ``== None``)
    elif auto_refresh is None and self.repo.default_auto_refresh:
        self.parse_object_like_triples()
method to refresh self . rdf . triples if auto_refresh or defaults set to True
12,521
def update ( self , sparql_query_only = False , auto_refresh = None , update_binary = True ) : self . _diff_graph ( ) sq = SparqlUpdate ( self . rdf . prefixes , self . rdf . diffs ) if sparql_query_only : return sq . build_query ( ) response = self . repo . api . http_request ( 'PATCH' , '%s/fcr:metadata' % self . uri...
Method to update resources in repository . Firing this method computes the difference in the local modified graph and the original one creates an instance of SparqlUpdate and builds a sparql query that represents these differences and sends this as a PATCH request .
12,522
def children ( self , as_resources = False ) : children = [ o for s , p , o in self . rdf . graph . triples ( ( None , self . rdf . prefixes . ldp . contains , None ) ) ] if as_resources : logger . debug ( 'retrieving children as resources' ) children = [ self . repo . get_resource ( child ) for child in children ] ret...
method to return hierarchical children of this resource
12,523
def parents ( self , as_resources = False ) : parents = [ o for s , p , o in self . rdf . graph . triples ( ( None , self . rdf . prefixes . fedora . hasParent , None ) ) ] if as_resources : logger . debug ( 'retrieving parent as resource' ) parents = [ self . repo . get_resource ( parent ) for parent in parents ] retu...
method to return hierarchical parents of this resource
12,524
def siblings ( self , as_resources = False ) : siblings = set ( ) for parent in self . parents ( as_resources = True ) : for sibling in parent . children ( as_resources = as_resources ) : siblings . add ( sibling ) if as_resources : siblings . remove ( self ) if not as_resources : siblings . remove ( self . uri ) retur...
method to return hierarchical siblings of this resource .
12,525
def create_version ( self , version_label ) : version_response = self . repo . api . http_request ( 'POST' , '%s/fcr:versions' % self . uri , data = None , headers = { 'Slug' : version_label } ) if version_response . status_code == 201 : logger . debug ( 'version created: %s' % version_response . headers [ 'Location' ]...
method to create a new version of the resource as it currently stands
12,526
def get_versions ( self ) : versions_response = self . repo . api . http_request ( 'GET' , '%s/fcr:versions' % self . uri ) versions_graph = self . repo . api . parse_rdf_payload ( versions_response . content , versions_response . headers ) for version_uri in versions_graph . objects ( self . uri , self . rdf . prefixe...
retrieves all versions of an object and stores them at self . versions
12,527
def dump(self, format='ttl'):
    """Return the resource's RDF graph serialized as a unicode string.

    :param format: rdflib serialization format name (default turtle)
    """
    serialized = self.rdf.graph.serialize(format=format)
    return serialized.decode('utf-8')
Convenience method to return RDF data for resource optionally selecting serialization format . Inspired by . dump from Samvera .
12,528
def revert_to ( self ) : response = self . resource . repo . api . http_request ( 'PATCH' , self . uri ) if response . status_code == 204 : logger . debug ( 'reverting to previous version of resource, %s' % self . uri ) self . _current_resource . refresh ( ) else : raise Exception ( 'HTTP %s, could not revert to resour...
method to revert resource to this version by issuing PATCH
12,529
def delete ( self ) : response = self . resource . repo . api . http_request ( 'DELETE' , self . uri ) if response . status_code == 204 : logger . debug ( 'deleting previous version of resource, %s' % self . uri ) delattr ( self . _current_resource . versions , self . label ) elif response . status_code == 400 : raise ...
method to remove a version from the resource's history
12,530
def empty(self):
    """Blank out binary attributes, e.g. after the owning resource is deleted
    while this object is still referenced."""
    for attribute in ('resource', 'delivery', 'data', 'mimetype', 'location'):
        setattr(self, attribute, None)
    self.stream = False
Method to empty attributes particularly for use when object is deleted but remains as variable
12,531
def refresh(self, updated_self):
    """Copy binary attributes from a freshly retrieved copy of the resource.

    :param updated_self: resource instance whose .binary holds current data
    """
    logger.debug('refreshing binary attributes')
    self.mimetype = updated_self.binary.mimetype
    self.data = updated_self.binary.data
method to refresh binary attributes and data
12,532
def parse_binary ( self ) : self . mimetype = self . resource . rdf . graph . value ( self . resource . uri , self . resource . rdf . prefixes . ebucore . hasMimeType ) . toPython ( ) self . data = self . resource . repo . api . http_request ( 'GET' , self . resource . uri , data = None , headers = { 'Content-Type' : s...
when retrieving a NonRDF resource parse binary data and make available via generators
12,533
def _prep_binary_content ( self ) : if not self . data and not self . location and 'Content-Location' not in self . resource . headers . keys ( ) : raise Exception ( 'creating/updating NonRDFSource requires content from self.binary.data, self.binary.location, or the Content-Location header' ) elif 'Content-Location' in...
Sets delivery method of either payload or header Favors Content - Location header if set
12,534
def fixity ( self , response_format = None ) : if not response_format : response_format = self . repo . default_serialization response = self . repo . api . http_request ( 'GET' , '%s/fcr:fixity' % self . uri ) fixity_graph = self . repo . api . parse_rdf_payload ( response . content , response . headers ) for outcome ...
Issues fixity check return parsed graph
12,535
def get_value(self, consumer=None):
    """Return the stored value.

    When a truthy *consumer* is given, record it in self.consumers so the
    channel knows that consumer has read the value.
    """
    if consumer:
        self.consumers[consumer] = True
    return self.value
If consumer is specified the channel will record that consumer as having consumed the value .
12,536
def set_input_data(self, key, value):
    """Set an input value, creating the input channel on demand.

    Automatic channel creation covers blocks whose inputs are not wired to
    anything in the graph but still need initial values.
    """
    # idiomatic membership test (was ``not key in ...keys()``)
    if key not in self.input_channels:
        self.set_input_channel(key, Channel())
    self.input_channels[key].set_value(Data(self.time, value))
set_input_data will automatically create an input channel if necessary . Automatic channel creation is intended for the case where users are trying to set initial values on a block whose input channels aren t subscribed to anything in the graph .
12,537
def get_output_channel(self, output_channel_name):
    """Return the named output channel, creating and registering it if absent.

    Newly created channels get this block added as a producer.
    """
    # idiomatic membership test (was ``not name in ...keys()``)
    if output_channel_name not in self.output_channels:
        channel = Channel()
        channel.add_producer(self)
        self.output_channels[output_channel_name] = channel
    return self.output_channels[output_channel_name]
get_output_channel will create a new channel object if necessary .
12,538
def by_bounding_box ( self , tl_lat , tl_long , br_lat , br_long , term = None , num_biz_requested = None , category = None ) : header , content = self . _http_request ( self . BASE_URL , tl_lat = tl_lat , tl_long = tl_long , br_lat = br_lat , br_long = br_long , term = term , category = category , num_biz_requested = ...
Perform a Yelp Review Search based on a map bounding box .
12,539
def by_geopoint(self, lat, long, radius, term=None, num_biz_requested=None, category=None):
    """Perform a Yelp Review Search based on a geopoint and radius.

    Bug fix: ``term`` and ``num_biz_requested`` were previously hard-coded to
    None (caller-supplied values were silently ignored) and ``category`` was
    dropped entirely; all three are now forwarded, matching by_bounding_box.
    """
    header, content = self._http_request(
        self.BASE_URL,
        lat=lat,
        long=long,
        radius=radius,
        term=term,
        num_biz_requested=num_biz_requested,
        category=category,
    )
    return json.loads(content)
Perform a Yelp Review Search based on a geopoint and radius tuple .
12,540
def by_location(self, location, cc=None, radius=None, term=None, num_biz_requested=None, category=None):
    """Perform a Yelp Review Search based on a location specifier.

    Fix: the ``category`` argument was accepted but never forwarded to the
    request (unlike by_bounding_box); it is now passed through.
    """
    header, content = self._http_request(
        self.BASE_URL,
        location=location,
        cc=cc,
        radius=radius,
        term=term,
        num_biz_requested=num_biz_requested,
        category=category,
    )
    return json.loads(content)
Perform a Yelp Review Search based on a location specifier .
12,541
def by_phone(self, phone, cc=None):
    """Perform a Yelp Phone API search for the given phone number.

    :param cc: optional ISO country code
    """
    response_header, response_body = self._http_request(self.BASE_URL, phone=phone, cc=cc)
    return json.loads(response_body)
Perform a Yelp Phone API Search based on phone number given .
12,542
def by_geopoint(self, lat, long):
    """Perform a Yelp Neighborhood API search for the given coordinates."""
    response_header, response_body = self._http_request(self.BASE_URL, lat=lat, long=long)
    return json.loads(response_body)
Perform a Yelp Neighborhood API Search based on a geopoint .
12,543
def by_location(self, location, cc=None):
    """Perform a Yelp Neighborhood API search for a location specifier.

    :param cc: optional ISO country code
    """
    response_header, response_body = self._http_request(self.BASE_URL, location=location, cc=cc)
    return json.loads(response_body)
Perform a Yelp Neighborhood API Search based on a location specifier .
12,544
def check_transport_host(self):
    """Check whether the zeromq transport host is reachable on port 8080.

    :return: True when a TCP connection to events-server:8080 succeeds,
        False otherwise.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # connect_ex returns 0 on success instead of raising
        result = sock.connect_ex(('events-server', 8080))
    finally:
        # bug fix: the socket was previously leaked on every call
        sock.close()
    if result == 0:
        logging.info('port 8080 on zmq is open!')
        return True
    return False
Check if zeromq socket is available on transport host
12,545
def sample ( config , samples ) : url = get_api_path ( 'sample.json' ) multiple_files = [ ] images = [ s [ 'image' ] for s in samples ] labels = [ s [ 'label' ] for s in samples ] for image in images : multiple_files . append ( ( 'images' , ( image , open ( image , 'rb' ) , 'image/png' ) ) ) headers = get_headers ( no_...
Upload a series of samples . Each sample has keys image and label . Images are ignored if the rate limit is hit .
12,546
def measure ( config , result , max_retries = 10 ) : url = get_api_path ( 'measurement.json' ) data = { 'config' : config , 'result' : result } retries = 0 while ( retries < max_retries ) : try : r = requests . post ( url , data = json . dumps ( data , cls = HCEncoder ) , headers = get_headers ( ) , timeout = 30 ) retu...
Records results on hyperchamber . io . Used when you are done testing a config .
12,547
def move_selection(reverse=False):
    """Advance the global selection to the next gunicorn PID.

    The sorted PID list is walked twice so the selection wraps around the
    end; with reverse=True the order is descending, moving selection back.
    """
    global selected_pid
    # a stale selection (process disappeared) resets to "nothing selected"
    if selected_pid not in gunicorns:
        selected_pid = None
    ordered = sorted(gunicorns.keys(), reverse=reverse)
    take_next = False
    for candidate in ordered + ordered:
        if selected_pid is None or take_next:
            selected_pid = candidate
            return
        take_next = candidate == selected_pid
Goes through the list of gunicorns setting the selected as the one after the currently selected .
12,548
def update_gunicorns ( ) : global tick tick += 1 if ( tick * screen_delay ) % ps_delay != 0 : return tick = 0 for pid in gunicorns : gunicorns [ pid ] . update ( { "workers" : 0 , "mem" : 0 } ) ps = Popen ( PS_ARGS , stdout = PIPE ) . communicate ( ) [ 0 ] . split ( "\n" ) headings = ps . pop ( 0 ) . split ( ) name_col...
Updates the dict of gunicorn processes . Run the ps command and parse its output for processes named after gunicorn building up a dict of gunicorn processes . When new gunicorns are discovered run the netstat command to determine the ports they re serving on .
12,549
def handle_keypress ( screen ) : global selected_pid try : key = screen . getkey ( ) . upper ( ) except : return if key in ( "KEY_DOWN" , "J" ) : move_selection ( ) elif key in ( "KEY_UP" , "K" ) : move_selection ( reverse = True ) elif key in ( "A" , "+" ) : send_signal ( "TTIN" ) if selected_pid in gunicorns : gunico...
Check for a key being pressed and handle it if applicable .
12,550
def format_row(pid="", port="", name="", mem="", workers="", prefix_char=" "):
    """Pad each column consistently and return the formatted row.

    The width of the very first row formatted is captured into the global
    screen_width, which the display code uses to size the window.
    """
    global screen_width
    row = "%s%-5s %-6s %-25s %8s %7s " % (prefix_char, pid, port, name, mem, workers)
    if screen_width is None:
        screen_width = len(row)
    return row
Applies consistent padding to each of the columns in a row and serves as the source of the overall screen width .
12,551
def display_output ( screen ) : format_row ( ) screen_height = len ( gunicorns ) + len ( instructions . split ( "\n" ) ) + 9 if not gunicorns : screen_height += 2 screen . erase ( ) win = curses . newwin ( screen_height , screen_width + 6 , 1 , 3 ) win . bkgd ( " " , curses . color_pair ( 1 ) ) win . border ( ) x = 3 b...
Display the menu list of gunicorns .
12,552
def main ( ) : stdscr = curses . initscr ( ) curses . start_color ( ) curses . init_pair ( 1 , foreground_colour , background_colour ) curses . noecho ( ) stdscr . keypad ( True ) stdscr . nodelay ( True ) try : curses . curs_set ( False ) except : pass try : while True : try : update_gunicorns ( ) handle_keypress ( st...
Main entry point for gunicorn_console .
12,553
def _get_variant_silent(parser, variant):
    """Fetch genotypes for *variant* with "not found" logging disabled.

    The previous LOG_NOT_FOUND value is always restored, even when the
    parser raises (the original leaked the disabled state on error).
    """
    prev_log = config.LOG_NOT_FOUND
    config.LOG_NOT_FOUND = False
    try:
        return parser.get_variant_genotypes(variant)
    finally:
        config.LOG_NOT_FOUND = prev_log
Gets a variant from the parser while disabling logging .
12,554
def _attrs_(mcs, cls, attr_name: str) -> Tuple[Any, ...]:
    """Return a tuple of ``attr_name`` values taken from every member of *cls*.

    Order follows iteration order of *cls*.
    """
    # direct generator instead of map(lambda)+list(): same order, less overhead
    return tuple(getattr(member, attr_name) for member in cls)
Returns a tuple containing just the value of the given attr_name of all the elements from the cls .
12,555
def _from_attr_(mcs, cls, attr_name: str, attr_value: Any) -> Optional[Any]:
    """Return the first member of *cls* whose ``attr_name`` equals *attr_value*.

    Returns None when no member matches.  (The original annotated the return
    as ``TypeVar``, which is not a meaningful type; it is now Optional[Any].)
    """
    return next(
        (member for member in cls if getattr(member, attr_name) == attr_value),
        None,
    )
Returns the enumeration item regarding to the attribute name and value or None if not found for the given cls
12,556
def describe ( cls ) -> None : max_lengths = [ ] for attr_name in cls . attr_names ( ) : attr_func = "%ss" % attr_name attr_list = list ( map ( str , getattr ( cls , attr_func ) ( ) ) ) + [ attr_name ] max_lengths . append ( max ( list ( map ( len , attr_list ) ) ) ) row_format = "{:>%d} | {:>%d} | {:>%d}" % tuple ( ma...
Prints in the console a table showing all the attributes for all the definitions inside the class
12,557
def read_iter ( use_fpi ) : filename_rhosuffix = 'exe/inv.lastmod_rho' filename = 'exe/inv.lastmod' if ( not os . path . isfile ( filename ) ) : print ( 'Inversion was not finished! No last iteration found.' ) if ( use_fpi is True ) : if ( os . path . isfile ( filename_rhosuffix ) ) : filename = filename_rhosuffix line...
Return the path to the final . mag file either for the complex or the fpi inversion .
12,558
def list_datafiles ( ) : is_cplx , is_fpi = td_type ( ) it_rho = read_iter ( is_fpi ) it_phase = read_iter ( False ) files = [ 'inv/coverage.mag' ] dtype = [ 'cov' ] files . append ( it_rho ) dtype . append ( 'mag' ) if is_cplx : files . append ( it_rho . replace ( 'mag' , 'pha' ) ) dtype . append ( 'pha' ) if is_fpi :...
Get the type of the tomodir and the highest iteration to list all files which will be plotted .
12,559
def read_datafiles ( files , dtype , column ) : pha = [ ] pha_fpi = [ ] for filename , filetype in zip ( files , dtype ) : if filetype == 'cov' : cov = load_cov ( filename ) elif filetype == 'mag' : mag = load_rho ( filename , column ) elif filetype == 'pha' : pha = load_rho ( filename , 2 ) elif filetype == 'pha_fpi' ...
Load the datafiles and return cov mag phase and fpi phase values .
12,560
def load_cov(name):
    """Load a coverage-structured datafile and return its third column.

    The first line is treated as a header and the last line as a footer.
    """
    return np.genfromtxt(name, skip_header=1, skip_footer=1, usecols=([2]))
Load a datafile with coverage file structure .
12,561
def load_rho(name, column):
    """Load a rho-structured datafile (mag/phase-like) and return one column.

    :param name: path to the datafile; the first row is a header
    :param column: zero-based column index to extract
    :raises ValueError: when the column does not exist or the file cannot be
        parsed; the original cause is chained for debugging.
    """
    try:
        content = np.loadtxt(name, skiprows=1, usecols=([column]))
    except Exception as exc:
        # was a bare ``except:`` that hid the real error (and would even
        # swallow KeyboardInterrupt); keep the public ValueError contract
        raise ValueError('Given column to open does not exist.') from exc
    return content
Load a datafile with rho structure like mag and phase
12,562
def calc_complex ( mag , pha ) : complx = [ 10 ** m * math . e ** ( 1j * p / 1e3 ) for m , p in zip ( mag , pha ) ] real = [ math . log10 ( ( 1 / c ) . real ) for c in complx ] imag = [ ] for c in complx : if ( ( 1 / c ) . imag ) == 0 : imag . append ( math . nan ) else : i = math . log10 ( abs ( ( 1 / c ) . imag ) ) i...
Calculate real and imaginary part of the complex conductivity from magnitude and phase in log10 .
12,563
def plot_ratio ( cid , ax , plotman , title , alpha , vmin , vmax , xmin , xmax , zmin , zmax , xunit , cbtiks , elecs ) : cblabel = 'anisotropy ratio' zlabel = 'z [' + xunit + ']' xlabel = 'x [' + xunit + ']' cm = 'RdYlGn' xmin , xmax , zmin , zmax , vmin , vmax = check_minmax ( plotman , cid , xmin , xmax , zmin , zm...
Plot ratio of two conductivity directions .
12,564
def check_minmax ( plotman , cid , xmin , xmax , zmin , zmax , vmin , vmax ) : if xmin is None : xmin = plotman . grid . grid [ 'x' ] . min ( ) if xmax is None : xmax = plotman . grid . grid [ 'x' ] . max ( ) if zmin is None : zmin = plotman . grid . grid [ 'z' ] . min ( ) if zmax is None : zmax = plotman . grid . grid...
Get min and max values for axes and colorbar if not given
12,565
def citation_director ( ** kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) content = kwargs . get ( 'content' , '' ) if qualifier == 'publicationTitle' : return CitationJournalTitle ( content = content ) elif qualifier == 'volume' : return CitationVolume ( content = content ) elif qualifier == 'issue' : return...
Direct the citation elements based on their qualifier .
12,566
def identifier_director ( ** kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) content = kwargs . get ( 'content' , '' ) if qualifier == 'ISBN' : return CitationISBN ( content = content ) elif qualifier == 'ISSN' : return CitationISSN ( content = content ) elif qualifier == 'DOI' : return CitationDOI ( content =...
Direct the identifier elements based on their qualifier .
12,567
def get_author ( self , ** kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) children = kwargs . get ( 'children' , [ ] ) creator_type_per = False author_name = None for child in children : if child . tag == 'type' and child . content == 'per' : creator_type_per = True elif child . tag == 'name' : author_name = ...
Determine the authors from the creator field .
12,568
def get_publisher_name(self, **kwargs):
    """Return the content of the first child element tagged 'name', or None."""
    for element in kwargs.get('children', []):
        if element.tag == 'name':
            return element.content
    return None
Get the publisher name .
12,569
def get_publication_date ( self , ** kwargs ) : date_string = kwargs . get ( 'content' , '' ) date_match = CREATION_DATE_REGEX . match ( date_string ) month_match = CREATION_MONTH_REGEX . match ( date_string ) year_match = CREATION_YEAR_REGEX . match ( date_string ) if date_match : ( year , month , day ) = date_match ....
Determine the creation date for the publication date .
12,570
def get_online_date ( self , ** kwargs ) : qualifier = kwargs . get ( 'qualifier' , '' ) content = kwargs . get ( 'content' , '' ) if qualifier == 'metadataCreationDate' : date_match = META_CREATION_DATE_REGEX . match ( content ) ( year , month , day ) = date_match . groups ( '' ) creation_date = datetime . date ( int ...
Get the online date from the meta creation date .
12,571
def get_institution(self, **kwargs):
    """Return the degree-granting institution when qualifier is 'grantor'."""
    if kwargs.get('qualifier', '') == 'grantor':
        return kwargs.get('content', '')
    return None
Get the dissertation institution .
12,572
def model_results(self) -> str:
    """Read and return the contents of ``model.results`` in self.directory."""
    results_path = os.path.join(self.directory, "model.results")
    with open(results_path) as handle:
        return handle.read()
Reads the model . results file
12,573
def header(self) -> str:
    """Return 'pipeline/phase/data' as a single path-style header string."""
    parts = (self.pipeline, self.phase, self.data)
    return "/".join(parts)
A header created by joining the pipeline phase and data names
12,574
def optimizer(self) -> non_linear.NonLinearOptimizer:
    """Lazily unpickle and cache the optimizer that was used in this phase.

    NOTE(review): pickle.loads on arbitrary files is unsafe for untrusted
    data; assumed safe here because the directory is produced by this project.
    """
    if self.__optimizer is None:
        pickle_path = os.path.join(self.directory, ".optimizer.pickle")
        with open(pickle_path, "r+b") as handle:
            self.__optimizer = pickle.loads(handle.read())
    return self.__optimizer
The optimizer object that was used in this phase
12,575
def phases_with(self, **kwargs) -> [PhaseOutput]:
    """Filter phases by attribute equality.

    Keyword arguments are attribute-name/value pairs (e.g. phase, data or
    pipeline); with no arguments every phase is returned.
    """
    # generator instead of a materialized list lets all() short-circuit
    return [
        phase
        for phase in self.phases
        if all(getattr(phase, key) == value for key, value in kwargs.items())
    ]
Filters phases . If no arguments are passed all phases are returned . Arguments must be key value pairs with phase data or pipeline as the key .
12,576
def optimizers_with(self, **kwargs) -> [non_linear.NonLinearOptimizer]:
    """Collect the optimizer of every phase matching the given filters."""
    matching_phases = self.phases_with(**kwargs)
    return [phase.optimizer for phase in matching_phases]
Load a list of optimizers for phases in the directory with zero or more filters applied .
12,577
def model_results(self, **kwargs) -> str:
    """Join the header and model results of every matching phase.

    Filters (keyword arguments) are forwarded to phases_with.
    """
    sections = (
        "{}\n\n{}".format(phase.header, phase.model_results)
        for phase in self.phases_with(**kwargs)
    )
    return "\n\n".join(sections)
Collates model results from all phases in the directory or some subset if filters are applied .
12,578
def branches ( config , searchstring = "" ) : repo = config . repo branches_ = list ( find ( repo , searchstring ) ) if branches_ : merged = get_merged_branches ( repo ) info_out ( "Found existing branches..." ) print_list ( branches_ , merged ) if len ( branches_ ) == 1 and searchstring : active_branch = repo . active...
List all branches . And if exactly 1 found offer to check it out .
12,579
def decodebytes(input):
    """Decode a base64 string to bytes, dispatching on the Python major version.

    The Python 2 path is kept for backwards compatibility with legacy
    interpreters.
    """
    if sys.version_info[0] >= 3:
        return _decodebytes_py3(input)
    return _decodebytes_py2(input)
Decode base64 string to byte array .
12,580
def capture ( self , commit = "" ) : self . _validateProvider ( self . _provider ) client = RepositoryClientBuilder ( ) . buildWithRemoteClient ( self . _provider ) if self . _provider [ "provider" ] == "github" : self . _signature = ProjectGithubRepositoryCapturer ( self . _provider , client ) . capture ( commit ) . s...
Capture the current state of a project based on its provider
12,581
def found_duplicates(counts):
    """Log a warning for each duplicated marker and how it will be renamed.

    :param counts: iterable of (marker, count) pairs
    """
    _logger.warning("Duplicated markers found")
    for marker, count in counts:
        _logger.warning(" - {}: {:,d} times".format(marker, count))
    _logger.warning("Appending ':dupX' to the duplicated markers according "
                    "to their location in the file.")
Log that duplicates were found .
12,582
def patch_model_schemas(mapping):
    """Rewrite mbdata.models table schemas using *mapping* (old name -> new).

    Tables without an explicit schema are left untouched; unmapped schema
    names are kept as-is.
    """
    from mbdata.models import Base

    for table in Base.metadata.sorted_tables:
        if table.schema is None:
            continue
        table.schema = mapping.get(table.schema, table.schema)
Update mbdata . models to use different schema names
12,583
def detectRamPorts ( stm : IfContainer , current_en : RtlSignalBase ) : if stm . ifFalse or stm . elIfs : return for _stm in stm . ifTrue : if isinstance ( _stm , IfContainer ) : yield from detectRamPorts ( _stm , _stm . cond & current_en ) elif isinstance ( _stm , Assignment ) : if isinstance ( _stm . dst . _dtype , H...
Detect RAM ports in If statement
12,584
def addInputPort ( self , node , name , i : Union [ Value , RtlSignalBase ] , side = PortSide . WEST ) : root = self . node port = node . addPort ( name , PortType . INPUT , side ) netCtxs = self . netCtxs if isinstance ( i , LPort ) : root . addEdge ( i , port ) elif isConst ( i ) : i = i . staticEval ( ) c , wasThere...
Add and connect input port on subnode
12,585
def addOutputPort ( self , node : LNode , name : str , out : Optional [ Union [ RtlSignalBase , LPort ] ] , side = PortSide . EAST ) : oPort = node . addPort ( name , PortType . OUTPUT , side ) if out is not None : if isinstance ( out , LPort ) : self . node . addEdge ( oPort , out ) elif out . hidden : raise ValueErro...
Add and connect output port on subnode
12,586
def renderContent ( self ) : stm = self . stm portCtx = self . portCtx for o in stm . _outputs : if not self . isVirtual : portCtx . register ( o , PortType . OUTPUT ) canHaveRamPorts = isinstance ( stm , IfContainer ) and arr_any ( chain ( stm . _inputs , stm . _outputs ) , lambda s : isinstance ( s . _dtype , HArray ...
Walk from outputs to inputs for each public signal register port of wrap node if required lazy load all operator and statement nodes for signals
12,587
def generate ( self , project ) : for assignment in self . s2n_mapping : if assignment [ "ipprefix" ] == project : self . _name = assignment [ "package" ] return self name = project if name . startswith ( "github.com" ) : name = re . sub ( r"^github\.com" , "github" , name ) if name . startswith ( "gopkg.in" ) : name =...
Package name construction is based on provider not on prefix . Prefix does not have to equal provider_prefix .
12,588
def hash_host ( hostname , salt = None ) : if salt is None : salt = os . urandom ( sha1 ( ) . digest_size ) else : if salt . startswith ( '|1|' ) : salt = salt . split ( '|' ) [ 2 ] salt = decodebytes ( b ( salt ) ) assert len ( salt ) == sha1 ( ) . digest_size hmac = HMAC ( salt , b ( hostname ) , sha1 ) . digest ( ) ...
Return a hashed form of the hostname as used by OpenSSH when storing hashed hostnames in the known_hosts file .
12,589
def _read_elem_nodes ( self , fid ) : nodes = { } nodes_raw = np . empty ( ( self . header [ 'nr_nodes' ] , 3 ) , dtype = float ) for nr in range ( 0 , self . header [ 'nr_nodes' ] ) : node_line = fid . readline ( ) . lstrip ( ) nodes_raw [ nr , : ] = np . fromstring ( node_line , dtype = float , sep = ' ' ) nodes_r...
Read the nodes from an opened elem . dat file . Correct for CutMcK transformations .
12,590
def calculate_dimensions ( self ) : x_coordinates = np . sort ( self . grid [ 'x' ] [ : , 0 ] ) self . nr_nodes_z = np . where ( x_coordinates == x_coordinates [ 0 ] ) [ 0 ] . size self . nr_elements_x = self . elements . shape [ 0 ] / ( self . nr_nodes_z - 1 ) self . nr_nodes_x = self . nr_elements_x + 1 self . nr_ele...
For a regular grid calculate the element and node dimensions
12,591
def _read_elem_neighbors ( self , fid ) : sizes = sum ( [ len ( self . element_data [ key ] ) for key in ( 11 , 12 ) if self . element_data . get ( key , None ) is not None ] ) self . neighbors = [ ] try : for i in range ( 0 , sizes ) : self . neighbors . append ( int ( fid . readline ( ) . strip ( ) ) ) except Excepti...
Read the boundary - element - neighbors from the end of the file
12,592
def load_grid(self, elem_file, elec_file):
    """Load the element (elem.dat) and electrode (elec.dat) definitions."""
    self.load_elem_file(elem_file)
    self.load_elec_file(elec_file)
Load elem . dat and elec . dat
12,593
def get_element_centroids(self):
    """Return an (N, 2) array holding the (x, z) centroid of every element."""
    x_means = np.mean(self.grid['x'], axis=1)
    z_means = np.mean(self.grid['z'], axis=1)
    return np.vstack((x_means, z_means)).T
return the central points of all elements
12,594
def get_internal_angles ( self ) : angles = [ ] for elx , elz in zip ( self . grid [ 'x' ] , self . grid [ 'z' ] ) : el_angles = [ ] xy = np . vstack ( ( elx , elz ) ) for i in range ( 0 , elx . size ) : i1 = ( i - 1 ) % elx . size i2 = ( i + 1 ) % elx . size a = ( xy [ : , i ] - xy [ : , i1 ] ) b = ( xy [ : , i2 ] - x...
Compute all internal angles of the grid
12,595
def Wm ( self ) : centroids = self . get_element_centroids ( ) Wm = scipy . sparse . csr_matrix ( ( self . nr_of_elements , self . nr_of_elements ) ) for i , nb in enumerate ( self . element_neighbors ) : for j , edges in zip ( nb , self . element_neighbors_edges [ i ] ) : edge_coords = self . nodes [ 'presort' ] [ edg...
Return the smoothing regularization matrix Wm of the grid
12,596
def create_tomodir ( self , directory ) : pwd = os . getcwd ( ) if not os . path . isdir ( directory ) : os . makedirs ( directory ) os . chdir ( directory ) directories = ( 'config' , 'exe' , 'grid' , 'mod' , 'mod/pot' , 'mod/sens' , 'rho' , ) for directory in directories : if not os . path . isdir ( directory ) : os ...
Create a tomodir subdirectory structure in the given directory
12,597
def load_rho_file(self, filename):
    """Load a forward model from a rho.dat file and register its parameter sets.

    :return: list of parameter ids [magnitude_pid, phase_pid]
    """
    pids = self.parman.load_from_rho_file(filename)
    magnitude_pid, phase_pid = pids[0], pids[1]
    self.register_magnitude_model(magnitude_pid)
    self.register_phase_model(phase_pid)
    return pids
Load a forward model from a rho . dat file
12,598
def save_to_tomodir ( self , directory ) : self . create_tomodir ( directory ) self . grid . save_elem_file ( directory + os . sep + 'grid/elem.dat' ) self . grid . save_elec_file ( directory + os . sep + 'grid/elec.dat' ) if self . configs . configs is not None : self . configs . write_crmod_config ( directory + os . ...
Save the tomodir instance to a directory structure .
12,599
def _save_sensitivities ( self , directory ) : print ( 'saving sensitivities' ) digits = int ( np . ceil ( np . log10 ( self . configs . configs . shape [ 0 ] ) ) ) for i in range ( 0 , self . configs . configs . shape [ 0 ] ) : sens_data , meta_data = self . get_sensitivity ( i ) filename_raw = 'sens{0:0' + '{0}' . fo...
save sensitivities to a directory