idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
12,500
def start_txn(self, txn_name=None):
    """Request a new transaction from the repository and cache it in self.txns.

    Args:
        txn_name: optional label; a random uuid4 hex name is generated
            when omitted.

    Returns:
        Transaction instance on HTTP 201 (otherwise returns None implicitly).
    """
    if not txn_name:
        txn_name = uuid.uuid4().hex
    response = self.api.http_request('POST', '%s/fcr:tx' % self.root, data=None, headers=None)
    if response.status_code == 201:
        txn_uri = response.headers['Location']
        logger.debug("spawning transaction: %s" % txn_uri)
        new_txn = Transaction(self, txn_name, txn_uri, expires=response.headers['Expires'])
        self.txns[txn_name] = new_txn
        return new_txn
Request new transaction from repository init new Transaction store in self . txns
12,501
def get_txn(self, txn_name, txn_uri):
    """Retrieve a known transaction and add it to self.txns.

    Args:
        txn_name: name to store the transaction under.
        txn_uri: URI of the transaction (parsed via self.parse_uri).

    Returns:
        Transaction on HTTP 200, False if the transaction is gone (404/410).

    Raises:
        Exception: for any other HTTP status.
    """
    txn_uri = self.parse_uri(txn_uri)
    txn_response = self.api.http_request('GET', txn_uri, data=None, headers=None)
    if txn_response.status_code == 200:
        # fix: log message previously misspelled "transactoin"
        logger.debug("transaction found: %s" % txn_uri)
        txn = Transaction(self, txn_name, txn_uri, expires=None)
        self.txns[txn_name] = txn
        return txn
    elif txn_response.status_code in [404, 410]:
        logger.debug("transaction does not exist: %s" % txn_uri)
        return False
    else:
        raise Exception('HTTP %s, could not retrieve transaction' % txn_response.status_code)
Retrieves known transaction and adds to self . txns .
12,502
def keep_alive(self):
    """Keep the current transaction alive; update self.expires on success.

    Returns:
        True on HTTP 204 (transaction extended), False on 410 (gone).

    Raises:
        Exception: for any other HTTP status.
    """
    resp = self.api.http_request('POST', '%sfcr:tx' % self.root, data=None, headers=None)
    if resp.status_code == 204:
        logger.debug("continuing transaction: %s" % self.root)
        self.active = True
        self.expires = resp.headers['Expires']
        return True
    if resp.status_code == 410:
        logger.debug("transaction does not exist: %s" % self.root)
        self.active = False
        return False
    raise Exception('HTTP %s, could not continue transaction' % resp.status_code)
Keep the current transaction alive and update self.expires.
12,503
def _close(self, close_type):
    """End the transaction by committing or rolling back.

    Args:
        close_type: 'commit' or 'rollback' (appended as fcr:<close_type>).

    Returns:
        True on HTTP 204, False if the transaction is gone (404/410).

    Raises:
        Exception: for any other HTTP status.
    """
    resp = self.api.http_request('POST', '%sfcr:tx/fcr:%s' % (self.root, close_type), data=None, headers=None)
    if resp.status_code == 204:
        logger.debug("%s for transaction: %s, successful" % (close_type, self.root))
        self.active = False
        return True
    if resp.status_code in [404, 410]:
        logger.debug("transaction does not exist: %s" % self.root)
        self.active = False
        return False
    raise Exception('HTTP %s, could not commit transaction' % resp.status_code)
Ends transaction by committing or rolling back all changes during transaction .
12,504
def http_request(self, verb, uri, data=None, headers=None, files=None, response_format=None, is_rdf=True, stream=False):
    """Primary route for all HTTP requests to the repository.

    Exposes most requests-library parameters plus convenience behavior:
    for RDF GETs an Accept header is filled from response_format (or the
    repository default serialization) when the caller did not set one.
    """
    if is_rdf:
        if verb == 'GET':
            # negotiate an RDF serialization for the response
            if not response_format:
                response_format = self.repo.default_serialization
            if headers and 'Accept' not in headers.keys():
                headers['Accept'] = response_format
            else:
                headers = {'Accept': response_format}
    # rdflib URIRefs must be flattened to plain strings for requests
    if type(uri) == rdflib.term.URIRef:
        uri = uri.toPython()
    logger.debug("%s request for %s, format %s, headers %s" % (verb, uri, response_format, headers))
    session = requests.Session()
    req = requests.Request(verb, uri,
                           auth=(self.repo.username, self.repo.password),
                           data=data, headers=headers, files=files)
    prepped = session.prepare_request(req)
    return session.send(prepped, stream=stream)
Primary route for all HTTP requests to repository . Ability to set most parameters for requests library with some additional convenience parameters as well .
12,505
def parse_rdf_payload(self, data, headers):
    """Parse an RDF payload into an rdflib Graph.

    The parser format is derived from the response Content-Type header;
    text/plain is treated as application/n-triples and any charset
    suffix is stripped.
    """
    content_type = headers['Content-Type']
    if content_type.startswith('text/plain'):
        logger.debug('text/plain Content-Type detected, using application/n-triples for parser')
        parse_format = 'application/n-triples'
    else:
        parse_format = content_type
    # strip e.g. ";charset=utf-8" suffixes
    if ';charset' in parse_format:
        parse_format = parse_format.split(';')[0]
    return rdflib.Graph().parse(data=data.decode('utf-8'), format=parse_format)
small function to parse RDF payloads from various repository endpoints
12,506
def _derive_namespaces(self):
    """Collect unique namespace URIs from the diff graphs, then map prefixes.

    Loops over the overlap/removed/added graphs in self.diffs, gathers
    namespace URIs from predicates and objects, and pins each found URI
    to a known prefix from self.prefixes into self.update_prefixes.
    """
    for graph in [self.diffs.overlap, self.diffs.removed, self.diffs.added]:
        for s, p, o in graph:
            # fix: previously used bare except and logged `ns_uri`, which
            # could be unbound on the first failure (or stale from a prior
            # triple); catch Exception and log the term that failed.
            try:
                ns_prefix, ns_uri, predicate = graph.compute_qname(p)
                self.update_namespaces.add(ns_uri)
            except Exception:
                logger.debug('could not parse predicate URI: %s' % p)
            try:
                ns_prefix, ns_uri, predicate = graph.compute_qname(o)
                self.update_namespaces.add(ns_uri)
            except Exception:
                logger.debug('could not parse object URI: %s' % o)
    logger.debug(self.update_namespaces)
    # pin each namespace URI to a known prefix
    for ns_uri in self.update_namespaces:
        for k in self.prefixes.__dict__:
            if str(ns_uri) == str(self.prefixes.__dict__[k]):
                logger.debug('adding prefix %s for uri %s to unique_prefixes' % (k, str(ns_uri)))
                self.update_prefixes[k] = self.prefixes.__dict__[k]
Small method to loop through three graphs in self . diffs identify unique namespace URIs . Then loop through provided dictionary of prefixes and pin one to another .
12,507
def check_exists(self):
    """HEAD the resource URI, update self.exists/self.status_code, return exists.

    200 -> True; 404/410 -> False. Other status codes leave self.exists
    untouched (matching the original behavior).
    """
    response = self.repo.api.http_request('HEAD', self.uri)
    self.status_code = response.status_code
    if self.status_code == 200:
        self.exists = True
    elif self.status_code in (404, 410):
        self.exists = False
    return self.exists
Check if the resource exists, update self.exists, and return it.
12,508
def create(self, specify_uri=False, ignore_tombstone=False, serialization_format=None, stream=False, auto_refresh=None):
    """Primary method to create resources.

    PUT is used when specify_uri is True (client-assigned URI), POST
    otherwise. Binary resources send their prepared payload; RDF
    resources send the serialized graph with a matching Content-Type.

    Raises:
        Exception: if the resource already exists locally.
    """
    if self.exists:
        raise Exception('resource exists attribute True, aborting')
    verb = 'PUT' if specify_uri else 'POST'
    logger.debug('creating resource %s with verb %s' % (self.uri, verb))
    if issubclass(type(self), NonRDFSource):
        self.binary._prep_binary()
        data = self.binary.data
    else:
        if not serialization_format:
            serialization_format = self.repo.default_serialization
        data = self.rdf.graph.serialize(format=serialization_format)
        logger.debug('Serialized graph used for resource creation:')
        logger.debug(data.decode('utf-8'))
        self.headers['Content-Type'] = serialization_format
    response = self.repo.api.http_request(verb, self.uri, data=data, headers=self.headers, stream=stream)
    return self._handle_create(response, ignore_tombstone, auto_refresh)
Primary method to create resources .
12,509
def options(self):
    """Return the response headers from an OPTIONS request against self.uri."""
    return self.repo.api.http_request('OPTIONS', self.uri).headers
Small method to return headers of an OPTIONS request to self . uri
12,510
def copy(self, destination):
    """COPY this resource to another location.

    Returns:
        The parsed destination URI on HTTP 201.

    Raises:
        Exception: for any other HTTP status.
    """
    destination_uri = self.repo.parse_uri(destination)
    response = self.repo.api.http_request(
        'COPY', self.uri, data=None,
        headers={'Destination': destination_uri.toPython()})
    if response.status_code != 201:
        raise Exception('HTTP %s, could not move resource %s to %s'
                        % (response.status_code, self.uri, destination_uri))
    return destination_uri
Method to copy resource to another location
12,511
def delete(self, remove_tombstone=True):
    """DELETE the resource; optionally remove its tombstone as well.

    Returns:
        True (unconditionally, matching the original contract).
    """
    response = self.repo.api.http_request('DELETE', self.uri)
    if response.status_code == 204:
        # wipe local state once the repository confirms removal
        self._empty_resource_attributes()
        if remove_tombstone:
            self.repo.api.http_request('DELETE', '%s/fcr:tombstone' % self.uri)
    return True
Method to delete resources .
12,512
def refresh(self, refresh_binary=True):
    """GET the resource and refresh local RDF (and optionally binary) state.

    Args:
        refresh_binary: when True and the resource is a NonRDFSource,
            also refresh the binary payload.

    Raises:
        Exception: if the repository reports a different resource type
            than the one instantiated.
    """
    updated_self = self.repo.get_resource(self.uri)
    if not isinstance(self, type(updated_self)):
        raise Exception('Instantiated %s, but repository reports this resource is %s'
                        % (type(updated_self), type(self)))
    if updated_self:
        # copy fresh state onto this instance
        self.status_code = updated_self.status_code
        self.rdf.data = updated_self.rdf.data
        self.headers = updated_self.headers
        self.exists = updated_self.exists
        if type(self) != NonRDFSource:
            self._parse_graph()
        self.versions = SimpleNamespace()
        if type(updated_self) == NonRDFSource and refresh_binary:
            self.binary.refresh(updated_self)
        if hasattr(self, '_post_refresh'):
            self._post_refresh()
        del updated_self
    else:
        # fix: the %s placeholder was never filled in the original message
        logger.debug('resource %s not found, dumping values' % self.uri)
        self._empty_resource_attributes()
Performs GET request and refreshes RDF information for resource .
12,513
def _build_rdf(self, data=None):
    """Initialize self.rdf from raw data and parse it into a graph.

    Sets up the rdf namespace container (data, prefixes, uris), binds
    each repository context prefix, then delegates to _parse_graph.
    """
    self.rdf = SimpleNamespace()
    self.rdf.data = data
    self.rdf.prefixes = SimpleNamespace()
    self.rdf.uris = SimpleNamespace()
    for prefix, uri in self.repo.context.items():
        setattr(self.rdf.prefixes, prefix, rdflib.Namespace(uri))
    self._parse_graph()
Parse incoming RDF, keeping the original at self.rdf._orig_graph and a working copy at self.rdf.graph.
12,514
def _parse_graph(self):
    """Parse self.rdf.data into self.rdf.graph via the response Content-Type.

    Also binds known prefixes, records reverse uri->prefix mappings,
    keeps a pristine deep copy of the graph for later diffing, and
    rebuilds the object-like triple accessors.
    """
    if self.exists:
        self.rdf.graph = self.repo.api.parse_rdf_payload(self.rdf.data, self.headers)
    else:
        # absent resource: start from an empty graph
        self.rdf.graph = rdflib.Graph()
    self.rdf.namespace_manager = rdflib.namespace.NamespaceManager(self.rdf.graph)
    for ns_prefix, ns_uri in self.rdf.prefixes.__dict__.items():
        self.rdf.namespace_manager.bind(ns_prefix, ns_uri, override=False)
    # mirror graph namespaces both ways: prefix -> uri and uri -> prefix
    for ns_prefix, ns_uri in self.rdf.graph.namespaces():
        setattr(self.rdf.prefixes, ns_prefix, rdflib.Namespace(ns_uri))
        setattr(self.rdf.uris, rdflib.Namespace(ns_uri), ns_prefix)
    self.rdf._orig_graph = copy.deepcopy(self.rdf.graph)
    self.parse_object_like_triples()
use Content - Type from headers to determine parsing method
12,515
def parse_object_like_triples(self):
    """Rebuild self.rdf.triples for object-like access (triples.<prefix>.<pred>)."""
    self.rdf.triples = SimpleNamespace()
    for s, p, o in self.rdf.graph:
        ns_prefix, ns_uri, predicate = self.rdf.graph.compute_qname(p)
        bucket = getattr(self.rdf.triples, ns_prefix, None)
        if bucket is None:
            bucket = SimpleNamespace()
            setattr(self.rdf.triples, ns_prefix, bucket)
        if not hasattr(bucket, predicate):
            setattr(bucket, predicate, [])
        getattr(bucket, predicate).append(o)
method to parse triples from self . rdf . graph for object - like access
12,516
def _empty_resource_attributes(self):
    """Reset attributes when the resource is removed or found absent."""
    self.status_code = 404
    self.headers = {}
    self.exists = False
    # fix: _build_rdf() populates self.rdf itself and returns None; the old
    # code assigned its return value, clobbering self.rdf with None.
    self._build_rdf()
    if type(self) == NonRDFSource:
        self.binary.empty()
small method to empty values if resource is removed or absent
12,517
def add_triple(self, p, o, auto_refresh=True):
    """Add triple (self.uri, p, o) to the local graph; s is always self.uri."""
    triple = (self.uri, p, self._handle_object(o))
    self.rdf.graph.add(triple)
    self._handle_triple_refresh(auto_refresh)
Add a triple by providing p and o; the subject s is assumed to be the resource URI.
12,518
def set_triple(self, p, o, auto_refresh=True):
    """Set the single triple matching (self.uri, p, *) to the given object."""
    triple = (self.uri, p, self._handle_object(o))
    self.rdf.graph.set(triple)
    self._handle_triple_refresh(auto_refresh)
Assuming the predicate or object matches a single triple sets the other for that triple .
12,519
def remove_triple(self, p, o, auto_refresh=True):
    """Remove triple (self.uri, p, o) from the local graph."""
    triple = (self.uri, p, self._handle_object(o))
    self.rdf.graph.remove(triple)
    self._handle_triple_refresh(auto_refresh)
remove triple by supplying p o
12,520
def _handle_triple_refresh ( self , auto_refresh ) : if auto_refresh : self . parse_object_like_triples ( ) elif auto_refresh == None : if self . repo . default_auto_refresh : self . parse_object_like_triples ( )
method to refresh self . rdf . triples if auto_refresh or defaults set to True
12,521
def update(self, sparql_query_only=False, auto_refresh=None, update_binary=True):
    """Push local graph changes to the repository as a SPARQL PATCH.

    Diffs the modified graph against the original, builds a SPARQL
    update, PATCHes fcr:metadata, and (for binaries) re-PUTs the
    payload. Refresh behavior follows auto_refresh / the repo default.

    Args:
        sparql_query_only: return the generated SPARQL instead of sending.
        auto_refresh: tri-state (True/False/None) refresh override.
        update_binary: also re-upload binary content for NonRDFSources.

    Returns:
        The SPARQL string when sparql_query_only, otherwise True.

    Raises:
        Exception: when the PATCH does not return HTTP 204.
    """
    self._diff_graph()
    sq = SparqlUpdate(self.rdf.prefixes, self.rdf.diffs)
    if sparql_query_only:
        return sq.build_query()
    response = self.repo.api.http_request(
        'PATCH', '%s/fcr:metadata' % self.uri,
        data=sq.build_query(),
        headers={'Content-Type': 'application/sparql-update'})
    if response.status_code != 204:
        logger.debug(response.content)
        raise Exception('HTTP %s, expecting 204' % response.status_code)
    # binaries: re-upload payload unless it is still a streamed response
    if type(self) == NonRDFSource and update_binary and type(self.binary.data) != requests.models.Response:
        self.binary._prep_binary()
        binary_data = self.binary.data
        binary_response = self.repo.api.http_request(
            'PUT', self.uri, data=binary_data,
            headers={'Content-Type': self.binary.mimetype})
        if not auto_refresh and not self.repo.default_auto_refresh:
            logger.debug("not refreshing resource RDF, but updated binary, so must refresh binary data")
            updated_self = self.repo.get_resource(self.uri)
            self.binary.refresh(updated_self)
    if hasattr(self, '_post_update'):
        self._post_update()
    if auto_refresh:
        self.refresh(refresh_binary=update_binary)
    elif auto_refresh is None and self.repo.default_auto_refresh:
        self.refresh(refresh_binary=update_binary)
    return True
Method to update resources in repository . Firing this method computes the difference in the local modified graph and the original one creates an instance of SparqlUpdate and builds a sparql query that represents these differences and sends this as a PATCH request .
12,522
def children(self, as_resources=False):
    """Return hierarchical children (ldp:contains objects) of this resource.

    Args:
        as_resources: when True, resolve each child URI to a Resource.
    """
    contains = self.rdf.prefixes.ldp.contains
    found = [o for _, _, o in self.rdf.graph.triples((None, contains, None))]
    if as_resources:
        logger.debug('retrieving children as resources')
        found = [self.repo.get_resource(child) for child in found]
    return found
method to return hierarchical children of this resource
12,523
def parents(self, as_resources=False):
    """Return hierarchical parents (fedora:hasParent objects) of this resource.

    Args:
        as_resources: when True, resolve each parent URI to a Resource.
    """
    has_parent = self.rdf.prefixes.fedora.hasParent
    found = [o for _, _, o in self.rdf.graph.triples((None, has_parent, None))]
    if as_resources:
        logger.debug('retrieving parent as resource')
        found = [self.repo.get_resource(parent) for parent in found]
    return found
method to return hierarchical parents of this resource
12,524
def siblings(self, as_resources=False):
    """Return hierarchical siblings of this resource, excluding itself."""
    found = set()
    for parent in self.parents(as_resources=True):
        found.update(parent.children(as_resources=as_resources))
    # self is among its parent's children; drop it (KeyError if absent,
    # matching the original behavior)
    if as_resources:
        found.remove(self)
    else:
        found.remove(self.uri)
    return list(found)
method to return hierarchical siblings of this resource .
12,525
def create_version(self, version_label):
    """Create a new version snapshot of the resource under the given label."""
    response = self.repo.api.http_request(
        'POST', '%s/fcr:versions' % self.uri,
        data=None, headers={'Slug': version_label})
    if response.status_code == 201:
        location = response.headers['Location']
        logger.debug('version created: %s' % location)
        self._affix_version(location, version_label)
method to create a new version of the resource as it currently stands
12,526
def get_versions(self):
    """Retrieve all versions of the resource and affix them to self.versions."""
    response = self.repo.api.http_request('GET', '%s/fcr:versions' % self.uri)
    versions_graph = self.repo.api.parse_rdf_payload(response.content, response.headers)
    for version_uri in versions_graph.objects(self.uri, self.rdf.prefixes.fedora.hasVersion):
        label = versions_graph.value(
            version_uri, self.rdf.prefixes.fedora.hasVersionLabel, None).toPython()
        self._affix_version(version_uri, label)
retrieves all versions of an object and stores them at self . versions
12,527
def dump(self, format='ttl'):
    """Return the resource's RDF serialized as a string (default turtle).

    Inspired by .dump from Samvera.
    """
    serialized = self.rdf.graph.serialize(format=format)
    return serialized.decode('utf-8')
Convenience method to return RDF data for resource optionally selecting serialization format . Inspired by . dump from Samvera .
12,528
def revert_to(self):
    """Revert the resource to this version by issuing a PATCH to the version URI.

    Raises:
        Exception: if the PATCH does not return HTTP 204.
    """
    response = self.resource.repo.api.http_request('PATCH', self.uri)
    if response.status_code != 204:
        raise Exception('HTTP %s, could not revert to resource version, %s'
                        % (response.status_code, self.uri))
    logger.debug('reverting to previous version of resource, %s' % self.uri)
    self._current_resource.refresh()
method to revert resource to this version by issuing PATCH
12,529
def delete(self):
    """Remove this version from the resource's history.

    Raises:
        Exception: HTTP 400 (most recent version cannot be removed) or
            any other non-204 status.
    """
    response = self.resource.repo.api.http_request('DELETE', self.uri)
    if response.status_code == 204:
        logger.debug('deleting previous version of resource, %s' % self.uri)
        delattr(self._current_resource.versions, self.label)
    elif response.status_code == 400:
        raise Exception('HTTP 400, likely most recent resource version which cannot be removed')
    else:
        raise Exception('HTTP %s, could not delete resource version: %s'
                        % (response.status_code, self.uri))
Method to remove a version from the resource's history.
12,530
def empty(self):
    """Null out binary attributes, e.g. after the resource is deleted
    but the object remains referenced."""
    for attr in ('resource', 'delivery', 'data', 'mimetype', 'location'):
        setattr(self, attr, None)
    self.stream = False
Method to empty attributes particularly for use when object is deleted but remains as variable
12,531
def refresh(self, updated_self):
    """Copy fresh binary mimetype and data from a re-fetched resource."""
    logger.debug('refreshing binary attributes')
    self.mimetype = updated_self.binary.mimetype
    self.data = updated_self.binary.data
method to refresh binary attributes and data
12,532
def parse_binary(self):
    """Populate mimetype and data for a NonRDF resource's binary content.

    The mimetype comes from the ebucore:hasMimeType triple; data is
    requested with stream=True so content can be consumed lazily.
    """
    self.mimetype = self.resource.rdf.graph.value(
        self.resource.uri,
        self.resource.rdf.prefixes.ebucore.hasMimeType).toPython()
    # fix: the Content-Type header previously read self.resource.mimetype,
    # but the mimetype just parsed above is stored on self (the binary
    # object) — the resource has no such attribute here.
    self.data = self.resource.repo.api.http_request(
        'GET', self.resource.uri, data=None,
        headers={'Content-Type': self.mimetype},
        is_rdf=False, stream=True)
when retrieving a NonRDF resource parse binary data and make available via generators
12,533
def _prep_binary_content(self):
    """Decide how binary content will be delivered: 'payload' or 'header'.

    Favors a Content-Location header (existing or derived from
    self.location); otherwise the payload itself (file-like object or
    bytes) is sent.

    Raises:
        Exception: when no content source is available at all.
    """
    headers = self.resource.headers
    if not self.data and not self.location and 'Content-Location' not in headers.keys():
        raise Exception('creating/updating NonRDFSource requires content from self.binary.data, self.binary.location, or the Content-Location header')
    elif 'Content-Location' in headers.keys():
        logger.debug('Content-Location header found, using')
        self.delivery = 'header'
    elif 'Content-Location' not in headers.keys():
        if self.location:
            # promote self.location into the header
            headers['Content-Location'] = self.location
            self.delivery = 'header'
        elif self.data:
            if isinstance(self.data, io.BufferedIOBase):
                logger.debug('detected file-like object')
            else:
                logger.debug('detected bytes')
            self.delivery = 'payload'
Sets delivery method of either payload or header Favors Content - Location header if set
12,534
def fixity(self, response_format=None):
    """Issue a fixity check and return the verdict plus the parsed graph.

    Returns:
        dict with 'verdict' (bool) and 'premis_graph' (parsed fixity graph).
    """
    if not response_format:
        response_format = self.repo.default_serialization
    response = self.repo.api.http_request('GET', '%s/fcr:fixity' % self.uri)
    fixity_graph = self.repo.api.parse_rdf_payload(response.content, response.headers)
    # fix: verdict was unbound (NameError at return) when the graph had no
    # premis:hasEventOutcome triples; default to False. As before, the
    # last outcome wins when several are present.
    verdict = False
    for outcome in fixity_graph.objects(None, self.rdf.prefixes.premis.hasEventOutcome):
        verdict = outcome.toPython() == 'SUCCESS'
    return {'verdict': verdict, 'premis_graph': fixity_graph}
Issues fixity check return parsed graph
12,535
def get_value(self, consumer=None):
    """Return the channel's current value.

    When a truthy *consumer* is supplied, record it in self.consumers as
    having consumed the value.
    """
    if consumer:
        self.consumers[consumer] = True
    return self.value
If consumer is specified the channel will record that consumer as having consumed the value .
12,536
def set_input_data(self, key, value):
    """Set data on an input channel, creating the channel if it is missing.

    Automatic channel creation covers the case where users set initial
    values on a block whose input channels aren't subscribed to anything
    in the graph yet.
    """
    if key not in self.input_channels.keys():
        self.set_input_channel(key, Channel())
    self.input_channels[key].set_value(Data(self.time, value))
set_input_data will automatically create an input channel if necessary . Automatic channel creation is intended for the case where users are trying to set initial values on a block whose input channels aren t subscribed to anything in the graph .
12,537
def get_output_channel(self, output_channel_name):
    """Return the named output channel, creating and registering it if absent."""
    if output_channel_name not in self.output_channels.keys():
        new_channel = Channel()
        self.output_channels[output_channel_name] = new_channel
        new_channel.add_producer(self)
    return self.output_channels[output_channel_name]
get_output_channel will create a new channel object if necessary .
12,538
def by_bounding_box(self, tl_lat, tl_long, br_lat, br_long, term=None, num_biz_requested=None, category=None):
    """Perform a Yelp Review Search based on a map bounding box.

    Args:
        tl_lat/tl_long: top-left corner coordinates.
        br_lat/br_long: bottom-right corner coordinates.
        term, num_biz_requested, category: optional search filters.

    Returns:
        Decoded JSON response.
    """
    header, content = self._http_request(
        self.BASE_URL,
        tl_lat=tl_lat, tl_long=tl_long,
        br_lat=br_lat, br_long=br_long,
        term=term, category=category,
        num_biz_requested=num_biz_requested)
    return json.loads(content)
Perform a Yelp Review Search based on a map bounding box .
12,539
def by_geopoint(self, lat, long, radius, term=None, num_biz_requested=None, category=None):
    """Perform a Yelp Review Search based on a geopoint and radius tuple.

    Args:
        lat, long, radius: search center and radius.
        term, num_biz_requested, category: optional search filters.

    Returns:
        Decoded JSON response.
    """
    # fix: term and num_biz_requested were previously passed as literal
    # None and category was dropped entirely, so these filters never
    # reached the API (the sibling search methods forward all of them).
    header, content = self._http_request(
        self.BASE_URL, lat=lat, long=long, radius=radius,
        term=term, num_biz_requested=num_biz_requested, category=category)
    return json.loads(content)
Perform a Yelp Review Search based on a geopoint and radius tuple .
12,540
def by_location(self, location, cc=None, radius=None, term=None, num_biz_requested=None, category=None):
    """Perform a Yelp Review Search based on a location specifier.

    Returns:
        Decoded JSON response.
    """
    header, content = self._http_request(
        self.BASE_URL, location=location, cc=cc, radius=radius,
        term=term, num_biz_requested=num_biz_requested)
    return json.loads(content)
Perform a Yelp Review Search based on a location specifier .
12,541
def by_phone(self, phone, cc=None):
    """Perform a Yelp Phone API Search for the given phone number.

    Returns:
        Decoded JSON response.
    """
    header, content = self._http_request(self.BASE_URL, phone=phone, cc=cc)
    return json.loads(content)
Perform a Yelp Phone API Search based on phone number given .
12,542
def by_geopoint(self, lat, long):
    """Perform a Yelp Neighborhood API Search based on a geopoint.

    Returns:
        Decoded JSON response.
    """
    header, content = self._http_request(self.BASE_URL, lat=lat, long=long)
    return json.loads(content)
Perform a Yelp Neighborhood API Search based on a geopoint .
12,543
def by_location(self, location, cc=None):
    """Perform a Yelp Neighborhood API Search based on a location specifier.

    Returns:
        Decoded JSON response.
    """
    header, content = self._http_request(self.BASE_URL, location=location, cc=cc)
    return json.loads(content)
Perform a Yelp Neighborhood API Search based on a location specifier .
12,544
def check_transport_host(self):
    """Return True if the zeromq events-server accepts connections on port 8080."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        if sock.connect_ex(('events-server', 8080)) == 0:
            logging.info('port 8080 on zmq is open!')
            return True
        return False
    finally:
        # fix: the socket was never closed, leaking a file descriptor
        # on every call
        sock.close()
Check if zeromq socket is available on transport host
12,545
def sample(config, samples):
    """Upload a series of samples; each sample has 'image' and 'label' keys.

    Images are ignored server-side if the rate limit is hit.

    Returns:
        Response text on success, None on a request error.
    """
    url = get_api_path('sample.json')
    images = [s['image'] for s in samples]
    labels = [s['label'] for s in samples]
    headers = get_headers(no_content_type=True)
    headers["config"] = json.dumps(config, cls=HCEncoder)
    headers["labels"] = json.dumps(labels)
    print("With headers", headers)
    open_files = []
    try:
        multiple_files = []
        for image in images:
            # fix: file handles were opened and never closed (fd leak);
            # track them and close in the finally block
            fh = open(image, 'rb')
            open_files.append(fh)
            multiple_files.append(('images', (image, fh, 'image/png')))
        r = requests.post(url, files=multiple_files, headers=headers, timeout=30)
        return r.text
    except requests.exceptions.RequestException:
        e = sys.exc_info()[0]
        print("Error while calling hyperchamber - ", e)
        return None
    finally:
        for fh in open_files:
            fh.close()
Upload a series of samples . Each sample has keys image and label . Images are ignored if the rate limit is hit .
12,546
def measure(config, result, max_retries=10):
    """Record results on hyperchamber.io once a config has been tested.

    Retries up to max_retries times on request failures; returns the
    response text on success, None (implicitly) when retries run out.
    """
    url = get_api_path('measurement.json')
    payload = json.dumps({'config': config, 'result': result}, cls=HCEncoder)
    attempts = 0
    while attempts < max_retries:
        try:
            r = requests.post(url, data=payload, headers=get_headers(), timeout=30)
            return r.text
        except requests.exceptions.RequestException:
            e = sys.exc_info()[0]
            print("Error while calling hyperchamber - retrying ", e)
            attempts += 1
Records results on hyperchamber . io . Used when you are done testing a config .
12,547
def move_selection(reverse=False):
    """Advance the selected gunicorn to the next pid (previous if reverse).

    Wraps around by iterating the pid list twice; a vanished selection
    resets to the first pid.
    """
    global selected_pid
    if selected_pid not in gunicorns:
        selected_pid = None
    pids = sorted(gunicorns.keys(), reverse=reverse)
    take_next = selected_pid is None
    for pid in pids + pids:  # doubled so selection wraps around
        if take_next:
            selected_pid = pid
            return
        take_next = pid == selected_pid
Goes through the list of gunicorns setting the selected as the one after the currently selected .
12,548
def update_gunicorns():
    """Update the dict of gunicorn processes from `ps` output.

    Throttled via the global tick counter so `ps` only runs every
    ps_delay seconds. Rebuilds worker counts and memory totals, prunes
    masters with no workers, and looks up listening ports for any
    masters still lacking one.

    NOTE(review): Python 2 idioms (`dict.keys()[:]`, str-typed Popen
    output) — confirm target interpreter before porting.
    """
    global tick
    tick += 1
    if (tick * screen_delay) % ps_delay != 0:
        return
    tick = 0
    # reset counters before re-tallying from ps
    for pid in gunicorns:
        gunicorns[pid].update({"workers": 0, "mem": 0})
    ps = Popen(PS_ARGS, stdout=PIPE).communicate()[0].split("\n")
    headings = ps.pop(0).split()
    name_col = headings.index(cmd_heading)
    num_cols = len(headings) - 1
    for row in ps:
        cols = row.split(None, num_cols)
        if not cols or "gunicorn: " not in cols[name_col]:
            continue
        is_worker = "gunicorn: worker" in cols[name_col]
        # workers are attributed to their parent (master) pid
        if is_worker:
            pid = cols[headings.index("PPID")]
        else:
            pid = cols[headings.index("PID")]
        if pid not in gunicorns:
            gunicorns[pid] = {"workers": 0, "mem": 0, "port": None,
                              "name": cols[name_col].strip().split("[", 1)[1].split("]", 1)[:-1]}
        gunicorns[pid]["mem"] += int(cols[headings.index("RSS")])
        if is_worker:
            gunicorns[pid]["workers"] += 1
    # drop masters that currently show no workers
    for pid in gunicorns.keys()[:]:
        if gunicorns[pid]["workers"] == 0:
            del gunicorns[pid]
    # only hit netstat when some master still lacks a port
    if not [g for g in gunicorns.values() if g["port"] is None]:
        return
    for (pid, port) in ports_for_pids(gunicorns.keys()):
        if pid in gunicorns:
            gunicorns[pid]["port"] = port
Updates the dict of gunicorn processes . Run the ps command and parse its output for processes named after gunicorn building up a dict of gunicorn processes . When new gunicorns are discovered run the netstat command to determine the ports they re serving on .
12,549
def handle_keypress(screen):
    """Check for a pressed key and dispatch the matching action.

    NOTE(review): the "-" alias on the "M" branch is unreachable because
    ("W", "-") matches first — confirm the intended binding.
    NOTE(review): uses dict.iterkeys (Python 2).
    """
    global selected_pid
    try:
        key = screen.getkey().upper()
    except:
        return  # no key pending
    if key in ("KEY_DOWN", "J"):
        move_selection()
    elif key in ("KEY_UP", "K"):
        move_selection(reverse=True)
    elif key in ("A", "+"):
        # add a worker; zero the count so the spinner shows until refresh
        send_signal("TTIN")
        if selected_pid in gunicorns:
            gunicorns[selected_pid]["workers"] = 0
    elif key in ("W", "-"):
        # remove a worker, but never the last one
        if selected_pid in gunicorns:
            if gunicorns[selected_pid]["workers"] != 1:
                send_signal("TTOU")
                gunicorns[selected_pid]["workers"] = 0
    elif key in ("R",):
        if selected_pid in gunicorns:
            send_signal("HUP")
            del gunicorns[selected_pid]
            selected_pid = None
    elif key in ("T",):
        # restart every gunicorn master
        for pid in gunicorns.copy().iterkeys():
            selected_pid = pid
            send_signal("HUP")
            del gunicorns[selected_pid]
            selected_pid = None
    elif key in ("M", "-"):
        if selected_pid in gunicorns:
            send_signal("QUIT")
            del gunicorns[selected_pid]
            selected_pid = None
    elif key in ("Q",):
        raise KeyboardInterrupt
Check for a key being pressed and handle it if applicable .
12,550
def format_row(pid="", port="", name="", mem="", workers="", prefix_char=" "):
    """Pad each column consistently; the first call fixes the global screen width."""
    global screen_width
    row = "%s%-5s %-6s %-25s %8s %7s " % (prefix_char, pid, port, name, mem, workers)
    if screen_width is None:
        # the very first rendered row defines the overall screen width
        screen_width = len(row)
    return row
Applies consistant padding to each of the columns in a row and serves as the source of the overall screen width .
12,551
def display_output(screen):
    """Render the gunicorn table, spinner animation and instructions.

    NOTE(review): uses `count(2).next` (Python 2 iterator protocol).
    """
    format_row()  # primes the global screen_width
    screen_height = len(gunicorns) + len(instructions.split("\n")) + 9
    if not gunicorns:
        screen_height += 2
    screen.erase()
    win = curses.newwin(screen_height, screen_width + 6, 1, 3)
    win.bkgd(" ", curses.color_pair(1))
    win.border()
    x = 3
    # y() yields successive row numbers; calling it and discarding the
    # value skips a line
    blank_line = y = count(2).next
    win.addstr(y(), x, title.center(screen_width), curses.A_NORMAL)
    blank_line()
    win.addstr(y(), x, format_row(" PID", "PORT", "NAME", "MEM (MB)", "WORKERS"), curses.A_STANDOUT)
    if not gunicorns:
        blank_line()
        win.addstr(y(), x, no_gunicorns.center(screen_width), curses.A_NORMAL)
        blank_line()
    else:
        win.hline(y(), x, curses.ACS_HLINE, screen_width)
        for (i, pid) in enumerate(sorted(gunicorns.keys())):
            port = gunicorns[pid]["port"]
            name = gunicorns[pid]["name"]
            mem = "%#.3f" % (gunicorns[pid]["mem"] / 1000.)
            workers = gunicorns[pid]["workers"]
            if workers < 1:
                # negative worker count drives the |/-\ spinner frames
                gunicorns[pid]["workers"] -= 1
                chars = "|/-\\"
                workers *= -1
                if workers == len(chars):
                    gunicorns[pid]["workers"] = workers = 0
                workers = chars[workers]
            if pid == selected_pid:
                attr = curses.A_STANDOUT
                prefix_char = '> '
            else:
                attr = curses.A_NORMAL
                prefix_char = ' '
            win.addstr(y(), x, format_row(pid, port, name, mem, workers, prefix_char), attr)
        win.hline(y(), x, curses.ACS_HLINE, screen_width)
    blank_line()
    for line in instructions.split("\n"):
        win.addstr(y(), x, line.center(screen_width), curses.A_NORMAL)
    win.refresh()
Display the menu list of gunicorns .
12,552
def main():
    """Main entry point for gunicorn_console: curses setup, loop, teardown."""
    stdscr = curses.initscr()
    curses.start_color()
    curses.init_pair(1, foreground_colour, background_colour)
    curses.noecho()
    stdscr.keypad(True)
    stdscr.nodelay(True)
    try:
        curses.curs_set(False)
    except:
        pass  # some terminals cannot hide the cursor
    try:
        while True:
            try:
                update_gunicorns()
                handle_keypress(stdscr)
                display_output(stdscr)
                curses.napms(int(screen_delay * 1000))
            except KeyboardInterrupt:
                break
    finally:
        # always restore the terminal, even on unexpected errors
        curses.nocbreak()
        stdscr.keypad(False)
        curses.echo()
        curses.endwin()
Main entry point for gunicorn_console .
12,553
def _get_variant_silent(parser, variant):
    """Get a variant from the parser with "not found" logging disabled.

    Returns:
        Whatever parser.get_variant_genotypes returns.
    """
    prev_log = config.LOG_NOT_FOUND
    config.LOG_NOT_FOUND = False
    try:
        return parser.get_variant_genotypes(variant)
    finally:
        # fix: restore the flag even if the lookup raises, so logging is
        # not left permanently disabled
        config.LOG_NOT_FOUND = prev_log
Gets a variant from the parser while disabling logging .
12,554
def _attrs_ ( mcs , cls , attr_name : str ) -> Tuple [ Any , ... ] : return tuple ( map ( lambda x : getattr ( x , attr_name ) , list ( cls ) ) )
Returns a tuple containing just the value of the given attr_name of all the elements from the cls .
12,555
def _from_attr_ ( mcs , cls , attr_name : str , attr_value : Any ) -> TypeVar : return next ( iter ( filter ( lambda x : getattr ( x , attr_name ) == attr_value , list ( cls ) ) ) , None )
Returns the enumeration item regarding to the attribute name and value or None if not found for the given cls
12,556
def describe(cls) -> None:
    """Print a table showing every attribute for every definition in the class."""
    max_lengths = []
    for attr_name in cls.attr_names():
        attr_func = "%ss" % attr_name
        attr_list = list(map(str, getattr(cls, attr_func)())) + [attr_name]
        max_lengths.append(max(list(map(len, attr_list))))
    # fix: the row format was hard-coded to exactly three columns
    # ("{:>%d} | {:>%d} | {:>%d}"), crashing for classes with a different
    # number of attributes; build it from however many exist.
    row_format = " | ".join("{:>%d}" % width for width in max_lengths)
    headers = [attr_name.capitalize() for attr_name in cls.attr_names()]
    header_line = row_format.format(*headers)
    output = "Class: %s\n" % cls.__name__
    output += header_line + "\n"
    output += "-" * len(header_line) + "\n"
    for item in cls:
        values = [str(getattr(item, attr_name)) for attr_name in cls.attr_names()]
        output += row_format.format(*values) + "\n"
    print(output)
Prints in the console a table showing all the attributes for all the definitions inside the class
12,557
def read_iter(use_fpi):
    """Return the path to the final .mag file, either for the complex or the
    FPI inversion.

    Parameters
    ----------
    use_fpi : bool
        If True and ``exe/inv.lastmod_rho`` exists, read the FPI variant.

    Returns
    -------
    str
        The path stored in the last-iteration marker file, stripped of
        whitespace and any leading '../'.
    """
    filename_rhosuffix = 'exe/inv.lastmod_rho'
    filename = 'exe/inv.lastmod'
    if not os.path.isfile(filename):
        print('Inversion was not finished! No last iteration found.')
    if use_fpi is True:
        if os.path.isfile(filename_rhosuffix):
            filename = filename_rhosuffix
    # Context manager closes the handle (the original left the file open
    # until garbage collection).
    with open(filename, 'r') as fid:
        linestring = fid.readline().strip()
    linestring = linestring.replace('\n', '')
    linestring = linestring.replace('../', '')
    return linestring
Return the path to the final .mag file, either for the complex or the FPI inversion.
12,558
def list_datafiles():
    """Determine the tomodir type and last iteration, and list all files
    that will be plotted together with their type tags.
    """
    is_cplx, is_fpi = td_type()
    last_rho = read_iter(is_fpi)
    last_phase = read_iter(False)
    # Coverage and the final magnitude are always plotted.
    files = ['inv/coverage.mag']
    dtype = ['cov']
    files.append(last_rho)
    dtype.append('mag')
    if is_cplx:
        files.append(last_rho.replace('mag', 'pha'))
        dtype.append('pha')
    if is_fpi:
        files.append(last_phase.replace('mag', 'pha'))
        dtype.append('pha_fpi')
    return files, dtype
Get the type of the tomodir and the highest iteration to list all files which will be plotted .
12,559
def read_datafiles(files, dtype, column):
    """Load the data files and return cov, mag, phase and FPI phase values.

    Parameters
    ----------
    files : list of str
        File paths, parallel to *dtype*.
    dtype : list of str
        File type tag per file: 'cov', 'mag', 'pha' or 'pha_fpi'.
    column : int
        Column to read from 'mag' files.

    Returns
    -------
    tuple
        (cov, mag, pha, pha_fpi); entries whose file type was absent are
        empty lists.
    """
    # Initialise every result so the return is safe even when a file type
    # is absent (the original raised NameError for cov/mag in that case).
    cov = []
    mag = []
    pha = []
    pha_fpi = []
    for filename, filetype in zip(files, dtype):
        if filetype == 'cov':
            cov = load_cov(filename)
        elif filetype == 'mag':
            mag = load_rho(filename, column)
        elif filetype == 'pha':
            pha = load_rho(filename, 2)
        elif filetype == 'pha_fpi':
            pha_fpi = load_rho(filename, 2)
    return cov, mag, pha, pha_fpi
Load the datafiles and return cov mag phase and fpi phase values .
12,560
def load_cov(name):
    """Load a data file with coverage file structure.

    Skips one header and one footer line and returns the third column.
    """
    return np.genfromtxt(name, skip_header=1, skip_footer=1, usecols=([2]))
Load a datafile with coverage file structure .
12,561
def load_rho(name, column):
    """Load a data file with rho structure (like mag and phase files).

    Parameters
    ----------
    name : str
        Path of the file; the first line (header) is skipped.
    column : int
        Zero-based column index to load.

    Returns
    -------
    numpy.ndarray
        The requested column.

    Raises
    ------
    ValueError
        If the file cannot be read as requested (e.g. the column does not
        exist).  The original error is chained as the cause.
    """
    try:
        content = np.loadtxt(name, skiprows=1, usecols=([column]))
    except Exception as exc:
        # The original used a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and discarded the real cause.
        raise ValueError('Given column to open does not exist.') from exc
    return content
Load a datafile with rho structure like mag and phase
12,562
def calc_complex(mag, pha):
    """Calculate real and imaginary parts of the complex conductivity from
    magnitude and phase, in log10.

    *mag* is log10 resistivity magnitude, *pha* is phase in mrad.  A zero
    imaginary conductivity is mapped to NaN.
    """
    real = []
    imag = []
    for m, p in zip(mag, pha):
        # complex resistivity: 10**m * exp(1j * phase / 1000)
        rho = 10 ** m * math.e ** (1j * p / 1e3)
        sigma = 1 / rho
        real.append(math.log10(sigma.real))
        if sigma.imag == 0:
            imag.append(math.nan)
        else:
            imag.append(math.log10(abs(sigma.imag)))
    return real, imag
Calculate real and imaginary part of the complex conductivity from magnitude and phase in log10 .
12,563
def plot_ratio(cid, ax, plotman, title, alpha, vmin, vmax, xmin, xmax,
               zmin, zmax, xunit, cbtiks, elecs):
    """Plot the ratio of two conductivity directions.

    Fills in any missing axis/colorbar limits via check_minmax() and
    delegates the drawing to plotman.plot_elements_to_ax().

    Note: the *alpha* parameter is accepted but not used in this body.
    Returns (fig, ax, cnorm, cmap, cb).
    """
    cblabel = 'anisotropy ratio'
    zlabel = 'z [' + xunit + ']'
    xlabel = 'x [' + xunit + ']'
    # colormap used for the ratio display
    cm = 'RdYlGn'
    xmin, xmax, zmin, zmax, vmin, vmax = check_minmax(
        plotman,
        cid,
        xmin, xmax,
        zmin, zmax,
        vmin, vmax,
    )
    fig, ax, cnorm, cmap, cb, scalarMap = plotman.plot_elements_to_ax(
        cid=cid,
        ax=ax,
        xmin=xmin,
        xmax=xmax,
        zmin=zmin,
        zmax=zmax,
        cblabel=cblabel,
        cbnrticks=cbtiks,
        title=title,
        zlabel=zlabel,
        xlabel=xlabel,
        plot_colorbar=True,
        cmap_name=cm,
        no_elecs=elecs,
        cbmin=vmin,
        cbmax=vmax,
    )
    return fig, ax, cnorm, cmap, cb
Plot ratio of two conductivity directions .
12,564
def check_minmax(plotman, cid, xmin, xmax, zmin, zmax, vmin, vmax):
    """Derive any axis and colorbar limits that were not given explicitly.

    Missing axis limits come from the grid; missing colorbar limits come
    from the data.  An integer *cid* refers to a registered parameter set,
    anything else is treated as the data itself.
    """
    if xmin is None:
        xmin = plotman.grid.grid['x'].min()
    if xmax is None:
        xmax = plotman.grid.grid['x'].max()
    if zmin is None:
        zmin = plotman.grid.grid['z'].min()
    if zmax is None:
        zmax = plotman.grid.grid['z'].max()
    data = plotman.parman.parsets[cid] if isinstance(cid, int) else cid
    if vmin is None:
        vmin = data.min()
    if vmax is None:
        vmax = data.max()
    return xmin, xmax, zmin, zmax, vmin, vmax
Get min and max values for axes and colorbar if not given
12,565
def citation_director(**kwargs):
    """Build the citation element matching the 'qualifier' keyword.

    Returns the element instance, or None for unknown qualifiers.
    """
    qualifier = kwargs.get('qualifier', '')
    content = kwargs.get('content', '')
    dispatch = {
        'publicationTitle': CitationJournalTitle,
        'volume': CitationVolume,
        'issue': CitationIssue,
        'pageStart': CitationFirstpage,
        'pageEnd': CitationLastpage,
    }
    element_class = dispatch.get(qualifier)
    if element_class is None:
        return None
    return element_class(content=content)
Direct the citation elements based on their qualifier .
12,566
def identifier_director(**kwargs):
    """Build the identifier element matching the 'qualifier' keyword.

    Returns the element instance, or None for unknown qualifiers.
    """
    qualifier = kwargs.get('qualifier', '')
    content = kwargs.get('content', '')
    dispatch = {
        'ISBN': CitationISBN,
        'ISSN': CitationISSN,
        'DOI': CitationDOI,
        'REP-NO': CitationTechnicalReportNumber,
    }
    element_class = dispatch.get(qualifier)
    return element_class(content=content) if element_class is not None else None
Direct the identifier elements based on their qualifier .
12,567
def get_author(self, **kwargs):
    """Determine the author from a creator field.

    Returns the name only when the qualifier is 'aut', a child marks the
    creator type as 'per' (person) and a name child is present; otherwise
    returns None.
    """
    qualifier = kwargs.get('qualifier', '')
    children = kwargs.get('children', [])
    is_person = False
    name = None
    for child in children:
        if child.tag == 'type' and child.content == 'per':
            is_person = True
        elif child.tag == 'name':
            name = child.content
    if qualifier == 'aut' and is_person and name:
        return name
    return None
Determine the authors from the creator field .
12,568
def get_publisher_name(self, **kwargs):
    """Return the content of the first 'name' child, or None."""
    for child in kwargs.get('children', []):
        if child.tag == 'name':
            return child.content
    return None
Get the publisher name .
12,569
def get_publication_date(self, **kwargs):
    """Determine the creation date for the publication date.

    Tries, in order of specificity: full date, year+month, bare year.
    Returns a formatted date string ('MM/DD/YYYY', 'MM/YYYY' or 'YYYY'),
    or None when nothing matches or the match is not a valid date.

    NOTE(review): the module-level regexes are assumed to capture
    (year, month, day), (year, month) and (year,) respectively — confirm
    at their definitions.
    """
    date_string = kwargs.get('content', '')
    date_match = CREATION_DATE_REGEX.match(date_string)
    month_match = CREATION_MONTH_REGEX.match(date_string)
    year_match = CREATION_YEAR_REGEX.match(date_string)
    if date_match:
        (year, month, day) = date_match.groups('')
        try:
            # datetime.date validates the component ranges for us.
            creation_date = datetime.date(int(year), int(month), int(day))
        except ValueError:
            return None
        else:
            return '%s/%s/%s' % (
                format_date_string(creation_date.month),
                format_date_string(creation_date.day),
                creation_date.year,
            )
    elif month_match:
        (year, month) = month_match.groups('')
        try:
            creation_date = datetime.date(int(year), int(month), 1)
        except ValueError:
            return None
        else:
            return '%s/%s' % (
                format_date_string(creation_date.month),
                creation_date.year,
            )
    elif year_match:
        year = year_match.groups('')[0]
        return year
    else:
        return None
Determine the creation date for the publication date .
12,570
def get_online_date(self, **kwargs):
    """Format the metadata creation date as MM/DD/YYYY.

    Returns None unless the qualifier is 'metadataCreationDate'.
    """
    if kwargs.get('qualifier', '') != 'metadataCreationDate':
        return None
    content = kwargs.get('content', '')
    date_match = META_CREATION_DATE_REGEX.match(content)
    (year, month, day) = date_match.groups('')
    online_date = datetime.date(int(year), int(month), int(day))
    return '%s/%s/%s' % (
        format_date_string(online_date.month),
        format_date_string(online_date.day),
        online_date.year,
    )
Get the online date from the meta creation date .
12,571
def get_institution(self, **kwargs):
    """Return the degree-granting institution when qualified as 'grantor'."""
    if kwargs.get('qualifier', '') == 'grantor':
        return kwargs.get('content', '')
    return None
Get the dissertation institution .
12,572
def model_results(self) -> str:
    """Read and return the contents of this phase's model.results file."""
    results_path = os.path.join(self.directory, "model.results")
    with open(results_path) as results_file:
        return results_file.read()
Reads the model . results file
12,573
def header(self) -> str:
    """A header created by joining the pipeline, phase and data names."""
    parts = (self.pipeline, self.phase, self.data)
    return "/".join(parts)
A header created by joining the pipeline phase and data names
12,574
def optimizer(self) -> non_linear.NonLinearOptimizer:
    """Lazily unpickle and cache the optimizer used in this phase."""
    if self.__optimizer is None:
        pickle_path = os.path.join(self.directory, ".optimizer.pickle")
        with open(pickle_path, "r+b") as f:
            self.__optimizer = pickle.loads(f.read())
    return self.__optimizer
The optimizer object that was used in this phase
12,575
def phases_with(self, **kwargs) -> [PhaseOutput]:
    """Filter phases by attribute values.

    Keyword arguments are attribute-name/value pairs; a phase matches when
    every given attribute equals the given value.  With no arguments all
    phases are returned.
    """
    matches = []
    for phase in self.phases:
        if all(getattr(phase, key) == value for key, value in kwargs.items()):
            matches.append(phase)
    return matches
Filters phases . If no arguments are passed all phases are returned . Arguments must be key value pairs with phase data or pipeline as the key .
12,576
def optimizers_with(self, **kwargs) -> [non_linear.NonLinearOptimizer]:
    """Load the optimizers of every phase matching the filters."""
    optimizers = []
    for phase in self.phases_with(**kwargs):
        optimizers.append(phase.optimizer)
    return optimizers
Load a list of optimizers for phases in the directory with zero or more filters applied .
12,577
def model_results(self, **kwargs) -> str:
    """Collate the header and model results of every matching phase."""
    sections = []
    for phase in self.phases_with(**kwargs):
        sections.append("{}\n\n{}".format(phase.header, phase.model_results))
    return "\n\n".join(sections)
Collates model results from all phases in the directory or some subset if filters are applied .
12,578
def branches(config, searchstring=""):
    """List all branches; if exactly 1 matches a search, offer to check it out.

    Interactive: prints the branch list, and for a single search hit asks
    on stdin whether to check it out (default yes).  Errors out when
    nothing matches or when the hit is already the active branch.
    """
    repo = config.repo
    branches_ = list(find(repo, searchstring))
    if branches_:
        merged = get_merged_branches(repo)
        info_out("Found existing branches...")
        print_list(branches_, merged)
        if len(branches_) == 1 and searchstring:
            active_branch = repo.active_branch
            if active_branch == branches_[0]:
                error_out("You're already on '{}'".format(branches_[0].name))
            branch_name = branches_[0].name
            if len(branch_name) > 50:
                # keep the interactive prompt short for very long names
                branch_name = branch_name[:47] + "…"
            # anything except an explicit "n" counts as yes
            check_it_out = (
                input("Check out '{}'? [Y/n] ".format(branch_name)).lower().strip()
                != "n"
            )
            if check_it_out:
                branches_[0].checkout()
    elif searchstring:
        error_out("Found no branches matching '{}'.".format(searchstring))
    else:
        error_out("Found no branches.")
List all branches . And if exactly 1 found offer to check it out .
12,579
def decodebytes(input):
    """Decode a base64 string to a byte array, dispatching on the running
    Python major version."""
    if sys.version_info[0] >= 3:
        return _decodebytes_py3(input)
    return _decodebytes_py2(input)
Decode base64 string to byte array .
12,580
def capture(self, commit=""):
    """Capture the current state of a project based on its provider.

    Builds a repository client for the configured provider, delegates to
    the matching capturer and stores the resulting signature on the
    instance.

    :param commit: optional commit to capture; "" captures the current state
    :returns: self, to allow chaining
    :raises KeyError: when the provider is neither 'github' nor 'bitbucket'
    """
    self._validateProvider(self._provider)
    client = RepositoryClientBuilder().buildWithRemoteClient(self._provider)
    if self._provider["provider"] == "github":
        self._signature = ProjectGithubRepositoryCapturer(
            self._provider, client).capture(commit).signature()
    elif self._provider["provider"] == "bitbucket":
        self._signature = ProjectBitbucketRepositoryCapturer(
            self._provider, client).capture(commit).signature()
    else:
        raise KeyError(
            "Provider '%s' not recognized" % self._provider["provider"])
    return self
Capture the current state of a project based on its provider
12,581
def found_duplicates(counts):
    """Log that duplicated markers were found.

    :param counts: iterable of (marker, count) pairs for the duplicates.
    """
    _logger.warning("Duplicated markers found")
    for marker, count in counts:
        _logger.warning(" - {}: {:,d} times".format(marker, count))
    # the renaming itself happens elsewhere; this only announces it
    _logger.warning("Appending ':dupX' to the duplicated markers according "
                    "to their location in the file.")
Log that duplicates were found .
12,582
def patch_model_schemas(mapping):
    """Update mbdata.models to use different schema names.

    Tables whose schema appears in *mapping* get the mapped name; tables
    without a schema are left untouched.
    """
    from mbdata.models import Base
    for table in Base.metadata.sorted_tables:
        if table.schema is not None:
            table.schema = mapping.get(table.schema, table.schema)
Update mbdata . models to use different schema names
12,583
def detectRamPorts(stm: IfContainer, current_en: RtlSignalBase):
    """Detect RAM ports in an If statement.

    Walks the true branch, yielding (RAM_WRITE, mem, addr, en, data) for
    indexed assignments into an HArray and (RAM_READ, mem, addr, en, dst)
    for assignments driven by a single INDEX operator on an HArray.
    Statements with an else/elif branch abort detection entirely.
    """
    if stm.ifFalse or stm.elIfs:
        # the enable would no longer be a simple conjunction of conditions
        return
    for _stm in stm.ifTrue:
        if isinstance(_stm, IfContainer):
            # nested condition: AND it into the enable accumulated so far
            yield from detectRamPorts(_stm, _stm.cond & current_en)
        elif isinstance(_stm, Assignment):
            if isinstance(_stm.dst._dtype, HArray):
                assert len(_stm.indexes) == 1, "one address per RAM port"
                w_addr = _stm.indexes[0]
                mem = _stm.dst
                yield (RAM_WRITE, mem, w_addr, current_en, _stm.src)
            elif _stm.src.hidden and len(_stm.src.drivers) == 1:
                # source is a hidden signal with a single driver: check for
                # an array-index read
                op = _stm.src.drivers[0]
                mem = op.operands[0]
                if isinstance(mem._dtype, HArray) and op.operator == AllOps.INDEX:
                    r_addr = op.operands[1]
                    if _stm.indexes:
                        raise NotImplementedError()
                    yield (RAM_READ, mem, r_addr, current_en, _stm.dst)
Detect RAM ports in If statement
12,584
def addInputPort(self, node, name,
                 i: Union[Value, RtlSignalBase], side=PortSide.WEST):
    """Add an input port on a subnode and connect its driver.

    :param node: the LNode which receives the new port
    :param name: name of the new port
    :param i: driver of the port (LPort, constant or signal)
    :param side: side of the node where the port is placed
    """
    root = self.node
    port = node.addPort(name, PortType.INPUT, side)
    netCtxs = self.netCtxs
    if isinstance(i, LPort):
        # already a layout port: connect directly
        root.addEdge(i, port)
    elif isConst(i):
        # constants get a dedicated value node, shared per value
        i = i.staticEval()
        c, wasThereBefore = self.netCtxs.getDefault(i)
        if not wasThereBefore:
            v = ValueAsLNode(root, i).east[0]
            c.addDriver(v)
        c.addEndpoint(port)
    elif i.hidden:
        # hidden signals come from operator trees; load them lazily
        ctx, wasThereBefore = netCtxs.getDefault(i)
        if not wasThereBefore:
            self.lazyLoadNet(i)
        ctx.addEndpoint(port)
    else:
        portCtx = self.portCtx
        rootCtx, _ = self.rootNetCtxs.getDefault(i)
        if self.isVirtual:
            # no real node boundary: connect in the parent context directly
            rootCtx.addEndpoint(port)
        else:
            # cross the node boundary through a boundary port
            isNewlySpotted = (i, PortType.INPUT) not in portCtx.data
            src = portCtx.register(i, PortType.INPUT)
            ctx, _ = netCtxs.getDefault(i)
            ctx.addDriver(src)
            ctx.addEndpoint(port)
            if isNewlySpotted:
                # also connect the outer side of the boundary port
                _port = portCtx.getOutside(i, PortType.INPUT)
                rootCtx.addEndpoint(_port)
Add and connect input port on subnode
12,585
def addOutputPort(self, node: LNode, name: str,
                  out: Optional[Union[RtlSignalBase, LPort]],
                  side=PortSide.EAST):
    """Add an output port on a subnode and route it to the outside.

    :param node: the LNode which receives the new port
    :param name: name of the new port
    :param out: target to drive (LPort or signal); None adds an unconnected port
    :param side: side of the node where the port is placed
    :returns: the newly created LPort
    :raises ValueError: when *out* is a hidden signal
    """
    oPort = node.addPort(name, PortType.OUTPUT, side)
    if out is not None:
        if isinstance(out, LPort):
            self.node.addEdge(oPort, out)
        elif out.hidden:
            raise ValueError(
                "Hidden signals should not be connected to outside", name)
        elif self.isVirtual:
            # virtual wrapper: only register the driver in the local context
            ctx, _ = self.netCtxs.getDefault(out)
            ctx.addDriver(oPort)
        else:
            # real boundary: connect the inner edge and drive the outer net
            _out = self.portCtx.getInside(out, PortType.OUTPUT)
            self.node.addEdge(oPort, _out, originObj=out)
            ooPort = self.portCtx.getOutside(out, PortType.OUTPUT)
            ctx, _ = self.rootNetCtxs.getDefault(out)
            ctx.addDriver(ooPort)
    return oPort
Add and connect output port on subnode
12,586
def renderContent(self):
    """Walk from outputs to inputs for each public signal.

    Registers a port of the wrap node if required and lazily loads all
    operator and statement nodes for the signals.  If statements touching
    HArray signals are scanned for RAM read/write ports, which get
    dedicated nodes; everything else is rendered generically.
    """
    stm = self.stm
    portCtx = self.portCtx
    # register boundary ports for all statement outputs
    for o in stm._outputs:
        if not self.isVirtual:
            portCtx.register(o, PortType.OUTPUT)
    # RAM ports can only originate from If statements over HArray signals
    canHaveRamPorts = isinstance(stm, IfContainer) and arr_any(
        chain(stm._inputs, stm._outputs),
        lambda s: isinstance(s._dtype, HArray))
    # tmp signals from RAM ports
    consumedOutputs = set()
    if canHaveRamPorts:
        for pType, memSig, addrSig, enSig, io in detectRamPorts(stm, stm.cond):
            if pType == RAM_READ:
                self.createRamReadNode(memSig, enSig, addrSig, io, True)
                consumedOutputs.add(io)
            elif pType == RAM_WRITE:
                self.createRamWriteNode(memSig, enSig, addrSig, io, True)
                consumedOutputs.add(memSig)
            else:
                raise TypeError()
    # render whatever was not consumed by a RAM port
    for o in stm._outputs:
        if o not in consumedOutputs:
            self.renderForSignal(stm, o, True)
    if not self.isVirtual:
        self.netCtxs.applyConnections(self.node)
Walk from outputs to inputs for each public signal; register a port of the wrap node if required, and lazily load all operator and statement nodes for the signals.
12,587
def generate(self, project):
    """Derive the package name for *project*.

    Package name construction is based on the provider, not on the prefix
    (the prefix does not have to equal provider_prefix).  Explicit
    overrides from self.s2n_mapping win; otherwise well-known import-path
    hosts are rewritten and the result is prefixed with 'golang-'.

    :returns: self, with the generated name stored in self._name
    """
    # explicit ip-prefix -> package overrides win over all heuristics
    for assignment in self.s2n_mapping:
        if assignment["ipprefix"] == project:
            self._name = assignment["package"]
            return self
    name = project
    if name.startswith("github.com"):
        name = re.sub(r"^github\.com", "github", name)
    if name.startswith("gopkg.in"):
        name = re.sub(r"gopkg\.in", "gopkg", name)
        # strip gopkg.in version markers like ".v1" and "/v1/"
        name = re.sub(r"\.v\d", "", name)
        name = re.sub(r"/v\d/", "/", name)
    if name.startswith("code.google.com/p"):
        name = re.sub(r"^code\.google\.com/p", "googlecode", name)
    if name.startswith("golang.org/x"):
        name = re.sub(r"^golang\.org/x", "golangorg", name)
    if name.startswith("google.golang.org"):
        name = re.sub(r"^google\.golang\.org", "googlegolangorg", name)
    if name.startswith("bitbucket.org"):
        name = re.sub(r"^bitbucket\.org", "bitbucket", name)
    if name.startswith("k8s.io"):
        name = re.sub(r"^k8s\.io", "k8s", name)
    if name.endswith(".org"):
        name = re.sub(r"\.org$", "", name)
    # path separators become dashes in the package name
    name = name.replace("/", "-")
    self._name = "golang-%s" % name
    return self
Package name construction is based on the provider, not on the prefix. The prefix does not have to equal provider_prefix.
12,588
def hash_host(hostname, salt=None):
    """Return a hashed form of the hostname, as used by OpenSSH when storing
    hashed hostnames in the known_hosts file.

    :param hostname: the hostname to hash
    :param salt: optional salt; may already be in '|1|salt|hash' form, in
        which case only the salt field is reused.  A random salt is
        generated when omitted.
    :returns: a '|1|<salt>|<hmac>' string without newlines
    """
    if salt is None:
        salt = os.urandom(sha1().digest_size)
    else:
        if salt.startswith('|1|'):
            # salt already in known_hosts format; extract the salt field
            salt = salt.split('|')[2]
        salt = decodebytes(b(salt))
    assert len(salt) == sha1().digest_size
    hmac = HMAC(salt, b(hostname), sha1).digest()
    hostkey = '|1|%s|%s' % (u(encodebytes(salt)), u(encodebytes(hmac)))
    # encodebytes() appends newlines; the known_hosts format has none
    return hostkey.replace('\n', '')
Return a hashed form of the hostname as used by OpenSSH when storing hashed hostnames in the known_hosts file .
12,589
def _read_elem_nodes(self, fid):
    """Read the nodes from an opened elem.dat file and correct for CutMcK
    transformations.

    Populates self.nodes (raw, presort, sorted arrays plus CutMcK index
    maps when applicable) and self.nr_of_nodes.
    """
    nodes = {}
    nodes_raw = np.empty((self.header['nr_nodes'], 3), dtype=float)
    for nr in range(0, self.header['nr_nodes']):
        node_line = fid.readline().lstrip()
        nodes_raw[nr, :] = np.fromstring(node_line, dtype=float, sep=' ')
    # round coordinates to 5 decimal places
    nodes_raw[:, 1:3] = np.round(nodes_raw[:, 1:3], 5)
    # NOTE(review): comparing an N-element column against
    # list(range(1, N)) (length N-1) looks off-by-one and relies on numpy
    # broadcasting semantics -- verify this really detects CutMcK grids.
    if (nodes_raw[:, 0] != list(range(1, nodes_raw.shape[0]))):
        self.header['cutmck'] = True
        print('This grid was sorted using CutMcK. The nodes were resorted!')
    else:
        self.header['cutmck'] = False
    if (self.header['cutmck']):
        # undo the CutMcK reordering: place each node at its original index
        nodes_cutmck = np.empty_like(nodes_raw)
        nodes_cutmck_index = np.zeros(nodes_raw.shape[0], dtype=int)
        for node in range(0, self.header['nr_nodes']):
            new_index = np.where(nodes_raw[:, 0].astype(int) == (node + 1))
            nodes_cutmck[new_index[0], 1:3] = nodes_raw[node, 1:3]
            nodes_cutmck[new_index[0], 0] = new_index[0]
            nodes_cutmck_index[node] = new_index[0]
        nodes_sorted = nodes_cutmck[nodes_cutmck_index, :]
        nodes['presort'] = nodes_cutmck
        nodes['cutmck_index'] = nodes_cutmck_index
        nodes['rev_cutmck_index'] = np.argsort(nodes_cutmck_index)
    else:
        nodes_sorted = nodes_raw
        nodes['presort'] = nodes_raw
    nodes['raw'] = nodes_raw
    nodes['sorted'] = nodes_sorted
    self.nodes = nodes
    self.nr_of_nodes = nodes['raw'].shape[0]
Read the nodes from an opened elem . dat file . Correct for CutMcK transformations .
12,590
def calculate_dimensions(self):
    """For a regular grid, derive node/element counts in x and z direction."""
    sorted_x = np.sort(self.grid['x'][:, 0])
    # number of nodes in z equals the multiplicity of the smallest x value
    self.nr_nodes_z = np.where(sorted_x == sorted_x[0])[0].size
    self.nr_elements_x = self.elements.shape[0] / (self.nr_nodes_z - 1)
    self.nr_nodes_x = self.nr_elements_x + 1
    self.nr_elements_z = self.nr_nodes_z - 1
For a regular grid calculate the element and node dimensions
12,591
def _read_elem_neighbors ( self , fid ) : sizes = sum ( [ len ( self . element_data [ key ] ) for key in ( 11 , 12 ) if self . element_data . get ( key , None ) is not None ] ) self . neighbors = [ ] try : for i in range ( 0 , sizes ) : self . neighbors . append ( int ( fid . readline ( ) . strip ( ) ) ) except Exception as e : raise Exception ( 'Not enough neighbors in file' )
Read the boundary - element - neighbors from the end of the file
12,592
def load_grid(self, elem_file, elec_file):
    """Load elem.dat and elec.dat in one call.

    :param elem_file: path of the elem.dat file
    :param elec_file: path of the elec.dat file
    """
    self.load_elem_file(elem_file)
    self.load_elec_file(elec_file)
Load elem . dat and elec . dat
12,593
def get_element_centroids(self):
    """Return the (x, z) center points of all elements as an (N, 2) array."""
    x_means = np.mean(self.grid['x'], axis=1)
    z_means = np.mean(self.grid['z'], axis=1)
    return np.vstack((x_means, z_means)).T
return the central points of all elements
12,594
def get_internal_angles(self):
    """Compute all internal angles of the grid elements, in degrees.

    Returns an array of shape (nr_elements, nr_corners_per_element).
    """
    all_angles = []
    for elx, elz in zip(self.grid['x'], self.grid['z']):
        corners = np.vstack((elx, elz))
        nr_corners = elx.size
        element_angles = []
        for idx in range(nr_corners):
            prev_idx = (idx - 1) % nr_corners
            next_idx = (idx + 1) % nr_corners
            vec_in = corners[:, idx] - corners[:, prev_idx]
            vec_out = corners[:, next_idx] - corners[:, idx]
            # signed turn between the incoming and outgoing edge vectors
            turn = np.arctan2(
                vec_in[0] * vec_out[1] - vec_in[1] * vec_out[0],
                vec_in[0] * vec_out[0] + vec_in[1] * vec_out[1],
            )
            element_angles.append((np.pi - turn) * 180 / np.pi)
        all_angles.append(element_angles)
    return np.array(all_angles)
Compute all internal angles of the grid
12,595
def Wm(self):
    """Return the smoothing regularization matrix Wm of the grid.

    For each pair of neighboring elements the stencil entry is weighted by
    the shared edge length divided by the centroid distance; the diagonal
    accumulates the positive weights, off-diagonals get the negative.

    NOTE(review): item assignment on a csr_matrix is slow; assembling via
    lil_matrix/coo and converting would be faster with unchanged results.
    """
    centroids = self.get_element_centroids()
    Wm = scipy.sparse.csr_matrix(
        (self.nr_of_elements, self.nr_of_elements))
    for i, nb in enumerate(self.element_neighbors):
        for j, edges in zip(nb, self.element_neighbors_edges[i]):
            # coordinates of the two nodes of the shared edge (columns 1:)
            edge_coords = self.nodes['presort'][edges][:, 1:]
            edge_length = np.linalg.norm(edge_coords[1, :] - edge_coords[0, :])
            distance = np.linalg.norm(centroids[i] - centroids[j])
            Wm[i, i] += edge_length / distance
            Wm[i, j] -= edge_length / distance
    return Wm
Return the smoothing regularization matrix Wm of the grid
12,596
def create_tomodir(self, directory):
    """Create a tomodir subdirectory structure in the given directory.

    The working directory is temporarily changed and restored afterwards.
    """
    pwd = os.getcwd()
    if not os.path.isdir(directory):
        os.makedirs(directory)
    os.chdir(directory)
    subdirectories = (
        'config',
        'exe',
        'grid',
        'mod',
        'mod/pot',
        'mod/sens',
        'rho',
    )
    for subdirectory in subdirectories:
        if not os.path.isdir(subdirectory):
            os.makedirs(subdirectory)
    os.chdir(pwd)
Create a tomodir subdirectory structure in the given directory
12,597
def load_rho_file(self, filename):
    """Load a forward model from a rho.dat file and register its parameter
    sets as the magnitude and phase models.

    :returns: the parameter-set ids [magnitude_pid, phase_pid]
    """
    pids = self.parman.load_from_rho_file(filename)
    magnitude_pid, phase_pid = pids[0], pids[1]
    self.register_magnitude_model(magnitude_pid)
    self.register_phase_model(phase_pid)
    return pids
Load a forward model from a rho . dat file
12,598
def save_to_tomodir(self, directory):
    """Save the tomodir instance to a directory structure.

    Creates the canonical subdirectories, writes grid files and the CRMod
    configuration unconditionally, and writes configs, forward model,
    measurements, sensitivities, potentials, CRTomo configuration and
    noise model only when the corresponding data is present.
    """
    self.create_tomodir(directory)
    self.grid.save_elem_file(directory + os.sep + 'grid/elem.dat')
    self.grid.save_elec_file(directory + os.sep + 'grid/elec.dat')
    if self.configs.configs is not None:
        self.configs.write_crmod_config(
            directory + os.sep + 'config/config.dat')
    if self.assignments['forward_model'] is not None:
        # forward_model stores (magnitude_pid, phase_pid)
        self.parman.save_to_rho_file(
            directory + os.sep + 'rho/rho.dat',
            self.assignments['forward_model'][0],
            self.assignments['forward_model'][1],
        )
    self.crmod_cfg.write_to_file(directory + os.sep + 'exe/crmod.cfg')
    if self.assignments['measurements'] is not None:
        self.configs.write_crmod_volt(
            directory + os.sep + 'mod/volt.dat',
            self.assignments['measurements'])
    if self.assignments['sensitivities'] is not None:
        self._save_sensitivities(
            directory + os.sep + 'mod/sens',
        )
    if self.assignments['potentials'] is not None:
        self._save_potentials(
            directory + os.sep + 'mod/pot',
        )
    self.crtomo_cfg.write_to_file(directory + os.sep + 'exe/crtomo.cfg')
    if self.noise_model is not None:
        self.noise_model.write_crt_noisemod(
            directory + os.sep + 'exe/crt.noisemod')
    # make sure the inversion output directory exists
    if not os.path.isdir(directory + os.sep + 'inv'):
        os.makedirs(directory + os.sep + 'inv')
Save the tomodir instance to a directory structure .
12,599
def _save_sensitivities(self, directory):
    """Save sensitivities to a directory, one file per measurement
    configuration, numbered from 1.
    """
    print('saving sensitivities')
    # zero-pad file numbers to the width of the largest configuration index
    digits = int(np.ceil(np.log10(self.configs.configs.shape[0])))
    for i in range(0, self.configs.configs.shape[0]):
        sens_data, meta_data = self.get_sensitivity(i)
        filename_raw = 'sens{0:0' + '{0}'.format(digits) + '}.dat'
        filename = directory + os.sep + filename_raw.format(i + 1)
        grid_xz = self.grid.get_element_centroids()
        # NOTE(review): both coordinate columns use grid_xz[:, 0]; the
        # second looks like it should be grid_xz[:, 1] (z) -- confirm.
        all_data = np.vstack((
            grid_xz[:, 0],
            grid_xz[:, 0],
            sens_data[0],
            sens_data[1],
        )).T
        with open(filename, 'wb') as fid:
            # first line: the two meta values, then the data table
            fid.write(bytes(
                '{0} {1}\n'.format(meta_data[0], meta_data[1]),
                'utf-8',
            ))
            np.savetxt(fid, all_data)
save sensitivities to a directory