idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
51,500
def _generate_hex_for_uris ( self , uris ) : return sha256 ( ( ":" . join ( uris ) + str ( time ( ) ) ) . encode ( ) ) . hexdigest ( )
Given uris generate and return hex version of it
51,501
def create_dataset(self, dataset_id, friendly_name=None, description=None,
                   access=None, location=None, project_id=None):
    """Create a new BigQuery dataset.

    Args:
        dataset_id: unique id of the dataset to create.
        friendly_name: optional human-readable name.
        description: optional dataset description.
        access: optional access-control entries for the dataset resource.
        location: optional geographic location for the dataset.
        project_id: optional project override; falls back to the client's
            default project via _get_project_id.

    Returns:
        When ``self.swallow_results`` is true: True on success, False on
        HttpError. Otherwise: the raw API response on success, or an empty
        dict on HttpError.
    """
    project_id = self._get_project_id(project_id)
    try:
        datasets = self.bigquery.datasets()
        dataset_data = self.dataset_resource(dataset_id,
                                             project_id=project_id,
                                             friendly_name=friendly_name,
                                             description=description,
                                             access=access,
                                             location=location)
        response = datasets.insert(projectId=project_id,
                                   body=dataset_data).execute(
            num_retries=self.num_retries)
        if self.swallow_results:
            return True
        else:
            return response
    except HttpError as e:
        # Failure is logged and reported via the return value, not raised.
        logger.error('Cannot create dataset {0}, {1}'.format(dataset_id, e))
        if self.swallow_results:
            return False
        else:
            return {}
Create a new BigQuery dataset .
51,502
def delete_dataset(self, dataset_id, delete_contents=False, project_id=None):
    """Delete a BigQuery dataset.

    Args:
        dataset_id: id of the dataset to delete.
        delete_contents: when True, also delete the dataset's tables.
        project_id: optional project override.

    Returns:
        When ``self.swallow_results`` is true: True on success, False on
        HttpError. Otherwise: the raw API response on success, or an empty
        dict on HttpError.
    """
    project_id = self._get_project_id(project_id)
    try:
        datasets = self.bigquery.datasets()
        request = datasets.delete(projectId=project_id,
                                  datasetId=dataset_id,
                                  deleteContents=delete_contents)
        response = request.execute(num_retries=self.num_retries)
        if self.swallow_results:
            return True
        else:
            return response
    except HttpError as e:
        # Failure is logged and reported via the return value, not raised.
        logger.error('Cannot delete dataset {0}: {1}'.format(dataset_id, e))
        if self.swallow_results:
            return False
        else:
            return {}
Delete a BigQuery dataset .
51,503
def update_dataset(self, dataset_id, friendly_name=None, description=None,
                   access=None, project_id=None):
    """Update an existing dataset.

    Uses the datasets.update method, which replaces the ENTIRE dataset
    resource (unlike patch, which only replaces fields that are provided).

    Args:
        dataset_id: id of the dataset to update.
        friendly_name: optional human-readable name.
        description: optional dataset description.
        access: optional access-control entries for the dataset resource.
        project_id: optional project override.

    Returns:
        When ``self.swallow_results`` is true: True on success, False on
        HttpError. Otherwise: the raw API response on success, or an empty
        dict on HttpError.
    """
    project_id = self._get_project_id(project_id)
    try:
        datasets = self.bigquery.datasets()
        body = self.dataset_resource(dataset_id,
                                     friendly_name=friendly_name,
                                     description=description,
                                     access=access,
                                     project_id=project_id)
        request = datasets.update(projectId=project_id,
                                  datasetId=dataset_id,
                                  body=body)
        response = request.execute(num_retries=self.num_retries)
        if self.swallow_results:
            return True
        else:
            return response
    except HttpError as e:
        # Failure is logged and reported via the return value, not raised.
        logger.error('Cannot update dataset {0}: {1}'.format(dataset_id, e))
        if self.swallow_results:
            return False
        else:
            return {}
Updates information in an existing dataset . The update method replaces the entire dataset resource whereas the patch method only replaces fields that are provided in the submitted dataset resource .
51,504
def schema_from_record(record, timestamp_parser=default_timestamp_parser):
    """Generate a BigQuery schema from an example record to be inserted.

    Each (key, value) pair of *record* is described independently by
    describe_field; the resulting schema elements are returned as a list.
    """
    described = []
    for column, sample_value in list(record.items()):
        described.append(
            describe_field(column, sample_value,
                           timestamp_parser=timestamp_parser))
    return described
Generate a BigQuery schema given an example of a record that is to be inserted into BigQuery .
51,505
def describe_field(k, v, timestamp_parser=default_timestamp_parser):
    """Given a key (column name) and an example value stored in that column,
    return a BigQuery schema element describing the field.

    Raises:
        Exception: if ``v`` is an empty list (element type cannot be
            inferred).
        InvalidTypeException: if the value's type is unsupported; for nested
            records the offending key is reported as a dotted path
            ("parent.child").
    """
    def bq_schema_field(name, bq_type, mode):
        # Minimal schema-element dict in the shape the BigQuery API expects.
        return {"name": name, "type": bq_type, "mode": mode}
    if isinstance(v, list):
        if len(v) == 0:
            raise Exception(
                "Can't describe schema because of empty list {0}:[]".format(k))
        # Lists map to REPEATED mode; infer element type from the first item.
        v = v[0]
        mode = "repeated"
    else:
        mode = "nullable"
    bq_type = bigquery_type(v, timestamp_parser=timestamp_parser)
    if not bq_type:
        raise InvalidTypeException(k, v)
    field = bq_schema_field(k, bq_type, mode)
    if bq_type == "record":
        # Nested record: recurse to describe the sub-record's fields.
        try:
            field['fields'] = schema_from_record(v, timestamp_parser)
        except InvalidTypeException as e:
            # Re-raise with a dotted key path so the caller sees the full
            # location of the invalid value.
            raise InvalidTypeException("%s.%s" % (k, e.key), e.value)
    return field
Given a key representing a column name and value representing the value stored in the column return a representation of the BigQuery schema element describing that field . Raise errors if invalid value types are provided .
51,506
def render_query(dataset, tables, select=None, conditions=None,
                 groupings=None, having=None, order_by=None, limit=None):
    """Assemble a full query over *tables* in *dataset*.

    Each clause is rendered by its dedicated helper; missing clause
    arguments render as empty strings. Returns None when either the
    dataset or the tables are missing.
    """
    if None in (dataset, tables):
        return None
    clauses = (
        _render_select(select),
        _render_sources(dataset, tables),
        _render_conditions(conditions),
        _render_groupings(groupings),
        _render_having(having),
        _render_order(order_by),
        _render_limit(limit),
    )
    # %s-formatting (rather than join) tolerates non-string clause values.
    return "%s %s %s %s %s %s %s" % clauses
Render a query that will run over the given tables using the specified parameters .
51,507
def _render_select ( selections ) : if not selections : return 'SELECT *' rendered_selections = [ ] for name , options in selections . items ( ) : if not isinstance ( options , list ) : options = [ options ] original_name = name for options_dict in options : name = original_name alias = options_dict . get ( 'alias' ) alias = "as %s" % alias if alias else "" formatter = options_dict . get ( 'format' ) if formatter : name = _format_select ( formatter , name ) rendered_selections . append ( "%s %s" % ( name , alias ) ) return "SELECT " + ", " . join ( rendered_selections )
Render the selection part of a query .
51,508
def _format_select ( formatter , name ) : for caster in formatter . split ( '-' ) : if caster == 'SEC_TO_MICRO' : name = "%s*1000000" % name elif ':' in caster : caster , args = caster . split ( ':' ) name = "%s(%s,%s)" % ( caster , name , args ) else : name = "%s(%s)" % ( caster , name ) return name
Modify the query selector by applying any formatters to it .
51,509
def _render_sources ( dataset , tables ) : if isinstance ( tables , dict ) : if tables . get ( 'date_range' , False ) : try : dataset_table = '.' . join ( [ dataset , tables [ 'table' ] ] ) return "FROM (TABLE_DATE_RANGE([{}], TIMESTAMP('{}')," " TIMESTAMP('{}'))) " . format ( dataset_table , tables [ 'from_date' ] , tables [ 'to_date' ] ) except KeyError as exp : logger . warn ( 'Missing parameter %s in selecting sources' % ( exp ) ) else : return "FROM " + ", " . join ( [ "[%s.%s]" % ( dataset , table ) for table in tables ] )
Render the source part of a query .
51,510
def _render_conditions(conditions):
    """Render the WHERE clause of a query.

    *conditions* is a list of dicts, each with 'field', 'type' and
    'comparators' keys; malformed entries are logged and skipped.

    Returns:
        '' when nothing renders, otherwise 'WHERE <c1> AND <c2> ...'.
    """
    if not conditions:
        return ""
    rendered_conditions = []
    for condition in conditions:
        field = condition.get('field')
        field_type = condition.get('type')
        comparators = condition.get('comparators')
        if None in (field, field_type, comparators) or not comparators:
            # Skip (but log) malformed conditions instead of failing the
            # whole query.
            logger.warn('Invalid condition passed in: %s' % condition)
            continue
        rendered_conditions.append(
            _render_condition(field, field_type, comparators))
    if not rendered_conditions:
        return ""
    return "WHERE %s" % (" AND ".join(rendered_conditions))
Render the conditions part of a query .
51,511
def _render_condition(field, field_type, comparators):
    """Render a single query condition over *field*.

    Each comparator dict supplies 'condition' (e.g. 'IN', 'BETWEEN', '>='),
    'value', and an optional 'negate' flag. Negated and non-negated
    sub-conditions are AND-ed within their group, and the two groups are
    then AND-ed together.
    """
    field_type = field_type.upper()
    negated_conditions, normal_conditions = [], []
    for comparator in comparators:
        condition = comparator.get("condition").upper()
        negated = "NOT " if comparator.get("negate") else ""
        value = comparator.get("value")
        if condition == "IN":
            # Render each member (sorted for deterministic output) inside
            # parentheses.
            if isinstance(value, (list, tuple, set)):
                value = ', '.join(
                    sorted([_render_condition_value(v, field_type)
                            for v in value]))
            else:
                value = _render_condition_value(value, field_type)
            value = "(" + value + ")"
        elif condition == "IS NULL" or condition == "IS NOT NULL":
            # NULL checks take no value and short-circuit the whole
            # condition: return immediately, ignoring other comparators.
            return field + " " + condition
        elif condition == "BETWEEN":
            if isinstance(value, (tuple, list, set)) and len(value) == 2:
                value = ' AND '.join(
                    sorted([_render_condition_value(v, field_type)
                            for v in value]))
            elif isinstance(value, (tuple, list, set)) and len(value) != 2:
                # BETWEEN needs exactly two bounds; log and fall through
                # (value is left unrendered in this case).
                logger.warn('Invalid condition passed in: %s' % condition)
            else:
                value = _render_condition_value(value, field_type)
        rendered_sub_condition = "%s%s %s %s" % (negated, field, condition,
                                                 value)
        if comparator.get("negate"):
            negated_conditions.append(rendered_sub_condition)
        else:
            normal_conditions.append(rendered_sub_condition)
    rendered_normal = " AND ".join(normal_conditions)
    rendered_negated = " AND ".join(negated_conditions)
    if rendered_normal and rendered_negated:
        return "((%s) AND (%s))" % (rendered_normal, rendered_negated)
    return "(%s)" % (rendered_normal or rendered_negated)
Render a single query condition .
51,512
def _render_condition_value ( value , field_type ) : if field_type == "BOOLEAN" : value = 1 if value else 0 elif field_type in ( "STRING" , "INTEGER" , "FLOAT" ) : value = "'%s'" % ( value ) elif field_type in ( "TIMESTAMP" ) : value = "'%s'" % ( str ( value ) ) return "%s(%s)" % ( field_type , value )
Render a query condition value .
51,513
def _render_having ( having_conditions ) : if not having_conditions : return "" rendered_conditions = [ ] for condition in having_conditions : field = condition . get ( 'field' ) field_type = condition . get ( 'type' ) comparators = condition . get ( 'comparators' ) if None in ( field , field_type , comparators ) or not comparators : logger . warn ( 'Invalid condition passed in: %s' % condition ) continue rendered_conditions . append ( _render_condition ( field , field_type , comparators ) ) if not rendered_conditions : return "" return "HAVING %s" % ( " AND " . join ( rendered_conditions ) )
Render the having part of a query .
51,514
def init_app(self, app):
    """Init the Flask-MQTT addon from the Flask app's configuration.

    Reads the MQTT_* config keys, configures the underlying client
    (client id, transport, protocol, callbacks, credentials, TLS,
    last-will message) and then connects to the broker.
    """
    self.client_id = app.config.get("MQTT_CLIENT_ID", "")
    # NOTE(review): `unicode` implies Python 2 (or a compat alias) —
    # the client id is handed to the client as bytes.
    if isinstance(self.client_id, unicode):
        self.client._client_id = self.client_id.encode('utf-8')
    else:
        self.client._client_id = self.client_id
    self.client._transport = app.config.get("MQTT_TRANSPORT", "tcp").lower()
    self.client._protocol = app.config.get("MQTT_PROTOCOL_VERSION", MQTTv311)
    self.client.on_connect = self._handle_connect
    self.client.on_disconnect = self._handle_disconnect
    self.username = app.config.get("MQTT_USERNAME")
    self.password = app.config.get("MQTT_PASSWORD")
    self.broker_url = app.config.get("MQTT_BROKER_URL", "localhost")
    self.broker_port = app.config.get("MQTT_BROKER_PORT", 1883)
    self.tls_enabled = app.config.get("MQTT_TLS_ENABLED", False)
    self.keepalive = app.config.get("MQTT_KEEPALIVE", 60)
    self.last_will_topic = app.config.get("MQTT_LAST_WILL_TOPIC")
    self.last_will_message = app.config.get("MQTT_LAST_WILL_MESSAGE")
    self.last_will_qos = app.config.get("MQTT_LAST_WILL_QOS", 0)
    self.last_will_retain = app.config.get("MQTT_LAST_WILL_RETAIN", False)
    if self.tls_enabled:
        # CA certs are mandatory when TLS is enabled (KeyError otherwise);
        # the remaining TLS settings are optional.
        self.tls_ca_certs = app.config["MQTT_TLS_CA_CERTS"]
        self.tls_certfile = app.config.get("MQTT_TLS_CERTFILE")
        self.tls_keyfile = app.config.get("MQTT_TLS_KEYFILE")
        self.tls_cert_reqs = app.config.get("MQTT_TLS_CERT_REQS",
                                            ssl.CERT_REQUIRED)
        self.tls_version = app.config.get("MQTT_TLS_VERSION",
                                          ssl.PROTOCOL_TLSv1)
        self.tls_ciphers = app.config.get("MQTT_TLS_CIPHERS")
        self.tls_insecure = app.config.get("MQTT_TLS_INSECURE", False)
    if self.last_will_topic is not None:
        # Register the last-will before connecting so the broker has it.
        self.client.will_set(self.last_will_topic,
                             self.last_will_message,
                             self.last_will_qos,
                             self.last_will_retain,
                             )
    self._connect()
Init the Flask - MQTT addon .
51,515
def subscribe(self, topic, qos=0):
    """Subscribe to *topic* at the given QoS level.

    On success the topic is recorded in ``self.topics`` (so it can be
    unsubscribed later); failures are logged with the error code.

    Returns:
        The (result, mid) tuple from the underlying MQTT client's
        subscribe call.
    """
    result, mid = self.client.subscribe(topic=topic, qos=qos)
    if result == MQTT_ERR_SUCCESS:
        self.topics[topic] = TopicQos(topic=topic, qos=qos)
        logger.debug('Subscribed to topic: {0}, qos: {1}'.format(topic, qos))
    else:
        logger.error('Error {0} subscribing to topic: {1}'.format(result,
                                                                  topic))
    return (result, mid)
Subscribe to a certain topic .
51,516
def unsubscribe(self, topic):
    """Unsubscribe from a single topic.

    Returns:
        The (result, mid) tuple from the client when we were subscribed
        to *topic*, or None when we were not.
    """
    # Only issue the request for topics we actually track.
    if topic in self.topics:
        result, mid = self.client.unsubscribe(topic)
        if result == MQTT_ERR_SUCCESS:
            self.topics.pop(topic)
            logger.debug('Unsubscribed from topic: {0}'.format(topic))
        else:
            logger.debug('Error {0} unsubscribing from topic: {1}'
                         .format(result, topic))
        return result, mid
    return None
Unsubscribe from a single topic .
51,517
def unsubscribe_all(self):
    """Unsubscribe from every currently subscribed topic."""
    # Snapshot the keys first: unsubscribe() removes entries from
    # self.topics while we iterate.
    for topic in list(self.topics):
        self.unsubscribe(topic)
Unsubscribe from all topics .
51,518
def publish(self, topic, payload=None, qos=0, retain=False):
    """Send a message to the broker.

    Reconnects first if the client is not currently connected; the
    outcome is logged either way.

    Returns:
        The (result, mid) tuple from the client's publish call.
    """
    if not self.connected:
        self.client.reconnect()
    result, mid = self.client.publish(topic, payload, qos, retain)
    if result == MQTT_ERR_SUCCESS:
        logger.debug('Published topic {0}: {1}'.format(topic, payload))
    else:
        logger.error('Error {0} publishing topic {1}'.format(result, topic))
    return (result, mid)
Send a message to the broker .
51,519
def on_subscribe(self):
    """Decorator factory: register the wrapped function as the client's
    on_subscribe callback. The function itself is returned unchanged."""
    def register(handler):
        self.client.on_subscribe = handler
        return handler
    return register
Decorate a callback function to handle subscriptions.
51,520
def on_unsubscribe(self):
    """Decorator factory: register the wrapped function as the client's
    on_unsubscribe callback. The function itself is returned unchanged."""
    def register(handler):
        self.client.on_unsubscribe = handler
        return handler
    return register
Decorate a callback function to handle unsubscriptions.
51,521
def on_log(self):
    """Decorator factory: register the wrapped function as the client's
    on_log (MQTT logging) callback. The function is returned unchanged."""
    def register(handler):
        self.client.on_log = handler
        return handler
    return register
Decorate a callback function to handle MQTT logging .
51,522
def list():
    """List all S3 bucket names for the default boto3 session.

    NOTE(review): shadows the builtin ``list``; name kept for API
    compatibility.
    """
    s3 = boto3.resource('s3')
    return [b.name for b in s3.buckets.all()]
Lists buckets by name .
51,523
def delete(self, key=None):
    """Delete the given key, or the whole bucket when *key* is None.

    Deleting the bucket first deletes every key it contains (S3 refuses
    to delete non-empty buckets).

    Returns:
        The boto delete response of the key or bucket.
    """
    if key is None:
        # Empty the bucket, then remove the bucket itself.
        for key in self.all():
            key.delete()
        return self._boto_bucket.delete()
    k = self.key(key)
    return k.delete()
Deletes the given key or the whole bucket .
51,524
def rename(self, new_name):
    """Rename the key to *new_name*.

    Implemented as copy-then-delete: the contents and metadata are written
    under the new name, the old key is deleted, and ``self.name`` is
    updated to point at the new key.
    """
    self.bucket.set(new_name, self.get(), self.meta)
    self.delete()
    self.name = new_name
Renames the key to a given new name .
51,525
def is_public(self):
    """Return True if the ACL grants READ permission to the AllUsers group
    (i.e. the public-read ACL is set for this key)."""
    grants = self._boto_object.Acl().grants
    return any(
        'AllUsers' in grant['Grantee'].get('URI', '')
        and grant['Permission'] == 'READ'
        for grant in grants
    )
Returns True if the public - read ACL is set for the Key .
51,526
def url(self):
    """Return the public URL for this key.

    Raises:
        ValueError: when the key does not have the public-read ACL set.

    NOTE(review): ``self.is_public`` is used as a truthy value here; if
    it is a plain method (not a @property) the reference is always truthy
    and this branch is always taken — confirm it is exposed as a property
    in the class definition.
    """
    if self.is_public:
        return '{0}/{1}/{2}'.format(
            self.bucket._boto_s3.meta.client.meta.endpoint_url,
            self.bucket.name,
            self.name)
    else:
        raise ValueError(
            '{0!r} does not have the public-read ACL set. '
            'Use the make_public() method to allow for '
            'public URL sharing.'.format(self.name))
Returns the public URL for the given key .
51,527
def temp_url(self, duration=120):
    """Return a presigned (temporary) GET URL for this key.

    Args:
        duration: lifetime of the URL in seconds (default 120).
    """
    return self.bucket._boto_s3.meta.client.generate_presigned_url(
        'get_object',
        Params={'Bucket': self.bucket.name, 'Key': self.name},
        ExpiresIn=duration)
Returns a temporary URL for the given key .
51,528
def eprint(*args, **kwargs):
    """Print an error message to standard error, prefixed with the file
    name and line number of the call site.

    Accepts the same ``end`` and ``sep`` keyword arguments as ``print``.
    """
    end = kwargs.get("end", "\n")
    sep = kwargs.get("sep", " ")
    # Frame one level up is the caller; fields 1:3 are (filename, lineno).
    (filename, lineno) = inspect.stack()[1][1:3]
    # Bug fix: the "file:line: " prefix previously went to stdout while the
    # message went to stderr, splitting the output across streams.
    print("{}:{}: ".format(filename, lineno), end="", file=sys.stderr)
    print(*args, end=end, file=sys.stderr, sep=sep)
Print an error message to standard error prefixing it with file name and line number from which method was called .
51,529
def formatException(type, value, tb):
    """Format a traceback, colouring (yellow) the lines that do NOT come
    from installed-package directories, so user frames stand out.

    NOTE(review): shadows the builtin ``type``; signature kept for
    compatibility with sys.excepthook-style callers.
    NOTE(review): real traceback "File ..." lines start with two spaces;
    this pattern (as given) has one — confirm against the upstream source,
    as a mismatch would colour every line.
    """
    # Directories of installed packages: everything on sys.path after the
    # script's own directory, each with a trailing separator so that
    # startswith() does prefix matching on whole path components.
    packages = tuple(join(abspath(p), "") for p in sys.path[1:])
    lines = []
    for line in format_exception(type, value, tb):
        matches = re.search(r"^ File \"([^\"]+)\", line \d+, in .+", line)
        if matches and matches.group(1).startswith(packages):
            # Package frame: keep uncoloured. (Was `lines += line`, which
            # extended the list character-by-character; the join produced
            # the same text only by accident.)
            lines.append(line)
        else:
            # User frame (or non-frame line): colour the text, preserving
            # the surrounding whitespace.
            matches = re.search(r"^(\s*)(.*?)(\s*)$", line, re.DOTALL)
            lines.append(matches.group(1) +
                         colored(matches.group(2), "yellow") +
                         matches.group(3))
    return "".join(lines).rstrip()
Format traceback darkening entries from global site - packages directories and user - specific site - packages directory .
51,530
def get_char(prompt=None):
    """Read a line from standard input and return its single character.

    Re-prompts ("Retry: ") when the input is not exactly one character and
    no prompt was supplied. Returns None when the line cannot be read (EOF).
    """
    while True:
        line = get_string(prompt)
        if line is None:
            return None
        if len(line) == 1:
            return line[0]
        # Nudge the user only when the caller did not supply a prompt.
        if prompt is None:
            print("Retry: ", end="")
Read a line of text from standard input and return the equivalent char; if the text is not a single char, the user is prompted to retry. If the line can't be read, return None.
51,531
def get_float(prompt=None):
    """Read a line of text from standard input and return the equivalent
    float as precisely as possible.

    Re-prompts ("Retry: ") when the text does not represent a float and no
    prompt was supplied. Returns None when the line cannot be read (EOF).
    """
    while True:
        s = get_string(prompt)
        if s is None:
            return None
        # Optional sign, digits, optional fractional part. Note this
        # rejects exponent notation ('1e5'); a bare '.' passes the regex
        # but float() then raises and we simply re-prompt.
        if len(s) > 0 and re.search(r"^[+-]?\d*(?:\.\d*)?$", s):
            try:
                return float(s)
            except ValueError:
                pass
        if prompt is None:
            print("Retry: ", end="")
Read a line of text from standard input and return the equivalent float as precisely as possible; if the text does not represent a float, the user is prompted to retry. If the line can't be read, return None.
51,532
def get_int(prompt=None):
    """Read a line from standard input and return the equivalent int.

    Re-prompts ("Retry: ") when the text is not a base-10 integer and no
    prompt was supplied. Returns None when the line cannot be read (EOF).
    """
    while True:
        line = get_string(prompt)
        if line is None:
            return None
        if re.search(r"^[+-]?\d+$", line):
            try:
                value = int(line, 10)
                # Guard kept from the original (Python 2's int/long split).
                if type(value) is int:
                    return value
            except ValueError:
                pass
        if prompt is None:
            print("Retry: ", end="")
Read a line of text from standard input and return the equivalent int; if the text does not represent an int, the user is prompted to retry. If the line can't be read, return None.
51,533
def _connect ( dbapi_connection , connection_record ) : if type ( dbapi_connection ) is sqlite3 . Connection : cursor = dbapi_connection . cursor ( ) cursor . execute ( "PRAGMA foreign_keys=ON" ) cursor . close ( )
Enables foreign key support .
51,534
def _parse ( self , e ) : matches = re . search ( r"^\(_mysql_exceptions\.OperationalError\) \(\d+, \"(.+)\"\)$" , str ( e ) ) if matches : return matches . group ( 1 ) matches = re . search ( r"^\(psycopg2\.OperationalError\) (.+)$" , str ( e ) ) if matches : return matches . group ( 1 ) matches = re . search ( r"^\(sqlite3\.OperationalError\) (.+)$" , str ( e ) ) if matches : return matches . group ( 1 ) return str ( e )
Parses an exception and returns its message.
51,535
def get_table_service_stats(self, timeout=None):
    """Retrieve replication statistics for the Table service.

    Only available when read-access geo-redundant replication is enabled
    for the storage account — hence the secondary-only host location.

    :param timeout: optional server-side timeout in seconds.
    :return: the parsed service-stats response.
    """
    request = HTTPRequest()
    request.method = 'GET'
    # Stats are served from the secondary endpoint only.
    request.host_locations = self._get_host_locations(primary=False,
                                                      secondary=True)
    request.path = '/'
    request.query = {
        'restype': 'service',
        'comp': 'stats',
        'timeout': _int_to_str(timeout),
    }
    return self._perform_request(request, _convert_xml_to_service_stats)
Retrieves statistics related to replication for the Table service. It is only available when read-access geo-redundant replication is enabled for the storage account.
51,536
def get_table_service_properties(self, timeout=None):
    """Get the storage account's Table service properties, including
    logging, analytics and CORS rules.

    :param timeout: optional server-side timeout in seconds.
    :return: the parsed service-properties response.
    """
    request = HTTPRequest()
    request.method = 'GET'
    # Properties may be read from either the primary or the secondary.
    request.host_locations = self._get_host_locations(secondary=True)
    request.path = '/'
    request.query = {
        'restype': 'service',
        'comp': 'properties',
        'timeout': _int_to_str(timeout),
    }
    return self._perform_request(request, _convert_xml_to_service_properties)
Gets the properties of a storage account's Table service, including logging, analytics, and CORS rules.
51,537
def delete_table(self, table_name, fail_not_exist=False, timeout=None):
    """Delete the specified table and any data it contains.

    :param table_name: name of the table to delete (required).
    :param fail_not_exist: when False (default), a not-found error is
        swallowed and False is returned; when True the AzureHttpError
        propagates to the caller.
    :param timeout: optional server-side timeout in seconds.
    :return: True when the delete request succeeded, False when the table
        did not exist and fail_not_exist is False.
    """
    _validate_not_none('table_name', table_name)
    request = HTTPRequest()
    request.method = 'DELETE'
    request.host_locations = self._get_host_locations()
    request.path = '/Tables(\'' + _to_str(table_name) + '\')'
    request.query = {'timeout': _int_to_str(timeout)}
    request.headers = {_DEFAULT_ACCEPT_HEADER[0]: _DEFAULT_ACCEPT_HEADER[1]}
    if not fail_not_exist:
        try:
            self._perform_request(request)
            return True
        except AzureHttpError as ex:
            # Convert "not found" into a False return; anything else
            # re-raises inside _dont_fail_not_exist.
            _dont_fail_not_exist(ex)
            return False
    else:
        self._perform_request(request)
        return True
Deletes the specified table and any data it contains .
51,538
def query_entities(self, table_name, filter=None, select=None,
                   num_results=None, marker=None,
                   accept=TablePayloadFormat.JSON_MINIMAL_METADATA,
                   property_resolver=None, timeout=None):
    """Return a generator over the entities in *table_name*.

    The generator lazily follows the continuation tokens returned by the
    service, stopping when all entities have been returned or
    *num_results* is reached.

    NOTE(review): ``filter`` shadows the builtin; kept for API
    compatibility.
    """
    # Lock the request to one location so paging stays consistent across
    # continuation requests.
    operation_context = _OperationContext(location_lock=True)
    if self.key_encryption_key is not None or self.key_resolver_function is not None:
        # Client-side encryption needs its metadata columns included in
        # any explicit projection.
        if select is not None and select != '*':
            select += ',_ClientEncryptionMetadata1,_ClientEncryptionMetadata2'
    # args/kwargs are captured so ListGenerator can replay the query for
    # each continuation page.
    args = (table_name,)
    kwargs = {'filter': filter, 'select': select,
              'max_results': num_results, 'marker': marker,
              'accept': accept, 'property_resolver': property_resolver,
              'timeout': timeout, '_context': operation_context}
    resp = self._query_entities(*args, **kwargs)
    return ListGenerator(resp, self._query_entities, args, kwargs)
Returns a generator to list the entities in the table specified . The generator will lazily follow the continuation tokens returned by the service and stop when all entities have been returned or num_results is reached .
51,539
def merge_entity(self, table_name, entity, if_match='*', timeout=None):
    """Update an existing entity by merging in *entity*'s properties.

    Unlike update_entity, the existing entity is not replaced: properties
    absent from *entity* are left untouched (so a property cannot be
    removed with merge), and null-valued properties are ignored. Raises
    if the entity does not exist.

    :param if_match: ETag for optimistic concurrency; '*' forces the
        merge unconditionally.
    :param timeout: optional server-side timeout in seconds.
    :return: the ETag extracted from the response.
    """
    _validate_not_none('table_name', table_name)
    request = _merge_entity(entity, if_match, self.require_encryption,
                            self.key_encryption_key)
    request.host_locations = self._get_host_locations()
    request.query['timeout'] = _int_to_str(timeout)
    request.path = _get_entity_path(table_name, entity['PartitionKey'],
                                    entity['RowKey'])
    return self._perform_request(request, _extract_etag)
Updates an existing entity by merging the entity's properties. Throws if the entity does not exist. This operation does not replace the existing entity, as the update_entity operation does. A property cannot be removed with merge_entity. Any properties with null values are ignored. All other properties will be updated or added.
51,540
def create_entity_class(self):
    """Create a class-based Entity populated with fixed sample values
    exercising every supported data type (bool, int, float, datetime,
    binary, int32, guid)."""
    entity = Entity()
    # Random keys so repeated runs do not collide with existing rows.
    entity.PartitionKey = 'pk{}'.format(str(uuid.uuid4()).replace('-', ''))
    entity.RowKey = 'rk{}'.format(str(uuid.uuid4()).replace('-', ''))
    entity.age = 39
    entity.large = 933311100
    entity.sex = 'male'
    entity.married = True
    entity.ratio = 3.1
    entity.birthday = datetime(1970, 10, 4)
    # Explicitly-typed properties for types that cannot be inferred.
    entity.binary = EntityProperty(EdmType.BINARY, b'xyz')
    entity.other = EntityProperty(EdmType.INT32, 20)
    entity.clsid = EntityProperty(EdmType.GUID,
                                  'c9da6455-213d-42c9-9a79-3e9149a57833')
    return entity
Creates a class - based entity with fixed values using all of the supported data types .
51,541
def create_entity_dict(self):
    """Create a dict-based entity with fixed sample values exercising every
    supported data type (bool, int, float, datetime, binary, int32, guid)."""
    # Random keys (one uuid per key) so repeated runs do not collide.
    return {
        'PartitionKey': 'pk{}'.format(str(uuid.uuid4()).replace('-', '')),
        'RowKey': 'rk{}'.format(str(uuid.uuid4()).replace('-', '')),
        'age': 39,
        'large': 933311100,
        'sex': 'male',
        'married': True,
        'ratio': 3.1,
        'birthday': datetime(1970, 10, 4),
        # Explicitly-typed properties for types that cannot be inferred.
        'binary': EntityProperty(EdmType.BINARY, b'xyz'),
        'other': EntityProperty(EdmType.INT32, 20),
        'clsid': EntityProperty(EdmType.GUID,
                                'c9da6455-213d-42c9-9a79-3e9149a57833'),
    }
Creates a dict - based entity with fixed values using all of the supported data types .
51,542
def _convert_batch_to_json(batch_requests):
    """Serialize an iterable of (row_key, request) batch operations into a
    MIME multipart/mixed payload.

    Returns:
        (body_bytes, content_type): the encoded payload and the
        Content-Type header value carrying the outer batch boundary.
    """
    batch_boundary = b'batch_' + _new_boundary()
    changeset_boundary = b'changeset_' + _new_boundary()
    # The outer batch part wraps a single changeset that contains every
    # sub-request.
    body = [b'--' + batch_boundary + b'\n',
            b'Content-Type: multipart/mixed; boundary=',
            changeset_boundary + b'\n\n']
    content_id = 1
    for _, request in batch_requests:
        body.append(b'--' + changeset_boundary + b'\n')
        body.append(b'Content-Type: application/http\n')
        body.append(b'Content-Transfer-Encoding: binary\n\n')
        # Request line, e.g. "POST /path HTTP/1.1".
        body.append(request.method.encode('utf-8'))
        body.append(b' ')
        body.append(request.path.encode('utf-8'))
        body.append(b' HTTP/1.1\n')
        # Content-ID numbers the sub-requests sequentially from 1.
        body.append(b'Content-ID: ')
        body.append(str(content_id).encode('utf-8') + b'\n')
        content_id += 1
        # Only headers whitelisted in _SUB_HEADERS are forwarded.
        for name, value in request.headers.items():
            if name in _SUB_HEADERS:
                body.append(name.encode('utf-8') + b': ')
                body.append(value.encode('utf-8') + b'\n')
        # DELETE sub-requests carry no body, so no Content-Length either.
        if not request.method == 'DELETE':
            body.append(b'Content-Length: ')
            body.append(str(len(request.body)).encode('utf-8'))
            body.append(b'\n\n')
            body.append(request.body + b'\n')
        body.append(b'\n')
    body.append(b'--' + changeset_boundary + b'--' + b'\n')
    body.append(b'--' + batch_boundary + b'--')
    return b''.join(body), 'multipart/mixed; boundary=' + batch_boundary.decode('utf-8')
Create json to send for an array of batch requests .
51,543
def _decrypt_entity(entity, encrypted_properties_list, content_encryption_key,
                    entityIV, isJavaV1):
    """Decrypt the listed properties of *entity* using AES-CBC with PKCS7
    (128-bit) padding, decoding each plaintext as UTF-8.

    Each property gets its own IV derived from the entity IV, the
    partition/row keys and the property name. The two client-encryption
    metadata columns are stripped from the returned copy; the input entity
    is not modified.

    Raises:
        AzureException: on any failure during decryption.

    NOTE(review): the loop variable ``property`` shadows the builtin;
    kept to minimize the diff.
    """
    _validate_not_none('entity', entity)
    decrypted_entity = deepcopy(entity)
    try:
        for property in entity.keys():
            if property in encrypted_properties_list:
                value = entity[property]
                propertyIV = _generate_property_iv(entityIV,
                                                   entity['PartitionKey'],
                                                   entity['RowKey'],
                                                   property, isJavaV1)
                cipher = _generate_AES_CBC_cipher(content_encryption_key,
                                                  propertyIV)
                # Decrypt the ciphertext, then strip the PKCS7 padding.
                decryptor = cipher.decryptor()
                decrypted_data = (decryptor.update(value.value) +
                                  decryptor.finalize())
                unpadder = PKCS7(128).unpadder()
                decrypted_data = (unpadder.update(decrypted_data) +
                                  unpadder.finalize())
                decrypted_entity[property] = decrypted_data.decode('utf-8')
        decrypted_entity.pop('_ClientEncryptionMetadata1')
        decrypted_entity.pop('_ClientEncryptionMetadata2')
        return decrypted_entity
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        raise AzureException(_ERROR_DECRYPTION_FAILURE)
Decrypts the specified entity using AES256 in CBC mode with 128 bit padding . Unwraps the CEK using either the specified KEK or the key returned by the key_resolver . Properties specified in the encrypted_properties_list will be decrypted and decoded to utf - 8 strings .
51,544
def _generate_property_iv(entity_iv, pk, rk, property_name, isJavaV1):
    """Derive the per-property IV: the first 16 bytes of SHA-256 over the
    entity IV concatenated with the keys and property name.

    Java v1 compatibility uses pk+rk ordering; otherwise rk+pk.
    """
    ordered = (pk + rk + property_name) if isJavaV1 else (rk + pk + property_name)
    digest = Hash(SHA256(), default_backend())
    digest.update(entity_iv + ordered.encode('utf-8'))
    return digest.finalize()[:16]
Uses the entity_iv partition key and row key to generate and return the iv for the specified property .
51,545
def _get_default_mapping(self, obj):
    """Return the default marshmallow-field -> python-type mapping.

    Starts from the schema's inverted TYPE_MAPPING, then overlays fields
    that need special handling. Nested maps to the NAME of a handler
    method (resolved via getattr at schema-build time).
    """
    mapping = {v: k for k, v in obj.TYPE_MAPPING.items()}
    mapping.update({
        fields.Email: text_type,
        fields.Dict: dict,
        fields.Url: text_type,
        fields.List: list,
        fields.LocalDateTime: datetime.datetime,
        # String value => handler-method name, not a python type.
        fields.Nested: '_from_nested_schema',
    })
    return mapping
Return default mapping if there are no special needs .
51,546
def get_properties(self, obj):
    """Build the JSON-schema 'properties' mapping for *obj*: each field's
    serialized name mapped to its schema definition, in sorted field
    order."""
    return {
        field.name: self._get_schema_for_field(obj, field)
        for _, field in sorted(obj.fields.items())
    }
Fill out properties field .
51,547
def get_required(self, obj):
    """Collect the serialized names of *obj*'s required fields, in sorted
    field order; returns marshmallow's `missing` sentinel when none are
    required."""
    required = [
        field.name
        for _, field in sorted(obj.fields.items())
        if field.required
    ]
    return required or missing
Fill out required field .
51,548
def _from_python_type(self, obj, field, pytype):
    """Build a JSON-schema definition for *field* from its python type.

    Merges the TYPE_MAP defaults for *pytype*, read-only/default flags,
    any field metadata, and (for List fields) the item schema.
    """
    json_schema = {
        'title': field.attribute or field.name,
    }
    for key, val in TYPE_MAP[pytype].items():
        json_schema[key] = val
    if field.dump_only:
        json_schema['readonly'] = True
    if field.default is not missing:
        json_schema['default'] = field.default
    # Metadata may be nested under a 'metadata' key or declared inline;
    # merge both, skipping the container key itself.
    metadata = field.metadata.get('metadata', {})
    metadata.update(field.metadata)
    for md_key, md_val in metadata.items():
        if md_key == 'metadata':
            continue
        json_schema[md_key] = md_val
    if isinstance(field, fields.List):
        # Recurse for the element type of list fields.
        json_schema['items'] = self._get_schema_for_field(obj, field.container)
    return json_schema
Get schema definition from python type .
51,549
def _get_schema_for_field(self, obj, field):
    """Resolve the JSON-schema definition for a single field, then apply
    any recognized validators (via FIELD_VALIDATORS).

    Resolution order: the field's own _jsonschema_type_mapping (attribute,
    then metadata), then the default mapping. String values in the mapping
    are treated as names of handler methods on self.

    Raises:
        ValueError: for unsupported field types.

    NOTE(review): uses ``basestring`` — implies Python 2 or a compat
    alias imported elsewhere in the module.
    """
    mapping = self._get_default_mapping(obj)
    if hasattr(field, '_jsonschema_type_mapping'):
        schema = field._jsonschema_type_mapping()
    elif '_jsonschema_type_mapping' in field.metadata:
        schema = field.metadata['_jsonschema_type_mapping']
    elif field.__class__ in mapping:
        pytype = mapping[field.__class__]
        if isinstance(pytype, basestring):
            # Handler-method name (e.g. '_from_nested_schema').
            schema = getattr(self, pytype)(obj, field)
        else:
            schema = self._from_python_type(obj, field, pytype)
    else:
        raise ValueError('unsupported field type %s' % field)
    # Fold in schema adjustments for each recognized validator.
    for validator in field.validators:
        if validator.__class__ in FIELD_VALIDATORS:
            schema = FIELD_VALIDATORS[validator.__class__](
                schema, field, validator, obj)
    return schema
Get schema and validators for field .
51,550
def _from_nested_schema(self, obj, field):
    """Build the JSON-schema fragment for a Nested field.

    The nested schema class is dumped once into the shared definitions
    table (self._nested_schema_classes) and referenced via $ref; 'many'
    fields wrap the reference in an array schema.

    NOTE(review): uses ``basestring`` — implies Python 2 or a compat
    alias imported elsewhere in the module.
    """
    # The nested schema may be given by dotted-path string or by class.
    if isinstance(field.nested, basestring):
        nested = get_class(field.nested)
    else:
        nested = field.nested
    name = nested.__name__
    outer_name = obj.__class__.__name__
    only = field.only
    exclude = field.exclude
    # Dump each nested schema class only once; skip self-references to
    # avoid infinite recursion.
    if name not in self._nested_schema_classes and name != outer_name:
        wrapped_nested = self.__class__(nested=True)
        wrapped_dumped = wrapped_nested.dump(
            nested(only=only, exclude=exclude))
        # marshmallow 3 returns the data directly; 2.x wraps it in
        # a MarshalResult with a .data attribute.
        if marshmallow.__version__.split('.', 1)[0] >= '3':
            self._nested_schema_classes[name] = wrapped_dumped
        else:
            self._nested_schema_classes[name] = wrapped_dumped.data
        # Pull in any definitions the nested dump accumulated.
        self._nested_schema_classes.update(
            wrapped_nested._nested_schema_classes)
    schema = {
        'type': 'object',
        '$ref': '#/definitions/{}'.format(name)
    }
    # Metadata may be nested under a 'metadata' key or declared inline.
    metadata = field.metadata.get('metadata', {})
    metadata.update(field.metadata)
    for md_key, md_val in metadata.items():
        if md_key == 'metadata':
            continue
        schema[md_key] = md_val
    if field.many:
        # Optional lists also admit null.
        schema = {
            'type': ["array"] if field.required else ['array', 'null'],
            'items': schema,
        }
    return schema
Support nested field .
51,551
def wrap(self, data):
    """Wrap *data* in a root schema carrying the definitions table.

    Nested schemas return their data untouched — only the outermost
    schema emits the 'definitions' block and the root $ref.
    """
    if self.nested:
        return data
    name = self.obj.__class__.__name__
    self._nested_schema_classes[name] = data
    return {
        'definitions': self._nested_schema_classes,
        '$ref': '#/definitions/{name}'.format(name=name),
    }
Wrap this with the root schema definitions .
51,552
def handle_length(schema, field, validator, parent_schema):
    """Apply marshmallow.validate.Length to *schema*.

    Strings get minLength/maxLength; List/Nested fields get
    minItems/maxItems; 'equal' pins both bounds to the same value.

    Raises:
        ValueError: when the field is neither a String nor a List/Nested.

    NOTE(review): truthiness tests mean min=0 / max=0 / equal=0 are
    silently ignored — confirm whether that is intended.
    """
    if isinstance(field, fields.String):
        minKey = 'minLength'
        maxKey = 'maxLength'
    elif isinstance(field, (fields.List, fields.Nested)):
        minKey = 'minItems'
        maxKey = 'maxItems'
    else:
        raise ValueError("In order to set the Length validator for JSON "
                         "schema, the field must be either a List or a String")
    if validator.min:
        schema[minKey] = validator.min
    if validator.max:
        schema[maxKey] = validator.max
    if validator.equal:
        # 'equal' overrides min/max with an exact length.
        schema[minKey] = validator.equal
        schema[maxKey] = validator.equal
    return schema
Adds validation logic for marshmallow . validate . Length setting the values appropriately for fields . List fields . Nested and fields . String .
51,553
def handle_one_of(schema, field, validator, parent_schema):
    """Apply marshmallow.validate.OneOf: expose the allowed choices as the
    JSON-schema 'enum' (and their labels as 'enumNames')."""
    if not validator.choices:
        return schema
    schema['enum'] = list(validator.choices)
    schema['enumNames'] = list(validator.labels)
    return schema
Adds the validation logic for marshmallow . validate . OneOf by setting the JSONSchema enum property to the allowed choices in the validator .
51,554
def handle_range(schema, field, validator, parent_schema):
    """Apply marshmallow.validate.Range to *schema* (Number fields only;
    other field types are returned unchanged).

    NOTE(review): a falsy validator.min (None or 0) still sets minimum=0
    with exclusiveMinimum=False, while a falsy max is skipped entirely —
    confirm this asymmetry (and the 0-as-missing treatment) is intended.
    """
    if not isinstance(field, fields.Number):
        return schema
    if validator.min:
        schema['minimum'] = validator.min
        schema['exclusiveMinimum'] = True
    else:
        # Default lower bound when no (truthy) minimum is given.
        schema['minimum'] = 0
        schema['exclusiveMinimum'] = False
    if validator.max:
        schema['maximum'] = validator.max
        schema['exclusiveMaximum'] = True
    return schema
Adds validation logic for marshmallow . validate . Range setting the values appropriately fields . Number and it s subclasses .
51,555
def check_eigen_solver ( eigen_solver , solver_kwds , size = None , nvec = None ) : if eigen_solver in BAD_EIGEN_SOLVERS : raise ValueError ( BAD_EIGEN_SOLVERS [ eigen_solver ] ) elif eigen_solver not in EIGEN_SOLVERS : raise ValueError ( "Unrecognized eigen_solver: '{0}'." "Should be one of: {1}" . format ( eigen_solver , EIGEN_SOLVERS ) ) if size is not None and nvec is not None : if eigen_solver == 'lobpcg' and size < 5 * nvec + 1 : warnings . warn ( "lobpcg does not perform well with small matrices or " "with large numbers of vectors. Switching to 'dense'" ) eigen_solver = 'dense' solver_kwds = None elif eigen_solver == 'auto' : if size > 200 and nvec < 10 : if PYAMG_LOADED : eigen_solver = 'amg' solver_kwds = None else : eigen_solver = 'arpack' solver_kwds = None else : eigen_solver = 'dense' solver_kwds = None return eigen_solver , solver_kwds
Check that the selected eigensolver is valid
51,556
def precompute_optimzation_Y ( laplacian_matrix , n_samples , relaxation_kwds ) : relaxation_kwds . setdefault ( 'presave' , False ) relaxation_kwds . setdefault ( 'presave_name' , 'pre_comp_current.npy' ) relaxation_kwds . setdefault ( 'verbose' , False ) if relaxation_kwds [ 'verbose' ] : print ( 'Making Lk and nbhds' ) Lk_tensor , nbk , si_map = compute_Lk ( laplacian_matrix , n_samples , relaxation_kwds [ 'subset' ] ) if relaxation_kwds [ 'presave' ] : raise NotImplementedError ( 'Not yet implemented presave' ) return { 'Lk' : Lk_tensor , 'nbk' : nbk , 'si_map' : si_map }
compute Lk neighbors and subset to index map for projected == False
51,557
def compute_Lk ( laplacian_matrix , n_samples , subset ) : Lk_tensor = [ ] nbk = [ ] row , column = laplacian_matrix . T . nonzero ( ) nnz_val = np . squeeze ( np . asarray ( laplacian_matrix . T [ ( row , column ) ] ) ) sorted_col_args = np . argsort ( column ) sorted_col_vals = column [ sorted_col_args ] breaks_row = np . diff ( row ) . nonzero ( ) [ 0 ] breaks_col = np . diff ( sorted_col_vals ) . nonzero ( ) [ 0 ] si_map = { } for idx , k in enumerate ( subset ) : if k == 0 : nbk . append ( column [ : breaks_row [ k ] + 1 ] . T ) lk = nnz_val [ np . sort ( sorted_col_args [ : breaks_col [ k ] + 1 ] ) ] elif k == n_samples - 1 : nbk . append ( column [ breaks_row [ k - 1 ] + 1 : ] . T ) lk = nnz_val [ np . sort ( sorted_col_args [ breaks_col [ k - 1 ] + 1 : ] ) ] else : nbk . append ( column [ breaks_row [ k - 1 ] + 1 : breaks_row [ k ] + 1 ] . T ) lk = nnz_val [ np . sort ( sorted_col_args [ breaks_col [ k - 1 ] + 1 : breaks_col [ k ] + 1 ] ) ] npair = nbk [ idx ] . shape [ 0 ] rk = ( nbk [ idx ] == k ) . nonzero ( ) [ 0 ] Lk = sp . sparse . lil_matrix ( ( npair , npair ) ) Lk . setdiag ( lk ) Lk [ : , rk ] = - ( lk . reshape ( - 1 , 1 ) ) Lk [ rk , : ] = - ( lk . reshape ( 1 , - 1 ) ) Lk_tensor . append ( sp . sparse . csr_matrix ( Lk ) ) si_map [ k ] = idx assert len ( Lk_tensor ) == subset . shape [ 0 ] , 'Size of Lk_tensor should be the same as subset.' return Lk_tensor , nbk , si_map
Compute sparse L matrix neighbors and subset to L matrix index map .
51,558
def precompute_optimzation_S ( laplacian_matrix , n_samples , relaxation_kwds ) : relaxation_kwds . setdefault ( 'presave' , False ) relaxation_kwds . setdefault ( 'presave_name' , 'pre_comp_current.npy' ) relaxation_kwds . setdefault ( 'verbose' , False ) if relaxation_kwds [ 'verbose' ] : print ( 'Pre-computing quantities Y to S conversions' ) print ( 'Making A and Pairs' ) A , pairs = makeA ( laplacian_matrix ) if relaxation_kwds [ 'verbose' ] : print ( 'Making Rk and nbhds' ) Rk_tensor , nbk = compute_Rk ( laplacian_matrix , A , n_samples ) ATAinv = np . linalg . pinv ( A . T . dot ( A ) . todense ( ) ) if relaxation_kwds [ 'verbose' ] : print ( 'Finish calculating pseudo inverse' ) if relaxation_kwds [ 'presave' ] : raise NotImplementedError ( 'Not yet implemented presave' ) return { 'RK' : Rk_tensor , 'nbk' : nbk , 'ATAinv' : ATAinv , 'pairs' : pairs , 'A' : A }
compute Rk A ATAinv neighbors and pairs for projected mode
51,559
def compute_Rk ( L , A , n_samples ) : laplacian_matrix = L . copy ( ) laplacian_matrix . setdiag ( 0 ) laplacian_matrix . eliminate_zeros ( ) n = n_samples Rk_tensor = [ ] nbk = [ ] row_A , column_A = A . T . nonzero ( ) row , column = laplacian_matrix . nonzero ( ) nnz_val = np . squeeze ( np . asarray ( laplacian_matrix . T [ ( row , column ) ] ) ) sorted_col_args = np . argsort ( column ) sorted_col_vals = column [ sorted_col_args ] breaks_row_A = np . diff ( row_A ) . nonzero ( ) [ 0 ] breaks_col = np . diff ( sorted_col_vals ) . nonzero ( ) [ 0 ] for k in range ( n_samples ) : if k == 0 : nbk . append ( column_A [ : breaks_row_A [ k ] + 1 ] . T ) Rk_tensor . append ( nnz_val [ np . sort ( sorted_col_args [ : breaks_col [ k ] + 1 ] ) ] ) elif k == n_samples - 1 : nbk . append ( column_A [ breaks_row_A [ k - 1 ] + 1 : ] . T ) Rk_tensor . append ( nnz_val [ np . sort ( sorted_col_args [ breaks_col [ k - 1 ] + 1 : ] ) ] ) else : nbk . append ( column_A [ breaks_row_A [ k - 1 ] + 1 : breaks_row_A [ k ] + 1 ] . T ) Rk_tensor . append ( nnz_val [ np . sort ( sorted_col_args [ breaks_col [ k - 1 ] + 1 : breaks_col [ k ] + 1 ] ) ] ) return Rk_tensor , nbk
Compute sparse L matrix and neighbors .
51,560
def _mod_info ( modname , toskip = [ ] , onlylocals = True ) : hascls = hasfunc = False for localnm , fqnm , obj in zip ( * find_mod_objs ( modname , onlylocals = onlylocals ) ) : if localnm not in toskip : hascls = hascls or inspect . isclass ( obj ) hasfunc = hasfunc or inspect . isroutine ( obj ) if hascls and hasfunc : break pkg = sys . modules [ modname ] ispkg = ( hasattr ( pkg , '__file__' ) and isinstance ( pkg . __file__ , str ) and os . path . split ( pkg . __file__ ) [ 1 ] . startswith ( '__init__.py' ) ) return ispkg , hascls , hasfunc
Determines if a module is a module or a package and whether or not it has classes or functions .
51,561
def compute_affinity_matrix ( adjacency_matrix , method = 'auto' , ** kwargs ) : if method == 'auto' : method = 'gaussian' return Affinity . init ( method , ** kwargs ) . affinity_matrix ( adjacency_matrix )
Compute the affinity matrix with the given method
51,562
def barycenter_graph ( distance_matrix , X , reg = 1e-3 ) : ( N , d_in ) = X . shape ( rows , cols ) = distance_matrix . nonzero ( ) W = sparse . lil_matrix ( ( N , N ) ) for i in range ( N ) : nbrs_i = cols [ rows == i ] n_neighbors_i = len ( nbrs_i ) v = np . ones ( n_neighbors_i , dtype = X . dtype ) C = X [ nbrs_i ] - X [ i ] G = np . dot ( C , C . T ) trace = np . trace ( G ) if trace > 0 : R = reg * trace else : R = reg G . flat [ : : n_neighbors_i + 1 ] += R w = solve ( G , v , sym_pos = True ) W [ i , nbrs_i ] = w / np . sum ( w ) return W
Computes the barycenter weighted graph for points in X
51,563
def locally_linear_embedding ( geom , n_components , reg = 1e-3 , eigen_solver = 'auto' , random_state = None , solver_kwds = None ) : if geom . X is None : raise ValueError ( "Must pass data matrix X to Geometry" ) if geom . adjacency_matrix is None : geom . compute_adjacency_matrix ( ) W = barycenter_graph ( geom . adjacency_matrix , geom . X , reg = reg ) eigen_solver , solver_kwds = check_eigen_solver ( eigen_solver , solver_kwds , size = W . shape [ 0 ] , nvec = n_components + 1 ) if eigen_solver != 'dense' : M = eye ( * W . shape , format = W . format ) - W M = ( M . T * M ) . tocsr ( ) else : M = ( W . T * W - W . T - W ) . toarray ( ) M . flat [ : : M . shape [ 0 ] + 1 ] += 1 return null_space ( M , n_components , k_skip = 1 , eigen_solver = eigen_solver , random_state = random_state )
Perform a Locally Linear Embedding analysis on the data .
51,564
def _num_samples ( x ) : if hasattr ( x , 'fit' ) : raise TypeError ( 'Expected sequence or array-like, got ' 'estimator %s' % x ) if not hasattr ( x , '__len__' ) and not hasattr ( x , 'shape' ) : if hasattr ( x , '__array__' ) : x = np . asarray ( x ) else : raise TypeError ( "Expected sequence or array-like, got %s" % type ( x ) ) if hasattr ( x , 'shape' ) : if len ( x . shape ) == 0 : raise TypeError ( "Singleton array %r cannot be considered" " a valid collection." % x ) return x . shape [ 0 ] else : return len ( x )
Return number of samples in array - like x .
51,565
def spectral_clustering ( geom , K , eigen_solver = 'dense' , random_state = None , solver_kwds = None , renormalize = True , stabalize = True , additional_vectors = 0 ) : if geom . affinity_matrix is None : S = geom . compute_affinity_matrix ( ) else : S = geom . affinity_matrix if eigen_solver in [ 'lobpcg' , 'amg' ] : stabalize = True if stabalize : geom . laplacian_type = 'symmetricnormalized' return_lapsym = True else : geom . laplacian_type = 'randomwalk' return_lapsym = False P = geom . compute_laplacian_matrix ( return_lapsym = return_lapsym ) P += identity ( P . shape [ 0 ] ) if eigen_solver in [ 'auto' , 'amg' , 'lobpcg' ] : n_components = 2 * int ( np . log ( P . shape [ 0 ] ) ) * K + 1 n_components += int ( additional_vectors ) else : n_components = K n_components = min ( n_components , P . shape [ 0 ] ) ( lambdas , eigen_vectors ) = eigen_decomposition ( P , n_components = n_components , eigen_solver = eigen_solver , random_state = random_state , drop_first = True , solver_kwds = solver_kwds ) if eigen_solver in [ 'auto' , 'lobpcg' , 'amg' ] : if np . abs ( lambdas [ 0 ] - 1 ) > 1e-4 : warnings . warn ( "largest eigenvalue not equal to 1. Results may be poor. Try increasing additional_vectors parameter" ) eigen_vectors = eigen_vectors [ : , 1 : K ] lambdas = lambdas [ 1 : K ] if stabalize : w = np . array ( geom . laplacian_weights ) eigen_vectors /= np . sqrt ( w [ : , np . newaxis ] ) eigen_vectors /= np . linalg . norm ( eigen_vectors , axis = 0 ) if renormalize : norms = np . linalg . norm ( eigen_vectors , axis = 1 ) eigen_vectors /= norms [ : , np . newaxis ] labels = k_means_clustering ( eigen_vectors , K ) return labels , eigen_vectors , P
Spectral clustering for find K clusters by using the eigenvectors of a matrix which is derived from a set of similarities S .
51,566
def pathpatch_2d_to_3d ( pathpatch , z = 0 , normal = 'z' ) : if type ( normal ) is str : index = "xyz" . index ( normal ) normal = np . roll ( ( 1.0 , 0 , 0 ) , index ) normal /= np . linalg . norm ( normal ) path = pathpatch . get_path ( ) trans = pathpatch . get_patch_transform ( ) path = trans . transform_path ( path ) pathpatch . __class__ = art3d . PathPatch3D pathpatch . _code3d = path . codes pathpatch . _facecolor3d = pathpatch . get_facecolor verts = path . vertices d = np . cross ( normal , ( 0 , 0 , 1 ) ) M = rotation_matrix ( d ) pathpatch . _segment3d = np . array ( [ np . dot ( M , ( x , y , 0 ) ) + ( 0 , 0 , z ) for x , y in verts ] ) return pathpatch
Transforms a 2D Patch to a 3D patch using the given normal vector .
51,567
def calc_2d_ellipse_properties ( cov , nstd = 2 ) : def eigsorted ( cov ) : vals , vecs = np . linalg . eigh ( cov ) order = vals . argsort ( ) [ : : - 1 ] return vals [ order ] , vecs [ : , order ] vals , vecs = eigsorted ( cov ) width , height = 2 * nstd * np . sqrt ( vals [ : 2 ] ) normal = vecs [ : , 2 ] if vecs [ 2 , 2 ] > 0 else - vecs [ : , 2 ] d = np . cross ( normal , ( 0 , 0 , 1 ) ) M = rotation_matrix ( d ) x_trans = np . dot ( M , ( 1 , 0 , 0 ) ) cos_val = np . dot ( vecs [ : , 0 ] , x_trans ) / np . linalg . norm ( vecs [ : , 0 ] ) / np . linalg . norm ( x_trans ) theta = np . degrees ( np . arccos ( np . clip ( cos_val , - 1 , 1 ) ) ) return { 'width' : width , 'height' : height , 'angle' : theta } , normal
Calculate the properties for 2d ellipse given the covariance matrix .
51,568
def rotation_matrix ( d ) : sin_angle = np . linalg . norm ( d ) if sin_angle == 0 : return np . identity ( 3 ) d /= sin_angle eye = np . eye ( 3 ) ddt = np . outer ( d , d ) skew = np . array ( [ [ 0 , d [ 2 ] , - d [ 1 ] ] , [ - d [ 2 ] , 0 , d [ 0 ] ] , [ d [ 1 ] , - d [ 0 ] , 0 ] ] , dtype = np . float64 ) M = ddt + np . sqrt ( 1 - sin_angle ** 2 ) * ( eye - ddt ) + sin_angle * skew return M
Calculates a rotation matrix given a vector d . The direction of d corresponds to the rotation axis . The length of d corresponds to the sin of the angle of rotation .
51,569
def create_ellipse ( width , height , angle ) : angle = angle / 180.0 * np . pi thetas = np . linspace ( 0 , 2 * np . pi , 200 ) a = width / 2.0 b = height / 2.0 x = a * np . cos ( thetas ) * np . cos ( angle ) - b * np . sin ( thetas ) * np . sin ( angle ) y = a * np . cos ( thetas ) * np . sin ( angle ) + b * np . sin ( thetas ) * np . cos ( angle ) z = np . zeros ( thetas . shape ) return np . vstack ( ( x , y , z ) ) . T
Create parametric ellipse from 200 points .
51,570
def transform_to_3d ( points , normal , z = 0 ) : d = np . cross ( normal , ( 0 , 0 , 1 ) ) M = rotation_matrix ( d ) transformed_points = M . dot ( points . T ) . T + z return transformed_points
Project points into 3d from 2d points .
51,571
def create_ellipse_mesh ( points , ** kwargs ) : import plotly . graph_objs as go x , y , z = points . T return ( go . Mesh3d ( x = x , y = y , z = z , ** kwargs ) , go . Scatter3d ( x = x , y = y , z = z , marker = dict ( size = 0.01 ) , line = dict ( width = 2 , color = '#000000' ) , showlegend = False , hoverinfo = 'none' ) )
Visualize the ellipse by using the mesh of the points .
51,572
def ltsa ( geom , n_components , eigen_solver = 'auto' , random_state = None , solver_kwds = None ) : if geom . X is None : raise ValueError ( "Must pass data matrix X to Geometry" ) ( N , d_in ) = geom . X . shape if n_components > d_in : raise ValueError ( "output dimension must be less than or equal " "to input dimension" ) if geom . adjacency_matrix is None : geom . compute_adjacency_matrix ( ) ( rows , cols ) = geom . adjacency_matrix . nonzero ( ) eigen_solver , solver_kwds = check_eigen_solver ( eigen_solver , solver_kwds , size = geom . adjacency_matrix . shape [ 0 ] , nvec = n_components + 1 ) if eigen_solver != 'dense' : M = sparse . csr_matrix ( ( N , N ) ) else : M = np . zeros ( ( N , N ) ) for i in range ( N ) : neighbors_i = cols [ rows == i ] n_neighbors_i = len ( neighbors_i ) use_svd = ( n_neighbors_i > d_in ) Xi = geom . X [ neighbors_i ] Xi -= Xi . mean ( 0 ) if use_svd : v = svd ( Xi , full_matrices = True ) [ 0 ] else : Ci = np . dot ( Xi , Xi . T ) v = eigh ( Ci ) [ 1 ] [ : , : : - 1 ] Gi = np . zeros ( ( n_neighbors_i , n_components + 1 ) ) Gi [ : , 1 : ] = v [ : , : n_components ] Gi [ : , 0 ] = 1. / np . sqrt ( n_neighbors_i ) GiGiT = np . dot ( Gi , Gi . T ) nbrs_x , nbrs_y = np . meshgrid ( neighbors_i , neighbors_i ) with warnings . catch_warnings ( ) : warnings . simplefilter ( "ignore" ) M [ nbrs_x , nbrs_y ] -= GiGiT M [ neighbors_i , neighbors_i ] += 1 return null_space ( M , n_components , k_skip = 1 , eigen_solver = eigen_solver , random_state = random_state , solver_kwds = solver_kwds )
Perform a Local Tangent Space Alignment analysis on the data .
51,573
def run_riemannian_relaxation ( laplacian , initial_guess , intrinsic_dim , relaxation_kwds ) : n , s = initial_guess . shape relaxation_kwds = initialize_kwds ( relaxation_kwds , n , s , intrinsic_dim ) if relaxation_kwds [ 'save_init' ] : directory = relaxation_kwds [ 'backup_dir' ] np . save ( os . path . join ( directory , 'Y0.npy' ) , initial_guess ) sp . io . mmwrite ( os . path . join ( directory , 'L_used.mtx' ) , sp . sparse . csc_matrix ( laplacian ) ) lossf = relaxation_kwds [ 'lossf' ] return RiemannianRelaxation . init ( lossf , laplacian , initial_guess , intrinsic_dim , relaxation_kwds )
Helper function for creating a RiemannianRelaxation class .
51,574
def relax_isometry ( self ) : for ii in range ( self . relaxation_kwds [ 'niter' ] ) : self . H = self . compute_dual_rmetric ( ) self . loss = self . rieman_loss ( ) self . trace_var . update ( ii , self . H , self . Y , self . eta , self . loss ) self . trace_var . print_report ( ii ) self . trace_var . save_backup ( ii ) self . compute_gradient ( ) self . make_optimization_step ( first_iter = ( ii == 0 ) ) self . H = self . compute_dual_rmetric ( ) self . trace_var . update ( - 1 , self . H , self . Y , self . eta , self . loss ) self . trace_var . print_report ( ii ) tracevar_path = os . path . join ( self . trace_var . backup_dir , 'results.pyc' ) TracingVariable . save ( self . trace_var , tracevar_path )
Main function for doing riemannian relaxation .
51,575
def calc_loss ( self , embedding ) : Hnew = self . compute_dual_rmetric ( Ynew = embedding ) return self . rieman_loss ( Hnew = Hnew )
Helper function to calculate rieman loss given new embedding
51,576
def compute_dual_rmetric ( self , Ynew = None ) : usedY = self . Y if Ynew is None else Ynew rieman_metric = RiemannMetric ( usedY , self . laplacian_matrix ) return rieman_metric . get_dual_rmetric ( )
Helper function to calculate the
51,577
def automodsumm_to_autosummary_lines ( fn , app ) : fullfn = os . path . join ( app . builder . env . srcdir , fn ) with open ( fullfn ) as fr : if 'astropy_helpers.sphinx.ext.automodapi' in app . _extensions : from astropy_helpers . sphinx . ext . automodapi import automodapi_replace docname = os . path . splitext ( fn ) [ 0 ] filestr = automodapi_replace ( fr . read ( ) , app , True , docname , False ) else : filestr = fr . read ( ) spl = _automodsummrex . split ( filestr ) indent1s = spl [ 1 : : 5 ] mods = spl [ 2 : : 5 ] opssecs = spl [ 3 : : 5 ] indent2s = spl [ 4 : : 5 ] remainders = spl [ 5 : : 5 ] newlines = [ ] for i , ( i1 , i2 , modnm , ops , rem ) in enumerate ( zip ( indent1s , indent2s , mods , opssecs , remainders ) ) : allindent = i1 + ( '' if i2 is None else i2 ) oplines = ops . split ( '\n' ) toskip = [ ] allowedpkgnms = [ ] funcsonly = clssonly = False for i , ln in reversed ( list ( enumerate ( oplines ) ) ) : if ':functions-only:' in ln : funcsonly = True del oplines [ i ] if ':classes-only:' in ln : clssonly = True del oplines [ i ] if ':skip:' in ln : toskip . extend ( _str_list_converter ( ln . replace ( ':skip:' , '' ) ) ) del oplines [ i ] if ':allowed-package-names:' in ln : allowedpkgnms . extend ( _str_list_converter ( ln . replace ( ':allowed-package-names:' , '' ) ) ) del oplines [ i ] if funcsonly and clssonly : msg = ( 'Defined both functions-only and classes-only options. ' 'Skipping this directive.' ) lnnum = sum ( [ spl [ j ] . count ( '\n' ) for j in range ( i * 5 + 1 ) ] ) app . warn ( '[automodsumm]' + msg , ( fn , lnnum ) ) continue newlines . extend ( [ i1 + '.. currentmodule:: ' + modnm , '' , '.. autosummary::' ] ) newlines . extend ( oplines ) ols = True if len ( allowedpkgnms ) == 0 else allowedpkgnms for nm , fqn , obj in zip ( * find_mod_objs ( modnm , onlylocals = ols ) ) : if nm in toskip : continue if funcsonly and not inspect . isroutine ( obj ) : continue if clssonly and not inspect . 
isclass ( obj ) : continue newlines . append ( allindent + nm ) newlines . append ( '' ) return newlines
Generates lines from a file with an automodsumm entry suitable for feeding into autosummary .
51,578
def compute_adjacency_matrix ( X , method = 'auto' , ** kwargs ) : if method == 'auto' : if X . shape [ 0 ] > 10000 : method = 'cyflann' else : method = 'kd_tree' return Adjacency . init ( method , ** kwargs ) . adjacency_graph ( X . astype ( 'float' ) )
Compute an adjacency matrix with the given method
51,579
def split_kwargs ( relaxation_kwds ) : optimizer_keys_list = [ 'step_method' , 'linesearch' , 'eta_max' , 'eta' , 'm' , 'linesearch_first' ] optimizer_kwargs = { k : relaxation_kwds . pop ( k ) for k in optimizer_keys_list if k in relaxation_kwds } if 'm' in optimizer_kwargs : optimizer_kwargs [ 'momentum' ] = optimizer_kwargs . pop ( 'm' ) return optimizer_kwargs , relaxation_kwds
Split relaxation keywords to keywords for optimizer and others
51,580
def initialize_kwds ( relaxation_kwds , n_samples , n_components , intrinsic_dim ) : new_relaxation_kwds = { 'weights' : np . array ( [ ] , dtype = np . float64 ) , 'step_method' : 'fixed' , 'linesearch' : True , 'verbose' : False , 'niter' : 2000 , 'niter_trace' : 0 , 'presave' : False , 'sqrd' : True , 'alpha' : 0 , 'projected' : False , 'lossf' : 'epsilon' if n_components > intrinsic_dim else 'rloss' , 'subset' : np . arange ( n_samples ) , 'sub_dir' : current_time_str ( ) , 'backup_base_dir' : default_basedir , 'saveiter' : 10 , 'printiter' : 1 , 'save_init' : False , } new_relaxation_kwds . update ( relaxation_kwds ) backup_dir = os . path . join ( new_relaxation_kwds [ 'backup_base_dir' ] , new_relaxation_kwds [ 'sub_dir' ] ) new_relaxation_kwds [ 'backup_dir' ] = backup_dir create_output_dir ( backup_dir ) new_relaxation_kwds = convert_to_int ( new_relaxation_kwds ) if new_relaxation_kwds [ 'weights' ] . shape [ 0 ] != 0 : weights = np . absolute ( new_relaxation_kwds [ 'weights' ] ) . astype ( np . float64 ) new_relaxation_kwds [ 'weights' ] = weights / np . sum ( weights ) if new_relaxation_kwds [ 'lossf' ] == 'epsilon' : new_relaxation_kwds . setdefault ( 'eps_orth' , 0.1 ) if n_components != intrinsic_dim and new_relaxation_kwds [ 'lossf' ] == 'rloss' : raise ValueError ( 'loss function rloss is for n_components equal intrinsic_dim' ) if n_components == intrinsic_dim and new_relaxation_kwds [ 'lossf' ] == 'epsilon' : raise ValueError ( 'loss function rloss is for n_components equal intrinsic_dim' ) if new_relaxation_kwds [ 'projected' ] and new_relaxation_kwds [ 'subset' ] . shape [ 0 ] < n_samples : raise ValueError ( 'Projection derivative not working for subset methods.' ) prefix = 'projected' if new_relaxation_kwds [ 'projected' ] else 'nonprojected' new_relaxation_kwds [ 'lossf' ] = '{}_{}' . 
format ( prefix , new_relaxation_kwds [ 'lossf' ] ) step_method = new_relaxation_kwds [ 'step_method' ] if new_relaxation_kwds [ 'linesearch' ] == True : new_relaxation_kwds . setdefault ( 'linesearch_first' , False ) init_eta_max = 2 ** 11 if new_relaxation_kwds [ 'projected' ] else 2 ** 4 new_relaxation_kwds . setdefault ( 'eta_max' , init_eta_max ) else : new_relaxation_kwds . setdefault ( 'eta' , 1.0 ) if step_method == 'momentum' : new_relaxation_kwds . setdefault ( 'm' , 0.05 ) return new_relaxation_kwds
Initialize relaxation keywords .
51,581
def _graph_connected_component ( graph , node_id ) : connected_components = np . zeros ( shape = ( graph . shape [ 0 ] ) , dtype = np . bool ) connected_components [ node_id ] = True n_node = graph . shape [ 0 ] for i in range ( n_node ) : last_num_component = connected_components . sum ( ) _ , node_to_add = np . where ( graph [ connected_components ] != 0 ) connected_components [ node_to_add ] = True if last_num_component >= connected_components . sum ( ) : break return connected_components
Find the largest graph connected components the contains one given node
51,582
def predict ( self , X_test , y = None ) : if not hasattr ( self , 'geom_' ) : raise RuntimeError ( 'the .fit() function must be called before the .predict() function' ) if self . geom_ . X is None : raise NotImplementedError ( 'method only implemented when X passed as data' ) adjacency_kwds = self . geom_ . adjacency_kwds if self . geom_ . adjacency_method == 'cyflann' : if 'cyflann_kwds' in adjacency_kwds . keys ( ) : cyflann_kwds = adjacency_kwds [ 'cyflann_kwds' ] else : cyflann_kwds = { } total_adjacency_matrix = complete_adjacency_matrix ( self . geom_ . adjacency_matrix , self . geom_ . X , X_test , adjacency_kwds ) if self . geom_ . affinity_kwds is not None : affinity_kwds = self . geom_ . affinity_kwds else : affinity_kwds = { } if self . geom_ . affinity_method is not None : affinity_method = self . geom_ . affinity_method else : affinity_method = 'auto' total_affinity_matrix = compute_affinity_matrix ( total_adjacency_matrix , affinity_method , ** affinity_kwds ) if self . geom_ . laplacian_kwds is not None : laplacian_kwds = self . geom_ . laplacian_kwds else : laplacian_kwds = { } if self . geom_ . laplacian_method is not None : laplacian_method = self . geom_ . laplacian_method else : self . laplacian_method = 'auto' total_laplacian_matrix = compute_laplacian_matrix ( total_affinity_matrix , laplacian_method , ** laplacian_kwds ) ( n_sample_train ) = self . geom_ . adjacency_matrix . shape [ 0 ] total_laplacian_matrix = total_laplacian_matrix . tocsr ( ) C = total_laplacian_matrix [ : , : n_sample_train ] eigenvalues , eigenvectors = nystrom_extension ( C , self . eigenvectors_ , self . eigenvalues_ ) if self . diffusion_maps : embedding = compute_diffusion_maps ( laplacian_method , eigenvectors , eigenvalues , self . diffusion_time ) else : embedding = eigenvectors ( n_sample_test ) = X_test . shape [ 0 ] embedding_test = embedding [ - n_sample_test : , : ] return embedding_test , embedding
Predict embedding on new data X_test given the existing embedding on training data
51,583
def compute_laplacian_matrix ( affinity_matrix , method = 'auto' , ** kwargs ) : if method == 'auto' : method = 'geometric' return Laplacian . init ( method , ** kwargs ) . laplacian_matrix ( affinity_matrix )
Compute the laplacian matrix with the given method
51,584
def fit_geometry ( self , X = None , input_type = 'data' ) : if self . geom is None : self . geom_ = Geometry ( ) elif isinstance ( self . geom , Geometry ) : self . geom_ = self . geom else : try : kwds = dict ( ** self . geom ) except TypeError : raise ValueError ( "geom must be a Geometry instance or " "a mappable/dictionary" ) self . geom_ = Geometry ( ** kwds ) if self . radius is not None : self . geom_ . set_radius ( self . radius , override = False ) if X is not None : self . geom_ . set_matrix ( X , input_type ) return self
Inputs self . geom and produces the fitted geometry self . geom_
51,585
def set_radius ( self , radius , override = True , X = None , n_components = 2 ) : if radius < 0 : raise ValueError ( "radius must be non-negative" ) if override or ( 'radius' not in self . adjacency_kwds and 'n_neighbors' not in self . adjacency_kwds ) : self . adjacency_kwds [ 'radius' ] = radius if override or ( 'radius' not in self . affinity_kwds ) : self . affinity_kwds [ 'radius' ] = radius
Set the radius for the adjacency and affinity computation
51,586
def get_rmetric ( self , mode_inv = 'svd' , return_svd = False ) : if self . H is None : self . H , self . G , self . Hvv , self . Hsval = riemann_metric ( self . Y , self . L , self . mdimG , invert_h = True , mode_inv = mode_inv ) if self . G is None : self . G , self . Hvv , self . Hsvals , self . Gsvals = compute_G_from_H ( self . H , mode_inv = self . mode_inv ) if mode_inv is 'svd' and return_svd : return self . G , self . Hvv , self . Hsvals , self . Gsvals else : return self . G
Compute the Reimannian Metric
51,587
def report_and_save_keywords ( self , relaxation_kwds , precomputed_kwds ) : report_name = os . path . join ( self . backup_dir , 'relaxation_keywords.txt' ) pretty_relax_kwds = pprint . pformat ( relaxation_kwds , indent = 4 ) with open ( report_name , 'w' ) as wf : wf . write ( pretty_relax_kwds ) wf . close ( ) origin_name = os . path . join ( self . backup_dir , 'relaxation_keywords.pyc' ) with open ( origin_name , 'wb' ) as ro : pickle . dump ( relaxation_kwds , ro , protocol = pickle . HIGHEST_PROTOCOL ) ro . close ( ) if relaxation_kwds [ 'presave' ] : precomp_kwds_name = os . path . join ( self . backup_dir , 'precomputed_keywords.pyc' ) with open ( precomp_kwds_name , 'wb' ) as po : pickle . dump ( precomputed_kwds , po , protocol = pickle . HIGHEST_PROTOCOL ) po . close ( )
Save relaxation keywords to . txt and . pyc file
51,588
def update ( self , iiter , H , Y , eta , loss ) : if iiter <= self . niter_trace + 1 : self . H [ iiter ] = H self . Y [ iiter ] = Y elif iiter > self . niter - self . niter_trace + 1 : self . H [ self . ltrace + iiter - self . niter - 1 ] = H self . Y [ self . ltrace + iiter - self . niter - 1 ] = Y self . etas [ iiter ] = eta self . loss [ iiter ] = loss if self . loss [ iiter ] < self . lmin : self . Yh = Y self . lmin = self . loss [ iiter ] self . miniter = iiter if not iiter == - 1 else self . niter + 1
Update the trace_var in new iteration
51,589
def save ( cls , instance , filename ) : filename = cls . correct_file_extension ( filename ) try : with open ( filename , 'wb' ) as f : pickle . dump ( instance , f , protocol = pickle . HIGHEST_PROTOCOL ) except MemoryError as e : print ( '{} occurred, will downsampled the saved file by 20.' . format ( type ( e ) . __name__ ) ) copy_instance = instance . copy ( ) copy_instance . H = copy_instance . H [ : : 20 , : , : ] copy_instance . Y = copy_instance . Y [ : : 20 , : ] with open ( filename , 'wb' ) as f : pickle . dump ( copy_instance , f , protocol = pickle . HIGHEST_PROTOCOL )
Class method save for saving TracingVariable .
51,590
def load ( cls , filename ) : filename = cls . correct_file_extension ( filename ) with open ( filename , 'rb' ) as f : return pickle . load ( f )
Load from stored files
51,591
def find_mod_objs ( modname , onlylocals = False ) : __import__ ( modname ) mod = sys . modules [ modname ] if hasattr ( mod , '__all__' ) : pkgitems = [ ( k , mod . __dict__ [ k ] ) for k in mod . __all__ ] else : pkgitems = [ ( k , mod . __dict__ [ k ] ) for k in dir ( mod ) if k [ 0 ] != '_' ] ismodule = inspect . ismodule localnames = [ k for k , v in pkgitems if not ismodule ( v ) ] objs = [ v for k , v in pkgitems if not ismodule ( v ) ] fqnames = [ ] for obj , lnm in zip ( objs , localnames ) : if hasattr ( obj , '__module__' ) and hasattr ( obj , '__name__' ) : fqnames . append ( obj . __module__ + '.' + obj . __name__ ) else : fqnames . append ( modname + '.' + lnm ) if onlylocals : valids = [ fqn . startswith ( modname ) for fqn in fqnames ] localnames = [ e for i , e in enumerate ( localnames ) if valids [ i ] ] fqnames = [ e for i , e in enumerate ( fqnames ) if valids [ i ] ] objs = [ e for i , e in enumerate ( objs ) if valids [ i ] ] return localnames , fqnames , objs
Returns all the public attributes of a module referenced by name .
51,592
def get_megaman_image ( factor = 1 ) : imfile = os . path . join ( os . path . dirname ( __file__ ) , 'megaman.png' ) data = ndimage . imread ( imfile ) / 255 if factor > 1 : data = data . repeat ( factor , axis = 0 ) . repeat ( factor , axis = 1 ) return data
Return an RGBA representation of the megaman icon
51,593
def generate_megaman_data ( sampling = 2 ) : data = get_megaman_image ( ) x = np . arange ( sampling * data . shape [ 1 ] ) / float ( sampling ) y = np . arange ( sampling * data . shape [ 0 ] ) / float ( sampling ) X , Y = map ( np . ravel , np . meshgrid ( x , y ) ) C = data [ np . floor ( Y . max ( ) - Y ) . astype ( int ) , np . floor ( X ) . astype ( int ) ] return np . vstack ( [ X , Y ] ) . T , C
Generate 2D point data of the megaman image
51,594
def _make_S_curve ( x , range = ( - 0.75 , 0.75 ) ) : assert x . ndim == 1 x = x - x . min ( ) theta = 2 * np . pi * ( range [ 0 ] + ( range [ 1 ] - range [ 0 ] ) * x / x . max ( ) ) X = np . empty ( ( x . shape [ 0 ] , 2 ) , dtype = float ) X [ : , 0 ] = np . sign ( theta ) * ( 1 - np . cos ( theta ) ) X [ : , 1 ] = np . sin ( theta ) X *= x . max ( ) / ( 2 * np . pi * ( range [ 1 ] - range [ 0 ] ) ) return X
Make a 2D S - curve from a 1D vector
51,595
def generate_megaman_manifold ( sampling = 2 , nfolds = 2 , rotate = True , random_state = None ) : X , c = generate_megaman_data ( sampling ) for i in range ( nfolds ) : X = np . hstack ( [ _make_S_curve ( x ) for x in X . T ] ) if rotate : rand = check_random_state ( random_state ) R = rand . randn ( X . shape [ 1 ] , X . shape [ 1 ] ) U , s , VT = np . linalg . svd ( R ) X = np . dot ( X , U ) return X , c
Generate a manifold of the megaman data
51,596
def snapshots_to_send ( source_snaps , dest_snaps ) : if len ( source_snaps ) == 0 : raise AssertionError ( "No snapshots exist locally!" ) if len ( dest_snaps ) == 0 : return None , source_snaps [ - 1 ] last_remote = dest_snaps [ - 1 ] for snap in reversed ( source_snaps ) : if snap == last_remote : return last_remote , source_snaps [ - 1 ] raise AssertionError ( "Latest snapshot on destination doesn't exist on source!" )
return pair of snapshots
51,597
def get_chunk ( self ) : while not self . _eof_reached : read = self . input_stream . read ( self . chunk_size - len ( self . _partial_chunk ) ) if len ( read ) == 0 : self . _eof_reached = True self . _partial_chunk += read if len ( self . _partial_chunk ) == self . chunk_size or self . _eof_reached : chunk = self . _partial_chunk self . _partial_chunk = "" return chunk
Return complete chunks or None if EOF reached
51,598
def _handle_result ( self ) : result = self . inbox . get ( ) if result . success : if self . _verbosity >= VERB_PROGRESS : sys . stderr . write ( "\nuploaded chunk {} \n" . format ( result . index ) ) self . results . append ( ( result . index , result . md5 ) ) self . _pending_chunks -= 1 else : raise result . traceback
Process one result . Block untill one is available
51,599
def _send_chunk ( self , index , chunk ) : self . _pending_chunks += 1 self . outbox . put ( ( index , chunk ) )
Send the current chunk to the workers for processing . Called when the _partial_chunk is complete .