idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
50,300
def processFormData(self, data, dataset_name):
    """Load a CSV string of form data into the table for *dataset_name*.

    Validates that every configured column is present in the CSV header
    before handing the remaining rows to _processDML.
    """
    cols = self.datasets[dataset_name]
    reader = self.getCSVReader(data, reader_type=csv.reader)
    header = next(reader)
    for column in cols:
        name = column["varname"]
        if name not in header:
            raise ValueError("Column %s not found in data for dataset %s" % (name, dataset_name,))
    self._processDML(dataset_name, cols, reader)
Take a string of form data as CSV and convert it to INSERT statements, returning the template and data values.
50,301
def _processDDL(self):
    """Generate the table DDL and execute it against the SQLite connection."""
    sql_statements = self._generateDDL()
    logging.info('Generating sqllite tables')
    for stmt in sql_statements:
        cursor = self.conn.cursor()
        cursor.execute(stmt)
    self.conn.commit()
Generate and process table SQL SQLLite version
50,302
def _processDML(self, dataset_name, cols, reader):
    """Bulk-insert all rows from *reader* into *dataset_name* (SQLite)."""
    template = self._generateInsertStatement(dataset_name, cols)
    cursor = self.conn.cursor()
    cursor.executemany(template, reader)
    self.conn.commit()
Overridden version of create DML for SQLLite
50,303
def _generateInsertStatement ( self , dataset_name , cols ) : col_names = [ col [ "varname" ] for col in cols ] qms = ',' . join ( [ '?' for x in col_names ] ) return 'INSERT INTO %s (%s) values (%s)' % ( dataset_name , ',' . join ( col_names ) , qms )
Generates a sql INSERT template
50,304
def execute(self):
    """Build the local DB by pulling metadata and per-dataset data from RWS."""
    logging.info('Requesting view metadata for project %s' % self.project_name)
    project_csv_meta = self.rws_connection.send_request(
        ProjectMetaDataRequest(self.project_name))
    self.db_adapter.processMetaData(project_csv_meta)
    for dataset_name in self.db_adapter.datasets.keys():
        logging.info('Requesting data from dataset %s' % dataset_name)
        form_name, _type = self.name_type_from_viewname(dataset_name)
        request = FormDataRequest(self.project_name, self.environment, _type, form_name)
        form_data = self.rws_connection.send_request(request)
        logging.info('Populating dataset %s' % dataset_name)
        self.db_adapter.processFormData(form_data, dataset_name)
    logging.info('Process complete')
Generate local DB pulling metadata and data from RWSConnection
50,305
def typeof_rave_data(value):
    """Duck-type a Rave value.

    Returns a (kind, format) tuple where kind is one of 'date', 'time',
    'int', 'float' or 'string'; format is the matching strptime format for
    dates/times, otherwise None. A string like '1' is typed as an integer
    (not a string/float) so scrambling replaces like with like.
    """
    date_formats = ['%d %b %Y', '%b %Y', '%Y', '%d %m %Y', '%m %Y',
                    '%d/%b/%Y', '%b/%Y', '%d/%m/%Y', '%m/%Y']
    for fmt in date_formats:
        try:
            datetime.datetime.strptime(value, fmt)
            # A 4-digit value outside a plausible year range is not a date;
            # fall through to the numeric checks below.
            if len(value) == 4 and (int(value) < 1900 or int(value) > 2030):
                break
            return ('date', fmt)
        except (ValueError, TypeError):
            pass
    for fmt in ['%H:%M:%S', '%H:%M', '%I:%M:%S', '%I:%M', '%I:%M:%S %p', '%I:%M %p']:
        try:
            datetime.datetime.strptime(value, fmt)
            return ('time', fmt)
        except (ValueError, TypeError):
            pass
    try:
        if (isinstance(value, str) and isinstance(int(value), int)) or isinstance(value, int):
            return ('int', None)
    except (ValueError, TypeError):
        pass
    try:
        float(value)
        return ('float', None)
    except (ValueError, TypeError):
        pass
    return ('string', None)
Function to duck - type values not relying on standard Python functions because for example a string of 1 should be typed as an integer and not as a string or float since we re trying to replace like with like when scrambling .
50,306
def scramble_float(self, length, sd=0):
    """Return a random float as a string with *length* digits, *sd* of them decimals."""
    if sd == 0:
        return str(fake.random_number(length))
    return str(fake.pyfloat(length - sd, sd, positive=True))
Return random float in specified format
50,307
def scramble_date(self, value, format='%d %b %Y'):
    """Return a random date string in *format*.

    For a non-empty *value* the result falls within the year preceding it;
    for an empty value, within the last year from now.
    """
    if value == '':
        start_date, end_date = '-1y', 'now'
    else:
        end_date = datetime.datetime.strptime(value, format).date()
        start_date = end_date - datetime.timedelta(days=365)
    fake_date = fake.date_time_between(start_date=start_date, end_date=end_date)
    return fake_date.strftime(format).upper()
Return random date
50,308
def scramble_string(self, length):
    """Return random text of at most *length* characters.

    Faker's text() requires length > 5; shorter strings are built from
    individual random letters.
    """
    if length > 5:
        return fake.text(length)
    return ''.join(fake.random_letter() for _ in range(length))
Return random string
50,309
def scramble_value(self, value):
    """Duck-type *value* and scramble it with the matching scrambler.

    Falls back to an empty string when the value cannot be scrambled
    (deliberately best-effort: a failed scramble must not abort the run).
    """
    try:
        kind, fmt = typeof_rave_data(value)
        if kind == 'float':
            _, frac = value.split('.')
            return self.scramble_float(len(value) - 1, len(frac))
        elif kind == 'int':
            return self.scramble_int(len(value))
        elif kind == 'date':
            return self.scramble_date(value, fmt)
        elif kind == 'time':
            return self.scramble_time(fmt)
        elif kind == 'string':
            return self.scramble_string(len(value))
        else:
            return value
    except Exception:  # narrowed from a bare except; still swallows to ""
        return ""
Duck - type value and scramble appropriately
50,310
def scramble_codelist(self, codelist):
    """Return a random coded value from the code list with OID *codelist*."""
    path = ".//{0}[@{1}='{2}']".format(E_ODM.CODELIST.value, A_ODM.OID.value, codelist)
    elem = self.metadata.find(path)
    codes = [item.get(A_ODM.CODED_VALUE.value)
             for item in elem.iter(E_ODM.CODELIST_ITEM.value)]
    codes += [item.get(A_ODM.CODED_VALUE.value)
              for item in elem.iter(E_ODM.ENUMERATED_ITEM.value)]
    return fake.random_element(codes)
Return random element from code list
50,311
def scramble_itemdata(self, oid, value):
    """Scramble *value* using the ODM metadata for ItemDef *oid*.

    Uses the declared data type, length, significant digits, code list and
    datetime format when metadata is available; otherwise falls back to
    pure duck-typing via scramble_value.
    """
    if self.metadata is None:
        return self.scramble_value(value)
    path = ".//{0}[@{1}='{2}']".format(E_ODM.ITEM_DEF.value, A_ODM.OID.value, oid)
    elem = self.metadata.find(path)
    datatype = elem.get(A_ODM.DATATYPE.value)
    codelist = None
    for el in elem.iter(E_ODM.CODELIST_REF.value):
        codelist = el.get(A_ODM.CODELIST_OID.value)
    # BUG FIX: the old test "A_ODM.LENGTH in elem" checked *child elements*
    # against an enum member (not its .value), so the declared Length was
    # never picked up. Check the attribute keys, as the other lookups do.
    if A_ODM.LENGTH.value in elem.keys():
        length = int(elem.get(A_ODM.LENGTH.value))
    else:
        length = 1
    if A_ODM.SIGNIFICANT_DIGITS.value in elem.keys():
        # int() added: attribute values are strings, and scramble_float
        # compares and subtracts this value numerically.
        sd = int(elem.get(A_ODM.SIGNIFICANT_DIGITS.value))
    else:
        sd = 0
    if A_ODM.DATETIME_FORMAT.value in elem.keys():
        dt_format = elem.get(A_ODM.DATETIME_FORMAT.value)
        # Translate ODM datetime tokens to strptime directives.
        for odm_token, strptime_token in [('yyyy', '%Y'), ('MMM', '%b'),
                                          ('dd', '%d'), ('HH', '%H'),
                                          ('nn', '%M'), ('ss', '%S'),
                                          ('-', '')]:
            dt_format = dt_format.replace(odm_token, strptime_token)
    if codelist is not None:
        return self.scramble_codelist(codelist)
    elif datatype == 'integer':
        return self.scramble_int(length)
    elif datatype == 'float':
        return self.scramble_float(length, sd)
    elif datatype in ['string', 'text']:
        return self.scramble_string(length)
    elif datatype in ['date', 'datetime']:
        # NOTE(review): assumes a DateTimeFormat attribute accompanies
        # date/datetime items; otherwise dt_format is unbound (as before).
        return self.scramble_date(value, dt_format)
    elif datatype in ['time']:
        return self.scramble_time(dt_format)
    else:
        return self.scramble_value(value)
If metadata provided use it to scramble the value based on data type
50,312
def fill_empty(self, fixed_values, input):
    """Fill every empty-valued ItemData in an ODM document with scrambled data.

    ItemData elements that already carry a value are removed, as are any
    ItemGroupData/FormData/StudyEventData elements left without children.
    Returns the serialized document.
    """
    root = etree.fromstring(input)
    for item in root.iter(E_ODM.ITEM_DATA.value):
        if item.get(A_ODM.VALUE.value) == "":
            oid = item.get(A_ODM.ITEM_OID.value)
            if fixed_values is not None and oid in fixed_values:
                new_value = fixed_values[oid]
            else:
                new_value = self.scramble_itemdata(item.get(A_ODM.ITEM_OID.value),
                                                   item.get(A_ODM.VALUE.value))
            item.set(A_ODM.VALUE.value, new_value)
        else:
            item.getparent().remove(item)
    # Prune containers emptied by the removals above, innermost first.
    for container_tag in (E_ODM.ITEM_GROUP_DATA, E_ODM.FORM_DATA,
                          E_ODM.STUDY_EVENT_DATA):
        for container in root.iter(container_tag.value):
            if len(container) == 0:
                container.getparent().remove(container)
    return etree.tostring(root)
Fill in random values for all empty - valued ItemData elements in an ODM document
50,313
def make_int(value, missing=-1):
    """Convert *value* to an int, mapping blank strings and None to *missing*.

    Raises ValueError for non-blank, non-numeric strings. The `six`
    dependency was dropped: the module is Python-3-only (it uses bare
    ``super()`` elsewhere), so ``str`` covers six.string_types.
    """
    if isinstance(value, str):
        if not value.strip():  # blank/whitespace-only means "no value"
            return missing
    elif value is None:
        return missing
    return int(value)
Convert a string value to an int, returning the missing value for blank or None input.
50,314
def parse(data, eventer):
    """Parse ODM XML *data*, firing events on *eventer* via ODMTargetParser."""
    target = ODMTargetParser(eventer)
    return etree.XML(data, etree.XMLParser(target=target))
Parse the XML data firing events from the eventer
50,315
def emit(self):
    """Dispatch the just-finished element to the handler.

    Calls the handler method named after the context subcategory when it
    exists, otherwise the handler's 'default' method (if any).
    """
    self.count += 1
    event_name = self.context.subcategory
    handler = self.handler
    if hasattr(handler, event_name):
        getattr(handler, event_name)(self.context)
    elif hasattr(handler, 'default'):
        handler.default(self.context)
We are finished processing one element . Emit it
50,316
def get_parent_element(self):
    """Return the element (audit record or signature) attributes apply to.

    Signature and Audit elements share sub-elements, so the current
    ref_state decides which object receives the parsed attributes.
    """
    targets = {
        AUDIT_REF_STATE: self.context.audit_record,
        SIGNATURE_REF_STATE: self.context.signature,
    }
    return targets[self.ref_state]
Signature and Audit elements share sub-elements; we need to know which one to set attributes on.
50,317
def data(self, data):
    """Handle character data between tags according to the current parse state."""
    if self.state == STATE_SOURCE_ID:
        self.context.audit_record.source_id = int(data)
    elif self.state == STATE_DATETIME:
        stamp = datetime.datetime.strptime(data, "%Y-%m-%dT%H:%M:%S")
        self.get_parent_element().datetimestamp = stamp
    elif self.state == STATE_REASON_FOR_CHANGE:
        # Empty/whitespace-only reasons are stored as None.
        self.context.audit_record.reason_for_change = data.strip() or None
    self.state = STATE_NONE
Called for text between tags
50,318
def validate_one_of(values):
    """Validate that a field's value is one of *values*.

    *values* may be a callable returning the options. None values pass.
    """
    def one_of_validator(field, data):
        if field.value is None:
            return
        options = values() if callable(values) else values
        if field.value not in options:
            raise ValidationError('one_of', choices=', '.join(map(str, options)))
    return one_of_validator
Validate that a field is in one of the given values .
50,319
def validate_none_of(values):
    """Validate that a field's value is not one of *values*.

    *values* may be a callable returning the disallowed options.
    """
    def none_of_validator(field, data):
        options = values() if callable(values) else values
        if field.value in options:
            raise ValidationError('none_of', choices=str.join(', ', options))
    return none_of_validator
Validate that a field is not in one of the given values .
50,320
def validate_equal(value):
    """Validate the field value equals *value* (anything supporting ==).

    None values pass without comparison.
    """
    def equal_validator(field, data):
        if field.value is None:
            return
        if not (field.value == value):
            raise ValidationError('equal', other=value)
    return equal_validator
Validate the field value is equal to the given value . Should work with anything that supports == operator .
50,321
def validate_matches(other):
    """Validate the field value equals the value of field *other* in *data*.

    None values pass without comparison.
    """
    def matches_validator(field, data):
        if field.value is None:
            return
        if not (field.value == data.get(other)):
            raise ValidationError('matches', other=other)
    return matches_validator
Validate the field value is equal to another field in the data . Should work with anything that supports == operator .
50,322
def validate_regexp(pattern, flags=0):
    """Validate the stringified field value matches *pattern*.

    *pattern* may be a pattern string (compiled with *flags*) or an
    already-compiled regex. None values pass.
    """
    if isinstance(pattern, str):
        regex = re.compile(pattern, flags)
    else:
        regex = pattern
    def regexp_validator(field, data):
        if field.value is None:
            return
        if regex.match(str(field.value)) is None:
            raise ValidationError('regexp', pattern=pattern)
    return regexp_validator
Validate the field matches the given regular expression . Should work with anything that supports == operator .
50,323
def validate_email():
    """Validate the field holds a syntactically valid email address.

    Checks the local part and domain separately; 'localhost' is always an
    acceptable domain. None values pass.
    """
    user_regex = re.compile(
        r"(^[-!#$%&'*+/=?^`{}|~\w]+(\.[-!#$%&'*+/=?^`{}|~\w]+)*$"
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]'
        r'|\\[\001-\011\013\014\016-\177])*"$)',
        re.IGNORECASE | re.UNICODE)
    domain_regex = re.compile(
        r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
        r'(?:[A-Z]{2,6}|[A-Z0-9-]{2,})$'
        r'|^\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)'
        r'(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$',
        re.IGNORECASE | re.UNICODE)
    domain_whitelist = ('localhost',)

    def email_validator(field, data):
        if field.value is None:
            return
        value = str(field.value)
        if '@' not in value:
            raise ValidationError('email')
        user_part, domain_part = value.rsplit('@', 1)
        if not user_regex.match(user_part):
            raise ValidationError('email')
        if domain_part not in domain_whitelist and not domain_regex.match(domain_part):
            raise ValidationError('email')
    return email_validator
Validate the field is a valid email address .
50,324
def isiterable_notstring(value):
    """Return True when *value* is iterable (or a generator) but not a str."""
    if isinstance(value, str):
        return False
    return (isinstance(value, Iterable)
            or isgeneratorfunction(value)
            or isgenerator(value))
Returns True if the value is iterable but not a string . Otherwise returns False .
50,325
def get_value(self, name, data):
    """Return the value for *name* from *data*, or this field's default.

    The default may be a zero-argument callable. Returns None when the key
    is absent and no default is set.
    """
    if name in data:
        return data.get(name)
    # Compare against None so falsy defaults (0, '', False) are honored;
    # the old truthiness test silently dropped them.
    if self.default is not None:
        return self.default() if callable(self.default) else self.default
    return None
Get the value of this field from the data . If there is a problem with the data raise ValidationError .
50,326
def validate(self, name, data):
    """Resolve, coerce and validate this field's value from *data*.

    Runs every validator in self.validators; a validator raises
    ValidationError when the data is invalid.
    """
    self.value = self.get_value(name, data)
    if self.value is not None:
        self.value = self.coerce(self.value)
    for validator in self.validators:
        validator(self, data)
Check to make sure ths data for this field is valid . Usually runs all validators in self . validators list . If there is a problem with the data raise ValidationError .
50,327
def validate(self, name, data):
    """Validate, then resolve the value to a model row via the lookup field.

    Raises ValidationError('related') when no matching row exists.
    """
    super().validate(name, data)
    if self.value is None:
        return
    try:
        self.value = self.query.get(self.lookup_field == self.value)
    except (AttributeError, ValueError, peewee.DoesNotExist):
        raise ValidationError('related', field=self.lookup_field.name,
                              values=self.value)
If there is a problem with the data raise ValidationError .
50,328
def coerce(self, value):
    """Coerce *value* into a list of scalars for the lookup field.

    Dicts and non-iterable scalars are wrapped into a single-element list.
    """
    if isinstance(value, dict) or not isiterable_notstring(value):
        value = [value]
    return [coerce_single_instance(self.lookup_field, item) for item in value]
Convert from whatever is given to a list of scalars for the lookup_field .
50,329
def initialize_fields(self):
    """Collect Field instances declared on this class into self._meta.fields."""
    for attr_name in dir(self):
        attr = getattr(self, attr_name)
        if isinstance(attr, Field):
            self._meta.fields[attr_name] = attr
The dict self.base_fields is a model instance at this point. Turn it into an instance attribute on this meta class. Also initialize any other special fields if needed in sub-classes.
50,330
def initialize_fields(self):
    """Convert model fields to validator fields, then let the parent run.

    Skips the primary key, additionally picks up ManyToManyField
    attributes declared on the model class, and finally calls the parent
    so manually-defined fields can overwrite the converted ones.
    """
    for name, field in self.instance._meta.fields.items():
        if getattr(field, 'primary_key', False):
            continue
        self._meta.fields[name] = self.convert_field(name, field)
    model_class = type(self.instance)
    for name in dir(model_class):
        attr = getattr(model_class, name, None)
        if isinstance(attr, ManyToManyField):
            self._meta.fields[name] = self.convert_field(name, attr)
    super().initialize_fields()
Convert all model fields to validator fields . Then call the parent so that overwrites can happen if necessary for manually defined fields .
50,331
def convert_field(self, name, field):
    """Convert a single Peewee model field to a validator field.

    Builds the validator list (required / one_of / length / unique) from
    the model field's attributes, and maps foreign-key and many-to-many
    fields to the corresponding choice fields. Leftover debug print()
    statements were removed.
    """
    if PEEWEE3:
        field_type = field.field_type.lower()
    else:
        field_type = field.db_field
    pwv_field = ModelValidator.FIELD_MAP.get(field_type, StringField)

    validators = []
    required = not bool(getattr(field, 'null', True))
    choices = getattr(field, 'choices', ())
    default = getattr(field, 'default', None)
    max_length = getattr(field, 'max_length', None)
    unique = getattr(field, 'unique', False)
    if required:
        validators.append(validate_required())
    if choices:
        validators.append(validate_one_of([c[0] for c in choices]))
    if max_length:
        validators.append(validate_length(high=max_length))
    if unique:
        validators.append(validate_model_unique(field, self.instance.select(),
                                                self.pk_field, self.pk_value))

    if isinstance(field, peewee.ForeignKeyField):
        rel_field = field.rel_field if PEEWEE3 else field.to_field
        return ModelChoiceField(field.rel_model, rel_field,
                                default=default, validators=validators)
    if isinstance(field, ManyToManyField):
        return ManyModelChoiceField(field.rel_model,
                                    field.rel_model._meta.primary_key,
                                    default=default, validators=validators)
    return pwv_field(default=default, validators=validators)
Convert a single field from a Peewee model field to a validator field .
50,332
def perform_index_validation(self, data):
    """Validate unique (composite) indexes on the model.

    Runs after the per-field validation; adds an 'index' error to every
    column of a violated unique index. The current row (pk) is excluded
    from the uniqueness query when updating.
    """
    unique_indexes = [{col: data.get(col, None) for col in columns}
                      for columns, unique in self.instance._meta.indexes
                      if unique]
    for index in unique_indexes:
        query = self.instance.filter(**index)
        if self.pk_field and self.pk_value:
            query = query.where(~(self.pk_field == self.pk_value))
        if query.count():
            err = ValidationError('index', fields=str.join(', ', index.keys()))
            for col in index.keys():
                self.add_error(col, err)
Validate any unique indexes specified on the model . This should happen after all the normal fields have been validated . This can add error messages to multiple fields .
50,333
def save(self, force_insert=False):
    """Save the instance, deferring many-to-many assignments until after save.

    M2M values can only be attached once the row exists, so they are held
    back and set after the underlying save(); returns the save() result.
    """
    deferred = {}
    for field, value in self.data.items():
        model_field = getattr(type(self.instance), field, None)
        if isinstance(model_field, ManyToManyField):
            if value is not None:
                deferred[field] = value
            continue
        setattr(self.instance, field, value)
    rv = self.instance.save(force_insert=force_insert)
    for field, value in deferred.items():
        setattr(self.instance, field, value)
    return rv
Save the model and any related many - to - many fields .
50,334
def long_to_bytes(N, blocksize=1):
    """Return the big-endian byte representation of non-negative integer *N*.

    If *blocksize* > 1 the result is left-padded with zero bytes so its
    length is a multiple of *blocksize*.
    """
    hexstr = hex(N)
    hexstr = hexstr[2:] if hexstr.startswith('0x') else hexstr
    hexstr = hexstr[:-1] if hexstr.endswith('L') else hexstr  # Python 2 long suffix
    if len(hexstr) % 2 != 0:
        hexstr = '0' + hexstr
    bytestring = binascii.unhexlify(hexstr)
    if blocksize > 0 and len(bytestring) % blocksize != 0:
        # BUG FIX: pad with b'\x00' — the old '\x00' str padding raised
        # TypeError when concatenated with bytes on Python 3.
        bytestring = b'\x00' * (blocksize - (len(bytestring) % blocksize)) + bytestring
    return bytestring
Given an input integer N long_to_bytes returns the representation of N in bytes . If blocksize is greater than 1 then the output string will be right justified and then padded with zero - bytes such that the return values length is a multiple of blocksize .
50,335
def getCiphertextLen(self, ciphertext):
    """Return the full ciphertext length implied by a valid header.

    This is the plaintext length plus the fixed ciphertext expansion.
    """
    return self.getPlaintextLen(ciphertext) + Encrypter._CTXT_EXPANSION
Given a ciphertext with a valid header returns the length of the ciphertext inclusive of ciphertext expansion .
50,336
def getPlaintextLen(self, ciphertext):
    """Return the plaintext payload length encoded in a ciphertext header.

    Raises RecoverableDecryptionError when fewer than 16 header bytes are
    available, UnrecoverableDecryptionError on bad padding or a negative
    length. NOTE(review): the padding/message concatenation uses str
    literals, which assumes Python-2-style byte strings — confirm before
    running under Python 3.
    """
    if len(ciphertext) < 16:
        raise RecoverableDecryptionError('Incomplete ciphertext header.')
    header = ciphertext[:16]
    L = self._ecb_enc_K1.decrypt(header)
    padding_actual = L[-8:-4]
    if padding_actual != '\x00\x00\x00\x00':
        raise UnrecoverableDecryptionError('Invalid padding: ' + padding_actual)
    message_length = fte.bit_ops.bytes_to_long(L[-8:])
    if message_length < 0:
        raise UnrecoverableDecryptionError('Negative message length.')
    return message_length
Given a ciphertext with a valid header returns the length of the plaintext payload .
50,337
def cached_stan_file(model_name='anon_model', file=None, model_code=None,
                     cache_dir=None, fit_cachefile=None, cache_only=None,
                     force=False, include_modelfile=False, prefix_only=False,
                     **kwargs):
    """Compute the pickle filename caching a stan fit for these inputs.

    Returns (model_cachefile, fit_cachefile) when include_modelfile is
    True, the cache prefix (without .pkl) when prefix_only is True, and
    just the fit cache file otherwise.
    """
    model_prefix, model_cachefile = cached_model_file(
        model_name=model_name, file=file, model_code=model_code,
        cache_dir=cache_dir, fit_cachefile=fit_cachefile, include_prefix=True)
    if not fit_cachefile:
        fit_cachefile = '.'.join([model_prefix, 'stanfit',
                                  _make_digest(dict(**kwargs)), 'pkl'])
    if include_modelfile:
        return model_cachefile, fit_cachefile
    if prefix_only:
        fit_cachefile = re.sub(string=fit_cachefile, pattern='.pkl$', repl='')
    return fit_cachefile
Given inputs to cached_stan_fit compute pickle file containing cached fit
50,338
def plot(self, **kwds):
    r"""Plot all six MOT beams and return the combined plot object."""
    beams = (self.lx, self.ly, self.lz, self.lx_r, self.ly_r, self.lz_r)
    plo = beams[0].plot(dist_to_center=3, **kwds)
    for beam in beams[1:]:
        plo += beam.plot(dist_to_center=3, **kwds)
    return plo
r The plotting function for MOT fields .
50,339
def fprint(expr, print_ascii=False):
    r"""Pretty-print *expr* in ASCII, or return it for notebook rendering.

    With print_ascii=True the expression is pprint-ed (no return value);
    otherwise the expression itself is returned for the notebook to render.
    """
    if print_ascii:
        pprint(expr, use_unicode=False, num_columns=120)
    else:
        return expr
r This function chooses whether to use ascii characters to represent a symbolic expression in the notebook or to use sympy s pprint .
50,340
def Mu(i, j, s, N, excluded_mu=[]):
    """Return the global (flattened) index mu for density-matrix element (i, j, s).

    s=+1 selects real parts, s=-1 imaginary parts. Populations (i == j)
    use s != -1, except rho11 whose imaginary part maps to index 0.
    Indices listed in *excluded_mu* (read-only here) are compacted out of
    the numbering. Raises ValueError for missing populations and for
    i < j (those equations are not calculated).
    """
    if i == j:
        if s == -1:
            if i == 1:
                return 0
            mes = 'There is no population rhoii with i=' + str(i) + '.'
            raise ValueError(mes)
        mu = i - 1
    elif i > j and s == 1:
        mu = i - j + sum([N - k for k in range(1, j)]) + N - 1
    elif i > j and s == -1:
        # "//" keeps mu an integer index on Python 3 (plain "/" gave float)
        mu = i - j + sum([N - k for k in range(1, j)]) + N - 1 + N * (N - 1) // 2
    else:
        mes = 'i=' + str(i) + ', j=' + str(j)
        mes += ' Equations for i<j are not calculated.' + str(s)
        raise ValueError(mes)
    if excluded_mu != []:
        mu = mu - len([ii for ii in excluded_mu if ii < mu])
    return mu
This function calculates the global index mu for the element i j .
50,341
def IJ(mu, N):
    """Return (i, j, s) corresponding to the global index *mu*.

    Index 0 is rho11's imaginary part; indices 1..N-1 are populations;
    the remaining indices are coherences, real parts first then imaginary.
    Raises ValueError when mu is outside [0, N**2).
    """
    if mu == 0:
        return 1, 1, 1
    if mu not in range(0, N ** 2):
        raise ValueError('mu has an invalid value mu=' + str(mu) + '.')
    if 1 <= mu <= N - 1:
        return mu + 1, mu + 1, 1
    m = N - 1
    M = N * (N - 1) / 2
    for jj in range(1, N):
        for ii in range(jj + 1, N + 1):
            m += 1
            if m == mu or m + M == mu:
                if mu > N * (N + 1) / 2 - 1:
                    return ii, jj, -1
                return ii, jj, 1
Return i j s for any given mu .
50,342
def calculate_iI_correspondence(omega):
    r"""Map between degenerate (i) and nondegenerate (I) level numbering.

    Consecutive rows of *omega* whose first entry is equal belong to the
    same nondegenerate level. Returns (i_d, I_nd, Nnd): i_d(I) gives the
    first degenerate index of a nondegenerate level, I_nd(i) the
    nondegenerate level of a degenerate index, and Nnd the level count.
    """
    Ne = len(omega[0])
    previous = omega[0][0]
    correspondence = []
    I = 0
    for i in range(Ne):
        if omega[i][0] != previous:
            previous = omega[i][0]
            I += 1
        correspondence += [(i + 1, I + 1)]
    Nnd = I + 1

    def I_nd(i):
        return correspondence[i - 1][1]

    def i_d(I):
        for i in range(Ne):
            if correspondence[i][1] == I:
                return correspondence[i][0]

    return i_d, I_nd, Nnd
r Get the correspondence between degenerate and nondegenerate schemes.
50,343
def part(z, s):
    r"""Return the real (s=1) or imaginary (s=-1) part of z, or z itself (s=0).

    Uses numpy's real/imag under Sage (where z may not expose .real/.imag
    attributes) and the plain attributes otherwise.
    """
    if sage_included:
        if s == 1:
            return np.real(z)
        elif s == -1:
            return np.imag(z)
        elif s == 0:
            return z
    else:
        if s == 1:
            return z.real
        elif s == -1:
            return z.imag
        elif s == 0:
            return z
r Get the real or imaginary part of a complex number .
50,344
def symbolic_part(z, s):
    r"""Return the symbolic real (s=1) or imaginary (s=-1) part of z, or z (s=0)."""
    if s == 1:
        return symre(z)
    if s == -1:
        return symim(z)
    if s == 0:
        return z
r Get the real or imaginary part of a complex symbol .
50,345
def detuning_combinations(lists):
    r"""Return the cartesian product of range(n) for each count n in *lists*.

    *lists* holds, per laser, the number of transitions it induces; the
    result enumerates every combination as a list of index lists.
    """
    comb = [[i] for i in range(lists[0])]
    for counts in lists[1:]:
        comb = [prefix[:] + [k] for prefix in comb for k in range(counts)]
    return comb
r This function receives a list of length Nl with the number of transitions each laser induces. It returns the cartesian product of all these possibilities as a list of all possible combinations.
50,346
def laser_detunings(Lij, Nl, i_d, I_nd, Nnd):
    r"""Collect the distinct detuning vectors each laser produces.

    Each detuning is a length-Nnd list that is +1 at the upper level and
    -1 at the lower level of a transition (in nondegenerate numbering);
    detuningsij records the corresponding (upper, lower) level pairs.
    """
    Ne = len(Lij)
    detunings = [[] for _ in range(Nl)]
    detuningsij = [[] for _ in range(Nl)]
    zero = [0 for _ in range(Nnd)]
    for i in range(1, Ne):
        for j in range(i):
            for l in Lij[i][j]:
                det = zero[:]
                det[I_nd(i + 1) - 1] += 1
                det[I_nd(j + 1) - 1] -= 1
                if det not in detunings[l - 1]:
                    detunings[l - 1] += [det]
                    detuningsij[l - 1] += [(I_nd(i + 1) - 1, I_nd(j + 1) - 1)]
    return detunings, detuningsij
r This function returns the list of transitions i j that each laser produces as lists of length Ne whose elements are all zero except for the ith element = 1 and the jth element = - 1 .
50,347
def find_omega_min(omega, Nl, detuningsij, i_d, I_nd):
    r"""Return, per laser, the minimal transition frequency and its level pair."""
    omega_min = []
    omega_min_indices = []
    for l in range(Nl):
        candidates = sorted(
            (omega[i_d(p[0] + 1) - 1][i_d(p[1] + 1) - 1], p)
            for p in detuningsij[l])
        best_omega, best_pair = candidates[0]
        omega_min += [best_omega]
        omega_min_indices += [best_pair]
    return omega_min, omega_min_indices
r This function returns a list of length Nl containing the mininmal frequency that each laser excites .
50,348
def block_diagonal_matrix(matrices, type=None):
    r"""Build a block-diagonal matrix out of the given square matrices.

    Uses symbolic zeros when the entries support .subs, numpy otherwise;
    *type* (default np.float64) is the numpy dtype.
    """
    if type is None:
        type = np.float64
    sizes = [block.shape[0] for block in matrices]
    total = sum(sizes)
    if hasattr(matrices[0][0], "subs"):
        A = symzeros(total, total)
    else:
        A = np.zeros((total, total), type)
    offset = 0
    for block, size in zip(matrices, sizes):
        A[offset:offset + size, offset:offset + size] = block
        offset += size
    return A
ur Build a block - diagonal matrix out of a given list of matrices .
50,349
def update(self, other=None, **kwargs):
    """Update with merge semantics.

    Plain dicts are assigned key-by-key through __setitem__ (which merges);
    any other iterable goes through the parent class update; keyword
    arguments always use the internal merging logic.
    """
    if other is not None:
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        else:
            super(MergingDict, self).update(other)
    for key in kwargs:
        self._merge(key, kwargs[key])
A special update method to handle merging of dict objects . For all other iterable objects we use the parent class update method . For other objects we simply make use of the internal merging logic .
50,350
def wrap_cell(entity, json_obj, mapping, table_view=False):
    """Render one GUI data-table cell for *entity* from *json_obj*.

    Returns a <td> when table_view is True, a <span> when a CSS class
    applies, otherwise the bare string; missing values render as &mdash;.
    """
    html_class = ''
    out = ''
    source = entity['source']
    if entity['multiple']:
        out = ", ".join(map(lambda x: num2name(x, entity, mapping),
                            json_obj.get(source, [])))
    elif entity['is_chem_formula']:
        out = html_formula(json_obj[source]) if source in json_obj else '&mdash;'
    elif source == 'bandgap':
        html_class = ' class=_g'
        out = json_obj.get('bandgap')
        if out is None:
            out = '&mdash;'
    elif source == 'energy':
        html_class = ' class=_e'
        out = "%6.5f" % json_obj['energy'] if json_obj['energy'] else '&mdash;'
    elif source == 'dims':
        out = "%4.2f" % json_obj['dims'] if json_obj['periodicity'] in [2, 3] else '&mdash;'
    else:
        out = num2name(json_obj.get(source), entity, mapping) or '&mdash;'
    if table_view:
        return '<td rel=' + str(entity['cid']) + html_class + '>' + str(out) + '</td>'
    if html_class:
        return '<span' + html_class + '>' + str(out) + '</span>'
    return str(out)
Cell wrappers for customizing the GUI data table
50,351
def meminfo():
    """Return memory and swap totals (MiB) parsed from /proc/meminfo."""
    hwinfo = {}
    # "with" guarantees the file is closed even if parsing raises
    # (the old open()/close() pair leaked the handle on error).
    with open("/proc/meminfo") as f:
        for line in f.readlines():
            meml = line.split()
            if meml[0] == "MemTotal:":
                mem = int(meml[1])
                hwinfo["Mem_MiB"] = mem / 1024
            elif meml[0] == "SwapTotal:":
                swap = int(meml[1])
                hwinfo["Swap_MiB"] = swap / 1024
    return hwinfo
Get the amount of memory and swap Mebibytes
50,352
def cpuinfo():
    """Return CPU model name and clock speed parsed from /proc/cpuinfo."""
    hwinfo = {}
    # "with" guarantees the file is closed even if parsing raises.
    with open("/proc/cpuinfo") as f:
        for line in f.readlines():
            cpul = line.split(":")
            name = cpul[0].strip()
            if len(cpul) > 1:
                val = cpul[1].strip()
                if name == "model name":
                    hwinfo["CPU"] = val
                elif name == "cpu MHz":
                    hwinfo["MHz"] = int(round(float(val)))
    return hwinfo
Get the cpu info
50,353
def vgadata():
    """Return the graphics card description parsed from `lspci -m`."""
    lspci = '/sbin/lspci' if os.path.isfile('/sbin/lspci') else '/usr/bin/lspci'
    pdata = {}
    # "with" closes the popen stream even if parsing raises.
    with os.popen(lspci + ' -m') as f:
        for line in f.readlines():
            p = line.split("\"")
            if p[1].strip() == "VGA compatible controller":
                pdata["Graphics"] = p[3] + " " + p[5]
    return pdata
Get data about the graphics card .
50,354
def serial_number():
    """Return the system serial number from sysfs or dmidecode (requires root)."""
    sdata = {}
    if os.getuid() == 0:
        try:
            sdata['Serial'] = open('/sys/class/dmi/id/product_serial').read().strip()
        except OSError:  # narrowed from bare except; sysfs entry may be absent
            for line in os.popen('/usr/sbin/dmidecode -s system-serial-number'):
                sdata['Serial'] = line.strip()
    return sdata
Get the serial number . Requires root access
50,355
def system_model():
    """Return system manufacturer and product name (sysfs, dmidecode fallback)."""
    mdata = {}
    man = None
    pn = None
    try:
        man = open('/sys/class/dmi/id/sys_vendor').read().strip()
    except OSError:  # narrowed from bare except; dmidecode needs root
        if os.getuid() == 0:
            for line in os.popen('/usr/sbin/dmidecode -s system-manufacturer'):
                man = line.strip()
    try:
        pn = open('/sys/class/dmi/id/product_name').read().strip()
    except OSError:
        if os.getuid() == 0:
            for line in os.popen('/usr/sbin/dmidecode -s system-product-name'):
                pn = line.strip()
    if man is not None:
        mdata['System_manufacturer'] = man
    if pn is not None:
        mdata['System_product_name'] = pn
    return mdata
Get manufacturer and model number .
50,356
def diskdata():
    """Return the total size (GB) of local sd/hd/mapper filesystems via df."""
    ddata = {}
    tsize = 0
    # "with" closes the popen stream even if parsing raises.
    with os.popen("/bin/df -l -P") as p:
        for line in p.readlines():
            d = line.split()
            if "/dev/sd" in d[0] or "/dev/hd" in d[0] or "/dev/mapper" in d[0]:
                tsize = tsize + int(d[1])
    ddata["Disk_GB"] = int(tsize) / 1000000
    return ddata
Get total disk size in GB .
50,357
def ip_address():
    """Return the local IP address used for outbound (public) connections."""
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # connect() on a UDP socket sends no packets; it only selects
        # the route (and thus the local address) for 8.8.8.8.
        s.connect(("8.8.8.8", 53))
        return s.getsockname()[0]
    finally:
        # Close even when connect() raises (the old code leaked the socket).
        s.close()
Get the IP address used for public connections .
50,358
def mac_address(ip):
    """Return the MAC address of the interface that carries *ip* (via ifconfig).

    Remembers the last HWaddr seen and stops once the line holding *ip*
    is reached.
    """
    mac = ''
    for line in os.popen('/sbin/ifconfig'):
        fields = line.split()
        if len(fields) > 3:
            if fields[3] == 'HWaddr':
                mac = fields[4]
            elif fields[2] == ip:
                break
    return {'MAC': mac}
Get the MAC address
50,359
def getallhwinfo():
    """Gather every piece of hardware info into a single dict."""
    hwinfo = meminfo()
    for extra in (cpuinfo(), uname(), vgadata(), distro(), diskdata(),
                  hostname(), serial_number()):
        hwinfo.update(extra)
    ip = ip_address()
    hwinfo.update(mac_address(ip))
    hwinfo.update({'IP': ip})
    hwinfo.update(system_model())
    return hwinfo
Get all the hw info .
50,360
def printheader(h=None):
    """Write the CSV header row to stdout."""
    csv.writer(sys.stdout).writerow(header_fields(h))
Print the header for the CSV table .
50,361
def agent(server="http://localhost:8000"):
    """Run in agent mode: push local hardware info to the XML-RPC server."""
    import xmlrpc.client
    proxy = xmlrpc.client.ServerProxy(server)
    hw = getallhwinfo()
    # Ensure every expected field is present, even if empty.
    for field in header_fields():
        if field not in hw:
            hw[field] = ''
    try:
        proxy.puthwinfo(xmlrpc.client.dumps((hw,)))
    except xmlrpc.client.Error as v:
        print("ERROR occured: ", v)
Run in agent mode .
50,362
def calculate_omega_matrix(states, Omega=1):
    """Return the matrix of transition frequencies 2*pi*(nu_i - nu_j)/Omega."""
    N = len(states)
    return [[2 * Pi * (states[i].nu - states[j].nu) / Omega
             for j in range(N)]
            for i in range(N)]
Calculate the matrix of transition frequencies .
50,363
def calculate_gamma_matrix(magnetic_states, Omega=1):
    r"""Calculate the antisymmetric matrix of decay rates between magnetic states.

    gamma[i][j] > 0 is the decay rate from the (upper) state i into the
    (lower) state j, in units of Omega; gamma[j][i] stores its negative.
    Rates come from the fine-structure Einstein A coefficient redistributed
    over hyperfine/magnetic sublevels with Wigner 6j and 3j factors.
    """
    Ne = len(magnetic_states)
    # Nuclear spin, common to every magnetic state of the isotope.
    II = magnetic_states[0].i
    gamma = [[0.0 for j in range(Ne)] for i in range(Ne)]
    for i in range(Ne):
        for j in range(i):
            ei = magnetic_states[i]
            ej = magnetic_states[j]
            einsteinAij = Transition(ei, ej).einsteinA
            if einsteinAij != 0:
                ji = ei.j
                jj = ej.j
                fi = ei.f
                fj = ej.f
                mi = ei.m
                mj = ej.m
                # Branching factor of the fine-structure decay into this
                # particular pair of hyperfine/magnetic sublevels.
                gammaij = (2.0*ji+1)
                gammaij *= (2.0*fi+1)
                gammaij *= (2.0*fj+1)
                gammaij *= float(wigner_6j(ji, fi, II, fj, jj, 1)**2)
                gammaij *= sum([float(wigner_3j(fj, 1, fi, -mj, q, mi)**2)
                                for q in [-1, 0, 1]])
                gammaij *= einsteinAij/Omega
                gammaij = float(gammaij)
                gamma[i][j] = gammaij
                gamma[j][i] = -gammaij
    return gamma
r Calculate the matrix of decay between states .
50,364
def calculate_boundaries(fine_states, full_magnetic_states):
    r"""Group a list of magnetic states by fine level and by hyperfine level.

    Returns (index_list_fine, index_list_hyperfine): each is a list of
    (start, stop) index pairs (half-open) into full_magnetic_states.
    Grouping compares the first 4 quantum numbers for fine levels and the
    first 5 for hyperfine levels.
    """
    fine_bounds = []
    hyperfine_bounds = []
    fine_key = full_magnetic_states[0].quantum_numbers[:4]
    hyper_key = full_magnetic_states[0].quantum_numbers[:5]
    fine_start = 0
    hyper_start = 0
    n = len(full_magnetic_states)
    for idx, state in enumerate(full_magnetic_states):
        if state.quantum_numbers[:4] != fine_key:
            fine_bounds += [(fine_start, idx)]
            fine_start = idx
            fine_key = state.quantum_numbers[:4]
        if state.quantum_numbers[:5] != hyper_key:
            hyperfine_bounds += [(hyper_start, idx)]
            hyper_start = idx
            hyper_key = state.quantum_numbers[:5]
        if idx == n - 1:
            # Close the final group of each kind.
            fine_bounds += [(fine_start, idx + 1)]
            hyperfine_bounds += [(hyper_start, idx + 1)]
    return fine_bounds, hyperfine_bounds
r Calculate the boundary indices within a list of magnetic states .
50,365
def exclude_states(omega, gamma, r, Lij, states, excluded_states):
    r"""Remove rows/columns corresponding to excluded_states from all matrices.

    Returns trimmed copies (omega, gamma, r, Lij, states); the three
    polarization components of r are trimmed independently.
    """
    Ne = len(omega)
    keep = [i for i in range(Ne) if states[i] not in excluded_states]
    omega_new = [[omega[i][j] for j in keep] for i in keep]
    gamma_new = [[gamma[i][j] for j in keep] for i in keep]
    Lij_new = [[Lij[i][j] for j in keep] for i in keep]
    r_new = [[[r[p][i][j] for j in keep] for i in keep] for p in range(3)]
    states_new = [states[i] for i in keep]
    return omega_new, gamma_new, r_new, Lij_new, states_new
Exclude states from matrices .
50,366
def vapour_pressure(Temperature, element):
    r"""Return the vapour pressure of rubidium or cesium in Pascals.

    Uses solid- or liquid-phase Antoine-type fits depending on whether
    Temperature (K) is below or above the element's melting point.
    """
    # element -> (melting point K, (A, B) solid fit, (A, B) liquid fit)
    fits = {
        "Rb": (39.30 + 273.15, (4.857, 4215.0), (4.312, 4040.0)),
        "Cs": (28.5 + 273.15, (4.711, 3999.0), (4.165, 3830.0)),
    }
    if element not in fits:
        raise ValueError(str(element) +
                         " is not an element in the database for this function.")
    Tmelt, solid, liquid = fits[element]
    A, B = solid if Temperature < Tmelt else liquid
    P = 10**(2.881 + A - B/Temperature)
    # Convert from Torr to Pascal.
    return P*101325.0/760.0
r Return the vapour pressure of rubidium or cesium in Pascals .
50,367
def states(self, Nmax=50, omega_min=None, omega_max=None,
           return_missing=False):
    r"""Find all states available for this atom up to principal quantum
    number Nmax, sorted by angular frequency and optionally filtered to
    omega_min <= omega <= omega_max.

    If return_missing is True, also return the (element, isotope, N, L, J)
    tuples for which no State could be constructed.
    """
    S = 1/Integer(2)  # single valence electron spin
    available = []
    not_available = []
    for N in range(1, Nmax+1):
        for L in range(N):
            # Allowed total angular momenta J = |L-S| .. L+S.
            Jmin = abs(L-S)
            Jmax = L+S
            Jpos = [Jmin+i for i in range(Jmax-Jmin+1)]
            for J in Jpos:
                try:
                    state = State(self.element, self.isotope, N, L, J)
                    available += [state]
                except:
                    # NOTE(review): bare except treats ANY error as "state
                    # not in database" — consider narrowing the exception.
                    not_available += [(self.element, self.isotope, N, L, J)]
    if omega_min is not None:
        available = [s for s in available if s.omega >= omega_min]
    if omega_max is not None:
        available = [s for s in available if s.omega <= omega_max]
    # Sort states by angular frequency.
    available = [(s.omega, s) for s in available]
    available = sorted(available)
    available = [s[1] for s in available]
    if return_missing:
        return available, not_available
    else:
        return available
Find all available states in an atom.
50,368
def find_decays(self, fine_state):
    r"""Find all states reachable from fine_state by chains of decays.

    Returns the list of fine states (including fine_state itself) that
    can be reached by following allowed transitions downward.
    """
    transitions = self.transitions()

    def lower_states(state):
        # Direct decay products of `state`.
        return [t.e1 for t in transitions if t.e2 == state]

    found = [fine_state]
    grew = True
    while grew:
        grew = False
        # Snapshot so newly found states are scanned on the next round,
        # exactly like the original recursive fixed-point iteration.
        for state in list(found):
            for lower in lower_states(state):
                if lower not in found:
                    found += [lower]
                    grew = True
    return found
r Find all possible decays from a given fine state .
50,369
def _latex_ ( self ) : r if self . l == 0 : l = 'S' elif self . l == 1 : l = 'P' elif self . l == 2 : l = 'D' elif self . l == 3 : l = 'F' else : l = str ( self . l ) if self . f is None : s = '^{' + str ( self . isotope ) + '}\\mathrm{' + self . element + '}\\ ' s += str ( self . n ) + l + '_{' + str ( self . j ) + '}' else : s = '^{' + str ( self . isotope ) + '}\\mathrm{' + self . element + '}\\ ' s += str ( self . n ) + l + '_{' + str ( self . j ) + '}^{' + str ( self . f ) + '}' if self . m is not None : s = s [ : - 1 ] + ',' + str ( self . m ) + '}' return s
r The LaTeX routine for states .
50,370
def _latex_ ( self ) : r if self . allowed : return self . e1 . _latex_ ( ) + '\\ \\rightarrow \\ ' + self . e2 . _latex_ ( ) elif not self . allowed : return self . e1 . _latex_ ( ) + '\\ \\nrightarrow \\ ' + self . e2 . _latex_ ( ) else : return self . e1 . _latex_ ( ) + '\\ \\rightarrow^? \\ ' + self . e2 . _latex_ ( ) return self . e1 . _latex_ ( ) + '\\ \\nleftrightarrow \\ ' + self . e2 . _latex_ ( )
r The representation routine for transitions .
50,371
def uFuncConverter(variableIndex):
    """Decorator factory that turns a plain Python function into a
    numpy-style universal function over the argument at position
    ``variableIndex``.

    Scalars (int, float, Decimal) are passed straight through; lists,
    tuples and ndarrays are mapped element-wise and returned as an
    ndarray.  Anything else raises Exception.
    """
    def wrap(func):
        def npWrapFunc(*args):
            # The argument at `variableIndex` must exist: the original
            # `len(args) >= variableIndex` test was off by one and let
            # args[variableIndex] raise IndexError.
            if len(args) > variableIndex:
                before = list(args[:variableIndex])
                arguments = args[variableIndex]
                after = list(args[variableIndex + 1:])
                if isinstance(arguments, (int, float, Decimal)):
                    # NOTE: for variableIndex == 0 only the first argument
                    # is forwarded — preserved original behavior.
                    if variableIndex:
                        return func(*args)
                    return func(args[0])
                elif isinstance(arguments, (list, tuple, ndarray)):
                    if variableIndex:
                        return asarray([func(*(before + [x] + after))
                                        for x in arguments])
                    return asarray([func(x) for x in arguments])
                raise Exception('Error! Arguments (%s) not of proper format'
                                % str(arguments))
            # Too few arguments: fail loudly instead of silently
            # returning None as the original did.
            raise Exception('Error! Arguments (%s) not of proper format'
                            % str(args))
        return npWrapFunc
    return wrap
A decorator to convert python functions to numpy universal functions
50,372
def unperturbed_hamiltonian(states):
    r"""Return the unperturbed atomic hamiltonian for the given states:
    a diagonal Ne x Ne complex matrix with entries hbar*omega_i."""
    Ne = len(states)
    H0 = np.zeros((Ne, Ne), complex)
    for idx, state in enumerate(states):
        H0[idx, idx] = hbar*state.omega
    return H0
r Return the unperturbed atomic hamiltonian for given states .
50,373
def calculate_gamma_matrix(magnetic_states, Omega=1, einsteinA=None,
                           numeric=True):
    r"""Calculate the antisymmetric matrix of decay rates between magnetic
    states.

    gamma[i][j] > 0 is the decay rate from the (upper) state i into the
    (lower) state j in units of Omega; gamma[j][i] stores its negative.
    If `einsteinA` is given it must be indexable as einsteinA[iii, jjj] by
    fine-state indices and overrides the database values.  With `numeric`
    False the Wigner symbols are left symbolic.
    """
    Ne = len(magnetic_states)
    # Map each magnetic state to the index of its fine state so that an
    # externally supplied einsteinA table (indexed by fine states) can be
    # looked up.
    fine_states = []
    fine_map = {}
    ii = 0
    for ei in magnetic_states:
        fine = State(ei.element, ei.isotope, ei.n, ei.l, ei.j)
        if fine not in fine_states:
            fine_states += [fine]
            ii += 1
        # NOTE(review): assumes magnetic states are grouped by fine state
        # so that ii-1 is the index of `fine` — confirm for interleaved
        # input orderings.
        fine_map.update({ei: ii-1})
    II = magnetic_states[0].i  # nuclear spin, common to all states
    gamma = [[0.0 for j in range(Ne)] for i in range(Ne)]
    for i in range(Ne):
        for j in range(i):
            ei = magnetic_states[i]
            ej = magnetic_states[j]
            if einsteinA is not None:
                iii = fine_map[ei]
                jjj = fine_map[ej]
                einsteinAij = einsteinA[iii, jjj]
            else:
                einsteinAij = Transition(ei, ej).einsteinA
            if einsteinAij != 0:
                ji = ei.j
                jj = ej.j
                fi = ei.f
                fj = ej.f
                mi = ei.m
                mj = ej.m
                # Hyperfine/magnetic branching of the fine-structure rate.
                gammaij = (2*ji+1)
                gammaij *= (2*fi+1)
                gammaij *= (2*fj+1)
                if numeric:
                    gammaij *= float(wigner_6j(ji, fi, II, fj, jj, 1)**2)
                    gammaij *= sum([float(wigner_3j(fj, 1, fi, -mj, q, mi)**2)
                                    for q in [-1, 0, 1]])
                    gammaij *= einsteinAij/Omega
                    gammaij = float(gammaij)
                else:
                    gammaij *= wigner_6j(ji, fi, II, fj, jj, 1)**2
                    gammaij *= sum([wigner_3j(fj, 1, fi, -mj, q, mi)**2
                                    for q in [-1, 0, 1]])
                    gammaij *= einsteinAij/Omega
                gamma[i][j] = gammaij
                gamma[j][i] = -gammaij
    return gamma
ur Calculate the matrix of decay between states .
50,374
def reduced_matrix_element(fine_statei, fine_statej, convention=1):
    r"""Return the reduced matrix element of the position operator, in
    Bohr radii, between two fine states.

    For the upper->lower direction it is computed from the Einstein A
    coefficient; for the opposite direction it is obtained by symmetry
    with the (-1)**(Jj-Ji) phase.  convention=2 divides by sqrt(2*Ji+1).
    """
    if fine_statei == fine_statej:
        return 0.0
    t = Transition(fine_statei, fine_statej)
    einsteinAij = t.einsteinA
    omega0 = t.omega
    Ji = fine_statei.j
    Jj = fine_statej.j
    factor = sqrt(3*Pi*hbar*c**3*epsilon0)/e
    if omega0 < 0:
        # i is the upper state: compute directly from the decay rate.
        rij = factor*sqrt((2*Jj+1)*einsteinAij/omega0**3)/a0
    else:
        # i is the lower state: use symmetry of the reduced element.
        rij = reduced_matrix_element(fine_statej, fine_statei,
                                     convention=convention)
        rij *= (-1)**(Jj-Ji)
    if convention == 2:
        # The original code branched on omega0 here, but both branches
        # were identical — collapsed into one statement.
        rij = rij/sqrt(2*Ji+1)
    return rij
r Return the reduced matrix element of the position operator in Bohr \ radii .
50,375
def calculate_reduced_matrix_elements(fine_states, convention=1):
    r"""Calculate the full table of reduced matrix elements for a list of
    fine states."""
    table = []
    for ei in fine_states:
        table.append([reduced_matrix_element(ei, ej, convention=convention)
                      for ej in fine_states])
    return table
r Calculate the reduced matrix elements for a list of fine states .
50,376
def calculate_matrices(states, Omega=1):
    r"""Calculate the matrices omega_ij, gamma_ij, r_pij for the given
    states (all of which must share one element and isotope)."""
    element = states[0].element
    iso = states[0].isotope
    for state in states[1:]:
        if state.element != element:
            raise ValueError('All states must belong to the same element.')
        if state.isotope != iso:
            raise ValueError('All states must belong to the same isotope.')
    # Expand to the full magnetic-sublevel basis before building matrices.
    fine_states = find_fine_states(states)
    full_magnetic_states = make_list_of_states(fine_states, 'magnetic',
                                               verbose=0)
    omega = calculate_omega_matrix(full_magnetic_states, Omega)
    gamma = calculate_gamma_matrix(full_magnetic_states, Omega)
    reduced_matrix_elements = calculate_reduced_matrix_elements(fine_states)
    r = calculate_r_matrices(fine_states, reduced_matrix_elements)
    return omega, gamma, r
r Calculate the matrices omega_ij gamma_ij r_pij .
50,377
def thermal_state(omega_level, T, return_diagonal=False):
    r"""Return the Boltzmann thermal state for a given set of levels.

    If return_diagonal is True, return just the population vector;
    otherwise return the full diagonal density matrix.
    """
    energies = np.array([hbar*om for om in omega_level])
    populations = np.exp(-energies/k_B/T)
    populations = populations/sum(populations)
    if return_diagonal:
        return populations
    return np.diag(populations)
r Return a thermal state for a given set of levels .
50,378
def transitions(self, omega_min=None, omega_max=None):
    r"""Find all allowed transitions between this atom's states,
    optionally restricted to omega_min <= |omega| <= omega_max."""
    states = self.states()
    # (The original code had a dead `transitions = states` assignment
    # immediately overwritten by the empty list.)
    transitions = []
    for i in range(len(states)):
        si = states[i]
        for j in range(i):
            sj = states[j]
            t = Transition(sj, si)
            if t.allowed:
                transitions.append(t)
    if omega_min is not None:
        transitions = [ti for ti in transitions if abs(ti.omega) >= omega_min]
    if omega_max is not None:
        transitions = [ti for ti in transitions if abs(ti.omega) <= omega_max]
    return transitions
r Find all allowed transitions .
50,379
def average(self, rho):
    r"""Return the density matrix of rho averaged over the ensemble's
    distribution.

    The distribution array is contracted against rho one leading axis at
    a time until no ensemble axes remain.
    """
    weights = self.distribution
    value = rho
    while len(weights.shape) > 0:
        value = sum([weights[i]*value[i] for i in range(value.shape[0])])
        # Collapse the axis just contracted.
        weights = np.sum(weights, 0)
    return value
r Return the average density matrix of an inhomogeneous ensemble .
50,380
def reset(self, T):
    r"""Recalculate the doppler broadening for a new temperature T by
    re-running __init__ with every other stored parameter unchanged."""
    # NOTE: positional order must match __init__'s signature exactly.
    self.__init__(self.shape, self.stds, T, self.mass, self.detuning_knob,
                  self.k, self.omega_level, self.xi, self.theta,
                  self.unfolding, self.axes, self.matrix_form)
r Recalculate the doppler broadening for a given temperature .
50,381
def perm_j(j1, j2):
    r"""Return the total angular momenta allowed by the triangle rule:
    |j1-j2|, |j1-j2|+1, ..., j1+j2."""
    jmin = abs(j1 - j2)
    jmax = j1 + j2
    count = jmax - jmin + 1
    return [jmin + step for step in range(count)]
r Calculate the allowed total angular momenta .
50,382
def wigner_d_small(J, beta):
    u"""Return the small Wigner d matrix d^J(beta) for angular momentum J,
    as a (2J+1)x(2J+1) sympy Matrix with rows and columns ordered by
    M = J, J-1, ..., -J.
    """
    def prod(x):
        # Product of a sequence (sympy-friendly, no math.prod needed).
        p = 1
        for i, xi in enumerate(x):
            p = p*xi
        return p

    M = [J-i for i in range(2*J+1)]
    d = []
    for Mi in M:
        row = []
        for Mj in M:
            # Summation limits of the explicit d-matrix sum formula.
            # NOTE(review): these bounds look generous compared with the
            # textbook max(0,-Mi-Mj)..min(J-Mi,J-Mj) range; out-of-range
            # terms vanish because their binomials are zero, so the result
            # is unchanged but some zero terms are computed.
            sigmamax = max([-Mi-Mj, J-Mj])
            sigmamin = min([0, J-Mi])
            # Prefactor sqrt((J+Mi)!(J-Mi)!/(J+Mj)!/(J-Mj)!).
            dij = sqrt(factorial(J+Mi)*factorial(J-Mi) /
                       factorial(J+Mj)/factorial(J-Mj))
            terms = [[(-1)**(J-Mi-s),
                      binomial(J+Mj, J-Mi-s),
                      binomial(J-Mj, s),
                      cos(beta/2)**(2*s+Mi+Mj),
                      sin(beta/2)**(2*J-2*s-Mj-Mi)]
                     for s in range(sigmamin, sigmamax+1)]
            # Skip any term containing an exact zero factor.
            terms = [prod(term) if 0 not in term else 0 for term in terms]
            dij = dij*sum(terms)
            row += [dij]
        d += [row]
    return Matrix(d)
u Return the small Wigner d matrix for angular momentum J .
50,383
def wigner_d(J, alpha, beta, gamma):
    u"""Return the full Wigner D matrix for angular momentum J and Euler
    angles (alpha, beta, gamma), built by dressing the small d matrix
    with the exp(I*M*angle) phases."""
    small = wigner_d_small(J, beta)
    M = [J-i for i in range(2*J+1)]
    rows = []
    for i, Mi in enumerate(M):
        row = []
        for j, Mj in enumerate(M):
            row.append(exp(I*Mi*alpha)*small[i, j]*exp(I*Mj*gamma))
        rows.append(row)
    return Matrix(rows)
u Return the Wigner D matrix for angular momentum J .
50,384
def density_matrix_rotation(J_values, alpha, beta, gamma):
    r"""Return a block-diagonal Wigner D matrix rotating a density matrix
    of an ensemble of particles with total angular momenta J_values."""
    size = sum(2*J + 1 for J in J_values)
    D = zeros(size, size)
    offset = 0
    for J in J_values:
        block = wigner_d(J, alpha, beta, gamma)
        dim = 2*J + 1
        D[offset:offset + dim, offset:offset + dim] = block
        offset += dim
    return D
Return a block-wise diagonal Wigner D matrix that rotates a density matrix of an ensemble of particles in definite total angular momentum states given by J_values.
50,385
def generate_unit_squares(image_width, image_height):
    """Yield corner coordinates for a tiling of unit squares covering an
    image_width x image_height grid, column by column."""
    for col in range(image_width):
        for row in range(image_height):
            yield [
                (col, row),
                (col + 1, row),
                (col + 1, row + 1),
                (col, row + 1),
            ]
Generate coordinates for a tiling of unit squares .
50,386
def generate_unit_triangles(image_width, image_height):
    """Yield coordinates for a tiling of unit equilateral triangles
    (alternating up- and down-pointing) covering the image area.

    Odd rows are shifted by half a unit so the triangles interlock.
    """
    h = math.sin(math.pi / 3)  # height of a unit equilateral triangle
    for x in range(-1, image_width):
        for y in range(int(image_height / h)):
            base_x = x + 0.5 if y % 2 else x
            # Up-pointing triangle, then its down-pointing neighbour.
            yield [(base_x, y*h), (base_x + 1, y*h), (base_x + 0.5, (y + 1)*h)]
            yield [(base_x + 1, y*h), (base_x + 1.5, (y + 1)*h),
                   (base_x + 0.5, (y + 1)*h)]
Generate coordinates for a tiling of unit triangles .
50,387
def restore_default_settings():
    """Restore settings to their default values."""
    global __DEFAULTS
    for attr in ('CACHE_DIR', 'SET_SEED', 'SEED'):
        setattr(__DEFAULTS, attr, getattr(defaults, attr))
    logging.info('Settings reverted to their default values.')
Restore settings to default values .
50,388
def load_config(config_file='~/.stancache.ini'):
    """Load a config file into the default settings.

    A missing file is skipped with a warning; a file without a [main]
    section raises ValueError.
    """
    # Expand '~' so the default path actually resolves: the original code
    # passed the raw string to os.path.exists, which never matched.
    config_file = os.path.expanduser(config_file)
    if not os.path.exists(config_file):
        logging.warning(
            'Config file does not exist: {}. Using default settings.'.format(
                config_file))
        return
    config = configparser.ConfigParser()
    config.read(config_file)
    if not config.has_section('main'):
        raise ValueError(
            'Config file {} has no section "main"'.format(config_file))
    for (key, val) in config.items('main'):
        _set_value(key.upper(), val)
    return
Load config file into default settings
50,389
def send_email(name, ctx_dict, send_to=None, subject=u'Subject', **kwargs):
    """Shortcut around the EmailFromTemplate class: render the named
    template with ctx_dict and send it to send_to."""
    template = EmailFromTemplate(name=name)
    template.subject = subject
    template.context = ctx_dict
    template.get_object()
    template.render_message()
    template.send_email(send_to=send_to, **kwargs)
Shortcut function for EmailFromTemplate class
50,390
def __parse(self, string=''):
    """Strip wiki markup from one line, expanding a leading run of '*'
    into list markup via self.__list."""
    self.string = string
    self.string = self.wiki_re.sub('', self.string)
    self.listmatch = re.search('^(\*+)', self.string)
    if self.listmatch:
        # Replace the leading stars with the rendered list prefix.
        remainder = re.sub('^(\*+)', '', self.string)
        self.string = self.__list(self.listmatch) + remainder
    return self.string
Parse a string to remove and replace all wiki markup tags
50,391
def parse_string(self, string=''):
    """Parse a multi-line string object into de-wikified text."""
    # Keep line endings so the join reconstructs the original layout.
    self.strings = string.splitlines(1)
    self.strings = [self.__parse(line) for line in self.strings]
    return ''.join(self.strings)
Parse a string object to de - wikified text
50,392
def refine_cell(self, tilde_obj):
    """Build a symmetry-refined cell for the last structure.

    NB: only used for the perovskite_tilting app.  On failure the error
    message is stored in self.error instead of raising.
    """
    try:
        lattice, positions, numbers = spg.refine_cell(
            tilde_obj['structures'][-1],
            symprec=self.accuracy,
            angle_tolerance=self.angle_tolerance)
    except Exception as ex:
        self.error = 'Symmetry finder error: %s' % ex
    else:
        self.refinedcell = Atoms(numbers=numbers,
                                 cell=lattice,
                                 scaled_positions=positions,
                                 pbc=tilde_obj['structures'][-1].get_pbc())
        # Number of periodic directions (0-3).
        self.refinedcell.periodicity = sum(self.refinedcell.get_pbc())
        # NOTE(review): `dims` is the cell volume of the ORIGINAL cell,
        # not the refined one — confirm this is intended.
        self.refinedcell.dims = abs(det(tilde_obj['structures'][-1].cell))
NB only used for perovskite_tilting app
50,393
def parse(self):
    """Read every line from the data source and yield
    (path, code, line, char, message) tuples for lines matching the
    expected flake-style format; malformed lines are skipped."""
    if self.data is None:
        raise ValueError('No input data provided, unable to parse')
    for raw in self.data:
        parts = raw.strip().split()
        try:
            path = parts[0]
            code = parts[1]
            path, line, char = path.split(':')[:3]
        except (IndexError, ValueError):
            # Not enough fields, or path lacks line:char — skip the line.
            continue
        if not re.match(POSITION, line):
            continue
        if not re.match(POSITION, char):
            continue
        if not re.match(ERROR_CODE, code):
            continue
        if not re.match(FILEPATH, path):
            continue
        yield path, code, line, char, ' '.join(parts[2:])
Reads all lines from the current data source and yields each FileResult objects
50,394
def _get(function, return_format=None):
    """Fetch `function` from the API; return raw text when a return
    format is given, otherwise the decoded JSON body."""
    if return_format:
        url = ''.join([__BASE_URL, function, return_format])
        return requests.get(url).text
    url = ''.join([__BASE_URL, function, JSON])
    return requests.get(url).json()
Get and return data from the API .
50,395
def backscatter(date=None, rows=None, return_format=None):
    """Return possible backscatter data, optionally limited to a date
    (datetime or 'YYYY-MM-DD' string) and a row count."""
    segments = ['backscatter']
    if date:
        try:
            segments.append(date.strftime("%Y-%m-%d"))
        except AttributeError:
            # Already a pre-formatted string.
            segments.append(date)
    if rows:
        segments.append(str(rows))
    return _get('/'.join(segments), return_format)
Returns possible backscatter data .
50,396
def port(port_number, return_format=None):
    """Summary information about a particular port; raises Error when the
    service reports an invalid port number."""
    data = _get('port/{number}'.format(number=port_number), return_format)
    if 'bad port number' in str(data):
        raise Error('Bad port number, {number}'.format(number=port_number))
    return data
Summary information about a particular port .
50,397
def portdate(port_number, date=None, return_format=None):
    """Information about a particular port at a particular date; raises
    Error when the service reports an invalid port number."""
    segments = ['portdate/{number}'.format(number=port_number)]
    if date:
        try:
            segments.append(date.strftime("%Y-%m-%d"))
        except AttributeError:
            # Already a pre-formatted string.
            segments.append(date)
    data = _get('/'.join(segments), return_format)
    if 'bad port number' in str(data):
        raise Error('Bad port number, {number}'.format(number=port_number))
    return data
Information about a particular port at a particular date .
50,398
def topports(sort_by='records', limit=10, date=None, return_format=None):
    """Information about the top ports for a particular date, limited to
    `limit` entries and sorted by `sort_by`."""
    segments = ['topports', sort_by, str(limit)]
    if date:
        try:
            segments.append(date.strftime("%Y-%m-%d"))
        except AttributeError:
            # Already a pre-formatted string.
            segments.append(date)
    return _get('/'.join(segments), return_format)
Information about top ports for a particular date with return limit .
50,399
def porthistory(port_number, start_date=None, end_date=None,
                return_format=None):
    """Return port data over a range of dates (start defaults to 30 days
    ago); raises Error when the service reports an invalid port."""
    segments = ['porthistory/{port}'.format(port=port_number)]
    if not start_date:
        # Default window: the last 30 days.
        start_date = datetime.datetime.now() - datetime.timedelta(days=30)
    for when in (start_date, end_date):
        if when:
            try:
                segments.append(when.strftime("%Y-%m-%d"))
            except AttributeError:
                # Already a pre-formatted string.
                segments.append(when)
    data = _get('/'.join(segments), return_format)
    if 'bad port number' in str(data):
        raise Error('Bad port, {port}'.format(port=port_number))
    return data
Returns port data for a range of dates .