idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
50,400
def daily404summary(date, return_format=None):
    """Return daily summary information of submitted 404 Error Page data.

    *date* may be a date/datetime (formatted as YYYY-MM-DD) or a plain
    string; it is appended to the request URI when given.
    """
    uri = 'daily404summary'
    if date:
        try:
            segment = date.strftime("%Y-%m-%d")
        except AttributeError:
            # Not a date object; assume a pre-formatted string.
            segment = date
        uri = '/'.join([uri, segment])
    return _get(uri, return_format)
Returns daily summary information of submitted 404 Error Page Information .
50,401
def daily404detail(date, limit=None, return_format=None):
    """Return detail information of submitted 404 Error Page data.

    *date* may be a date/datetime or a string; *limit* caps the number of
    returned records when given.
    """
    uri = 'daily404detail'
    if date:
        try:
            segment = date.strftime("%Y-%m-%d")
        except AttributeError:
            # Not a date object; assume a pre-formatted string.
            segment = date
        uri = '/'.join([uri, segment])
    if limit:
        uri = '/'.join([uri, str(limit)])
    return _get(uri, return_format)
Returns detail information of submitted 404 Error Page Information .
50,402
def glossary(term=None, return_format=None):
    """List glossary terms and definitions, optionally for a single *term*."""
    uri = '/'.join(['glossary', term]) if term else 'glossary'
    return _get(uri, return_format)
List of glossary terms and definitions .
50,403
def _append_path ( new_path ) : for path in sys . path : path = os . path . abspath ( path ) if new_path == path : return sys . path . append ( new_path )
Given a path string, append it to sys.path unless it is already present.
50,404
def _caller_path(index):
    """Return the directory of the caller's file at *index* in the stack.

    Walks up the call stack starting at *index* until a frame whose module
    can be resolved is found.  Raises RuntimeError when no such frame
    exists (e.g. when the caller is stdin through a CLI).
    """
    module = None
    stack = inspect.stack()
    while not module:
        if index >= len(stack):
            raise RuntimeError("Cannot find import path")
        frame = stack[index]
        module = inspect.getmodule(frame[0])
        index += 1
    filename = module.__file__
    path = os.path.dirname(os.path.realpath(filename))
    return path
Get the caller's file path by its index in the stack; does not work when the caller is stdin through a CLI Python session.
50,405
def get_current_path(index=2):
    """Return the caller's directory, falling back to the CWD for stdin/CLI."""
    try:
        return _caller_path(index)
    except RuntimeError:
        # Caller has no importable file (e.g. interactive session).
        return os.getcwd()
Get the caller's path. If the caller is a CLI through stdin, the current working directory is used instead.
50,406
def get_git_root(index=3):
    """Return the git root directory of the caller's file.

    Walks upward from the caller's path until a ``.git`` directory is
    found.  Raises RuntimeError when the filesystem root is reached
    without finding one.
    """
    path = get_current_path(index=index)
    while not os.path.isdir(os.path.join(path, '.git')):
        if os.path.dirname(path) == path:
            # Reached the filesystem root.
            raise RuntimeError("Cannot find git root")
        path = os.path.split(path)[0]
    return path
Get the path of the git root directory of the caller's file. Raises a RuntimeError if a git repository cannot be found.
50,407
def get_parent_path(index=2):
    """Return the parent of the caller's path (parent of CWD for stdin/CLI)."""
    try:
        base = _caller_path(index)
    except RuntimeError:
        base = os.getcwd()
    return os.path.abspath(os.path.join(base, os.pardir))
Get the caller's parent path. If the caller is a CLI through stdin, the parent of the current working directory is used instead.
50,408
def send_email(self, send_to, attachment_paths=None, fail_silently=True, *args, **kwargs):
    """Send email to *send_to* based on this object's parameters.

    Builds the message via ``get_message_object`` and sends it.  On
    SMTPException the error is logged when ``fail_silently`` is True,
    otherwise it is re-raised.  Returns the result of ``msg.send()``.
    """
    msg = self.get_message_object(send_to, attachment_paths, *args, **kwargs)
    msg.content_subtype = self.content_subtype
    try:
        self.sent = msg.send()
    # Fixed Python-2-only syntax ``except SMTPException, e`` — the ``as``
    # form works on both Python 2.6+ and Python 3.
    except SMTPException as e:
        if not fail_silently:
            raise
        logger.error(u'Problem sending email to %s: %s', send_to, e)
    return self.sent
Sends email to recipient based on self object parameters .
50,409
def _tag ( element ) : tag = element . tag if tag [ 0 ] == "{" : uri , tag = tag [ 1 : ] . split ( "}" ) return tag
Return element . tag with xmlns stripped away .
50,410
def _uriPrefix ( element ) : i = element . tag . find ( '}' ) if i < 0 : return "" return element . tag [ : i + 1 ]
Return xmlns prefix of the given element .
50,411
def parseValue(self, value):
    """Parse string *value* into this parameter's python type.

    Vector parameters are split on commas; booleans go through
    ``_parseBool``; everything else uses ``self._pythonType`` directly.
    """
    if self.isVector():
        return [self._pythonType(item) for item in value.split(',')]
    if self.typ == 'boolean':
        return _parseBool(value)
    return self._pythonType(value)
Parse the given value and return result .
50,412
def defaultExtension(self):
    """Return the default file extension for this parameter's type.

    When the canonical extension is not among the supported
    ``fileExtensions``, the first supported extension is returned instead.
    """
    default = self.EXTERNAL_TYPES[self.typ]
    if not self.fileExtensions or default in self.fileExtensions:
        return default
    return self.fileExtensions[0]
Return default extension for this parameter type checked against supported fileExtensions . If the default extension is not within fileExtensions return the first supported extension .
50,413
def define_symbol(name, open_brace, comma, i, j, close_brace, variables, **kwds):
    r"""Define a nice symbol with matrix indices.

    Returns a plain Symbol when *variables* is None, otherwise a Function
    of *variables*, labelled ``name{ i+1, j+1 }`` using the given braces.
    """
    label = name + open_brace + str(i + 1) + comma + str(j + 1) + close_brace
    if variables is None:
        return Symbol(label, **kwds)
    return Function(label, **kwds)(*variables)
r Define a nice symbol with matrix indices .
50,414
def cartesian_to_helicity(vector, numeric=False):
    r"""Take a vector from the cartesian basis to the helicity basis.

    With ``numeric=True`` the components are coerced to numpy arrays and
    numeric arithmetic is used; otherwise symbolic (sympy) arithmetic.
    Matrix/array component inputs are returned as built; plain components
    are wrapped in a sympy ``Matrix``.
    """
    if numeric:
        vector = list(vector)
        vector[0] = nparray(vector[0])
        vector[1] = nparray(vector[1])
        vector[2] = nparray(vector[2])
        v = [(vector[0] - 1j * vector[1]) / npsqrt(2),
             vector[2],
             -(vector[0] + 1j * vector[1]) / npsqrt(2)]
        v = nparray(v)
    else:
        v = [(vector[0] - I * vector[1]) / sqrt(2),
             vector[2],
             -(vector[0] + I * vector[1]) / sqrt(2)]
    if type(vector[0]) in [type(Matrix([1, 0])), type(nparray([1, 0]))]:
        return v
    else:
        return Matrix(v)
r This function takes vectors from the cartesian basis to the helicity basis . For instance we can check what are the vectors of the helicity basis .
50,415
def vector_element(r, i, j):
    r"""Extract the (i, j) matrix element of the vector operator *r*."""
    components = [r[p][i, j] for p in range(3)]
    return Matrix(components)
r Extract a matrix element of a vector operator .
50,416
def define_frequencies(Ne, explicitly_antisymmetric=False):
    u"""Define all frequencies ``omega_level``, ``omega``, ``gamma``.

    Returns the per-level frequency symbols and two Ne x Ne matrices of
    transition-frequency and decay-rate symbols.  With
    ``explicitly_antisymmetric=True`` the upper triangle is the negative
    of the corresponding lower-triangle symbol.
    """
    omega_level = [Symbol('omega_' + str(i + 1), real=True) for i in range(Ne)]
    # LaTeX-style braces are only needed for two-digit indices.
    if Ne > 9:
        opening = "\\"
        comma = ","
        open_brace = "{"
        close_brace = "}"
    else:
        opening = r""
        comma = ""
        open_brace = ""
        close_brace = ""
    omega = []
    gamma = []
    for i in range(Ne):
        row_omega = []
        row_gamma = []
        for j in range(Ne):
            if i == j:
                om = 0
                ga = 0
            elif i > j:
                om = Symbol(opening + r"omega_" + open_brace + str(i + 1) + comma + str(j + 1) + close_brace, real=True)
                ga = Symbol(opening + r"gamma_" + open_brace + str(i + 1) + comma + str(j + 1) + close_brace, real=True)
            elif explicitly_antisymmetric:
                # Upper triangle: negative of the mirrored lower-triangle symbol.
                om = -Symbol(opening + r"omega_" + open_brace + str(j + 1) + comma + str(i + 1) + close_brace, real=True)
                ga = -Symbol(opening + r"gamma_" + open_brace + str(j + 1) + comma + str(i + 1) + close_brace, real=True)
            else:
                om = Symbol(opening + r"omega_" + open_brace + str(i + 1) + comma + str(j + 1) + close_brace, real=True)
                ga = Symbol(opening + r"gamma_" + open_brace + str(i + 1) + comma + str(j + 1) + close_brace, real=True)
            row_omega += [om]
            row_gamma += [ga]
        omega += [row_omega]
        gamma += [row_gamma]
    omega = Matrix(omega)
    gamma = Matrix(gamma)
    return omega_level, omega, gamma
u Define all frequencies omega_level omega gamma .
50,417
def lindblad_terms(gamma, rho, Ne, verbose=1):
    u"""Return the Lindblad terms for decays *gamma* in matrix form.

    Iterates over the lower triangle of ``gamma`` and accumulates
    ``gamma[i, j] * lindblad_operator(|j><i|, rho)``.  Progress is printed
    at most once per second when ``verbose > 0``.
    """
    # Count nonzero decay channels up front so progress can be reported.
    Nterms = 0
    for i in range(Ne):
        for j in range(i):
            if gamma[i, j] != 0:
                Nterms += 1
    L = zeros(Ne)
    counter = 0
    t0 = time()
    for i in range(Ne):
        for j in range(i):
            if gamma[i, j] != 0:
                counter += 1
                sig = ket(j + 1, Ne) * bra(i + 1, Ne)
                L += gamma[i, j] * lindblad_operator(sig, rho)
                tn = time()
                # Throttle progress output to once per second.
                if tn - t0 > 1:
                    aux = "Calculated up to i={}, j={}, or {}/{} = {:2.2f} %."
                    if verbose > 0:
                        print(aux.format(i, j, counter, Nterms,
                                         float(counter + 1) / Nterms * 100))
                    t0 = tn
    return L
u Return the Lindblad terms for decays gamma in matrix form .
50,418
def define_rho_vector(rho, Ne):
    u"""Return the vectorized density matrix as a column Matrix."""
    entries = []
    for mu in range(1, Ne ** 2):
        i, j, s = IJ(mu, Ne)
        # IJ returns 1-based indices; rho is 0-based.
        entries.append(part_symbolic(rho[i - 1, j - 1], s))
    return Matrix(entries)
u Define the vectorized density matrix .
50,419
def calculate_A_b(eqs, unfolding, verbose=0):
    u"""Calculate the equations in matrix form ``A x = b``.

    Linearizes the symbolic equations *eqs* in the components of the
    vectorized density matrix and returns the coefficient matrix ``A``
    and the inhomogeneous part ``b``.
    """
    Ne = unfolding.Ne
    Nrho = unfolding.Nrho
    lower_triangular = unfolding.lower_triangular
    rho = define_density_matrix(Ne,
                                explicitly_hermitian=lower_triangular,
                                normalized=unfolding.normalized)
    rho_vect = unfolding(rho)
    # NOTE(review): ss_comp is only defined when unfolding.real — a
    # non-real unfolding would raise NameError below; confirm intended.
    if unfolding.real:
        ss_comp = {rho[i, j]: re(rho[i, j]) + I * im(rho[i, j])
                   for j in range(Ne) for i in range(Ne)}
    A = []
    b = []
    for mu in range(Nrho):
        s, i, j = unfolding.IJ(mu)
        if verbose > 0:
            # Fixed Python-2-only ``print mu`` statement.
            print(mu)
        eq = part_symbolic(eqs[i, j].subs(ss_comp), s)
        eq_new = 0
        row = []
        for nu in range(Nrho):
            variable = rho_vect[nu]
            coefficient = Derivative(eq, variable).doit()
            row += [coefficient]
            eq_new += coefficient * variable
        # Whatever is not linear in the variables goes into b.
        b += [-(eq - eq_new).expand()]
        A += [row]
    return Matrix(A), Matrix(b)
u Calculate the equations in matrix form .
50,420
def phase_transformation(Ne, Nl, r, Lij, omega_laser, phase):
    r"""Obtain a phase transformation to eliminate explicit time dependence."""
    ph = find_phase_transformation(Ne, Nl, r, Lij)
    transformation = {}
    for i in range(Ne):
        transformation[phase[i]] = sum([ph[i][j] * omega_laser[j]
                                        for j in range(Nl)])
    return transformation
r Obtain a phase transformation to eliminate explicit time dependence .
50,421
def dot(a, b):
    r"""Dot product of two 3d vectors.

    Scalar prefactors inside ``Mul`` operands are factored out
    recursively; two ``Vector3D`` instances produce a ``DotProduct``;
    array-likes fall back to ``cartesian_dot_product``.  Raises
    NotImplementedError for unhandled operand types.
    """
    if isinstance(a, Mul):
        a = a.expand()
        avect = 1
        aivect = -1
        for ai, fact in enumerate(a.args):
            if isinstance(fact, Vector3D):
                avect = fact
                aivect = ai
                break
        acoef = a.args[:aivect] + a.args[aivect + 1:]
        acoef = Mul(*acoef)
        return acoef * dot(avect, b)
    if isinstance(b, Mul):
        b = b.expand()
        bvect = 1
        bivect = -1
        for bi, fact in enumerate(b.args):
            if isinstance(fact, Vector3D):
                bvect = fact
                bivect = bi
                break
        bcoef = b.args[:bivect] + b.args[bivect + 1:]
        bcoef = Mul(*bcoef)
        return bcoef * dot(a, bvect)
    if isinstance(a, Vector3D) and isinstance(b, Vector3D):
        return DotProduct(a, b)
    if hasattr(a, "shape") and hasattr(b, "shape"):
        return cartesian_dot_product(a, b)
    # The operand details are folded into the exception message; the
    # original used Python-2-only debug ``print`` statements here.
    raise NotImplementedError(
        "could not catch these instances in dot! %r (%s), %r (%s)"
        % (a, type(a), b, type(b)))
r Dot product of two 3d vectors .
50,422
def cross(a, b):
    r"""Cross product of two 3d vectors.

    Scalar prefactors inside ``Mul`` operands are factored out
    recursively; two ``Vector3D`` instances produce a ``CrossProduct``.
    Raises NotImplementedError for unhandled operand types.
    """
    if isinstance(a, Mul):
        a = a.expand()
        avect = 1
        aivect = -1
        for ai, fact in enumerate(a.args):
            if isinstance(fact, Vector3D):
                avect = fact
                aivect = ai
                break
        acoef = a.args[:aivect] + a.args[aivect + 1:]
        acoef = Mul(*acoef)
        return acoef * cross(avect, b)
    if isinstance(b, Mul):
        b = b.expand()
        bvect = 1
        bivect = -1
        for bi, fact in enumerate(b.args):
            if isinstance(fact, Vector3D):
                bvect = fact
                bivect = bi
                break
        bcoef = b.args[:bivect] + b.args[bivect + 1:]
        bcoef = Mul(*bcoef)
        return bcoef * cross(a, bvect)
    if isinstance(a, Vector3D) and isinstance(b, Vector3D):
        return CrossProduct(a, b)
    # Consistent with ``dot``: fail loudly instead of silently
    # returning None on unhandled operand types.
    raise NotImplementedError(
        "could not catch these instances in cross! %r (%s), %r (%s)"
        % (a, type(a), b, type(b)))
r Cross product of two 3d vectors .
50,423
def write_settings(settings):
    """Save user's settings as JSON into SETTINGS_FILE inside DATA_DIR.

    Returns True on success, False when DATA_DIR is not writable or the
    file cannot be written.
    """
    if not os.access(DATA_DIR, os.W_OK):
        return False
    target = DATA_DIR + os.sep + SETTINGS_FILE
    try:
        # Context manager guarantees the handle is closed even on error
        # (the original used open/close without a finally).
        with open(target, 'w') as f:
            f.write(json.dumps(settings, indent=0))
        # Keep the file world-writable, as the original code did.
        os.chmod(os.path.abspath(target), 0o777)
    except IOError:
        return False
    return True
Saves the user's settings .
50,424
def is_field_unique_by_group(df, field_col, group_col):
    """Determine whether *field_col* is constant within each *group_col* group."""
    distinct_per_group = df.groupby(group_col)[field_col].agg(
        lambda values: len(pd.unique(values)))
    return all(distinct_per_group == 1)
Determine if field is constant by group in df
50,425
def _list_files_in_path ( path , pattern = "*.stan" ) : results = [ ] for dirname , subdirs , files in os . walk ( path ) : for name in files : if fnmatch ( name , pattern ) : results . append ( os . path . join ( dirname , name ) ) return ( results )
Walks a directory of stan files and returns a list of the matching file paths .
50,426
def generate_random_perovskite(lat=None):
    """Generate a random valid perovskite structure in ASE format.

    Useful for testing.  Binary and organic perovskites are not
    considered.  *lat* is the lattice parameter; a random one is drawn
    when omitted.
    """
    if not lat:
        lat = round(random.uniform(3.5, Perovskite_tilting.OCTAHEDRON_BOND_LENGTH_LIMIT * 2), 3)
    A_site = random.choice(Perovskite_Structure.A)
    B_site = random.choice(Perovskite_Structure.B)
    Ci_site = random.choice(Perovskite_Structure.C)
    Cii_site = random.choice(Perovskite_Structure.C)
    # Re-draw A/B sites until their covalent-radius difference lies in
    # the (0.05, 0.5) window.
    while covalent_radii[chemical_symbols.index(A_site)] - covalent_radii[chemical_symbols.index(B_site)] < 0.05 or covalent_radii[chemical_symbols.index(A_site)] - covalent_radii[chemical_symbols.index(B_site)] > 0.5:
        A_site = random.choice(Perovskite_Structure.A)
        B_site = random.choice(Perovskite_Structure.B)
    # Orthorhombic setting, spacegroup 62.
    return crystal([A_site, B_site, Ci_site, Cii_site],
                   [(0.5, 0.25, 0.0), (0.0, 0.0, 0.0), (0.0, 0.25, 0.0), (0.25, 0.0, 0.75)],
                   spacegroup=62,
                   cellpar=[lat * math.sqrt(2), 2 * lat, lat * math.sqrt(2), 90, 90, 90])
This generates a random valid perovskite structure in ASE format . Useful for testing . Binary and organic perovskites are not considered .
50,427
def check_positive_integer(name, value):
    """Check that *value* is a positive integer and return it as ``int``.

    Raises ValueError with a descriptive message when *value* is not an
    integer or is not positive.
    """
    try:
        value = int(value)
    # TypeError covers non-numeric, non-string inputs such as None, which
    # previously escaped as an uninformative TypeError.
    except (ValueError, TypeError):
        raise ValueError('%s should be an integer; got %r' % (name, value))
    if value > 0:
        return value
    raise ValueError('%s should be positive; got %r' % (name, value))
Check a value is a positive integer .
50,428
def check_color_input(value):
    """Check that *value* is a valid six-hex-digit colour; return an RGBColor.

    An optional single leading '#' is accepted.  Raises ValueError on
    malformed input.
    """
    value = value.lower()
    if value.startswith('#'):
        value = value[1:]
    if len(value) != 6:
        raise ValueError('Color should be six hexadecimal digits, got %r (%s)' % (value, len(value)))
    if re.sub(r'[a-f0-9]', '', value):
        raise ValueError('Color should only contain hex characters, got %r' % value)
    channels = [int(value[k:k + 2], base=16) for k in (0, 2, 4)]
    return RGBColor(*channels)
Check a value is a valid colour input .
50,429
def get_octahedra(self, atoms, periodicity=3):
    """Extract octahedra as lists of sequence numbers of corner atoms.

    For each B-site atom, the C-site virtual atoms within
    OCTAHEDRON_BOND_LENGTH_LIMIT are collected as corners.  3D
    periodicity requires all 6 corners; 2D accepts 5 or 6.  Raises
    ModuleError when no valid octahedron is found.
    """
    octahedra = []
    for n, i in enumerate(atoms):
        found = []
        if i.symbol in Perovskite_Structure.B:
            for m, j in enumerate(self.virtual_atoms):
                if j.symbol in Perovskite_Structure.C and self.virtual_atoms.get_distance(n, m) <= self.OCTAHEDRON_BOND_LENGTH_LIMIT:
                    found.append(m)
        if (periodicity == 3 and len(found) == 6) or (periodicity == 2 and len(found) in [5, 6]):
            octahedra.append([n, found])
    if not len(octahedra):
        raise ModuleError("Cannot extract valid octahedra: not enough corner atoms found!")
    return octahedra
Extract octahedra as lists of sequence numbers of corner atoms
50,430
def get_tiltplane(self, sequence):
    """Extract the main tilting plane based on the Z coordinate.

    Sorts the corner atoms by z and looks for four consecutive atoms
    whose adjacent z differences are all within
    OCTAHEDRON_ATOMS_Z_DIFFERENCE.  Returns the indices of the last such
    window found, or an empty list when none exists.
    """
    sequence = sorted(sequence, key=lambda x: self.virtual_atoms[x].z)
    in_plane = []
    # NOTE(review): range stops at len(sequence)-4, which skips the window
    # starting at index len(sequence)-4 itself — confirm this is intended.
    for i in range(0, len(sequence) - 4):
        if abs(self.virtual_atoms[sequence[i]].z - self.virtual_atoms[sequence[i + 1]].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE and abs(self.virtual_atoms[sequence[i + 1]].z - self.virtual_atoms[sequence[i + 2]].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE and abs(self.virtual_atoms[sequence[i + 2]].z - self.virtual_atoms[sequence[i + 3]].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE:
            in_plane = [sequence[j] for j in range(i, i + 4)]
    return in_plane
Extract the main tilting plane based on the Z coordinate
50,431
def register(self, path, help_text=None, help_context=None):
    """Register an email template at *path*.

    Raises AlreadyRegistered when *path* is already present.
    """
    if path in self._registry:
        raise AlreadyRegistered('The template %s is already registered' % path)
    item = RegistrationItem(path, help_text, help_context)
    self._registry[path] = item
    logger.debug("Registered email template %s", path)
Registers email template .
50,432
def get_registration(self, path):
    """Return the registration item for *path*; raise NotRegistered otherwise."""
    if self.is_registered(path):
        return self._registry[path]
    raise NotRegistered("Email template not registered")
Returns registration item for specified path .
50,433
def get_form_help_text(self, path):
    """Return form help text for *path*, or an empty string if unregistered."""
    try:
        return self.get_registration(path).as_form_help_text()
    except NotRegistered:
        return u""
Returns text that can be used as form help text for creating email templates .
50,434
def report_build_messages(self):
    """Report warning/error statistics when running under TeamCity.

    Detection is via the TEAMCITY_VERSION environment variable; outside
    a TeamCity build nothing is emitted.
    """
    if not os.getenv('TEAMCITY_VERSION'):
        return
    warning_msg = "##teamcity[buildStatisticValue key='pepper8warnings' value='{}']\n"
    error_msg = "##teamcity[buildStatisticValue key='pepper8errors' value='{}']\n"
    stdout.write(warning_msg.format(self.total_warnings))
    stdout.write(error_msg.format(self.total_errors))
    stdout.flush()
Checks environment variables to see whether pepper8 is run under a build agent such as TeamCity and performs the adequate actions to report statistics .
50,435
def phase_transformation(Ne, Nl, rm, xi, return_equations=False):
    """Return a phase transformation theta_i.

    Builds the equations ``-varpi_l + theta_j - theta_i = 0`` for every
    dipole-coupled pair driven by field l, solves them for theta, and
    fixes the remaining free phase using the first level's frequency.
    With ``return_equations=True`` the raw equations are returned.
    """
    E0, omega_laser = define_laser_variables(Nl)
    theta = [Symbol('theta' + str(i + 1)) for i in range(Ne)]
    # Accept xi as a list of matrices by converting to a 3-index array.
    if type(xi) == list:
        xi = np.array([[[xi[l][i, j]
                         for j in range(Ne)]
                        for i in range(Ne)]
                       for l in range(Nl)])
    eqs = []
    for i in range(Ne):
        for j in range(0, i):
            if (rm[0][i, j] != 0) or (rm[1][i, j] != 0) or (rm[2][i, j] != 0):
                for l in range(Nl):
                    if xi[l, i, j] == 1:
                        eqs += [-omega_laser[l] + theta[j] - theta[i]]
    if return_equations:
        return eqs
    sol = sympy.solve(eqs, theta, dict=True)
    sol = sol[0]
    # Any theta not determined by the equations remains a free parameter.
    extra_thetas = []
    for i in range(Ne):
        if theta[i] not in sol.keys():
            sol.update({theta[i]: theta[i]})
            extra_thetas += [theta[i]]
    omega_level, omega, gamma = define_frequencies(Ne)
    # Fix the first free phase from the first level's frequency.
    eq_crit = sol[theta[0]] + omega_level[0]
    ss = sympy.solve(eq_crit, extra_thetas[0])[0]
    ss = {extra_thetas[0]: ss}
    sol_simple = [sol[theta[i]].subs(ss) for i in range(Ne)]
    return sol_simple
Returns a phase transformation theta_i .
50,436
def define_simplification(omega_level, xi, Nl):
    """Return a simplifying function, its inverse, and simplified frequencies.

    Consecutive levels sharing the same frequency are merged.  ``u`` maps
    an original level index to its merged index; ``invu`` maps a merged
    index back to the first original level with that frequency.  Returns
    ``(u, invu, omega_levelu, Neu, xiu)``.
    """
    try:
        Ne = len(omega_level)
    # Narrowed from a bare ``except``: only objects without len() fall
    # back to the shape attribute.
    except TypeError:
        Ne = omega_level.shape[0]
    om = omega_level[0]
    iu = 0
    omega_levelu = [om]
    d = {}
    di = {0: 0}
    for i in range(Ne):
        if omega_level[i] != om:
            iu += 1
            om = omega_level[i]
            omega_levelu += [om]
            di.update({iu: i})
        d.update({i: iu})

    def u(i):
        return d[i]

    def invu(iu):
        return di[iu]
    Neu = len(omega_levelu)
    # Reduce the coupling tensor to the merged level indices.
    xiu = np.array([[[xi[l, invu(i), invu(j)]
                      for j in range(Neu)]
                     for i in range(Neu)]
                    for l in range(Nl)])
    return u, invu, omega_levelu, Neu, xiu
Return a simplifying function its inverse and simplified frequencies .
50,437
def find_omega_min(omega_levelu, Neu, Nl, xiu):
    r"""Find the smallest transition frequency for each field.

    Returns the minimal frequencies together with the upper and lower
    level indices of the corresponding transitions.
    """
    omega_min = []
    iu0 = []
    ju0 = []
    for l in range(Nl):
        candidates = [(omega_levelu[iu] - omega_levelu[ju], iu, ju)
                      for iu in range(Neu)
                      for ju in range(iu)
                      if xiu[l, iu, ju] == 1]
        candidates.sort()
        smallest = candidates[0]
        omega_min.append(smallest[0])
        iu0.append(smallest[1])
        ju0.append(smallest[2])
    return omega_min, iu0, ju0
r Find the smallest transition frequency for each field .
50,438
def detunings_indices(Neu, Nl, xiu):
    r"""Get the (upper, lower) index pairs of the transitions of all fields."""
    pairs = []
    for l in range(Nl):
        pairs.append([(iu, ju)
                      for iu in range(Neu)
                      for ju in range(iu)
                      if xiu[l, iu, ju] == 1])
    return pairs
r Get the indices of the transitions of all fields .
50,439
def detunings_code(Neu, Nl, pairs, omega_levelu, iu0, ju0):
    r"""Get the code to calculate the simplified detunings.

    One assignment line is produced per transition pair of every field;
    a numeric correction is appended when the pair is not the reference
    transition of that field.
    """
    chunks = []
    for l in range(Nl):
        for iu, ju in pairs[l]:
            piece = " delta" + str(l + 1)
            piece += "_" + str(iu + 1)
            piece += "_" + str(ju + 1)
            piece += " = detuning_knob[" + str(l) + "]"
            corr = -omega_levelu[iu] + omega_levelu[iu0[l]]
            corr = -omega_levelu[ju0[l]] + omega_levelu[ju] + corr
            if corr != 0:
                piece += " + (" + str(corr) + ")"
            chunks.append(piece + "\n")
    return "".join(chunks)
r Get the code to calculate the simplified detunings .
50,440
def detunings_combinations(pairs):
    r"""Return all combinations choosing one detuning pair per field."""
    def extend(pairs, combs, l):
        # Append each of field l's pairs to every existing combination.
        return [comb + [pair] for comb in combs for pair in pairs[l]]
    Nl = len(pairs)
    combs = [[pair] for pair in pairs[0]]
    for l in range(1, Nl):
        # Bug fix: the original passed the constant 1 instead of l, so
        # every field beyond the first reused field 1's pairs.  (The
        # helper was also renamed from ``iter``, which shadowed the
        # builtin.)
        combs = extend(pairs, combs, l)
    return combs
r Return all combinations of detunings .
50,441
def detunings_rewrite(expr, combs, omega_laser, symb_omega_levelu, omega_levelu, iu0, ju0):
    r"""Rewrite a symbolic expression in terms of allowed transition detunings.

    Tries every combination of one transition per field; when a match is
    found the expression is written as a signed sum of delta symbols,
    otherwise it is expressed numerically in terms of the detuning knobs.
    """
    Nl = len(omega_laser)
    Neu = len(symb_omega_levelu)
    # Coefficient of each laser frequency in the expression.
    a = [diff(expr, omega_laser[l]) for l in range(Nl)]
    success = False
    for comb in combs:
        expr_try = 0
        for l in range(Nl):
            expr_try += a[l] * (omega_laser[l]
                                - symb_omega_levelu[comb[l][0]]
                                + symb_omega_levelu[comb[l][1]])
        if expr - expr_try == 0:
            success = True
            break
    assign = ""
    if success:
        # Express as +/- delta{l}_{iu}_{ju} per contributing field.
        for l in range(Nl):
            if a[l] != 0:
                if a[l] == 1:
                    assign += "+"
                elif a[l] == -1:
                    assign += "-"
                assign += "delta" + str(l + 1)
                assign += "_" + str(comb[l][0] + 1)
                assign += "_" + str(comb[l][1] + 1)
    else:
        # Fall back to a numeric remainder plus raw detuning knobs.
        _remainder = expr - sum([a[l] * omega_laser[l] for l in range(Nl)])
        b = [diff(_remainder, symb_omega_levelu[j]) for j in range(Neu)]
        remainder = sum([b[j] * omega_levelu[j] for j in range(Neu)])
        remainder += sum([a[l] * (omega_levelu[iu0[l]] - omega_levelu[ju0[l]])
                          for l in range(Nl)])
        assign = str(remainder)
        for l in range(Nl):
            if a[l] != 0:
                if a[l] == 1:
                    assign += "+"
                elif a[l] == -1:
                    assign += "-"
                assign += "detuning_knob[" + str(l) + "]"
    return assign
r Rewrite a symbolic expression in terms of allowed transition detunings .
50,442
def term_code(mu, nu, coef, matrix_form, rhouv_isconjugated, linear=True):
    r"""Get code to calculate a linear (or constant) term of the equations.

    The symbolic coefficient is translated to numpy-flavoured source text
    and wired into either A/b (matrix form) or rhs accumulation lines.
    """
    if coef == 0:
        return ""
    coef = str(coef)
    # Translate the symbolic field amplitude E_{0l} to Ep[l-1].
    start = coef.find("E_{0")
    end = coef.find("}")
    if start != -1:
        l = int(coef[start + 4:end])
        coef = coef[:start] + "Ep[" + str(l - 1) + "]" + coef[end + 1:]
    # Rewrite symbolic operators/functions as numpy expressions.
    for old, new in (("rp[", "rp[:, "),
                     ("rm[", "rm[:, "),
                     ("conjugate(", "np.conjugate("),
                     ("re(", "np.real("),
                     ("im(", "np.imag("),
                     ("*I", "j")):
        coef = coef.replace(old, new)
    if not linear:
        if matrix_form:
            return " b[" + str(mu) + "] += " + coef + "\n"
        return " rhs[" + str(mu) + "] += " + coef + "\n"
    result = " "
    if matrix_form:
        return result + "A[" + str(mu) + ", " + str(nu) + "] += " + coef + "\n"
    result += "rhs[" + str(mu) + "] += (" + coef + ")"
    if rhouv_isconjugated:
        result += "*np.conjugate(rho[" + str(nu) + '])\n'
    else:
        result += "*rho[" + str(nu) + ']\n'
    return result
r Get code to calculate a linear term .
50,443
def observable(operator, rho, unfolding, complex=False):
    r"""Return the expectation value of *operator* for state(s) *rho*.

    A 2d *rho* is treated as a sequence of states (one per row) and an
    array of observables is returned.  For normalized unfoldings the
    rho11 population is reconstructed from the trace condition.  The
    real part is returned unless ``complex=True``.
    """
    if len(rho.shape) == 2:
        return np.array([observable(operator, i, unfolding) for i in rho])
    Ne = unfolding.Ne
    Mu = unfolding.Mu
    obs = 0
    if unfolding.normalized:
        # rho11 is not stored; recover it from Tr(rho) = 1.
        rho11 = 1 - sum([rho[Mu(1, i, i)] for i in range(1, Ne)])
    for i in range(Ne):
        for k in range(Ne):
            if unfolding.real:
                if k == 0 and i == 0:
                    obs += operator[i, k] * rho11
                else:
                    # Real unfolding stores lower-triangle components only.
                    if k < i:
                        u, v = (i, k)
                    else:
                        u, v = (k, i)
                    obs += operator[i, k] * rho[Mu(1, u, v)]
                    if k != i:
                        if k < i:
                            obs += 1j * operator[i, k] * rho[Mu(-1, u, v)]
                        else:
                            obs += -1j * operator[i, k] * rho[Mu(-1, u, v)]
            else:
                if k == 0 and i == 0:
                    obs += operator[i, k] * rho11
                else:
                    obs += operator[i, k] * rho[Mu(0, k, i)]
    if not complex:
        obs = np.real(obs)
    return obs
r Return an observable amount .
50,444
def electric_succeptibility(l, Ep, epsilonp, rm, n, rho, unfolding, part=0):
    r"""Return the electric susceptibility for field *l*.

    ``part`` selects the component: +1 real, -1 imaginary, 0 the full
    complex susceptibility (computed from the two parts).
    """
    epsilonm = epsilonp.conjugate()
    rp = np.array([rm[i].transpose().conjugate() for i in range(3)])
    if part == 1:
        op = cartesian_dot_product(rp, epsilonm[0])
        op += cartesian_dot_product(rm, epsilonp[0])
        op = -e_num * n / epsilon_0_num / np.abs(Ep[0]) * op
    elif part == -1:
        op = cartesian_dot_product(rm, epsilonp[0])
        op += -cartesian_dot_product(rp, epsilonm[0])
        op = -1j * e_num * n / epsilon_0_num / np.abs(Ep[0]) * op
    elif part == 0:
        # Combine the real and imaginary parts recursively.
        chire = electric_succeptibility(l, Ep, epsilonp, rm, n, rho,
                                        unfolding, +1)
        chiim = electric_succeptibility(l, Ep, epsilonp, rm, n, rho,
                                        unfolding, -1)
        return chire + 1j * chiim
    return np.real(observable(op, rho, unfolding))
r Return the electric succeptibility for a given field .
50,445
def radiated_intensity(rho, i, j, epsilonp, rm, omega_level, xi, N, D, unfolding):
    r"""Return the radiated intensity in a given direction for the i -> j line."""
    def inij(i, j, ilist, jlist):
        # Indicator: 1 when (i, j) belongs to the selected transition manifold.
        if (i in ilist) and (j in jlist):
            return 1
        else:
            return 0
    rm = np.array(rm)
    Nl = xi.shape[0]
    Ne = xi.shape[1]
    aux = define_simplification(omega_level, xi, Nl)
    u = aux[0]
    omega_levelu = aux[2]
    ui = u(i)
    uj = u(j)
    omegaij = omega_levelu[ui] - omega_levelu[uj]
    # All original levels that merge into the upper/lower simplified levels.
    ilist = [ii for ii in range(Ne) if u(ii) == ui]
    jlist = [jj for jj in range(Ne) if u(jj) == uj]
    rp = np.array([rm[ii].conjugate().transpose() for ii in range(3)])
    # Mask the dipole operators to the selected transition manifold.
    rm = np.array([[[rm[p, ii, jj] * inij(ii, jj, ilist, jlist)
                     for jj in range(Ne)]
                    for ii in range(Ne)]
                   for p in range(3)])
    rp = np.array([[[rp[p, ii, jj] * inij(jj, ii, ilist, jlist)
                     for jj in range(Ne)]
                    for ii in range(Ne)]
                   for p in range(3)])
    epsilonm = epsilonp.conjugate()
    Adag = cartesian_dot_product(rm, epsilonp)
    A = cartesian_dot_product(rp, epsilonm)
    fact = alpha_num * N * hbar_num * omegaij ** 3 / 2 / np.pi / c_num ** 2 / D ** 2
    Iop = fact * np.dot(Adag, A)
    intensity = observable(Iop, rho, unfolding)
    intensity = float(np.real(intensity))
    return intensity
r Return the radiated intensity in a given direction .
50,446
def inverse(self, rhov, time_derivative=False):
    r"""Fold a vectorized density matrix *rhov* back into an Ne x Ne matrix.

    Works on numpy arrays (numeric) and sympy Matrices (symbolic).  For
    lower-triangular unfoldings the upper triangle is restored by
    hermiticity.  For normalized unfoldings rho[0, 0] is reconstructed
    from the trace condition (or its derivative when ``time_derivative``).
    """
    Ne = self.Ne
    Nrho = self.Nrho
    IJ = self.IJ
    if isinstance(rhov, np.ndarray):
        rho = np.zeros((Ne, Ne), complex)
        numeric = True
    elif isinstance(rhov, sympy.Matrix):
        rho = sympy.zeros(Ne, Ne)
        numeric = False
    for mu in range(Nrho):
        s, i, j = IJ(mu)
        # s == -1 components are imaginary parts; re-attach the i factor.
        if numeric:
            if s == 1:
                rho[i, j] += rhov[mu]
            elif s == -1:
                rho[i, j] += 1j * rhov[mu]
            elif s == 0:
                rho[i, j] += rhov[mu]
        else:
            if s == 1:
                rho[i, j] += rhov[mu]
            elif s == -1:
                rho[i, j] += sympy.I * rhov[mu]
            elif s == 0:
                rho[i, j] += rhov[mu]
    if self.lower_triangular:
        for i in range(Ne):
            for j in range(i):
                rho[j, i] = rho[i, j].conjugate()
    if self.normalized:
        if time_derivative:
            rho[0, 0] = -sum([rho[i, i] for i in range(1, Ne)])
        else:
            rho[0, 0] = 1 - sum([rho[i, i] for i in range(1, Ne)])
    return rho
r Fold a vector into a matrix .
50,447
def eplotter(task, data):
    """Prepare plot series data; like bdplotter but less complicated.

    ``task='optstory'`` plots column 4 of each record shifted so the
    minimum is zero; ``task='convergence'`` plots the raw values.
    Returns a one-element list with a series dict.
    """
    results, color, fdata = [], None, []
    if task == 'optstory':
        color = '#CC0000'
        clickable = True
        for n, record in enumerate(data):
            fdata.append([n, record[4]])
        # Shift the energies so the minimum is zero.
        fdata = array(fdata)
        fdata[:, 1] -= min(fdata[:, 1])
        fdata = fdata.tolist()
    elif task == 'convergence':
        color = '#0066CC'
        clickable = False
        for n, value in enumerate(data):
            fdata.append([n, value])
    for n in range(len(fdata)):
        fdata[n][1] = round(fdata[n][1], 5)
    results.append({'color': color,
                    # Bug fix: the key was misspelled as 'clickable:' with
                    # a stray colon inside the string.
                    'clickable': clickable,
                    'data': fdata})
    return results
eplotter is like bdplotter but less complicated
50,448
def popenCLIExecutable(command, **kwargs):
    """Wrapper around the subprocess.Popen constructor that detects Slicer
    CLI modules and launches them through the Slicer launcher, preventing
    potential DLL dependency issues.

    *command* is a Popen-style argument list; extra keyword arguments are
    forwarded to subprocess.Popen.
    """
    cliExecutable = command[0]
    # re_slicerSubPath matches executables living under a Slicer tree.
    ma = re_slicerSubPath.search(cliExecutable)
    if ma:
        wrapper = os.path.join(cliExecutable[:ma.start()], 'Slicer')
        if sys.platform.startswith('win'):
            wrapper += '.exe'
        if os.path.exists(wrapper):
            command = [wrapper, '--launcher-no-splash', '--launch'] + command
    return subprocess.Popen(command, **kwargs)
Wrapper around subprocess . Popen constructor that tries to detect Slicer CLI modules and launches them through the Slicer launcher in order to prevent potential DLL dependency issues .
50,449
def _candidate_filenames ( ) : while True : random_stub = '' . join ( [ random . choice ( string . ascii_letters + string . digits ) for _ in range ( 5 ) ] ) yield 'specktre_%s.png' % random_stub
Generates filenames of the form specktre_123AB . png .
50,450
def draw_tiling(coord_generator, filename):
    """Render coordinates from *coord_generator* into a new image file.

    The generator is called with the canvas dimensions and must yield
    polygon coordinate sequences, which are drawn in white outline.
    """
    im = Image.new('L', size=(CANVAS_WIDTH, CANVAS_HEIGHT))
    # Hoisted out of the loop: the original constructed a fresh
    # ImageDraw.Draw wrapper for every polygon.
    drawer = ImageDraw.Draw(im)
    for shape in coord_generator(CANVAS_WIDTH, CANVAS_HEIGHT):
        drawer.polygon(shape, outline='white')
    im.save(filename)
Given a coordinate generator and a filename render those coordinates in a new image and save them to the file .
50,451
def trace(self, msg, *args, **kwargs):
    """Log ``msg % args`` with severity TRACE."""
    if not self.isEnabledFor(TRACE):
        return
    self._log(TRACE, msg, args, **kwargs)
Log msg % args with severity TRACE .
50,452
def get_state(cls, clz):
    """Retrieve (and lazily initialize) the shared state for class *clz*.

    The state is created via ``clz.init_state()`` when available,
    otherwise as an empty dict, and cached in ``cls.__shared_state``.
    """
    shared = cls.__shared_state
    if clz not in shared:
        shared[clz] = clz.init_state() if hasattr(clz, "init_state") else {}
    return shared[clz]
Retrieve the state of a given Class .
50,453
def define_density_matrix(Ne, explicitly_hermitian=False, normalized=False, variables=None):
    r"""Return a symbolic Ne x Ne density matrix.

    Diagonal entries are positive symbols.  With ``explicitly_hermitian``
    the upper triangle is the conjugate of the lower.  With ``normalized``
    rho11 is replaced by 1 minus the other populations.  When *variables*
    is given, entries are functions of those variables.
    """
    # LaTeX-style braces are only needed for two-digit indices.
    if Ne > 9:
        comma = ","
        name = r"\rho"
        open_brace = "_{"
        close_brace = "}"
    else:
        comma = ""
        name = "rho"
        open_brace = ""
        close_brace = ""
    rho = []
    for i in range(Ne):
        row_rho = []
        for j in range(Ne):
            if i == j:
                row_rho += [define_symbol(name, open_brace, comma, i, j,
                                          close_brace, variables, positive=True)]
            elif i > j:
                row_rho += [define_symbol(name, open_brace, comma, i, j,
                                          close_brace, variables)]
            else:
                if explicitly_hermitian:
                    # Upper triangle mirrors the lower by conjugation.
                    row_rho += [conjugate(define_symbol(name, open_brace, comma,
                                                        j, i, close_brace,
                                                        variables))]
                else:
                    row_rho += [define_symbol(name, open_brace, comma, i, j,
                                              close_brace, variables)]
        rho += [row_rho]
    if normalized:
        rho11 = 1 - sum([rho[i][i] for i in range(1, Ne)])
        rho[0][0] = rho11
    rho = Matrix(rho)
    return rho
r Return a symbolic density matrix .
50,454
def define_laser_variables(Nl, real_amplitudes=False, variables=None):
    r"""Return the amplitudes and frequencies of Nl fields.

    Amplitudes are Symbols, or Functions of *variables* when given;
    frequencies are positive Symbols.
    """
    labels = [r"E_0^" + str(l + 1) for l in range(Nl)]
    if variables is None:
        E0 = [Symbol(label, real=real_amplitudes) for label in labels]
    else:
        E0 = [Function(label, real=real_amplitudes)(*variables)
              for label in labels]
    omega_laser = [Symbol(r"varpi_" + str(l + 1), positive=True)
                   for l in range(Nl)]
    return E0, omega_laser
r Return the amplitudes and frequencies of Nl fields .
50,455
def complex_matrix_plot ( A , logA = False , normalize = False , plot = True , ** kwds ) : r N = len ( A [ 0 ] ) if logA : Anew = [ ] for i in range ( N ) : row = [ ] for j in range ( N ) : if A [ i ] [ j ] != 0 : row += [ log ( log ( A [ i ] [ j ] ) ) ] else : row += [ 0.0 ] Anew += [ row ] A = Anew [ : ] if normalize : norm = 1 for i in range ( N ) : for j in range ( N ) : if abs ( A [ i ] [ j ] ) > norm : norm = abs ( A [ i ] [ j ] ) A = [ [ A [ i ] [ j ] / norm for j in range ( N ) ] for i in range ( N ) ] color_matrix = [ ] lmax = - 1 for i in range ( N ) : row = [ ] for j in range ( N ) : rgb , l = complex_to_color ( A [ i ] [ j ] ) row += [ rgb ] if l > lmax : lmax = l color_matrix += [ row ] if normalize : color_matrix = [ [ tuple ( [ k / lmax for k in color_matrix [ i ] [ j ] ] ) for j in range ( N ) ] for i in range ( N ) ] if plot : pyplot . imshow ( color_matrix , interpolation = 'none' ) pyplot . savefig ( 'a.png' , bbox_inches = 'tight' ) pyplot . close ( 'all' ) else : return color_matrix
r A function to plot complex matrices .
50,456
def bar_chart_mf ( data , path_name ) : N = len ( data ) ind = np . arange ( N ) width = 0.8 fig , ax = pyplot . subplots ( ) rects1 = ax . bar ( ind , data , width , color = 'g' ) ax . set_ylabel ( 'Population' ) ax . set_xticks ( ind + width / 2 ) labs = [ 'm=' + str ( i ) for i in range ( - N / 2 + 1 , N / 2 + 1 ) ] ax . set_xticklabels ( labs ) def autolabel ( rects ) : for rect in rects : rect . get_height ( ) autolabel ( rects1 ) pyplot . savefig ( path_name ) pyplot . close ( )
Make a bar chart for data on MF quantities .
50,457
def draw_plane_wave_3d ( ax , beam , dist_to_center = 0 ) : Ex = [ ] Ey = [ ] Ez = [ ] k = [ cos ( beam . phi ) * sin ( beam . theta ) , sin ( beam . phi ) * sin ( beam . theta ) , cos ( beam . theta ) ] kx , ky , kz = k Nt = 1000 tstep = 7 * pi / 4 / ( Nt - 1 ) alpha = beam . alpha beta = beam . beta phi = beam . phi theta = beam . theta omega = 1 for i in range ( Nt ) : t = i * tstep Ex += [ ( cos ( 2 * alpha ) * cos ( phi ) * cos ( theta ) - sin ( 2 * alpha ) * sin ( phi ) ) * cos ( omega * t ) * cos ( 2 * beta ) - ( cos ( phi ) * cos ( theta ) * sin ( 2 * alpha ) + cos ( 2 * alpha ) * sin ( phi ) ) * sin ( omega * t ) * sin ( 2 * beta ) - dist_to_center * kx ] Ey += [ ( cos ( 2 * alpha ) * cos ( theta ) * sin ( phi ) + cos ( phi ) * sin ( 2 * alpha ) ) * cos ( omega * t ) * cos ( 2 * beta ) - ( cos ( theta ) * sin ( 2 * alpha ) * sin ( phi ) - cos ( 2 * alpha ) * cos ( phi ) ) * sin ( omega * t ) * sin ( 2 * beta ) - dist_to_center * ky ] Ez += [ - cos ( omega * t ) * cos ( 2 * alpha ) * cos ( 2 * beta ) * sin ( theta ) + sin ( omega * t ) * sin ( 2 * alpha ) * sin ( 2 * beta ) * sin ( theta ) - dist_to_center * kz ] ax . plot ( Ex , Ey , Ez , beam . color + '-' ) ff = dist_to_center - 1.0 arrx = [ - kx * dist_to_center , - kx * ff ] arry = [ - ky * dist_to_center , - ky * ff ] arrz = [ - kz * dist_to_center , - kz * ff ] arrow = Arrow3D ( arrx , arry , arrz , mutation_scale = 20 , lw = 1 , arrowstyle = "-|>" , color = beam . color ) ax . add_artist ( arrow ) ax . plot ( [ Ex [ - 1 ] ] , [ Ey [ - 1 ] ] , [ Ez [ - 1 ] ] , '.' , markersize = 8 , color = beam . color )
Draw the polarization of a plane wave .
50,458
def draw_lasers_3d ( ax , lasers , name = None , distances = None , lim = None ) : if distances is None : distances = [ 1.0 for i in range ( len ( lasers ) ) ] for i in range ( len ( lasers ) ) : if type ( lasers [ i ] ) == PlaneWave : draw_plane_wave_3d ( ax , lasers [ i ] , distances [ i ] ) elif type ( lasers [ i ] ) == MotField : draw_mot_field_3d ( ax , lasers [ i ] , distances [ i ] ) ax . set_xlabel ( r"$x$" , fontsize = 20 ) ax . set_ylabel ( r"$y$" , fontsize = 20 ) ax . set_zlabel ( r"$z$" , fontsize = 20 ) if lim is None : lim = sqrt ( 2.0 ) ax . set_xlim ( - lim , lim ) ax . set_ylim ( - lim , lim ) ax . set_zlim ( - lim , lim ) ax . set_aspect ( "equal" ) if name is not None : pyplot . savefig ( name , bbox_inches = 'tight' )
Draw MOT lasers in 3d .
50,459
def rotate_and_traslate ( cur , alpha , v0 ) : r if len ( cur ) > 2 or ( type ( cur [ 0 ] [ 0 ] ) in [ list , tuple ] ) : cur_list = cur [ : ] for i in range ( len ( cur_list ) ) : curi = cur_list [ i ] curi = rotate_and_traslate ( curi , alpha , v0 ) cur_list [ i ] = curi return cur_list else : x0 , y0 = cur rot = np . matrix ( [ [ cos ( alpha ) , - sin ( alpha ) ] , [ sin ( alpha ) , cos ( alpha ) ] ] ) xn = [ ] yn = [ ] for i in range ( len ( x0 ) ) : v = np . matrix ( [ [ x0 [ i ] ] , [ y0 [ i ] ] ] ) vi = np . dot ( rot , v ) xn += [ float ( vi [ 0 ] [ 0 ] ) + v0 [ 0 ] ] yn += [ float ( vi [ 1 ] [ 0 ] ) + v0 [ 1 ] ] return xn , yn
r Rotate and translate a curve .
50,460
def mirror ( ax , p0 , alpha = 0 , size = 2.54 , width = 0.5 , format = None ) : r if format is None : format = 'k-' x0 = [ size / 2 , - size / 2 , - size / 2 , size / 2 , size / 2 ] y0 = [ 0 , 0 , - width , - width , 0 ] x1 = [ size / 2 , size / 2 - width ] y1 = [ 0 , - width ] x2 = [ - size / 2 + width , - size / 2 ] y2 = [ 0 , - width ] x3 = [ ( size / 2 - size / 2 + width ) / 2 , ( size / 2 - width - size / 2 ) / 2 ] y3 = [ 0 , - width ] cur_list = [ ( x0 , y0 ) , ( x1 , y1 ) , ( x2 , y2 ) , ( x3 , y3 ) ] cur_list = rotate_and_traslate ( cur_list , alpha , p0 ) for curi in cur_list : ax . plot ( curi [ 0 ] , curi [ 1 ] , format )
r Draw a mirror .
50,461
def eye ( ax , p0 , size = 1.0 , alpha = 0 , format = None , ** kwds ) : r if format is None : format = 'k-' N = 100 ang0 = pi - 3 * pi / 16 angf = pi + 3 * pi / 16 angstep = ( angf - ang0 ) / ( N - 1 ) x1 = [ size * ( cos ( i * angstep + ang0 ) + 1 ) for i in range ( N ) ] y1 = [ size * sin ( i * angstep + ang0 ) for i in range ( N ) ] ang2 = ang0 + pi / 16 x2 = [ size , size * ( 1.2 * cos ( ang2 ) + 1 ) ] y2 = [ 0 , 1.2 * size * ( sin ( ang2 ) ) ] y3 = [ 0 , - 1.2 * size * ( sin ( ang2 ) ) ] N = 100 ang0 = ang2 angf = ang2 + 4 * pi / 16 angstep = ( angf - ang0 ) / ( N - 1 ) x4 = [ size * ( 0.85 * cos ( i * angstep + ang0 ) + 1 ) for i in range ( N ) ] y4 = [ size * 0.85 * sin ( i * angstep + ang0 ) for i in range ( N ) ] cur_list = [ ( x1 , y1 ) , ( x2 , y2 ) , ( x2 , y3 ) , ( x4 , y4 ) ] cur_list = rotate_and_traslate ( cur_list , alpha , p0 ) for curi in cur_list : ax . plot ( curi [ 0 ] , curi [ 1 ] , format , ** kwds )
r Draw an eye .
50,462
def beam_splitter ( ax , p0 , size = 2.54 , alpha = 0 , format = None , ** kwds ) : r if format is None : format = 'k-' a = size / 2 x0 = [ a , - a , - a , a , a , - a ] y0 = [ a , a , - a , - a , a , - a ] cur_list = [ ( x0 , y0 ) ] cur_list = rotate_and_traslate ( cur_list , alpha , p0 ) for curi in cur_list : ax . plot ( curi [ 0 ] , curi [ 1 ] , format , ** kwds )
r Draw a beam splitter .
50,463
def simple_beam_splitter ( ax , p0 , size = 2.54 , width = 0.1 , alpha = 0 , format = None , ** kwds ) : r if format is None : format = 'k-' a = size / 2 b = width / 2 x0 = [ a , - a , - a , a , a ] y0 = [ b , b , - b , - b , b ] cur_list = [ ( x0 , y0 ) ] cur_list = rotate_and_traslate ( cur_list , alpha , p0 ) for curi in cur_list : ax . plot ( curi [ 0 ] , curi [ 1 ] , format , ** kwds )
r Draw a simple beam splitter .
50,464
def draw_arith ( ax , p0 , size = 1 , alpha = 0 , arith = None , format = None , fontsize = 10 , ** kwds ) : r if format is None : format = 'k-' a = size / 2.0 x0 = [ 0 , 2.5 * a , 0 , 0 ] y0 = [ a , 0 , - a , a ] cur_list = [ ( x0 , y0 ) ] cur_list = rotate_and_traslate ( cur_list , alpha , p0 ) for curi in cur_list : ax . plot ( curi [ 0 ] , curi [ 1 ] , format , ** kwds ) if arith is not None : pyplot . text ( p0 [ 0 ] + 0.75 * a , p0 [ 1 ] , arith , horizontalalignment = 'center' , verticalalignment = 'center' , fontsize = fontsize )
r Draw an arithmetic operator .
50,465
def draw_state ( ax , p , text = '' , l = 0.5 , alignment = 'left' , label_displacement = 1.0 , fontsize = 25 , atoms = None , atoms_h = 0.125 , atoms_size = 5 , ** kwds ) : r ax . plot ( [ p [ 0 ] - l / 2.0 , p [ 0 ] + l / 2.0 ] , [ p [ 1 ] , p [ 1 ] ] , color = 'black' , ** kwds ) if text != '' : if alignment == 'left' : ax . text ( p [ 0 ] - l / 2.0 - label_displacement , p [ 1 ] , text , horizontalalignment = 'right' , verticalalignment = 'center' , color = 'black' , fontsize = fontsize ) elif alignment == 'right' : ax . text ( p [ 0 ] + l / 2.0 + label_displacement , p [ 1 ] , text , horizontalalignment = 'left' , color = 'black' , fontsize = fontsize ) if atoms is not None : atoms_x = np . linspace ( p [ 0 ] - l * 0.5 , p [ 0 ] + l * 0.5 , atoms ) atoms_y = [ p [ 1 ] + atoms_h for i in range ( atoms ) ] ax . plot ( atoms_x , atoms_y , "ko" , ms = atoms_size )
r Draw a quantum state for energy level diagrams .
50,466
def decay ( ax , p0 , pf , A , n , format = None , ** kwds ) : r if format is None : format = 'k-' T = sqrt ( ( p0 [ 0 ] - pf [ 0 ] ) ** 2 + ( p0 [ 1 ] - pf [ 1 ] ) ** 2 ) alpha = atan2 ( pf [ 1 ] - p0 [ 1 ] , pf [ 0 ] - p0 [ 0 ] ) x = [ i * T / 400.0 for i in range ( 401 ) ] y = [ A * sin ( xi * 2 * pi * n / T ) for xi in x ] cur_list = [ ( x , y ) ] cur_list = rotate_and_traslate ( cur_list , alpha , p0 ) for curi in cur_list : ax . plot ( curi [ 0 ] , curi [ 1 ] , format , ** kwds )
r Draw a spontaneous decay as a wavy line .
50,467
def dup2 ( a , b , timeout = 3 ) : dup_err = None for i in range ( int ( 10 * timeout ) ) : try : return os . dup2 ( a , b ) except OSError as e : dup_err = e if e . errno == errno . EBUSY : time . sleep ( 0.1 ) else : raise if dup_err : raise dup_err
Like os . dup2 but retry on EBUSY
50,468
def push_filters ( self , new_filters ) : t = self . tokenizer for f in new_filters : t = f ( t ) self . tokenizer = t
Add a filter to the tokenizer chain .
50,469
def check ( self , text ) : for word , pos in self . tokenizer ( text ) : correct = self . dictionary . check ( word ) if correct : continue yield word , self . dictionary . suggest ( word ) if self . suggest else [ ] return
Yields bad words and suggested alternate spellings .
50,470
def all_nodes_that_receive ( service , service_configuration = None , run_only = False , deploy_to_only = False ) : assert not ( run_only and deploy_to_only ) if service_configuration is None : service_configuration = read_services_configuration ( ) runs_on = service_configuration [ service ] [ 'runs_on' ] deployed_to = service_configuration [ service ] . get ( 'deployed_to' ) if deployed_to is None : deployed_to = [ ] if run_only : result = runs_on elif deploy_to_only : result = deployed_to else : result = set ( runs_on ) | set ( deployed_to ) return list ( sorted ( result ) )
If run_only returns only the services that are in the runs_on list . If deploy_to_only returns only the services in the deployed_to list . If neither both are returned duplicates stripped . Results are always sorted .
50,471
def precision_and_scale ( x ) : if isinstance ( x , Decimal ) : precision = len ( x . as_tuple ( ) . digits ) scale = - 1 * x . as_tuple ( ) . exponent if scale < 0 : precision -= scale scale = 0 return ( precision , scale ) max_digits = 14 int_part = int ( abs ( x ) ) magnitude = 1 if int_part == 0 else int ( math . log10 ( int_part ) ) + 1 if magnitude >= max_digits : return ( magnitude , 0 ) frac_part = abs ( x ) - int_part multiplier = 10 ** ( max_digits - magnitude ) frac_digits = multiplier + int ( multiplier * frac_part + 0.5 ) while frac_digits % 10 == 0 : frac_digits /= 10 scale = int ( math . log10 ( frac_digits ) ) return ( magnitude + scale , scale )
From a float decide what precision and scale are needed to represent it .
50,472
def best_representative ( d1 , d2 ) : if hasattr ( d2 , 'strip' ) and not d2 . strip ( ) : return d1 if d1 is None : return d2 elif d2 is None : return d1 preference = ( datetime . datetime , bool , int , Decimal , float , str ) worst_pref = 0 worst = '' for coerced in ( d1 , d2 ) : pref = preference . index ( type ( coerced ) ) if pref > worst_pref : worst_pref = pref worst = set_worst ( worst , coerced ) elif pref == worst_pref : if isinstance ( coerced , Decimal ) : worst = set_worst ( worst , worst_decimal ( coerced , worst ) ) elif isinstance ( coerced , float ) : worst = set_worst ( worst , max ( coerced , worst ) ) else : if len ( str ( coerced ) ) > len ( str ( worst ) ) : worst = set_worst ( worst , coerced ) return worst
Given two objects each coerced to the most specific type possible return the one of the least restrictive type .
50,473
def best_coercable ( data ) : preference = ( datetime . datetime , bool , int , Decimal , float , str ) worst_pref = 0 worst = '' for datum in data : coerced = coerce_to_specific ( datum ) pref = preference . index ( type ( coerced ) ) if pref > worst_pref : worst_pref = pref worst = coerced elif pref == worst_pref : if isinstance ( coerced , Decimal ) : worst = worst_decimal ( coerced , worst ) elif isinstance ( coerced , float ) : worst = max ( coerced , worst ) else : if len ( str ( coerced ) ) > len ( str ( worst ) ) : worst = coerced return worst
Given an iterable of scalar data returns the datum representing the most specific data type the list overall can be coerced into preferring datetimes then bools then integers then decimals then floats then strings .
50,474
def sqla_datatype_for ( datum ) : try : if len ( _complex_enough_to_be_date . findall ( datum ) ) > 1 : dateutil . parser . parse ( datum ) return sa . DATETIME except ( TypeError , ValueError ) : pass try : ( prec , scale ) = precision_and_scale ( datum ) return sa . DECIMAL ( prec , scale ) except TypeError : return sa . Unicode ( len ( datum ) )
Given a scalar Python value picks an appropriate SQLAlchemy data type .
50,475
def generate ( args = None , namespace = None , file = None ) : if hasattr ( args , 'split' ) : args = args . split ( ) args = parser . parse_args ( args , namespace ) set_logging ( args ) logging . info ( str ( args ) ) if args . dialect in ( 'pg' , 'pgsql' , 'postgres' ) : args . dialect = 'postgresql' if args . dialect . startswith ( 'dj' ) : args . dialect = 'django' elif args . dialect . startswith ( 'sqla' ) : args . dialect = 'sqlalchemy' if args . dialect not in dialect_names : raise NotImplementedError ( 'First arg must be one of: %s' % ", " . join ( dialect_names ) ) if args . dialect == 'sqlalchemy' : print ( sqla_head , file = file ) for datafile in args . datafile : if is_sqlalchemy_url . search ( datafile ) : table_names_for_insert = [ ] for tbl in sqlalchemy_table_sources ( datafile ) : t = generate_one ( tbl , args , table_name = tbl . generator . name , file = file ) if t . data : table_names_for_insert . append ( tbl . generator . name ) if args . inserts and args . dialect == 'sqlalchemy' : print ( sqla_inserter_call ( table_names_for_insert ) , file = file ) if t and args . inserts : for seq_update in emit_db_sequence_updates ( t . source . db_engine ) : if args . dialect == 'sqlalchemy' : print ( ' conn.execute("%s")' % seq_update , file = file ) elif args . dialect == 'postgresql' : print ( seq_update , file = file ) else : generate_one ( datafile , args , file = file )
Genereate DDL from data sources named .
50,476
def ddl ( self , dialect = None , creates = True , drops = True ) : dialect = self . _dialect ( dialect ) creator = CreateTable ( self . table ) . compile ( mock_engines [ dialect ] ) creator = "\n" . join ( l for l in str ( creator ) . splitlines ( ) if l . strip ( ) ) comments = "\n\n" . join ( self . _comment_wrapper . fill ( "in %s: %s" % ( col , self . comments [ col ] ) ) for col in self . comments ) result = [ ] if drops : result . append ( self . _dropper ( dialect ) + ';' ) if creates : result . append ( "%s;\n%s" % ( creator , comments ) ) for child in self . children . values ( ) : result . append ( child . ddl ( dialect = dialect , creates = creates , drops = drops ) ) return '\n\n' . join ( result )
Returns SQL to define the table .
50,477
def sqlalchemy ( self , is_top = True ) : table_def = self . table_backref_remover . sub ( '' , self . table . __repr__ ( ) ) constraint_defs = [ ] for constraint in self . table . constraints : if isinstance ( constraint , sa . sql . schema . UniqueConstraint ) : col_list = ', ' . join ( "'%s'" % c . name for c in constraint . columns ) constraint_defs . append ( 'UniqueConstraint(%s)' % col_list ) if constraint_defs : constraint_defs = ',\n ' . join ( constraint_defs ) + ',' table_def = table_def . replace ( 'schema=None' , '\n ' + constraint_defs + 'schema=None' ) table_def = table_def . replace ( "MetaData(bind=None)" , "metadata" ) table_def = table_def . replace ( "Column(" , "\n Column(" ) table_def = table_def . replace ( "schema=" , "\n schema=" ) result = [ table_def , ] result . extend ( c . sqlalchemy ( is_top = False ) for c in self . children . values ( ) ) result = "\n%s = %s" % ( self . table_name , "\n" . join ( result ) ) if is_top : sqla_imports = set ( self . capitalized_words . findall ( table_def ) ) sqla_imports &= set ( dir ( sa ) ) sqla_imports = sorted ( sqla_imports ) result = self . sqlalchemy_setup_template % ( ", " . join ( sqla_imports ) , result , self . table . name ) result = textwrap . dedent ( result ) return result
Dumps Python code to set up the table s SQLAlchemy model
50,478
def _prep_datum ( self , datum , dialect , col , needs_conversion ) : if datum is None or ( needs_conversion and not str ( datum ) . strip ( ) ) : return 'NULL' pytype = self . columns [ col ] [ 'pytype' ] if needs_conversion : if pytype == datetime . datetime : datum = dateutil . parser . parse ( datum ) elif pytype == bool : datum = th . coerce_to_specific ( datum ) if dialect . startswith ( 'sqlite' ) : datum = 1 if datum else 0 else : datum = pytype ( str ( datum ) ) if isinstance ( datum , datetime . datetime ) or isinstance ( datum , datetime . date ) : if dialect in self . _datetime_format : return datum . strftime ( self . _datetime_format [ dialect ] ) else : return "'%s'" % datum elif hasattr ( datum , 'lower' ) : return "'%s'" % datum . replace ( "'" , "''" ) else : return datum
Puts a value in proper format for a SQL string
50,479
def _id_fieldname ( fieldnames , table_name = '' ) : templates = [ '%s_%%s' % table_name , '%s' , '_%s' ] for stub in [ 'id' , 'num' , 'no' , 'number' ] : for t in templates : if t % stub in fieldnames : return t % stub
Finds the field name from a dict likeliest to be its unique ID
50,480
def unnest_child_dict ( parent , key , parent_name = '' ) : val = parent [ key ] name = "%s['%s']" % ( parent_name , key ) logging . debug ( "Unnesting dict %s" % name ) id = _id_fieldname ( val , parent_name ) if id : logging . debug ( "%s is %s's ID" % ( id , key ) ) if len ( val ) <= 2 : logging . debug ( 'Removing ID column %s.%s' % ( key , id ) ) val . pop ( id ) if len ( val ) == 0 : logging . debug ( '%s is empty, removing from %s' % ( name , parent_name ) ) parent . pop ( key ) return elif len ( val ) == 1 : logging . debug ( 'Nested one-item dict in %s, making scalar.' % name ) parent [ key ] = list ( val . values ( ) ) [ 0 ] return else : logging . debug ( 'Pushing all fields from %s up to %s' % ( name , parent_name ) ) new_field_names = [ '%s_%s' % ( key , child_key . strip ( '_' ) ) for child_key in val ] overlap = ( set ( new_field_names ) & set ( parent ) ) - set ( id or [ ] ) if overlap : logging . error ( "Could not unnest child %s; %s present in %s" % ( name , key , ',' . join ( overlap ) , parent_name ) ) return for ( child_key , child_val ) in val . items ( ) : new_field_name = '%s_%s' % ( key , child_key . strip ( '_' ) ) parent [ new_field_name ] = child_val parent . pop ( key )
If parent dictionary has a key whose val is a dict unnest val s fields into parent and remove key .
50,481
def parse ( data ) : reader = io . BytesIO ( data ) headers = [ ] while reader . tell ( ) < len ( data ) : h = Header ( ) h . tag = int . from_bytes ( reader . read ( 2 ) , byteorder = 'big' , signed = False ) h . taglen = int . from_bytes ( reader . read ( 2 ) , byteorder = 'big' , signed = False ) h . tagdata = reader . read ( h . taglen ) headers . append ( h ) return headers
returns a list of header tags
50,482
def to_tgt ( self ) : enc_part = EncryptedData ( { 'etype' : 1 , 'cipher' : b'' } ) tgt_rep = { } tgt_rep [ 'pvno' ] = krb5_pvno tgt_rep [ 'msg-type' ] = MESSAGE_TYPE . KRB_AS_REP . value tgt_rep [ 'crealm' ] = self . server . realm . to_string ( ) tgt_rep [ 'cname' ] = self . client . to_asn1 ( ) [ 0 ] tgt_rep [ 'ticket' ] = Ticket . load ( self . ticket . to_asn1 ( ) ) . native tgt_rep [ 'enc-part' ] = enc_part . native t = EncryptionKey ( self . key . to_asn1 ( ) ) . native return tgt_rep , t
Returns the native format of an AS_REP message and the sessionkey in EncryptionKey native format
50,483
def from_kirbidir ( directory_path ) : cc = CCACHE ( ) dir_path = os . path . join ( os . path . abspath ( directory_path ) , '*.kirbi' ) for filename in glob . glob ( dir_path ) : with open ( filename , 'rb' ) as f : kirbidata = f . read ( ) kirbi = KRBCRED . load ( kirbidata ) . native cc . add_kirbi ( kirbi ) return cc
Iterates trough all . kirbi files in a given directory and converts all of them into one CCACHE object
50,484
def to_file ( self , filename ) : with open ( filename , 'wb' ) as f : f . write ( self . to_bytes ( ) )
Writes the contents of the CCACHE object to a file
50,485
def print_table ( lines , separate_head = True ) : widths = [ ] for line in lines : for i , size in enumerate ( [ len ( x ) for x in line ] ) : while i >= len ( widths ) : widths . append ( 0 ) if size > widths [ i ] : widths [ i ] = size print_string = "" for i , width in enumerate ( widths ) : print_string += "{" + str ( i ) + ":" + str ( width ) + "} | " if ( len ( print_string ) == 0 ) : return print_string = print_string [ : - 3 ] for i , line in enumerate ( lines ) : print ( print_string . format ( * line ) ) if ( i == 0 and separate_head ) : print ( "-" * ( sum ( widths ) + 3 * ( len ( widths ) - 1 ) ) )
Prints a formatted table given a 2 dimensional array
50,486
def get_key_for_enctype ( self , etype ) : if etype == EncryptionType . AES256_CTS_HMAC_SHA1_96 : if self . kerberos_key_aes_256 : return bytes . fromhex ( self . kerberos_key_aes_256 ) if self . password is not None : salt = ( self . domain . upper ( ) + self . username ) . encode ( ) return string_to_key ( Enctype . AES256 , self . password . encode ( ) , salt ) . contents raise Exception ( 'There is no key for AES256 encryption' ) elif etype == EncryptionType . AES128_CTS_HMAC_SHA1_96 : if self . kerberos_key_aes_128 : return bytes . fromhex ( self . kerberos_key_aes_128 ) if self . password is not None : salt = ( self . domain . upper ( ) + self . username ) . encode ( ) return string_to_key ( Enctype . AES128 , self . password . encode ( ) , salt ) . contents raise Exception ( 'There is no key for AES128 encryption' ) elif etype == EncryptionType . ARCFOUR_HMAC_MD5 : if self . kerberos_key_rc4 : return bytes . fromhex ( self . kerberos_key_rc4 ) if self . nt_hash : return bytes . fromhex ( self . nt_hash ) elif self . password : self . nt_hash = hashlib . new ( 'md4' , self . password . encode ( 'utf-16-le' ) ) . hexdigest ( ) . upper ( ) return bytes . fromhex ( self . nt_hash ) else : raise Exception ( 'There is no key for RC4 encryption' ) elif etype == EncryptionType . DES3_CBC_SHA1 : if self . kerberos_key_des3 : return bytes . fromhex ( self . kerberos_key_des ) elif self . password : salt = ( self . domain . upper ( ) + self . username ) . encode ( ) return string_to_key ( Enctype . DES3 , self . password . encode ( ) , salt ) . contents else : raise Exception ( 'There is no key for DES3 encryption' ) elif etype == EncryptionType . DES_CBC_MD5 : if self . kerberos_key_des : return bytes . fromhex ( self . kerberos_key_des ) elif self . password : salt = ( self . domain . upper ( ) + self . username ) . encode ( ) return string_to_key ( Enctype . DES_MD5 , self . password . encode ( ) , salt ) . 
contents else : raise Exception ( 'There is no key for DES3 encryption' ) else : raise Exception ( 'Unsupported encryption type: %s' % etype . name )
Returns the encryption key bytes for the enctryption type .
50,487
def run ( self , realm , users ) : existing_users = [ ] for user in users : logging . debug ( 'Probing user %s' % user ) req = KerberosUserEnum . construct_tgt_req ( realm , user ) rep = self . ksoc . sendrecv ( req . dump ( ) , throw = False ) if rep . name != 'KRB_ERROR' : existing_users . append ( user ) elif rep . native [ 'error-code' ] != KerberosErrorCode . KDC_ERR_PREAUTH_REQUIRED . value : continue else : existing_users . append ( user ) return existing_users
Requests a TGT in the name of the users specified in users . Returns a list of usernames that are in the domain .
50,488
def from_tgt ( ksoc , tgt , key ) : kc = KerbrosComm ( None , ksoc ) kc . kerberos_TGT = tgt kc . kerberos_cipher_type = key [ 'keytype' ] kc . kerberos_session_key = Key ( kc . kerberos_cipher_type , key [ 'keyvalue' ] ) kc . kerberos_cipher = _enctype_table [ kc . kerberos_cipher_type ] return kc
Sets up the kerberos object from tgt and the session key . Use this function when pulling the TGT from ccache file .
50,489
def get_TGS ( self , spn_user , override_etype = None ) : logger . debug ( 'Constructing TGS request for user %s' % spn_user . get_formatted_pname ( ) ) now = datetime . datetime . utcnow ( ) kdc_req_body = { } kdc_req_body [ 'kdc-options' ] = KDCOptions ( set ( [ 'forwardable' , 'renewable' , 'renewable_ok' , 'canonicalize' ] ) ) kdc_req_body [ 'realm' ] = spn_user . domain . upper ( ) kdc_req_body [ 'sname' ] = PrincipalName ( { 'name-type' : NAME_TYPE . SRV_INST . value , 'name-string' : spn_user . get_principalname ( ) } ) kdc_req_body [ 'till' ] = now + datetime . timedelta ( days = 1 ) kdc_req_body [ 'nonce' ] = secrets . randbits ( 31 ) if override_etype : kdc_req_body [ 'etype' ] = override_etype else : kdc_req_body [ 'etype' ] = [ self . kerberos_cipher_type ] authenticator_data = { } authenticator_data [ 'authenticator-vno' ] = krb5_pvno authenticator_data [ 'crealm' ] = Realm ( self . kerberos_TGT [ 'crealm' ] ) authenticator_data [ 'cname' ] = self . kerberos_TGT [ 'cname' ] authenticator_data [ 'cusec' ] = now . microsecond authenticator_data [ 'ctime' ] = now authenticator_data_enc = self . kerberos_cipher . encrypt ( self . kerberos_session_key , 7 , Authenticator ( authenticator_data ) . dump ( ) , None ) ap_req = { } ap_req [ 'pvno' ] = krb5_pvno ap_req [ 'msg-type' ] = MESSAGE_TYPE . KRB_AP_REQ . value ap_req [ 'ap-options' ] = APOptions ( set ( ) ) ap_req [ 'ticket' ] = Ticket ( self . kerberos_TGT [ 'ticket' ] ) ap_req [ 'authenticator' ] = EncryptedData ( { 'etype' : self . kerberos_cipher_type , 'cipher' : authenticator_data_enc } ) pa_data_1 = { } pa_data_1 [ 'padata-type' ] = PaDataType . TGS_REQ . value pa_data_1 [ 'padata-value' ] = AP_REQ ( ap_req ) . dump ( ) kdc_req = { } kdc_req [ 'pvno' ] = krb5_pvno kdc_req [ 'msg-type' ] = MESSAGE_TYPE . KRB_TGS_REQ . value kdc_req [ 'padata' ] = [ pa_data_1 ] kdc_req [ 'req-body' ] = KDC_REQ_BODY ( kdc_req_body ) req = TGS_REQ ( kdc_req ) logger . 
debug ( 'Constructing TGS request to server' ) rep = self . ksoc . sendrecv ( req . dump ( ) ) logger . debug ( 'Got TGS reply, decrypting...' ) tgs = rep . native encTGSRepPart = EncTGSRepPart . load ( self . kerberos_cipher . decrypt ( self . kerberos_session_key , 8 , tgs [ 'enc-part' ] [ 'cipher' ] ) ) . native key = Key ( encTGSRepPart [ 'key' ] [ 'keytype' ] , encTGSRepPart [ 'key' ] [ 'keyvalue' ] ) self . ccache . add_tgs ( tgs , encTGSRepPart ) logger . debug ( 'Got valid TGS reply' ) self . kerberos_TGS = tgs return tgs , encTGSRepPart , key
Requests a TGS ticket for the specified user . Retruns the TGS ticket end the decrpyted encTGSRepPart .
50,490
def minimise ( table , target_length , check_for_aliases = True ) : if check_for_aliases : if len ( set ( e . mask for e in table ) ) == 1 and len ( table ) == len ( set ( e . key for e in table ) ) : check_for_aliases = False new_table = list ( ) for i , entry in enumerate ( table ) : if not _is_defaultable ( i , entry , table , check_for_aliases ) : new_table . append ( entry ) if target_length is not None and target_length < len ( new_table ) : raise MinimisationFailedError ( target_length , len ( new_table ) ) return new_table
Remove from the routing table any entries which could be replaced by default routing .
50,491
def _is_defaultable ( i , entry , table , check_for_aliases = True ) : if ( len ( entry . sources ) == 1 and len ( entry . route ) == 1 and None not in entry . sources ) : source = next ( iter ( entry . sources ) ) sink = next ( iter ( entry . route ) ) if source . is_link and sink . is_link : if source . opposite is sink : key , mask = entry . key , entry . mask if not check_for_aliases or not any ( intersect ( key , mask , d . key , d . mask ) for d in table [ i + 1 : ] ) : return True return False
Determine if an entry may be removed from a routing table and be replaced by a default route .
50,492
def table_is_subset_of ( entries_a , entries_b ) : common_xs = get_common_xs ( entries_b ) for entry in expand_entries ( entries_a , ignore_xs = common_xs ) : for other_entry in entries_b : if other_entry . mask & entry . key == other_entry . key : if other_entry . route == entry . route : break else : return False else : default_routed = False if len ( entry . route ) == 1 and len ( entry . sources ) == 1 : source = next ( iter ( entry . sources ) ) sink = next ( iter ( entry . route ) ) if ( source is not None and sink . is_link and source is sink . opposite ) : default_routed = True if not default_routed : return False return True
Check that every key matched by every entry in one table results in the same route when checked against the other table .
50,493
def expand_entry ( entry , ignore_xs = 0x0 ) : xs = ( ~ entry . key & ~ entry . mask ) & ~ ignore_xs for bit in ( 1 << i for i in range ( 31 , - 1 , - 1 ) ) : if bit & xs : entry_0 = RoutingTableEntry ( entry . route , entry . key , entry . mask | bit , entry . sources ) for new_entry in expand_entry ( entry_0 , ignore_xs ) : yield new_entry entry_1 = RoutingTableEntry ( entry . route , entry . key | bit , entry . mask | bit , entry . sources ) for new_entry in expand_entry ( entry_1 , ignore_xs ) : yield new_entry break else : yield entry
Turn all Xs which are not marked in ignore_xs into 0 \ s and 1 \ s .
50,494
def expand_entries ( entries , ignore_xs = None ) : if ignore_xs is None : ignore_xs = get_common_xs ( entries ) seen_keys = set ( { } ) for entry in entries : for new_entry in expand_entry ( entry , ignore_xs ) : if new_entry . key in seen_keys : warnings . warn ( "Table is not orthogonal: Key {:#010x} matches " "multiple entries." . format ( new_entry . key ) ) else : seen_keys . add ( new_entry . key ) yield new_entry
Turn all Xs which are not ignored in all entries into 0 s and 1 s .
50,495
def get_common_xs ( entries ) : key = 0x00000000 mask = 0x00000000 for entry in entries : key |= entry . key mask |= entry . mask return ( ~ ( key | mask ) ) & 0xffffffff
Return a mask of where there are Xs in all routing table entries .
50,496
def slices_overlap ( slice_a , slice_b ) : assert slice_a . step is None assert slice_b . step is None return max ( slice_a . start , slice_b . start ) < min ( slice_a . stop , slice_b . stop )
Test if the ranges covered by a pair of slices overlap .
50,497
def concentric_hexagons ( radius , start = ( 0 , 0 ) ) : x , y = start yield ( x , y ) for r in range ( 1 , radius + 1 ) : y -= 1 for dx , dy in [ ( 1 , 1 ) , ( 0 , 1 ) , ( - 1 , 0 ) , ( - 1 , - 1 ) , ( 0 , - 1 ) , ( 1 , 0 ) ] : for _ in range ( r ) : yield ( x , y ) x += dx y += dy
A generator which produces coordinates of concentric rings of hexagons .
50,498
def spinn5_eth_coords ( width , height , root_x = 0 , root_y = 0 ) : root_x %= 12 root_x %= 12 w = ( ( width + 11 ) // 12 ) * 12 h = ( ( height + 11 ) // 12 ) * 12 for x in range ( 0 , w , 12 ) : for y in range ( 0 , h , 12 ) : for dx , dy in ( ( 0 , 0 ) , ( 4 , 8 ) , ( 8 , 4 ) ) : nx = ( x + dx + root_x ) % w ny = ( y + dy + root_y ) % h if nx < width and ny < height : yield ( nx , ny )
Generate a list of board coordinates with Ethernet connectivity in a SpiNNaker machine .
50,499
def spinn5_local_eth_coord ( x , y , w , h , root_x = 0 , root_y = 0 ) : dx , dy = SPINN5_ETH_OFFSET [ ( y - root_y ) % 12 ] [ ( x - root_x ) % 12 ] return ( ( x + int ( dx ) ) % w ) , ( ( y + int ( dy ) ) % h )
Get the coordinates of a chip s local ethernet connected chip .