idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
12,100
def add_quantity_modifier(self, quantity, modifier, overwrite=False):
    """Add a quantity modifier.

    Consider using the high-level function `add_derived_quantity` instead.
    """
    # Refuse to clobber an existing modifier unless explicitly allowed.
    if quantity in self._quantity_modifiers and not overwrite:
        raise ValueError('quantity `{}` already exists'.format(quantity))
    self._quantity_modifiers[quantity] = modifier
    # Check (without raising) that the needed native quantities exist.
    self._check_quantities_exist([quantity], raise_exception=False)
Add a quantity modifier. Consider using the high-level function add_derived_quantity instead!
12,101
def get_normalized_quantity_modifier(self, quantity):
    """Retrieve a quantity modifier in normalized form.

    Always returns a tuple whose first item is a callable and whose
    remaining items are native quantity names.
    """
    modifier = self._quantity_modifiers.get(quantity, self._default_quantity_modifier)
    if modifier is None:
        return (trivial_callable, quantity)
    if callable(modifier):
        return (modifier, quantity)
    if isinstance(modifier, (tuple, list)) and len(modifier) > 1 and callable(modifier[0]):
        # Already in normalized (callable, *names) form.
        return modifier
    # Plain string: treat it as a renamed native quantity.
    return (trivial_callable, modifier)
Retrieve a quantity modifier in normalized form. This function returns a tuple whose first item is a callable and whose remaining items are native quantity names.
12,102
def add_derived_quantity(self, derived_quantity, func, *quantities):
    """Add a derived quantity modifier.

    `func` is applied to the values of `quantities`, which may themselves
    be native or derived quantities.
    """
    if derived_quantity in self._quantity_modifiers:
        raise ValueError('quantity name `{}` already exists'.format(derived_quantity))

    if set(quantities).issubset(self._native_quantities):
        # All inputs are native: store the modifier directly.
        new_modifier = (func,) + quantities
    else:
        # Some inputs are themselves derived: compose their modifiers.
        functions = []
        quantities_needed = []
        quantity_count = []
        for q in quantities:
            modifier = self.get_normalized_quantity_modifier(q)
            functions.append(modifier[0])
            quantities_needed.extend(modifier[1:])
            quantity_count.append(len(modifier) - 1)

        def _new_func(*x):
            # x holds the flattened native inputs; slice them back out
            # per original quantity and evaluate each sub-modifier.
            assert len(x) == sum(quantity_count)
            count_current = 0
            new_args = []
            for func_this, count in zip(functions, quantity_count):
                new_args.append(func_this(*x[count_current:count_current + count]))
                count_current += count
            return func(*new_args)

        new_modifier = (_new_func,) + tuple(quantities_needed)

    self.add_quantity_modifier(derived_quantity, new_modifier)
Add a derived quantity modifier.
12,103
def add_modifier_on_derived_quantities(self, new_quantity, func, *quantities):
    """Deprecated. Use `add_derived_quantity` instead."""
    warnings.warn("Use `add_derived_quantity` instead.", DeprecationWarning)
    self.add_derived_quantity(new_quantity, func, *quantities)
Deprecated . Use add_derived_quantity instead .
12,104
def check_for_wdiff():
    """Check that the `wdiff` command can be found on the PATH.

    Raises:
        WdiffNotFoundError: if `which` cannot locate the command.
    """
    cmd = ['which', CMD_WDIFF]
    # Bug fix: the original opened os.devnull manually and closed it by
    # hand, leaking the file descriptor if Popen raised. subprocess's
    # built-in DEVNULL handles this for us.
    proc = sub.Popen(cmd, stdout=sub.DEVNULL)
    proc.wait()
    if proc.returncode != 0:
        msg = "the `{}` command can't be found".format(CMD_WDIFF)
        raise WdiffNotFoundError(msg)
Checks if the wdiff command can be found .
12,105
def generate_wdiff(org_file, new_file, fold_tags=False, html=True):
    """Return the output of the `wdiff` command as a string."""
    check_for_wdiff()
    cmd = [CMD_WDIFF]
    if html:
        cmd.extend(OPTIONS_OUTPUT)
    # NOTE(review): fold_tags handling assumed independent of `html` —
    # the flattened original is ambiguous; confirm nesting upstream.
    if not fold_tags:
        cmd.extend(OPTIONS_LINEBREAK)
    cmd.extend([org_file, new_file])
    proc = sub.Popen(cmd, stdout=sub.PIPE)
    diff, _ = proc.communicate()
    return diff.decode('utf-8')
Returns the results from the wdiff command as a string .
12,106
def body(self, master):
    """Create the dialog body; return the widget that gets initial focus."""
    self.frame = ttk.Frame(master, padding=(5, 5, 10, 10))
    self.lbl_message = ttk.Label(self.frame, text='Select User Type: ')
    self.rb_student = ttk.Radiobutton(self.frame, text='Student',
                                      variable=self.rb_choice, value='student')
    self.rb_tutor = ttk.Radiobutton(self.frame, text='Tutor',
                                    variable=self.rb_choice, value='tutor')
    self.btn_ok = ttk.Button(self.frame, text='OK', command=self.ok)
    self.btn_cancel = ttk.Button(self.frame, text='Cancel', command=self.cancel)
    # Grid layout.
    self.frame.grid(column=0, row=0, sticky=(N, S, E, W))
    self.lbl_message.grid(column=0, row=0, columnspan=2, sticky=(W, E))
    self.rb_student.grid(column=0, row=1, columnspan=2, sticky=W)
    self.rb_tutor.grid(column=0, row=2, columnspan=2, sticky=W)
    self.btn_ok.grid(column=0, row=3)
    self.btn_cancel.grid(column=1, row=3)
    # Keyboard bindings.
    self.bind('<Return>', self.ok)
    self.bind('<KP_Enter>', self.ok)
    self.bind('<Escape>', self.cancel)
    # Default selection: tutor.
    self.rb_tutor.invoke()
    return self.btn_ok
Create dialog body . Return widget that should have initial focus .
12,107
def apply(self):
    """Inherited from tkinter.simpledialog.Dialog; store the chosen type."""
    user_type = self.rb_choice.get()
    # Only accept the two known user types.
    if user_type in ('student', 'tutor'):
        self.result = user_type
Inherited from tkinter . simpledialog . Dialog
12,108
def flag_forgotten_entries(session, today=None):
    """Sign out entries from previous days where users forgot to sign out."""
    today = date.today() if today is None else today
    forgotten = (
        session.query(Entry)
        .filter(Entry.time_out.is_(None))
        .filter(Entry.forgot_sign_out.is_(False))
        .filter(Entry.date < today)
    )
    for entry in forgotten:
        flagged = sign_out(entry, forgot=True)
        logger.debug('Signing out forgotten entry: {}'.format(flagged))
        session.add(flagged)
    session.commit()
Flag any entries from previous days where users forgot to sign out .
12,109
def signed_in_users(session=None, today=None, full_name=True):
    """Return a list of currently signed-in users.

    NOTE: `full_name` is currently unused; kept for interface compatibility.
    """
    if session is None:
        session = Session()
    if today is None:
        today = date.today()
    users = (
        session.query(User)
        .filter(Entry.date == today)
        .filter(Entry.time_out.is_(None))
        .filter(User.user_id == Entry.user_id)
        .all()
    )
    session.close()
    return users
Return list of names of currently signed in users .
12,110
def get_user_name(user, full_name=True):
    """Return the user's name as a string, or None if unavailable."""
    try:
        if full_name:
            name = '{} {}'.format(user.first_name, user.last_name)
        else:
            name = user.first_name
    except AttributeError:
        # Object without name attributes (e.g. no matching user).
        name = None
    return name
Return the user's name as a string.
12,111
def sign_in(user, user_type=None, date=None, time_in=None):
    """Create and return a new timesheet Entry for `user` signing in."""
    now = datetime.today()
    if date is None:
        date = now.date()
    if time_in is None:
        time_in = now.time()
    if user_type is None:
        # Infer the user type, refusing ambiguous users.
        if user.is_student and user.is_tutor:
            raise AmbiguousUserType('User is both a student and a tutor.')
        elif user.is_student:
            user_type = 'student'
        elif user.is_tutor:
            user_type = 'tutor'
        else:
            raise ValueError('Unknown user type.')
    new_entry = Entry(
        uuid=str(uuid.uuid4()),
        date=date,
        time_in=time_in,
        time_out=None,
        user_id=user.user_id,
        user_type=user_type,
        user=user,
    )
    logger.info('{} ({}) signed in.'.format(new_entry.user_id, new_entry.user_type))
    return new_entry
Add a new entry to the timesheet .
12,112
def sign_out(entry, time_out=None, forgot=False):
    """Sign out of an existing timesheet entry.

    If `forgot` is set, flag the entry instead of recording a time.
    """
    if time_out is None:
        time_out = datetime.today().time()
    if forgot:
        entry.forgot_sign_out = True
        logger.info('{} forgot to sign out on {}.'.format(entry.user_id, entry.date))
    else:
        entry.time_out = time_out
        logger.info('{} ({}) signed out.'.format(entry.user_id, entry.user_type))
    return entry
Sign out of an existing entry in the timesheet . If the user forgot to sign out flag the entry .
12,113
def undo_sign_in(entry, session=None):
    """Delete a signed-in entry from the database."""
    if session is None:
        session = Session()
    entry_to_delete = (
        session.query(Entry)
        .filter(Entry.uuid == entry.uuid)
        .one_or_none()
    )
    if not entry_to_delete:
        error_message = 'Entry not found: {}'.format(entry)
        logger.error(error_message)
        raise ValueError(error_message)
    logger.info('Undo sign in: {}'.format(entry_to_delete.user_id))
    logger.debug('Undo sign in: {}'.format(entry_to_delete))
    session.delete(entry_to_delete)
    session.commit()
Delete a signed in entry .
12,114
def undo_sign_out(entry, session=None):
    """Re-open (sign back in) a signed-out entry."""
    if session is None:
        session = Session()
    entry_to_sign_in = (
        session.query(Entry)
        .filter(Entry.uuid == entry.uuid)
        .one_or_none()
    )
    if not entry_to_sign_in:
        error_message = 'Entry not found: {}'.format(entry)
        logger.error(error_message)
        raise ValueError(error_message)
    logger.info('Undo sign out: {}'.format(entry_to_sign_in.user_id))
    logger.debug('Undo sign out: {}'.format(entry_to_sign_in))
    # Clearing time_out marks the entry as signed in again.
    entry_to_sign_in.time_out = None
    session.add(entry_to_sign_in)
    session.commit()
Sign in a signed out entry .
12,115
def sign(user_id, user_type=None, today=None, session=None):
    """Validate `user_id`, then toggle the user's signed in/out state.

    Returns a Status describing what happened.
    Raises UnregisteredUser for unknown IDs.
    """
    if session is None:
        session = Session()
    if today is None:
        today = date.today()
    user = (
        session.query(User)
        .filter(User.user_id == user_id)
        .one_or_none()
    )
    if not user:
        raise UnregisteredUser(
            '{} not registered. Please register at the front desk.'.format(user_id))
    signed_in_entries = (
        user.entries
        .filter(Entry.date == today)
        .filter(Entry.time_out.is_(None))
        .all()
    )
    if not signed_in_entries:
        # No open entries today: sign the user in.
        new_entry = sign_in(user, user_type=user_type)
        session.add(new_entry)
        status = Status(valid=True, in_or_out='in',
                        user_name=get_user_name(user),
                        user_type=new_entry.user_type,
                        entry=new_entry)
    else:
        # One or more open entries: sign them all out.
        for entry in signed_in_entries:
            signed_out_entry = sign_out(entry)
            session.add(signed_out_entry)
        status = Status(valid=True, in_or_out='out',
                        user_name=get_user_name(user),
                        user_type=signed_out_entry.user_type,
                        entry=signed_out_entry)
    session.commit()
    logger.debug(status)
    return status
Check user id for validity then sign user in if they are signed out or out if they are signed in .
12,116
def format_request(self):
    """Override for a more informative request log format."""
    fmt = '{now} {status} {requestline} ({client_address}) {response_length} {delta}ms'
    requestline = getattr(self, 'requestline')
    if requestline:
        # Drop the trailing HTTP version from the request line.
        requestline = ' '.join(requestline.split(' ')[:-1])
    else:
        requestline = '???'
    if self.time_finish:
        delta = '%.2f' % ((self.time_finish - self.time_start) * 1000)
    else:
        delta = '-'
    data = dict(
        now=datetime.datetime.now().replace(microsecond=0),
        response_length=self.response_length or '-',
        client_address=(self.client_address[0]
                        if isinstance(self.client_address, tuple)
                        else self.client_address),
        status=str(self._get_status_int()),
        requestline=requestline,
        delta=delta,
    )
    return fmt.format(**data)
Override for better log format
12,117
def handle_error(self, type_, value, tb):
    """Like pywsgi.WSGIHandler.handle_error, but the response body reflects
    the traceback and the WSGI environ instead of a generic error page."""
    if not issubclass(type_, pywsgi.GreenletExit):
        self.server.loop.handle_error(self.environ, type_, value, tb)
    if self.response_length:
        # Headers already sent: all we can do is drop the connection.
        self.close_connection = True
    else:
        tb_stream = traceback.format_exception(type_, value, tb)
        del tb  # break the reference cycle with the current frame
        tb_stream.append('\n')
        tb_stream.append(pprint.pformat(self.environ))
        body = ''.join(tb_stream)
        headers = pywsgi._INTERNAL_ERROR_HEADERS[:]
        headers[2] = ('Content-Length', str(len(body)))
        self.start_response(pywsgi._INTERNAL_ERROR_STATUS, headers)
        self.write(body)
This method copies the code from pywsgi.WSGIHandler.handle_error, changing the write part so the response body reflects the traceback and environ.
12,118
def clear_measurements(self):
    """Remove all measurements and reset the measurement counter.

    All previously issued measurement IDs are invalidated.
    """
    for key in list(self.measurements.keys()):
        del self.measurements[key]
    self.meas_counter = -1
Remove all measurements from self.measurements and reset the measurement counter. All previously issued IDs are invalidated.
12,119
def add_measurements(self, measurements):
    """Add one or more measurement rows; return the new ID (or list of IDs)."""
    subdata = np.atleast_2d(measurements)
    if self.configs is None:
        raise Exception('must read in configuration before measurements can be stored')
    if subdata.shape[1] != self.configs.shape[0]:
        if subdata.shape[0] == self.configs.shape[0]:
            # Data arrived transposed; fix the orientation.
            subdata = subdata.T
        else:
            raise Exception('Number of measurements does not match number of configs')
    return_ids = []
    for dataset in subdata:
        cid = self._get_next_index()
        self.measurements[cid] = dataset.copy()
        return_ids.append(cid)
    # Single dataset: return a scalar ID rather than a one-element list.
    return return_ids[0] if len(return_ids) == 1 else return_ids
Add new measurements to this instance
12,120
def gen_all_voltages_for_injections(self, injections_raw):
    """For current injections AB, generate all unique potential dipoles."""
    injections = injections_raw.astype(int)
    N = self.nr_electrodes
    all_quadpoles = []
    for idipole in injections:
        Icurrent = np.sort(idipole) - 1
        velecs = list(range(1, N + 1))
        # Remove the current electrodes; delete the larger index first
        # so the smaller index stays valid.
        del velecs[Icurrent[1]]
        del velecs[Icurrent[0]]
        for voltage in itertools.permutations(velecs, 2):
            all_quadpoles.append((idipole[0], idipole[1], voltage[0], voltage[1]))
    configs_unsorted = np.array(all_quadpoles)
    # Sort the AB and MN pairs independently so duplicates align.
    configs_sorted = np.hstack((
        np.sort(configs_unsorted[:, 0:2], axis=1),
        np.sort(configs_unsorted[:, 2:4], axis=1),
    ))
    configs = self.remove_duplicates(configs_sorted)
    self.add_to_configs(configs)
    self.remove_duplicates()
    return configs
For a given set of current injections AB generate all possible unique potential measurements .
12,121
def gen_wenner(self, a):
    """Generate Wenner measurement configurations with spacing `a`."""
    configs = np.array([
        (i, i + a, i + 2 * a, i + 3 * a)
        for i in range(1, self.nr_electrodes - 3 * a + 1)
    ])
    self.add_to_configs(configs)
    return configs
Generate Wenner measurement configurations .
12,122
def gen_reciprocals(self, quadrupoles):
    """For a given set of quadrupoles, generate and return the reciprocals."""
    # Reverse each row (swap AB with MN), then re-sort within each pair.
    reciprocals = quadrupoles[:, ::-1].copy()
    reciprocals[:, 0:2] = np.sort(reciprocals[:, 0:2], axis=1)
    reciprocals[:, 2:4] = np.sort(reciprocals[:, 2:4], axis=1)
    return reciprocals
For a given set of quadrupoles generate and return reciprocals
12,123
def compute_K_factors(self, spacing=None, configs=None, numerical=False,
                      elem_file=None, elec_file=None):
    """Compute geometric factors K, analytically or numerically."""
    use_configs = self.configs if configs is None else configs
    if numerical:
        settings = {
            'elem': elem_file,
            'elec': elec_file,
            'rho': 100,
        }
        K = edfK.compute_K_numerical(use_configs, settings)
    else:
        K = edfK.compute_K_analytical(use_configs, spacing=spacing)
    return K
Compute analytical geometrical factors .
12,124
def applies(self, src, dst):
    """Check whether this rule applies to the given src and dst paths,
    based on the patterns given in the constructor."""
    if self._src_pattern and (src is None or re.search(self._src_pattern, src) is None):
        return False
    if self._dst_pattern and (dst is None or re.search(self._dst_pattern, dst) is None):
        return False
    return True
Checks if this rule applies to the given src and dst paths based on the src pattern and dst pattern given in the constructor .
12,125
def _createunbound(kls, **info):
    """Create a new UnboundNode representing the class `kls`."""
    if issubclass(kls, Bitfield):
        nodetype = UnboundBitfieldNode
    elif hasattr(kls, '_fields_'):
        nodetype = UnboundStructureNode
    elif issubclass(kls, ctypes.Array):
        nodetype = UnboundArrayNode
    else:
        nodetype = UnboundSimpleNode
    return nodetype(type=kls, **info)
Create a new UnboundNode representing a given class .
12,126
def _createbound(obj):
    """Create a new BoundNode representing the object `obj`."""
    try:
        kls = obj._unboundreference_()
    except AttributeError:
        kls = type(obj)
    unbound = _createunbound(kls)

    def valueget():
        return obj

    # Pick the bound node type matching the unbound node's type.
    for t in (BoundBitfieldNode, BoundStructureNode, BoundArrayNode):
        if isinstance(unbound, t._unboundtype):
            kls = t
            break
    else:
        kls = BoundSimpleNode
    return kls(unbound, valueget)
Create a new BoundNode representing a given object .
12,127
def display(obj, skiphidden=True, **printargs):
    """Print a tabular view of `obj` (a ctypes-derived class or instance).

    Extra keyword arguments are passed directly to print(). Mostly useful
    for introspecting structures from an interactive session.
    """
    top = findnode(obj)
    maxhex = len(hex(ctypes.sizeof(top.type))) - 2

    def addrformat(addr):
        # Integral addresses print as plain hex; fractional (bitfield)
        # addresses carry a bit offset after a quote.
        if isinstance(addr, int):
            return "0x{0:0{1}X}".format(addr, maxhex)
        intpart = int(addr)
        fracbits = int((addr - intpart) * 8)
        return "0x{0:0{1}X}'{2}".format(intpart, maxhex, fracbits)

    def formatval(here):
        if isinstance(here, BoundSimpleNode):
            return "{0}({1})".format(here.type.__name__, here.value)
        return str(here.value)

    if isinstance(top, UnboundNode):
        headers = ['Path', 'Addr', 'Type']
        results = [((' ' * n.depth) + n.name, addrformat(n.baseoffset), n.type.__name__)
                   for n in walknode(top, skiphidden)]
    else:
        headers = ['Path', 'Addr', 'Value']
        results = [((' ' * n.depth) + n.name, addrformat(n.baseoffset), formatval(n))
                   for n in walknode(top, skiphidden)]
    # Column widths: widest of header and all cell contents.
    widths = [max(max(len(d[col]) for d in results), len(h))
              for col, h in enumerate(headers)]

    def lp(args):
        print(*args, **printargs)

    lp(d.center(w) for d, w in zip(headers, widths))
    lp('-' * w for w in widths)
    for r in results:
        lp(d.ljust(w) for d, w in zip(r, widths))
Print a view of obj where obj is either a ctypes - derived class or an instance of such a class . Any additional keyword arguments are passed directly to the print function . This is mostly useful to introspect structures from an interactive session .
12,128
def pathparts(self):
    """Return the list of path parts; the root node yields an empty list."""
    try:
        parts = self.parent.pathparts()
        parts.append(self.name)
        return parts
    except AttributeError:
        # No parent: this is the root node.
        return []
A list of the parts of the path with the root node returning an empty list .
12,129
def baseoffset(self):
    """The offset of this node relative to the root node."""
    try:
        return self.parent.baseoffset + self.offset
    except AttributeError:
        # No parent: this node is the root, its own offset is absolute.
        return self.offset
The offset of this node from the root node .
12,130
def _almost_equal ( a , b ) : threshold = 1e-9 diff = np . abs ( a - b ) return ( diff < threshold )
Check if the two numbers are almost equal
12,131
def complement_alleles(self):
    """Complement the alleles of this variant (in place)."""
    # NOTE: calls the module-level complement_alleles() helper, which this
    # method's name intentionally shadows at class scope.
    complemented = [complement_alleles(i) for i in self.alleles]
    self.alleles = self._encode_alleles(complemented)
Complement the alleles of this variant .
12,132
def flip_coded(self):
    """Flip the coding of the alleles, adjusting the genotypes accordingly."""
    # Genotypes count the coded allele (0..2); swapping alleles mirrors them.
    self.genotypes = 2 - self.genotypes
    self.reference, self.coded = self.coded, self.reference
Flips the coding of the alleles .
12,133
def flip_strand(self):
    """Flip the strand of the alleles (in place)."""
    self.reference = complement_alleles(self.reference)
    self.coded = complement_alleles(self.coded)
    self.variant.complement_alleles()
Flips the strand of the alleles .
12,134
def rotvec2mat(u, phi):
    """Convert an axis-angle rotation (axis `u`, angle `phi`) to a 3x3 matrix."""
    phi = np.squeeze(phi)
    norm_u = np.linalg.norm(u)
    if norm_u < 1e-12:
        raise Exception("the rotation vector is equal to zero")
    u = u / norm_u
    s = np.sin(phi)
    c = np.cos(phi)
    t = 1 - c
    ux, uy, uz = u[0], u[1], u[2]
    # Rodrigues' rotation formula, written out element by element.
    return np.array([
        [t * ux * ux + c, t * ux * uy - s * uz, t * ux * uz + s * uy],
        [t * ux * uy + s * uz, t * uy * uy + c, t * uy * uz - s * ux],
        [t * ux * uz - s * uy, t * uy * uz + s * ux, t * uz * uz + c],
    ])
Convert rotation from axis and angle to matrix representation
12,135
def det2lab_xds(pixels_coord, frame_number, starting_frame, starting_angle,
                oscillation_angle, rotation_axis, wavelength, wavevector,
                NX, NY, pixelsize_x, pixelsize_y, distance_to_detector,
                x_center, y_center, detector_x, detector_y, detector_normal,
                **kwargs):
    """Convert pixel coordinates from a frame into q-vectors (h).

    Returns (h, scattering_vector_mm, unit_scattering_vector).
    """
    # Normalise (1, 3) row vectors to (3, 1) column vectors.
    array_shape = (1, 3)
    if detector_x.shape == array_shape:
        detector_x = detector_x.T
        detector_y = detector_y.T
        detector_normal = detector_normal.T
    if wavevector.shape == array_shape:
        wavevector = wavevector.T
    if rotation_axis.shape == array_shape:
        rotation_axis = rotation_axis.T
    # Pixel coordinates relative to the beam centre, in mm.
    xmm = (pixels_coord[:, [0]] - x_center) * pixelsize_x
    ymm = (pixels_coord[:, [1]] - y_center) * pixelsize_y
    scattering_vector_mm = (
        np.outer(xmm, detector_x)
        + np.outer(ymm, detector_y)
        + distance_to_detector * np.outer(np.ones(shape=xmm.shape), detector_normal)
    )
    scattering_vector_mm = scattering_vector_mm.T
    phi = (frame_number - starting_frame) * oscillation_angle + starting_angle
    norms = np.sum(scattering_vector_mm ** 2., axis=0) ** (1. / 2)
    unit_scattering_vector = scattering_vector_mm / norms
    h = (unit_scattering_vector / wavelength
         - np.tile(wavevector, (unit_scattering_vector.shape[1], 1)).T)
    # Rotate back by -phi around the rotation axis (per frame if needed).
    if phi.size == 1:
        h = np.dot(rotvec2mat(rotation_axis.T, -2 * np.pi * phi / 360), h)
    else:
        for i in range(phi.size):
            h[:, [i]] = np.dot(
                rotvec2mat(rotation_axis.T, -2 * np.pi * phi[i] / 360), h[:, [i]])
    return h, scattering_vector_mm, unit_scattering_vector
Converts pixels coordinates from the frame into q - vector
12,136
def get_query_dict(self, **kwargs):
    """Build the filter dictionary from DataTables query parameters.

    Keys are django queryset method names (as strings); values are dicts of
    keyword arguments for those methods.
    """
    total_cols = ensure(int, kwargs.get('total_cols', [0])[0], 0)
    mapping = self.mapping
    filter_dict = defaultdict(dict)
    # NOTE(review): mapping.keys()[0] assumes keys() is subscriptable
    # (Python 2 or a custom mapping type) — confirm against the project.
    starter = mapping.keys()[0]
    for i in range(starter, total_cols):
        key = 'columns[{index}]'.format(index=i)
        if kwargs.get(key + '[searchable]', [0])[0] != 'true':
            continue
        search_value = kwargs.get(key + '[search][value]', [''])[0].strip()
        if not search_value:
            continue
        enum_item = mapping.from_key(i)
        filter_obj = enum_item.extra
        if type(filter_obj) is tuple and len(filter_obj) == 2:
            # (queryset method, filter keyword) pair.
            filter_func, filter_key = filter_obj
            filter_dict[filter_func][filter_key] = search_value
        elif type(filter_obj) is str:
            # Bare string: a keyword for the default `filter` method.
            filter_dict['filter'][filter_obj] = search_value
        else:
            raise ValueError("Invalid filter key.")
    return filter_dict
function to generate a filter dictionary in which the key is the keyword used in django filter function in string form and the value is the searched value .
12,137
def get_order_key(self, **kwargs):
    """Return the ordering key to apply to the filtered queryset."""
    mapping = self.mapping
    default_col = mapping.keys()[0]
    order_column = ensure(int, kwargs.get('order[0][column]', [default_col])[0],
                          default_col)
    order = kwargs.get('order[0][dir]', ['asc'])[0]
    order_key = mapping.from_key(order_column).label
    # Descending order uses Django's '-field' convention.
    return '-' + order_key if order == 'desc' else order_key
function to get the order key to apply it in the filtered queryset
12,138
def filtering(queryset, query_dict):
    """Apply each (method name, arguments) pair in `query_dict` to the queryset."""
    for key, value in query_dict.items():
        assert hasattr(queryset, key), (
            "Parameter 'query_dict' contains non-existent attribute.")
        method = getattr(queryset, key)
        # Lists are positional args, dicts are keyword args, else one arg.
        if isinstance(value, list):
            queryset = method(*value)
        elif isinstance(value, dict):
            queryset = method(**value)
        else:
            queryset = method(value)
    return queryset
function to apply the pre search condition to the queryset to narrow down the queryset s size
12,139
def slicing(queryset, **kwargs):
    """Slice the queryset according to the requested start/length."""
    length = ensure(int, kwargs.get('length', [0])[0], 0)
    start = ensure(int, kwargs.get('start', [0])[0], 0)
    # A negative length means "no paging": return everything.
    if length >= 0:
        queryset = queryset[start:start + length]
    return queryset
function to slice the queryset according to the display length
12,140
def query_by_args(self, pre_search_condition=None, **kwargs):
    """Process a DataTables-style query: filter, count, order, and slice."""
    if pre_search_condition and not isinstance(pre_search_condition, OrderedDict):
        raise TypeError("Parameter 'pre_search_condition' must be an OrderedDict.")
    draw = ensure(int, kwargs.get('draw', [0])[0], 0)
    query_dict = self.get_query_dict(**kwargs)
    order_key = self.get_order_key(**kwargs)
    model_class = self.serializer.Meta.model
    queryset = model_class.objects
    if pre_search_condition:
        # Narrow the queryset first with the caller-supplied conditions.
        queryset = self.filtering(queryset, pre_search_condition)
    else:
        queryset = queryset.all()
    total = queryset.count()
    if query_dict:
        queryset = self.filtering(queryset, query_dict)
    count = queryset.count()
    queryset = queryset.order_by(order_key)
    queryset = self.slicing(queryset, **kwargs)
    return {'items': queryset, 'count': count, 'total': total, 'draw': draw}
intends to process the queries sent by data tables package in frontend . The model_cls indicates the model class get_query_dict is a function implemented by you such that it can return a query dictionary in which the key is the query keyword in str form and the value is the queried value
12,141
def process(self, pre_search_condition=None, **kwargs):
    """Run the query and render the serialized DataTables response."""
    records = self.query_by_args(pre_search_condition=pre_search_condition, **kwargs)
    serializer = self.serializer(records['items'], many=True)
    return {
        'data': serializer.data,
        'draw': records['draw'],
        'recordsTotal': records['total'],
        'recordsFiltered': records['count'],
    }
function to be called outside to get the footer search condition apply the search in DB and render the serialized result .
12,142
def coerce(cls, key, value):
    """Convert a plain dictionary to a MutationDict."""
    self = MutationDict(
        (k, MutationObj.coerce(key, v)) for (k, v) in value.items())
    self._key = key
    return self
Convert plain dictionary to MutationDict
12,143
def coerce(cls, key, value):
    """Convert a plain list to a MutationList."""
    self = MutationList(MutationObj.coerce(key, v) for v in value)
    self._key = key
    return self
Convert plain list to MutationList
12,144
def structure(cls):
    """Return the vector structure as a DNA regex pattern."""
    downstream = cls.cutter.elucidate()
    upstream = str(Seq(downstream).reverse_complement())
    # Rewrite cut markers (^, _) into regex group boundaries.
    return "".join([
        upstream.replace("^", ")(").replace("_", "("),
        "N*",
        downstream.replace("^", ")(").replace("_", ")"),
    ])
Get the vector structure as a DNA regex pattern .
12,145
def placeholder_sequence(self):
    """Return the placeholder sequence in the vector."""
    # Overhang position depends on whether the cutter leaves a 3' overhang.
    if self.cutter.is_3overhang():
        return self._match.group(2) + self.overhang_end()
    return self.overhang_start() + self._match.group(2)
Get the placeholder sequence in the vector .
12,146
def target_sequence(self):
    """Return the target sequence in the vector."""
    # Which regex groups delimit the target depends on the overhang side.
    if self.cutter.is_3overhang():
        start, end = self._match.span(2)[0], self._match.span(3)[1]
    else:
        start, end = self._match.span(1)[0], self._match.span(2)[1]
    return add_as_source(self.record, (self.record << start)[end - start:])
Get the target sequence in the vector .
12,147
def assemble(self, module, *modules, **kwargs):
    """Assemble the provided modules into this vector."""
    mgr = AssemblyManager(
        vector=self,
        modules=[module] + list(modules),
        name=kwargs.get("name", "assembly"),
        id_=kwargs.get("id", "assembly"),
    )
    return mgr.assemble()
Assemble the provided modules into the vector .
12,148
async def onConnect(self):
    """Configure the component from its config, then join the realm."""
    if not hasattr(self.config, 'extra'):
        # Wrap the raw config so `extra` behaves like autobahn's layout.
        original_config = {'config': self.config}
        self.config = objdict(self.config)
        setattr(self.config, 'extra', original_config)
        self.config.extra['handlers'] = self.handlers
    self.transport_host = self.config.extra['config']['transport_host']
    self.subscribe_options = SubscribeOptions(**self.config.extra['config']['sub_options'])
    self.replay_events = self.config.extra['config']['replay_events']
    self.publish_topic = self.config.extra['config']['publish_topic']['topic']
    self.publish_options = PublishOptions(**self.config.extra['config']['pub_options'])
    self.handlers = self.config.extra['handlers']
    self.subscribed_topics = self.config.extra['config']['subscribed_topics']
    self.name = self.config.extra['config']['name']
    if self.config.extra['config']['pub_options']['retain'] is True:
        # Retained events need the event database pool for replay.
        self.pool = await asyncpg.create_pool(
            user=EVENT_DB_USER,
            password=EVENT_DB_PASS,
            host=EVENT_DB_HOST,
            database=EVENT_DB_NAME,
        )
    try:
        self.join(self.config.realm)
    except AttributeError:
        # No realm configured (e.g. during testing).
        pass
Configure the component
12,149
def getback(config, force=False):
    """Switch back to master, then delete the current branch locally and remotely."""
    repo = config.repo
    active_branch = repo.active_branch
    if active_branch.name == "master":
        error_out("You're already on the master branch.")
    if repo.is_dirty():
        error_out('Repo is "dirty". ({})'.format(
            ", ".join([repr(x.b_path) for x in repo.index.diff(None)])))
    branch_name = active_branch.name
    state = read(config.configfile)
    origin_name = state.get("ORIGIN_NAME", "origin")
    upstream_remote = None
    fork_remote = None
    for remote in repo.remotes:
        if remote.name == origin_name:
            upstream_remote = remote
            break
    if not upstream_remote:
        error_out("No remote called {!r} found".format(origin_name))
    # Move to master and bring it up to date.
    repo.heads.master.checkout()
    upstream_remote.pull(repo.heads.master)
    merged_branches = [
        x.strip()
        for x in repo.git.branch("--merged").splitlines()
        if x.strip() and not x.strip().startswith("*")
    ]
    was_merged = branch_name in merged_branches
    certain = was_merged or force
    if not certain:
        # Not provably merged: ask before force-deleting.
        certain = (
            input("Are you certain {} is actually merged? [Y/n] ".format(branch_name))
            .lower().strip() != "n")
        if not certain:
            return 1
    if was_merged:
        repo.git.branch("-d", branch_name)
    else:
        repo.git.branch("-D", branch_name)
    fork_remote = None
    for remote in repo.remotes:
        if remote.name == state.get("FORK_NAME"):
            fork_remote = remote
            break
    if fork_remote:
        # Delete the remote branch on the fork as well.
        fork_remote.push(":" + branch_name)
        info_out("Remote branch on fork deleted too.")
Goes back to the master branch deletes the current branch locally and remotely .
12,150
def get(_class, api, vid):
    """Return a Bus object for vehicle ID `vid` using API instance `api`.

    Bug fix: the original called api.vehicles() twice and discarded the
    first response; we now query the API once and reuse the result.
    """
    bus = api.vehicles(vid=vid)['vehicle']
    return _class.fromapi(api, bus)
Return a Bus object for a certain vehicle ID vid using API instance api .
12,151
def fromapi(_class, api, apiresponse):
    """Build a Bus object from an API response dict."""
    bus = apiresponse
    return _class(
        api=api,
        vid=bus['vid'],
        timeupdated=datetime.strptime(bus['tmstmp'], api.STRPTIME),
        lat=float(bus['lat']),
        lng=float(bus['lon']),
        heading=bus['hdg'],
        pid=bus['pid'],
        intotrip=bus['pdist'],
        route=bus['rt'],
        destination=bus['des'],
        speed=bus['spd'],
        # 'dly' may be absent; normalise missing/None to False.
        delay=bus.get('dly') or False,
    )
Return a Bus object from an API response dict .
12,152
def update(self):
    """Refresh this bus in place by fetching a new one and copying its state."""
    vehicle = self.api.vehicles(vid=self.vid)['vehicle']
    fresh = self.fromapi(self.api, vehicle)
    # Transplant the new object's attribute dict wholesale.
    self.__dict__ = fresh.__dict__
    del fresh
Update this bus by creating a new one and transplanting dictionaries .
12,153
def predictions(self):
    """Yield Prediction objects for this bus from the API response."""
    for prediction in self.api.predictions(vid=self.vid)['prd']:
        pobj = Prediction.fromapi(self.api, prediction)
        # Back-reference to this bus for convenience.
        pobj._busobj = self
        yield pobj
Generator that yields prediction objects from an API response .
12,154
def next_stop(self):
    """Return the prediction for this bus's next stop."""
    preds = self.api.predictions(vid=self.vid)['prd']
    pobj = Prediction.fromapi(self.api, preds[0])
    pobj._busobj = self
    return pobj
Return the next stop for this bus .
12,155
def get(_class, api, rt):
    """Return a Route object for route `rt`, populating the cache on first use."""
    if not _class.all_routes:
        # Lazily fetch and cache the full route list.
        _class.all_routes = _class.update_list(api, api.routes()['route'])
    return _class.all_routes[str(rt)]
Return a Route object for route rt using API instance api .
12,156
def _normalise_path ( path : Union [ str , pathlib . Path ] ) -> pathlib . Path : if isinstance ( path , str ) : return pathlib . Path ( path ) return path
Ensures a path is parsed .
12,157
def root(path: Union[str, pathlib.Path]) -> _Root:
    """Retrieve a root directory object from a path."""
    return _Root.from_path(_normalise_path(path))
Retrieve a root directory object from a path .
12,158
def entity(path: Union[str, pathlib.Path]) -> _Entity:
    """Retrieve an appropriate entity object from a path."""
    return _Entity.from_path(_normalise_path(path))
Retrieve an appropriate entity object from a path .
12,159
def compare(left: Union[str, pathlib.Path, _Entity],
            right: Union[str, pathlib.Path, _Entity]) -> Comparison:
    """Compare two paths (or entities)."""
    def normalise(param: Union[str, pathlib.Path, _Entity]) -> _Entity:
        # Accept str, Path, or _Entity and coerce stepwise to _Entity.
        if isinstance(param, str):
            param = pathlib.Path(param)
        if isinstance(param, pathlib.Path):
            param = _Entity.from_path(param)
        return param

    return Comparison.compare(normalise(left), normalise(right))
Compare two paths .
12,160
def read_XPARM(path_to_XPARM='.'):
    """Load the instrumental geometry from an (G)XPARM.XDS file.

    Parameters
    ----------
    path_to_XPARM : str
        Direct path to the file, or a directory containing GXPARM.XDS
        (preferred, refined geometry) or XPARM.XDS.

    Returns
    -------
    dict
        Parsed geometry: frame/angle data, wavelength, cell, detector
        layout, etc. (see the keys below).

    Raises
    ------
    Exception
        If the path does not exist, no parameter file is found, or the
        file contains more or fewer numbers than expected.
    """
    if not os.path.exists(path_to_XPARM):
        # BUGFIX: original message lacked the space before "does".
        raise Exception("path " + path_to_XPARM + " does not exist")
    if os.path.isdir(path_to_XPARM):
        # Prefer the refined GXPARM.XDS over the initial XPARM.XDS.
        for name in ('GXPARM.XDS', 'XPARM.XDS'):
            candidate = os.path.join(path_to_XPARM, name)
            if os.path.isfile(candidate):
                path_to_XPARM = candidate
                break
        else:
            raise Exception("files GXPARM.XDS and XPARM.XDS are not found in the folder " + path_to_XPARM)
    # Close the file deterministically (the original left it open).
    with open(path_to_XPARM) as fh:
        fh.readline()  # skip the header line
        text = fh.read()
    # Iterator over every number in the file, consumed in file order.
    # (Renamed from `f`, which shadowed the file handle in the original.)
    numbers = re.compile(r'-?\d+\.?\d*').finditer(text)
    try:
        result = dict(starting_frame=r_get_numbers(numbers, 1),
                      starting_angle=r_get_numbers(numbers, 1),
                      oscillation_angle=r_get_numbers(numbers, 1),
                      rotation_axis=r_get_numbers(numbers, 3),
                      wavelength=r_get_numbers(numbers, 1),
                      wavevector=r_get_numbers(numbers, 3),
                      space_group_nr=r_get_numbers(numbers, 1),
                      cell=r_get_numbers(numbers, 6),
                      unit_cell_vectors=np.reshape(r_get_numbers(numbers, 9), (3, 3)),
                      number_of_detector_segments=r_get_numbers(numbers, 1),
                      NX=r_get_numbers(numbers, 1),
                      NY=r_get_numbers(numbers, 1),
                      pixelsize_x=r_get_numbers(numbers, 1),
                      pixelsize_y=r_get_numbers(numbers, 1),
                      x_center=r_get_numbers(numbers, 1),
                      y_center=r_get_numbers(numbers, 1),
                      distance_to_detector=r_get_numbers(numbers, 1),
                      detector_x=r_get_numbers(numbers, 3),
                      detector_y=r_get_numbers(numbers, 3),
                      detector_normal=r_get_numbers(numbers, 3),
                      detector_segment_crossection=r_get_numbers(numbers, 5),
                      detector_segment_geometry=r_get_numbers(numbers, 9))
    except StopIteration:
        # File ended before all expected numbers were read.
        raise Exception('Wrong format of the XPARM.XDS file')
    # The file must contain exactly the numbers consumed above.
    # BUGFIX: the original called the Python-2-only `f.next()`.
    try:
        next(numbers)
    except StopIteration:
        pass
    else:
        raise Exception('Wrong format of the XPARM.XDS file')
    return result
Loads the instrumental geometry information from the XPARM . XDS or GXPARM . XDS files at the proposed location
12,161
def create_h5py_with_large_cache(filename, cache_size_mb):
    """Create an HDF5 file whose chunk cache is *cache_size_mb* megabytes.

    Uses the low-level h5py API because the high-level one does not
    expose the cache settings.  Fails if *filename* already exists
    (ACC_EXCL).
    """
    fapl = h5py.h5p.create(h5py.h5p.FILE_ACCESS)
    cache_settings = list(fapl.get_cache())
    cache_settings[2] = cache_size_mb * 1024 * 1024  # slot 2 = cache size in bytes
    fapl.set_cache(*cache_settings)
    file_id = h5py.h5f.create(filename, flags=h5py.h5f.ACC_EXCL, fapl=fapl)
    return h5py.File(file_id)
Allows to open the hdf5 file with specified cache size
12,162
def find_features(seqs, locus_tag="all", utr_len=200):
    """Collect CDS features (with flanking UTRs) matching a locus tag.

    Parameters:
        seqs: iterable of sequence records exposing .features and .seq
        locus_tag: a specific locus tag to match, or "all" for every CDS
        utr_len: number of flanking bases to include on each side

    Returns:
        list of FeatureMatch objects.
    """
    matches = []
    for record in seqs:
        for feature in record.features:
            if feature.type != "CDS":
                continue
            if locus_tag != "all":
                if 'locus_tag' not in feature.qualifiers:
                    continue
                if feature.qualifiers['locus_tag'][0] != locus_tag:
                    continue
            # Clamp the extended window at the sequence start.
            start = max(0, feature.location.nofuzzy_start - utr_len)
            stop = max(0, feature.location.nofuzzy_end + utr_len)
            flanked_seq = record.seq[start:stop]
            matches.append(FeatureMatch(feature, flanked_seq, feature.strand, utr_len))
    return matches
Find features in sequences by locus tag
12,163
def getLevel(self):
    """Return the nesting depth of this port (number of LPort ancestors)."""
    depth = 0
    node = self.parent
    while isinstance(node, LPort):
        depth += 1
        node = node.parent
    return depth
Get the nesting level of this port
12,164
def normalize_LCSH(subject):
    """Normalize an LCSH subject heading prior to indexing.

    '--' separators are padded to ' -- ', each part is trimmed, and a
    terminal period is appended when the heading ends in an
    alphanumeric character.
    """
    parts = [piece.strip() for piece in subject.strip().split('--')]
    normalized = ' -- '.join(parts)
    if re.search(r'[^a-zA-Z0-9]$', normalized) is None:
        normalized += '.'
    return normalized
Normalize a LCSH subject heading prior to indexing .
12,165
def normalize_UNTL(subject):
    """Normalize a UNTL subject heading: trim and collapse whitespace runs."""
    return re.sub(r'\s+', ' ', subject.strip())
Normalize a UNTL subject heading for consistency .
12,166
def UNTL_to_encodedUNTL(subject):
    """Encode a UNTL subject heading for use in Solr.

    Whitespace is first normalized (trim + collapse runs, inlined from
    normalize_UNTL), then spaces become underscores and ' - ' separators
    become '/'.
    """
    collapsed = re.sub(r'\s+', ' ', subject.strip())
    return collapsed.replace(' ', '_').replace('_-_', '/')
Normalize a UNTL subject heading to be used in SOLR .
12,167
def untldict_normalizer(untl_dict, normalizations):
    """Normalize UNTL elements by their qualifier, in place.

    Parameters:
        untl_dict: mapping of element type -> list of element dicts
            (each with optional 'qualifier' and 'content' keys)
        normalizations: mapping of element type -> qualifiers to normalize

    Returns:
        The same untl_dict, with matching element contents rewritten by
        the registered ELEMENT_NORMALIZERS functions.
    """
    for element_type, element_list in untl_dict.items():
        if element_type not in normalizations:
            continue
        wanted_qualifiers = normalizations[element_type]
        for element in element_list:
            qualifier = element.get('qualifier', None)
            if qualifier not in wanted_qualifiers:
                continue
            content = element.get('content', None)
            if element_type not in ELEMENT_NORMALIZERS:
                continue
            type_normalizers = ELEMENT_NORMALIZERS[element_type]
            # Only rewrite non-empty content with a registered normalizer.
            if qualifier in type_normalizers and content:
                element['content'] = type_normalizers[qualifier](content)
    return untl_dict
Normalize UNTL elements by their qualifier .
12,168
def start(config, bugnumber=""):
    """Create a new topic branch.

    When a bug number is given, its summary and URL are looked up and
    offered as the default branch summary.  The branch name is built
    from the bug number plus a sanitized version of the summary, and
    the new branch is created and checked out.  The final state is
    persisted via save().
    """
    repo = config.repo
    if bugnumber:
        summary, bugnumber, url = get_summary(config, bugnumber)
    else:
        url = None
        summary = None
    if summary:
        # Offer the fetched summary as the default answer.
        summary = input('Summary ["{}"]: '.format(summary)).strip() or summary
    else:
        summary = input("Summary: ").strip()
    branch_name = ""
    if bugnumber:
        # NOTE(review): both branches build the identical prefix; the
        # is_github() distinction currently has no effect — confirm intent.
        if is_github({"bugnumber": bugnumber, "url": url}):
            branch_name = "{}-".format(bugnumber)
        else:
            branch_name = "{}-".format(bugnumber)

    def clean_branch_name(string):
        # Turn the free-text summary into a safe, lowercase, dash-separated slug.
        string = re.sub(r"\s+", " ", string)
        string = string.replace(" ", "-")
        string = string.replace("->", "-").replace("=>", "-")
        for each in "@%^&:'\"/(),[]{}!.?`$<>#*;=":
            string = string.replace(each, "")
        string = re.sub("-+", "-", string)  # collapse dash runs
        string = string.strip("-")
        return string.lower().strip()

    branch_name += clean_branch_name(summary)
    if not branch_name:
        error_out("Must provide a branch name")
    # Refuse to clobber an existing branch with the same name.
    found = list(find(repo, branch_name, exact=True))
    if found:
        error_out("There is already a branch called {!r}".format(found[0].name))
    new_branch = repo.create_head(branch_name)
    new_branch.checkout()
    if config.verbose:
        click.echo("Checkout out new branch: {}".format(branch_name))
    save(config.configfile, summary, branch_name, bugnumber=bugnumber, url=url)
Create a new topic branch .
12,169
def conv_cond_concat(x, y):
    """Concatenate conditioning vector on feature map axis.

    Tiles y over the spatial dimensions of the 4-D feature map x and
    concatenates along axis 3 (channels).

    NOTE(review): uses the pre-TF-1.0 tf.concat(axis, values) argument
    order; assumes x is (batch, height, width, channels) — confirm.
    """
    x_shapes = x.get_shape()
    y_shapes = y.get_shape()
    return tf.concat(3, [x, y * tf.ones([x_shapes[0], x_shapes[1], x_shapes[2], y_shapes[3]])])
Concatenate conditioning vector on feature map axis .
12,170
def lrelu_sq(x):
    """Concatenate lrelu(x) and min(|x|, x^2) along the last axis.

    NOTE(review): uses the pre-TF-1.0 tf.concat(axis, values) argument
    order; depends on the module-level lrelu() helper.
    """
    dim = len(x.get_shape()) - 1  # index of the last axis
    return tf.concat(dim, [lrelu(x), tf.minimum(tf.abs(x), tf.square(x))])
Concatenates lrelu and square
12,171
def avg_grads(tower_grads):
    """Calculate the average gradient for each shared variable across all towers.

    Args:
        tower_grads: list over towers, each a list of (gradient, variable)
            pairs as produced by Optimizer.compute_gradients per tower.

    Returns:
        list of (averaged_gradient, variable) pairs.

    NOTE(review): uses the pre-TF-1.0 tf.concat(axis, values) argument order.
    """
    average_grads = []
    # zip(*tower_grads) groups the per-tower entries of each variable together.
    for grad_and_vars in zip(*tower_grads):
        grads = []
        for g, _ in grad_and_vars:
            # Add a leading "tower" dimension so gradients can be stacked.
            expanded_g = tf.expand_dims(g, 0)
            grads.append(expanded_g)
        grad = tf.concat(0, grads)
        grad = tf.reduce_mean(grad, 0)
        # Variables are shared across towers; the first tower's pointer suffices.
        v = grad_and_vars[0][1]
        grad_and_var = (grad, v)
        average_grads.append(grad_and_var)
    return average_grads
Calculate the average gradient for each shared variable across all towers .
12,172
def unescape_utf8(msg):
    """Percent-decode *msg* and expand HTML numeric character references.

    Decimal (&#65;) and hexadecimal (&#x41;) references are replaced by
    the corresponding unicode characters.

    Note: the pattern also matches named entities such as &amp;, which
    (as in the original implementation) raise ValueError — only numeric
    references are actually supported.

    BUGFIX: ported from Python 2 — `unichr` -> `chr`,
    `urllib.unquote` -> `urllib.parse.unquote`.
    """
    from urllib.parse import unquote  # Python 3 home of urllib.unquote

    def _decode(match):
        text = match.group(0)
        if text[:3] == "&#x":
            return chr(int(text[3:-1], 16))
        return chr(int(text[2:-1]))

    return re.sub(r"&#?\w+;", _decode, unquote(msg))
convert escaped unicode web entities to unicode
12,173
def ensure(data_type, check_value, default_value=None):
    """Ensure *check_value* is of *data_type*, converting if necessary.

    Parameters:
        data_type: target type/callable used both for the isinstance
            check and for conversion
        check_value: the value to validate or convert
        default_value: fallback returned when conversion fails; must
            itself be an instance of data_type when provided

    Returns:
        check_value (possibly converted), or default_value when the
        conversion raises.

    Raises:
        ValueError: if default_value is given but not an instance of
            data_type.
    """
    if default_value is not None and not isinstance(default_value, data_type):
        raise ValueError("default_value must be the value in the given data "
                         "type.")
    if isinstance(check_value, data_type):
        return check_value
    try:
        return data_type(check_value)
    # BUGFIX: the original bare `except:` swallowed everything, including
    # KeyboardInterrupt; only conversion failures should fall back.
    except (TypeError, ValueError):
        return default_value
Ensure the given check value is of the given data type : if it already is , return it directly ; otherwise try to convert it , returning the default value on failure
12,174
def mark_resolved(task_id):
    """Mark the specified task as resolved in the FailedTask table.

    Only rows still unresolved (datetime_resolved IS NULL) are touched;
    already-resolved rows keep their original resolution timestamp.
    """
    # Imported lazily to avoid importing Django models at module load time.
    from . import models
    models.FailedTask.objects.filter(task_id=task_id, datetime_resolved=None).update(datetime_resolved=now())
Mark the specified task as resolved in the FailedTask table .
12,175
def is_equal(a, b, tol):
    """Ratio test for floating-point equality.

    Returns True when the values are exactly equal or their difference
    is within *tol* times the larger magnitude.
    """
    return a == b or abs(a - b) <= tol * max(abs(a), abs(b))
Ratio test to check if two floating point numbers are equal .
12,176
def getPortSideView(self, side) -> List["LPort"]:
    """Return the sublist view of ports on the given side.

    Raises ValueError for an unrecognized side.
    """
    if side == PortSide.WEST:
        return self.west
    if side == PortSide.EAST:
        return self.east
    if side == PortSide.NORTH:
        return self.north
    if side == PortSide.SOUTH:
        return self.south
    raise ValueError(side)
Returns a sublist view for all ports of given side .
12,177
def iterEdges(self, filterSelfLoops=False):
    """Iterate over the edges connected to any port of this unit."""
    for port in self.iterPorts():
        yield from port.iterEdges(filterSelfLoops=filterSelfLoops)
Iter edges connected from outside of this unit
12,178
def link(source_path):
    """Link the content found at *source_path*.

    Reads the file, converts its lines into a Block tree, and collects
    block-name variables along the way.

    Returns:
        (all_block, variables): the Block representing the whole file
        and the variables recorded in the BlockMap.

    Raises:
        SourceNotFound: when source_path is not an existing file.
    """
    if not os.path.isfile(source_path):
        raise SourceNotFound(source_path)
    with open(source_path, 'r') as f:
        content = f.read()
    block_map = BlockMap()
    # LinkStack tracks the include chain to detect circular links.
    all_block = convert_lines_to_block(content.splitlines(), block_map, LinkStack(source_path), source_path)
    return all_block, block_map.get_variables()
Links the content found at source_path and returns a Block that represents the content .
12,179
def process_links(include_match, block_map, link_stack, source_path):
    """Resolve one include-tag regex match to the block it references.

    Match groups: (1) leading whitespace to re-indent the included
    content, (2) the include path, (3) an optional ':block_name' suffix.
    Falls back to ALL_BLOCK_NAME when no block name is given.
    """
    leading_whitespace = include_match.group(1)
    include_path = include_match.group(2)
    block_name = include_match.group(3)
    if block_name is None:
        block_name = ALL_BLOCK_NAME
    else:
        block_name = block_name.lstrip(':')
    return retrieve_block_from_map(source_path, include_path.strip(),
                                   block_name.strip(), leading_whitespace,
                                   block_map, link_stack)
Process a string of content for include tags .
12,180
def catch_warnings(action, category=Warning, lineno=0, append=False):
    """Decorator factory: run the wrapped function under a warnings filter.

    The filter (as for warnings.simplefilter) is installed inside a
    warnings.catch_warnings() context, so the global filter state is
    restored after each call.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with warnings.catch_warnings():
                warnings.simplefilter(action, category, lineno, append)
                return func(*args, **kwargs)
        return wrapper
    return decorator
Wrap the function in a warnings . catch_warnings context .
12,181
def _guess_caller():
    """Try to guess which module imported app.py.

    Inspects the frame one level up the stack and, when that module has
    a __file__, records its absolute path in the module-level
    ``_caller_path``.

    Returns:
        The (possibly just updated) ``_caller_path``.
    """
    import inspect
    global _caller_path
    caller = inspect.stack()[1]  # frame record of the immediate caller
    caller_module = inspect.getmodule(caller[0])
    # NOTE(review): getmodule() can return None (e.g. exec'd code); the
    # hasattr guard then leaves _caller_path at its previous value.
    if hasattr(caller_module, '__file__'):
        _caller_path = os.path.abspath(caller_module.__file__)
    return _caller_path
Try to guess which module imported app . py
12,182
def _fix_paths ( self , options ) : for k in ( 'template_path' , 'static_path' ) : if k in options : v = options . pop ( k ) if v is None : continue if not os . path . isabs ( v ) : v = os . path . abspath ( os . path . join ( self . root_path , v ) ) app_log . debug ( 'Fix %s to be absolute: %s' % ( k , v ) ) options [ k ] = v
Fix static_path and template_path to be absolute paths relative to self . root_path so that the current working directory can be ignored .
12,183
def route(self, url, host=None):
    """Decorator registering a handler class for *url* (optionally per host).

    The (url, handler) pair is inserted at the front of the host's
    handler list so later registrations take precedence.
    """
    def register(handler_cls):
        handlers = self._get_handlers_on_host(host)
        handlers.insert(0, (url, handler_cls))
        return handler_cls
    return register
This is a decorator
12,184
def command_line_config(self):
    """Override ``settings`` with ``--key=value`` pairs from sys.argv.

    settings.py is the basis: values for keys that already exist are
    converted to the existing value's type (bool values must be the
    literal 'True'/'False'); unknown keys are added as plain strings.
    Finally update_settings({}) is called to propagate the changes.

    Raises:
        errors.ArgsParseError: for arguments not of the form --key=value
            or for malformed bool values.
    """
    args = sys.argv[1:]
    args_dict = {}
    existed_keys = []
    new_keys = []
    for t in args:
        if not t.startswith('--'):
            raise errors.ArgsParseError('Bad arg: %s' % t)
        try:
            # Also rejects values containing '=' (unpacking fails).
            key, value = tuple(t[2:].split('='))
        except:
            raise errors.ArgsParseError('Bad arg: %s' % t)
        args_dict[key] = value
        if key in settings:
            existed_keys.append(key)
        else:
            new_keys.append(key)
    if existed_keys:
        app_log.debug('Changed settings:')
        for i in existed_keys:
            before = settings[i]
            type_ = type(before)
            if type_ is bool:
                # bool('False') would be truthy; parse the literals explicitly.
                if args_dict[i] == 'True':
                    _value = True
                elif args_dict[i] == 'False':
                    _value = False
                else:
                    raise errors.ArgsParseError('%s should only be True or False' % i)
            else:
                # Coerce to the type of the existing settings value.
                _value = type_(args_dict[i])
            settings[i] = _value
            app_log.debug(' %s [%s]%s (%s)', i, type(settings[i]), settings[i], before)
    if new_keys:
        app_log.debug('New settings:')
        for i in new_keys:
            settings[i] = args_dict[i]
            app_log.debug(' %s %s', i, args_dict[i])
    self.update_settings({})
Parse -- key = value command line arguments and override settings ; settings . py provides the base values
12,185
def setup(self):
    """Prepare the app environment; called both before run and before testing.

    Configures test logging, sets the process timezone, derives/validates
    the PROJECT name from the root directory, makes the project package
    importable, and marks the app as set up.
    """
    testing = settings.get('TESTING')
    if testing:
        config = settings['LOGGERS'].get('', {})
        set_nose_formatter(config)
    os.environ['TZ'] = settings['TIME_ZONE']
    time.tzset()
    if settings._module:
        # PROJECT must match the directory name containing app.py.
        project = os.path.split(self.root_path)[1]
        if settings['PROJECT']:
            assert settings['PROJECT'] == project, 'PROJECT specialized in settings (%s) ' 'should be the same as project directory name (%s)' % (settings['PROJECT'], project)
        else:
            settings['PROJECT'] = project
    if settings['PROJECT']:
        if settings._module:
            # Ensure the project package's parent directory is importable.
            _abs = os.path.abspath
            parent_path = os.path.dirname(self.root_path)
            if not _abs(parent_path) in [_abs(i) for i in sys.path]:
                sys.path.insert(0, parent_path)
                app_log.info('Add %s to sys.path' % _abs(parent_path))
        try:
            __import__(settings['PROJECT'])
            app_log.debug('import package `%s` success' % settings['PROJECT'])
        except ImportError:
            raise ImportError('PROJECT could not be imported, may be app.py is outside the project' 'or there is no __init__ in the package.')
    self.is_setuped = True
This function will be called both before run and testing started .
12,186
def _init_application ( self , application = None ) : if application : self . application = application else : self . application = self . make_application ( )
Initialize the application object for the torext app ; if an existing application is passed , use it directly instead of making a new one
12,187
def _log_function(self, handler):
    """Override of Application.log_function controlling request logging.

    Log level is chosen from the response status (info < 400,
    warning < 500, error otherwise); requests matching any prefix in
    settings['LOGGING_IGNORE_URLS'] are demoted to debug.
    """
    if handler.get_status() < 400:
        log_method = request_log.info
    elif handler.get_status() < 500:
        log_method = request_log.warning
    else:
        log_method = request_log.error
    for i in settings['LOGGING_IGNORE_URLS']:
        if handler.request.uri.startswith(i):
            log_method = request_log.debug
            break
    # request_time() is in seconds; log in milliseconds.
    request_time = 1000.0 * handler.request.request_time()
    log_method("%d %s %.2fms", handler.get_status(), handler._request_summary(), request_time)
Override Application . log_function so that what to log can be controlled .
12,188
def xavier_init(fan_in, fan_out, constant=1):
    """Xavier (Glorot) uniform initialization for a (fan_in, fan_out) weight matrix.

    Samples uniformly from [-b, b] with b = constant * sqrt(6 / (fan_in + fan_out)).
    """
    bound = constant * np.sqrt(6.0 / (fan_in + fan_out))
    return tf.random_uniform((fan_in, fan_out), minval=-bound, maxval=bound, dtype=tf.float32)
Xavier initialization of network weights
12,189
def partial_fit(self, X):
    """Run one optimizer step on mini-batch X and return its cost."""
    _, batch_cost = self.sess.run((self.optimizer, self.cost), feed_dict={self.x: X})
    return batch_cost
Train model based on mini - batch of input data . Return cost of mini - batch .
12,190
def transform(self, X):
    """Map input data X into the latent space (returns the encoder mean)."""
    feed = {self.x: X}
    return self.sess.run(self.z_mean, feed_dict=feed)
Transform data by mapping it into the latent space .
12,191
def generate(self, z_mu=None):
    """Decode a latent point into data space.

    When z_mu is None a point is sampled from the standard-normal prior
    over the latent space; otherwise the given point is decoded.
    """
    if z_mu is None:
        z_mu = np.random.normal(size=self.network_architecture["n_z"])
    feed = {self.z: z_mu}
    return self.sess.run(self.x_reconstr_mean, feed_dict=feed)
Generate data by sampling from latent space . If z_mu is not None data for this point in latent space is generated . Otherwise z_mu is drawn from prior in latent space .
12,192
def reconstruct(self, X):
    """Pass X through the VAE (encode then decode) and return the reconstruction."""
    feed = {self.x: X}
    return self.sess.run(self.x_reconstr_mean, feed_dict=feed)
Use VAE to reconstruct given data .
12,193
def get_ajd_bound(mesh):
    """Determine the triangular elements adjacent to the boundary elements.

    For each boundary edge (boundary types '12' and '11') find the
    triangle in mesh['elements']['2'] that contains both edge nodes.

    Parameters
    ----------
    mesh : dict
        Must provide mesh['boundaries'] with '11' and '12' edge lists
        and mesh['elements']['2'] with the triangle node lists.

    Returns
    -------
    (str, list)
        Newline-separated 1-based element numbers, and the list of
        0-based adjacent element indices (one per resolvable edge).
    """
    print('Get elements adjacent to boundaries')
    boundary_elements = []
    str_adj_boundaries = ''
    boundaries = mesh['boundaries']['12'] + mesh['boundaries']['11']
    for boundary in boundaries:
        # Index of each triangle containing both edge nodes; NaN otherwise.
        indices = [nr if (boundary[0] in x and boundary[1] in x) else np.nan
                   for (nr, x) in enumerate(mesh['elements']['2'])]
        indices = np.array(indices)[~np.isnan(indices)]
        # BUGFIX: the original tested `len(indices) != 1` first, so an edge
        # with NO neighbour printed "More than one neighbour found!" and
        # then crashed on indices[0]. Test the two cases separately and
        # skip edges without a neighbour.
        if len(indices) > 1:
            print('More than one neighbour found!')
        elif len(indices) == 0:
            print('No neighbour found!')
            continue
        boundary_elements.append(indices[0])
        str_adj_boundaries += '{0}\n'.format(int(indices[0]) + 1)
    return str_adj_boundaries, boundary_elements
Determine the triangular elements adjacent to the boundary elements
12,194
def write_elec_file(filename, mesh):
    """Match electrode coordinates to mesh nodes and write 'elec.dat'.

    Reads (x, z) electrode positions from *filename*, finds for each the
    mesh node with (numerically close) matching coordinates, and writes
    the 1-based node numbers, preceded by their count, to 'elec.dat' in
    the current directory.

    Parameters
    ----------
    filename : str
        Text file readable by np.loadtxt with one "x z" pair per line.
    mesh : dict
        Must provide mesh['nodes']; each node is indexed as
        (id, x, z) — coordinates are read from positions 1 and 2.
    """
    # BUGFIX: np.loadtxt returns a 1-D array for a single-line file, so
    # iterating yielded scalars and pos[0] crashed; atleast_2d keeps the
    # (n_electrodes, 2) shape in all cases.
    electrodes = np.atleast_2d(np.loadtxt(filename))
    elecs = []
    for pos in electrodes:
        for nr, node in enumerate(mesh['nodes']):
            if np.isclose(node[1], pos[0]) and np.isclose(node[2], pos[1]):
                elecs.append(nr + 1)
    # Close the output file deterministically.
    with open('elec.dat', 'w') as fid:
        fid.write('{0}\n'.format(len(elecs)))
        for i in elecs:
            fid.write('{0}\n'.format(i))
Read in the electrode positions and write the indices of the matching mesh nodes to elec . dat
12,195
def state_size(self) -> Sequence[Shape]:
    """Returns the MDP state size.

    Delegates to the compiled RDDL model's state_size via _sizes.
    """
    return self._sizes(self._compiler.rddl.state_size)
Returns the MDP state size .
12,196
def action_size(self) -> Sequence[Shape]:
    """Returns the MDP action size.

    Delegates to the compiled RDDL model's action_size via _sizes.
    """
    return self._sizes(self._compiler.rddl.action_size)
Returns the MDP action size .
12,197
def interm_size(self) -> Sequence[Shape]:
    """Returns the MDP intermediate state size.

    Delegates to the compiled RDDL model's interm_size via _sizes.
    """
    return self._sizes(self._compiler.rddl.interm_size)
Returns the MDP intermediate state size .
12,198
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]:
    """Returns the simulation cell output size.

    Tuple of (state sizes, action sizes, intermediate sizes, 1); the
    trailing 1 is presumably the scalar reward slot — TODO confirm.
    """
    return (self.state_size, self.action_size, self.interm_size, 1)
Returns the simulation cell output size .
12,199
def initial_state(self) -> StateTensor:
    """Returns the batched initial state as a tuple of fluent tensors."""
    compiled = self._compiler.compile_initial_state(self._batch_size)
    return tuple(self._output_size(fluent) for fluent in compiled)
Returns the initial state tensor .