idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
52,300
def _get_netengine_backend ( self ) : backend_class_name = self . backend . split ( '.' ) [ - 1 ] backend_path = self . backend . lower ( ) module = import_module ( backend_path ) BackendClass = getattr ( module , backend_class_name ) return BackendClass
returns the netengine backend specified in self . backend for internal use only
52,301
def _build_netengine_arguments ( self ) : arguments = { "host" : self . host } if self . config is not None : for key , value in self . config . iteritems ( ) : arguments [ key ] = value if self . port : arguments [ "port" ] = self . port return arguments
returns a python dictionary representing arguments that will be passed to a netengine backend for internal use only
52,302
def model_factory ( schema , resolver = None , base_class = model . Model , name = None ) : schema = copy . deepcopy ( schema ) resolver = resolver class Model ( base_class ) : def __init__ ( self , * args , ** kwargs ) : self . __dict__ [ 'schema' ] = schema self . __dict__ [ 'resolver' ] = resolver base_class . __ini...
Generate a model class based on the provided JSON Schema
52,303
def patch(self):
    """Return a JSON-patch string representing the delta since load.

    Diffs the stored ``__original__`` snapshot against the current
    contents of the model.
    """
    original = self.__dict__['__original__']
    delta = jsonpatch.make_patch(original, dict(self))
    return delta.to_string()
Return a jsonpatch object representing the delta
52,304
def changes(self):
    """Dumber version of the ``patch`` method (deprecated).

    Emits a DeprecationWarning and returns a deep copy of the raw
    ``changes`` dict.
    """
    warnings.warn('Model.changes will be removed in warlock v2',
                  DeprecationWarning, stacklevel=2)
    return copy.deepcopy(self.__dict__['changes'])
Dumber version of patch method
52,305
def isSupportedContent(cls, fileContent):
    """Return True if the first four bytes carry a Mach-O magic number.

    Checks both 32/64-bit magics in both byte orders.
    """
    magic = bytearray(fileContent)[:4]
    candidates = (p('>I', 0xfeedface), p('>I', 0xfeedfacf),
                  p('<I', 0xfeedface), p('<I', 0xfeedfacf))
    return magic in candidates
Returns whether the file content is valid for this filetype
52,306
def _parseOatHeader ( self , data ) : header = OatHeader . from_buffer ( data ) if header . magic != b'oat\n' : raise BinaryError ( 'No valid OAT file' ) key_value_store_bytes = ( c_ubyte * header . keyValueStoreSize ) . from_buffer ( data , sizeof ( OatHeader ) ) key_value_store = self . __parseKeyValueStore ( key_val...
Returns the OatHeader
52,307
def __parseKeyValueStore(self, data):
    """Return a dict of the keys and values found in the key-value store.

    Consumes ``data`` from offset 0 to its end; each key and value is
    read with ``get_str`` and skipped along with its terminating NUL.
    """
    store = {}
    offset = 0
    while offset != len(data):
        key = get_str(data, offset)
        offset += len(key) + 1
        value = get_str(data, offset)
        offset += len(value) + 1
        store[key] = value
    return store
Returns a dictionary filled with the keys and values of the key value store
52,308
def to_raw_address(addr, section):
    """Convert an RVA to a raw-file-data offset within *section*."""
    hdr = section.header
    return addr - hdr.VirtualAddress + hdr.PointerToRawData
Converts the addr from a rva to a pointer to raw data in the file
52,309
def _parseImageDosHeader(self, data):
    """Parse the IMAGE_DOS_HEADER and return it wrapped in ImageDosHeaderData.

    Raises:
        BinaryError: if the MZ magic is missing.
    """
    dos_header = IMAGE_DOS_HEADER.from_buffer(data)
    if dos_header.e_magic != b'MZ':
        raise BinaryError('No valid PE/COFF file')
    return ImageDosHeaderData(header=dos_header)
Returns the ImageDosHeader
52,310
def _parseImageNtHeaders(self, data, imageDosHeader):
    """Parse IMAGE_NT_HEADERS at ``e_lfanew`` and return ImageNtHeaderData.

    Raises:
        BinaryError: if the PE signature is missing.
    """
    nt_headers = self._classes.IMAGE_NT_HEADERS.from_buffer(
        data, imageDosHeader.header.e_lfanew)
    if nt_headers.Signature != b'PE':
        raise BinaryError('No valid PE/COFF file')
    return ImageNtHeaderData(header=nt_headers)
Returns the ImageNtHeaders
52,311
def _parseSections ( self , data , imageDosHeader , imageNtHeaders , parse_header_only = False ) : sections = [ ] optional_header_offset = imageDosHeader . header . e_lfanew + 4 + sizeof ( IMAGE_FILE_HEADER ) offset = optional_header_offset + imageNtHeaders . header . FileHeader . SizeOfOptionalHeader image_section_hea...
Parses the sections in the memory and returns a list of them
52,312
def _getSectionForDataDirectoryEntry ( self , data_directory_entry , sections ) : for section in sections : if data_directory_entry . VirtualAddress >= section . header . VirtualAddress and data_directory_entry . VirtualAddress < section . header . VirtualAddress + section . header . SizeOfRawData : return section
Returns the section which contains the data of DataDirectory
52,313
def _parseDataDirectory ( self , data , sections , imageNtHeaders ) : data_directory_data_list = [ None for i in range ( 15 ) ] export_data_directory = imageNtHeaders . header . OptionalHeader . DataDirectory [ ImageDirectoryEntry . EXPORT ] export_section = self . _getSectionForDataDirectoryEntry ( export_data_directo...
Parses the entries of the DataDirectory and returns a list of the content
52,314
def _parseDataDirectoryExport ( self , data , dataDirectoryEntry , exportSection ) : if not exportSection : return functions = [ ] export_directory = IMAGE_EXPORT_DIRECTORY . from_buffer ( exportSection . raw , to_offset ( dataDirectoryEntry . VirtualAddress , exportSection ) ) offset = to_offset ( export_directory . N...
Parses the ExportDataDirectory and returns an instance of ExportDirectoryData
52,315
def _parseDataDirectoryImport ( self , dataDirectoryEntry , importSection ) : if not importSection : return raw_bytes = ( c_ubyte * dataDirectoryEntry . Size ) . from_buffer ( importSection . raw , to_offset ( dataDirectoryEntry . VirtualAddress , importSection ) ) offset = 0 import_descriptors = [ ] while True : impor...
Parses the ImportDataDirectory and returns a list of ImportDescriptorData
52,316
def __parseThunks ( self , thunkRVA , importSection ) : offset = to_offset ( thunkRVA , importSection ) table_offset = 0 thunks = [ ] while True : thunk = IMAGE_THUNK_DATA . from_buffer ( importSection . raw , offset ) offset += sizeof ( IMAGE_THUNK_DATA ) if thunk . Ordinal == 0 : break thunkData = ThunkData ( header ...
Parses the thunks and returns a list
52,317
def __parseThunkData ( self , thunk , importSection ) : offset = to_offset ( thunk . header . AddressOfData , importSection ) if 0xf0000000 & thunk . header . AddressOfData == 0x80000000 : thunk . ordinal = thunk . header . AddressOfData & 0x0fffffff else : ibn = IMAGE_IMPORT_BY_NAME . from_buffer ( importSection . raw...
Parses the data of a thunk and sets the data
52,318
def get_ptr(data, offset=None, ptr_type=ctypes.c_void_p):
    """Return a pointer to *data*, optionally offset and cast.

    The pointer starts as c_void_p; a truthy *offset* is added to the
    address, and a non-void *ptr_type* triggers a final cast.
    """
    ptr = ctypes.cast(ctypes.pointer(data), ctypes.c_void_p)
    if offset:
        ptr = ctypes.c_void_p(ptr.value + offset)
    return ptr if ptr_type == ctypes.c_void_p else ctypes.cast(ptr, ptr_type)
Returns a void pointer to the data
52,319
def to_ubyte_array(barray):
    """Return a c_ubyte array filled from a bytearray or bytes *barray*."""
    buf = (ctypes.c_ubyte * len(barray))()
    pack_into('%ds' % len(barray), buf, 0, barray)
    return buf
Returns a c_ubyte_array filled with the given data of a bytearray or bytes
52,320
def _readFile(self, fileName):
    """Read *fileName* in binary mode and return its bytes as a c_ubyte array."""
    with open(fileName, 'rb') as bin_file:
        return to_ubyte_array(bin_file.read())
Returns the bytes of the file .
52,321
def _parseElfHeader(self, data):
    """Parse the ELF header (EHDR) from *data* and wrap it in EhdrData."""
    header = self.__classes.EHDR.from_buffer(data)
    return EhdrData(header=header)
Returns the elf header
52,322
def _parseSegments ( self , data , elfHeader ) : offset = elfHeader . header . e_phoff segments = [ ] for i in range ( elfHeader . header . e_phnum ) : phdr = self . __classes . PHDR . from_buffer ( data , offset ) segment_bytes = ( c_ubyte * phdr . p_filesz ) . from_buffer ( data , phdr . p_offset ) phdrData = PhdrDat...
Return a list of segments
52,323
def _parseSections ( self , data , elfHeader ) : offset = elfHeader . header . e_shoff shdrs = [ ] for i in range ( elfHeader . header . e_shnum ) : shdr = self . __classes . SHDR . from_buffer ( data , offset ) section_bytes = None ba_section_bytes = None if shdr . sh_type != SHT . NOBITS : section_bytes = ( c_ubyte *...
Returns a list of sections
52,324
def _parseSymbols(self, sections):
    """Attach a list of parsed symbols to each DYNSYM and SYMTAB section.

    Each symbol section's ``sh_link`` names its string table, which is
    handed to the per-section entry parser.
    """
    for section in sections:
        # sh_link indexes the associated string table section.
        strtab = sections[section.header.sh_link]
        if section.header.sh_type in (int(SHT.DYNSYM), int(SHT.SYMTAB)):
            section.symbols = self.__parseSymbolEntriesForSection(section, strtab)
Sets a list of symbols in each DYNSYM and SYMTAB section
52,325
def _parseRelocations ( self , sections ) : for section in sections : if section . header . sh_link != SHN . UNDEF and section . header . sh_type in ( SHT . REL , SHT . RELA ) : symbols = sections [ section . header . sh_link ] . symbols relocations = self . __parseRelocationEntries ( section , symbols ) section . relo...
Parses the relocations and add those to the section
52,326
def run_with_snapshots(self, tsnapstart=0., tsnapint=432000.):
    """Run the model forward, yielding to user code at snapshot intervals.

    Yields ``self.t`` whenever t >= tsnapstart and the step counter is
    a multiple of the snapshot interval expressed in steps.
    """
    steps_per_snapshot = np.ceil(tsnapint / self.dt)
    while self.t < self.tmax:
        self._step_forward()
        if self.t >= tsnapstart and self.tc % steps_per_snapshot == 0:
            yield self.t
Run the model forward yielding to user code at specified intervals .
52,327
def vertical_modes ( self ) : evals , evecs = np . linalg . eig ( - self . S ) asort = evals . argsort ( ) self . kdi2 = evals [ asort ] self . radii = np . zeros_like ( self . kdi2 ) self . radii [ 0 ] = self . g * self . H / np . abs ( self . f ) self . radii [ 1 : ] = 1. / np . sqrt ( self . kdi2 [ 1 : ] ) self . pm...
Calculate standard vertical modes . Simply the eigenvectors of the stretching matrix S
52,328
def set_U(self, U):
    """Set the background zonal flow, adding a leading (layer) axis."""
    self.Ubg = np.expand_dims(np.asarray(U), 0)
Set background zonal flow
52,329
def _rk4_integrate ( self , x , y , uv0fun , uv1fun , dt ) : u0 , v0 = uv0fun ( x , y ) k1u = dt * u0 k1v = dt * v0 x11 = self . _wrap_x ( x + 0.5 * k1u ) y11 = self . _wrap_y ( y + 0.5 * k1v ) u11 , v11 = uv1fun ( x11 , y11 ) k2u = dt * u11 k2v = dt * v11 x12 = self . _wrap_x ( x + 0.5 * k2u ) y12 = self . _wrap_y ( y...
Integrates positions x y using velocity functions uv0fun uv1fun . Returns dx and dy the displacements .
52,330
def _distance ( self , x0 , y0 , x1 , y1 ) : dx = x1 - x0 dy = y1 - y0 if self . pix : dx [ dx > self . Lx / 2 ] -= self . Lx dx [ dx < - self . Lx / 2 ] += self . Lx if self . piy : dy [ dy > self . Ly / 2 ] -= self . Ly dy [ dy < - self . Ly / 2 ] += self . Ly return dx , dy
Utility function to compute distance between points .
52,331
def spec_var(model, ph):
    """Compute the variance of p from its Fourier coefficients ph.

    The first and last wavenumber columns are halved (real-FFT
    symmetry) before summing over the last two axes.
    """
    density = 2. * np.abs(ph) ** 2 / model.M ** 2
    density[..., 0] = density[..., 0] / 2
    density[..., -1] = density[..., -1] / 2
    return density.sum(axis=(-1, -2))
Compute variance of p from Fourier coefficients ph .
52,332
def spec_sum(ph2):
    """Compute the total spectral sum of the real spectral quantity ph^2.

    Doubles all coefficients, halves the first and last columns
    (real-FFT symmetry), then sums over the last two axes.
    """
    doubled = 2. * ph2
    doubled[..., 0] = doubled[..., 0] / 2.
    doubled[..., -1] = doubled[..., -1] / 2.
    return doubled.sum(axis=(-1, -2))
Compute total spectral sum of the real spectral quantity ph^2 .
52,333
def calc_ispec ( model , ph ) : if model . kk . max ( ) > model . ll . max ( ) : kmax = model . ll . max ( ) else : kmax = model . kk . max ( ) dkr = np . sqrt ( model . dk ** 2 + model . dl ** 2 ) kr = np . arange ( dkr / 2. , kmax + dkr , dkr ) phr = np . zeros ( kr . size ) for i in range ( kr . size ) : fkr = ( mod...
Compute isotropic spectrum phr of ph from 2D spectrum .
52,334
def _initialize_stretching_matrix ( self ) : self . S = np . zeros ( ( self . nz , self . nz ) ) if ( self . nz == 2 ) and ( self . rd ) and ( self . delta ) : self . del1 = self . delta / ( self . delta + 1. ) self . del2 = ( self . delta + 1. ) ** - 1 self . Us = self . Ubg [ 0 ] - self . Ubg [ 1 ] self . F1 = self ....
Set up the stretching matrix
52,335
def set_q1q2(self, q1, q2, check=False):
    """Set upper- and lower-layer PV anomalies.

    Stacks q1 and q2 along a new leading (layer) axis and delegates to
    ``set_q``; with ``check`` the round trip is verified.
    """
    stacked = np.vstack([q1[np.newaxis, :, :], q2[np.newaxis, :, :]])
    self.set_q(stacked)
    if check:
        np.testing.assert_allclose(self.q1, q1)
        np.testing.assert_allclose(self.q1, self.ifft2(self.qh1))
Set upper and lower layer PV anomalies .
52,336
def set_U1U2(self, U1, U2):
    """Set the background zonal flow for both layers.

    Scalar inputs are broadcast to length-``ny`` arrays; ``Ubg`` stacks
    both layers along a leading axis.
    """
    if np.ndim(U1) == 0:
        U1 = U1 * np.ones(self.ny)
    if np.ndim(U2) == 0:
        U2 = U2 * np.ones(self.ny)
    self.U1, self.U2 = U1, U2
    self.Ubg = np.array([U1, U2])
Set background zonal flow .
52,337
def _initialize_model_diagnostics ( self ) : self . add_diagnostic ( 'entspec' , description = 'barotropic enstrophy spectrum' , function = ( lambda self : np . abs ( self . del1 * self . qh [ 0 ] + self . del2 * self . qh [ 1 ] ) ** 2. ) ) self . add_diagnostic ( 'APEflux' , description = 'spectral flux of available p...
Extra diagnostics for two - layer model
52,338
def calc_uv ( self , x , y , prev = False ) : assert len ( x ) == self . N assert len ( y ) == self . N u = np . zeros ( self . N , self . x . dtype ) v = np . zeros ( self . N , self . y . dtype ) for n in xrange ( self . N ) : if prev : x0 = self . xprev [ np . r_ [ : n , n + 1 : self . N ] ] y0 = self . yprev [ np ....
Calculate velocity at x and y points due to vortex velocity field . Assumes x and y are vortex positions and are ordered the same as x0 and y0 . The ordering is used to neglect the vortex self-interaction .
52,339
def uv_at_xy(self, x, y, x0, y0, s0):
    """Return (u, v) arrays of velocity induced at (x, y) by a vortex.

    Tangential point-vortex field with magnitude proportional to
    s0 / r^2; ``r_twopi`` presumably holds 1/(2*pi) — TODO confirm.
    """
    dx, dy = self.distance(x0, y0, x, y)
    rr2 = (dx ** 2 + dy ** 2) ** -1
    u = -s0 * dy * r_twopi * rr2
    v = s0 * dx * r_twopi * rr2
    return u, v
Returns two arrays of u v
52,340
def find ( self , other ) : iset = self . _iset l = binsearch_left_start ( iset , other [ 0 ] - self . _maxlen , 0 , len ( iset ) ) r = binsearch_right_end ( iset , other [ 1 ] , 0 , len ( iset ) ) iopts = iset [ l : r ] iiter = ( s for s in iopts if s [ 0 ] <= other [ 1 ] and s [ 1 ] >= other [ 0 ] ) for o in iiter : ...
Return an iterable of elements that overlap other in the tree .
52,341
def loaddict ( filename = DICTIONARY ) : global zhcdicts if zhcdicts : return if filename == _DEFAULT_DICT : zhcdicts = json . loads ( get_module_res ( filename ) . read ( ) . decode ( 'utf-8' ) ) else : with open ( filename , 'rb' ) as f : zhcdicts = json . loads ( f . read ( ) . decode ( 'utf-8' ) ) zhcdicts [ 'SIMPO...
Load the dictionary from a specific JSON file .
52,342
def getdict ( locale ) : global zhcdicts , dict_zhcn , dict_zhsg , dict_zhtw , dict_zhhk , pfsdict if zhcdicts is None : loaddict ( DICTIONARY ) if locale == 'zh-cn' : if dict_zhcn : got = dict_zhcn else : dict_zhcn = zhcdicts [ 'zh2Hans' ] . copy ( ) dict_zhcn . update ( zhcdicts [ 'zh2CN' ] ) got = dict_zhcn elif loc...
Generate or get conversion dict cache for certain locale . Dictionaries are loaded on demand .
52,343
def convtable2dict ( convtable , locale , update = None ) : rdict = update . copy ( ) if update else { } for r in convtable : if ':uni' in r : if locale in r : rdict [ r [ ':uni' ] ] = r [ locale ] elif locale [ : - 1 ] == 'zh-han' : if locale in r : for word in r . values ( ) : rdict [ word ] = r [ locale ] else : v =...
Convert a list of conversion dict to a dict for a certain locale .
52,344
def tokenize ( s , locale , update = None ) : zhdict = getdict ( locale ) pfset = pfsdict [ locale ] if update : zhdict = zhdict . copy ( ) zhdict . update ( update ) newset = set ( ) for word in update : for ch in range ( len ( word ) ) : newset . add ( word [ : ch + 1 ] ) pfset = pfset | newset ch = [ ] N = len ( s )...
Tokenize s according to corresponding locale dictionary . Don't use this for serious text processing .
52,345
def get_qiniu_config ( name , default = None ) : config = os . environ . get ( name , getattr ( settings , name , default ) ) if config is not None : if isinstance ( config , six . string_types ) : return config . strip ( ) else : return config else : raise ImproperlyConfigured ( "Can't find config for '%s' either in e...
Get configuration variable from environment variable or django setting . py
52,346
def load_from_file ( filename ) : if os . path . isdir ( filename ) : logger . error ( "Err: File '%s' is a directory" , filename ) return None if not os . path . isfile ( filename ) : logger . error ( "Err: File '%s' does not exist" , filename ) return None try : with open ( filename , 'r' ) as sourcefile : songs = [ ...
Load a list of filenames from an external text file .
52,347
def parse_argv ( ) : parser = argparse . ArgumentParser ( description = 'Find lyrics for a set of mp3' ' files and embed them as metadata' ) parser . add_argument ( '-j' , '--jobs' , help = 'Number of parallel processes' , type = int , metavar = 'N' , default = 1 ) parser . add_argument ( '-o' , '--overwrite' , help = ...
Parse command line arguments . Settings will be stored in the global variables declared above .
52,348
def decode(slug):
    """Return the uuid.UUID object represented by the given v4 or nice slug.

    The slug is an unpadded urlsafe-base64 encoding of the UUID bytes;
    appending '==' restores the padding (22 chars -> 24).
    """
    if sys.version_info.major != 2 and isinstance(slug, bytes):
        slug = slug.decode('ascii')
    padded = slug + '=='
    return uuid.UUID(bytes=base64.urlsafe_b64decode(padded))
Returns the uuid . UUID object represented by the given v4 or nice slug
52,349
def filter_against_normal ( self , normal_mutations , maf_min = 0.2 , maf_count_threshold = 20 , count_min = 1 ) : assert ( normal_mutations . chrom == self . chrom ) assert ( normal_mutations . pos == self . pos ) assert ( normal_mutations . ref == self . ref ) def passes_normal_criteria ( mut ) : return ( mut . count...
Filters mutations that are in the given normal
52,350
def add_handlers ( self , room_handler = None , transaction_handler = None , user_handler = None ) : if room_handler : room = resources . Room ( room_handler , self . Api ) self . add_route ( "/rooms/{room_alias}" , room ) if transaction_handler : transaction = resources . Transaction ( transaction_handler , self . Api...
Adds routes to Application that use specified handlers .
52,351
def log_mon_value(name, value=1, **kwargs):
    """Simplest monitoring function; values are aggregated with sum."""
    payload = {'metric_name': name, 'value': value,
               'message': '{} => {}'.format(name, value), **kwargs}
    log_mon.info(payload)
simplest monitoring function to be aggregated with sum
52,352
def create_store():
    """Set up the _proxy and attach the store object, returning the new dict."""
    storage = _proxy('store')
    _state.store = type('store', (object,), {})
    storage.store = dict()
    return storage.store
A helper for setting the _proxy and slapping the store object for us .
52,353
def request(request_callback=None, **kwargs):
    """Chisel request decorator.

    Works bare or parameterized: with no callback it returns a
    decorator that re-enters ``request`` carrying the kwargs.
    """
    if request_callback is None:
        return lambda fn: request(fn, **kwargs)
    return Request(request_callback, **kwargs).decorate_module(request_callback)
Chisel request decorator
52,354
def add ( self , client ) : if client . pool_id in self . _client_ids : log . info ( "%r is already in the penalty box. Ignoring." , client ) return release = time . time ( ) + self . _min_wait heapq . heappush ( self . _clients , ( release , ( client , self . _min_wait ) ) ) self . _client_ids . add ( client . pool_id...
Add a client to the penalty box .
52,355
def get ( self ) : now = time . time ( ) while self . _clients and self . _clients [ 0 ] [ 0 ] < now : _ , ( client , last_wait ) = heapq . heappop ( self . _clients ) connect_start = time . time ( ) try : client . echo ( "test" ) self . _client_ids . remove ( client . pool_id ) yield client except ( ConnectionError , ...
Get any clients ready to be used .
52,356
def string(_object):
    """Validate that a given input is of type string.

    Given a callable, acts as a decorator whose wrapper validates the
    value before delegating; otherwise validates ``_object`` directly.
    """
    if not is_callable(_object):
        ensure(isinstance(_object, basestring), "not of type string")
        return
    _validator = _object

    @wraps(_validator)
    def decorated(value):
        ensure(isinstance(value, basestring), "not of type string")
        return _validator(value)
    return decorated
Validates a given input is of type string .
52,357
def boolean(_object):
    """Validate that a given input is of type boolean.

    Given a callable, acts as a decorator whose wrapper validates the
    value before delegating; otherwise validates ``_object`` directly.
    """
    if not is_callable(_object):
        ensure(isinstance(_object, bool), "not of type boolean")
        return
    _validator = _object

    @wraps(_validator)
    def decorated(value):
        ensure(isinstance(value, bool), "not of type boolean")
        return _validator(value)
    return decorated
Validates a given input is of type boolean .
52,358
def dictionary ( _object , * args ) : error_msg = 'not of type dictionary' if is_callable ( _object ) : _validator = _object @ wraps ( _validator ) def decorated ( value ) : ensure ( isinstance ( value , dict ) , error_msg ) return _validator ( value ) return decorated try : ensure ( isinstance ( _object , dict ) , err...
Validates a given input is of type dictionary .
52,359
def array(_object):
    """Validate that a given input is of type list.

    Given a callable, acts as a decorator whose wrapper validates the
    value before delegating; otherwise validates ``_object`` directly.
    """
    if not is_callable(_object):
        ensure(isinstance(_object, list), "not of type array")
        return
    _validator = _object

    @wraps(_validator)
    def decorated(value):
        ensure(isinstance(value, list), "not of type array")
        return _validator(value)
    return decorated
Validates a given input is of type list .
52,360
def integer(_object):
    """Validate that a given input is of type int.

    Given a callable, acts as a decorator whose wrapper validates the
    value before delegating; otherwise validates ``_object`` directly.
    """
    if not is_callable(_object):
        ensure(isinstance(_object, int), "not of type int")
        return
    _validator = _object

    @wraps(_validator)
    def decorated(value):
        ensure(isinstance(value, int), "not of type int")
        return _validator(value)
    return decorated
Validates a given input is of type int .
52,361
def constant(cls, value: Value, dtype: tf.DType = tf.float32) -> 'TensorFluent':
    """Return a constant-valued TensorFluent with the given dtype.

    The resulting fluent has an empty scope and is not batched.
    """
    tensor = tf.constant(value, dtype=dtype)
    return TensorFluent(tensor, [], batch=False)
Returns a constant value TensorFluent with given dtype .
52,362
def Bernoulli ( cls , mean : 'TensorFluent' , batch_size : Optional [ int ] = None ) -> Tuple [ Distribution , 'TensorFluent' ] : probs = mean . tensor dist = tf . distributions . Bernoulli ( probs = probs , dtype = tf . bool ) batch = mean . batch if not batch and batch_size is not None : t = dist . sample ( batch_siz...
Returns a TensorFluent for the Bernoulli sampling op with given mean parameter .
52,363
def Uniform ( cls , low : 'TensorFluent' , high : 'TensorFluent' , batch_size : Optional [ int ] = None ) -> Tuple [ Distribution , 'TensorFluent' ] : if low . scope != high . scope : raise ValueError ( 'Uniform distribution: parameters must have same scope!' ) dist = tf . distributions . Uniform ( low . tensor , high ...
Returns a TensorFluent for the Uniform sampling op with given low and high parameters .
52,364
def Normal ( cls , mean : 'TensorFluent' , variance : 'TensorFluent' , batch_size : Optional [ int ] = None ) -> Tuple [ Distribution , 'TensorFluent' ] : if mean . scope != variance . scope : raise ValueError ( 'Normal distribution: parameters must have same scope!' ) loc = mean . tensor scale = tf . sqrt ( variance ....
Returns a TensorFluent for the Normal sampling op with given mean and variance .
52,365
def Gamma ( cls , shape : 'TensorFluent' , scale : 'TensorFluent' , batch_size : Optional [ int ] = None ) -> Tuple [ Distribution , 'TensorFluent' ] : if shape . scope != scale . scope : raise ValueError ( 'Gamma distribution: parameters must have same scope!' ) concentration = shape . tensor rate = 1 / scale . tensor...
Returns a TensorFluent for the Gamma sampling op with given shape and scale parameters .
52,366
def Exponential ( cls , mean : 'TensorFluent' , batch_size : Optional [ int ] = None ) -> Tuple [ Distribution , 'TensorFluent' ] : rate = 1 / mean . tensor dist = tf . distributions . Exponential ( rate ) batch = mean . batch if not batch and batch_size is not None : t = dist . sample ( batch_size ) batch = True else ...
Returns a TensorFluent for the Exponential sampling op with given mean parameter .
52,367
def stop_gradient(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a copy of *x* with stop_gradient applied at tensor level."""
    return TensorFluent(tf.stop_gradient(x.tensor), x.scope.as_list(), x.batch)
Returns a copy of the input fluent with stop_gradient at tensor level .
52,368
def stop_batch_gradient(cls, x: 'TensorFluent', stop_batch: tf.Tensor) -> 'TensorFluent':
    """Return a copy of *x* with gradients stopped at batch level.

    ``tf.where`` selects the stopped tensor wherever stop_batch is True.
    """
    guarded = tf.where(stop_batch, tf.stop_gradient(x.tensor), x.tensor)
    return TensorFluent(guarded, x.scope.as_list(), x.batch)
Returns a copy of the inputs fluent with stop_gradient applied at batch level .
52,369
def abs(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise abs function (float32)."""
    return cls._unary_op(x, tf.abs, tf.float32)
Returns a TensorFluent for the abs function .
52,370
def exp(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise exp function (float32)."""
    return cls._unary_op(x, tf.exp, tf.float32)
Returns a TensorFluent for the exp function .
52,371
def log(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise natural log (float32)."""
    return cls._unary_op(x, tf.log, tf.float32)
Returns a TensorFluent for the log function .
52,372
def sqrt(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise sqrt function (float32)."""
    return cls._unary_op(x, tf.sqrt, tf.float32)
Returns a TensorFluent for the sqrt function .
52,373
def cos(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise cos function (float32)."""
    return cls._unary_op(x, tf.cos, tf.float32)
Returns a TensorFluent for the cos function .
52,374
def sin(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise sin function (float32)."""
    return cls._unary_op(x, tf.sin, tf.float32)
Returns a TensorFluent for the sin function .
52,375
def tan(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise tan function (float32)."""
    return cls._unary_op(x, tf.tan, tf.float32)
Returns a TensorFluent for the tan function .
52,376
def acos(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise arccos function (float32)."""
    return cls._unary_op(x, tf.acos, tf.float32)
Returns a TensorFluent for the arccos function .
52,377
def asin(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise arcsin function (float32)."""
    return cls._unary_op(x, tf.asin, tf.float32)
Returns a TensorFluent for the arcsin function .
52,378
def atan(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise arctan function (float32).

    Bug fix: the unary arctangent is ``tf.atan``; ``tf.atan2`` is the
    two-argument variant and cannot be applied by ``_unary_op``, which
    calls the op with a single tensor.
    """
    return cls._unary_op(x, tf.atan, tf.float32)
Returns a TensorFluent for the arctan function .
52,379
def round(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise round function (float32)."""
    return cls._unary_op(x, tf.round, tf.float32)
Returns a TensorFluent for the round function .
52,380
def ceil(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise ceil function (float32)."""
    return cls._unary_op(x, tf.ceil, tf.float32)
Returns a TensorFluent for the ceil function .
52,381
def floor(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise floor function (float32)."""
    return cls._unary_op(x, tf.floor, tf.float32)
Returns a TensorFluent for the floor function .
52,382
def pow(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise pow function (float32)."""
    return cls._binary_op(x, y, tf.pow, tf.float32)
Returns a TensorFluent for the pow function . TensorFluent
52,383
def max(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise maximum function (float32)."""
    return cls._binary_op(x, y, tf.maximum, tf.float32)
Returns a TensorFluent for the maximum function . TensorFluent
52,384
def min(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise minimum function (float32)."""
    return cls._binary_op(x, y, tf.minimum, tf.float32)
Returns a TensorFluent for the minimum function .
52,385
def if_then_else ( cls , condition : 'TensorFluent' , true_case : 'TensorFluent' , false_case : 'TensorFluent' ) -> 'TensorFluent' : true = TensorFluent . constant ( True , tf . bool ) false = TensorFluent . constant ( False , tf . bool ) ite = ( condition == true ) * true_case + ( condition == false ) * false_case if ...
Returns a TensorFluent for the control op if - then - else .
52,386
def _binary_op ( cls , x : 'TensorFluent' , y : 'TensorFluent' , op : Callable [ [ tf . Tensor , tf . Tensor ] , tf . Tensor ] , dtype : tf . DType ) -> 'TensorFluent' : s1 = x . scope . as_list ( ) s2 = y . scope . as_list ( ) scope , perm1 , perm2 = TensorFluentScope . broadcast ( s1 , s2 ) if x . batch and perm1 != ...
Returns a TensorFluent for the binary op applied to fluents x and y .
52,387
def _unary_op(cls,
              x: 'TensorFluent',
              op: Callable[[tf.Tensor], tf.Tensor],
              dtype: tf.DType) -> 'TensorFluent':
    """Apply unary *op* to fluent *x* after casting it to *dtype*.

    Scope and batch flag are carried over from the casted operand.
    """
    casted = x.cast(dtype)
    result = op(casted.tensor)
    return TensorFluent(result, casted.scope.as_list(), batch=casted.batch)
Returns a TensorFluent for the unary op applied to fluent x .
52,388
def _aggregation_op ( cls , op : Callable [ [ tf . Tensor , Optional [ Sequence [ int ] ] ] , tf . Tensor ] , x : 'TensorFluent' , vars_list : List [ str ] ) -> 'TensorFluent' : axis = cls . _varslist2axis ( x , vars_list ) t = op ( x . tensor , axis ) scope = [ ] for var in x . scope . as_list ( ) : if var not in vars...
Returns a TensorFluent for the aggregation op applied to fluent x .
52,389
def _varslist2axis ( cls , fluent : 'TensorFluent' , vars_list : List [ str ] ) -> List [ int ] : axis = [ ] for var in vars_list : if var in fluent . scope . as_list ( ) : ax = fluent . scope . index ( var ) if fluent . batch : ax += 1 axis . append ( ax ) return axis
Maps the vars_list into a list of axis indices corresponding to the fluent scope .
52,390
def cast(self, dtype: tf.DType) -> 'TensorFluent':
    """Return this fluent cast to *dtype* (self when already that dtype)."""
    if self.dtype == dtype:
        return self
    casted = tf.cast(self.tensor, dtype)
    return TensorFluent(casted, self.scope.as_list(), batch=self.batch)
Returns a TensorFluent for the cast operation with given dtype .
52,391
def reshape(self, shape: tf.TensorShape) -> 'TensorFluent':
    """Return this fluent reshaped to *shape*, preserving scope and batch."""
    reshaped = tf.reshape(self.tensor, shape)
    return TensorFluent(reshaped, self.scope.as_list(), batch=self.batch)
Returns a TensorFluent for the reshape operation with given shape .
52,392
def transpose ( self , permutation : Optional [ List [ int ] ] = None ) -> 'TensorFluent' : if permutation == [ ] : return self t = tf . transpose ( self . tensor , permutation ) if permutation != [ ] else self . tensor scope = self . scope . as_list ( ) batch = self . batch return TensorFluent ( t , scope , batch = ba...
Returns a TensorFluent for the transpose operation with given permutation .
52,393
def sum(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the sum aggregation over vars_list.

    Boolean fluents are cast to float32 before the reduction.
    """
    operand = self.cast(tf.float32) if self.dtype == tf.bool else self
    return self._aggregation_op(tf.reduce_sum, operand, vars_list)
Returns the TensorFluent for the sum aggregation function .
52,394
def avg(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the avg aggregation over vars_list.

    Boolean fluents are cast to float32 before the reduction.
    """
    operand = self.cast(tf.float32) if self.dtype == tf.bool else self
    return self._aggregation_op(tf.reduce_mean, operand, vars_list)
Returns the TensorFluent for the avg aggregation function .
52,395
def prod(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the prod aggregation over vars_list.

    Boolean fluents are cast to float32 before the reduction.
    """
    operand = self.cast(tf.float32) if self.dtype == tf.bool else self
    return self._aggregation_op(tf.reduce_prod, operand, vars_list)
Returns the TensorFluent for the prod aggregation function .
52,396
def maximum(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the maximum aggregation over vars_list."""
    return self._aggregation_op(tf.reduce_max, self, vars_list)
Returns the TensorFluent for the maximum aggregation function .
52,397
def minimum(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the minimum aggregation over vars_list."""
    return self._aggregation_op(tf.reduce_min, self, vars_list)
Returns the TensorFluent for the minimum aggregation function .
52,398
def forall(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the forall aggregation over vars_list."""
    return self._aggregation_op(tf.reduce_all, self, vars_list)
Returns the TensorFluent for the forall aggregation function .
52,399
def exists(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the exists aggregation over vars_list."""
    return self._aggregation_op(tf.reduce_any, self, vars_list)
Returns the TensorFluent for the exists aggregation function .