idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
52,300 | def _get_netengine_backend ( self ) : backend_class_name = self . backend . split ( '.' ) [ - 1 ] backend_path = self . backend . lower ( ) module = import_module ( backend_path ) BackendClass = getattr ( module , backend_class_name ) return BackendClass | returns the netengine backend specified in self . backend for internal use only |
52,301 | def _build_netengine_arguments ( self ) : arguments = { "host" : self . host } if self . config is not None : for key , value in self . config . iteritems ( ) : arguments [ key ] = value if self . port : arguments [ "port" ] = self . port return arguments | returns a python dictionary representing arguments that will be passed to a netengine backend for internal use only |
52,302 | def model_factory ( schema , resolver = None , base_class = model . Model , name = None ) : schema = copy . deepcopy ( schema ) resolver = resolver class Model ( base_class ) : def __init__ ( self , * args , ** kwargs ) : self . __dict__ [ 'schema' ] = schema self . __dict__ [ 'resolver' ] = resolver base_class . __init__ ( self , * args , ** kwargs ) if resolver is not None : Model . resolver = resolver if name is not None : Model . __name__ = name elif 'name' in schema : Model . __name__ = str ( schema [ 'name' ] ) return Model | Generate a model class based on the provided JSON Schema |
52,303 | def patch ( self ) : original = self . __dict__ [ '__original__' ] return jsonpatch . make_patch ( original , dict ( self ) ) . to_string ( ) | Return a jsonpatch object representing the delta |
52,304 | def changes ( self ) : deprecation_msg = 'Model.changes will be removed in warlock v2' warnings . warn ( deprecation_msg , DeprecationWarning , stacklevel = 2 ) return copy . deepcopy ( self . __dict__ [ 'changes' ] ) | Dumber version of patch method |
52,305 | def isSupportedContent ( cls , fileContent ) : magic = bytearray ( fileContent ) [ : 4 ] return magic == p ( '>I' , 0xfeedface ) or magic == p ( '>I' , 0xfeedfacf ) or magic == p ( '<I' , 0xfeedface ) or magic == p ( '<I' , 0xfeedfacf ) | Returns if the files are valid for this filetype |
52,306 | def _parseOatHeader ( self , data ) : header = OatHeader . from_buffer ( data ) if header . magic != b'oat\n' : raise BinaryError ( 'No valid OAT file' ) key_value_store_bytes = ( c_ubyte * header . keyValueStoreSize ) . from_buffer ( data , sizeof ( OatHeader ) ) key_value_store = self . __parseKeyValueStore ( key_value_store_bytes ) return OatHeaderData ( header = header , keyValueStoreRaw = key_value_store_bytes , keyValueStore = key_value_store ) | Returns the OatHeader |
52,307 | def __parseKeyValueStore ( self , data ) : offset = 0 key_value_store = { } while offset != len ( data ) : key = get_str ( data , offset ) offset += len ( key ) + 1 value = get_str ( data , offset ) offset += len ( value ) + 1 key_value_store [ key ] = value return key_value_store | Returns a dictionary filled with the keys and values of the key value store |
52,308 | def to_raw_address ( addr , section ) : return addr - section . header . VirtualAddress + section . header . PointerToRawData | Converts the addr from a rva to a pointer to raw data in the file |
52,309 | def _parseImageDosHeader ( self , data ) : ioh = IMAGE_DOS_HEADER . from_buffer ( data ) if ioh . e_magic != b'MZ' : raise BinaryError ( 'No valid PE/COFF file' ) return ImageDosHeaderData ( header = ioh ) | Returns the ImageDosHeader |
52,310 | def _parseImageNtHeaders ( self , data , imageDosHeader ) : inth = self . _classes . IMAGE_NT_HEADERS . from_buffer ( data , imageDosHeader . header . e_lfanew ) if inth . Signature != b'PE' : raise BinaryError ( 'No valid PE/COFF file' ) return ImageNtHeaderData ( header = inth ) | Returns the ImageNtHeaders |
52,311 | def _parseSections ( self , data , imageDosHeader , imageNtHeaders , parse_header_only = False ) : sections = [ ] optional_header_offset = imageDosHeader . header . e_lfanew + 4 + sizeof ( IMAGE_FILE_HEADER ) offset = optional_header_offset + imageNtHeaders . header . FileHeader . SizeOfOptionalHeader image_section_header_size = sizeof ( IMAGE_SECTION_HEADER ) for sectionNo in range ( imageNtHeaders . header . FileHeader . NumberOfSections ) : ishdr = IMAGE_SECTION_HEADER . from_buffer ( data , offset ) if parse_header_only : raw = None bytes_ = bytearray ( ) else : size = ishdr . SizeOfRawData raw = ( c_ubyte * size ) . from_buffer ( data , ishdr . PointerToRawData ) bytes_ = bytearray ( raw ) sections . append ( SectionData ( header = ishdr , name = ishdr . Name . decode ( 'ASCII' , errors = 'ignore' ) , bytes = bytes_ , raw = raw ) ) offset += image_section_header_size return sections | Parses the sections in the memory and returns a list of them |
52,312 | def _getSectionForDataDirectoryEntry ( self , data_directory_entry , sections ) : for section in sections : if data_directory_entry . VirtualAddress >= section . header . VirtualAddress and data_directory_entry . VirtualAddress < section . header . VirtualAddress + section . header . SizeOfRawData : return section | Returns the section which contains the data of DataDirectory |
52,313 | def _parseDataDirectory ( self , data , sections , imageNtHeaders ) : data_directory_data_list = [ None for i in range ( 15 ) ] export_data_directory = imageNtHeaders . header . OptionalHeader . DataDirectory [ ImageDirectoryEntry . EXPORT ] export_section = self . _getSectionForDataDirectoryEntry ( export_data_directory , sections ) export_data_directory_data = self . _parseDataDirectoryExport ( data , export_data_directory , export_section ) data_directory_data_list [ ImageDirectoryEntry . EXPORT ] = export_data_directory_data import_data_directory = imageNtHeaders . header . OptionalHeader . DataDirectory [ ImageDirectoryEntry . IMPORT ] import_section = self . _getSectionForDataDirectoryEntry ( import_data_directory , sections ) import_data_directory_data = self . _parseDataDirectoryImport ( import_data_directory , import_section ) data_directory_data_list [ ImageDirectoryEntry . IMPORT ] = import_data_directory_data loadconfig_data_directory = imageNtHeaders . header . OptionalHeader . DataDirectory [ ImageDirectoryEntry . LOAD_CONFIG ] loadconfig_section = self . _getSectionForDataDirectoryEntry ( loadconfig_data_directory , sections ) loadconfig_data = self . _parseLoadConfig ( loadconfig_data_directory , loadconfig_section ) data_directory_data_list [ ImageDirectoryEntry . LOAD_CONFIG ] = loadconfig_data return data_directory_data_list | Parses the entries of the DataDirectory and returns a list of the content |
52,314 | def _parseDataDirectoryExport ( self , data , dataDirectoryEntry , exportSection ) : if not exportSection : return functions = [ ] export_directory = IMAGE_EXPORT_DIRECTORY . from_buffer ( exportSection . raw , to_offset ( dataDirectoryEntry . VirtualAddress , exportSection ) ) offset = to_offset ( export_directory . Name , exportSection ) checkOffset ( offset , exportSection ) name = get_str ( exportSection . raw , offset ) offsetOfNames = to_offset ( export_directory . AddressOfNames , exportSection ) offsetOfAddress = to_offset ( export_directory . AddressOfFunctions , exportSection ) offsetOfNameOrdinals = to_offset ( export_directory . AddressOfNameOrdinals , exportSection ) for i in range ( export_directory . NumberOfNames ) : name_address = c_uint . from_buffer ( exportSection . raw , offsetOfNames ) . value name_offset = to_offset ( name_address , exportSection ) checkOffset ( name_offset , exportSection ) func_name = get_str ( exportSection . raw , name_offset ) ordinal = c_ushort . from_buffer ( exportSection . raw , offsetOfNameOrdinals ) . value func_addr = c_uint . from_buffer ( exportSection . raw , offsetOfAddress ) . value offsetOfNames += 4 offsetOfAddress += 4 offsetOfNameOrdinals += 2 functions . append ( FunctionData ( name = func_name , rva = func_addr , ordinal = ordinal ) ) return ExportDirectoryData ( header = export_directory , name = name , functions = functions ) | Parses the EmportDataDirectory and returns an instance of ExportDirectoryData |
52,315 | def _parseDataDirectoryImport ( self , dataDirectoryEntry , importSection ) : if not importSection : return raw_bytes = ( c_ubyte * dataDirectoryEntry . Size ) . from_buffer ( importSection . raw , to_offset ( dataDirectoryEntry . VirtualAddress , importSection ) ) offset = 0 import_descriptors = [ ] while True : import_descriptor = IMAGE_IMPORT_DESCRIPTOR . from_buffer ( raw_bytes , offset ) if import_descriptor . OriginalFirstThunk == 0 : break else : nameOffset = to_offset ( import_descriptor . Name , importSection ) checkOffset ( nameOffset , importSection ) dllName = get_str ( importSection . raw , nameOffset ) import_name_table = self . __parseThunks ( import_descriptor . OriginalFirstThunk , importSection ) import_address_table = self . __parseThunks ( import_descriptor . FirstThunk , importSection ) import_descriptors . append ( ImportDescriptorData ( header = import_descriptor , dllName = dllName , importNameTable = import_name_table , importAddressTable = import_address_table ) ) offset += sizeof ( IMAGE_IMPORT_DESCRIPTOR ) return import_descriptors | Parses the ImportDataDirectory and returns a list of ImportDescriptorData |
52,316 | def __parseThunks ( self , thunkRVA , importSection ) : offset = to_offset ( thunkRVA , importSection ) table_offset = 0 thunks = [ ] while True : thunk = IMAGE_THUNK_DATA . from_buffer ( importSection . raw , offset ) offset += sizeof ( IMAGE_THUNK_DATA ) if thunk . Ordinal == 0 : break thunkData = ThunkData ( header = thunk , rva = table_offset + thunkRVA , ordinal = None , importByName = None ) if to_offset ( thunk . AddressOfData , importSection ) > 0 and to_offset ( thunk . AddressOfData , importSection ) < len ( self . _bytes ) : self . __parseThunkData ( thunkData , importSection ) thunks . append ( thunkData ) table_offset += 4 return thunks | Parses the thunks and returns a list |
52,317 | def __parseThunkData ( self , thunk , importSection ) : offset = to_offset ( thunk . header . AddressOfData , importSection ) if 0xf0000000 & thunk . header . AddressOfData == 0x80000000 : thunk . ordinal = thunk . header . AddressOfData & 0x0fffffff else : ibn = IMAGE_IMPORT_BY_NAME . from_buffer ( importSection . raw , offset ) checkOffset ( offset + 2 , importSection ) name = get_str ( importSection . raw , offset + 2 ) thunk . importByName = ImportByNameData ( header = ibn , hint = ibn . Hint , name = name ) | Parses the data of a thunk and sets the data |
52,318 | def get_ptr ( data , offset = None , ptr_type = ctypes . c_void_p ) : ptr = ctypes . cast ( ctypes . pointer ( data ) , ctypes . c_void_p ) if offset : ptr = ctypes . c_void_p ( ptr . value + offset ) if ptr_type != ctypes . c_void_p : ptr = ctypes . cast ( ptr , ptr_type ) return ptr | Returns a void pointer to the data |
52,319 | def to_ubyte_array ( barray ) : bs = ( ctypes . c_ubyte * len ( barray ) ) ( ) pack_into ( '%ds' % len ( barray ) , bs , 0 , barray ) return bs | Returns a c_ubyte_array filled with the given data of a bytearray or bytes |
52,320 | def _readFile ( self , fileName ) : with open ( fileName , 'rb' ) as binFile : b = binFile . read ( ) return to_ubyte_array ( b ) | Returns the bytes of the file . |
52,321 | def _parseElfHeader ( self , data ) : ehdr = self . __classes . EHDR . from_buffer ( data ) return EhdrData ( header = ehdr ) | Returns the elf header |
52,322 | def _parseSegments ( self , data , elfHeader ) : offset = elfHeader . header . e_phoff segments = [ ] for i in range ( elfHeader . header . e_phnum ) : phdr = self . __classes . PHDR . from_buffer ( data , offset ) segment_bytes = ( c_ubyte * phdr . p_filesz ) . from_buffer ( data , phdr . p_offset ) phdrData = PhdrData ( header = phdr , raw = segment_bytes , bytes = bytearray ( segment_bytes ) , type = PT [ phdr . p_type ] , vaddr = phdr . p_vaddr , offset = phdr . p_offset ) segments . append ( phdrData ) offset += elfHeader . header . e_phentsize return segments | Return a list of segments |
52,323 | def _parseSections ( self , data , elfHeader ) : offset = elfHeader . header . e_shoff shdrs = [ ] for i in range ( elfHeader . header . e_shnum ) : shdr = self . __classes . SHDR . from_buffer ( data , offset ) section_bytes = None ba_section_bytes = None if shdr . sh_type != SHT . NOBITS : section_bytes = ( c_ubyte * shdr . sh_size ) . from_buffer ( data , shdr . sh_offset ) ba_section_bytes = bytearray ( section_bytes ) shdrs . append ( ShdrData ( name = None , header = shdr , raw = section_bytes , bytes = ba_section_bytes ) ) offset += elfHeader . header . e_shentsize if elfHeader . header . e_shstrndx != SHN . UNDEF : strtab = shdrs [ elfHeader . header . e_shstrndx ] strtab_offset = strtab . header . sh_offset for section in shdrs : section . name = get_str ( strtab . raw , section . header . sh_name ) return shdrs | Returns a list of sections |
52,324 | def _parseSymbols ( self , sections ) : for section in sections : strtab = sections [ section . header . sh_link ] if section . header . sh_type in ( int ( SHT . DYNSYM ) , int ( SHT . SYMTAB ) ) : section . symbols = self . __parseSymbolEntriesForSection ( section , strtab ) | Sets a list of symbols in each DYNSYM and SYMTAB section |
52,325 | def _parseRelocations ( self , sections ) : for section in sections : if section . header . sh_link != SHN . UNDEF and section . header . sh_type in ( SHT . REL , SHT . RELA ) : symbols = sections [ section . header . sh_link ] . symbols relocations = self . __parseRelocationEntries ( section , symbols ) section . relocations = relocations | Parses the relocations and add those to the section |
52,326 | def run_with_snapshots ( self , tsnapstart = 0. , tsnapint = 432000. ) : tsnapints = np . ceil ( tsnapint / self . dt ) while ( self . t < self . tmax ) : self . _step_forward ( ) if self . t >= tsnapstart and ( self . tc % tsnapints ) == 0 : yield self . t return | Run the model forward yielding to user code at specified intervals . |
52,327 | def vertical_modes ( self ) : evals , evecs = np . linalg . eig ( - self . S ) asort = evals . argsort ( ) self . kdi2 = evals [ asort ] self . radii = np . zeros_like ( self . kdi2 ) self . radii [ 0 ] = self . g * self . H / np . abs ( self . f ) self . radii [ 1 : ] = 1. / np . sqrt ( self . kdi2 [ 1 : ] ) self . pmodes = evecs [ : , asort ] Ai = ( self . H / ( self . Hi [ : , np . newaxis ] * ( self . pmodes ** 2 ) ) . sum ( axis = 0 ) ) ** 0.5 self . pmodes = Ai [ np . newaxis , : ] * self . pmodes | Calculate standard vertical modes . Simply the eigenvectors of the stretching matrix S |
52,328 | def set_U ( self , U ) : self . Ubg = np . asarray ( U ) [ np . newaxis , ... ] | Set background zonal flow |
52,329 | def _rk4_integrate ( self , x , y , uv0fun , uv1fun , dt ) : u0 , v0 = uv0fun ( x , y ) k1u = dt * u0 k1v = dt * v0 x11 = self . _wrap_x ( x + 0.5 * k1u ) y11 = self . _wrap_y ( y + 0.5 * k1v ) u11 , v11 = uv1fun ( x11 , y11 ) k2u = dt * u11 k2v = dt * v11 x12 = self . _wrap_x ( x + 0.5 * k2u ) y12 = self . _wrap_y ( y + 0.5 * k2v ) u12 , v12 = uv1fun ( x12 , y12 ) k3u = dt * u12 k3v = dt * v12 x13 = self . _wrap_x ( x + k3u ) y13 = self . _wrap_y ( y + k3v ) u13 , v13 = uv1fun ( x13 , y13 ) k4u = dt * u13 k4v = dt * v13 dx = 6 ** - 1 * ( k1u + 2 * k2u + 2 * k3u + k4u ) dy = 6 ** - 1 * ( k1v + 2 * k2v + 2 * k3v + k4v ) return dx , dy | Integrates positions x y using velocity functions uv0fun uv1fun . Returns dx and dy the displacements . |
52,330 | def _distance ( self , x0 , y0 , x1 , y1 ) : dx = x1 - x0 dy = y1 - y0 if self . pix : dx [ dx > self . Lx / 2 ] -= self . Lx dx [ dx < - self . Lx / 2 ] += self . Lx if self . piy : dy [ dy > self . Ly / 2 ] -= self . Ly dy [ dy < - self . Ly / 2 ] += self . Ly return dx , dy | Utitlity function to compute distance between points . |
52,331 | def spec_var ( model , ph ) : var_dens = 2. * np . abs ( ph ) ** 2 / model . M ** 2 var_dens [ ... , 0 ] /= 2 var_dens [ ... , - 1 ] /= 2 return var_dens . sum ( axis = ( - 1 , - 2 ) ) | Compute variance of p from Fourier coefficients ph . |
52,332 | def spec_sum ( ph2 ) : ph2 = 2. * ph2 ph2 [ ... , 0 ] = ph2 [ ... , 0 ] / 2. ph2 [ ... , - 1 ] = ph2 [ ... , - 1 ] / 2. return ph2 . sum ( axis = ( - 1 , - 2 ) ) | Compute total spectral sum of the real spectral quantity ph^2 . |
52,333 | def calc_ispec ( model , ph ) : if model . kk . max ( ) > model . ll . max ( ) : kmax = model . ll . max ( ) else : kmax = model . kk . max ( ) dkr = np . sqrt ( model . dk ** 2 + model . dl ** 2 ) kr = np . arange ( dkr / 2. , kmax + dkr , dkr ) phr = np . zeros ( kr . size ) for i in range ( kr . size ) : fkr = ( model . wv >= kr [ i ] - dkr / 2 ) & ( model . wv <= kr [ i ] + dkr / 2 ) dth = pi / ( fkr . sum ( ) - 1 ) phr [ i ] = ph [ fkr ] . sum ( ) * kr [ i ] * dth return kr , phr | Compute isotropic spectrum phr of ph from 2D spectrum . |
52,334 | def _initialize_stretching_matrix ( self ) : self . S = np . zeros ( ( self . nz , self . nz ) ) if ( self . nz == 2 ) and ( self . rd ) and ( self . delta ) : self . del1 = self . delta / ( self . delta + 1. ) self . del2 = ( self . delta + 1. ) ** - 1 self . Us = self . Ubg [ 0 ] - self . Ubg [ 1 ] self . F1 = self . rd ** - 2 / ( 1. + self . delta ) self . F2 = self . delta * self . F1 self . S [ 0 , 0 ] , self . S [ 0 , 1 ] = - self . F1 , self . F1 self . S [ 1 , 0 ] , self . S [ 1 , 1 ] = self . F2 , - self . F2 else : for i in range ( self . nz ) : if i == 0 : self . S [ i , i ] = - self . f2 / self . Hi [ i ] / self . gpi [ i ] self . S [ i , i + 1 ] = self . f2 / self . Hi [ i ] / self . gpi [ i ] elif i == self . nz - 1 : self . S [ i , i ] = - self . f2 / self . Hi [ i ] / self . gpi [ i - 1 ] self . S [ i , i - 1 ] = self . f2 / self . Hi [ i ] / self . gpi [ i - 1 ] else : self . S [ i , i - 1 ] = self . f2 / self . Hi [ i ] / self . gpi [ i - 1 ] self . S [ i , i ] = - ( self . f2 / self . Hi [ i ] / self . gpi [ i ] + self . f2 / self . Hi [ i ] / self . gpi [ i - 1 ] ) self . S [ i , i + 1 ] = self . f2 / self . Hi [ i ] / self . gpi [ i ] | Set up the stretching matrix |
52,335 | def set_q1q2 ( self , q1 , q2 , check = False ) : self . set_q ( np . vstack ( [ q1 [ np . newaxis , : , : ] , q2 [ np . newaxis , : , : ] ] ) ) if check : np . testing . assert_allclose ( self . q1 , q1 ) np . testing . assert_allclose ( self . q1 , self . ifft2 ( self . qh1 ) ) | Set upper and lower layer PV anomalies . |
52,336 | def set_U1U2 ( self , U1 , U2 ) : if len ( np . shape ( U1 ) ) == 0 : U1 = U1 * np . ones ( ( self . ny ) ) if len ( np . shape ( U2 ) ) == 0 : U2 = U2 * np . ones ( ( self . ny ) ) self . U1 = U1 self . U2 = U2 self . Ubg = np . array ( [ U1 , U2 ] ) | Set background zonal flow . |
52,337 | def _initialize_model_diagnostics ( self ) : self . add_diagnostic ( 'entspec' , description = 'barotropic enstrophy spectrum' , function = ( lambda self : np . abs ( self . del1 * self . qh [ 0 ] + self . del2 * self . qh [ 1 ] ) ** 2. ) ) self . add_diagnostic ( 'APEflux' , description = 'spectral flux of available potential energy' , function = ( lambda self : self . rd ** - 2 * self . del1 * self . del2 * np . real ( ( self . ph [ 0 ] - self . ph [ 1 ] ) * np . conj ( self . Jptpc ) ) ) ) self . add_diagnostic ( 'KEflux' , description = 'spectral flux of kinetic energy' , function = ( lambda self : np . real ( self . del1 * self . ph [ 0 ] * np . conj ( self . Jpxi [ 0 ] ) ) + np . real ( self . del2 * self . ph [ 1 ] * np . conj ( self . Jpxi [ 1 ] ) ) ) ) self . add_diagnostic ( 'APEgenspec' , description = 'spectrum of APE generation' , function = ( lambda self : self . U [ : , np . newaxis ] * self . rd ** - 2 * self . del1 * self . del2 * np . real ( 1j * self . k * ( self . del1 * self . ph [ 0 ] + self . del2 * self . ph [ 1 ] ) * np . conj ( self . ph [ 0 ] - self . ph [ 1 ] ) ) ) ) self . add_diagnostic ( 'APEgen' , description = 'total APE generation' , function = ( lambda self : self . U * self . rd ** - 2 * self . del1 * self . del2 * np . real ( ( 1j * self . k * ( self . del1 * self . ph [ 0 ] + self . del2 * self . ph [ 1 ] ) * np . conj ( self . ph [ 0 ] - self . ph [ 1 ] ) ) . sum ( ) + ( 1j * self . k [ : , 1 : - 2 ] * ( self . del1 * self . ph [ 0 , : , 1 : - 2 ] + self . del2 * self . ph [ 1 , : , 1 : - 2 ] ) * np . conj ( self . ph [ 0 , : , 1 : - 2 ] - self . ph [ 1 , : , 1 : - 2 ] ) ) . sum ( ) ) / ( self . M ** 2 ) ) ) | Extra diagnostics for two - layer model |
52,338 | def calc_uv ( self , x , y , prev = False ) : assert len ( x ) == self . N assert len ( y ) == self . N u = np . zeros ( self . N , self . x . dtype ) v = np . zeros ( self . N , self . y . dtype ) for n in xrange ( self . N ) : if prev : x0 = self . xprev [ np . r_ [ : n , n + 1 : self . N ] ] y0 = self . yprev [ np . r_ [ : n , n + 1 : self . N ] ] else : x0 = self . x [ np . r_ [ : n , n + 1 : self . N ] ] y0 = self . y [ np . r_ [ : n , n + 1 : self . N ] ] s0 = self . s [ np . r_ [ : n , n + 1 : self . N ] ] u0 , v0 = self . uv_at_xy ( x [ n ] , y [ n ] , x0 , y0 , s0 ) u [ n ] = u0 . sum ( ) v [ n ] = v0 . sum ( ) return u , v | Calculate velocity at x and y points due to vortex velocity field . Assumes x and y are vortex positions and are ordered the same as x0 and y0 . The ordering is used to neglect to vortex self interaction . |
52,339 | def uv_at_xy ( self , x , y , x0 , y0 , s0 ) : dx , dy = self . distance ( x0 , y0 , x , y ) rr2 = ( dx ** 2 + dy ** 2 ) ** - 1 u = - s0 * dy * r_twopi * rr2 v = s0 * dx * r_twopi * rr2 return u , v | Returns two arrays of u v |
52,340 | def find ( self , other ) : iset = self . _iset l = binsearch_left_start ( iset , other [ 0 ] - self . _maxlen , 0 , len ( iset ) ) r = binsearch_right_end ( iset , other [ 1 ] , 0 , len ( iset ) ) iopts = iset [ l : r ] iiter = ( s for s in iopts if s [ 0 ] <= other [ 1 ] and s [ 1 ] >= other [ 0 ] ) for o in iiter : yield o | Return an interable of elements that overlap other in the tree . |
52,341 | def loaddict ( filename = DICTIONARY ) : global zhcdicts if zhcdicts : return if filename == _DEFAULT_DICT : zhcdicts = json . loads ( get_module_res ( filename ) . read ( ) . decode ( 'utf-8' ) ) else : with open ( filename , 'rb' ) as f : zhcdicts = json . loads ( f . read ( ) . decode ( 'utf-8' ) ) zhcdicts [ 'SIMPONLY' ] = frozenset ( zhcdicts [ 'SIMPONLY' ] ) zhcdicts [ 'TRADONLY' ] = frozenset ( zhcdicts [ 'TRADONLY' ] ) | Load the dictionary from a specific JSON file . |
52,342 | def getdict ( locale ) : global zhcdicts , dict_zhcn , dict_zhsg , dict_zhtw , dict_zhhk , pfsdict if zhcdicts is None : loaddict ( DICTIONARY ) if locale == 'zh-cn' : if dict_zhcn : got = dict_zhcn else : dict_zhcn = zhcdicts [ 'zh2Hans' ] . copy ( ) dict_zhcn . update ( zhcdicts [ 'zh2CN' ] ) got = dict_zhcn elif locale == 'zh-tw' : if dict_zhtw : got = dict_zhtw else : dict_zhtw = zhcdicts [ 'zh2Hant' ] . copy ( ) dict_zhtw . update ( zhcdicts [ 'zh2TW' ] ) got = dict_zhtw elif locale == 'zh-hk' or locale == 'zh-mo' : if dict_zhhk : got = dict_zhhk else : dict_zhhk = zhcdicts [ 'zh2Hant' ] . copy ( ) dict_zhhk . update ( zhcdicts [ 'zh2HK' ] ) got = dict_zhhk elif locale == 'zh-sg' or locale == 'zh-my' : if dict_zhsg : got = dict_zhsg else : dict_zhsg = zhcdicts [ 'zh2Hans' ] . copy ( ) dict_zhsg . update ( zhcdicts [ 'zh2SG' ] ) got = dict_zhsg elif locale == 'zh-hans' : got = zhcdicts [ 'zh2Hans' ] elif locale == 'zh-hant' : got = zhcdicts [ 'zh2Hant' ] else : got = { } if locale not in pfsdict : pfsdict [ locale ] = getpfset ( got ) return got | Generate or get convertion dict cache for certain locale . Dictionaries are loaded on demand . |
52,343 | def convtable2dict ( convtable , locale , update = None ) : rdict = update . copy ( ) if update else { } for r in convtable : if ':uni' in r : if locale in r : rdict [ r [ ':uni' ] ] = r [ locale ] elif locale [ : - 1 ] == 'zh-han' : if locale in r : for word in r . values ( ) : rdict [ word ] = r [ locale ] else : v = fallback ( locale , r ) for word in r . values ( ) : rdict [ word ] = v return rdict | Convert a list of conversion dict to a dict for a certain locale . |
52,344 | def tokenize ( s , locale , update = None ) : zhdict = getdict ( locale ) pfset = pfsdict [ locale ] if update : zhdict = zhdict . copy ( ) zhdict . update ( update ) newset = set ( ) for word in update : for ch in range ( len ( word ) ) : newset . add ( word [ : ch + 1 ] ) pfset = pfset | newset ch = [ ] N = len ( s ) pos = 0 while pos < N : i = pos frag = s [ pos ] maxword = None maxpos = 0 while i < N and frag in pfset : if frag in zhdict : maxword = frag maxpos = i i += 1 frag = s [ pos : i + 1 ] if maxword is None : maxword = s [ pos ] pos += 1 else : pos = maxpos + 1 ch . append ( maxword ) return ch | Tokenize s according to corresponding locale dictionary . Don t use this for serious text processing . |
52,345 | def get_qiniu_config ( name , default = None ) : config = os . environ . get ( name , getattr ( settings , name , default ) ) if config is not None : if isinstance ( config , six . string_types ) : return config . strip ( ) else : return config else : raise ImproperlyConfigured ( "Can't find config for '%s' either in environment" "variable or in setting.py" % name ) | Get configuration variable from environment variable or django setting . py |
52,346 | def load_from_file ( filename ) : if os . path . isdir ( filename ) : logger . error ( "Err: File '%s' is a directory" , filename ) return None if not os . path . isfile ( filename ) : logger . error ( "Err: File '%s' does not exist" , filename ) return None try : with open ( filename , 'r' ) as sourcefile : songs = [ line . strip ( ) for line in sourcefile ] except IOError as error : logger . exception ( error ) return None songs = set ( Song . from_filename ( song ) for song in songs ) return songs . difference ( { None } ) | Load a list of filenames from an external text file . |
52,347 | def parse_argv ( ) : parser = argparse . ArgumentParser ( description = 'Find lyrics for a set of mp3' ' files and embed them as metadata' ) parser . add_argument ( '-j' , '--jobs' , help = 'Number of parallel processes' , type = int , metavar = 'N' , default = 1 ) parser . add_argument ( '-o' , '--overwrite' , help = 'Overwrite lyrics of songs' ' that already have them' , action = 'store_true' ) parser . add_argument ( '-s' , '--stats' , help = 'Print a series of statistics at' ' the end of the execution' , action = 'store_true' ) parser . add_argument ( '-v' , '--verbose' , help = 'Set verbosity level (pass it' ' up to three times)' , action = 'count' ) parser . add_argument ( '-d' , '--debug' , help = 'Enable debug output' , action = 'store_true' ) group = parser . add_mutually_exclusive_group ( ) group . add_argument ( '-r' , '--recursive' , help = 'Recursively search for' ' mp3 files' , metavar = 'path' , nargs = '?' , const = '.' ) group . add_argument ( '--from-file' , help = 'Read a list of files from a text' ' file' , type = str ) parser . add_argument ( 'songs' , help = 'The files/songs to search lyrics for' , nargs = '*' ) args = parser . parse_args ( ) CONFIG [ 'overwrite' ] = args . overwrite CONFIG [ 'print_stats' ] = args . stats if args . verbose is None or args . verbose == 0 : logger . setLevel ( logging . CRITICAL ) elif args . verbose == 1 : logger . setLevel ( logging . INFO ) else : logger . setLevel ( logging . DEBUG ) if args . jobs <= 0 : msg = 'Argument -j/--jobs should have a value greater than zero' parser . error ( msg ) else : CONFIG [ 'jobcount' ] = args . jobs songs = set ( ) if args . from_file : songs = load_from_file ( args . from_file ) if not songs : raise ValueError ( 'No file names found in file' ) elif args . recursive : mp3files = glob . iglob ( args . recursive + '/**/*.mp3' , recursive = True ) songs = set ( Song . from_filename ( f ) for f in mp3files ) elif args . songs : if os . path . exists ( args . 
songs [ 0 ] ) : parser = Song . from_filename else : parser = Song . from_string songs . update ( map ( parser , args . songs ) ) else : songs . add ( get_current_song ( ) ) return songs . difference ( { None } ) | Parse command line arguments . Settings will be stored in the global variables declared above . |
52,348 | def decode ( slug ) : if sys . version_info . major != 2 and isinstance ( slug , bytes ) : slug = slug . decode ( 'ascii' ) slug = slug + '==' return uuid . UUID ( bytes = base64 . urlsafe_b64decode ( slug ) ) | Returns the uuid . UUID object represented by the given v4 or nice slug |
52,349 | def filter_against_normal ( self , normal_mutations , maf_min = 0.2 , maf_count_threshold = 20 , count_min = 1 ) : assert ( normal_mutations . chrom == self . chrom ) assert ( normal_mutations . pos == self . pos ) assert ( normal_mutations . ref == self . ref ) def passes_normal_criteria ( mut ) : return ( mut . count >= maf_count_threshold and mut . maf > maf_min ) or ( mut . count < maf_count_threshold and mut . count > count_min ) nms = normal_mutations muts = MutationsAtSinglePosition ( self . chrom , self . pos , self . cov , self . ref ) for snv in self . snvs : if not ( snv in nms . snvs and passes_normal_criteria ( nms . snvs [ snv ] ) ) : muts . add_snv ( self . snvs [ snv ] ) for dlt in self . deletions : if not ( dlt in nms . deletions and passes_normal_criteria ( nms . deletions [ dlt ] ) ) : muts . add_deletion ( self . deletions [ dlt ] ) for ins in self . insertions : if not ( ins in nms . insertions and passes_normal_criteria ( nms . insertions [ ins ] ) ) : muts . add_insertion ( self . insertions [ ins ] ) return muts | Filters mutations that are in the given normal |
52,350 | def add_handlers ( self , room_handler = None , transaction_handler = None , user_handler = None ) : if room_handler : room = resources . Room ( room_handler , self . Api ) self . add_route ( "/rooms/{room_alias}" , room ) if transaction_handler : transaction = resources . Transaction ( transaction_handler , self . Api ) self . add_route ( "/transactions/{txn_id}" , transaction ) if user_handler : user = resources . User ( user_handler , self . Api ) self . add_route ( "/users/{user_id}" , user ) | Adds routes to Application that use specified handlers . |
52,351 | def log_mon_value ( name , value = 1 , ** kwargs ) : message = '{} => {}' . format ( name , value ) log_mon . info ( { 'metric_name' : name , 'value' : value , 'message' : message , ** kwargs } ) | simplest monitoring function to be aggregated with sum |
52,352 | def create_store ( ) : new_storage = _proxy ( 'store' ) _state . store = type ( 'store' , ( object , ) , { } ) new_storage . store = dict ( ) return new_storage . store | A helper for setting the _proxy and slapping the store object for us . |
52,353 | def request ( request_callback = None , ** kwargs ) : if request_callback is None : return lambda fn : request ( fn , ** kwargs ) else : return Request ( request_callback , ** kwargs ) . decorate_module ( request_callback ) | Chisel request decorator |
52,354 | def add ( self , client ) : if client . pool_id in self . _client_ids : log . info ( "%r is already in the penalty box. Ignoring." , client ) return release = time . time ( ) + self . _min_wait heapq . heappush ( self . _clients , ( release , ( client , self . _min_wait ) ) ) self . _client_ids . add ( client . pool_id ) | Add a client to the penalty box . |
52,355 | def get ( self ) : now = time . time ( ) while self . _clients and self . _clients [ 0 ] [ 0 ] < now : _ , ( client , last_wait ) = heapq . heappop ( self . _clients ) connect_start = time . time ( ) try : client . echo ( "test" ) self . _client_ids . remove ( client . pool_id ) yield client except ( ConnectionError , TimeoutError ) : timer = time . time ( ) - connect_start wait = min ( int ( last_wait * self . _multiplier ) , self . _max_wait ) heapq . heappush ( self . _clients , ( time . time ( ) + wait , ( client , wait ) ) ) log . info ( "%r is still down after a %s second attempt to connect. Retrying in %ss." , client , timer , wait , ) | Get any clients ready to be used . |
def string(_object):
    """Validate that input is a string, or wrap a validator with that check."""
    if is_callable(_object):
        inner = _object

        @wraps(inner)
        def decorated(value):
            ensure(isinstance(value, basestring), "not of type string")
            return inner(value)

        return decorated
    ensure(isinstance(_object, basestring), "not of type string")
def boolean(_object):
    """Validate that input is a boolean, or wrap a validator with that check."""
    if is_callable(_object):
        inner = _object

        @wraps(inner)
        def decorated(value):
            ensure(isinstance(value, bool), "not of type boolean")
            return inner(value)

        return decorated
    ensure(isinstance(_object, bool), "not of type boolean")
def dictionary(_object, *args):
    """Validate that input is a dict, or wrap a validator with that check.

    When extra positional context is supplied and the direct check fails,
    the assertion is re-raised as ``Invalid`` carrying that context.
    """
    error_msg = 'not of type dictionary'
    if is_callable(_object):
        inner = _object

        @wraps(inner)
        def decorated(value):
            ensure(isinstance(value, dict), error_msg)
            return inner(value)

        return decorated
    try:
        ensure(isinstance(_object, dict), error_msg)
    except AssertionError:
        if args:
            msg = 'did not pass validation against callable: dictionary'
            raise Invalid('', msg=msg, reason=error_msg, *args)
        raise
def array(_object):
    """Validate that input is a list, or wrap a validator with that check."""
    if is_callable(_object):
        inner = _object

        @wraps(inner)
        def decorated(value):
            ensure(isinstance(value, list), "not of type array")
            return inner(value)

        return decorated
    ensure(isinstance(_object, list), "not of type array")
def integer(_object):
    """Validate that input is an int, or wrap a validator with that check.

    NOTE(review): ``isinstance(True, int)`` is True, so booleans also pass
    this check — confirm whether that is intended.
    """
    if is_callable(_object):
        inner = _object

        @wraps(inner)
        def decorated(value):
            ensure(isinstance(value, int), "not of type int")
            return inner(value)

        return decorated
    ensure(isinstance(_object, int), "not of type int")
def constant(cls, value: Value, dtype: tf.DType = tf.float32) -> 'TensorFluent':
    """Return a constant-value TensorFluent with the given dtype."""
    tensor = tf.constant(value, dtype=dtype)
    # A constant has an empty scope and is never batched.
    return TensorFluent(tensor, [], batch=False)
def Bernoulli(cls, mean: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']:
    """Return (distribution, fluent) for a Bernoulli sample with the given mean."""
    dist = tf.distributions.Bernoulli(probs=mean.tensor, dtype=tf.bool)
    batch = mean.batch
    if batch or batch_size is None:
        sample = dist.sample()
    else:
        # Unbatched parameter with an explicit batch size: draw a batch.
        sample = dist.sample(batch_size)
        batch = True
    return (dist, TensorFluent(sample, mean.scope.as_list(), batch=batch))
def Uniform(cls, low: 'TensorFluent', high: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']:
    """Return (distribution, fluent) for a Uniform sample on [low, high)."""
    if low.scope != high.scope:
        raise ValueError('Uniform distribution: parameters must have same scope!')
    dist = tf.distributions.Uniform(low.tensor, high.tensor)
    batch = low.batch or high.batch
    if batch or batch_size is None:
        sample = dist.sample()
    else:
        sample = dist.sample(batch_size)
        batch = True
    return (dist, TensorFluent(sample, low.scope.as_list(), batch=batch))
def Normal(cls, mean: 'TensorFluent', variance: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']:
    """Return (distribution, fluent) for a Normal sample with given mean/variance."""
    if mean.scope != variance.scope:
        raise ValueError('Normal distribution: parameters must have same scope!')
    # tf.distributions.Normal is parameterised by standard deviation.
    dist = tf.distributions.Normal(mean.tensor, tf.sqrt(variance.tensor))
    batch = mean.batch or variance.batch
    if batch or batch_size is None:
        sample = dist.sample()
    else:
        sample = dist.sample(batch_size)
        batch = True
    return (dist, TensorFluent(sample, mean.scope.as_list(), batch=batch))
def Gamma(cls, shape: 'TensorFluent', scale: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']:
    """Return (distribution, fluent) for a Gamma sample with given shape/scale."""
    if shape.scope != scale.scope:
        raise ValueError('Gamma distribution: parameters must have same scope!')
    # tf.distributions.Gamma takes (concentration, rate); rate = 1 / scale.
    dist = tf.distributions.Gamma(shape.tensor, 1 / scale.tensor)
    batch = shape.batch or scale.batch
    if batch or batch_size is None:
        sample = dist.sample()
    else:
        sample = dist.sample(batch_size)
        batch = True
    return (dist, TensorFluent(sample, shape.scope.as_list(), batch=batch))
def Exponential(cls, mean: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']:
    """Return (distribution, fluent) for an Exponential sample with given mean."""
    # tf.distributions.Exponential takes a rate; rate = 1 / mean.
    dist = tf.distributions.Exponential(1 / mean.tensor)
    batch = mean.batch
    if batch or batch_size is None:
        sample = dist.sample()
    else:
        sample = dist.sample(batch_size)
        batch = True
    return (dist, TensorFluent(sample, mean.scope.as_list(), batch=batch))
def stop_gradient(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a copy of *x* with tf.stop_gradient applied at tensor level."""
    return TensorFluent(tf.stop_gradient(x.tensor), x.scope.as_list(), x.batch)
def stop_batch_gradient(cls, x: 'TensorFluent', stop_batch: tf.Tensor) -> 'TensorFluent':
    """Return a copy of *x* whose gradient is stopped only where *stop_batch* is True."""
    blocked = tf.where(stop_batch, tf.stop_gradient(x.tensor), x.tensor)
    return TensorFluent(blocked, x.scope.as_list(), x.batch)
def abs(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise abs function."""
    return cls._unary_op(x, tf.abs, tf.float32)
def exp(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise exp function."""
    return cls._unary_op(x, tf.exp, tf.float32)
def log(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise natural log function."""
    return cls._unary_op(x, tf.log, tf.float32)
def sqrt(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise sqrt function."""
    return cls._unary_op(x, tf.sqrt, tf.float32)
def cos(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise cos function."""
    return cls._unary_op(x, tf.cos, tf.float32)
def sin(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise sin function."""
    return cls._unary_op(x, tf.sin, tf.float32)
def tan(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise tan function."""
    return cls._unary_op(x, tf.tan, tf.float32)
def acos(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise arccos function."""
    return cls._unary_op(x, tf.acos, tf.float32)
def asin(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise arcsin function."""
    return cls._unary_op(x, tf.asin, tf.float32)
def atan(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise arctan function.

    Bug fix: the original passed ``tf.atan2`` (a binary op) to the unary
    helper, which invokes ``op(x.tensor)`` with a single argument and would
    fail at call time with a missing-argument error; arctan of a single
    operand is ``tf.atan``.
    """
    return cls._unary_op(x, tf.atan, tf.float32)
def round(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise round function."""
    return cls._unary_op(x, tf.round, tf.float32)
def ceil(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise ceil function."""
    return cls._unary_op(x, tf.ceil, tf.float32)
def floor(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise floor function."""
    return cls._unary_op(x, tf.floor, tf.float32)
def pow(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise pow function."""
    return cls._binary_op(x, y, tf.pow, tf.float32)
def max(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise maximum function."""
    return cls._binary_op(x, y, tf.maximum, tf.float32)
def min(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the elementwise minimum function."""
    return cls._binary_op(x, y, tf.minimum, tf.float32)
def if_then_else(cls,
                 condition: 'TensorFluent',
                 true_case: 'TensorFluent',
                 false_case: 'TensorFluent') -> 'TensorFluent':
    """Return a TensorFluent for the if-then-else control op.

    Implemented arithmetically as ``cond*true + (not cond)*false`` so that
    the fluent operators handle scope broadcasting; the result is cast back
    to bool when both branches are boolean.
    """
    truthy = TensorFluent.constant(True, tf.bool)
    falsy = TensorFluent.constant(False, tf.bool)
    selected = (condition == truthy) * true_case + (condition == falsy) * false_case
    if true_case.dtype == tf.bool and false_case.dtype == tf.bool:
        selected = selected.cast(tf.bool)
    return selected
def _binary_op(cls,
               x: 'TensorFluent',
               y: 'TensorFluent',
               op: Callable[[tf.Tensor, tf.Tensor], tf.Tensor],
               dtype: tf.DType) -> 'TensorFluent':
    """Return a TensorFluent for the binary *op* applied to fluents *x* and *y*.

    Aligns operand scopes (transpose), broadcasts shapes (reshape), casts
    both operands to *dtype*, then applies the op.
    """
    scope, perm_x, perm_y = TensorFluentScope.broadcast(x.scope.as_list(), y.scope.as_list())
    # Batched fluents keep the batch dimension on axis 0; shift each
    # permutation so it leaves that axis untouched.
    if x.batch and perm_x != []:
        perm_x = [0] + [axis + 1 for axis in perm_x]
    if y.batch and perm_y != []:
        perm_y = [0] + [axis + 1 for axis in perm_y]
    x = x.transpose(perm_x)
    y = y.transpose(perm_y)
    shape_x, shape_y = TensorFluentShape.broadcast(x.shape, y.shape)
    if shape_x is not None:
        x = x.reshape(shape_x)
    if shape_y is not None:
        y = y.reshape(shape_y)
    x = x.cast(dtype)
    y = y.cast(dtype)
    result = op(x.tensor, y.tensor)
    return TensorFluent(result, scope, batch=x.batch or y.batch)
def _unary_op(cls,
              x: 'TensorFluent',
              op: Callable[[tf.Tensor], tf.Tensor],
              dtype: tf.DType) -> 'TensorFluent':
    """Return a TensorFluent for the unary *op* applied to fluent *x*.

    The operand is cast to *dtype* first; scope and batch flag are preserved.
    """
    casted = x.cast(dtype)
    return TensorFluent(op(casted.tensor), casted.scope.as_list(), batch=casted.batch)
def _aggregation_op(cls,
                    op: Callable[[tf.Tensor, Optional[Sequence[int]]], tf.Tensor],
                    x: 'TensorFluent',
                    vars_list: List[str]) -> 'TensorFluent':
    """Return a TensorFluent for the aggregation *op* applied to fluent *x*.

    The variables in *vars_list* are reduced away and removed from the
    resulting fluent's scope.
    """
    axis = cls._varslist2axis(x, vars_list)
    reduced = op(x.tensor, axis)
    remaining = [var for var in x.scope.as_list() if var not in vars_list]
    return TensorFluent(reduced, remaining, batch=x.batch)
def _varslist2axis(cls, fluent: 'TensorFluent', vars_list: List[str]) -> List[int]:
    """Map *vars_list* to the axis indices of *fluent* they correspond to.

    Variables not present in the fluent's scope are skipped; batched
    fluents offset every axis by one because axis 0 is the batch dimension.
    """
    in_scope = fluent.scope.as_list()
    offset = 1 if fluent.batch else 0
    return [fluent.scope.index(var) + offset for var in vars_list if var in in_scope]
def cast(self, dtype: tf.DType) -> 'TensorFluent':
    """Return this fluent cast to *dtype* (self when already that dtype)."""
    if self.dtype == dtype:
        return self
    return TensorFluent(tf.cast(self.tensor, dtype), self.scope.as_list(), batch=self.batch)
def reshape(self, shape: tf.TensorShape) -> 'TensorFluent':
    """Return a TensorFluent with this fluent's tensor reshaped to *shape*."""
    reshaped = tf.reshape(self.tensor, shape)
    return TensorFluent(reshaped, self.scope.as_list(), batch=self.batch)
def transpose(self, permutation: Optional[List[int]] = None) -> 'TensorFluent':
    """Return a TensorFluent transposed by *permutation*.

    An empty permutation is a no-op and returns self; ``None`` falls
    through to ``tf.transpose``'s default (reverse all dimensions).
    """
    if permutation == []:
        return self
    transposed = tf.transpose(self.tensor, permutation)
    return TensorFluent(transposed, self.scope.as_list(), batch=self.batch)
def sum(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the sum aggregation over *vars_list*.

    Boolean fluents are cast to float32 so the reduction is numeric.
    """
    operand = self.cast(tf.float32) if self.dtype == tf.bool else self
    return self._aggregation_op(tf.reduce_sum, operand, vars_list)
def avg(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the mean aggregation over *vars_list*.

    Boolean fluents are cast to float32 so the reduction is numeric.
    """
    operand = self.cast(tf.float32) if self.dtype == tf.bool else self
    return self._aggregation_op(tf.reduce_mean, operand, vars_list)
def prod(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the product aggregation over *vars_list*.

    Boolean fluents are cast to float32 so the reduction is numeric.
    """
    operand = self.cast(tf.float32) if self.dtype == tf.bool else self
    return self._aggregation_op(tf.reduce_prod, operand, vars_list)
def maximum(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the maximum aggregation over *vars_list*."""
    return self._aggregation_op(tf.reduce_max, self, vars_list)
def minimum(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the minimum aggregation over *vars_list*."""
    return self._aggregation_op(tf.reduce_min, self, vars_list)
def forall(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the forall (logical AND) aggregation."""
    return self._aggregation_op(tf.reduce_all, self, vars_list)
def exists(self, vars_list: List[str]) -> 'TensorFluent':
    """Return the TensorFluent for the exists (logical OR) aggregation."""
    return self._aggregation_op(tf.reduce_any, self, vars_list)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.