idx: int64 (0 to 63k)
question: string (length 53 to 5.28k)
target: string (length 5 to 805)
3,500
def term(self, term, **kwargs):
    if isinstance(term, (list, tuple)):
        for t in term:
            self.term(t, **kwargs)
    else:
        self.clause(str(term), **kwargs)
    return self
Adds a term to the current query, creating a Clause and adding it to the list of clauses that make up this Query.
3,501
def is_negated(self):
    return all(clause.presence == QueryPresence.PROHIBITED for clause in self.clauses)
A negated query is one in which every clause has a presence of prohibited. These queries require some special processing to return the expected results.
3,502
def send_templated_email(recipients, template_path, context=None,
                         from_email=settings.DEFAULT_FROM_EMAIL,
                         fail_silently=False, extra_headers=None):
    recipient_pks = [r.pk for r in recipients if isinstance(r, get_user_model())]
    recipient_emails = [e for e in recipients if not isinstance(e, get_user_model())]
    send = _send_task.delay if use_celery else _send
    msg = send(recipient_pks, recipient_emails, template_path, context,
               from_email, fail_silently, extra_headers=extra_headers)
    return msg
Recipients can be either a list of emails or a list of users. If it is users, the system will switch to the language that each user has set as their mother tongue.
3,503
def remove_duplicates(seq):
    last_boundary = False
    for char in seq:
        if char == '\x00':
            if not last_boundary:
                last_boundary = True
                yield char
        else:
            last_boundary = False
            yield char
Removes duplicate boundary token characters from the given character iterable.
3,504
def pretty_print_str(self):
    retval = ''
    todo = [self.root]
    while todo:
        current = todo.pop()
        for char in reversed(sorted(current.keys())):
            todo.append(current[char])
        indent = ' ' * (current.depth * 2)
        retval += indent + current.__unicode__() + '\n'
    return retval.rstrip('\n')
Create a string to pretty-print this trie to standard output.
3,505
def _reset_suffix_links(self):
    self._suffix_links_set = False
    for current, _parent in self.dfs():
        current.suffix = None
        current.dict_suffix = None
        current.longest_prefix = None
Reset all suffix links in all nodes in this trie.
3,506
def _set_suffix_links(self):
    self._suffix_links_set = True
    for current, parent in self.bfs():
        if parent is None:
            continue
        current.longest_prefix = parent.longest_prefix
        if parent.has_value:
            current.longest_prefix = parent
        if current.has_suffix:
            continue
        suffix = parent
        while True:
            if not suffix.has_suffix:
                current.suffix = self.root
                break
            else:
                suffix = suffix.suffix
                if current.uplink in suffix:
                    current.suffix = suffix[current.uplink]
                    break
        suffix = current.suffix
        while not suffix.has_value and suffix.has_suffix:
            suffix = suffix.suffix
        if suffix.has_value:
            current.dict_suffix = suffix
Sets all suffix links in all nodes in this trie.
3,507
def greedy_replace(self, seq):
    if not self._suffix_links_set:
        self._set_suffix_links()
    current = self.root
    buffered = ''
    outstr = ''
    for char in seq:
        while char not in current:
            if current.has_dict_suffix:
                current = current.dict_suffix
                outstr += buffered[:-current.depth]
                outstr += current.value
                buffered = ''
                current = self.root
                break
            elif current.has_suffix:
                current = current.suffix
                if current.depth:
                    outstr += buffered[:-current.depth]
                    buffered = buffered[-current.depth:]
                else:
                    outstr += buffered
                    buffered = ''
                    break
            else:
                current = self.root
                outstr += buffered
                buffered = ''
                break
        if char in current:
            buffered += char
            current = current[char]
            if current.has_value:
                outstr += buffered[:-current.depth]
                outstr += current.value
                buffered = ''
                current = self.root
        else:
            assert current is self.root
            outstr += buffered + char
            buffered = ''
    if current.has_dict_suffix:
        current = current.dict_suffix
        outstr += buffered[:-current.depth]
        outstr += current.value
    else:
        outstr += buffered
    return outstr
Greedily matches strings in seq and replaces them with their node values.
3,508
def _write_mo(mo):
    classNotFound = False
    if UcsUtils.FindClassIdInMoMetaIgnoreCase(mo.classId) == None:
        classNotFound = True
    tabsize = 8
    outstr = "\n"
    if classNotFound:
        outstr += "Managed Object\t\t\t:\t" + str(UcsUtils.WordU(mo.classId)) + "\n"
    else:
        outstr += "Managed Object\t\t\t:\t" + str(mo.propMoMeta.name) + "\n"
    outstr += "-" * len("Managed Object") + "\n"
    if not classNotFound:
        for prop in UcsUtils.GetUcsPropertyMetaAttributeList(mo.propMoMeta.name):
            propMeta = UcsUtils.GetUcsPropertyMeta(mo.propMoMeta.name, prop)
            if propMeta.access == UcsPropertyMeta.Internal:
                continue
            val = mo.getattr(prop)
            outstr += str(prop).ljust(tabsize * 4) + ':' + str(val) + "\n"
    else:
        for prop in mo.__dict__:
            if prop in ['classId', 'XtraProperty', 'handle', 'propMoMeta', 'dirtyMask', 'child']:
                continue
            val = mo.__dict__[prop]
            outstr += str(UcsUtils.WordU(prop)).ljust(tabsize * 4) + ':' + str(val) + "\n"
    if mo.__dict__.has_key('XtraProperty'):
        for xtraProp in mo.__dict__['XtraProperty']:
            outstr += '[X]' + str(UcsUtils.WordU(xtraProp)).ljust(tabsize * 4) + ':' + str(mo.__dict__['XtraProperty'][xtraProp]) + "\n"
    outstr += str("Ucs").ljust(tabsize * 4) + ':' + str(mo.handle._ucs) + "\n"
    outstr += "\n"
    return outstr
Method to return the string representation of a managed object.
3,509
def WriteObject(moList):
    from Ucs import Dn
    from UcsHandle import UcsMoDiff
    tabsize = 8
    if isinstance(moList, _GenericMO) == True:
        print str(moList)
    elif isinstance(moList, ExternalMethod) == True:
        if hasattr(moList, "OutConfigs") == True:
            for child in moList.OutConfigs.GetChild():
                if isinstance(child, ManagedObject) == True:
                    WriteObject(child)
    elif isinstance(moList, ManagedObject) == True:
        print str(_write_mo(moList))
    elif (isinstance(moList, list) == True) and (len(moList) > 0):
        if isinstance(moList[0], UcsMoDiff):
            print "Dn".ljust(tabsize * 10), "InputObject".ljust(tabsize * 4), "SideIndicator".ljust(tabsize * 3), "DiffProperty"
            print "--".ljust(tabsize * 10), "-----------".ljust(tabsize * 4), "-------------".ljust(tabsize * 3), "------------"
        for mo in moList:
            if isinstance(mo, ManagedObject) == True:
                print str(_write_mo(mo))
            elif isinstance(mo, Dn) == True:
                print mo.getattr("value")
            elif isinstance(mo, UcsMoDiff) == True:
                WriteMoDiff(mo)
Writes the managed object to the terminal in the form of key-value pairs.
3,510
def childWriteXml(self, w, option):
    ch = []
    for c in self.child:
        ch.append(c.WriteXml(w, option))
    return ch
Method writes the XML representation for the object.
3,511
def setattr(self, key, value):
    if UcsUtils.FindClassIdInMoMetaIgnoreCase(self.classId) != None:
        if key in _ManagedObjectMeta[self.classId]:
            propMeta = UcsUtils.GetUcsPropertyMeta(self.classId, key)
            if propMeta.ValidatePropertyValue(value) == False:
                return False
            if propMeta.mask != None:
                self.dirtyMask |= propMeta.mask
            self.__dict__[key] = value
        else:
            self.__dict__['XtraProperty'][key] = value
    else:
        self.__dict__['XtraProperty'][UcsUtils.WordU(key)] = value
This method sets an attribute of a managed object.
3,512
def getattr(self, key):
    if (key == "classId") and (self.__dict__.has_key(key)):
        return self.__dict__[key]
    if UcsUtils.FindClassIdInMoMetaIgnoreCase(self.classId):
        if self.__dict__.has_key(key):
            if key in _ManagedObjectMeta[self.classId]:
                return self.__dict__[key]
        else:
            if self.__dict__.has_key('XtraProperty'):
                if self.__dict__['XtraProperty'].has_key(key):
                    return self.__dict__['XtraProperty'][UcsUtils.WordU(key)]
                else:
                    raise AttributeError(key)
            else:
                print "No XtraProperty in mo:", self.classId, " key:", key
    else:
        if self.__dict__['XtraProperty'].has_key(key):
            return self.__dict__['XtraProperty'][UcsUtils.WordU(key)]
        elif key == "Dn" or key == "Rn":
            return None
        else:
            raise AttributeError(key)
This method gets the attribute value of a managed object.
3,513
def MarkDirty(self):
    if (UcsUtils.FindClassIdInMoMetaIgnoreCase(self.classId) == None) and (not self.IsDirty()):
        self.dirtyMask = ManagedObject.DUMMYDIRTY
    else:
        self.dirtyMask = self.propMoMeta.mask
This method marks the managed object as dirty.
3,514
def MakeRn(self):
    rnPattern = self.propMoMeta.rn
    for prop in re.findall("\[([^\]]*)\]", rnPattern):
        if prop in UcsUtils.GetUcsPropertyMetaAttributeList(self.classId):
            if self.getattr(prop) != None:
                rnPattern = re.sub('\[%s\]' % prop, '%s' % self.getattr(prop), rnPattern)
            else:
                raise UcsValidationException('Property "%s" was None in MakeRn' % prop)
        else:
            raise UcsValidationException('Property "%s" was not found in MakeRn arguments' % prop)
    return rnPattern
This method returns the Rn for a managed object.
3,515
def LoadFromXml(self, node, handle):
    self.SetHandle(handle)
    if node.hasAttributes():
        attributes = node.attributes
        attCount = len(attributes)
        for i in range(attCount):
            attNode = attributes.item(i)
            attr = UcsUtils.WordU(attNode.localName)
            if UcsUtils.FindClassIdInMoMetaIgnoreCase(self.classId) != None:
                if attr in UcsUtils.GetUcsPropertyMetaAttributeList(self.classId):
                    self.setattr(attr, str(attNode.value))
                else:
                    self.setattr(UcsUtils.WordU(attr), str(attNode.value))
            else:
                self.setattr(UcsUtils.WordU(attr), str(attNode.value))
        if self.getattr("Rn") == None and self.getattr("Dn") != None:
            self.setattr("Rn", str(re.sub(r'^.*/', '', self.getattr("Dn"))))
    if node.hasChildNodes():
        childList = node.childNodes
        childCount = len(childList)
        for i in range(childCount):
            childNode = childList.item(i)
            if childNode.nodeType != Node.ELEMENT_NODE:
                continue
            if childNode.localName in self.propMoMeta.fieldNames:
                pass
            c = ManagedObject(UcsUtils.WordU(childNode.localName))
            self.child.append(c)
            c.LoadFromXml(childNode, handle)
Method updates the object from the XML representation of the managed object.
3,516
def setattr(self, key, value):
    if key in _MethodFactoryMeta[self.classId]:
        self.__dict__[key] = value
    elif key == 'errorCode':
        self.errorCode = value
    elif key == 'errorDescr':
        self.errorDescr = value
    elif key == 'invocationResult':
        self.invocationResult = value
    elif key == 'response':
        self.response = value
    else:
        return None
This method sets an attribute of an external method object.
3,517
def getattr(self, key):
    if key in _MethodFactoryMeta[self.classId]:
        return self.__dict__[key]
    else:
        return None
This method gets the attribute value of an external method object.
3,518
def getErrorResponse(self, errorCode, errorDescr):
    self.errorCode = errorCode
    self.errorDescr = errorDescr
    self.response = "yes"
    return self
This method sets the error attributes of an external method object.
3,519
def GetUcsPropertyMeta(classId, key):
    if classId in _ManagedObjectMeta:
        if key in _ManagedObjectMeta[classId]:
            return _ManagedObjectMeta[classId][key]
    return None
Method returns the property meta of the provided key for the given classId.
3,520
def GetUcsMethodMeta(classId, key):
    if classId in _MethodFactoryMeta:
        if key in _MethodFactoryMeta[classId]:
            return _MethodFactoryMeta[classId][key]
    return None
Method returns the method meta of the ExternalMethod.
3,521
def GetUcsPropertyMetaAttributeList(classId):
    if classId in _ManagedObjectMeta:
        attrList = _ManagedObjectMeta[classId].keys()
        attrList.remove("Meta")
        return attrList
    if classId in _MethodFactoryMeta:
        attrList = _MethodFactoryMeta[classId].keys()
        attrList.remove("Meta")
        return attrList
    nci = UcsUtils.FindClassIdInMoMetaIgnoreCase(classId)
    if nci != None:
        attrList = _ManagedObjectMeta[nci].keys()
        attrList.remove("Meta")
        return attrList
    nci = UcsUtils.FindClassIdInMethodMetaIgnoreCase(classId)
    if nci != None:
        attrList = _MethodFactoryMeta[nci].keys()
        attrList.remove("Meta")
        return attrList
    return None
Method returns the attribute list from the class meta for the given classId.
3,522
def IsPropertyInMetaIgnoreCase(classId, key):
    if classId in _ManagedObjectMeta:
        for prop in _ManagedObjectMeta[classId]:
            if prop.lower() == key.lower():
                return _ManagedObjectMeta[classId][prop]
    if classId in _MethodFactoryMeta:
        for prop in _MethodFactoryMeta[classId]:
            if prop.lower() == key.lower():
                return _MethodFactoryMeta[classId][prop]
    return None
Method returns the property meta of the provided key for the given classId. The given key is case-insensitive.
3,523
def CheckRegistryKey(javaKey):
    from _winreg import ConnectRegistry, HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
    path = None
    try:
        aReg = ConnectRegistry(None, HKEY_LOCAL_MACHINE)
        rk = OpenKey(aReg, javaKey)
        for i in range(1024):
            currentVersion = QueryValueEx(rk, "CurrentVersion")
            if currentVersion != None:
                key = OpenKey(rk, currentVersion[0])
                if key != None:
                    path = QueryValueEx(key, "JavaHome")
                    return path[0]
    except Exception, err:
        WriteUcsWarning("Not able to access registry.")
        return None
Method checks for Java in the registry entries.
3,524
def GetJavaInstallationPath():
    import os, platform
    if platform.system() == "Linux":
        path = os.environ.get('JAVA_HOME')
        if not path:
            raise UcsValidationException("Please make sure JAVA is installed and variable JAVA_HOME is set properly.")
        else:
            path = os.path.join(path, 'bin')
            path = os.path.join(path, 'javaws')
            if not os.path.exists(path):
                raise UcsValidationException("javaws is not installed on System.")
            else:
                return path
    elif platform.system() == "Windows" or platform.system() == "Microsoft":
        path = os.environ.get('JAVA_HOME')
        if path == None:
            path = UcsUtils.CheckRegistryKey(r"SOFTWARE\\JavaSoft\\Java Runtime Environment\\")
        if path == None:
            path = UcsUtils.CheckRegistryKey(r"SOFTWARE\\Wow6432Node\\JavaSoft\\Java Runtime Environment")
        if not path:
            raise UcsValidationException("Please make sure JAVA is installed.")
        else:
            path = os.path.join(path, 'bin')
            path = os.path.join(path, 'javaws.exe')
            if not os.path.exists(path):
                raise UcsValidationException("javaws.exe is not installed on System.")
            else:
                return path
Method returns the Java installation path in a Windows or Linux environment.
3,525
def DownloadFile(hUcs, source, destination):
    import urllib2
    from sys import stdout
    from time import sleep
    httpAddress = "%s/%s" % (hUcs.Uri(), source)
    file_name = httpAddress.split('/')[-1]
    req = urllib2.Request(httpAddress)
    req.add_header('Cookie', 'ucsm-cookie=%s' % (hUcs._cookie))
    res = urllib2.urlopen(req)
    meta = res.info()
    file_size = int(meta.getheaders("Content-Length")[0])
    print "Downloading: %s Bytes: %s" % (file_name, file_size)
    f = open(destination, 'wb')
    file_size_dl = 0
    block_sz = 8192
    while True:
        rBuffer = res.read(block_sz)
        if not rBuffer:
            break
        file_size_dl += len(rBuffer)
        f.write(rBuffer)
        status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
        status = status + chr(8) * (len(status) + 1)
        stdout.write("\r%s" % status)
        stdout.flush()
    f.close()
Method provides the functionality to download a file from UCS. This method is used in BackupUcs and GetTechSupport to download files from UCS.
3,526
def GetSyncMoConfigFilePath():
    return os.path.join(os.path.join(os.path.dirname(__file__), "resources"), "SyncMoConfig.xml")
Method returns the path of the SyncMoConfig.xml file.
3,527
def GetSyncMoConfig(ConfigDoc):
    moConfigMap = {}
    configList = ConfigDoc.getElementsByTagName("mo")
    for moConfigNode in configList:
        classId = None
        noun = None
        version = None
        actionVersion = None
        action = None
        ignoreReason = None
        status = None
        excludeList = None
        if moConfigNode.hasAttribute("classid"):
            classId = moConfigNode.getAttribute("classid")
        if moConfigNode.hasAttribute("noun"):
            noun = moConfigNode.getAttribute("noun")
        if moConfigNode.hasAttribute("version"):
            version = moConfigNode.getAttribute("version")
        if moConfigNode.hasAttribute("actionVersion"):
            actionVersion = moConfigNode.getAttribute("actionVersion")
        if moConfigNode.hasAttribute("action"):
            action = moConfigNode.getAttribute("action")
        if moConfigNode.hasAttribute("ignoreReason"):
            ignoreReason = moConfigNode.getAttribute("ignoreReason")
        if moConfigNode.hasAttribute("status"):
            status = moConfigNode.getAttribute("status")
        if moConfigNode.hasAttribute("excludeList"):
            excludeList = moConfigNode.getAttribute("excludeList")
        moConfig = None
        if classId:
            moConfig = SyncMoConfig(classId, noun, version, actionVersion,
                                    action, ignoreReason, status, excludeList)
        if moConfig:
            if classId in moConfigMap:
                moConfigMap[classId] = moConfig
            else:
                moConfigList = []
                moConfigList.append(moConfig)
                moConfigMap[classId] = moConfigList
    return moConfigMap
Internal support method for SyncManagedObject.
3,528
def Expandkey(key, clen):
    import sha
    from string import join
    from array import array
    blocks = (clen + 19) / 20
    xkey = []
    seed = key
    for i in xrange(blocks):
        seed = sha.new(key + seed).digest()
        xkey.append(seed)
    j = join(xkey, '')
    return array('L', j)
Internal method supporting encryption and decryption functionality.
3,529
def EncryptPassword(password, key):
    from time import time
    from array import array
    import hmac
    import sha
    import os
    import base64
    H = UcsUtils.GetShaHash
    uhash = H(','.join(str(x) for x in [`time()`, `os.getpid()`, `len(password)`, password, key]))[:16]
    k_enc, k_auth = H('enc' + key + uhash), H('auth' + key + uhash)
    n = len(password)
    passwordStream = array('L', password + '0000'[n & 3:])
    xkey = UcsUtils.Expandkey(k_enc, n + 4)
    for i in xrange(len(passwordStream)):
        passwordStream[i] = passwordStream[i] ^ xkey[i]
    ct = uhash + passwordStream.tostring()[:n]
    auth = hmac.new(ct, k_auth, sha).digest()
    encryptStr = ct + auth[:8]
    encodedStr = base64.encodestring(encryptStr)
    encryptedPassword = encodedStr.rstrip('\n')
    return encryptedPassword
Encrypts the password using the given key.
3,530
def DecryptPassword(cipher, key):
    import base64
    import hmac
    import sha
    from array import array
    H = UcsUtils.GetShaHash
    cipher = cipher + "\n"
    cipher = base64.decodestring(cipher)
    n = len(cipher) - 16 - 8
    uhash = cipher[:16]
    passwordStream = cipher[16:-8] + "0000"[n & 3:]
    auth = cipher[-8:]
    k_enc, k_auth = H('enc' + key + uhash), H('auth' + key + uhash)
    vauth = hmac.new(cipher[-8:], k_auth, sha).digest()[:8]
    passwordStream = array('L', passwordStream)
    xkey = UcsUtils.Expandkey(k_enc, n + 4)
    for i in xrange(len(passwordStream)):
        passwordStream[i] = passwordStream[i] ^ xkey[i]
    decryptedPassword = passwordStream.tostring()[:n]
    return decryptedPassword
Decrypts the password using the key with which it was originally encrypted.
3,531
def LoadFromXml(self, node):
    import os
    self.classId = node.localName
    metaClassId = UcsUtils.FindClassIdInMoMetaIgnoreCase(self.classId)
    if metaClassId:
        self.classId = metaClassId
    if node.hasAttribute(NamingPropertyId.DN):
        self.dn = node.getAttribute(NamingPropertyId.DN)
        if self.dn:
            self.rn = os.path.basename(self.dn)
    self.WriteToAttributes(node)
    if node.hasChildNodes():
        childList = node.childNodes
        childCount = len(childList)
        for i in range(childCount):
            childNode = childList.item(i)
            if childNode.nodeType != Node.ELEMENT_NODE:
                continue
            c = _GenericMO()
            self.child.append(c)
            c.LoadFromXml(childNode)
Method updates the object from the XML.
3,532
def WriteXml(self, w, option, elementName=None):
    if elementName == None:
        x = w.createElement(self.classId)
    else:
        x = w.createElement(elementName)
    for prop in self.__dict__['properties']:
        x.setAttribute(UcsUtils.WordL(prop), self.__dict__['properties'][prop])
    x_child = self.childWriteXml(w, option)
    for xc in x_child:
        if xc != None:
            x.appendChild(xc)
    return x
Method writes the XML representation of the generic managed object.
3,533
def ToManagedObject(self):
    from Ucs import ClassFactory
    cln = UcsUtils.WordU(self.classId)
    mo = ClassFactory(cln)
    if mo and (isinstance(mo, ManagedObject) == True):
        metaClassId = UcsUtils.FindClassIdInMoMetaIgnoreCase(self.classId)
        for property in self.properties:
            if UcsUtils.WordU(property) in UcsUtils.GetUcsPropertyMetaAttributeList(metaClassId):
                mo.setattr(UcsUtils.WordU(property), self.properties[property])
            else:
                WriteUcsWarning("Property %s Not Exist in MO %s" % (UcsUtils.WordU(property), metaClassId))
        if len(self.child):
            for ch in self.child:
                moch = ch.ToManagedObject()
                mo.child.append(moch)
        return mo
    else:
        return None
Method creates and returns a ManagedObject instance using the classId and information from the generic managed object.
3,534
def FromManagedObject(self):
    import os
    if isinstance(self.mo, ManagedObject) == True:
        self.classId = self.mo.classId
        if self.mo.getattr('Dn'):
            self.dn = self.mo.getattr('Dn')
        if self.mo.getattr('Rn'):
            self.rn = self.mo.getattr('Rn')
        elif self.dn:
            self.rn = os.path.basename(self.dn)
        for property in UcsUtils.GetUcsPropertyMetaAttributeList(self.mo.classId):
            self.properties[property] = self.mo.getattr(property)
        if len(self.mo.child):
            for ch in self.mo.child:
                if not ch.getattr('Dn'):
                    _Dn = self.mo.getattr('Dn') + "/" + ch.getattr('Rn')
                    ch.setattr('Dn', _Dn)
                gmo = _GenericMO(mo=ch)
                self.child.append(gmo)
Method creates and returns a _GenericMO instance using the classId and other information from the managed object.
3,535
def GetChildClassId(self, classId):
    childList = []
    for ch in self.child:
        if ch.classId.lower() == classId.lower():
            childList.append(ch)
    return childList
Method extracts and returns the list of child objects matching the given classId.
3,536
def _total_seconds(t):
    return sum([
        int(t.days * 86400 + t.seconds),
        int(round(t.microseconds / 1000000.0)),
    ])
Takes a datetime.timedelta object and returns the delta in seconds.
3,537
def day(t, now=None, format='%B %d'):
    t1 = _to_date(t)
    t2 = _to_date(now or datetime.datetime.now())
    diff = t1 - t2
    secs = _total_seconds(diff)
    days = abs(diff.days)
    if days == 0:
        return _('today')
    elif days == 1:
        if secs < 0:
            return _('yesterday')
        else:
            return _('tomorrow')
    elif days == 7:
        if secs < 0:
            return _('last week')
        else:
            return _('next week')
    else:
        return t1.strftime(format)
Date delta compared to t. You can override now to specify what date to compare to.
3,538
def duration(t, now=None, precision=1, pad=', ', words=None,
             justnow=datetime.timedelta(seconds=10)):
    if words is None:
        words = precision == 1
    t1 = _to_datetime(t)
    t2 = _to_datetime(now or datetime.datetime.now())
    if t1 < t2:
        format = _('%s ago')
    else:
        format = _('%s from now')
    result, remains = delta(t1, t2, words=words, justnow=justnow)
    if result in (
        _('just now'),
        _('yesterday'), _('tomorrow'),
        _('last week'), _('next week'),
    ):
        return result
    elif precision > 1 and remains:
        t3 = t2 - datetime.timedelta(seconds=remains)
        return pad.join([
            result,
            duration(t2, t3, precision - 1, pad, words=False),
        ])
    else:
        return format % (result,)
Time delta compared to t. You can override now to specify what time to compare to.
3,539
def search(self, query_string):
    query = self.create_query()
    parser = QueryParser(query_string, query)
    parser.parse()
    return self.query(query)
Performs a search against the index using lunr query syntax.
3,540
def create_query(self, fields=None):
    if fields is None:
        return Query(self.fields)
    non_contained_fields = set(fields) - set(self.fields)
    if non_contained_fields:
        raise BaseLunrException(
            "Fields {} are not part of the index", non_contained_fields)
    return Query(fields)
Convenience method to create a Query with the Index's fields.
3,541
def load(cls, serialized_index):
    from lunr import __TARGET_JS_VERSION__
    if isinstance(serialized_index, basestring):
        serialized_index = json.loads(serialized_index)
    if serialized_index["version"] != __TARGET_JS_VERSION__:
        logger.warning(
            "Version mismatch when loading serialized index. "
            "Current version of lunr {} does not match that of serialized "
            "index {}".format(__TARGET_JS_VERSION__, serialized_index["version"]))
    field_vectors = {
        ref: Vector(elements)
        for ref, elements in serialized_index["fieldVectors"]
    }
    tokenset_builder = TokenSetBuilder()
    inverted_index = {}
    for term, posting in serialized_index["invertedIndex"]:
        tokenset_builder.insert(term)
        inverted_index[term] = posting
    tokenset_builder.finish()
    return Index(
        fields=serialized_index["fields"],
        field_vectors=field_vectors,
        inverted_index=inverted_index,
        token_set=tokenset_builder.root,
        pipeline=Pipeline.load(serialized_index["pipeline"]),
    )
Load a serialized index.
3,542
def configure(logstash_host=None, logstash_port=None, logdir=None):
    if not (logstash_host or logstash_port or logdir):
        raise ValueError('you must specify at least one parameter')
    config.logstash.host = logstash_host or config.logstash.host
    config.logstash.port = logstash_port or config.logstash.port
    config.logdir = logdir or config.logdir
    create_logdir(config.logdir)
Configuration settings.
3,543
def new_logger(name):
    log = get_task_logger(name)
    handler = logstash.LogstashHandler(config.logstash.host, config.logstash.port)
    log.addHandler(handler)
    create_logdir(config.logdir)
    handler = TimedRotatingFileHandler(
        '%s.json' % join(config.logdir, name),
        when='midnight',
        utc=True,
    )
    handler.setFormatter(JSONFormatter())
    log.addHandler(handler)
    return TaskCtxAdapter(log, {})
Return a new logger which will log both to logstash and to a file in JSON format.
3,544
def _zmq_socket_context(context, socket_type, bind_endpoints):
    socket = context.socket(socket_type)
    try:
        for endpoint in bind_endpoints:
            try:
                socket.bind(endpoint)
            except Exception:
                _logger.fatal("Could not bind to '%s'.", endpoint)
                raise
        yield socket
    finally:
        socket.close()
A ZeroMQ socket context that both constructs a socket and closes it.
3,545
def _get_with_fallback(config, section, option, fallback):
    exists = (config.has_section(section) and
              config.has_option(section, option))
    if not exists:
        return fallback
    else:
        return config.get(section, option)
Get a configuration value, using fallback for missing values.
3,546
def run(options, exit_codeword=None):
    QUERY_ENDP_OPT = 'query-bind-endpoint'
    STREAM_ENDP_OPT = 'streaming-bind-endpoint'
    ZMQ_NTHREADS = "zmq-nthreads"
    if not options.has_section(config.DEFAULT_SECTION):
        msg = "Missing default section, `{0}`."
        fmsg = msg.format(config.DEFAULT_SECTION)
        raise config.ConfigurationError(fmsg)
    if not options.has_option(config.DEFAULT_SECTION, QUERY_ENDP_OPT):
        msg = "Missing (query) bind endpoint in option file: {0}:{1}"
        fmsg = msg.format(config.DEFAULT_SECTION, QUERY_ENDP_OPT)
        raise config.ConfigurationError(fmsg)
    queryendp = options.get(config.DEFAULT_SECTION, QUERY_ENDP_OPT).split(",")
    streamendp = _get_with_fallback(options, config.DEFAULT_SECTION,
                                    STREAM_ENDP_OPT, '').split(",")
    queryendp = filter(lambda x: x.strip(), queryendp)
    streamendp = filter(lambda x: x.strip(), streamendp)
    try:
        eventstore = config.construct_eventstore(options)
    except config.ConfigurationError as e:
        _logger.exception("Could instantiate event store from config file.")
        raise
    zmq_nthreads = _get_with_fallback(options, config.DEFAULT_SECTION,
                                      ZMQ_NTHREADS, '3')
    try:
        zmq_nthreads = int(zmq_nthreads)
    except ValueError:
        msg = "{0}:{1} must be an integer".format(config.DEFAULT_SECTION,
                                                  ZMQ_NTHREADS)
        _logger.fatal(msg)
        return 1
    with _zmq_context_context(zmq_nthreads) as context, \
         _zmq_socket_context(context, zmq.REP, queryendp) as querysock, \
         _zmq_socket_context(context, zmq.PUB, streamendp) as streamsock:
        runner = _RewindRunner(eventstore, querysock, streamsock,
                               (exit_codeword.encode()
                                if exit_codeword else None))
        runner.run()
    return 0
Actually execute the program.
3,547
def main(argv=None):
    parser = argparse.ArgumentParser(
        description='Event storage and event proxy.',
        usage='%(prog)s <configfile>')
    parser.add_argument('--exit-codeword', metavar="MSG", dest="exit_message",
                        default=None,
                        help="An incoming message that makes"
                             " Rewind quit. Used for testing.")
    parser.add_argument('configfile')
    args = argv if argv is not None else sys.argv[1:]
    args = parser.parse_args(args)
    config = configparser.SafeConfigParser()
    with open(args.configfile) as f:
        config.readfp(f)
    exitcode = run(config, args.exit_message)
    return exitcode
Entry point for Rewind.
3,548
def generate(self):
    key = self._propose_new_key()
    while self.key_exists(key):
        _logger.warning('Previous candidate was used.'
                        ' Regenerating another...')
        key = self._propose_new_key()
    return key
Generate a new string and return it.
3,549
def _handle_one_message(self):
    result = True
    requesttype = self.query_socket.recv()
    if requesttype == b"PUBLISH":
        self._handle_incoming_event()
    elif requesttype == b"QUERY":
        self._handle_event_query()
    elif (self.exit_message is not None and
          requesttype == self.exit_message):
        _logger.warn("Asked to quit through an exit message."
                     "I'm quitting...")
        self.query_socket.send(b'QUIT')
        result = False
    else:
        _logger.warn("Could not identify request type: %s", requesttype)
        self._handle_unknown_command()
    return result
Handle a single incoming message on any socket.
3,550
def _handle_unknown_command(self):
    while self.query_socket.getsockopt(zmq.RCVMORE):
        self.query_socket.recv()
    self.query_socket.send(b"ERROR Unknown request type")
Handle an unknown RES command.
3,551
def _handle_event_query(self):
    assert self.query_socket.getsockopt(zmq.RCVMORE)
    fro = self.query_socket.recv().decode()
    assert self.query_socket.getsockopt(zmq.RCVMORE)
    to = self.query_socket.recv().decode()
    assert not self.query_socket.getsockopt(zmq.RCVMORE)
    _logger.debug("Incoming query: (from, to)=(%s, %s)", fro, to)
    try:
        events = self.eventstore.get_events(fro if fro else None,
                                            to if to else None)
    except eventstores.EventStore.EventKeyDoesNotExistError:
        _logger.exception("A client requested a key that does not"
                          " exist:")
        self.query_socket.send(b"ERROR Key did not exist")
        return
    MAX_ELMNTS_PER_REQ = 100
    events = itertools.islice(events, 0, MAX_ELMNTS_PER_REQ)
    events = list(events)
    if len(events) == MAX_ELMNTS_PER_REQ:
        for eventid, eventdata in events[:-1]:
            self.query_socket.send(eventid.encode(), zmq.SNDMORE)
            self.query_socket.send(eventdata, zmq.SNDMORE)
        lasteventid, lasteventdata = events[-1]
        self.query_socket.send(lasteventid.encode(), zmq.SNDMORE)
        self.query_socket.send(lasteventdata)
    else:
        for eventid, eventdata in events:
            self.query_socket.send(eventid.encode(), zmq.SNDMORE)
            self.query_socket.send(eventdata, zmq.SNDMORE)
        self.query_socket.send(b"END")
Handle an incoming event query.
3,552
def _handle_incoming_event(self):
    eventstr = self.query_socket.recv()
    newid = self.id_generator.generate()
    assert newid not in (b"QUERY", b"PUBLISH"), \
        "Generated ID must not be part of req/rep vocabulary."
    assert not newid.startswith("ERROR"), \
        "Generated ID must not be part of req/rep vocabulary."
    self.eventstore.add_event(newid, eventstr)
    self.streaming_socket.send(newid.encode(), zmq.SNDMORE)
    self.streaming_socket.send(self.oldid.encode(), zmq.SNDMORE)
    self.streaming_socket.send(eventstr)
    self.oldid = newid
    assert not self.query_socket.getsockopt(zmq.RCVMORE)
    self.query_socket.send(b"PUBLISHED")
Handle an incoming event.
3,553
def idf(posting, document_count):
    documents_with_term = 0
    for field_name in posting:
        if field_name == "_index":
            continue
        documents_with_term += len(posting[field_name].keys())
    x = (document_count - documents_with_term + 0.5) / (documents_with_term + 0.5)
    return math.log(1 + abs(x))
A function to calculate the inverse document frequency for a posting. This is shared between the builder and the index.
3,554
def check_config_options(_class, required_options, optional_options, options):
    for opt in required_options:
        if opt not in options:
            msg = "Required option missing: {0}"
            raise ConfigurationError(msg.format(opt))
    for opt in options:
        if opt not in (required_options + optional_options):
            msg = "Unknown config option to `{0}`: {1}"
            _logger.warn(msg.format(_class, opt))
Helper method to check options.
3,555
def accessed(filename):
    if isinstance(filename, file):
        filename = filename.name
    return duration(os.stat(filename)[stat.ST_ATIME])
Retrieve how long ago a file has been accessed.
3,556
def created(filename):
    if isinstance(filename, file):
        filename = filename.name
    return duration(os.stat(filename)[stat.ST_CTIME])
Retrieve how long ago a file has been created.
3,557
def modified(filename):
    if isinstance(filename, file):
        filename = filename.name
    return duration(os.stat(filename)[stat.ST_MTIME])
Retrieve how long ago a file has been modified.
3,558
def size(filename, format='decimal'):
    if isinstance(filename, file):
        filename = filename.name
    return filesize(os.stat(filename)[stat.ST_SIZE], format)
Retrieve the size of a file.
3,559
def show_more(context, label=None, loading=settings.LOADING):
    data = utils.get_data_from_context(context)
    page = data['page']
    if page.has_next():
        request = context['request']
        page_number = page.next_page_number()
        querystring_key = data['querystring_key']
        querystring = utils.get_querystring_for_page(
            request, page_number, querystring_key,
            default_number=data['default_number'])
        return {
            'label': label,
            'loading': loading,
            'path': iri_to_uri(data['override_path'] or request.path),
            'querystring': querystring,
            'querystring_key': querystring_key,
            'request': request,
        }
    return {}
Show the link to get the next page in a Twitter-like pagination.
3,560
def show_more_table(context, label=None, loading=settings.LOADING):
    return show_more(context, label, loading)
Show the link to get the next page in a Twitter-like pagination, in a template for a table.
3,561
def generate_trimmer(word_characters):
    start_re = r"^[^{}]+".format(word_characters)
    end_re = r"[^{}]+$".format(word_characters)

    def trimmer(token, i=None, tokens=None):
        def trim(s, metadata=None):
            s = re.sub(start_re, "", s)
            s = re.sub(end_re, "", s)
            return s
        return token.update(trim)

    return trimmer
Returns a trimmer function from a string of word characters.
3,562
def camelcase(string):
    out = slug(string).replace('-', ' ').title().replace(' ', '')
    return out[0].lower() + out[1:]
Return a string in lowerCamelCase.
3,563
def position_for_index(self, index):
    if not self.elements:
        return 0
    start = 0
    end = int(len(self.elements) / 2)
    slice_length = end - start
    pivot_point = int(slice_length / 2)
    pivot_index = self.elements[pivot_point * 2]
    while slice_length > 1:
        if pivot_index < index:
            start = pivot_point
        elif pivot_index > index:
            end = pivot_point
        else:
            break
        slice_length = end - start
        pivot_point = start + int(slice_length / 2)
        pivot_index = self.elements[pivot_point * 2]
    if pivot_index == index:
        return pivot_point * 2
    elif pivot_index > index:
        return pivot_point * 2
    else:
        return (pivot_point + 1) * 2
Calculates the position within the vector to insert a given index.
3,564
def insert(self, insert_index, val):
    def prevent_duplicates(index, val):
        raise BaseLunrException("Duplicate index")
    self.upsert(insert_index, val, prevent_duplicates)
Inserts an element at an index within the vector.
3,565
def upsert(self, insert_index, val, fn=None):
    fn = fn or (lambda current, passed: passed)
    self._magnitude = 0
    position = self.position_for_index(insert_index)
    if position < len(self.elements) and self.elements[position] == insert_index:
        self.elements[position + 1] = fn(self.elements[position + 1], val)
    else:
        self.elements.insert(position, val)
        self.elements.insert(position, insert_index)
Inserts or updates an existing index within the vector.
3,566
def to_list(self):
    output = []
    for i in range(1, len(self.elements), 2):
        output.append(self.elements[i])
    return output
Converts the vector to an array of the elements within the vector.
3,567
def dot(self, other):
    dot_product = 0
    a = self.elements
    b = other.elements
    a_len = len(a)
    b_len = len(b)
    i = j = 0
    while i < a_len and j < b_len:
        a_val = a[i]
        b_val = b[j]
        if a_val < b_val:
            i += 2
        elif a_val > b_val:
            j += 2
        else:
            dot_product += a[i + 1] * b[j + 1]
            i += 2
            j += 2
    return dot_product
Calculates the dot product of this vector and another vector.
3,568
def similarity(self, other):
    if self.magnitude == 0 or other.magnitude == 0:
        return 0
    return self.dot(other) / self.magnitude
Calculates the cosine similarity between this vector and another vector.
3,569
def bban_base10(number):
    number = bban_compact(number)
    number = number[4:] + number[:4]
    return ''.join([str(IBAN_ALPHABET.index(char)) for char in number])
Printable Basic Bank Account Number in base-10.
3,570
def _add_scheme():
    lists = [
        urllib.parse.uses_relative,
        urllib.parse.uses_netloc,
        urllib.parse.uses_query,
    ]
    for l in lists:
        l.append('mongodb')
urllib.parse doesn't support the mongodb scheme, but it's easy to make it so.
3,571
def field(self, field_name, boost=1, extractor=None):
    if "/" in field_name:
        raise ValueError("Field {} contains illegal character `/`")
    self._fields[field_name] = Field(field_name, boost, extractor)
Adds a field to the list of document fields that will be indexed.
3,572
def b(self, number):
    if number < 0:
        self._b = 0
    elif number > 1:
        self._b = 1
    else:
        self._b = number
A parameter to tune the amount of field length normalisation that is applied when calculating relevance scores.
3,573
def add(self, doc, attributes=None):
    doc_ref = str(doc[self._ref])
    self._documents[doc_ref] = attributes or {}
    self.document_count += 1
    for field_name, field in self._fields.items():
        extractor = field.extractor
        field_value = doc[field_name] if extractor is None else extractor(doc)
        tokens = Tokenizer(field_value)
        terms = self.pipeline.run(tokens)
        field_ref = FieldRef(doc_ref, field_name)
        field_terms = defaultdict(int)
        self.field_term_frequencies[str(field_ref)] = field_terms
        self.field_lengths[str(field_ref)] = len(terms)
        for term in terms:
            term_key = str(term)
            field_terms[term_key] += 1
            if term_key not in self.inverted_index:
                posting = {_field_name: {} for _field_name in self._fields}
                posting["_index"] = self.term_index
                self.term_index += 1
                self.inverted_index[term_key] = posting
            if doc_ref not in self.inverted_index[term_key][field_name]:
                self.inverted_index[term_key][field_name][doc_ref] = defaultdict(list)
            for metadata_key in self.metadata_whitelist:
                metadata = term.metadata[metadata_key]
                self.inverted_index[term_key][field_name][doc_ref][metadata_key].append(metadata)
Adds a document to the index.
3,574
def build(self):
    self._calculate_average_field_lengths()
    self._create_field_vectors()
    self._create_token_set()
    return Index(
        inverted_index=self.inverted_index,
        field_vectors=self.field_vectors,
        token_set=self.token_set,
        fields=list(self._fields.keys()),
        pipeline=self.search_pipeline,
    )
Builds the index, creating an instance of lunr.Index.
3,575
def _create_token_set(self):
    self.token_set = TokenSet.from_list(sorted(list(self.inverted_index.keys())))
Creates a token set of all tokens in the index using lunr.TokenSet.
3,576
def _calculate_average_field_lengths(self):
    accumulator = defaultdict(int)
    documents_with_field = defaultdict(int)
    for field_ref, length in self.field_lengths.items():
        _field_ref = FieldRef.from_string(field_ref)
        field = _field_ref.field_name
        documents_with_field[field] += 1
        accumulator[field] += length
    for field_name in self._fields:
        accumulator[field_name] /= documents_with_field[field_name]
    self.average_field_length = accumulator
Calculates the average document length for this index.
3,577
def _create_field_vectors(self):
    field_vectors = {}
    term_idf_cache = {}
    for field_ref, term_frequencies in self.field_term_frequencies.items():
        _field_ref = FieldRef.from_string(field_ref)
        field_name = _field_ref.field_name
        field_length = self.field_lengths[field_ref]
        field_vector = Vector()
        field_boost = self._fields[field_name].boost
        doc_boost = self._documents[_field_ref.doc_ref].get("boost", 1)
        for term, tf in term_frequencies.items():
            term_index = self.inverted_index[term]["_index"]
            if term not in term_idf_cache:
                idf = Idf(self.inverted_index[term], self.document_count)
                term_idf_cache[term] = idf
            else:
                idf = term_idf_cache[term]
            score = (idf * ((self._k1 + 1) * tf)
                     / (self._k1 * (1 - self._b + self._b
                                    * (field_length / self.average_field_length[field_name]))
                        + tf))
            score *= field_boost
            score *= doc_boost
            score_with_precision = round(score, 3)
            field_vector.insert(term_index, score_with_precision)
        field_vectors[field_ref] = field_vector
    self.field_vectors = field_vectors
Builds a vector space model of every document using lunr.Vector.
3,578
def estimate(coll, filter={}, sample=1):
    total = coll.estimated_document_count()
    if not filter and sample == 1:
        return total
    if sample <= 1:
        sample *= total
    pipeline = list(builtins.filter(None, [
        {'$sample': {'size': sample}} if sample < total else {},
        {'$match': filter},
        {'$count': 'matched'},
    ]))
    docs = next(coll.aggregate(pipeline))
    ratio = docs['matched'] / sample
    return int(total * ratio)
Estimate the number of documents in the collection matching the filter.
3,579
def render(self, data, accepted_media_type=None, renderer_context=None):
    wrapper = None
    success = False
    for wrapper_name in self.wrappers:
        wrapper_method = getattr(self, wrapper_name)
        try:
            wrapper = wrapper_method(data, renderer_context)
        except WrapperNotApplicable:
            pass
        else:
            success = True
            break
    if not success:
        raise WrapperNotApplicable(
            'No acceptable wrappers found for response.',
            data=data, renderer_context=renderer_context)
    renderer_context["indent"] = 4
    return super(JsonApiMixin, self).render(
        data=wrapper,
        accepted_media_type=accepted_media_type,
        renderer_context=renderer_context)
Convert native data to JSON API.
3,580
def wrap_parser_error(self, data, renderer_context):
    response = renderer_context.get("response", None)
    status_code = response and response.status_code
    if status_code != 400:
        raise WrapperNotApplicable('Status code must be 400.')
    if list(data.keys()) != ['detail']:
        raise WrapperNotApplicable('Data must only have "detail" key.')
    view = renderer_context.get("view", None)
    model = self.model_from_obj(view)
    if 'detail' in model._meta.get_all_field_names():
        raise WrapperNotApplicable()
    return self.wrap_error(
        data, renderer_context, keys_are_fields=False, issue_is_title=False)
Convert parser errors to the JSON API Error format.
3,581
def wrap_field_error(self, data, renderer_context):
    response = renderer_context.get("response", None)
    status_code = response and response.status_code
    if status_code != 400:
        raise WrapperNotApplicable('Status code must be 400.')
    return self.wrap_error(
        data, renderer_context, keys_are_fields=True, issue_is_title=False)
Convert field error native data to the JSON API Error format.
3,582
def wrap_generic_error(self, data, renderer_context):
    response = renderer_context.get("response", None)
    status_code = response and response.status_code
    is_error = (
        status.is_client_error(status_code) or
        status.is_server_error(status_code)
    )
    if not is_error:
        raise WrapperNotApplicable("Status code must be 4xx or 5xx.")
    return self.wrap_error(
        data, renderer_context, keys_are_fields=False, issue_is_title=True)
Convert generic error native data using the JSON API Error format.
3,583
def wrap_error(self, data, renderer_context, keys_are_fields, issue_is_title):
    response = renderer_context.get("response", None)
    status_code = str(response and response.status_code)
    errors = []
    for field, issues in data.items():
        if isinstance(issues, six.string_types):
            issues = [issues]
        for issue in issues:
            error = self.dict_class()
            error["status"] = status_code
            if issue_is_title:
                error["title"] = issue
            else:
                error["detail"] = issue
            if keys_are_fields:
                if field in ('non_field_errors', NON_FIELD_ERRORS):
                    error["path"] = '/-'
                else:
                    error["path"] = '/' + field
            errors.append(error)
    wrapper = self.dict_class()
    wrapper["errors"] = errors
    return wrapper
Convert error native data to the JSON API Error format.
3,584
def wrap_options(self, data, renderer_context):
    request = renderer_context.get("request", None)
    method = request and getattr(request, 'method')
    if method != 'OPTIONS':
        raise WrapperNotApplicable("Request method must be OPTIONS")
    wrapper = self.dict_class()
    wrapper["meta"] = data
    return wrapper
Wrap OPTIONS data as a JSON API meta value.
3,585
def wrap_paginated(self, data, renderer_context):
    pagination_keys = ['count', 'next', 'previous', 'results']
    for key in pagination_keys:
        if not (data and key in data):
            raise WrapperNotApplicable('Not paginated results')
    view = renderer_context.get("view", None)
    model = self.model_from_obj(view)
    resource_type = self.model_to_resource_type(model)
    try:
        from rest_framework.utils.serializer_helpers import ReturnList
        results = ReturnList(
            data["results"],
            serializer=data.serializer.fields["results"],
        )
    except ImportError:
        results = data["results"]
    wrapper = self.wrap_default(results, renderer_context)
    pagination = self.dict_class()
    pagination['previous'] = data['previous']
    pagination['next'] = data['next']
    pagination['count'] = data['count']
    wrapper.setdefault('meta', self.dict_class())
    wrapper['meta'].setdefault('pagination', self.dict_class())
    wrapper['meta']['pagination'].setdefault(
        resource_type, self.dict_class()).update(pagination)
    return wrapper
Convert paginated data to JSON API with meta.
3,586
def wrap_default(self, data, renderer_context):
    wrapper = self.dict_class()
    view = renderer_context.get("view", None)
    request = renderer_context.get("request", None)
    model = self.model_from_obj(view)
    resource_type = self.model_to_resource_type(model)
    if isinstance(data, list):
        many = True
        resources = data
    else:
        many = False
        resources = [data]
    items = []
    links = self.dict_class()
    linked = self.dict_class()
    meta = self.dict_class()
    for resource in resources:
        converted = self.convert_resource(resource, data, request)
        item = converted.get('data', {})
        linked_ids = converted.get('linked_ids', {})
        if linked_ids:
            item["links"] = linked_ids
        items.append(item)
        links.update(converted.get('links', {}))
        linked = self.update_nested(linked, converted.get('linked', {}))
        meta.update(converted.get('meta', {}))
    if many:
        wrapper[resource_type] = items
    else:
        wrapper[resource_type] = items[0]
    if links:
        links = self.prepend_links_with_name(links, resource_type)
        wrapper["links"] = links
    if linked:
        wrapper["linked"] = linked
    if meta:
        wrapper["meta"] = meta
    return wrapper
Convert native data to a JSON API resource collection.
3,587
def acquire_lock(self):
    try:
        self.collection.insert_one(dict(_id=self.id))
    except pymongo.errors.DuplicateKeyError:
        pass
    unlocked_spec = dict(_id=self.id, locked=None)
    lock_timer = (
        timers.Timer.after(self.lock_timeout)
        if self.lock_timeout
        else timers.NeverExpires()
    )
    while not lock_timer.expired():
        locked_spec = {'$set': dict(locked=datetime.datetime.utcnow())}
        res = self.collection.update_one(unlocked_spec, locked_spec)
        if res.raw_result['updatedExisting']:
            break
        time.sleep(0.1)
    else:
        raise LockTimeout(f"Timeout acquiring lock for {self.id}")
    self.locked = True
Acquire the lock. Blocks indefinitely until the lock is available, unless lock_timeout was supplied. If the lock_timeout elapses, raises LockTimeout.
3,588
def set_boot_device(self, device, persistent=False):
    operation = "set_boot_device"
    try:
        self.sp_manager.create_boot_policy()
        self.sp_manager.set_boot_device(device)
    except UcsException as ex:
        raise exception.UcsOperationError(operation=operation, error=ex)
Set the boot device for the node.
3,589
def get_boot_device(self):
    operation = 'get_boot_device'
    try:
        boot_device = self.sp_manager.get_boot_device()
        return boot_device
    except UcsException as ex:
        print(_("Cisco client exception: %(msg)s."), {'msg': ex})
        raise exception.UcsOperationError(operation=operation, error=ex)
Get the current boot device for the node.
3,590
def lunr(ref, fields, documents, languages=None):
    if languages is not None and lang.LANGUAGE_SUPPORT:
        if isinstance(languages, basestring):
            languages = [languages]
        unsupported_languages = set(languages) - set(lang.SUPPORTED_LANGUAGES)
        if unsupported_languages:
            raise RuntimeError(
                "The specified languages {} are not supported, "
                "please choose one of {}".format(
                    ", ".join(unsupported_languages),
                    ", ".join(lang.SUPPORTED_LANGUAGES.keys()),
                ))
        builder = lang.get_nltk_builder(languages)
    else:
        builder = Builder()
        builder.pipeline.add(trimmer, stop_word_filter, stemmer)
        builder.search_pipeline.add(stemmer)
    builder.ref(ref)
    for field in fields:
        if isinstance(field, dict):
            builder.field(**field)
        else:
            builder.field(field)
    for document in documents:
        if isinstance(document, (tuple, list)):
            builder.add(document[0], attributes=document[1])
        else:
            builder.add(document)
    return builder.build()
A convenience function to configure and construct a lunr.Index.
3,591
def from_config(_config, **options):
    expected_args = ('path',)
    rconfig.check_config_options("SQLiteEventStore", expected_args, tuple(), options)
    return SQLiteEventStore(options['path'])
Instantiate an SQLite event store from config.
3,592
def key_exists(self, key):
    assert isinstance(key, str)
    cursor = self.conn.cursor()
    with contextlib.closing(cursor):
        cursor.execute('SELECT COUNT(*) FROM events WHERE uuid=?', (key,))
        res = cursor.fetchone()
        count = res[0]
        if count == 0:
            return False
        else:
            assert count in (0, 1), \
                "Duplicate event ids detected: {0}".format(count)
            return True
Check whether a key exists in the event store.
3,593
def count(self):
    cursor = self.conn.cursor()
    with contextlib.closing(cursor):
        cursor.execute('SELECT COUNT(*) FROM events')
        res = cursor.fetchone()
        return res[0]
Return the number of events in the db.
3,594
def close(self):
    if self.conn:
        self.conn.close()
        self.conn = None
    fname = os.path.basename(self._path)
    checksum_persister = _get_checksum_persister(self._path)
    hasher = _initialize_hasher(self._path)
    with contextlib.closing(checksum_persister):
        checksum_persister[fname] = hasher.hexdigest()
Close the event store.
3,595
def from_config(config, **options):
    expected_args = ('path',)
    rconfig.check_config_options("LogEventStore", expected_args, tuple(), options)
    return LogEventStore(options['path'])
Instantiate a LogEventStore from config.
3,596
def key_exists(self, key):
    assert isinstance(key, str)
    self._close()
    try:
        return self._unsafe_key_exists(key)
    finally:
        self._open()
Check if key has previously been added to this store.
3,597
def close(self):
    fname = os.path.basename(self._path)
    checksum_persister = _get_checksum_persister(self._path)
    with contextlib.closing(checksum_persister):
        checksum_persister[fname] = self._hasher.hexdigest()
    self._close()
Persist a checksum and close the file.
3,598
def from_config(config, **options):
    expected_args = ('prefix', 'realclass')
    for arg in expected_args:
        if arg not in options:
            msg = "Required option missing: {0}"
            raise rconfig.ConfigurationError(msg.format(arg))
    classpath = options['realclass']
    classpath_pieces = classpath.split('.')
    classname = classpath_pieces[-1]
    modulepath = '.'.join(classpath_pieces[0:-1])
    module = importlib.import_module(modulepath)
    estore_class = getattr(module, classname)
    return RotatedEventStore(lambda fname: estore_class(fname),
                             options['path'], options['prefix'])
Instantiate a RotatedEventStore from config.
3,599
def _construct_filename(self, batchno):
    return os.path.join(self.dirpath,
                        "{0}.{1}".format(self.prefix, batchno))
Construct a filename for a database.