idx
int64 0
63k
| question
stringlengths 53
5.28k
| target
stringlengths 5
805
|
|---|---|---|
3,400
|
def send(self, output_name, frame):
    """Send an output frame to every input method connected to *output_name*."""
    for receiver in self._component_connections[output_name]:
        receiver(frame)
|
Send an output frame .
|
3,401
|
def start_event(self):
    """Called by the event loop when it is started.

    Creates the output frame pools (when enabled) and runs the component's
    ``on_start`` hook; any failure aborts the event loop via StopIteration.
    """
    if self.with_outframe_pool:
        self.update_config()
        pool_len = self.config['outframe_pool_len']
        for name in self.outputs:
            self.outframe_pool[name] = ObjectPool(Frame, self.new_frame, pool_len)
    try:
        self.on_start()
    except Exception as err:
        self.logger.exception(err)
        raise StopIteration()
|
Called by the event loop when it is started .
|
3,402
|
def stop_event(self):
    """Called by the event loop when it is stopped.

    Runs the ``on_stop`` hook, then sends ``None`` on every output to tell
    downstream components the stream has ended.
    """
    self.logger.debug('stopping')
    try:
        self.on_stop()
    except Exception as err:
        self.logger.exception(err)
    # Propagate end-of-stream markers downstream.
    for output_name in self.outputs:
        self.send(output_name, None)
|
Called by the event loop when it is stopped .
|
3,403
|
def is_pipe_end(self):
    """Return True when no output is connected, i.e. this component is the
    last one in a pipeline."""
    return not any(
        self._component_connections[name] for name in self.outputs)
|
Is component the last one in a pipeline .
|
3,404
|
def new_config_event(self):
    """Called by the event loop when new config is available.

    Applies the config via ``on_set_config``; a failure stops the loop.
    """
    try:
        self.on_set_config()
    except Exception as err:
        self.logger.exception(err)
        raise StopIteration()
|
Called by the event loop when new config is available .
|
3,405
|
def new_frame_event(self):
    """Called by the event loop when a new input or output frame is available.

    Returns early (i.e. waits) until every output pool can supply a frame
    and every input buffer holds one, aligns numbered inputs by frame
    number, then calls ``process_frame``.  A ``None`` input frame or an
    exception from ``process_frame`` stops the event loop.
    """
    # Do nothing until every output pool can supply a frame.
    for out_pool in self.outframe_pool.values():
        if not out_pool.available():
            return
    frame_nos = {}
    for in_buff in self.input_buffer.values():
        # Do nothing until every input has at least one frame queued.
        if not in_buff.available():
            return
        in_frame = in_buff.peek()
        if in_frame is None:
            # A None frame signals end of stream.
            raise StopIteration()
        if in_frame.frame_no >= 0:
            frame_nos[in_buff] = in_frame.frame_no
        else:
            # Negative frame number: unsynchronised input, keep only the
            # most recent frame.
            while in_buff.available() > 1 and in_buff.peek(1) is not None:
                in_buff.get()
    if len(frame_nos) > 1:
        # Several numbered inputs: discard frames older than the newest.
        frame_no = max(frame_nos.values())
        for in_buff in frame_nos:
            while frame_nos[in_buff] < frame_no and in_buff.available() > 1:
                in_buff.get()
                in_frame = in_buff.peek()
                if in_frame is None:
                    raise StopIteration()
                frame_nos[in_buff] = in_frame.frame_no
        if min(frame_nos.values()) != max(frame_nos.values()):
            # Inputs not yet aligned; wait for more frames.
            return
    try:
        self.process_frame()
    except StopIteration:
        raise
    except Exception as ex:
        self.logger.exception(ex)
        raise StopIteration()
|
Called by the event loop when a new input or output frame is available .
|
3,406
|
def get_tree_members(self):
    """Retrieve member info from this node of the tree down.

    Breadth-first walk, cycle-safe via a visited set; each member dict is
    reduced to the attributes listed in ``self.attr_list``.
    """
    members = []
    visited = set()
    queue = deque([self])
    while queue:
        node = queue.popleft()
        if node in visited:
            continue
        members.extend(node.get_member_info())
        queue.extendleft(node.get_children())
        visited.add(node)
    return [
        {attribute: member.get(attribute) for attribute in self.attr_list}
        for member in members if member
    ]
|
Retrieves all members from this node of the tree down .
|
3,407
|
def deepest_node(triple, graph):
    """Recurse down the tree and return a list of the most deeply nested
    child nodes of the given triple.

    Bug fix: the original used Python 2 tuple-parameter unpacking
    (``def deepest_node((subj, pred, obj), graph)``), which was removed by
    PEP 3113 and is a syntax error on Python 3.  Callers already pass a
    3-tuple, so the interface is unchanged.

    :param triple: ``(subj, pred, obj)`` starting triple.
    :param graph: iterable of ``(s, p, o)`` triples.
    :return: list of deepest triples whose object mentions "rNews".
    """
    to_return = []

    def _deepest_node(triple, graph):
        subj, pred, obj = triple
        children = []
        if isinstance(obj, rt.BNode):
            # Follow blank nodes: children are triples whose subject is obj.
            for s, p, o in graph:
                if str(s) == str(obj):
                    children.append((s, p, o))
            for s, p, o in children:
                s1, p1, o1 = _deepest_node((s, p, o), graph)
                if "rNews" in str(o1) and (s1, p1, o1) not in to_return:
                    to_return.append((s1, p1, o1))
            # NOTE(review): as in the original, s1/p1/o1 are unbound here
            # when a BNode has no children (pre-existing behaviour kept).
            return (s1, p1, o1)
        else:
            return (subj, pred, obj)

    _deepest_node(triple, graph)
    return to_return
|
Recurse down the tree and return a list of the most deeply nested child nodes of the given triple.
|
3,408
|
def getattribute(model, item):
    """Chained lookup of dotted *item* on *model*.

    Each dotted component is resolved with ``getattr``; callables are
    called with no arguments.  Returns ``None`` on any failure.

    Bug fix: bare ``except:`` clauses (which also swallowed
    KeyboardInterrupt/SystemExit) narrowed to ``except Exception``.
    """
    elements = item.split('.')
    element = elements.pop(0)
    try:
        attr = getattr(model, element, None)
    except Exception:  # e.g. a property that raises
        return
    if attr is None:
        return
    if callable(attr):
        try:
            attr = attr()
        except Exception:
            return
    if elements:
        return getattribute(attr, '.'.join(elements))
    return attr
|
Chained lookup of item on model
|
3,409
|
def nice_display(item):
    """Display a comma-separated list of models for M2M fields.

    Anything with an ``all()`` method (a queryset/manager) is expanded;
    other values are returned unchanged.
    """
    if not hasattr(item, 'all'):
        return item
    return ', '.join(map(text_type, item.all()))
|
Display a comma - separated list of models for M2M fields
|
3,410
|
def mostCommonItem(lst):
    """Choose the most common item from the list, or the first item if all
    items are unique.  Falsy entries are ignored; returns None when nothing
    remains.

    Bug fix: ``max(set(lst), key=lst.count)`` returned an arbitrary item
    when all counts tied (sets are unordered); ``Counter.most_common``
    breaks ties by first occurrence, matching the documented contract.
    """
    from collections import Counter
    lst = [item for item in lst if item]
    if not lst:
        return None
    return Counter(lst).most_common(1)[0][0]
|
Choose the most common item from the list or the first item if all items are unique .
|
3,411
|
def safeDbUrl(db_url):
    """Obfuscate the password component of a database URL, if present."""
    password = urlparse(db_url).password
    if not password:
        return db_url
    return db_url.replace(password, "****")
|
Obfuscates password from a database URL .
|
3,412
|
def loadJson(self, filename):
    """Read, parse and return the given JSON config file.

    Returns an empty dict when the file does not exist.

    Bug fix: the original leaked the file handle
    (``json.loads(' '.join(open(filename).readlines()))``); the file is now
    closed deterministically and parsed directly with ``json.load``.
    """
    if not os.path.isfile(filename):
        return {}
    with open(filename, 'r') as config_file:
        return json.load(config_file)
|
Read parse and return given Json config file
|
3,413
|
def get_readonly_fields(self, request, obj=None):
    """The model can't be changed once the export is created.

    On the add view (obj is None) nothing is read-only yet; on the change
    view the base class decides which fields are read-only.
    """
    if obj is None:
        return []
    return super(ExportAdmin, self).get_readonly_fields(request, obj)
|
The model can't be changed once the export is created.
|
3,414
|
def response_add(self, request, obj, post_url_continue=POST_URL_CONTINUE):
    """If we're adding, save must be "save and continue editing".

    Unless the user explicitly asked for "add another" or is in a popup,
    force the "_continue" behaviour.
    """
    wants_other_flow = (
        '_addanother' in request.POST or '_popup' in request.POST)
    if not wants_other_flow:
        request.POST['_continue'] = 1
    return super(ExportAdmin, self).response_add(
        request, obj, post_url_continue)
|
If we're adding, save must be "save and continue editing".
|
3,415
|
def above_score_threshold(new_data, old_data, strict=False,
                          threshold=PYLINT_SCORE_THRESHOLD):
    """Verify that the pylint score is above a given threshold.

    In strict mode every individual file score must reach *threshold*;
    otherwise only the average is checked.  Returns ``(success, score,
    message)``.  *old_data* is unused but kept for checker-API symmetry.

    Bug fix: the non-strict failure message hard-coded "(9)" as the
    threshold and was missing a space between its concatenated literals;
    it now reports the actual *threshold*.
    """
    success = True
    score = 0
    message = ''
    if strict:
        for fscore, fname in new_data['scores']:
            if fscore < threshold:
                success = False
                score = -1
                message += "File {} score ({}) below threshold {}\n".format(
                    fname, fscore, threshold)
        return success, score, message
    if new_data['average'] < threshold:
        success = False
        score = -1
        message = "Failed! Average pylint score ({}) below threshold ({})!".format(
            new_data['average'], threshold)
    return success, score, message
|
Verifies that the pylint score is above a given threshold .
|
3,416
|
def run(self, new_pylint_data, old_pylint_data):
    """Run the new pylint data through all current checkers, including
    comparisons to old pylint data.

    Returns the (score, message) of the first failing checker, or the
    default score and message when every checker passes.
    """
    for checker in self.checkers:
        success, score, message = checker(new_pylint_data, old_pylint_data)
        if not success:
            return score, message
    return (self.default_score,
            self.default_message.format(new_pylint_data['average']))
|
Run the new pylint data through given all current checkers including comparisons to old pylint data .
|
3,417
|
def _get_document ( self , source ) : scheme_url = source if not source . startswith ( "http" ) : scheme_url = "http://%s" % source text = source try : text = urllib . urlopen ( scheme_url ) . read ( ) except : pass else : return ( text , scheme_url ) try : text = open ( source , "r" ) . read ( ) except : pass else : return ( text , source ) return ( text , None )
|
helper open a file or url and return the content and identifier
|
3,418
|
def select_random(ports=None, exclude_ports=None):
    """Return a random unused port number.

    Samples up to 100 candidates and returns the first one not in use.
    Raises PortForException when none is free.

    Bug fix: ``random.sample`` no longer accepts a set (support removed in
    Python 3.11), so the candidate ports are materialised into a sorted
    sequence first.
    """
    if ports is None:
        ports = available_good_ports()
    if exclude_ports is None:
        exclude_ports = set()
    ports.difference_update(set(exclude_ports))
    for port in random.sample(sorted(ports), min(len(ports), 100)):
        if not port_is_used(port):
            return port
    raise PortForException("Can't select a port")
|
Returns random unused port number .
|
3,419
|
def good_port_ranges(ports=None, min_range_len=20, border=3):
    """Return a list of good port ranges.

    Good ranges are long (at least *min_range_len* after trimming) and have
    *border* ports excluded from each end.
    """
    min_range_len += border * 2
    if ports is None:
        ports = available_ports()
    ranges = utils.to_ranges(list(ports))
    by_length = sorted(((high - low, (low, high)) for low, high in ranges),
                       reverse=True)
    long_ranges = [rng for length, rng in by_length if length >= min_range_len]
    return [(low + border, high - border) for low, high in long_ranges]
|
Returns a list of good port ranges. Such ranges are large and don't contain ephemeral or well-known ports. Range borders are also excluded.
|
3,420
|
def port_is_used(port, host='127.0.0.1'):
    """Return True if *port* is used.

    A port is considered used if the current process can't bind to it or
    the port doesn't refuse connections.
    """
    return not (_can_bind(port, host) and _refuses_connection(port, host))
|
Returns whether the port is used. A port is considered used if the current process can't bind to it or the port doesn't refuse connections.
|
3,421
|
def current_day():
    """Most recent day, if it's during the Advent of Code.  Happy Holidays!
    Day 1 is assumed otherwise."""
    aoc_now = datetime.datetime.now(tz=AOC_TZ)
    if aoc_now.month != 12:
        log.warning("current_day is only available in December (EST)")
        return 1
    # Puzzles stop at day 25.
    return min(aoc_now.day, 25)
|
Most recent day, if it's during the Advent of Code. Happy Holidays! Day 1 is assumed otherwise.
|
3,422
|
def register_linter(linter):
    """Register a Linter class for file verification.

    The class must provide a ``run`` method and an ``EXTS`` attribute.
    """
    if not (hasattr(linter, "EXTS") and hasattr(linter, "run")):
        raise LinterException(
            "Linter does not have 'run' method or EXTS variable!")
    LintFactory.PLUGINS.append(linter)
|
Register a Linter class for file verification .
|
3,423
|
def escape_query(query):
    """Escape certain filter characters from an LDAP query."""
    # Backslash must be escaped first so later escapes aren't re-escaped.
    for char, escaped in (("\\", r"\5C"), ("*", r"\2A"),
                          ("(", r"\28"), (")", r"\29")):
        query = query.replace(char, escaped)
    return query
|
Escapes certain filter characters from an LDAP query .
|
3,424
|
def _validate_xor_args ( self , p ) : if len ( p [ 1 ] ) != 2 : raise ValueError ( 'Invalid syntax: XOR only accepts 2 arguments, got {0}: {1}' . format ( len ( p [ 1 ] ) , p ) )
|
Raises ValueError if 2 arguments are not passed to an XOR
|
3,425
|
def _match_value_filter ( self , p , value ) : return self . _VALUE_FILTER_MAP [ p [ 0 ] ] ( value [ p [ 1 ] ] , p [ 2 ] )
|
Returns True of False if value in the pattern p matches the filter .
|
3,426
|
def get_field_keys(self, pattern=None):
    """Build a set of all field keys used in the pattern, including
    nested (boolean-combined) sub-patterns."""
    pattern = pattern or self.pattern
    self._validate(pattern)
    keys = set()
    is_compound = (len(pattern) == 2
                   and pattern[0] not in self._KEY_FILTER_MAP)
    if not is_compound:
        # Leaf filter: the field key is the second element.
        keys.add(pattern[1])
    elif pattern[0] in ('&', '|', '^'):
        for sub_pattern in pattern[1]:
            keys |= self.get_field_keys(sub_pattern)
    else:
        keys |= self.get_field_keys(pattern[1])
    return keys
|
Builds a set of all field keys used in the pattern including nested fields .
|
3,427
|
def to_file(self, path):
    """Write metadata to an image, video, or XMP sidecar file.

    If *path* itself can't hold metadata (GExiv2 fails to open it), the
    metadata is written to a freshly created ``path + '.xmp'`` sidecar.

    Bug fix: the original called ``of.write()`` with no argument, a
    guaranteed TypeError on the sidecar path; an empty file is created
    instead.  NOTE(review): upstream may have written an XMP wrapper
    template here — confirm GExiv2 can open an empty sidecar.
    """
    xmp_path = path + '.xmp'
    # Remove any stale sidecar from a previous run.
    if os.path.exists(xmp_path):
        os.unlink(xmp_path)
    md_path = path
    md = GExiv2.Metadata()
    try:
        md.open_path(md_path)
    except GLib.GError:
        # File type does not support metadata: use an XMP sidecar.
        md_path = xmp_path
        with open(md_path, 'w') as of:
            of.write('')
        md = GExiv2.Metadata()
        md.open_path(md_path)
    md.register_xmp_namespace(
        'https://github.com/jim-easterbrook/pyctools', 'pyctools')
    for tag, value in self.data.items():
        if md.get_tag_type(tag) in ('XmpBag', 'XmpSeq'):
            md.set_tag_multiple(tag, value)
        else:
            md.set_tag_string(tag, value)
    if self.comment is not None:
        md.set_comment(self.comment)
    md.save_file(md_path)
|
Write metadata to an image video or XMP sidecar file .
|
3,428
|
def image_size(self):
    """Get image dimensions ``(xlen, ylen)`` from metadata.

    Raises RuntimeError when either dimension is missing.
    """
    def first_int(tags):
        # Return the first tag present in the metadata, as an int.
        for tag in tags:
            if tag in self.data:
                return int(self.data[tag])
        return None

    xlen = first_int(('Xmp.pyctools.xlen', 'Exif.Photo.PixelXDimension',
                      'Exif.Image.ImageWidth', 'Xmp.tiff.ImageWidth'))
    ylen = first_int(('Xmp.pyctools.ylen', 'Exif.Photo.PixelYDimension',
                      'Exif.Image.ImageLength', 'Xmp.tiff.ImageLength'))
    if xlen and ylen:
        return xlen, ylen
    raise RuntimeError('Metadata does not have image dimensions')
|
Get image dimensions from metadata .
|
3,429
|
def get(self, tag, default=None):
    """Get a metadata value from the pyctools XMP namespace."""
    return self.data.get('Xmp.pyctools.' + tag, default)
|
Get a metadata value .
|
3,430
|
def set(self, tag, value):
    """Set a metadata value in the pyctools XMP namespace."""
    self.data['Xmp.pyctools.' + tag] = value
|
Set a metadata value .
|
3,431
|
def get_connection(self, is_read_only=False) -> redis.StrictRedis:
    """Get a StrictRedis connection, for plain redis or via sentinel,
    depending on the configured redis mode.  Cached after first use."""
    if self.connection is not None:
        return self.connection
    if self.is_sentinel:
        sentinel_kwargs = {}
        if self.password:
            sentinel_kwargs["password"] = self.password
        sentinel = Sentinel([(self.host, self.port)], **sentinel_kwargs)
        getter = sentinel.slave_for if is_read_only else sentinel.master_for
        connection = getter(self.sentinel_service, decode_responses=True)
    else:
        connection = redis.StrictRedis(
            host=self.host, port=self.port,
            decode_responses=True, password=self.password)
    self.connection = connection
    return connection
|
Gets a StrictRedis connection for normal redis or for redis sentinel based upon redis mode in configuration .
|
3,432
|
def calculate_hash_for_file(name):
    """Calculate the OpenSubtitles-style hash for the given filename.

    The hash is the file size plus the 64-bit little-endian word sums of
    the first and last 64 KiB, truncated to 64 bits, formatted as a
    16-digit hex string.  Raises AssertionError for files under 128 KiB.

    Fix: the file is now closed deterministically via ``with`` (the
    original ``open``/``close`` pair leaked on any exception).
    """
    longlongformat = 'q'
    bytesize = struct.calcsize(longlongformat)
    chunk = 65536
    filesize = os.path.getsize(name)
    minimum_size = chunk * 2
    assert filesize >= minimum_size, \
        'Movie {name} must have at least {min} bytes'.format(
            min=minimum_size, name=name)

    def _sum_words(f, acc):
        # Fold one 64 KiB chunk of int64 words into acc, modulo 2**64.
        for _ in range(chunk // bytesize):
            (l_value,) = struct.unpack(longlongformat, f.read(bytesize))
            acc = (acc + l_value) & 0xFFFFFFFFFFFFFFFF
        return acc

    hash_value = filesize
    with open(name, "rb") as f:
        hash_value = _sum_words(f, hash_value)
        f.seek(max(0, filesize - chunk), 0)
        hash_value = _sum_words(f, hash_value)
    return "%016x" % hash_value
|
Calculates the hash for the given filename .
|
3,433
|
def set_config(self, config):
    """Update the component's configuration.

    A deep copy is queued (so the caller may keep mutating its object) and
    the component is notified via ``new_config``.
    """
    snapshot = copy.deepcopy(config)
    self._configmixin_queue.append(snapshot)
    self.new_config()
|
Update the component's configuration.
|
3,434
|
def GaussianFilterCore(x_sigma=0.0, y_sigma=0.0):
    """Gaussian filter generator core.

    Builds a separable 2-D Gaussian kernel (outer product of two 1-D
    kernels) and returns it wrapped in a ``Frame`` of type 'fil', with an
    audit-trail entry added to the frame metadata.
    """
    def filter_1D(sigma):
        # Generate coefficients exp(-alpha * n**2) until they drop below
        # 0.0001, then mirror them into a normalised odd-length kernel.
        alpha = 1.0 / (2.0 * (max(sigma, 0.0001) ** 2.0))
        coefs = []
        coef = 1.0
        while coef > 0.0001:
            coefs.append(coef)
            coef = math.exp(-(alpha * (float(len(coefs) ** 2))))
        fil_dim = len(coefs) - 1
        result = numpy.zeros(1 + (fil_dim * 2), dtype=numpy.float32)
        for n, coef in enumerate(coefs):
            result[fil_dim - n] = coef
            result[fil_dim + n] = coef
        # Normalise so the kernel sums to one.
        result /= result.sum()
        return result
    x_sigma = max(x_sigma, 0.0)
    y_sigma = max(y_sigma, 0.0)
    x_fil = filter_1D(x_sigma)
    y_fil = filter_1D(y_sigma)
    # Outer product of the two 1-D kernels gives the 2-D kernel.
    result = numpy.empty(
        [y_fil.shape[0], x_fil.shape[0], 1], dtype=numpy.float32)
    for y in range(y_fil.shape[0]):
        for x in range(x_fil.shape[0]):
            result[y, x, 0] = x_fil[x] * y_fil[y]
    out_frame = Frame()
    out_frame.data = result
    out_frame.type = 'fil'
    # Record how the filter was generated in the audit trail.
    audit = out_frame.metadata.get('audit')
    audit += 'data = GaussianFilter()\n'
    if x_sigma != 0.0:
        audit += ' x_sigma: %g\n' % (x_sigma)
    if y_sigma != 0.0:
        audit += ' y_sigma: %g\n' % (y_sigma)
    out_frame.metadata.set('audit', audit)
    return out_frame
|
Gaussian filter generator core .
|
3,435
|
def queue_command(self, command):
    """Put a command on the queue to be called in the component's thread."""
    if self._running:
        # Event loop is running: deliver via a low-priority Qt event.
        QtCore.QCoreApplication.postEvent(
            self, ActionEvent(command), QtCore.Qt.LowEventPriority)
    else:
        # Not started yet: run the command when the event loop starts.
        self._incoming.append(command)
|
Put a command on the queue to be called in the component's thread.
|
3,436
|
def join(self, timeout=3600):
    """Wait until the event loop terminates or *timeout* (seconds) is
    reached, pumping Qt events while waiting."""
    deadline = time.time() + timeout
    while self._running:
        remaining = deadline - time.time()
        if remaining <= 0:
            return
        QCoreApplication.processEvents(
            QEventLoop.AllEvents, int(remaining * 1000))
|
Wait until the event loop terminates or timeout is reached .
|
3,437
|
def IntraField(config=None):
    """Intra field interlace to sequential converter.

    Bug fix: ``config={}`` was a mutable default argument shared between
    calls; ``None`` is used as the sentinel instead (backward compatible —
    callers passing a dict are unaffected).
    """
    return Compound(
        config={} if config is None else config,
        deint=SimpleDeinterlace(),
        interp=Resize(),
        filgen=FilterGenerator(yaperture=8, ycut=50),
        gain=Arithmetic(func='data * pt_float(2)'),
        linkages={
            ('self', 'input'): [('deint', 'input')],
            ('deint', 'output'): [('interp', 'input')],
            ('interp', 'output'): [('self', 'output')],
            ('filgen', 'output'): [('gain', 'input')],
            ('gain', 'output'): [('interp', 'filter')],
        },
    )
|
Intra field interlace to sequential converter .
|
3,438
|
def create(self, period: int, limit: int):
    """Create a rate limiting rule: *limit* attempts per *period*."""
    self.period = period
    self.limit = limit
|
Creates a rate limiting rule with rate limiting period and attempt limit
|
3,439
|
def is_rate_limited(self, namespace: str) -> bool:
    """Check whether *namespace* is currently rate limited, without
    recording an additional attempt."""
    can_attempt = self.__can_attempt(namespace=namespace, add_attempt=False)
    return not can_attempt
|
Checks if a namespace is already rate limited or not without making any additional attempts
|
3,440
|
def main():
    """Show basic usage of the Google Calendar API: print start time and
    summary of the next 10 events on the primary calendar."""
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('calendar', 'v3', http=http)
    # 'Z' marks UTC for the RFC 3339 timestamp.
    now = datetime.datetime.utcnow().isoformat() + 'Z'
    print('Getting the upcoming 10 events')
    events_result = service.events().list(
        calendarId='primary', timeMin=now, maxResults=10,
        singleEvents=True, orderBy='startTime').execute()
    events = events_result.get('items', [])
    if not events:
        print('No upcoming events found.')
    for event in events:
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])
|
Shows basic usage of the Google Calendar API .
|
3,441
|
def fix_list_arguments(self):
    """Find arguments that should accumulate values and fix them.

    An Argument that appears more than once within a usage-pattern branch
    should collect repeated values, so its default value is set to [].
    """
    either = [list(c.children) for c in self.either.children]
    for case in either:
        # Keep only elements occurring more than once in this branch.
        # NOTE(review): this rebinds the loop variable only; the outer
        # 'either' list is deliberately left unmodified.
        case = [c for c in case if case.count(c) > 1]
        for a in [e for e in case if type(e) == Argument]:
            a.value = []
    return self
|
Find arguments that should accumulate values and fix them .
|
3,442
|
def either(self):
    """Transform pattern into an equivalent with only top-level Either.

    Repeatedly expands the first Either/Required/Optional/OneOrMore found
    in each group of children until only flat alternatives remain, then
    wraps them as Either(Required(...), ...).
    """
    if not hasattr(self, 'children'):
        # Leaf pattern: a single one-element alternative.
        return Either(Required(self))
    else:
        ret = []
        groups = [[self]]
        while groups:
            children = groups.pop(0)
            types = [type(c) for c in children]
            if Either in types:
                # Split into one group per alternative.
                either = [c for c in children if type(c) is Either][0]
                children.pop(children.index(either))
                for c in either.children:
                    groups.append([c] + children)
            elif Required in types:
                # Inline the required group's children.
                required = [c for c in children if type(c) is Required][0]
                children.pop(children.index(required))
                groups.append(list(required.children) + children)
            elif Optional in types:
                optional = [c for c in children if type(c) is Optional][0]
                children.pop(children.index(optional))
                groups.append(list(optional.children) + children)
            elif OneOrMore in types:
                # Repetition is approximated by duplicating the children.
                oneormore = [c for c in children if type(c) is OneOrMore][0]
                children.pop(children.index(oneormore))
                groups.append(list(oneormore.children) * 2 + children)
            else:
                # Fully flattened alternative.
                ret.append(children)
        return Either(*[Required(*e) for e in ret])
|
Transform pattern into an equivalent with only top - level Either .
|
3,443
|
def syncImage(img, current, session):
    """Add or update the Image on *current*, persisting via *session*.

    An image identical in (type, md5, size) is left alone; one matching
    (type, description) with a different md5 is replaced; otherwise the
    image is added.
    """
    def _img_str(i):
        return "%s - %s" % (i.type, i.description)
    for db_img in current.images:
        img_info = (img.type, img.md5, img.size)
        db_img_info = (db_img.type, db_img.md5, db_img.size)
        if db_img_info == img_info:
            # Exact match already stored; nothing to do.
            img = None
            break
        elif (db_img.type == img.type
                and db_img.description == img.description):
            if img.md5 != db_img.md5:
                # Same logical image, new content: replace it.
                current.images.remove(db_img)
                current.images.append(img)
                session.add(current)
                pout(Fg.green("Updating image") + ": " + _img_str(img))
            img = None
            break
    if img:
        current.images.append(img)
        session.add(current)
        pout(Fg.green("Adding image") + ": " + _img_str(img))
|
Add or update the Image.
|
3,444
|
def add_fields(self, form, index):
    """Filter the form's column choices.

    Lets the base formset add its fields first, then restricts the
    'column' field's choices to those returned by ``get_choices``.
    """
    super(ColumnFormSet, self).add_fields(form, index)
    form.fields['column'].choices = self.get_choices()
|
Filter the form's column choices.
|
3,445
|
def find_packages():
    """Walk the source directory tree and convert each sub-directory to a
    package name."""
    packages = ['pyctools']
    for root, dirs, files in os.walk(os.path.join('src', 'pyctools')):
        # Drop the leading 'src' component to get the dotted package path.
        parent = '.'.join(root.split(os.sep)[1:])
        packages.extend(parent + '.' + name for name in dirs)
    return packages
|
Walk source directory tree and convert each sub directory to a package name .
|
3,446
|
def extract_param(param, args, type=None):
    """From a list of args, extract the one *param* (if supplied),
    returning its value and the unused args.

    NOTE: ``type`` shadows the builtin but is kept for caller compatibility.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--' + param, type=type)
    parsed, remaining = parser.parse_known_args(args)
    return getattr(parsed, param), remaining
|
From a list of args extract the one param if supplied returning the value and unused args .
|
3,447
|
def get_nltk_builder(languages):
    """Returns a builder with stemmers for all languages added to it.

    English uses lunr's bundled stemmer and stop word filter; other
    languages use NLTK-derived stemmers and generated stop word filters.
    A single multi-language trimmer covers the union of word characters.
    """
    all_stemmers = []
    all_stopwords_filters = []
    all_word_characters = set()
    for language in languages:
        if language == "en":
            # English is lunr's default and needs no extra resources.
            all_stemmers.append(lunr.stemmer.stemmer)
            all_stopwords_filters.append(stop_word_filter)
            all_word_characters.update({r"\w"})
        else:
            stopwords, word_characters = _get_stopwords_and_word_characters(language)
            all_stemmers.append(
                Pipeline.registered_functions["stemmer-{}".format(language)])
            all_stopwords_filters.append(
                generate_stop_word_filter(stopwords, language=language))
            all_word_characters.update(word_characters)
    builder = Builder()
    multi_trimmer = generate_trimmer("".join(sorted(all_word_characters)))
    Pipeline.register_function(
        multi_trimmer, "lunr-multi-trimmer-{}".format("-".join(languages)))
    builder.pipeline.reset()
    for fn in chain([multi_trimmer], all_stopwords_filters, all_stemmers):
        builder.pipeline.add(fn)
    # The search pipeline only needs the stemmers.
    for fn in all_stemmers:
        builder.search_pipeline.add(fn)
    return builder
|
Returns a builder with stemmers for all languages added to it .
|
3,448
|
def register_languages():
    """Register all supported non-English language stemmers with the
    Pipeline, to ensure serialised indexes remain loadable."""
    for language in set(SUPPORTED_LANGUAGES) - {"en"}:
        stemmer_fn = partial(nltk_stemmer, get_language_stemmer(language))
        Pipeline.register_function(stemmer_fn, "stemmer-{}".format(language))
|
Register all supported languages to ensure compatibility .
|
3,449
|
def ordinal(value):
    """Convert a number to its ordinal representation (1st, 2nd, 11th...).

    Raises ValueError when *value* is not convertible to int.
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        raise ValueError
    # 11, 12, 13 take the default suffix despite ending in 1, 2, 3.
    if value % 100 in (11, 12, 13):
        suffix = ORDINAL_SUFFIX[0]
    else:
        suffix = ORDINAL_SUFFIX[value % 10]
    return '%d%s' % (value, suffix)
|
Converts a number to its ordinal representation .
|
3,450
|
def percentage(value, digits=2):
    """Convert a fraction to a formatted percentage string."""
    scaled = float(value) * 100.0
    return u'' + '%s %%' % (_format(scaled, digits),)
|
Converts a fraction to a formatted percentage .
|
3,451
|
def word(value, digits=2):
    """Convert a large number to a formatted number containing the textual
    suffix for that number (e.g. 1200000 -> '1.2 million').

    Raises OverflowError when the value exceeds the largest known suffix.
    """
    convention = locale.localeconv()
    decimal_point = convention['decimal_point']
    # Pattern that strips a trailing all-zero fraction ('1.00' -> '1').
    decimal_zero = re.compile(r'%s0+' % re.escape(decimal_point))
    prefix = value < 0 and '-' or ''
    value = abs(int(value))
    if value < 1000:
        # Small numbers get no suffix.
        return u''.join([
            prefix,
            decimal_zero.sub('', _format(value, digits)),
        ])
    for base, suffix in enumerate(LARGE_NUMBER_SUFFIX):
        exp = (base + 2) * 3
        power = 10 ** exp
        if value < power:
            # Scale down to units of the previous power of a thousand.
            value = value / float(10 ** (exp - 3))
            return ''.join([
                prefix,
                decimal_zero.sub('', _format(value, digits)),
                ' ',
                suffix,
            ])
    raise OverflowError
|
Converts a large number to a formatted number containing the textual suffix for that number .
|
3,452
|
def _full_rename ( args ) : return ( args . ns and all ( map ( args . rename . affects , args . ns ) ) )
|
Return True only if the arguments passed specify exact namespaces and to conduct a rename of every namespace .
|
3,453
|
def apply(db, op):
    """Apply a single oplog operation *op* in *db* via the applyOps command.

    The database is taken from the op's namespace, defaulting to 'admin'.
    """
    dbname = op['ns'].split('.')[0] or "admin"
    codec_opts = bson.CodecOptions(uuid_representation=bson.binary.STANDARD)
    db[dbname].command("applyOps", [op], codec_options=codec_opts)
|
Apply operation in db
|
3,454
|
def since(self, ts):
    """Query the oplog for items since *ts* and then return.

    Yields documents from a (tailable) cursor; after the cursor is
    drained, waits a second and continues while it is still alive.
    """
    spec = {'ts': {'$gt': ts}}
    cursor = self.query(spec)
    while True:
        # Exhaust whatever the cursor currently has.
        for doc in cursor:
            yield doc
        if not cursor.alive:
            break
        # Tailable cursor may produce more documents later.
        time.sleep(1)
|
Query the oplog for items since ts and then return
|
3,455
|
def has_ops_before(self, ts):
    """Determine if there are any ops in the oplog before *ts*."""
    return bool(self.coll.find_one({'ts': {'$lt': ts}}))
|
Determine if there are any ops before ts
|
3,456
|
def since(self, ts):
    """Tail the oplog, starting from *ts*, forever.

    Each pass resumes from the timestamp of the last document yielded.
    """
    while True:
        for doc in super(TailingOplog, self).since(ts):
            yield doc
            ts = doc['ts']
|
Tail the oplog starting from ts .
|
3,457
|
def dump(self, stream):
    """Serialize self to a text stream as ``{"ts": {"time":…, "inc":…}}``."""
    ts = collections.OrderedDict((('time', self.time), ('inc', self.inc)))
    json.dump({'ts': ts}, stream)
|
Serialize self to text stream .
|
3,458
|
def load(cls, stream):
    """Load a serialized version of self from a text stream."""
    ts_data = json.load(stream)['ts']
    return cls(ts_data['time'], ts_data['inc'])
|
Load a serialized version of self from text stream .
|
3,459
|
def for_window(cls, window):
    """Given a timedelta *window*, return a timestamp representing that
    long ago (increment 0)."""
    return cls(datetime.datetime.utcnow() - window, 0)
|
Given a timedelta window return a timestamp representing that time .
|
3,460
|
def save(self, ts):
    """Save timestamp *ts* to the file named by self."""
    with open(self, 'w') as stream:
        Timestamp.wrap(ts).dump(stream)
|
Save timestamp to file .
|
3,461
|
def Tokenizer(obj, metadata=None, separator=SEPARATOR):
    """Splits a string into tokens ready to be inserted into the search index.

    Lists/tuples are converted element-wise without splitting.  Each token
    carries 'position' ([start, length]) and 'index' metadata, merged with
    any caller-supplied *metadata*.
    """
    if obj is None:
        return []
    metadata = metadata or {}
    if isinstance(obj, (list, tuple)):
        # Pre-tokenised input: lowercase each element as its own token.
        return [Token(as_string(element).lower(), deepcopy(metadata)) for element in obj]
    string = str(obj).strip().lower()
    length = len(string)
    tokens = []
    slice_start = 0
    for slice_end in range(length):
        char = string[slice_end]
        slice_length = slice_end - slice_start
        # A token ends at a separator character or at the end of input.
        if separator.match(char) or slice_end == length - 1:
            if slice_length > 0:
                # The final character belongs to the token unless it is a
                # separator, hence the open-ended slice at end of string.
                sl = slice(slice_start, slice_end if slice_end < length - 1 else None)
                token_metadata = {}
                token_metadata["position"] = [
                    slice_start,
                    slice_length if slice_end < length - 1 else slice_length + 1,
                ]
                token_metadata["index"] = len(tokens)
                token_metadata.update(metadata)
                tokens.append(Token(string[sl], token_metadata))
            slice_start = slice_end + 1
    return tokens
|
Splits a string into tokens ready to be inserted into the search index .
|
3,462
|
def all_collections(db):
    """Yield all non-system collections in *db*."""
    # 'system.' prefixed collections are internal to MongoDB.
    non_system = r'(?!system\.)'
    return (
        db[name]
        for name in db.list_collection_names()
        if re.match(non_system, name)
    )
|
Yield all non-system collections in db.
|
3,463
|
def safe_purge_collection(coll):
    """Remove all documents from *coll*.

    Documents can't be removed from capped collections in later versions
    of MongoDB, so capped collections are dropped instead.
    """
    if coll.options().get('capped', False):
        return drop_collection(coll)
    return purge_collection(coll)
|
Cannot remove documents from capped collections in later versions of MongoDB so drop the collection instead .
|
3,464
|
def generate_stop_word_filter(stop_words, language=None):
    """Build a stopWordFilter function from the provided list of stop words
    and register it with the Pipeline."""
    def stop_word_filter(token, i=None, tokens=None):
        # Drop empty tokens and tokens found in the stop word list.
        if token and str(token) not in stop_words:
            return token

    if language is None:
        label = "stopWordFilter"
    else:
        label = "stopWordFilter-{}".format(language)
    Pipeline.register_function(stop_word_filter, label)
    return stop_word_filter
|
Builds a stopWordFilter function from the provided list of stop words .
|
3,465
|
def pesn(number, separator=u''):
    """Printable Pseudo Electronic Serial Number.

    '80' plus the last 6 hex digits of the SHA-1 of the 14-digit MEID,
    joined by *separator*.
    """
    cleaned = re.sub(r'[\s-]', '', meid(number))
    digest = hashlib.sha1(unhexlify(cleaned[:14]))
    return separator.join(['80', digest.hexdigest()[-6:].upper()])
|
Printable Pseudo Electronic Serial Number .
|
3,466
|
def filesize(value, format='decimal', digits=2):
    """Convert a file size into natural readable format.

    Supported formats are the keys of FILESIZE_SUFFIX.  Raises TypeError
    for an unknown format and OverflowError when the size exceeds the
    largest known suffix.
    """
    if format not in FILESIZE_SUFFIX:
        raise TypeError
    base = FILESIZE_BASE[format]
    size = int(value)
    sign = u'-' if size < 0 else ''
    size = abs(size)
    for exponent, suffix in enumerate(FILESIZE_SUFFIX[format]):
        unit = base ** (exponent + 1)
        if size >= unit:
            continue
        result = u''.join([
            sign,
            _format(base * size / float(unit), digits),
            u' ',
            suffix,
        ])
        # GNU style has no space before the suffix.
        return result.replace(' ', '') if format == 'gnu' else result
    raise OverflowError
|
Convert a file size into natural readable format . Multiple formats are supported .
|
3,467
|
def create_dn_in_filter(filter_class, filter_value, helper):
    """Create a FilterFilter wrapping a DN wildcard filter for the given
    class name and DN value.

    NOTE(review): *helper* is unused but kept for API compatibility.
    """
    in_filter = FilterFilter()
    wcard = create_dn_wcard_filter(filter_class, filter_value)
    in_filter.AddChild(wcard)
    return in_filter
|
Creates filter object for given class name and DN values .
|
3,468
|
def get_managed_object(handle, class_id, params,
                       inMo=None, in_heir=False, dump=False):
    """Get the specified MO from UCS Manager."""
    return handle.GetManagedObject(
        inMo, class_id, params, inHierarchical=in_heir, dumpXml=dump)
|
Get the specified MO from UCS Manager .
|
3,469
|
def config_managed_object(p_dn, p_class_id, class_id, mo_config, mo_dn,
                          handle=None, delete=True):
    """Configure the specified MO in UCS Manager.

    Bug fix: the original fallback ``handle = self.handle`` referenced an
    undefined ``self`` in this module-level function — a guaranteed
    NameError.  A missing handle now raises a clear ValueError instead.
    NOTE(review): p_dn, p_class_id, mo_dn and delete are unused — confirm
    against callers before removing them.
    """
    if handle is None:
        raise ValueError("a UCS handle is required")
    try:
        result = handle.AddManagedObject(
            None, classId=class_id, params=mo_config,
            modifyPresent=True, dumpXml=YesOrNo.FALSE)
        return result
    except UcsException as ex:
        print(_("Cisco client exception: %(msg)s"), {'msg': ex})
        raise exception.UcsOperationError('config_managed_object', error=ex)
|
Configure the specified MO in UCS Manager .
|
3,470
|
def project(*args, **kwargs):
    """Build a projection for MongoDB.

    Accepts the same arguments as ``dict``; every value is coerced to int
    (MongoDB's 0/1 include flags).

    Fix: uses ``dict.items`` directly — the ``six.iteritems`` shim is
    unnecessary on Python 3 (which this file already targets elsewhere via
    annotations).
    """
    projection = dict(*args, **kwargs)
    return {key: int(value) for key, value in projection.items()}
|
Build a projection for MongoDB .
|
3,471
|
def upsert_and_fetch(coll, doc, **kwargs):
    """Fetch exactly one document matching *doc*, upserting it if absent;
    return the matching or upserted document."""
    update_spec = {"$setOnInsert": doc}
    return coll.find_one_and_update(
        doc, update_spec, upsert=True,
        return_document=pymongo.ReturnDocument.AFTER, **kwargs)
|
Fetch exactly one matching document or upsert the document if not found returning the matching or upserted document .
|
3,472
|
def update(self, retry=2) -> None:
    """Synchronize state with the switch, retrying on Bluetooth errors.

    Marks the device unavailable once *retry* reconnect attempts fail.
    """
    try:
        _LOGGER.debug("Updating device state.")
        expected = OFF_KEY if self._flip_on_off else ON_KEY
        self.state = self._device.readCharacteristic(HANDLE) == expected
    except (bluepy.btle.BTLEException, AttributeError):
        if retry < 1 or not self._connect():
            self.available = False
            _LOGGER.error("Failed to update device state.", exc_info=True)
            return None
        return self.update(retry - 1)
    self.available = True
    return None
|
Synchronize state with switch .
|
3,473
|
def combine(self, other):
    """Merge *other*'s match metadata into this instance.

    For every term/field/key present in *other*, existing lists are
    extended and missing entries are copied in.
    """
    for term, other_fields in other.metadata.items():
        own_fields = self.metadata.setdefault(term, {})
        for field, other_keys in other_fields.items():
            own_keys = own_fields.setdefault(field, {})
            for key, values in other_keys.items():
                if key in own_keys:
                    own_keys[key].extend(values)
                else:
                    own_keys[key] = values
|
An instance of lunr . MatchData will be created for every term that matches a document .
|
3,474
|
def get_power_state(self):
    """Get the current power state of this node from its LsPower MO.

    Raises UcsOperationError when the MO cannot be retrieved.
    """
    rn_array = [self.helper.service_profile,
                ManagedObject(NamingId.LS_POWER).MakeRn()]
    try:
        ls_power = ucs_helper.get_managed_object(
            self.helper.handle, LsPower.ClassId(),
            {LsPower.DN: UcsUtils.MakeDn(rn_array)})
        if not ls_power:
            raise exception.UcsOperationError(
                "get_power_state",
                "Failed to get LsPower MO, configure valid "
                "service-profile")
        return ls_power[0].getattr(LsPower.STATE)
    except UcsException as ex:
        raise exception.UcsOperationError(message=ex)
|
Get current power state of this node
|
3,475
|
def set_power_state(self, desired_state):
    """Set the power state of this node.

    :param desired_state: target LsPower state constant.
    :returns: the state reported by UCS after the update, or
        ``states.ERROR`` when the update call returned nothing.
    :raises exception.UcsOperationError: if the power MO cannot be
        resolved or updated.
    """
    rn_array = [self.helper.service_profile,
                ManagedObject(NamingId.LS_POWER).MakeRn()]
    try:
        ls_power = ucs_helper.get_managed_object(
            self.helper.handle, LsPower.ClassId(),
            {LsPower.DN: UcsUtils.MakeDn(rn_array)})
        if not ls_power:
            raise exception.UcsOperationError(
                "set_power_state",
                "Failed to get power MO,"
                " configure valid service-profile.")
        ls_power_set = self.helper.handle.SetManagedObject(
            ls_power, LsPower.ClassId(),
            {LsPower.STATE: desired_state}, dumpXml=YesOrNo.TRUE)
        if ls_power_set:
            power = ls_power_set.pop()
            return power.getattr(LsPower.STATE)
        return states.ERROR
    except Exception:
        # Any failure above (including the not-found raise) surfaces as
        # an operation error.  Fixed typo: "servie" -> "service", and
        # added the missing space after the comma in the message.
        raise exception.UcsOperationError(
            "set_power_state",
            "Failed to get power MO, configure valid service-profile.")
|
Set power state of this node
|
3,476
|
def reboot(self):
    """Hard-reset the power of this node.

    A powered-down node is simply powered up; a running node gets an
    immediate hard reset.
    """
    if self.get_power_state() == LsPower.CONST_STATE_DOWN:
        target = LsPower.CONST_STATE_UP
    else:
        target = LsPower.CONST_STATE_HARD_RESET_IMMEDIATE
    self.set_power_state(target)
|
Hard reset the power of this node .
|
3,477
|
def connect(uri, factory=pymongo.MongoClient):
    """Deprecated: construct a client for ``uri`` via ``factory``.

    Kept for backward compatibility only — instantiate
    ``pymongo.MongoClient`` directly instead.
    """
    warnings.warn(
        "do not use. Just call MongoClient directly.", DeprecationWarning)
    return factory(uri)
|
Use the factory to establish a connection to uri .
|
3,478
|
def connect_gridfs(uri, db=None):
    """Construct a GridFS instance for a MongoDB URI.

    ``db`` defaults to a database connected from ``uri``; the GridFS
    collection name comes from the URI, falling back to ``'fs'``.
    """
    database = db or connect_db(uri)
    collection_name = get_collection(uri) or 'fs'
    return gridfs.GridFS(database, collection=collection_name)
|
Construct a GridFS instance for a MongoDB URI .
|
3,479
|
def Compare(fromMo, toMo, diff):
    """Internal helper supporting CompareManagedObject.

    Appends to ``diff`` the names of writable, non-excluded properties
    whose values differ between ``fromMo`` and ``toMo``; returns a
    CompareStatus value.
    """
    from UcsBase import UcsUtils
    if fromMo.classId != toMo.classId:
        return CompareStatus.TypesDifferent
    for prop in UcsUtils.GetUcsPropertyMetaAttributeList(str(fromMo.classId)):
        propMeta = UcsUtils.IsPropertyInMetaIgnoreCase(fromMo.classId, prop)
        if propMeta is None:
            continue
        # Skip internal/read-only properties and explicitly excluded ones.
        if (propMeta.access == UcsPropertyMeta.Internal
                or propMeta.access == UcsPropertyMeta.ReadOnly
                or prop in toMo._excludePropList):
            continue
        # 'prop in toMo.__dict__' replaces the Python-2-only has_key();
        # works identically on both Python 2 and 3.
        if prop in toMo.__dict__ and fromMo.getattr(prop) != toMo.getattr(prop):
            diff.append(prop)
    if len(diff) > 0:
        return CompareStatus.PropsDifferent
    return CompareStatus.Equal
|
Internal method to support CompareManagedObject functionality .
|
3,480
|
def TranslateManagedObject(mObj, xlateOrg, xlateMap):
    """Translate a managed object's Dn (used by CompareManagedObject).

    Works on a clone of ``mObj``: if ``xlateOrg`` is given, the org
    prefix of the Dn is rewritten; if ``xlateMap`` is given, the Dn (or
    its closest mapped ancestor) is rewritten via the map.
    Returns the translated clone.
    """
    from UcsBase import UcsUtils, WriteUcsWarning
    from Mos import OrgOrg
    xMO = mObj.Clone()
    xMO.SetHandle(mObj.GetHandle())
    if xlateOrg != None:
        matchObj = re.match(r'^(org-[\-\.:_a-zA-Z0-9]{1,16}/)*org-[\-\.:_a-zA-Z0-9]{1,16}', xMO.Dn)
        if matchObj:
            if UcsUtils.WordL(xMO.classId) == OrgOrg.ClassId():
                orgMoMeta = UcsUtils.GetUcsPropertyMeta(UcsUtils.WordU(OrgOrg.ClassId()), "Meta")
                if orgMoMeta == None:
                    WriteUcsWarning('[Warning]: Could not translate [%s]' % (xMO.Dn))
                    return xMO
                matchObj1 = re.findall(r'(\[[^\]]+\])', orgMoMeta.rn)
                if matchObj1:
                    # Rn contains naming properties; keep them in sync with the new Dn.
                    UpdateMoDnAlongWithNamingProperties(xMO, orgMoMeta, xlateOrg)
                else:
                    newDn = re.sub("%s" % (matchObj.group(0)), "%s" % (xlateOrg), xMO.Dn)
                    xMO.Dn = newDn
            else:
                newDn = re.sub("^%s/" % (matchObj.group(0)), "%s/" % (xlateOrg), xMO.Dn)
                xMO.Dn = newDn
    if xlateMap != None:
        originalDn = xMO.Dn
        if originalDn in xlateMap:
            xMoMeta = UcsUtils.GetUcsPropertyMeta(UcsUtils.WordU(xMO.classId), "Meta")
            if xMoMeta == None:
                WriteUcsWarning('[Warning]: Could not translate [%s]' % (originalDn))
                return xMO
            matchObj = re.findall(r'(\[[^\]]+\])', xMoMeta.rn)
            if matchObj:
                UpdateMoDnAlongWithNamingProperties(xMO, xMoMeta, xlateMap[originalDn])
            else:
                xMO.Dn = xlateMap[originalDn]
        else:
            # Walk up the Dn one component at a time looking for the
            # closest ancestor present in xlateMap.  BUGFIX: the original
            # loop condition ("!= None or == ''") was always true, so an
            # unmapped Dn reduced to "" spun forever; stop at the empty
            # string instead.
            originalDn = re.sub(r'[/]*[^/]+$', '', originalDn)
            while originalDn:
                if originalDn not in xlateMap:
                    originalDn = re.sub(r'[/]*[^/]+$', '', originalDn)
                    continue
                newDn = re.sub("^%s/" % (originalDn), "%s/" % (xlateMap[originalDn]), xMO.Dn)
                xMO.Dn = newDn
                break
    return xMO
|
Method used to translate a managedobject . This method is used in CompareManagedObject .
|
3,481
|
def ImportUcsSession(filePath, key):
    """Log in to every UCS recorded in an exported credential file.

    filePath -- path to the XML credential file (root <ucshandles> element).
    key      -- passphrase used to decrypt the stored passwords.

    Raises UcsValidationException for missing arguments or a missing file.
    Entries lacking a name, username or password are skipped; a login
    failure for one UCS is reported as a warning and does not stop the
    remaining logins.
    """
    from UcsBase import UcsUtils, WriteUcsWarning, UcsValidationException
    if filePath is None:
        raise UcsValidationException("filePath parameter is not provided.")
    if key is None:
        raise UcsValidationException("key parameter is not provided.")
    if not os.path.isfile(filePath) or not os.path.exists(filePath):
        raise UcsValidationException('[Error]: File <%s> does not exist ' % (filePath))
    doc = xml.dom.minidom.parse(filePath)
    topNode = doc.documentElement
    # The document root must be the handles container element.
    if topNode is None or topNode.localName != UcsLoginXml.UCS_HANDLES:
        return None
    if (topNode.hasChildNodes()):
        childList = topNode.childNodes
        childCount = len(childList)
        for i in range(childCount):
            childNode = childList.item(i)
            if (childNode.nodeType != Node.ELEMENT_NODE):
                continue
            if childNode.localName != UcsLoginXml.UCS:
                continue
            # Pull the stored connection attributes for this UCS entry.
            lName = None
            lUsername = None
            lPassword = None
            lNoSsl = False
            lPort = None
            if childNode.hasAttribute(UcsLoginXml.NAME):
                lName = childNode.getAttribute(UcsLoginXml.NAME)
            if childNode.hasAttribute(UcsLoginXml.USER_NAME):
                lUsername = childNode.getAttribute(UcsLoginXml.USER_NAME)
            if childNode.hasAttribute(UcsLoginXml.PASSWORD):
                # Passwords are stored encrypted; decrypt with the caller's key.
                lPassword = UcsUtils.DecryptPassword(childNode.getAttribute(UcsLoginXml.PASSWORD), key)
            if childNode.hasAttribute(UcsLoginXml.NO_SSL):
                lNoSsl = childNode.getAttribute(UcsLoginXml.NO_SSL)
            if childNode.hasAttribute(UcsLoginXml.PORT):
                lPort = childNode.getAttribute(UcsLoginXml.PORT)
            # A usable entry needs at least name, username and password.
            if ((lName is None) or (lUsername == None) or (lPassword == None)):
                continue
            try:
                handle = UcsHandle()
                handle.Login(name=lName, username=lUsername, password=lPassword, noSsl=lNoSsl, port=lPort)
            except Exception, err:
                # Best-effort: report the failure and keep processing entries.
                WriteUcsWarning("[Connection Error<%s>] %s" % (lName, str(err)))
|
This operation logs in to each UCS listed in the credential file .
|
3,482
|
def Uri(self):
    """Return the connection URI built from name, noSsl and port.

    The default port for the scheme (443 for https, 80 for http) is
    omitted from the URI.  BUGFIX: the original expression had the two
    defaults swapped, suppressing :80 on https and :443 on http.
    """
    if self._noSsl:
        scheme = "http"
        default_port = 80
    else:
        scheme = "https"
        default_port = 443
    if self._port == default_port:
        port_suffix = ""
    else:
        port_suffix = ":" + str(self._port)
    return "%s://%s%s" % (scheme, self._name, port_suffix)
|
Constructs the connection URI from name noSsl and port instance variables .
|
3,483
|
def UndoTransaction(self):
    """Cancel any running transaction, discarding the queued config."""
    from Ucs import ConfigMap
    # Drop the batched changes and clear the in-progress flag.
    self._configMap = ConfigMap()
    self._transactionInProgress = False
|
Cancels any running transaction .
|
3,484
|
def CompleteTransaction(self, dumpXml=None):
    """Commit the batched transaction with a single ConfigConfMos call.

    Returns the list of resulting managed objects on success; raises
    UcsException when UCS reports an error.
    """
    from Ucs import ConfigMap, Pair
    from UcsBase import ManagedObject, WriteUcsWarning, WriteObject, UcsException
    self._transactionInProgress = False
    ccm = self.ConfigConfMos(self._configMap, YesOrNo.FALSE, dumpXml)
    # Reset the pending config whether or not the commit succeeded.
    self._configMap = ConfigMap()
    if ccm.errorCode == 0:
        moList = []
        # The response may nest MOs inside Pair wrappers; flatten either shape.
        for child in ccm.OutConfigs.GetChild():
            if (isinstance(child, Pair) == True):
                for mo in child.GetChild():
                    moList.append(mo)
            elif (isinstance(child, ManagedObject) == True):
                moList.append(child)
        return moList
    else:
        raise UcsException(ccm.errorCode, ccm.errorDescr)
|
Completes a transaction .
|
3,485
|
def XmlRawQuery ( self , xml , dumpXml = None ) : if ( dumpXml == None ) : dumpXml = self . _dumpXml uri = self . Uri ( ) + '/nuova' if ( dumpXml in _AffirmativeList ) : print '%s ====> %s' % ( self . _ucs , xml ) w = xml . dom . minidom . Document ( ) if ( self . _noSsl ) : req = urllib2 . Request ( url = uri , data = w . toxml ( ) ) opener = urllib2 . build_opener ( SmartRedirectHandler ( ) ) f = opener . open ( req ) if type ( f ) is list : if ( len ( f ) == 2 and f [ 0 ] == 302 ) : uri = f [ 1 ] req = urllib2 . Request ( url = uri , data = w . toxml ( ) ) f = urllib2 . urlopen ( req ) else : req = urllib2 . Request ( url = uri , data = w . toxml ( ) ) f = urllib2 . urlopen ( req ) rsp = f . read ( ) if ( dumpXml in _AffirmativeList ) : print '%s <==== %s' % ( self . _ucs , rsp ) return rsp
|
Accepts xmlQuery String and returns xml response String . No object manipulation is done in this method .
|
3,486
|
def Logout(self, dumpXml=None):
    """Disconnect this handle from UCS, clearing all session state.

    Returns True when already logged out or on success; raises
    UcsException when the server reports a logout error.
    """
    from UcsBase import UcsException
    if self._cookie == None:
        return True
    if self._refreshTimer:
        self._refreshTimer.cancel()
    response = self.AaaLogout(dumpXml)
    # Drop session-scoped state regardless of the server's answer.
    self._cookie = None
    self._lastUpdateTime = str(time.asctime())
    for attr in ('_domains', '_priv', '_sessionId', '_version'):
        setattr(self, attr, None)
    if self._ucs in defaultUcs:
        del defaultUcs[self._ucs]
    if response.errorCode != 0:
        raise UcsException(response.errorCode, response.errorDescr)
    return True
|
Logout method disconnects from UCS .
|
3,487
|
def _Start_refresh_timer ( self ) : if self . _refreshPeriod > 60 : interval = self . _refreshPeriod - 60 else : interval = 60 self . _refreshTimer = Timer ( self . _refreshPeriod , self . Refresh ) self . _refreshTimer . setDaemon ( True ) self . _refreshTimer . start ( )
|
Internal method to support auto - refresh functionality .
|
3,488
|
def _start_enqueue_thread ( self ) : self . _enqueueThreadSignal . acquire ( ) self . _enqueueThread = Thread ( target = self . _enqueue_function ) self . _enqueueThread . daemon = True self . _enqueueThread . start ( ) self . _enqueueThreadSignal . wait ( ) self . _enqueueThreadSignal . release ( )
|
Internal method to start the enqueue thread which adds the events in an internal queue .
|
3,489
|
def _add_watch_block(self, params, filterCb, capacity=500, cb=None):
    """Internal method to add a watch block for starting event monitoring.

    params   -- watch parameters; params["pollSec"] of None selects push
                (long-poll) mode rather than periodic polling.
    filterCb -- callback used to filter incoming events.
    capacity -- maximum number of events queued for this block.
    cb       -- event callback; defaults to the block's dequeue handler.

    Returns the new WatchBlock, or None when there is no active session.
    """
    # Lazily create the lock guarding the watch-block list.
    if (self._wbslock == None):
        self._wbslock = Lock()
    self._wbslock.acquire()
    wb = WatchBlock(params, filterCb, capacity, cb)
    if ((wb != None) and (wb.cb == None)):
        wb.cb = wb._dequeue_default_cb
    self._wbs.append(wb)
    self._wbslock.release()
    # Without a login cookie there is no session to watch.
    if self._cookie == None:
        return None
    # First non-polling watcher: start the enqueue thread.
    if wb != None and len(self._wbs) == 1 and wb.params["pollSec"] == None:
        self._start_enqueue_thread()
    if self._enqueueThread == None:
        return wb
    # Wake the enqueue thread so it picks up the new watch block.
    self._enqueueThreadSignal.acquire()
    self._enqueueThreadSignal.notify()
    self._enqueueThreadSignal.release()
    return wb
|
Internal method to add a watch block for starting event monitoring .
|
3,490
|
def _remove_watch_block ( self , wb ) : if ( self . _wbslock == None ) : self . _wbslock = Lock ( ) self . _wbslock . acquire ( ) self . _wbs . remove ( wb ) if len ( self . _wbs ) == 0 : self . _stop_enqueue_thread ( ) self . _stop_dequeue_thread ( ) self . _wbslock . release ( )
|
Internal method to remove a watch block for stopping event monitoring .
|
3,491
|
def RemoveEventHandler(self, wb):
    """Remove a previously registered event handler; warn if unknown."""
    from UcsBase import WriteUcsWarning
    if wb not in self._wbs:
        WriteUcsWarning("Event handler not found")
    else:
        self._remove_watch_block(wb)
|
Removes an event handler .
|
3,492
|
def _start_dequeue_thread ( self ) : self . _dequeueThread = Thread ( target = self . _dequeue_function ) self . _dequeueThread . daemon = True self . _dequeueThread . start ( )
|
Internal method to start dequeue thread .
|
3,493
|
def StartGuiSession(self):
    """Launch the UCSM Java GUI for this handle via Java Web Start.

    Downloads the ucsm.jnlp descriptor, patches it to expose encrypted
    log values, runs it with javaws, then removes the temporary file.
    Raises UcsValidationException on unsupported platforms.
    """
    from UcsBase import WriteUcsWarning, UcsUtils, UcsValidationException
    import urllib, tempfile, fileinput, os, subprocess, platform
    osSupport = ["Windows", "Linux", "Microsoft"]
    if platform.system() not in osSupport:
        raise UcsValidationException("Currently works with Windows OS and Ubuntu")
    try:
        javawsPath = UcsUtils.GetJavaInstallationPath()
        # Without a javaws binary there is nothing to launch.
        if javawsPath != None:
            url = "%s/ucsm/ucsm.jnlp" % (self.Uri())
            source = urllib.urlopen(url).read()
            jnlpdir = tempfile.gettempdir()
            jnlpfile = os.path.join(jnlpdir, "temp.jnlp")
            if os.path.exists(jnlpfile):
                os.remove(jnlpfile)
            jnlpFH = open(jnlpfile, "w+")
            jnlpFH.write(source)
            jnlpFH.close()
            # In-place edit: inject the log.show.encrypted property just
            # before the closing </resources> tag.
            for line in fileinput.input(jnlpfile, inplace=1):
                if re.search(r'^\s*</resources>\s*$', line):
                    print '\t<property name="log.show.encrypted" value="true"/>'
                print line,
            subprocess.call([javawsPath, jnlpfile])
            if os.path.exists(jnlpfile):
                os.remove(jnlpfile)
        else:
            return None
    except Exception, err:
        # Close the in-place edit session and remove the temp file
        # before propagating the error.
        fileinput.close()
        if os.path.exists(jnlpfile):
            os.remove(jnlpfile)
        raise
|
Launches the UCSM GUI via specific UCS handle .
|
3,494
|
def ImportUcsBackup(self, path=None, merge=False, dumpXml=False):
    """Upload and apply a UCSM configuration backup.

    path    -- local path of the backup file to import.
    merge   -- True merges the backup with the running configuration;
               False replaces it.
    dumpXml -- dump request/response XML when truthy.

    Raises UcsValidationException for validation/upload failures and
    UcsException when UCS reports an error.
    """
    from UcsBase import WriteUcsWarning, UcsUtils, ManagedObject, WriteObject, UcsUtils, UcsException, UcsValidationException
    from Ucs import ConfigConfig
    from Mos import MgmtImporter
    from datetime import datetime
    if (self._transactionInProgress):
        raise UcsValidationException("UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.")
    if not path:
        raise UcsValidationException("path parameter is not provided.")
    if not os.path.exists(path):
        raise UcsValidationException("Backup File not found <%s>" % (path))
    dn = None
    filePath = path
    localFile = os.path.basename(filePath)
    topSystem = ManagedObject(NamingId.TOP_SYSTEM)
    mgmtImporter = ManagedObject(NamingId.MGMT_IMPORTER)
    # Unique importer name: hostname + timestamp.
    # NOTE(review): 'COMPUTERNAME' is a Windows-only env var -- this
    # raises KeyError on other platforms; confirm intended support.
    mgmtImporter.Hostname = os.environ['COMPUTERNAME'].lower() + datetime.now().strftime('%Y%m%d%H%M')
    dn = UcsUtils.MakeDn([topSystem.MakeRn(), mgmtImporter.MakeRn()])
    mgmtImporter.Dn = dn
    mgmtImporter.Status = Status.CREATED
    mgmtImporter.RemoteFile = filePath
    mgmtImporter.Proto = MgmtImporter.CONST_PROTO_HTTP
    mgmtImporter.AdminState = MgmtImporter.CONST_ADMIN_STATE_ENABLED
    if merge:
        mgmtImporter.Action = MgmtImporter.CONST_ACTION_MERGE
    else:
        mgmtImporter.Action = MgmtImporter.CONST_ACTION_REPLACE
    inConfig = ConfigConfig()
    inConfig.AddChild(mgmtImporter)
    # Upload the backup file over HTTP before triggering the import.
    uri = "%s/operations/file-%s/importconfig.txt" % (self.Uri(), localFile)
    if sys.version_info < (2, 6):
        # Old Pythons: read the whole file; no upload-progress callback.
        uploadFileHandle = open(filePath, 'rb')
        stream = uploadFileHandle.read()
    else:
        progress = Progress()
        stream = file_with_callback(filePath, 'rb', progress.update, filePath)
    request = urllib2.Request(uri)
    request.add_header('Cookie', 'ucsm-cookie=%s' % (self._cookie))
    request.add_data(stream)
    response = urllib2.urlopen(request).read()
    if not response:
        raise UcsValidationException("Unable to upload properly.")
    # Trigger the actual import of the uploaded file.
    ccm = self.ConfigConfMo(dn=dn, inConfig=inConfig, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml)
    if (ccm.errorCode != 0):
        raise UcsException(ccm.errorCode, ccm.errorDescr)
    return ccm.OutConfig.GetChild()
|
Imports a backup . This operation uploads a UCSM backup taken earlier via the GUI or the BackupUcs operation , covering full-state , system configuration and logical configuration files . The import can be performed while the system is up and running . - path specifies the path of the backup file . - merge specifies whether to merge the backup configuration with the existing UCSM configuration .
|
3,495
|
def SendUcsFirmware(self, path=None, dumpXml=False):
    """Upload a firmware image file to the UCS fabric interconnect.

    path    -- local path of the image to upload.
    dumpXml -- dump request/response XML when truthy.

    Raises UcsValidationException when validation fails or the image
    already exists on the FI, and UcsException on UCS errors.
    """
    from UcsBase import WriteUcsWarning, UcsUtils, ManagedObject, WriteObject, UcsUtils, UcsValidationException, UcsException
    from Ucs import ConfigConfig
    from Mos import FirmwareDownloader
    if (self._transactionInProgress):
        raise UcsValidationException("UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.")
    if not path:
        raise UcsValidationException("path parameter is not provided.")
    if not os.path.exists(path):
        raise UcsValidationException("Image not found <%s>" % (path))
    dn = None
    filePath = path
    localFile = os.path.basename(filePath)
    topSystem = ManagedObject(NamingId.TOP_SYSTEM)
    firmwareCatalogue = ManagedObject(NamingId.FIRMWARE_CATALOGUE)
    firmwareDistributable = ManagedObject(NamingId.FIRMWARE_DISTRIBUTABLE)
    firmwareDistributable.Name = localFile
    dn = UcsUtils.MakeDn([topSystem.MakeRn(), firmwareCatalogue.MakeRn(), firmwareDistributable.MakeRn()])
    # Refuse to re-upload an image that is already present on the FI.
    crDn = self.ConfigResolveDn(dn, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml)
    if (crDn.OutConfig.GetChildCount() > 0):
        raise UcsValidationException("Image file <%s> already exist on FI." % (filePath))
    firmwareDownloader = ManagedObject(NamingId.FIRMWARE_DOWNLOADER)
    firmwareDownloader.FileName = localFile
    dn = UcsUtils.MakeDn([topSystem.MakeRn(), firmwareCatalogue.MakeRn(), firmwareDownloader.MakeRn()])
    firmwareDownloader.Dn = dn
    firmwareDownloader.Status = Status.CREATED
    firmwareDownloader.FileName = localFile
    firmwareDownloader.Server = FirmwareDownloader.CONST_PROTOCOL_LOCAL
    firmwareDownloader.Protocol = FirmwareDownloader.CONST_PROTOCOL_LOCAL
    inConfig = ConfigConfig()
    inConfig.AddChild(firmwareDownloader)
    # Upload the image over HTTP, reporting progress as chunks are read.
    uri = "%s/operations/file-%s/image.txt" % (self.Uri(), localFile)
    progress = Progress()
    stream = file_with_callback(filePath, 'rb', progress.update, filePath)
    request = urllib2.Request(uri)
    request.add_header('Cookie', 'ucsm-cookie=%s' % (self._cookie))
    request.add_data(stream)
    response = urllib2.urlopen(request).read()
    if not response:
        raise UcsValidationException("Unable to upload properly.")
    # Trigger the download/registration of the uploaded image.
    ccm = self.ConfigConfMo(dn=dn, inConfig=inConfig, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml)
    if (ccm.errorCode != 0):
        raise UcsException(ccm.errorCode, ccm.errorDescr)
    return ccm.OutConfig.GetChild()
|
Uploads a specific CCO Image on UCS . - path specifies the path of the image to be uploaded .
|
3,496
|
def GetUcsChild(self, inMo=None, inDn=None, classId=None, inHierarchical=False, dumpXml=None):
    """Get child managed objects of a parent MO (or Dn) from UCS.

    One of ``inMo``/``inDn`` must identify the parent; ``classId`` may
    restrict the child class.  Returns the resolved MO list.

    Raises UcsValidationException when no parent is given and
    UcsException when UCS reports an error.
    """
    from UcsBase import UcsValidationException, UcsException, UcsUtils
    if not inDn and not inMo:
        raise UcsValidationException('[Error]: get_ucs_child: Provide in_mo or in_dn.')
    if inMo:
        parentDn = inMo.getattr("Dn")
    else:
        parentDn = inDn
    crc = self.ConfigResolveChildren(classId, parentDn, None, inHierarchical, dumpXml)
    if crc.errorCode != 0:
        # BUGFIX: the response exposes 'errorDescr' (as read by every
        # sibling method); 'error_descr' raised AttributeError here.
        raise UcsException(crc.errorCode, crc.errorDescr)
    moList = UcsUtils.extractMolistFromMethodResponse(crc, inHierarchical)
    return moList
|
Gets Child Managed Object from UCS .
|
3,497
|
def code(sentence, pad=' ', format='army'):
    """Spell out a sentence using a code spelling alphabet.

    Several international code alphabets are supported via ``format``;
    an empty ``pad`` falls back to the alphabet's default padding.
    """
    try:
        speller = ALPHABET['code'][format]
        return speller(sentence, pad or CODE_PADDING[format])
    except KeyError:
        raise TypeError('Unsupported code alphabet "%s"' % (format,))
|
Transform a sentence using the code spelling alphabet multiple international code alphabets are supported .
|
3,498
|
def nato(sentence, pad=' ', format='telephony'):
    """Spell out a sentence using the NATO spelling alphabet."""
    try:
        spell = ALPHABET['nato'][format]
        return '' + spell(sentence, pad)
    except KeyError:
        raise TypeError('Unsupported NATO alphabet "%s"' % (format,))
|
Transform a sentence using the NATO spelling alphabet .
|
3,499
|
def clause(self, *args, **kwargs):
    """Add a lunr.Clause to this query.

    Accepts either a ready-made Clause as the first positional argument
    or the arguments to construct one.  Missing fields default to all
    query fields, and requested leading/trailing wildcards are applied
    to the term.  Returns self for chaining.
    """
    if args and isinstance(args[0], Clause):
        new_clause = args[0]
    else:
        new_clause = Clause(*args, **kwargs)
    if not new_clause.fields:
        new_clause.fields = self.all_fields
    term = new_clause.term
    if (new_clause.wildcard & Query.WILDCARD_LEADING) and term[0] != Query.WILDCARD:
        term = Query.WILDCARD + term
    if (new_clause.wildcard & Query.WILDCARD_TRAILING) and term[-1] != Query.WILDCARD:
        term = term + Query.WILDCARD
    new_clause.term = term
    self.clauses.append(new_clause)
    return self
|
Adds a lunr . Clause to this query .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.