idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
54,100 | def coauthors ( corpus , min_weight = 1 , edge_attrs = [ 'ayjid' , 'date' ] , ** kwargs ) : return cooccurrence ( corpus , 'authors' , min_weight = min_weight , edge_attrs = edge_attrs , ** kwargs ) | A graph describing joint authorship in corpus . |
54,101 | def extract_text ( fpath ) : with codecs . open ( fpath , 'r' ) as f : document = f . read ( ) encoding = chardet . detect ( document ) [ 'encoding' ] document = document . decode ( encoding ) tokens = [ ] sentences = [ ] i = 0 for sentence in nltk . tokenize . sent_tokenize ( document ) : sentences . append ( i ) for word in nltk . tokenize . word_tokenize ( sentence ) : tokens . append ( word ) i += 1 contexts = [ ( 'sentence' , sentences ) ] return StructuredFeature ( tokens , contexts ) | Extracts structured text content from a plain - text file at fpath . |
54,102 | def extract_pdf ( fpath ) : with codecs . open ( fpath , 'r' ) as f : document = slate . PDF ( f ) encoding = chardet . detect ( document [ 0 ] ) tokens = [ ] pages = [ ] sentences = [ ] tokenizer = nltk . tokenize . TextTilingTokenizer ( ) i = 0 for page in document : pages . append ( i ) page = page . decode ( encoding [ 'encoding' ] ) for sentence in nltk . tokenize . sent_tokenize ( page ) : sentences . append ( i ) for word in nltk . tokenize . word_tokenize ( sentence ) : if len ( word ) > 15 : words = nltk . tokenize . word_tokenize ( _infer_spaces ( word ) ) if mean ( [ len ( w ) for w in words ] ) > 2 : for w in words : tokens . append ( w ) i += 1 continue tokens . append ( word ) i += 1 contexts = [ ( 'page' , pages ) , ( 'sentence' , sentences ) ] return StructuredFeature ( tokens , contexts ) | Extracts structured text content from a PDF at fpath . |
54,103 | def read ( path , corpus = True , index_by = 'uri' , follow_links = False , ** kwargs ) : parser = ZoteroParser ( path , index_by = index_by , follow_links = follow_links ) papers = parser . parse ( ) if corpus : c = Corpus ( papers , index_by = index_by , ** kwargs ) if c . duplicate_papers : warnings . warn ( "Duplicate papers detected. Use the 'duplicate_papers' attribute of the corpus to get the list" , UserWarning ) for fset_name , fset_values in parser . full_text . iteritems ( ) : c . features [ fset_name ] = StructuredFeatureSet ( fset_values ) return c return papers | Read bibliographic data from Zotero RDF . |
54,104 | def handle_date ( self , value ) : try : return iso8601 . parse_date ( unicode ( value ) ) . year except iso8601 . ParseError : for datefmt in ( "%B %d, %Y" , "%Y-%m" , "%Y-%m-%d" , "%m/%d/%Y" ) : try : return datetime . strptime ( unicode ( value ) , datefmt ) . date ( ) . year except ValueError : pass | Attempt to coerce date to ISO8601 . |
54,105 | def postprocess_link ( self , entry ) : if not self . follow_links : return if type ( entry . link ) is not list : entry . link = [ entry . link ] for link in list ( entry . link ) : if not os . path . exists ( link ) : continue mime_type = magic . from_file ( link , mime = True ) if mime_type == 'application/pdf' : structuredfeature = extract_pdf ( link ) elif mime_type == 'text/plain' : structuredfeature = extract_text ( link ) else : structuredfeature = None if not structuredfeature : continue fset_name = mime_type . split ( '/' ) [ - 1 ] + '_text' if not fset_name in self . full_text : self . full_text [ fset_name ] = { } if hasattr ( self , 'index_by' ) : ident = getattr ( entry , self . index_by ) if type ( ident ) is list : ident = ident [ 0 ] else : ident = entry . uri self . full_text [ fset_name ] [ ident ] = structuredfeature | Attempt to load full - text content from resource . |
54,106 | def webpush ( subscription_info , data = None , vapid_private_key = None , vapid_claims = None , content_encoding = "aes128gcm" , curl = False , timeout = None , ttl = 0 ) : vapid_headers = None if vapid_claims : if not vapid_claims . get ( 'aud' ) : url = urlparse ( subscription_info . get ( 'endpoint' ) ) aud = "{}://{}" . format ( url . scheme , url . netloc ) vapid_claims [ 'aud' ] = aud if not vapid_claims . get ( 'exp' ) : vapid_claims [ 'exp' ] = int ( time . time ( ) ) + ( 12 * 60 * 60 ) if not vapid_private_key : raise WebPushException ( "VAPID dict missing 'private_key'" ) if isinstance ( vapid_private_key , Vapid ) : vv = vapid_private_key elif os . path . isfile ( vapid_private_key ) : vv = Vapid . from_file ( private_key_file = vapid_private_key ) else : vv = Vapid . from_string ( private_key = vapid_private_key ) vapid_headers = vv . sign ( vapid_claims ) response = WebPusher ( subscription_info ) . send ( data , vapid_headers , ttl = ttl , content_encoding = content_encoding , curl = curl , timeout = timeout , ) if not curl and response . status_code > 202 : raise WebPushException ( "Push failed: {} {}" . format ( response . status_code , response . reason ) , response = response ) return response | One call solution to encode and send data to the endpoint contained in subscription_info using optional VAPID auth headers . |
54,107 | def encode ( self , data , content_encoding = "aes128gcm" ) : if not data : return if not self . auth_key or not self . receiver_key : raise WebPushException ( "No keys specified in subscription info" ) salt = None if content_encoding not in self . valid_encodings : raise WebPushException ( "Invalid content encoding specified. " "Select from " + json . dumps ( self . valid_encodings ) ) if content_encoding == "aesgcm" : salt = os . urandom ( 16 ) server_key = ec . generate_private_key ( ec . SECP256R1 , default_backend ( ) ) crypto_key = server_key . public_key ( ) . public_bytes ( encoding = serialization . Encoding . X962 , format = serialization . PublicFormat . UncompressedPoint ) if isinstance ( data , six . string_types ) : data = bytes ( data . encode ( 'utf8' ) ) if content_encoding == "aes128gcm" : encrypted = http_ece . encrypt ( data , salt = salt , private_key = server_key , dh = self . receiver_key , auth_secret = self . auth_key , version = content_encoding ) reply = CaseInsensitiveDict ( { 'body' : encrypted } ) else : crypto_key = base64 . urlsafe_b64encode ( crypto_key ) . strip ( b'=' ) encrypted = http_ece . encrypt ( data , salt = salt , private_key = server_key , keyid = crypto_key . decode ( ) , dh = self . receiver_key , auth_secret = self . auth_key , version = content_encoding ) reply = CaseInsensitiveDict ( { 'crypto_key' : crypto_key , 'body' : encrypted , } ) if salt : reply [ 'salt' ] = base64 . urlsafe_b64encode ( salt ) . strip ( b'=' ) return reply | Encrypt the data . |
54,108 | def send ( self , data = None , headers = None , ttl = 0 , gcm_key = None , reg_id = None , content_encoding = "aes128gcm" , curl = False , timeout = None ) : if headers is None : headers = dict ( ) encoded = { } headers = CaseInsensitiveDict ( headers ) if data : encoded = self . encode ( data , content_encoding ) if "crypto_key" in encoded : crypto_key = headers . get ( "crypto-key" , "" ) if crypto_key : crypto_key += ';' crypto_key += ( "dh=" + encoded [ "crypto_key" ] . decode ( 'utf8' ) ) headers . update ( { 'crypto-key' : crypto_key } ) if "salt" in encoded : headers . update ( { 'encryption' : "salt=" + encoded [ 'salt' ] . decode ( 'utf8' ) } ) headers . update ( { 'content-encoding' : content_encoding , } ) if gcm_key : if len ( gcm_key ) < 100 : endpoint = 'https://android.googleapis.com/gcm/send' else : endpoint = 'https://fcm.googleapis.com/fcm/send' reg_ids = [ ] if not reg_id : reg_id = self . subscription_info [ 'endpoint' ] . rsplit ( '/' , 1 ) [ - 1 ] reg_ids . append ( reg_id ) gcm_data = dict ( ) gcm_data [ 'registration_ids' ] = reg_ids if data : gcm_data [ 'raw_data' ] = base64 . b64encode ( encoded . get ( 'body' ) ) . decode ( 'utf8' ) gcm_data [ 'time_to_live' ] = int ( headers [ 'ttl' ] if 'ttl' in headers else ttl ) encoded_data = json . dumps ( gcm_data ) headers . update ( { 'Authorization' : 'key=' + gcm_key , 'Content-Type' : 'application/json' , } ) else : encoded_data = encoded . get ( 'body' ) endpoint = self . subscription_info [ 'endpoint' ] if 'ttl' not in headers or ttl : headers [ 'ttl' ] = str ( ttl or 0 ) if curl : return self . as_curl ( endpoint , encoded_data , headers ) return self . requests_method . post ( endpoint , data = encoded_data , headers = headers , timeout = timeout ) | Encode and send the data to the Push Service . |
54,109 | def calendarplot ( data , how = 'sum' , yearlabels = True , yearascending = True , yearlabel_kws = None , subplot_kws = None , gridspec_kws = None , fig_kws = None , ** kwargs ) : yearlabel_kws = yearlabel_kws or { } subplot_kws = subplot_kws or { } gridspec_kws = gridspec_kws or { } fig_kws = fig_kws or { } years = np . unique ( data . index . year ) if not yearascending : years = years [ : : - 1 ] fig , axes = plt . subplots ( nrows = len ( years ) , ncols = 1 , squeeze = False , subplot_kw = subplot_kws , gridspec_kw = gridspec_kws , ** fig_kws ) axes = axes . T [ 0 ] if how is None : by_day = data else : if _pandas_18 : by_day = data . resample ( 'D' ) . agg ( how ) else : by_day = data . resample ( 'D' , how = how ) ylabel_kws = dict ( fontsize = 32 , color = kwargs . get ( 'fillcolor' , 'whitesmoke' ) , fontweight = 'bold' , fontname = 'Arial' , ha = 'center' ) ylabel_kws . update ( yearlabel_kws ) max_weeks = 0 for year , ax in zip ( years , axes ) : yearplot ( by_day , year = year , how = None , ax = ax , ** kwargs ) max_weeks = max ( max_weeks , ax . get_xlim ( ) [ 1 ] ) if yearlabels : ax . set_ylabel ( str ( year ) , ** ylabel_kws ) for ax in axes : ax . set_xlim ( 0 , max_weeks ) plt . tight_layout ( ) return fig , axes | Plot a timeseries as a calendar heatmap . |
54,110 | def geosgeometry_str_to_struct ( value ) : result = geos_ptrn . match ( value ) if not result : return None return { 'srid' : result . group ( 1 ) , 'x' : result . group ( 2 ) , 'y' : result . group ( 3 ) , } | Parses a geosgeometry string into struct . |
54,111 | def get_env ( name , default = None ) : if name in os . environ : return os . environ [ name ] if default is not None : return default error_msg = "Set the {} env variable" . format ( name ) raise ImproperlyConfigured ( error_msg ) | Get the environment variable or return exception |
54,112 | def user_defined_symbols ( self ) : sym_in_current = set ( self . symtable . keys ( ) ) sym_from_construction = set ( self . no_deepcopy ) unique_symbols = sym_in_current . difference ( sym_from_construction ) return unique_symbols | Return a set of symbols that have been added to symtable after construction . |
54,113 | def unimplemented ( self , node ) : self . raise_exception ( node , exc = NotImplementedError , msg = "'%s' not supported" % ( node . __class__ . __name__ ) ) | Unimplemented nodes . |
54,114 | def raise_exception ( self , node , exc = None , msg = '' , expr = None , lineno = None ) : if self . error is None : self . error = [ ] if expr is None : expr = self . expr if len ( self . error ) > 0 and not isinstance ( node , ast . Module ) : msg = '%s' % msg err = ExceptionHolder ( node , exc = exc , msg = msg , expr = expr , lineno = lineno ) self . _interrupt = ast . Break ( ) self . error . append ( err ) if self . error_msg is None : self . error_msg = "at expr='%s'" % ( self . expr ) elif len ( msg ) > 0 : self . error_msg = msg if exc is None : try : exc = self . error [ 0 ] . exc except : exc = RuntimeError raise exc ( self . error_msg ) | Add an exception . |
54,115 | def run ( self , node , expr = None , lineno = None , with_raise = True ) : if time . time ( ) - self . start_time > self . max_time : raise RuntimeError ( ERR_MAX_TIME . format ( self . max_time ) ) out = None if len ( self . error ) > 0 : return out if node is None : return out if isinstance ( node , str ) : node = self . parse ( node ) if lineno is not None : self . lineno = lineno if expr is not None : self . expr = expr try : handler = self . node_handlers [ node . __class__ . __name__ . lower ( ) ] except KeyError : return self . unimplemented ( node ) try : ret = handler ( node ) if isinstance ( ret , enumerate ) : ret = list ( ret ) return ret except : if with_raise : self . raise_exception ( node , expr = expr ) | Execute parsed Ast representation for an expression . |
54,116 | def eval ( self , expr , lineno = 0 , show_errors = True ) : self . lineno = lineno self . error = [ ] self . start_time = time . time ( ) try : node = self . parse ( expr ) except : errmsg = exc_info ( ) [ 1 ] if len ( self . error ) > 0 : errmsg = "\n" . join ( self . error [ 0 ] . get_error ( ) ) if not show_errors : try : exc = self . error [ 0 ] . exc except : exc = RuntimeError raise exc ( errmsg ) print ( errmsg , file = self . err_writer ) return try : return self . run ( node , expr = expr , lineno = lineno ) except : errmsg = exc_info ( ) [ 1 ] if len ( self . error ) > 0 : errmsg = "\n" . join ( self . error [ 0 ] . get_error ( ) ) if not show_errors : try : exc = self . error [ 0 ] . exc except : exc = RuntimeError raise exc ( errmsg ) print ( errmsg , file = self . err_writer ) return | Evaluate a single statement . |
54,117 | def on_module ( self , node ) : out = None for tnode in node . body : out = self . run ( tnode ) return out | Module def . |
54,118 | def on_assert ( self , node ) : if not self . run ( node . test ) : self . raise_exception ( node , exc = AssertionError , msg = node . msg ) return True | Assert statement . |
54,119 | def on_name ( self , node ) : ctx = node . ctx . __class__ if ctx in ( ast . Param , ast . Del ) : return str ( node . id ) else : if node . id in self . symtable : return self . symtable [ node . id ] else : msg = "name '%s' is not defined" % node . id self . raise_exception ( node , exc = NameError , msg = msg ) | Name node . |
54,120 | def on_attribute ( self , node ) : ctx = node . ctx . __class__ if ctx == ast . Store : msg = "attribute for storage: shouldn't be here!" self . raise_exception ( node , exc = RuntimeError , msg = msg ) sym = self . run ( node . value ) if ctx == ast . Del : return delattr ( sym , node . attr ) fmt = "cannnot access attribute '%s' for %s" if node . attr not in UNSAFE_ATTRS : fmt = "no attribute '%s' for %s" try : return getattr ( sym , node . attr ) except AttributeError : pass obj = self . run ( node . value ) msg = fmt % ( node . attr , obj ) self . raise_exception ( node , exc = AttributeError , msg = msg ) | Extract attribute . |
54,121 | def on_assign ( self , node ) : val = self . run ( node . value ) for tnode in node . targets : self . node_assign ( tnode , val ) return | Simple assignment . |
54,122 | def on_augassign ( self , node ) : return self . on_assign ( ast . Assign ( targets = [ node . target ] , value = ast . BinOp ( left = node . target , op = node . op , right = node . value ) ) ) | Augmented assign . |
54,123 | def on_slice ( self , node ) : return slice ( self . run ( node . lower ) , self . run ( node . upper ) , self . run ( node . step ) ) | Simple slice . |
54,124 | def on_extslice ( self , node ) : return tuple ( [ self . run ( tnode ) for tnode in node . dims ] ) | Extended slice . |
54,125 | def on_delete ( self , node ) : for tnode in node . targets : if tnode . ctx . __class__ != ast . Del : break children = [ ] while tnode . __class__ == ast . Attribute : children . append ( tnode . attr ) tnode = tnode . value if tnode . __class__ == ast . Name and tnode . id not in self . readonly_symbols : children . append ( tnode . id ) children . reverse ( ) self . symtable . pop ( '.' . join ( children ) ) else : msg = "could not delete symbol" self . raise_exception ( node , msg = msg ) | Delete statement . |
54,126 | def on_unaryop ( self , node ) : return op2func ( node . op ) ( self . run ( node . operand ) ) | Unary operator . |
54,127 | def on_binop ( self , node ) : return op2func ( node . op ) ( self . run ( node . left ) , self . run ( node . right ) ) | Binary operator . |
54,128 | def on_boolop ( self , node ) : val = self . run ( node . values [ 0 ] ) is_and = ast . And == node . op . __class__ if ( is_and and val ) or ( not is_and and not val ) : for n in node . values [ 1 : ] : val = op2func ( node . op ) ( val , self . run ( n ) ) if ( is_and and not val ) or ( not is_and and val ) : break return val | Boolean operator . |
54,129 | def _printer ( self , * out , ** kws ) : flush = kws . pop ( 'flush' , True ) fileh = kws . pop ( 'file' , self . writer ) sep = kws . pop ( 'sep' , ' ' ) end = kws . pop ( 'sep' , '\n' ) print ( * out , file = fileh , sep = sep , end = end ) if flush : fileh . flush ( ) | Generic print function . |
54,130 | def on_if ( self , node ) : block = node . body if not self . run ( node . test ) : block = node . orelse for tnode in block : self . run ( tnode ) | Regular if - then - else statement . |
54,131 | def on_ifexp ( self , node ) : expr = node . orelse if self . run ( node . test ) : expr = node . body return self . run ( expr ) | If expressions . |
54,132 | def on_while ( self , node ) : while self . run ( node . test ) : self . _interrupt = None for tnode in node . body : self . run ( tnode ) if self . _interrupt is not None : break if isinstance ( self . _interrupt , ast . Break ) : break else : for tnode in node . orelse : self . run ( tnode ) self . _interrupt = None | While blocks . |
54,133 | def on_for ( self , node ) : for val in self . run ( node . iter ) : self . node_assign ( node . target , val ) self . _interrupt = None for tnode in node . body : self . run ( tnode ) if self . _interrupt is not None : break if isinstance ( self . _interrupt , ast . Break ) : break else : for tnode in node . orelse : self . run ( tnode ) self . _interrupt = None | For blocks . |
54,134 | def on_listcomp ( self , node ) : out = [ ] for tnode in node . generators : if tnode . __class__ == ast . comprehension : for val in self . run ( tnode . iter ) : self . node_assign ( tnode . target , val ) add = True for cond in tnode . ifs : add = add and self . run ( cond ) if add : out . append ( self . run ( node . elt ) ) return out | List comprehension . |
54,135 | def on_excepthandler ( self , node ) : return ( self . run ( node . type ) , node . name , node . body ) | Exception handler ... |
54,136 | def on_call ( self , node ) : func = self . run ( node . func ) if not hasattr ( func , '__call__' ) and not isinstance ( func , type ) : msg = "'%s' is not callable!!" % ( func ) self . raise_exception ( node , exc = TypeError , msg = msg ) args = [ self . run ( targ ) for targ in node . args ] starargs = getattr ( node , 'starargs' , None ) if starargs is not None : args = args + self . run ( starargs ) keywords = { } if six . PY3 and func == print : keywords [ 'file' ] = self . writer for key in node . keywords : if not isinstance ( key , ast . keyword ) : msg = "keyword error in function call '%s'" % ( func ) self . raise_exception ( node , msg = msg ) keywords [ key . arg ] = self . run ( key . value ) kwargs = getattr ( node , 'kwargs' , None ) if kwargs is not None : keywords . update ( self . run ( kwargs ) ) try : return func ( * args , ** keywords ) except Exception as ex : self . raise_exception ( node , msg = "Error running function call '%s' with args %s and " "kwargs %s: %s" % ( func . __name__ , args , keywords , ex ) ) | Function execution . |
54,137 | def on_functiondef ( self , node ) : if node . decorator_list : raise Warning ( "decorated procedures not supported!" ) kwargs = [ ] if not valid_symbol_name ( node . name ) or node . name in self . readonly_symbols : errmsg = "invalid function name (reserved word?) %s" % node . name self . raise_exception ( node , exc = NameError , msg = errmsg ) offset = len ( node . args . args ) - len ( node . args . defaults ) for idef , defnode in enumerate ( node . args . defaults ) : defval = self . run ( defnode ) keyval = self . run ( node . args . args [ idef + offset ] ) kwargs . append ( ( keyval , defval ) ) if version_info [ 0 ] == 3 : args = [ tnode . arg for tnode in node . args . args [ : offset ] ] else : args = [ tnode . id for tnode in node . args . args [ : offset ] ] doc = None nb0 = node . body [ 0 ] if isinstance ( nb0 , ast . Expr ) and isinstance ( nb0 . value , ast . Str ) : doc = nb0 . value . s varkws = node . args . kwarg vararg = node . args . vararg if version_info [ 0 ] == 3 : if isinstance ( vararg , ast . arg ) : vararg = vararg . arg if isinstance ( varkws , ast . arg ) : varkws = varkws . arg self . symtable [ node . name ] = Procedure ( node . name , self , doc = doc , lineno = self . lineno , body = node . body , args = args , kwargs = kwargs , vararg = vararg , varkws = varkws ) if node . name in self . no_deepcopy : self . no_deepcopy . remove ( node . name ) | Define procedures . |
54,138 | def safe_pow ( base , exp ) : if exp > MAX_EXPONENT : raise RuntimeError ( "Invalid exponent, max exponent is {}" . format ( MAX_EXPONENT ) ) return base ** exp | safe version of pow |
54,139 | def safe_mult ( a , b ) : if isinstance ( a , str ) and isinstance ( b , int ) and len ( a ) * b > MAX_STR_LEN : raise RuntimeError ( "String length exceeded, max string length is {}" . format ( MAX_STR_LEN ) ) return a * b | safe version of multiply |
54,140 | def safe_add ( a , b ) : if isinstance ( a , str ) and isinstance ( b , str ) and len ( a ) + len ( b ) > MAX_STR_LEN : raise RuntimeError ( "String length exceeded, max string length is {}" . format ( MAX_STR_LEN ) ) return a + b | safe version of add |
54,141 | def safe_lshift ( a , b ) : if b > MAX_SHIFT : raise RuntimeError ( "Invalid left shift, max left shift is {}" . format ( MAX_SHIFT ) ) return a << b | safe version of lshift |
54,142 | def valid_symbol_name ( name ) : if name in RESERVED_WORDS : return False gen = generate_tokens ( io . BytesIO ( name . encode ( 'utf-8' ) ) . readline ) typ , _ , start , end , _ = next ( gen ) if typ == tk_ENCODING : typ , _ , start , end , _ = next ( gen ) return typ == tk_NAME and start == ( 1 , 0 ) and end == ( 1 , len ( name ) ) | Determine whether the input symbol name is a valid name . |
54,143 | def make_symbol_table ( use_numpy = True , ** kws ) : symtable = { } for sym in FROM_PY : if sym in builtins : symtable [ sym ] = builtins [ sym ] for sym in FROM_MATH : if hasattr ( math , sym ) : symtable [ sym ] = getattr ( math , sym ) if HAS_NUMPY and use_numpy : for sym in FROM_NUMPY : if hasattr ( numpy , sym ) : symtable [ sym ] = getattr ( numpy , sym ) for name , sym in NUMPY_RENAMES . items ( ) : if hasattr ( numpy , sym ) : symtable [ name ] = getattr ( numpy , sym ) symtable . update ( LOCALFUNCS ) symtable . update ( kws ) return symtable | Create a default symboltable taking dict of user - defined symbols . |
54,144 | def get_error ( self ) : col_offset = - 1 if self . node is not None : try : col_offset = self . node . col_offset except AttributeError : pass try : exc_name = self . exc . __name__ except AttributeError : exc_name = str ( self . exc ) if exc_name in ( None , 'None' ) : exc_name = 'UnknownError' out = [ " %s" % self . expr ] if col_offset > 0 : out . append ( " %s^^^" % ( ( col_offset ) * ' ' ) ) out . append ( str ( self . msg ) ) return ( exc_name , '\n' . join ( out ) ) | Retrieve error data . |
54,145 | def add_config_path ( self , path ) : abspath = util . abs_pathify ( path ) if abspath not in self . _config_paths : log . info ( "Adding {0} to paths to search" . format ( abspath ) ) self . _config_paths . append ( abspath ) | Add a path for Vyper to search for the config file in . Can be called multiple times to define multiple search paths . |
54,146 | def sub ( self , key ) : subv = Vyper ( ) data = self . get ( key ) if isinstance ( data , dict ) : subv . _config = data return subv else : return None | Returns new Vyper instance representing a sub tree of this instance . |
54,147 | def unmarshall_key ( self , key , cls ) : return setattr ( cls , key , self . get ( key ) ) | Takes a single key and unmarshalls it into a class . |
54,148 | def unmarshall ( self , cls ) : for k , v in self . all_settings ( ) . items ( ) : setattr ( cls , k , v ) return cls | Unmarshalls the config into a class . Make sure that the tags on the attributes of the class are properly set . |
54,149 | def bind_env ( self , * input_ ) : if len ( input_ ) == 0 : return "bind_env missing key to bind to" key = input_ [ 0 ] . lower ( ) if len ( input_ ) == 1 : env_key = self . _merge_with_env_prefix ( key ) else : env_key = input_ [ 1 ] self . _env [ key ] = env_key if self . _key_delimiter in key : parts = input_ [ 0 ] . split ( self . _key_delimiter ) env_info = { "path" : parts [ 1 : - 1 ] , "final_key" : parts [ - 1 ] , "env_key" : env_key } if self . _env . get ( parts [ 0 ] ) is None : self . _env [ parts [ 0 ] ] = [ env_info ] else : self . _env [ parts [ 0 ] ] . append ( env_info ) return None | Binds a Vyper key to a ENV variable . ENV variables are case sensitive . If only a key is provided it will use the env key matching the key uppercased . env_prefix will be used when set when env name is not provided . |
54,150 | def is_set ( self , key ) : path = key . split ( self . _key_delimiter ) lower_case_key = key . lower ( ) val = self . _find ( lower_case_key ) if val is None : source = self . _find ( path [ 0 ] . lower ( ) ) if source is not None and isinstance ( source , dict ) : val = self . _search_dict ( source , path [ 1 : : ] ) return val is not None | Check to see if the key has been set in any of the data locations . |
54,151 | def register_alias ( self , alias , key ) : alias = alias . lower ( ) key = key . lower ( ) if alias != key and alias != self . _real_key ( key ) : exists = self . _aliases . get ( alias ) if exists is None : val = self . _config . get ( alias ) if val : self . _config . pop ( alias ) self . _config [ key ] = val val = self . _kvstore . get ( alias ) if val : self . _kvstore . pop ( alias ) self . _kvstore [ key ] = val val = self . _defaults . get ( alias ) if val : self . _defaults . pop ( alias ) self . _defaults [ key ] = val val = self . _override . get ( alias ) if val : self . _override . pop ( alias ) self . _override [ key ] = val self . _aliases [ alias ] = key else : log . warning ( "Creating circular reference alias {0} {1} {2}" . format ( alias , key , self . _real_key ( key ) ) ) | Aliases provide another accessor for the same key . This enables one to change a name without breaking the application . |
54,152 | def set_default ( self , key , value ) : k = self . _real_key ( key . lower ( ) ) self . _defaults [ k ] = value | Set the default value for this key . Default only used when no value is provided by the user via arg config or env . |
54,153 | def _unmarshall_reader ( self , file_ , d ) : return util . unmarshall_config_reader ( file_ , d , self . _get_config_type ( ) ) | Unmarshall a file into a dict . |
54,154 | def _get_key_value_config ( self ) : for rp in self . _remote_providers : val = self . _get_remote_config ( rp ) self . _kvstore = val return None raise errors . RemoteConfigError ( "No Files Found" ) | Retrieves the first found remote configuration . |
54,155 | def all_keys ( self , uppercase_keys = False ) : d = { } for k in self . _override . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _args . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _env . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _config . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _kvstore . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _defaults . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _aliases . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } return d . keys ( ) | Return all keys regardless where they are set . |
54,156 | def all_settings ( self , uppercase_keys = False ) : d = { } for k in self . all_keys ( uppercase_keys ) : d [ k ] = self . get ( k ) return d | Return all settings as a dict . |
54,157 | def debug ( self ) : print ( "Aliases:" ) pprint . pprint ( self . _aliases ) print ( "Override:" ) pprint . pprint ( self . _override ) print ( "Args:" ) pprint . pprint ( self . _args ) print ( "Env:" ) pprint . pprint ( self . _env ) print ( "Config:" ) pprint . pprint ( self . _config ) print ( "Key/Value Store:" ) pprint . pprint ( self . _kvstore ) print ( "Defaults:" ) pprint . pprint ( self . _defaults ) | Prints all configuration registries for debugging purposes . |
54,158 | def server ( ** kwargs ) : start_server ( ** { k : v for k , v in kwargs . items ( ) if v } , blocking = True ) | Starts the Clearly Server . |
54,159 | def start_server ( broker , backend = None , port = 12223 , max_tasks = 10000 , max_workers = 100 , blocking = False , debug = False ) : _setup_logging ( debug ) queue_listener_dispatcher = Queue ( ) listener = EventListener ( broker , queue_listener_dispatcher , backend = backend , max_tasks_in_memory = max_tasks , max_workers_in_memory = max_workers ) dispatcher = StreamingDispatcher ( queue_listener_dispatcher ) clearlysrv = ClearlyServer ( listener , dispatcher ) return _serve ( clearlysrv , port , blocking ) | Starts a Clearly Server programmatically . |
54,160 | def _event_to_pb ( event ) : if isinstance ( event , ( TaskData , Task ) ) : key , klass = 'task' , clearly_pb2 . TaskMessage elif isinstance ( event , ( WorkerData , Worker ) ) : key , klass = 'worker' , clearly_pb2 . WorkerMessage else : raise ValueError ( 'unknown event' ) keys = klass . DESCRIPTOR . fields_by_name . keys ( ) data = { k : v for k , v in getattr ( event , '_asdict' , lambda : { f : getattr ( event , f ) for f in event . _fields } ) ( ) . items ( ) if k in keys } return key , klass ( ** data ) | Supports converting internal TaskData and WorkerData as well as celery Task and Worker to proto buffers messages . |
54,161 | def filter_tasks ( self , request , context ) : _log_request ( request , context ) tasks_pattern , tasks_negate = PATTERN_PARAMS_OP ( request . tasks_filter ) state_pattern = request . state_pattern limit , reverse = request . limit , request . reverse pregex = re . compile ( tasks_pattern ) sregex = re . compile ( state_pattern ) def pcondition ( task ) : return accepts ( pregex , tasks_negate , task . name , task . routing_key ) def scondition ( task ) : return accepts ( sregex , tasks_negate , task . state ) found_tasks = ( task for _ , task in self . listener . memory . tasks_by_time ( limit = limit or None , reverse = reverse ) if pcondition ( task ) and scondition ( task ) ) def callback ( t ) : logger . debug ( '%s iterated %d tasks in %s (%s)' , self . filter_tasks . __name__ , t . count , t . duration_human , t . throughput_human ) for task in about_time ( callback , found_tasks ) : yield ClearlyServer . _event_to_pb ( task ) [ 1 ] | Filter tasks by matching patterns to name routing key and state . |
54,162 | def filter_workers ( self , request , context ) : _log_request ( request , context ) workers_pattern , workers_negate = PATTERN_PARAMS_OP ( request . workers_filter ) hregex = re . compile ( workers_pattern ) def hcondition ( worker ) : return accepts ( hregex , workers_negate , worker . hostname ) found_workers = ( worker for worker in sorted ( self . listener . memory . workers . values ( ) , key = WORKER_HOSTNAME_OP ) if hcondition ( worker ) ) def callback ( t ) : logger . debug ( '%s iterated %d workers in %s (%s)' , self . filter_workers . __name__ , t . count , t . duration_human , t . throughput_human ) for worker in about_time ( callback , found_workers ) : yield ClearlyServer . _event_to_pb ( worker ) [ 1 ] | Filter workers by matching a pattern to hostname . |
54,163 | def seen_tasks ( self , request , context ) : _log_request ( request , context ) result = clearly_pb2 . SeenTasksMessage ( ) result . task_types . extend ( self . listener . memory . task_types ( ) ) return result | Returns all seen task types . |
def reset_tasks(self, request, context):
    """Clear every captured task from server memory and reply with Empty."""
    _log_request(request, context)
    self.listener.memory.clear_tasks()
    return clearly_pb2.Empty()
def get_stats(self, request, context):
    """Report server statistics: event/task counters and stored object counts."""
    _log_request(request, context)
    memory = self.listener.memory
    return clearly_pb2.StatsMessage(
        task_count=memory.task_count,
        event_count=memory.event_count,
        len_tasks=len(memory.tasks),
        len_workers=len(memory.workers),
    )
def accepts(regex, negate, *values):
    """Decide whether any of *values* matches *regex*, honoring *negate*.

    A value counts as a match when it is truthy and the compiled pattern
    is found within it. The result is XOR'd with *negate*, so a negated
    filter accepts exactly when nothing matched.
    """
    matched = any(value and regex.search(value) for value in values)
    return matched != negate
def copy_update(pb_message, **kwds):
    """Return a copy of the protobuf *pb_message* with some fields replaced.

    The original message is left untouched; each keyword argument is set
    as a field on the duplicate.
    """
    duplicate = pb_message.__class__()
    duplicate.CopyFrom(pb_message)
    for field, value in kwds.items():
        setattr(duplicate, field, value)
    return duplicate
def __start(self):
    """Start the background dispatcher thread that forwards captured events.

    Must be called at most once; the assert guards against double-starting.
    """
    assert not self.dispatcher_thread
    self.dispatcher_thread = threading.Thread(target=self.__run_dispatcher,
                                              name='clearly-dispatcher')
    # daemon thread so the dispatcher never blocks interpreter shutdown.
    self.dispatcher_thread.daemon = True
    # set the running flag before start() so the dispatcher loop sees it.
    self.running = True
    self.dispatcher_thread.start()
def streaming_client(self, tasks_regex, tasks_negate, workers_regex, workers_negate):
    """Attach a client to the real-time capture stream.

    Registers a CapturingClient observer with the given task/worker filters,
    yields its event queue, and unregisters it afterwards. Presumably used
    as a context manager (decorator not visible in this chunk) — confirm.
    """
    cc = CapturingClient(Queue(),
                         re.compile(tasks_regex), tasks_negate,
                         re.compile(workers_regex), workers_negate)
    self.observers.append(cc)
    yield cc.queue
    # NOTE(review): if the consumer raises, this removal is skipped — a
    # try/finally around the yield would be safer; verify intended behavior.
    self.observers.remove(cc)
def __start(self):
    """Start the background listener thread that captures celery events.

    Blocks until the listener thread signals readiness via `_wait_event`,
    then clears the event so it can be reused.
    """
    assert not self._listener_thread
    self._listener_thread = threading.Thread(target=self.__run_listener,
                                             name='clearly-listener')
    # daemon thread so the listener never blocks interpreter shutdown.
    self._listener_thread.daemon = True
    self._listener_thread.start()
    # handshake: wait for the listener to report it is up before returning.
    self._wait_event.wait()
    self._wait_event.clear()
def capture(self, pattern=None, negate=False, workers=None, negate_workers=False,
            params=None, success=False, error=True, stats=False):
    """Start capturing selected events in real time, displaying them as they arrive.

    Args:
        pattern: regex filter for task events (defaults to '.', i.e. everything).
        negate: invert the task filter.
        workers: regex filter for worker events (defaults to '.').
        negate_workers: invert the worker filter.
        params, success, error: passed through to the task display helper.
        stats: passed through to the worker display helper.

    Runs until interrupted with Ctrl-C (KeyboardInterrupt is swallowed).
    """
    request = clearly_pb2.CaptureRequest(
        tasks_capture=clearly_pb2.PatternFilter(pattern=pattern or '.', negate=negate),
        workers_capture=clearly_pb2.PatternFilter(pattern=workers or '.', negate=negate_workers),
    )
    try:
        for realtime in self._stub.capture_realtime(request):
            if realtime.HasField('task'):
                ClearlyClient._display_task(realtime.task, params, success, error)
            elif realtime.HasField('worker'):
                ClearlyClient._display_worker(realtime.worker, stats)
            else:
                # unexpected payload: report it and stop the stream entirely.
                print('unknown event:', realtime)
                break
    except KeyboardInterrupt:
        pass
def tasks(self, pattern=None, negate=False, state=None, limit=None, reverse=True,
          params=None, success=False, error=True):
    """Fetch stored tasks matching the filters and display their current statuses."""
    task_filter = clearly_pb2.PatternFilter(pattern=pattern or '.', negate=negate)
    request = clearly_pb2.FilterTasksRequest(tasks_filter=task_filter,
                                             state_pattern=state or '.',
                                             limit=limit,
                                             reverse=reverse)
    stream = self._stub.filter_tasks(request)
    for task in about_time(ClearlyClient._fetched_callback, stream):
        ClearlyClient._display_task(task, params, success, error)
def seen_tasks(self):
    """Print every task type the server has seen, one per line."""
    response = self._stub.seen_tasks(clearly_pb2.Empty())
    print('\n'.join(response.task_types))
def detail_action(**kwargs):
    """Mark a ResourceBinding method for routing as a *detail* action.

    The decorated function is tagged with ``action``/``detail`` flags and
    the extra keyword arguments are stored for the router to consume.
    """
    def decorator(func):
        for attr, value in (('action', True), ('detail', True), ('kwargs', kwargs)):
            setattr(func, attr, value)
        return func
    return decorator
def list_action(**kwargs):
    """Mark a ResourceBinding method for routing as a *list* action.

    Same tagging as ``detail_action`` but with ``detail`` set to False.
    """
    def decorator(func):
        for attr, value in (('action', True), ('detail', False), ('kwargs', kwargs)):
            setattr(func, attr, value)
        return func
    return decorator
def broadcast_to(array, shape, subok=False):
    """Broadcast *array* to *shape* and return the broadcast result.

    Thin public wrapper over the internal `_broadcast_to`, pinning
    ``readonly=True`` so callers receive a read-only view.
    """
    return _broadcast_to(array, shape, subok=subok, readonly=True)
54,177 | def _K ( m ) : M = m * ( m - 1 ) // 2 K = np . zeros ( ( M , m ** 2 ) , dtype = np . int64 ) row = 0 for j in range ( 1 , m ) : col = ( j - 1 ) * m + j s = m - j K [ row : ( row + s ) , col : ( col + s ) ] = np . eye ( s ) row += s return K | matrix K_m from Wiktorsson2001 |
def wait_for_compactions(self, timeout=600):
    """Block until compactions finish on every running node; return self.

    Each node's running state is checked just before waiting on it, so
    nodes stopped while earlier waits were in progress are skipped.
    """
    for node in list(self.nodes.values()):
        if not node.is_running():
            continue
        node.wait_for_compactions(timeout)
    return self
def watch_log_for_alive(self, nodes, from_mark=None, timeout=720, filename='system.log'):
    """Watch this node's log until the given *nodes* are detected as marked UP.

    Pure delegation to the parent implementation; works analogously to
    ``watch_log_for_death``.
    """
    super(DseNode, self).watch_log_for_alive(nodes, from_mark=from_mark,
                                             timeout=timeout, filename=filename)
def load(path, name, cluster):
    """Load a node from its on-disk config files.

    Args:
        path: directory containing the node's folder.
        name: the node's name (folder name under *path*).
        cluster: the cluster object the node belongs to.

    Returns the created node; raises common.LoadError on missing keys.
    """
    node_path = os.path.join(path, name)
    filename = os.path.join(node_path, 'node.conf')
    with open(filename, 'r') as f:
        data = yaml.safe_load(f)
    try:
        itf = data['interfaces']
        # optional settings with their defaults.
        initial_token = None
        if 'initial_token' in data:
            initial_token = data['initial_token']
        cassandra_version = None
        if 'cassandra_version' in data:
            cassandra_version = LooseVersion(data['cassandra_version'])
        remote_debug_port = 2000
        if 'remote_debug_port' in data:
            remote_debug_port = data['remote_debug_port']
        # interfaces are stored as lists in YAML but used as tuples.
        binary_interface = None
        if 'binary' in itf and itf['binary'] is not None:
            binary_interface = tuple(itf['binary'])
        thrift_interface = None
        if 'thrift' in itf and itf['thrift'] is not None:
            thrift_interface = tuple(itf['thrift'])
        node = cluster.create_node(data['name'], data['auto_bootstrap'], thrift_interface,
                                   tuple(itf['storage']), data['jmx_port'], remote_debug_port,
                                   initial_token, save=False, binary_interface=binary_interface,
                                   byteman_port=data['byteman_port'],
                                   derived_cassandra_version=cassandra_version)
        node.status = data['status']
        if 'pid' in data:
            node.pid = int(data['pid'])
        # NOTE(review): the `node.__install_dir` style assignments rely on
        # Python name mangling, which only applies if this function is
        # defined inside the node's class body — confirm the enclosing scope.
        if 'install_dir' in data:
            node.__install_dir = data['install_dir']
        if 'config_options' in data:
            node.__config_options = data['config_options']
        if 'dse_config_options' in data:
            node._dse_config_options = data['dse_config_options']
        if 'environment_variables' in data:
            node.__environment_variables = data['environment_variables']
        if 'data_center' in data:
            node.data_center = data['data_center']
        if 'workloads' in data:
            node.workloads = data['workloads']
        return node
    except KeyError as k:
        raise common.LoadError("Error Loading " + filename + ", missing property: " + str(k))
def get_install_dir(self):
    """Return the cassandra source directory used by this node.

    A node-specific directory (validated before use) takes precedence;
    otherwise the cluster-wide install directory is returned.
    """
    if self.__install_dir is not None:
        common.validate_install_dir(self.__install_dir)
        return self.__install_dir
    return self.cluster.get_install_dir()
def set_install_dir(self, install_dir=None, version=None, verbose=False):
    """Point this node at a cassandra source directory; return self.

    Either an explicit *install_dir* (validated) or a *version* to be
    fetched/set up by ``node_setup`` may be given.
    """
    if version is None:
        self.__install_dir = install_dir
        if install_dir is not None:
            common.validate_install_dir(install_dir)
    else:
        self.__install_dir = self.node_setup(version, verbose=verbose)
    # derive the cassandra version from the actual build on disk.
    self._cassandra_version = common.get_version_from_build(self.__install_dir, cassandra=True)
    # thrift was removed in Cassandra 4.0, so drop that interface.
    if self.get_base_cassandra_version() >= 4.0:
        self.network_interfaces['thrift'] = None
    self.import_config_files()
    self.import_bin_files()
    # config must be regenerated before the next start.
    self.__conf_updated = False
    return self
def show(self, only_status=False, show_cluster=True):
    """Print info on this node's configuration.

    Args:
        only_status: print only the name/status line.
        show_cluster: include the owning cluster's name in the detail view.
    """
    self.__update_status()
    # align detail lines under the "name: " prefix.
    # (was a needless ''.join over an xrange-built list of spaces)
    indent = ' ' * (len(self.name) + 2)
    print_("{}: {}".format(self.name, self.__get_status_string()))
    if not only_status:
        if show_cluster:
            print_("{}{}={}".format(indent, 'cluster', self.cluster.name))
        print_("{}{}={}".format(indent, 'auto_bootstrap', self.auto_bootstrap))
        # thrift/binary interfaces may be absent (e.g. thrift on C* >= 4.0).
        if self.network_interfaces['thrift'] is not None:
            print_("{}{}={}".format(indent, 'thrift', self.network_interfaces['thrift']))
        if self.network_interfaces['binary'] is not None:
            print_("{}{}={}".format(indent, 'binary', self.network_interfaces['binary']))
        print_("{}{}={}".format(indent, 'storage', self.network_interfaces['storage']))
        print_("{}{}={}".format(indent, 'jmx_port', self.jmx_port))
        print_("{}{}={}".format(indent, 'remote_debug_port', self.remote_debug_port))
        print_("{}{}={}".format(indent, 'byteman_port', self.byteman_port))
        print_("{}{}={}".format(indent, 'initial_token', self.initial_token))
        if self.pid:
            print_("{}{}={}".format(indent, 'pid', self.pid))
def is_running(self):
    """Refresh this node's status and return True when it is UP or DECOMMISSIONED."""
    self.__update_status()
    return self.status in (Status.UP, Status.DECOMMISSIONED)
def grep_log(self, expr, filename='system.log', from_mark=None):
    """Return (line, match) pairs for every Cassandra log line matching *expr*.

    Optionally seeks to *from_mark* (a byte offset) before scanning.
    """
    pattern = re.compile(expr)
    log_file = os.path.join(self.get_path(), 'logs', filename)
    hits = []
    with open(log_file) as f:
        if from_mark:
            f.seek(from_mark)
        for line in f:
            match = pattern.search(line)
            if match:
                hits.append((line, match))
    return hits
def wait_for_binary_interface(self, **kwargs):
    """Wait for the binary (CQL) interface to start listening.

    On clusters >= 1.2 first waits for the "Starting listening for CQL
    clients" log line (kwargs go to ``watch_log_for``). Then checks the
    socket for up to 30 seconds and warns — does not raise — if it never
    starts listening.
    """
    if self.cluster.version() >= '1.2':
        self.watch_log_for("Starting listening for CQL clients", **kwargs)
    binary_itf = self.network_interfaces['binary']
    if not common.check_socket_listening(binary_itf, timeout=30):
        warnings.warn("Binary interface %s:%s is not listening after 30 seconds, node may have failed to start." % (binary_itf[0], binary_itf[1]))
def wait_for_thrift_interface(self, **kwargs):
    """Wait for the Thrift interface to start listening.

    No-op on clusters >= 4 (Thrift no longer exists there). Otherwise
    waits for the log line, then checks the socket for up to 30 seconds
    and warns — does not raise — if it never starts listening.
    """
    if self.cluster.version() >= '4':
        return
    self.watch_log_for("Listening for thrift clients...", **kwargs)
    thrift_itf = self.network_interfaces['thrift']
    if not common.check_socket_listening(thrift_itf, timeout=30):
        warnings.warn("Thrift interface {}:{} is not listening after 30 seconds, node may have failed to start.".format(thrift_itf[0], thrift_itf[1]))
def wait_for_compactions(self, timeout=120):
    """Poll `nodetool compactionstats` until this node reports no pending tasks.

    Raises TimeoutError when compactions are still pending after *timeout*
    seconds.
    """
    zero_pending = re.compile("pending tasks: 0")
    deadline = time.time() + timeout
    while time.time() < deadline:
        output, err, rc = self.nodetool("compactionstats")
        if zero_pending.search(output):
            return
        time.sleep(1)
    raise TimeoutError("{} [{}] Compactions did not finish in {} seconds".format(
        time.strftime("%d %b %Y %H:%M:%S", time.gmtime()), self.name, timeout))
def update_startup_byteman_script(self, byteman_startup_script):
    """Replace the byteman rule that gets injected before the node starts.

    Raises:
        common.LoadError: when byteman was not set up for this node
            (note the port is stored and compared as the string '0').
    """
    if self.byteman_port == '0':
        raise common.LoadError('Byteman is not installed')
    self.byteman_startup_script = byteman_startup_script
    # presumably rewrites the node's config files so the new script takes
    # effect on next start — confirm against import_config_files.
    self.import_config_files()
def _find_cmd(self, cmd):
    """Locate *cmd* under the cassandra install root and ensure it is executable.

    From Cassandra 2.1 the tools live under tools/bin; older versions keep
    them in bin/. chmod failures are reported via warnings but not fatal.

    Returns the full path to the command.
    """
    cdir = self.get_install_cassandra_root()
    if self.get_base_cassandra_version() >= 2.1:
        fcmd = common.join_bin(cdir, os.path.join('tools', 'bin'), cmd)
    else:
        fcmd = common.join_bin(cdir, 'bin', cmd)
    try:
        if os.path.exists(fcmd):
            # rwxr-xr-x: make the tool runnable by everyone.
            os.chmod(fcmd, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
                     stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed while keeping the best-effort intent.
        common.warning("Couldn't change permissions to use {0}.".format(cmd))
        common.warning("If it didn't work, you will have to do so manually.")
    return fcmd
def data_size(self, live_data=None):
    """Return this node's data size in KB, parsed from `nodetool info` output."""
    if live_data is not None:
        # the flag no longer changes behavior; warn callers still passing it.
        warnings.warn("The 'live_data' keyword argument is deprecated.",
                      DeprecationWarning)
    info_output = self.nodetool('info')[0]
    return _get_load_from_info_output(info_output)
def get_sstable_data_files(self, ks, table):
    """List the final sstable -Data.db files for *ks*.*table*, sorted.

    Uses sstableutil with --type final so temporary files are ignored.
    """
    proc = self.get_sstable_data_files_process(ks=ks, table=table)
    out, _, _ = handle_external_tool_process(proc, ["sstableutil", '--type', 'final', ks, table])
    return sorted(name for name in out.splitlines() if name.endswith('-Data.db'))
def is_modern_windows_install(version):
    """Return True on Windows with Cassandra >= 2.1.

    2.1 is the release line where Cassandra gained beta Windows support;
    several features are gated on this check.
    """
    return is_win() and LooseVersion(str(version)) >= LooseVersion('2.1')
def get_jdk_version():
    """Return the Java version parsed from `java -version` output.

    Exits the process (after printing an error) when the java binary
    cannot be found on the PATH.
    """
    try:
        # java prints its version banner on stderr, so capture both streams.
        version = subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT)
    except OSError:
        print_("ERROR: Could not find java. Is it in your path?")
        exit(1)
    return _get_jdk_version(version)
def wait_for_any_log(nodes, pattern, timeout, filename='system.log', marks=None):
    """Return the first node whose log matches *pattern* within *timeout* seconds.

    *marks* optionally maps node -> log offset to start searching from.
    Polls once per second; raises TimeoutError when nothing matched in time.
    """
    if marks is None:
        marks = {}
    for _ in range(timeout):
        for node in nodes:
            hits = node.grep_log(pattern, filename=filename,
                                 from_mark=marks.get(node, None))
            if hits:
                return node
        time.sleep(1)
    raise TimeoutError(time.strftime("%d %b %Y %H:%M:%S", time.gmtime()) +
                       " Unable to find: " + repr(pattern) +
                       " in any node log within " + str(timeout) + "s")
def download_version(version, url=None, verbose=False, binary=False):
    """Download, extract and (for source archives) build a Cassandra tarball.

    Args:
        version: the Cassandra version to fetch.
        url: explicit archive URL overriding the computed one.
        verbose: show download progress / build output.
        binary: fetch a -bin tarball instead of -src (skips compilation).

    Raises:
        ArgumentError: on bad version/url or un-extractable download.
        CCMError: when compiling the source tree fails.
    """
    assert_jdk_valid_for_cassandra_version(version)
    # allow overriding the default mirror via the ccm config file.
    archive_url = ARCHIVE
    if CCM_CONFIG.has_option('repositories', 'cassandra'):
        archive_url = CCM_CONFIG.get('repositories', 'cassandra')
    if binary:
        archive_url = "%s/%s/apache-cassandra-%s-bin.tar.gz" % (archive_url, version.split('-')[0], version) if url is None else url
    else:
        archive_url = "%s/%s/apache-cassandra-%s-src.tar.gz" % (archive_url, version.split('-')[0], version) if url is None else url
    _, target = tempfile.mkstemp(suffix=".tar.gz", prefix="ccm-")
    try:
        __download(archive_url, target, show_progress=verbose)
        common.info("Extracting {} as version {} ...".format(target, version))
        tar = tarfile.open(target)
        # the archive's single top-level directory name.
        dir = tar.next().name.split("/")[0]
        tar.extractall(path=__get_dir())
        tar.close()
        # replace any previous copy of this version.
        target_dir = os.path.join(__get_dir(), version)
        if os.path.exists(target_dir):
            rmdirs(target_dir)
        shutil.move(os.path.join(__get_dir(), dir), target_dir)
        if binary:
            # marker file recording which version this binary install is.
            with open(os.path.join(target_dir, '0.version.txt'), 'w') as f:
                f.write(version)
        else:
            compile_version(version, target_dir, verbose=verbose)
    except urllib.error.URLError as e:
        msg = "Invalid version {}".format(version) if url is None else "Invalid url {}".format(url)
        msg = msg + " (underlying error is: {})".format(str(e))
        raise ArgumentError(msg)
    except tarfile.ReadError as e:
        raise ArgumentError("Unable to uncompress downloaded file: {}".format(str(e)))
    except CCMError as e:
        # build failed: clean up the half-installed tree (best effort).
        try:
            rmdirs(target_dir)
            common.error("Deleted {} due to error".format(target_dir))
        except:
            raise CCMError("Building C* version {} failed. Attempted to delete {} but failed. This will need to be manually deleted".format(version, target_dir))
        raise e
def get_tagged_version_numbers(series='stable'):
    """Fetch git tags from GitHub and return version strings for a release series.

    Args:
        series: 'testing' (suffixed pre-release tags), 'stable' (the newest
            major.minor line) or 'oldstable' (the previous line).

    Returns a list of version strings sorted newest-first.
    """
    releases = []
    if series == 'testing':
        # testing tags carry a suffix, e.g. cassandra-3.0.0-rc1
        tag_regex = re.compile('^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+-.*$)')
    else:
        tag_regex = re.compile('^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+$)')
    tag_url = urllib.request.urlopen(GITHUB_TAGS)
    for ref in (i.get('ref', '') for i in json.loads(tag_url.read())):
        m = tag_regex.match(ref)
        if m:
            releases.append(LooseVersion(m.groups()[0]))
    releases.sort(reverse=True)
    # the newest release defines the "stable" major.minor line.
    stable_major_version = LooseVersion(str(releases[0].version[0]) + "." + str(releases[0].version[1]))
    stable_releases = [r for r in releases if r >= stable_major_version]
    oldstable_releases = [r for r in releases if r not in stable_releases]
    # the newest remaining release defines the "oldstable" line.
    oldstable_major_version = LooseVersion(str(oldstable_releases[0].version[0]) + "." + str(oldstable_releases[0].version[1]))
    oldstable_releases = [r for r in oldstable_releases if r >= oldstable_major_version]
    if series == 'testing':
        return [r.vstring for r in releases]
    elif series == 'stable':
        return [r.vstring for r in stable_releases]
    elif series == 'oldstable':
        return [r.vstring for r in oldstable_releases]
    else:
        raise AssertionError("unknown release series: {series}".format(series=series))
def __connect(host, port, username, password, private_key):
    """Establish an SSH connection to *host* and return the connected client.

    When *private_key* is a path, the key is loaded from that file
    (decrypted with *password* when one is supplied; *password* may be
    None for an unencrypted key) and used for authentication.
    Unknown host keys are accepted automatically.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    if private_key is not None:
        # the original if/elif branches performed this exact same call
        # whether or not a password was present; collapsed into one.
        private_key = paramiko.RSAKey.from_private_key_file(private_key, password)
    # the former try/except merely re-raised the caught exception (a no-op
    # that only mangled the traceback); removed.
    ssh.connect(host, port, username, password, private_key)
    return ssh
def execute_ccm_command(self, ccm_args, is_displayed=True):
    """Run a CCM command on the remote server using the configured profile."""
    full_command = ["ccm"] + ccm_args
    return self.execute(full_command, profile=self.profile)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.