idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,700
def eliminate_repeats(text):
    """Return the unique, letters-only words of *text* (lowercased), minus stopwords."""
    bannedwords = read_file('stopwords.txt')
    lower = 'abcdefghijklmnopqrstuvwxyz'
    letters = set(lower) | set(lower.upper())
    standardwords = []
    for token in text.split():
        # keep only alphabetic characters of the token
        cleaned = ''.join(ch for ch in token if ch in letters)
        if cleaned != '' and cleaned not in standardwords and cleaned not in bannedwords:
            standardwords.append(cleaned)
    return map(lambda w: w.lower(), standardwords)
Returns a list of words that occur in the text . Eliminates stopwords .
56,701
def wordcount(text):
    """Return a dict mapping each non-stopword of *text* to its occurrence count."""
    bannedwords = read_file('stopwords.txt')
    counts = {}
    for word in separate(text):
        if word in bannedwords:
            continue
        # dict.has_key() was removed in Python 3; the `in`-based .get() form
        # is portable across both versions and avoids the double lookup.
        counts[word] = counts.get(word, 0) + 1
    return counts
Returns the count of the words in a file .
56,702
def tuplecount(text):
    """Return (word, count) tuples for *text*, most frequent first."""
    worddict = wordcount(text)
    pairs = [(word, worddict[word]) for word in worddict.keys()]
    # reversed(sorted(...)) rather than sorted(..., reverse=True): ties come
    # out in the opposite of their insertion order, matching the original.
    return list(reversed(sorted(pairs, key=lambda pair: pair[1])))
Changes a dictionary into a list of tuples .
56,703
def get_file_md5(filename):
    """Return the hex MD5 digest of *filename*, or '' when it does not exist."""
    if not os.path.exists(filename):
        return ''
    try:
        digest = hashlib.md5()
    except BaseException:
        # FIPS-enabled builds may refuse plain md5(); ask for a non-security use.
        digest = hashlib.new('md5', usedForSecurity=False)
    with open(filename, 'rb') as handle:
        # stream in 64 KiB chunks so large files never load fully into memory
        for chunk in iter(lambda: handle.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()
Get a file's MD5.
56,704
def get_md5(string):
    """Return the hex MD5 digest of *string* (bytes on Python 3)."""
    try:
        digest = hashlib.md5()
    except BaseException:
        # fallback for FIPS-restricted interpreters
        digest = hashlib.new('md5', usedForSecurity=False)
    digest.update(string)
    return digest.hexdigest()
Get a string's MD5.
56,705
def deploy_signature(source, dest, user=None, group=None):
    """Deploy a signature file.

    Moves *source* to *dest*, makes it world-readable, and optionally hands
    ownership to *user*/*group* (silently ignored when the lookup or chown
    fails).
    """
    move(source, dest)
    # 0o644 replaces the Python-2-only literal 0644: same mode bits, and the
    # 0o form is valid on both Python 2.6+ and Python 3.
    os.chmod(dest, 0o644)
    if user and group:
        try:
            uid = pwd.getpwnam(user).pw_uid
            gid = grp.getgrnam(group).gr_gid
            os.chown(dest, uid, gid)
        except (KeyError, OSError):
            # unknown user/group or insufficient privilege: keep current owner
            pass
Deploy a signature file.
56,706
def get_local_version(sigdir, sig):
    """Get the local version of a signature.

    Runs ``sigtool -i <sigdir>/<sig>.cvd`` and scans stdout for a
    ``Version:`` line; returns that version token as a string, or None when
    the .cvd file is absent or no version line appears.
    """
    version = None
    filename = os.path.join(sigdir, '%s.cvd' % sig)
    if os.path.exists(filename):
        cmd = ['sigtool', '-i', filename]
        sigtool = Popen(cmd, stdout=PIPE, stderr=PIPE)
        while True:
            line = sigtool.stdout.readline()
            # NOTE(review): on Python 3 these pipe reads yield bytes, so
            # startswith('Version:') would always be False -- this code
            # appears to assume Python 2 / text-mode pipes; confirm before porting.
            if line and line.startswith('Version:'):
                version = line.split()[1]
                break
            if not line:
                # EOF without a Version: line
                break
        sigtool.wait()
    return version
Get the local version of a signature
56,707
def verify_sigfile(sigdir, sig):
    """Verify a signature file: True when ``sigtool -i`` exits with status 0."""
    checker = Popen(['sigtool', '-i', '%s/%s.cvd' % (sigdir, sig)],
                    stdout=PIPE, stderr=PIPE)
    return checker.wait() == 0
Verify a signature file
56,708
def check_download(obj, *args, **kwargs):
    """Verify a downloaded signature; raise ValueError when it fails checks."""
    version, workdir, signame = args[0], args[1], args[2]
    if not version:
        return
    local_version = get_local_version(workdir, signame)
    # keep verify_sigfile first so it always runs, as in the original flow
    if not verify_sigfile(workdir, signame) or version != local_version:
        error("[-] \033[91mFailed to verify signature: %s from: %s\033[0m"
              % (signame, obj.url))
        raise ValueError('Failed to verify signature: %s' % signame)
Verify a download
56,709
def download_sig(opts, sig, version=None):
    """Download a signature from the mirror host.

    Fetches <sig>.cvd when *version* is given, otherwise <sig>.cdiff, into
    opts.workdir.  Returns (downloaded, http_status_code).

    Bug fix: a request exception previously fell through to ``req.data``
    and raised NameError; it now logs and returns (False, None).
    """
    code = None
    downloaded = False
    useagent = 'ClamAV/0.101.1 (OS: linux-gnu, ARCH: x86_64, CPU: x86_64)'
    manager = PoolManager(
        headers=make_headers(user_agent=useagent),
        cert_reqs='CERT_REQUIRED',
        ca_certs=certifi.where(),
        timeout=Timeout(connect=10.0, read=60.0))
    if version:
        path = '/%s.cvd' % sig
        filename = os.path.join(opts.workdir, '%s.cvd' % sig)
    else:
        path = '/%s.cdiff' % sig
        filename = os.path.join(opts.workdir, '%s.cdiff' % sig)
    try:
        req = manager.request('GET', 'http://%s%s' % (opts.hostname, path))
    except BaseException as msg:
        error("Request error: %s" % msg)
        # without this early return the code below would hit an undefined `req`
        return downloaded, code
    data = req.data
    code = req.status
    if req.status == 200:
        with open(filename, 'w') as handle:
            handle.write(data)
        downloaded = os.path.exists(filename)
    return downloaded, code
Download signature from hostname
56,710
def copy_sig(sig, opts, isdiff):
    """Deploy a sig: copy it from the work dir into the mirror dir."""
    info("[+] \033[92mDeploying signature:\033[0m %s" % sig)
    ext = 'cdiff' if isdiff else 'cvd'
    sourcefile = os.path.join(opts.workdir, '%s.%s' % (sig, ext))
    destfile = os.path.join(opts.mirrordir, '%s.%s' % (sig, ext))
    deploy_signature(sourcefile, destfile, opts.user, opts.group)
    info("=> Deployed signature: %s" % sig)
Deploy a sig
56,711
def create_dns_file(opts, record):
    """Rewrite dns.txt when its MD5 no longer matches the remote *record*."""
    info("[+] \033[92mUpdating dns.txt file\033[0m")
    filename = os.path.join(opts.mirrordir, 'dns.txt')
    localmd5 = get_file_md5(filename)
    remotemd5 = get_md5(record)
    if localmd5 == remotemd5:
        info("=> No update required L: %s => R: %s" % (localmd5, remotemd5))
    else:
        create_file(filename, record)
        info("=> dns.txt file updated")
Create the DNS record file
56,712
def download_diffs(queue):
    """Worker loop: download any cdiff files missing from the mirror dir."""
    while True:
        options, signature_type, localver, remotever = queue.get()
        for version in range(int(localver), int(remotever) + 1):
            sig_diff = '%s-%d' % (signature_type, version)
            target = os.path.join(options.mirrordir, '%s.cdiff' % sig_diff)
            if not os.path.exists(target):
                update_diff(options, sig_diff)
        queue.task_done()
Download the cdiff files
56,713
def work(options):
    """The work functions: orchestrate a full mirror update.

    Reads the remote DNS TXT record, starts daemon worker threads for cdiff
    and full-signature downloads, queues work per signature type, waits for
    both queues to drain, refreshes dns.txt, then exits the process.
    """
    record = get_record(options)
    # TXT record fields (colon separated); only 4 of the 8 are used here
    _, mainv, dailyv, _, _, _, safebrowsingv, bytecodev = record.split(':')
    versions = {'main': mainv, 'daily': dailyv,
                'safebrowsing': safebrowsingv, 'bytecode': bytecodev}
    dqueue = Queue(maxsize=0)
    dqueue_workers = 3
    info("[+] \033[92mStarting workers\033[0m")
    for index in range(dqueue_workers):
        info("=> Starting diff download worker: %d" % (index + 1))
        worker = Thread(target=download_diffs, args=(dqueue,))
        # daemon threads die with the main process, so sys.exit below is safe
        worker.setDaemon(True)
        worker.start()
    mqueue = Queue(maxsize=0)
    mqueue_workers = 4
    for index in range(mqueue_workers):
        info("=> Starting signature download worker: %d" % (index + 1))
        worker = Thread(target=update_sig, args=(mqueue,))
        worker.setDaemon(True)
        worker.start()
    for signature_type in ['main', 'daily', 'bytecode', 'safebrowsing']:
        # 'main' has no cdiffs; only queue diff downloads for the other types
        if signature_type in ['daily', 'bytecode', 'safebrowsing']:
            localver = get_local_version(options.mirrordir, signature_type)
            remotever = versions[signature_type]
            if localver is not None:
                dqueue.put((options, signature_type, localver, remotever))
        mqueue.put((options, signature_type, versions))
    info("=> Waiting on workers to complete tasks")
    dqueue.join()
    mqueue.join()
    info("=> Workers done processing queues")
    create_dns_file(options, record)
    sys.exit(0)
The work functions
56,714
def copy_resource(src, dest):
    """To copy package data to destination.

    Recursively copies resource *src* from the installed package into
    *dest*; existing files and .pyc files are skipped.
    """
    package_name = "yass"
    dest = (dest + "/" + os.path.basename(src)).rstrip("/")
    if pkg_resources.resource_isdir(package_name, src):
        if not os.path.isdir(dest):
            os.makedirs(dest)
        # NOTE(review): the isdir test uses package_name ("yass") but the
        # listing/reading below use __name__ -- presumably both resolve to
        # the same package; confirm, otherwise nested lookups may diverge.
        for res in pkg_resources.resource_listdir(__name__, src):
            copy_resource(src + "/" + res, dest)
    else:
        # never overwrite an existing file; skip compiled artifacts
        if not os.path.isfile(dest) and os.path.splitext(src)[1] not in [".pyc"]:
            with open(dest, "wb") as f:
                f.write(pkg_resources.resource_string(__name__, src))
        else:
            print("File exists: %s " % dest)
To copy package data to destination
56,715
def publish(endpoint, purge_files, rebuild_manifest, skip_upload):
    """Publish the site to the configured hosting endpoint.

    Only the "s3" target is handled: ensures the bucket exists, optionally
    rebuilds the manifest and purges stale files, then uploads the build dir.

    :raises ValueError: when the site name or endpoint config is missing.
    """
    print("Publishing site to %s ..." % endpoint.upper())
    yass = Yass(CWD)
    target = endpoint.lower()
    sitename = yass.sitename
    if not sitename:
        raise ValueError("Missing site name")
    # `endpoint` is rebound from the CLI arg to the hosting config section
    endpoint = yass.config.get("hosting.%s" % target)
    if not endpoint:
        raise ValueError("%s endpoint is missing in the config" % target.upper())
    if target == "s3":
        p = publisher.S3Website(sitename=sitename,
                                aws_access_key_id=endpoint.get("aws_access_key_id"),
                                aws_secret_access_key=endpoint.get("aws_secret_access_key"),
                                region=endpoint.get("aws_region"))
        if not p.website_exists:
            print(">>>")
            print("Setting S3 site...")
            if p.create_website() is True:
                # give S3 a moment before creating the www alias bucket
                time.sleep(10)
                p.create_www_website()
                print("New bucket created: %s" % p.sitename)
        if rebuild_manifest:
            print(">>>")
            print("Rebuilding site's manifest...")
            p.create_manifest_from_s3_files()
        if purge_files is True or endpoint.get("purge_files") is True:
            print(">>>")
            print("Purging files...")
            exclude_files = endpoint.get("purge_exclude_files", [])
            p.purge_files(exclude_files=exclude_files)
        if not skip_upload:
            print(">>>")
            print("Uploading your site...")
            p.upload(yass.build_dir)
        else:
            print(">>>")
            print("WARNING: files upload was skipped because of the use of --skip-upload")
        print("")
        print("Yass! Your site has been successfully published to: ")
        print(p.website_endpoint_url)
        footer()
Publish the site
56,716
def setup_dns(endpoint):
    """Setup the site domain to route to the static site.

    Only the "s3" target (AWS Route53) is handled.

    :raises ValueError: when the site name or endpoint config is missing.
    """
    print("Setting up DNS...")
    yass = Yass(CWD)
    target = endpoint.lower()
    sitename = yass.sitename
    if not sitename:
        raise ValueError("Missing site name")
    # `endpoint` is rebound from the CLI arg to the hosting config section
    endpoint = yass.config.get("hosting.%s" % target)
    if not endpoint:
        raise ValueError("%s endpoint is missing in the hosting config" % target.upper())
    if target == "s3":
        p = publisher.S3Website(sitename=sitename,
                                aws_access_key_id=endpoint.get("aws_access_key_id"),
                                aws_secret_access_key=endpoint.get("aws_secret_access_key"),
                                region=endpoint.get("aws_region"))
        print("Setting AWS Route53 for: %s ..." % p.sitename)
        p.setup_dns()
        print("")
        print("Yass! Route53 setup successfully!")
        print("You can now visit the site at :")
        print(p.sitename_endpoint)
        footer()
Setup site domain to route to static site
56,717
def create_site(sitename):
    """Create a new site directory, seed it from the skeleton, and stamp it."""
    sitepath = os.path.join(CWD, sitename)
    if os.path.isdir(sitepath):
        print("Site directory '%s' exists already!" % sitename)
    else:
        print("Creating site: %s..." % sitename)
        os.makedirs(sitepath)
        copy_resource("skel/", sitepath)
        stamp_yass_current_version(sitepath)
        print("Site created successfully!")
        print("CD into '%s' and run 'yass serve' to view the site" % sitename)
    footer()
Create a new site directory and init Yass
56,718
def init():
    """Initialize Yass in the current directory, unless already initialized."""
    yass_conf = os.path.join(CWD, "yass.yml")
    already_initialized = os.path.isfile(yass_conf)
    if already_initialized:
        for line in ("::ALERT::",
                     "It seems like Yass is already initialized here.",
                     "If it's a mistake, delete 'yass.yml' in this directory"):
            print(line)
    else:
        print("Init Yass in %s ..." % CWD)
        copy_resource("skel/", CWD)
        stamp_yass_current_version(CWD)
        print("Yass init successfully!")
        print("Run 'yass serve' to view the site")
    footer()
Initialize Yass in the current directory
56,719
def create_page(pagename):
    """Create a new page; omit the extension and it is created as a .jade file.

    Validates the suffix against PAGE_FORMAT, picks the markup type from the
    suffix, writes TPL_HEADER + TPL_BODY[markup] into the pages dir, and
    refuses to overwrite an existing file.
    """
    page = pagename.lstrip("/").rstrip("/")
    _, _ext = os.path.splitext(pagename)
    # default to jade when no extension was given
    if not _ext or _ext == "":
        page += ".jade"
    if not page.endswith(PAGE_FORMAT):
        error("Can't create '%s'" % page)
        print("Invalid filename format")
        print("Filename must be in: '%s'" % " | ".join(PAGE_FORMAT))
    else:
        engine = Yass(CWD)
        # markup defaults to jade; overridden by a recognised suffix
        markup = "jade"
        if page.endswith(".md"):
            markup = "md"
        if page.endswith(".html"):
            markup = "html"
        dest_file = os.path.join(engine.pages_dir, page)
        dest_dir = os.path.dirname(dest_file)
        content = TPL_HEADER
        content += TPL_BODY[markup]
        if os.path.isfile(dest_file):
            error("File exists already")
            print("Location: %s" % dest_file)
        else:
            if not os.path.isdir(dest_dir):
                os.makedirs(dest_dir)
            with open(dest_file, "w") as f:
                f.write(content)
            print("New page created: '%s'" % page)
            print("Location: %s" % dest_file)
    footer()
Create a new page. Omit the extension and it will be created as a .jade file.
56,720
def serve(port, no_livereload, open_url):
    """Serve the site locally, optionally with livereload.

    CLI arguments left as None fall back to the local_server.* config keys.
    """
    engine = Yass(CWD)
    if not port:
        port = engine.config.get("local_server.port", 8000)
    if no_livereload is None:
        # config livereload=False means "no livereload" => flag True
        no_livereload = True if engine.config.get("local_server.livereload") is False else False
    if open_url is None:
        open_url = False if engine.config.get("local_server.open_url") is False else True
    print("Serving at %s" % port)
    print("Livereload is %s" % ("OFF" if no_livereload else "ON"))

    def build_static():
        engine.build_static()

    def build_pages():
        engine.build_pages()

    engine.build()
    server = Server()
    if no_livereload is False:
        # rebuild the relevant part of the site whenever its sources change
        server.watch(engine.static_dir + "/", build_static)
        server.watch(engine.pages_dir + "/", build_pages)
        server.watch(engine.templates_dir + "/", build_pages)
        server.watch(engine.data_dir + "/", build_pages)
    server.serve(open_url_delay=open_url, port=port, root=engine.build_dir)
Serve the site
56,721
def get_map_location(self):
    """Get the player's position converted to world (continent) coordinates."""
    map_data = self.get_map()
    (bounds_e, bounds_n), (bounds_w, bounds_s) = map_data["continent_rect"]
    (map_e, map_n), (map_w, map_s) = map_data["map_rect"]
    assert bounds_w < bounds_e
    assert bounds_n < bounds_s
    assert map_w < map_e
    assert map_n < map_s
    meters_to_inches = 39.3701
    x, y, z = self.fAvatarPosition

    def interpolate(value, lo, hi, out_lo, out_hi):
        # linear map from [lo, hi] into [out_lo, out_hi]
        return out_lo + (value - lo) / (hi - lo) * (out_hi - out_lo)

    map_x = interpolate(x * meters_to_inches, map_w, map_e, bounds_w, bounds_e)
    map_y = interpolate(-z * meters_to_inches, map_n, map_s, bounds_n, bounds_s)
    map_z = y * meters_to_inches
    return map_x, map_y, map_z
Get the location of the player converted to world coordinates .
56,722
def CreateVertices(self, points):
    """Build a digraph whose vertices are the (z, x, Q) triples in *points*."""
    graph = digraph()
    for z, x, Q in points:
        graph.add_nodes([(z, x, Q)])
    return graph
Returns a dictionary object with keys that are 2-tuples representing a point.
56,723
def GetFarthestNode(self, gr, node):
    """node is start node.

    Runs Bellman-Ford from *node* over *gr* and returns the node whose
    stored distance is smallest.
    NOTE(review): despite the name, the `<` comparison selects the MINIMUM
    distance entry -- verify whether `>` (true farthest) was intended, or
    whether the distance values are negated/inverted upstream.
    """
    # shortest_path_bellman_ford returns (parents, distances); keep distances
    distance = minmax.shortest_path_bellman_ford(gr, node)[1]
    min_key = None
    # iteritems() is Python-2-only; this module evidently targets Python 2
    for key, value in distance.iteritems():
        if min_key is None or value < distance[min_key]:
            min_key = key
    return min_key
node is start node
56,724
def on_success(self, fn, *args, **kwargs):
    """Register *fn* to run when the deferred succeeds; fire now if it already has."""
    self._callbacks.append((fn, args, kwargs))
    already = self._resulted_in
    if already is not _NOTHING_YET:
        # the result arrived before this registration: replay it immediately
        self._succeed(result=already)
Call the given callback if or when the connected deferred succeeds .
56,725
def _succeed ( self , result ) : for fn , args , kwargs in self . _callbacks : fn ( result , * args , ** kwargs ) self . _resulted_in = result
Fire the success chain .
56,726
def fetch_config(filename):
    """Fetch the configuration schema *filename* from the json/ dir as a dict."""
    dir_name = get_source_dir()
    config_path = os.path.join(dir_name, os.path.join('json', filename))
    # `with` fixes the original's file-handle leak (the file was never closed)
    with open(config_path, 'r') as fileobj:
        return json.loads(fileobj.read())
Fetch the Configuration schema information
56,727
def populate_args_level(schema, parser):
    """Use a JSON schema's properties to populate a command-line argument parser.

    Adds one ``--<key>`` option per property (except "name"), mapping JSON
    schema types to Python types.  Fixed-size numeric arrays become
    ``nargs=N`` float options; nested objects are skipped.

    :raises NotImplementedError: for arrays whose items are not numbers.
    """
    # .items() replaces the Python-2-only .iteritems(); works on 2 and 3.
    for key, value in schema['properties'].items():
        if key == 'name':
            continue
        arg = '--%s' % key
        desc = value['description']
        if 'type' not in value:
            continue
        if value['type'] == 'string':
            if 'enum' in value:
                parser.add_argument(arg, help=desc, type=str, choices=value['enum'])
            else:
                parser.add_argument(arg, help=desc, type=str)
        elif value['type'] == 'number':
            parser.add_argument(arg, help=desc, type=float)
        elif value['type'] == 'integer':
            parser.add_argument(arg, help=desc, type=int)
        elif str(value['type']) == 'array':
            # only fixed-length float arrays are supported
            assert value['minItems'] == value['maxItems']
            if value['items']['type'] != 'number':
                raise NotImplementedError("Only float arrays work")
            parser.add_argument(arg, help=desc, type=float,
                                nargs=value['maxItems'], metavar='N')
        elif value['type'] == 'object':
            # nested objects are handled elsewhere (or ignored)
            pass
Use a schema to populate a command line argument parser
56,728
def set_json(self, config_json):
    """Permanently set the JSON configuration (validated; one-shot)."""
    if self.configuration_dict is not None:
        raise RuntimeError("Can only set configuration once",
                           self.configuration_dict)
    schema = fetch_config('ConfigurationSchema.json')
    validictory.validate(config_json, schema)
    # stamp run metadata and the standard directory layout into the config
    config_json['name'] = self.name
    config_json['run_number'] = self.run
    config_json['src_dir'] = get_source_dir()
    config_json['data_dir'] = get_data_dir()
    config_json['log_dir'] = get_log_dir()
    self.configuration_dict = config_json
Permanently set the JSON configuration
56,729
def file_strip_ext(afile, skip_version=False, only_known_extensions=False, allow_subformat=True):
    """Strip in the best way the extension from a filename."""
    import os
    parts = afile.split(';')
    # drop a trailing ";subformat" (non-numeric) component when allowed
    if len(parts) > 1 and allow_subformat and not parts[-1].isdigit():
        parts = parts[0:-1]
    # drop a trailing ";version" (numeric) component when requested
    if len(parts) > 1 and skip_version and parts[-1].isdigit():
        parts = parts[0:-1]
    stripped = ';'.join(parts)
    candidate = _extensions.sub('', stripped)
    if candidate == stripped and not only_known_extensions:
        # no known extension matched: fall back to a plain splitext
        candidate = os.path.splitext(stripped)[0]
    # keep stripping known extensions until the name stabilises
    while candidate != stripped:
        stripped = candidate
        candidate = _extensions.sub('', stripped)
    return stripped
Strip in the best way the extension from a filename .
56,730
def guess_extension(amimetype, normalize=False):
    """Tries to guess extension for a mimetype.

    :param amimetype: the mime type to look up.
    :param normalize: when True, map generic extensions ('.asc', '.obj')
        to friendlier ones and run the result through normalize_format().
    :return: the extension (with leading dot) or None when unknown.
    """
    ext = _mimes.guess_extension(amimetype)
    if ext and normalize:
        # .asc and .obj are technically correct but unhelpful defaults
        ext = {'.asc': '.txt', '.obj': '.bin'}.get(ext, ext)
        from invenio.legacy.bibdocfile.api_normalizer import normalize_format
        return normalize_format(ext)
    return ext
Tries to guess extension for a mimetype .
56,731
def get_magic_guesses(fullpath):
    """Return every guess the magic library offers about the file's content.

    Supports both magic binding flavours (CFG_HAS_MAGIC == 1 and == 2).
    """
    if CFG_HAS_MAGIC == 1:
        cookies = _get_magic_cookies()
        return tuple(cookies[key].file(fullpath) for key in cookies.keys())
    elif CFG_HAS_MAGIC == 2:
        # probe description, mime type, and mime encoding in turn
        probe_settings = ({'mime': False, 'mime_encoding': False},
                          {'mime': True, 'mime_encoding': False},
                          {'mime': False, 'mime_encoding': True})
        return tuple(_magic_wrapper(fullpath, **kw) for kw in probe_settings)
Return all the possible guesses from the magic library about the content of the file .
56,732
def mimes(self):
    """Returns extended MimeTypes.

    A non-strict MimeTypes instance augmented with .tbz2/.bz2 handling and
    any site-configured additional mimetypes.
    """
    _mimes = MimeTypes(strict=False)
    _mimes.suffix_map.update({'.tbz2': '.tar.bz2'})
    _mimes.encodings_map.update({'.bz2': 'bzip2'})
    if cfg['CFG_BIBDOCFILE_ADDITIONAL_KNOWN_MIMETYPES']:
        for key, value in iteritems(cfg['CFG_BIBDOCFILE_ADDITIONAL_KNOWN_MIMETYPES']):
            _mimes.add_type(key, value)
            # explicit cleanup of the loop names (historical style quirk)
            del key, value
    return _mimes
Returns extended MimeTypes .
56,733
def extensions(self):
    """Generate the regular expression to match all the known extensions.

    Collects every known encoding/suffix/type extension plus the configured
    extras, normalises each to a leading dot, and compiles a case-insensitive
    end-anchored alternation (reverse-sorted so longer variants match first).
    """
    # NOTE(review): concatenating dict.keys() results with + is Python-2-only;
    # on Python 3 these would need list() wrapping.
    _tmp_extensions = self.mimes.encodings_map.keys() + self.mimes.suffix_map.keys() + self.mimes.types_map[1].keys() + cfg['CFG_BIBDOCFILE_ADDITIONAL_KNOWN_FILE_EXTENSIONS']
    extensions = []
    for ext in _tmp_extensions:
        if ext.startswith('.'):
            extensions.append(ext)
        else:
            extensions.append('.' + ext)
    extensions.sort()
    extensions.reverse()
    # lowercase + dedupe, then build "\.ext$|\.ext2$|..." escaping '+'
    extensions = set([ext.lower() for ext in extensions])
    extensions = '\\' + '$|\\'.join(extensions) + '$'
    extensions = extensions.replace('+', '\\+')
    return re.compile(extensions, re.I)
Generate the regular expression to match all the known extensions .
56,734
def start(self, service):
    """Start *service* (and its dependencies), catching and logging exceptions.

    Previously-failed services are skipped; a failure here adds the service
    to self.failed instead of propagating.
    """
    try:
        # explicit loop instead of map(): map() is lazy on Python 3, so the
        # original would silently never start the dependencies there.
        for dep_class in service.depends:
            self.start_class(dep_class)
        if service.is_running():
            return
        if service in self.failed:
            log.warning("%s previously failed to start", service)
            return
        service.start()
    except Exception:
        log.exception("Unable to start service %s", service)
        self.failed.add(service)
Start the service catching and logging exceptions
56,735
def start_class(self, class_):
    """Start all services of a given class.

    If this manager doesn't already have a service of that class it
    constructs one, registers it, and starts it.  Returns the matching
    services as a list.
    """
    # list comprehension instead of filter()/map(): on Python 3 a filter
    # object is always truthy and map() is lazy, so the original logic
    # would both mis-detect "no matches" and never actually call start().
    matches = [svc for svc in self if isinstance(svc, class_)]
    if not matches:
        svc = class_()
        self.register(svc)
        matches = [svc]
    for svc in matches:
        self.start(svc)
    return matches
Start all services of a given class . If this manager doesn t already have a service of that class it constructs one and starts it .
56,736
def stop_class(self, class_):
    "Stop all services of a given class"
    # comprehension + loop instead of filter()/map(): map() is lazy on
    # Python 3 and would never actually invoke self.stop there.
    matches = [svc for svc in self if isinstance(svc, class_)]
    for svc in matches:
        self.stop(svc)
Stop all services of a given class
56,737
def _get_more_data(self, file, timeout):
    """Return data from *file* once available; RuntimeError after *timeout* s."""
    deadline = datetime.timedelta(seconds=timeout)
    watch = Stopwatch()
    while watch.split() < deadline:
        chunk = file.read()
        if chunk:
            return chunk
    raise RuntimeError("Timeout")
Return data from the file if available . If no data is received by the timeout then raise RuntimeError .
56,738
def _run_env ( self ) : env = dict ( os . environ ) env . update ( getattr ( self , 'env' , { } ) , PYTHONUSERBASE = self . env_path , PIP_USER = "1" , ) self . _disable_venv ( env ) return env
Augment the current environment providing the PYTHONUSERBASE .
56,739
def _disable_venv ( self , env ) : venv = env . pop ( 'VIRTUAL_ENV' , None ) if venv : venv_path , sep , env [ 'PATH' ] = env [ 'PATH' ] . partition ( os . pathsep )
Disable virtualenv and venv in the environment .
56,740
def create_env(self):
    """Create a PEP-370 (per-user site-packages) environment for this service.

    Computes env_path under $SERVICES_ROOT (default "services"), asks the
    target interpreter where its user site-packages lives, and ensures that
    directory exists.
    """
    root = path.Path(os.environ.get('SERVICES_ROOT', 'services'))
    self.env_path = (root / self.name).abspath()
    cmd = [
        self.python,
        '-c',
        'import site; print(site.getusersitepackages())',
    ]
    # NOTE(review): _run_env is passed uncalled -- presumably it is a
    # property on this class; confirm, otherwise check_output receives a
    # bound method instead of an environment mapping.
    out = subprocess.check_output(cmd, env=self._run_env)
    site_packages = out.decode().strip()
    path.Path(site_packages).makedirs_p()
Create a PEP - 370 environment
56,741
def compaction(self, request_compaction=False):
    """Retrieve a report on, or request, compaction for this instance."""
    url = self._service_url + 'compaction/'
    # POST requests a compaction; GET merely reports on it
    http_call = requests.post if request_compaction else requests.get
    response = http_call(url, **self._instances._default_request_kwargs)
    return response.json()
Retrieve a report on or request compaction for this instance .
56,742
def get_authenticated_connection(self, user, passwd, db='admin', ssl=True):
    """Get a connection to this instance authenticated against *db*."""
    try:
        connection = self.get_connection(ssl=ssl)
        connection[db].authenticate(user, passwd)
    except pymongo.errors.OperationFailure as ex:
        # log the full traceback, then surface the auth failure to the caller
        logger.exception(ex)
        raise
    return connection
Get an authenticated connection to this instance .
56,743
def shards(self, add_shard=False):
    """Get this instance's shard list (POST requests a new shard instead)."""
    url = self._service_url + 'shards/'
    method = requests.post if add_shard else requests.get
    return method(url, **self._instances._default_request_kwargs).json()
Get a list of shards belonging to this instance .
56,744
def new_relic_stats(self):
    """Get New-Relic stats for this instance (computed once, then cached).

    For sharded instances: samples per-shard stats twice, one second apart,
    rolls each sample up, and derives per-second rates.  Otherwise a single
    API call fetches precomputed stats.
    """
    if self._new_relic_stats is None:
        if self.type == 'mongodb_sharded':
            shards = [Shard(self.name, self._service_url + 'shards/', self._client, shard_doc)
                      for shard_doc in self.shards().get('data')]
            # first sample: gather every shard's stats concurrently
            fs = []
            with futures.ThreadPoolExecutor(len(shards)) as executor:
                for shard in shards:
                    fs.append(executor.submit(shard.get_shard_stats))
                futures.wait(fs, timeout=None, return_when=futures.ALL_COMPLETED)
            stats_this_second = self._rollup_shard_stats_to_instance_stats(
                {shard.name: future.result() for (shard, future) in zip(shards, fs)})
            # wait one second so the second sample yields per-second deltas
            time.sleep(1)
            fs = []
            with futures.ThreadPoolExecutor(len(shards)) as executor:
                for shard in shards:
                    fs.append(executor.submit(shard.get_shard_stats))
                futures.wait(fs, timeout=None, return_when=futures.ALL_COMPLETED)
            stats_next_second = self._rollup_shard_stats_to_instance_stats(
                {shard.name: future.result() for (shard, future) in zip(shards, fs)})
            self._new_relic_stats = self._compile_new_relic_stats(stats_this_second, stats_next_second)
        else:
            response = requests.get('{}{}'.format(self._url, 'new-relic-stats'),
                                    **self._instances._default_request_kwargs)
            self._new_relic_stats = json.loads(response.content).get('data') if response.status_code == 200 else {}
    return self._new_relic_stats
Get stats for this instance .
56,745
def _rollup_shard_stats_to_instance_stats(self, shard_stats):
    """Roll up all shard stats to instance-level stats.

    :param shard_stats: mapping of shard name -> that shard's stats doc.
    :return: dict with the max replication_lag across shards, summed server
        statistics (except 'connections', which keeps the last shard's
        value), and per-node opcounters.
    """
    instance_stats = {}
    opcounters_per_node = []
    # the worst lag across shards is the instance's effective lag
    instance_stats['replication_lag'] = max(map(lambda s: s['replication_lag'], shard_stats.values()))
    aggregate_server_statistics = {}
    for shard_name, stats in shard_stats.items():
        for statistic_key in stats.get('shard_stats'):
            # 'connections' is not additive across shards, so it is not summed
            if statistic_key != 'connections' and statistic_key in aggregate_server_statistics:
                aggregate_server_statistics[statistic_key] = util.sum_values(
                    aggregate_server_statistics[statistic_key],
                    stats.get('shard_stats')[statistic_key])
            else:
                aggregate_server_statistics[statistic_key] = stats.get('shard_stats')[statistic_key]
        opcounters_per_node.append({
            shard_name: {member: node_stats['opcounters']
                         for member, node_stats in stats.get('per_node_stats').items()}})
    instance_stats['opcounters_per_node'] = opcounters_per_node
    instance_stats['aggregate_server_statistics'] = aggregate_server_statistics
    return instance_stats
roll up all shard stats to instance level stats
56,746
def _compile_new_relic_stats(self, stats_this_second, stats_next_second):
    """Derive per-second metrics from two instance-stat samples 1 s apart.

    Subtracts counter values between the samples for opcounters/network
    (instance-wide and per node) and bundles them with the second sample's
    aggregates and database statistics.
    """
    server_statistics_per_second = {}
    opcounters_per_node_per_second = []
    for subdoc in ["opcounters", "network"]:
        first_doc = stats_this_second['aggregate_server_statistics'][subdoc]
        second_doc = stats_next_second['aggregate_server_statistics'][subdoc]
        keys = set(first_doc.keys()) | set(second_doc.keys())
        # only integer counters produce meaningful per-second deltas
        server_statistics_per_second[subdoc] = {
            key: int(second_doc[key]) - int(first_doc[key])
            for key in keys if isinstance(first_doc[key], int)}
    for node1, node2 in zip(stats_this_second['opcounters_per_node'],
                            stats_next_second['opcounters_per_node']):
        node_opcounters_per_second = {}
        for repl, members in node2.items():
            node_opcounters_per_second[repl] = {}
            for member, ops in members.items():
                node_opcounters_per_second[repl][member] = {}
                for op, count in ops.items():
                    # per-op delta between the two samples
                    node_opcounters_per_second[repl][member][op] = count - node1[repl][member][op]
        opcounters_per_node_per_second.append(node_opcounters_per_second)
    return {'opcounters_per_node_per_second': opcounters_per_node_per_second,
            'server_statistics_per_second': server_statistics_per_second,
            'aggregate_server_statistics': stats_next_second.get('aggregate_server_statistics'),
            'replication_lag': stats_next_second.get('replication_lag'),
            'aggregate_database_statistics': self.get_aggregate_database_stats()}
from instance stats_this_second and instance stats_next_second compute some per second stats metrics and other aggregated metrics
56,747
def get_stepdown_window(self):
    """Get information on this instance's stepdown window."""
    response = requests.get(self._service_url + 'stepdown/',
                            **self._instances._default_request_kwargs)
    return response.json()
Get information on this instance s stepdown window .
56,748
def set_stepdown_window(self, start, end, enabled=True, scheduled=True, weekly=True):
    """Set the stepdown window for this instance.

    :param start: datetime when the window opens (must precede *end*).
    :param end: datetime when the window closes (at most 1 week after start).
    :raises TypeError: on invalid ordering or an over-long window.
    """
    if not start < end:
        raise TypeError('Parameter "start" must occur earlier in time than "end".')
    week_delta = datetime.timedelta(days=7)
    if not ((end - start) <= week_delta):
        raise TypeError('Stepdown windows can not be longer than 1 week in length.')
    url = self._service_url + 'stepdown/'
    # NOTE(review): strftime('%s') (epoch seconds) is a platform extension,
    # not available on Windows -- confirm deployment targets.
    data = {
        'start': int(start.strftime('%s')),
        'end': int(end.strftime('%s')),
        'enabled': enabled,
        'scheduled': scheduled,
        'weekly': weekly,
    }
    response = requests.post(url, data=json.dumps(data),
                             **self._instances._default_request_kwargs)
    return response.json()
Set the stepdown window for this instance .
56,749
def brand(self, brand):
    """Sets the brand of this PaymentCard.

    :raises ValueError: when *brand* is neither None nor a supported network.
    """
    allowed_values = ["visa", "mastercard", "americanExpress", "discover"]
    is_invalid = brand is not None and brand not in allowed_values
    if is_invalid:
        raise ValueError(
            "Invalid value for `brand` ({0}), must be one of {1}".format(brand, allowed_values))
    self._brand = brand
Sets the brand of this PaymentCard .
56,750
def latex_quote(s):
    """Quote LaTeX special characters (_, $, #) in *s* (coerced to str)."""
    replacements = {'_': r'\_', '$': r'\$', '#': r'\#'}
    quoted = str(s)
    for char, escaped in replacements.items():
        quoted = quoted.replace(char, escaped)
    return quoted
Quote special characters for LaTeX .
56,751
def tree_to_file(tree: 'BubbleTree', outfile: str):
    """Write the bubble representation of *tree* into *outfile*."""
    with open(outfile, 'w') as handle:
        handle.write(tree_to_bubble(tree))
Compute the bubble representation of given power graph and push it into given file .
56,752
def lines_from_tree(tree, nodes_and_set: bool = False) -> iter:
    """Yield bubble-format lines (NODE/SET/IN/EDGE) describing *tree*."""
    if nodes_and_set:
        for node in tree.nodes():
            yield 'NODE\t{}'.format(node)
        for powernode in tree.powernodes():
            yield 'SET\t{}'.format(powernode)
    # inclusion lines: member first, container second
    for container, members in tree.inclusions.items():
        for member in members:
            yield 'IN\t{}\t{}'.format(member, container)
    # every edge carries a constant weight of 1.0
    for source, targets in tree.edges.items():
        for target in targets:
            yield 'EDGE\t{}\t{}\t1.0'.format(source, target)
Yield lines of bubble describing given BubbleTree
56,753
def process_formdata(self, valuelist):
    """Join the submitted values into one string and parse it with self.format."""
    if not valuelist:
        return
    time_str = u' '.join(valuelist)
    try:
        parsed = time.strptime(time_str, self.format)
        # keep only hour/minute/second from the struct_time
        self.data = datetime.time(*parsed[3:6])
    except ValueError:
        # clear the field but let the parse error propagate to the form
        self.data = None
        raise
Join time string .
56,754
def validate_csrf_token(self, field):
    """Disable CSRF protection during testing; otherwise defer to the parent."""
    if current_app.testing:
        # skip the check entirely when the app runs under tests
        return
    super(InvenioBaseForm, self).validate_csrf_token(field)
Disable CSRF protection during testing.
56,755
def load_exchange_word_vectors(filename="database.db", maximum_number_of_events=None):
    """Load exchange word-vector pairs from the database and return a dataset.

    Skips rows whose utterance or response vector is missing; optionally
    stops after maximum_number_of_events usable rows.  (Python 2 code --
    note the print statement driving the progress display.)
    """
    # NOTE(review): the template has no {filename} placeholder, so format()
    # here is a no-op -- the log line always reads "(unknown)".
    log.info("load word vectors of database (unknown)".format(filename=filename))
    if not os.path.isfile(filename):
        log.info("database (unknown) nonexistent".format(filename=filename))
        program.terminate()
        raise Exception
    database = access_database(filename=filename)
    table_exchanges = database["exchanges"]
    table_name = "exchanges"
    data = datavision.Dataset()
    progress = shijian.Progress()
    progress.engage_quick_calculation_mode()
    number_of_entries = len(database[table_name])
    index = 0
    for index_entry, entry in enumerate(database[table_name].all()):
        if maximum_number_of_events is not None and index >= int(maximum_number_of_events):
            log.info("loaded maximum requested number of events " + "({maximum_number_of_events})\r".format(maximum_number_of_events=maximum_number_of_events))
            break
        utteranceWordVector = str(entry["utteranceWordVector"])
        responseWordVector = str(entry["responseWordVector"])
        # only rows with both vectors present are usable
        if utteranceWordVector != "None" and responseWordVector != "None":
            index += 1
            # SECURITY NOTE(review): eval() on database content executes
            # arbitrary code if the database is not fully trusted.
            utteranceWordVector = eval("np." + utteranceWordVector.replace("float32", "np.float32"))
            responseWordVector = eval("np." + responseWordVector.replace("float32", "np.float32"))
            data.variable(index=index, name="utteranceWordVector", value=utteranceWordVector)
            data.variable(index=index, name="responseWordVector", value=responseWordVector)
        # Python 2 print statement: trailing comma suppresses the newline
        print progress.add_datum(fraction=index_entry / number_of_entries),
    return data
Load exchange data and return dataset .
56,756
def load_HEP_data(
    ROOT_filename="output.root",
    tree_name="nominal",
    maximum_number_of_events=None
):
    """Load selected events from a ROOT tree into a datavision.Dataset.

    Only events passing ``select_event`` are stored; loading stops after
    ``maximum_number_of_events`` selected events, if given.
    """
    ROOT_file = open_ROOT_file(ROOT_filename)
    tree = ROOT_file.Get(tree_name)
    number_of_events = tree.GetEntries()
    data = datavision.Dataset()
    progress = shijian.Progress()
    progress.engage_quick_calculation_mode()
    number_of_events_loaded = 0
    log.info("")
    index = 0
    for event in tree:
        if maximum_number_of_events is not None and number_of_events_loaded >= int(maximum_number_of_events):
            log.info(
                "loaded maximum requested number of events " +
                "({maximum_number_of_events})\r".format(
                    maximum_number_of_events=maximum_number_of_events
                )
            )
            break
        # Python 2 print statement: update the progress display.
        print progress.add_datum(fraction=(index + 2) / number_of_events),
        if select_event(event):
            index += 1
            # Leading electron transverse momentum.
            data.variable(index=index, name="el_1_pt", value=event.el_pt[0])
            number_of_events_loaded += 1
    log.info("")
    return data
Load HEP data and return dataset .
56,757
def sentiment(text=None, confidence=False):
    """Classify the sentiment ("pos"/"neg") of *text*.

    By default returns only the classification; with ``confidence=True``
    returns a ``(classification, confidence)`` tuple. Both fall back to
    ``None`` when classification fails.
    """
    try:
        words = [word for word in text.split(" ") if word]
        features = word_features(words)
        classification = classifier.classify(features)
        confidence_classification = classifier.prob_classify(features).prob(classification)
    # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt
    # and SystemExit); keeps the original best-effort None result.
    except Exception:
        classification = None
        confidence_classification = None
    if confidence:
        return (classification, confidence_classification)
    return classification
This function accepts a string text input . It calculates the sentiment of the text pos or neg . By default it returns this calculated sentiment . If selected it returns a tuple of the calculated sentiment and the classification confidence .
56,758
def usernames(self):
    """Return the unique usernames of the stored tweets (order arbitrary)."""
    try:
        return list({tweet.username for tweet in self})
    # Narrowed from a bare `except:`; still returns None on failure.
    except Exception:
        log.error("error -- possibly a problem with tweets stored")
This function returns the list of unique usernames corresponding to the tweets stored in self .
56,759
def user_sentiments(self, username=None):
    """Return the sentiments of all stored tweets posted by *username*."""
    try:
        return [tweet.sentiment for tweet in self if tweet.username == username]
    # Narrowed from a bare `except:`; preserves the None-on-failure contract.
    except Exception:
        log.error("error -- possibly no username specified")
        return None
This function returns a list of all sentiments of the tweets of a specified user .
56,760
def user_sentiments_most_frequent(self, username=None, single_most_frequent=True):
    """Return the most frequent sentiment(s) in *username*'s tweets.

    By default the single most common sentiment is returned; otherwise a
    dict mapping each sentiment to its frequency. Returns None on failure.
    """
    try:
        sentiment_frequencies = collections.Counter(
            self.user_sentiments(username=username))
        if single_most_frequent:
            return sentiment_frequencies.most_common(1)[0][0]
        return dict(sentiment_frequencies)
    # Narrowed from a bare `except:`.
    except Exception:
        log.error("error -- possibly no username specified")
        return None
This function returns the most frequent calculated sentiments expressed in tweets of a specified user . By default the single most frequent sentiment is returned . All sentiments with their corresponding frequencies can be returned also .
56,761
def users_sentiments_single_most_frequent(self, usernames=None):
    """Map each username to the single most frequent sentiment it expressed.

    Defaults to all stored usernames when *usernames* is not given.
    Returns None on failure.
    """
    result = dict()
    if usernames is None:
        usernames = self.usernames()
    try:
        for username in usernames:
            result[username] = self.user_sentiments_most_frequent(
                username=username,
                single_most_frequent=True)
        return result
    # Narrowed from a bare `except:`.
    except Exception:
        log.error("error -- possibly a problem with tweets stored")
        return None
This function returns the single most frequent calculated sentiment expressed by all stored users or by a list of specified users as a dictionary .
56,762
def format_progress(i, n):
    """Return a 25-character progress bar plus "i/n - pct%" counters.

    A zero total is rendered as zero progress instead of dividing by zero.
    """
    fraction = float(i) / n if n != 0 else 0
    width = 25
    filled = int(round(fraction * width))
    plus_part = '+' * filled
    dot_part = '.' * (width - filled)
    return '[{0!s}{1!s}] {2:d}/{3:d} - {4:.1f}%'.format(
        plus_part, dot_part, i, n, fraction * 100)
Returns string containing a progress bar a percentage etc .
56,763
def _format_exe_info(py_len, exeinfo, format, indlevel):
    """Render ExeInfo entries in the requested output format.

    :param py_len: width of the filename column
    :param exeinfo: iterable of objects with .filename and .description
    :param format: "markdown-list", "rest-list", "markdown-table",
        or a format starting with "text"
    :param indlevel: indentation level used by the "text" renderer
    :returns: list of rendered output lines
    """
    ret = []
    # Indent prefix is only computed for the "text" family of formats.
    ind = " " * indlevel * NIND if format.startswith("text") else ""
    if format == "markdown-list":
        for si in exeinfo:
            ret.append(" - `{0!s}`: {1!s}".format(si.filename, si.description))
    # NOTE(review): this branch is a separate `if` (not `elif`) in the
    # original; behavior is equivalent to a full elif chain here.
    if format == "rest-list":
        for si in exeinfo:
            ret.append("* ``{0!s}``: {1!s}".format(si.filename, si.description))
    elif format == "markdown-table":
        mask = "%-{0:d}s | %s".format(py_len + 2)
        ret.append(mask % ("Script name", "Purpose"))
        ret.append("-" * (py_len + 3) + "|" + "-" * 10)
        for si in exeinfo:
            ret.append(mask % ("`{0!s}`".format(si.filename), si.description))
    elif format == "text":
        sbc = 1  # spaces between the filename column and the description
        for si in exeinfo:
            ss = textwrap.wrap(si.description, 79 - py_len - sbc - indlevel * NIND)
            for i, s in enumerate(ss):
                if i == 0:
                    # First wrapped line carries the dotted filename column.
                    filecolumn = si.filename + " " + ("." * (py_len - len(si.filename)))
                else:
                    filecolumn = " " * (py_len + 1)
                ret.append("{}{}{}{}".format(ind, filecolumn, " " * sbc, s))
            # Blank separator line.
            # NOTE(review): indentation reconstructed from flattened source;
            # confirm this append belongs inside the per-entry loop.
            ret.append("")
    return ret
Renders ExeInfo object in specified format
56,764
def _map_relation ( c , language = 'any' ) : label = c . label ( language ) return { 'id' : c . id , 'type' : c . type , 'uri' : c . uri , 'label' : label . label if label else None }
Map related concept or collection leaving out the relations .
56,765
def add_indicators(self, indicators=list(), private=False, tags=list()):
    """Add indicators to the remote instance in batches of 100.

    Indicators are cleaned, whitelisted, and de-duplicated against the
    local cache before being POSTed; with ``private=True`` the hashed
    forms are sent instead. Returns a stats dict with a summary message.

    NOTE(review): mutable default arguments (``list()``) are shared across
    calls -- harmless here only as long as they are never mutated.
    """
    if len(indicators) == 0:
        raise Exception("No indicators were identified.")
    self.logger.debug("Checking {} indicators".format(len(indicators)))
    cleaned = clean_indicators(indicators)
    self.logger.debug("Cleaned {} indicators".format(len(cleaned)))
    whitelisted = check_whitelist(cleaned)
    self.logger.debug("Non-whitelisted {} indicators".format(len(whitelisted)))
    indicators = prune_cached(whitelisted)
    # Hashed forms are used instead of the raw values when private=True.
    hashed = hash_values(indicators)
    self.logger.debug("Non-cached {} indicators".format(len(indicators)))
    self.logger.debug("Processing {} indicators".format(len(indicators)))
    # One request per batch of up to 100 indicators.
    request_count = int(math.ceil(len(indicators) / 100.0))
    if request_count == 0:
        mesg = "[!] No indicators were left to process after "
        mesg += "cleaning, whitelisting and checking the cache."
        return {'message': mesg}
    stats = {'success': 0, 'failure': 0, 'requests': request_count, 'written': 0}
    mesg = "{} indicators found, making {} requests"
    self.logger.debug(mesg.format(len(indicators), request_count))
    if private:
        indicators = hashed
    # Accept a comma-separated string of tags as well as a list.
    if type(tags) == str:
        tags = [t.strip().lower() for t in tags.split(',')]
    start, end = (0, 100)
    for i, idx in enumerate(range(0, request_count)):
        # Throttle: wait 3 seconds between consecutive requests.
        if idx > 0:
            time.sleep(3)
            self.logger.debug("Waiting 3 seconds before next request.")
        to_send = {'indicators': indicators[start:end], 'tags': tags}
        r = self._send_data('POST', 'admin', 'add-indicators', to_send)
        start, end = (end, end + 100)
        if not r['success']:
            stats['failure'] += 1
            continue
        stats['success'] += 1
        stats['written'] += r['writeCount']
        # Only successfully written batches are cached locally.
        cache_items(to_send['indicators'])
    msg = ""
    msg += "{written} indicators written using {requests} requests: "
    msg += "{success} success, {failure} failure"
    stats['message'] = msg.format(**stats)
    return stats
Add indicators to the remote instance .
56,766
def get_indicators(self):
    """List indicators available on the remote instance."""
    response = self._get('', 'get-indicators')
    indicator_list = response['indicators']
    response['message'] = "%i indicators:\n%s" % (
        len(indicator_list), "\n".join(indicator_list))
    return response
List indicators available on the remote instance .
56,767
def besttype(x, encoding="utf-8", percentify=True):
    """Convert string x to the most useful type: int, float, percent fraction,
    or unicode string (tried in that order).

    NOTE(review): the regex pattern below is the bare name ``r`` -- the
    quoted pattern literal appears to have been lost when this file was
    flattened; restore it from the upstream source before running.
    NOTE(review): ``percentify`` is accepted but never consulted here --
    the percent converter is always tried; confirm against upstream.
    """
    def unicodify(x):
        # Decode to unicode using the requested encoding.
        return to_unicode(x, encoding)

    def percent(x):
        # "45%" -> 0.45; anything else raises ValueError.
        try:
            if x.endswith("%"):
                x = float(x[:-1]) / 100.
            else:
                raise ValueError
        except (AttributeError, ValueError):
            raise ValueError
        return x

    x = unicodify(x)
    try:
        x = x.strip()
    except AttributeError:
        pass
    m = re.match(r, x)
    if m is None:
        # Try progressively looser conversions; unicodify never fails.
        for converter in int, float, percent, unicodify:
            try:
                return converter(x)
            except ValueError:
                pass
    else:
        x = unicodify(m.group('value'))
    return x
Convert string x to the most useful type i . e . int float or unicode string .
56,768
def _onmessage(cls, kmsg):
    """Log the received message, then delegate it to :meth:`onmessage`."""
    summary = "{}.ReceivedMessage {}[{}]".format(
        cls.__name__, kmsg.entrypoint, kmsg.uuid)
    logger.debug(summary, extra=dict(kmsg=kmsg.dump()))
    return cls.onmessage(kmsg)
Call on received message
56,769
def register(cls, name, entrypoint):
    """Register *entrypoint* under *name*; it must subclass Entrypoint."""
    if not issubclass(entrypoint, Entrypoint):
        message = ("Invalid type for entry '{}', MUST implement "
                   "kser.entry.Entrypoint".format(name))
        raise ValidationError(message, extra=dict(entrypoint=name))
    cls.ENTRYPOINTS[name] = entrypoint
    logger.debug("{}.Registered: {}".format(cls.__name__, name))
Register a new entrypoint
56,770
def run(cls, raw_data):
    """Deserialize *raw_data* from Kafka, dispatch it to its registered
    entrypoint, and return a Result describing the outcome.

    Deserialization failures are logged and returned as an error Result;
    execution failures are wrapped per-message via Result.from_exception.
    """
    logger.debug("{}.ReceivedFromKafka: {}".format(cls.__name__, raw_data))
    try:
        kmsg = cls._onmessage(cls.TRANSPORT.loads(raw_data))
    except Exception as exc:
        logger.error(
            "{}.ImportError: Failed to load data from kafka: {}".format(
                cls.__name__, exc),
            extra=dict(kafka_raw_data=raw_data))
        return Result.from_exception(exc)
    try:
        cls.start_processing(kmsg)
        if kmsg.entrypoint not in cls.ENTRYPOINTS:
            # NOTE(review): "registred" typo is in the runtime string;
            # left unchanged here to preserve behavior.
            raise ValidationError(
                "Entrypoint '{}' not registred".format(kmsg.entrypoint),
                extra=dict(
                    uuid=kmsg.uuid,
                    entrypoint=kmsg.entrypoint,
                    allowed=list(cls.ENTRYPOINTS.keys())))
        result = cls.ENTRYPOINTS[kmsg.entrypoint].from_Message(kmsg).execute()
    except Exception as exc:
        result = Result.from_exception(exc, kmsg.uuid)
    finally:
        cls.stop_processing()
    # Success callback for retcodes < 300, error callback otherwise.
    if result and result.retcode < 300:
        return cls._onsuccess(kmsg=kmsg, result=result)
    else:
        return cls._onerror(kmsg=kmsg, result=result)
description of run
56,771
def deactivate(self, node_id):
    """Mark the node identified by *node_id* as inactive."""
    current = self.node_list[node_id]
    self.node_list[node_id] = current._replace(active=False)
Deactivate the node identified by node_id .
56,772
def insert(self, point, data=None):
    """Insert a new node holding *point* (and optional *data*) in the kd-tree.

    Returns whatever ``self.new_node`` returns for the created node.
    """
    assert len(point) == self.k
    # Empty tree: the new point becomes the root covering the whole space.
    if self.size == 0:
        if self.region is None:
            # NOTE(review): list multiplication aliases the same inner
            # [-inf, inf] list k times; safe only because mutation below
            # always copies region[axis] first -- confirm no other writer.
            self.region = [[-math.inf, math.inf]] * self.k
        axis = 0
        return self.new_node(point, self.region, axis, data)
    # Walk down from the root until an empty child slot is found.
    current_id = 0
    while True:
        parent_node = self.node_list[current_id]
        axis = parent_node.axis
        if point[axis] < parent_node.point[axis]:
            next_id, left = parent_node.left, True
        else:
            next_id, left = parent_node.right, False
        if next_id is None:
            break
        current_id = next_id
    # Shallow-copy the parent's region, deep-copying only the split axis.
    region = parent_node.region[:]
    region[axis] = parent_node.region[axis][:]
    limit = parent_node.point[axis]
    if left:
        # New node becomes the left child; its region is capped above.
        self.node_list[current_id] = parent_node._replace(left=self.size)
        region[axis][1] = limit
    else:
        # New node becomes the right child; its region is capped below.
        self.node_list[current_id] = parent_node._replace(right=self.size)
        region[axis][0] = limit
    return self.new_node(point, region, (axis + 1) % self.k, data)
Insert a new node in the tree .
56,773
def set_to_public(self, request, queryset):
    """Admin action: publish the selected releases, stamping the change time."""
    timestamp = now()
    queryset.update(is_public=True, modified=timestamp)
Set one or several releases to public
56,774
def loads(cls, json_data):
    """Build an instance from JSON, wrapping schema errors in ValidationError."""
    try:
        payload = cls.MARSHMALLOW_SCHEMA.loads(json_data)
        return cls(**payload)
    except marshmallow.exceptions.ValidationError as exc:
        raise ValidationError("Failed to load message", extra=exc.args[0])
description of load
56,775
def format(self, response):
    """Format the response content using this mapper's charset."""
    prepared = self._prepare_response(response)
    prepared.content = self._format_data(prepared.content, self.charset)
    return self._finalize_response(prepared)
Format the data .
56,776
def parse(self, data, charset=None):
    """Parse *data*, defaulting to this mapper's charset when none is given."""
    effective_charset = charset if charset else self.charset
    return self._parse_data(data, effective_charset)
Parse the data .
56,777
def _decode_data(self, data, charset):
    """Decode *data* to unicode with *charset*.

    :raises errors.BadRequest: when the bytes cannot be decoded
        with the requested charset.
    """
    try:
        return smart_unicode(data, charset)
    except UnicodeDecodeError:
        raise errors.BadRequest('wrong charset')
Decode string data .
56,778
def _parse_data ( self , data , charset ) : return self . _decode_data ( data , charset ) if data else u''
Parse the data
56,779
def _finalize_response(self, response):
    """Convert an internal Response into a django HttpResponse."""
    http_response = HttpResponse(
        content=response.content,
        content_type=self._get_content_type())
    http_response.status_code = response.code
    return http_response
Convert the Response object into Django's HttpResponse .
56,780
def register_mapper(self, mapper, content_type, shortname=None):
    """Register *mapper* under all names derived from content type/shortname."""
    self._check_mapper(mapper)
    names = self._get_content_type_names(content_type, shortname)
    self._datamappers.update({name: mapper for name in names})
Register new mapper .
56,781
def select_formatter(self, request, resource):
    """Pick the formatter: resource override, URL, Accept header, defaults."""
    if resource.mapper:
        return resource.mapper
    # URL takes precedence; Accept is only consulted when the URL names none.
    name = self._get_name_from_url(request)
    if not name:
        name = self._get_name_from_accept(request)
    if name:
        return self._get_mapper(name)
    return resource.default_mapper or self._get_default_mapper()
Select appropriate formatter based on the request .
56,782
def select_parser(self, request, resource):
    """Pick the parser: resource override, Content-Type, URL, defaults."""
    if resource.mapper:
        return resource.mapper
    # Content-Type takes precedence; fall back to the URL's format hint.
    name = self._get_name_from_content_type(request)
    if not name:
        name = self._get_name_from_url(request)
    if name:
        return self._get_mapper(name)
    return resource.default_mapper or self._get_default_mapper()
Select appropriate parser based on the request .
56,783
def get_mapper_by_content_type(self, content_type):
    """Return the mapper registered for *content_type* (charset stripped)."""
    stripped = util.strip_charset(content_type)
    return self._get_mapper(stripped)
Returns the mapper based on the content type .
56,784
def _get_mapper ( self , mapper_name ) : if mapper_name in self . _datamappers : return self . _datamappers [ mapper_name ] else : return self . _unknown_format ( mapper_name )
Return the mapper based on the given name .
56,785
def _get_name_from_content_type(self, request):
    """Extract the mapper name from the Content-Type header, if present."""
    content_type = request.META.get('CONTENT_TYPE', None)
    if not content_type:
        return None
    # Strip the charset declaration (e.g. "; charset=utf-8").
    return util.strip_charset(content_type)
Get name from Content - Type header
56,786
def _get_name_from_accept(self, request):
    """Return the first registered mapper name found in the Accept header.

    Returns None when no Accept header is present; raises NotAcceptable
    when none of the accepted types is registered.
    """
    accepts = util.parse_accept_header(request.META.get("HTTP_ACCEPT", ""))
    if not accepts:
        return None
    for accept in accepts:
        name = accept[0]
        if name in self._datamappers:
            return name
    raise errors.NotAcceptable()
Process the Accept HTTP header .
56,787
def _get_name_from_url ( self , request ) : format = request . GET . get ( 'format' , None ) if not format : match = self . _format_query_pattern . match ( request . path ) if match and match . group ( 'format' ) : format = match . group ( 'format' ) return format
Determine short name for the mapper based on the URL .
56,788
def _check_mapper ( self , mapper ) : if not hasattr ( mapper , 'parse' ) or not callable ( mapper . parse ) : raise ValueError ( 'mapper must implement parse()' ) if not hasattr ( mapper , 'format' ) or not callable ( mapper . format ) : raise ValueError ( 'mapper must implement format()' )
Check that the mapper has valid signature .
56,789
def cleanup(self, cluster):
    """Delete the inventory file generated for *cluster*, best effort.

    Also removes the (temporary) storage directory once it is empty.
    OSError during deletion is logged and ignored.
    """
    if self._storage_path and os.path.exists(self._storage_path):
        fname = '%s.%s' % (AnsibleSetupProvider.inventory_file_ending, cluster.name)
        inventory_path = os.path.join(self._storage_path, fname)
        if os.path.exists(inventory_path):
            try:
                os.unlink(inventory_path)
                # Remove the temporary storage dir once nothing is left in it.
                if self._storage_path_tmp:
                    if len(os.listdir(self._storage_path)) == 0:
                        shutil.rmtree(self._storage_path)
            except OSError as ex:
                # NOTE(review): "AnsibileProvider" typo is in the runtime
                # log string; left unchanged to preserve behavior.
                log.warning(
                    "AnsibileProvider: Ignoring error while deleting "
                    "inventory file %s: %s", inventory_path, ex)
Deletes the inventory file used last recently used .
56,790
def based_on(self, based_on):
    """Set based_on; only 'shippingAddress', 'billingAddress' or None allowed."""
    allowed_values = ["shippingAddress", "billingAddress"]
    if not (based_on is None or based_on in allowed_values):
        raise ValueError(
            "Invalid value for `based_on` ({0}), must be one of {1}"
            .format(based_on, allowed_values))
    self._based_on = based_on
Sets the based_on of this TaxRate .
56,791
def build_append_file_task(urllocation, filelocation):
    """Build a shell pipeline that downloads *urllocation* and appends any
    new data to *filelocation*, keeping all previously stored content.

    Uses curl or wget per the celcius config; exits the process when the
    configured retrieve command is neither.
    """
    config = file_utils.get_celcius_config()
    basename = filelocation.split('/')[-1]
    # NOTE(review): str.replace substitutes every occurrence of basename in
    # the path, not just the final component -- confirm paths never repeat it.
    tmp_filelocation = filelocation.replace(basename, 'tmp_' + basename)
    new_filelocation = filelocation.replace(basename, 'new_' + basename)
    if config['retrieve_command'] == 'curl':
        download_cmd = curl.build_download_file_command(urllocation, tmp_filelocation)
    elif config['retrieve_command'] == 'wget':
        download_cmd = wget.build_download_file_command(urllocation, tmp_filelocation)
    else:
        print("Invalid retrieve command!")
        sys.exit(1)
    # Download, diff against the current file, and redirect the merged
    # output into a staging file before swapping it into place.
    diff_cmd = diff.build_append_file_command(filelocation, tmp_filelocation)
    compare_cmd = concat.build_and_concat_commands([download_cmd, diff_cmd])
    redirect_cmd = redirect.redirect_output(compare_cmd, new_filelocation)
    full_cmd = concat.concat_commands([
        touch.touch(filelocation).build_command(),
        redirect_cmd,
        rm.build_force_rm_command(tmp_filelocation).build_command(),
        rm.build_force_rm_command(filelocation).build_command(),
        mv.mv(new_filelocation, filelocation).build_command()
    ])
    return full_cmd
Build a task to watch a specific remote url and append that data to the file . This method should be used when you would like to keep all of the information stored on the local machine but also append the new information found at the url .
56,792
def _create_memory_database_interface(self) -> GraphDatabaseInterface:
    """Build and return the in-memory SQLite interface backing the graph."""
    declarative_class = declarative_base()
    engine = sqlalchemy.create_engine("sqlite://", poolclass=StaticPool)
    session_factory = sessionmaker(bind=engine)
    interface: GraphDatabaseInterface = create_graph_database_interface(
        sqlalchemy, session_factory(), declarative_class,
        sqlalchemy.orm.relationship)
    # Start from a clean schema each time.
    declarative_class.metadata.drop_all(engine)
    declarative_class.metadata.create_all(engine)
    return interface
Creates and returns the in - memory database interface the graph will use .
56,793
def _create_node(self, index: int, name: str,
                 external_id: Optional[str] = None) -> IGraphNode:
    """Instantiate an IGraphNode bound to this graph."""
    return IGraphNode(
        graph=self._graph,
        index=index,
        name=name,
        external_id=external_id,
    )
Returns a new IGraphNode instance with the given index and name .
56,794
def parse(self):
    """Parse the raw table data into tuples stored on ``self.records``."""
    self.parse_fields()
    parsed_rows = []
    for line in self.t['data'].split('\n'):
        # Skip separator/empty rows entirely.
        if EMPTY_ROW.match(line):
            continue
        parsed_rows.append(tuple(
            self.autoconvert(line[start:end + 1])
            for start, end in self.fields))
    self.records = parsed_rows
Parse the table data string into records .
56,795
def parse_fields(self):
    """Determine the start/end columns and names of the table fields.

    Sets ``self.names`` (field names) and ``self.fields`` (list of
    (start, end) column pairs derived from the '=' runs in the rule line).

    :raises ParseError: when the three rule lines differ or the number of
        names does not match the number of rule segments.
    """
    rule = self.t['toprule'].rstrip()
    # All three rules must be identical, otherwise the table is malformed.
    if not (rule == self.t['midrule'].rstrip() and rule == self.t['botrule'].rstrip()):
        raise ParseError("Table rules differ from each other (check white space).")
    names = self.t['fields'].split()
    nfields = len(rule.split())
    if nfields != len(names):
        # NOTE(review): nfields (rule segments) fills the "field names"
        # placeholder and vice versa -- the %-args may be swapped; confirm.
        raise ParseError(
            "number of field names (%d) does not match number of fields (%d)"
            % (nfields, len(names)))
    fields = []
    ifield = 0
    is_field = rule.startswith('=')
    len_rule = len(rule)
    start_field = 0
    end_field = 0
    # Scan the rule line; runs of '=' delimit the column extents.
    # NOTE: xrange -- this module targets Python 2.
    for c in xrange(len_rule):
        char = rule[c]
        if not is_field and char == '=':
            start_field = c
            is_field = True
        # A field ends at a space or at the last character of the rule.
        if is_field and (char == ' ' or c == len_rule - 1):
            fields.append((start_field, c))
            ifield += 1
            is_field = False
    self.names = names
    self.fields = fields
Determine the start and end columns and names of the fields .
56,796
def check_arguments_compatibility(the_callable, argd):
    """Check whether calling *the_callable* with keyword dict *argd* is valid.

    :raises ValueError: when argd contains an argument the callable does not
        accept (and it has no **kwargs), or when a mandatory argument is
        missing.
    """
    if not argd:
        argd = {}
    # NOTE: Python 2 API -- getargspec and iteritems; removed in Python 3.11+.
    args, dummy, varkw, defaults = inspect.getargspec(the_callable)
    tmp_args = list(args)
    optional_args = []
    args_dict = {}
    if defaults:
        defaults = list(defaults)
    else:
        defaults = []
    # Defaults align with the tail of the argument list: peel them off
    # from the right, recording which arguments are optional.
    while defaults:
        arg = tmp_args.pop()
        optional_args.append(arg)
        args_dict[arg] = defaults.pop()
    # Remaining arguments are mandatory (no default).
    while tmp_args:
        args_dict[tmp_args.pop()] = None
    for arg, dummy_value in iteritems(argd):
        if arg in args_dict:
            del args_dict[arg]
        elif not varkw:
            raise ValueError(
                'Argument %s not expected when calling callable '
                '"%s" with arguments %s' % (
                    arg,
                    get_callable_signature_as_string(the_callable),
                    argd))
    # Drop optional arguments that were not supplied; anything left over
    # is a mandatory argument missing from argd.
    # NOTE: deleting while iterating .keys() is safe only on Python 2,
    # where keys() returns a list.
    for arg in args_dict.keys():
        if arg in optional_args:
            del args_dict[arg]
    if args_dict:
        raise ValueError(
            'Arguments %s not specified when calling callable '
            '"%s" with arguments %s' % (
                ', '.join(args_dict.keys()),
                get_callable_signature_as_string(the_callable),
                argd))
Check if calling the_callable with the given arguments would be correct or not .
56,797
def _print(self, text, color=None, **kwargs):
    """Print *text* to the terminal, optionally wrapped in an ANSI color.

    Fix: the declared default ``color=None`` previously raised KeyError
    (``COLORS[None]``); unknown or missing colors now print plain text.
    """
    COLORS = {
        'red': '\033[91m{}\033[00m',
        'green': '\033[92m{}\033[00m',
        'yellow': '\033[93m{}\033[00m',
        'cyan': '\033[96m{}\033[00m',
    }
    template = COLORS.get(color, '{}')
    six.print_(template.format(text), **kwargs)
print text with given color to terminal
56,798
def _is_unique(self, name, path):
    """Return True when no stored project uses *name* or *path*."""
    project = None
    try:
        project = Project.select().where(
            (Project.name == name) | (Project.path == path))[0]
    # Narrowed from a bare `except:`; no matching row (IndexError) or a
    # query failure is treated as "unique", matching the old behavior.
    except Exception:
        pass
    return project is None
verify if there is a project with given name or path on the database
56,799
def add(self, name, path=None, **kwargs):
    """Add a project with *name* and *path* (or the configured default path)."""
    path = path or kwargs.pop('default_path', None)
    if not self._path_is_valid(path):
        return
    if not self._is_unique(name, path):
        existing = Project.select().where(
            (Project.name == name) | (Project.path == path))[0]
        self._print(self._ERROR_PROJECT_EXISTS.format(name, existing.path), 'red')
        return
    Project.create(name=name, path=path)
    self._print(self._SUCCESS_PROJECT_ADDED.format(name), 'green')
Add a new project with the given name and path to the database . If the path is not given , the current working directory is used as the default .