idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
58,600
def generate_id ( self , element ) : if not element . has_attribute ( 'id' ) : element . set_attribute ( 'id' , self . prefix_id + str ( self . count ) ) self . count = self . count + 1
Generate an id for an element.
58,601
def fetch_state_data ( self , states ) : print ( "Fetching census data" ) for table in CensusTable . objects . all ( ) : api = self . get_series ( table . series ) for variable in table . variables . all ( ) : estimate = "{}_{}" . format ( table . code , variable . code ) print ( ">> Fetching {} {} {}" . format ( table . year , table . series , estimate ) ) for state in tqdm ( states ) : self . get_state_estimates_by_state ( api = api , table = table , variable = variable , estimate = estimate , state = state , ) self . get_county_estimates_by_state ( api = api , table = table , variable = variable , estimate = estimate , state = state , ) self . get_district_estimates_by_state ( api = api , table = table , variable = variable , estimate = estimate , state = state , )
Fetch census estimates from table .
58,602
def has(self, name):
    """Return True if at least one annotation with the given name exists.

    :param name: annotation name to look for
    :returns: True when a matching annotation exists, otherwise False
    """
    return any(annotation.name == name for annotation in self.all_annotations)
Returns True if there is at least one annotation with the given name, otherwise False.
58,603
def get_first(self, name):
    """Get the first annotation with the given name, or None if absent.

    :param name: annotation name to look for
    :returns: the first matching annotation object, or None
    """
    matches = (annotation for annotation in self.all_annotations
               if annotation.name == name)
    return next(matches, None)
Get the first annotation by a given name .
58,604
def get_all(self, name):
    """Get all annotations with the given name, preserving order.

    :param name: annotation name to look for
    :returns: list of matching annotation objects (possibly empty)
    """
    found = []
    for annotation in self.all_annotations:
        if annotation.name == name:
            found.append(annotation)
    return found
Get all the annotation by a given name .
58,605
def first_value_of(self, name, default_value=None):
    """Return the first value of the named param, or default_value.

    When values_of() yields a list its first element is returned;
    a non-list value is returned as-is; None falls back to the default.

    :param name: parameter name to look up via self.values_of
    :param default_value: value returned when the param is absent
    """
    vals = self.values_of(name)
    if vals is None:
        return default_value
    if type(vals) is list:
        return vals[0]
    return vals
Return the first value of a particular param by name if it exists, otherwise the default value.
58,606
def get_long_description ( ) : with open ( os . path . join ( BASE_DIRECTORY , 'README.md' ) , 'r' , encoding = 'utf-8' ) as readme_file : return readme_file . read ( )
Returns the long description of HaTeMiLe for Python .
58,607
def get_packages ( ) : packages = find_packages ( exclude = [ 'tests' ] ) packages . append ( '' ) packages . append ( 'js' ) packages . append ( LOCALES_DIRECTORY ) for directory in os . listdir ( LOCALES_DIRECTORY ) : packages . append ( LOCALES_DIRECTORY + '.' + directory ) return packages
Returns the packages used for HaTeMiLe for Python .
58,608
def get_package_data ( ) : package_data = { '' : [ '*.xml' ] , 'js' : [ '*.js' ] , LOCALES_DIRECTORY : [ '*' ] } for directory in os . listdir ( LOCALES_DIRECTORY ) : package_data [ LOCALES_DIRECTORY + '.' + directory ] = [ '*.json' ] return package_data
Returns the packages with static files of HaTeMiLe for Python .
58,609
def get_requirements ( ) : requirements = [ ] with open ( os . path . join ( BASE_DIRECTORY , 'requirements.txt' ) , 'r' , encoding = 'utf-8' ) as requirements_file : lines = requirements_file . readlines ( ) for line in lines : requirements . append ( line . strip ( ) ) return requirements
Returns the content of requirements . txt in a list .
58,610
def where_session_id ( cls , session_id ) : try : session = cls . query . filter_by ( session_id = session_id ) . one ( ) return session except ( NoResultFound , MultipleResultsFound ) : return None
Easy way to query by session id
58,611
def count ( cls , user_id ) : return cls . query . with_entities ( cls . user_id ) . filter_by ( user_id = user_id ) . count ( )
Count sessions with user_id
58,612
def get_branch():
    """Return the current code branch, with '/' replaced by '_'.

    Prefers the GIT_BRANCH environment variable, then BRANCH_NAME,
    and finally falls back to asking git for the abbreviated ref of HEAD.
    """
    branch = os.getenv('GIT_BRANCH')
    if not branch:
        branch = os.getenv('BRANCH_NAME')
    if not branch:
        branch = check_output(
            "git rev-parse --abbrev-ref HEAD".split(" ")
        ).decode('utf-8').strip()
    return branch.replace("/", "_")
Returns the current code branch
58,613
def get_version ( ) : try : return check_output ( "git describe --tags" . split ( " " ) ) . decode ( 'utf-8' ) . strip ( ) except CalledProcessError : return check_output ( "git rev-parse --short HEAD" . split ( " " ) ) . decode ( 'utf-8' ) . strip ( )
Returns the current code version
58,614
def jenkins_last_build_sha ( ) : job_url = os . getenv ( 'JOB_URL' ) job_json_url = "{0}/api/json" . format ( job_url ) response = urllib . urlopen ( job_json_url ) job_data = json . loads ( response . read ( ) ) last_completed_build_url = job_data [ 'lastCompletedBuild' ] [ 'url' ] last_complete_build_json_url = "{0}/api/json" . format ( last_completed_build_url ) response = urllib . urlopen ( last_complete_build_json_url ) last_completed_build = json . loads ( response . read ( ) ) return last_completed_build [ 1 ] [ 'lastBuiltRevision' ] [ 'SHA1' ]
Returns the sha of the last completed jenkins build for this project . Expects JOB_URL in environment
58,615
def get_changed_files_from(old_commit_sha, new_commit_sha):
    """Return the files changed between two commits.

    NOTE(review): despite the historical description, the value is the
    raw newline-separated output of ``git diff-tree``, not a Python list.
    """
    command = "git diff-tree --no-commit-id --name-only -r {0}..{1}".format(
        old_commit_sha, new_commit_sha)
    return check_output(command.split(" ")).decode('utf-8').strip()
Returns the names of the files changed between two commits, as the newline-separated output of git diff-tree.
58,616
def extract_snow_tweets_from_file_generator(json_file_path):
    """Yield each tweet from a file containing one JSON tweet per line.

    :param json_file_path: path of the file to read (UTF-8 encoded)
    :returns: generator of decoded tweet objects
    """
    with open(json_file_path, "r", encoding="utf-8") as tweet_file:
        for line in tweet_file:
            yield json.loads(line)
A generator that opens a file containing many json tweets and yields all the tweets contained inside .
58,617
def extract_all_snow_tweets_from_disk_generator ( json_folder_path ) : json_file_path_generator = ( json_folder_path + "/" + name for name in os . listdir ( json_folder_path ) ) for path in json_file_path_generator : for tweet in extract_snow_tweets_from_file_generator ( path ) : yield tweet
A generator that returns all SNOW tweets stored in disk .
58,618
def store_snow_tweets_from_disk_to_mongodb ( snow_tweets_folder ) : client = pymongo . MongoClient ( "localhost" , 27017 ) db = client [ "snow_tweet_storage" ] collection = db [ "tweets" ] for tweet in extract_all_snow_tweets_from_disk_generator ( snow_tweets_folder ) : collection . insert ( tweet )
Store all SNOW tweets in a mongodb collection .
58,619
def save_file ( f , full_path ) : make_dirs_for_file_path ( full_path , mode = dju_settings . DJU_IMG_CHMOD_DIR ) with open ( full_path , 'wb' ) as t : f . seek ( 0 ) while True : buf = f . read ( dju_settings . DJU_IMG_RW_FILE_BUFFER_SIZE ) if not buf : break t . write ( buf ) os . chmod ( full_path , dju_settings . DJU_IMG_CHMOD_FILE )
Saves file f to full_path and set rules .
58,620
def get_profile_configs ( profile = None , use_cache = True ) : if use_cache and profile in _profile_configs_cache : return _profile_configs_cache [ profile ] profile_conf = None if profile is not None : try : profile_conf = dju_settings . DJU_IMG_UPLOAD_PROFILES [ profile ] except KeyError : if profile != 'default' : raise ValueError ( unicode ( ERROR_MESSAGES [ 'unknown_profile' ] ) % { 'profile' : profile } ) conf = copy . deepcopy ( dju_settings . DJU_IMG_UPLOAD_PROFILE_DEFAULT ) if profile_conf : conf . update ( copy . deepcopy ( profile_conf ) ) for v_i in xrange ( len ( conf [ 'VARIANTS' ] ) ) : v = conf [ 'VARIANTS' ] [ v_i ] conf [ 'VARIANTS' ] [ v_i ] = copy . deepcopy ( dju_settings . DJU_IMG_UPLOAD_PROFILE_VARIANT_DEFAULT ) conf [ 'VARIANTS' ] [ v_i ] . update ( v ) if use_cache : _profile_configs_cache [ profile ] = conf return conf
Returns upload configs for profile .
58,621
def generate_img_id ( profile , ext = None , label = None , tmp = False ) : if ext and not ext . startswith ( '.' ) : ext = '.' + ext if label : label = re . sub ( r'[^a-z0-9_\-]' , '' , label , flags = re . I ) label = re . sub ( r'_+' , '_' , label ) label = label [ : 60 ] return '{profile}:{tmp}{dtstr}_{rand}{label}{ext}' . format ( profile = profile , tmp = ( dju_settings . DJU_IMG_UPLOAD_TMP_PREFIX if tmp else '' ) , dtstr = datetime_to_dtstr ( ) , rand = get_random_string ( 4 , 'abcdefghijklmnopqrstuvwxyz0123456789' ) , label = ( ( '_' + label ) if label else '' ) , ext = ( ext or '' ) , )
Generates img_id .
58,622
def get_relative_path_from_img_id ( img_id , variant_label = None , ext = None , create_dirs = False ) : profile , base_name = img_id . split ( ':' , 1 ) conf = get_profile_configs ( profile ) if not variant_label : status_suffix = dju_settings . DJU_IMG_UPLOAD_MAIN_SUFFIX else : status_suffix = dju_settings . DJU_IMG_UPLOAD_VARIANT_SUFFIX name , file_ext = os . path . splitext ( base_name ) prefix = '' if name . startswith ( dju_settings . DJU_IMG_UPLOAD_TMP_PREFIX ) : name = name [ len ( dju_settings . DJU_IMG_UPLOAD_TMP_PREFIX ) : ] prefix = dju_settings . DJU_IMG_UPLOAD_TMP_PREFIX name_parts = name . split ( '_' , 2 ) name = '{name}{status_suffix}{hash}' . format ( name = name , status_suffix = status_suffix , hash = get_hash ( '_' . join ( name_parts [ : 2 ] ) , variant_label = variant_label ) ) if variant_label : name += '_' + variant_label if ext : file_ext = ext elif variant_label : for var_conf in conf [ 'VARIANTS' ] : var_conf_label = var_conf [ 'LABEL' ] or get_variant_label ( var_conf ) if var_conf_label == variant_label : if var_conf [ 'FORMAT' ] : file_ext = var_conf [ 'FORMAT' ] . lower ( ) break if file_ext and not file_ext . startswith ( '.' ) : file_ext = '.' + file_ext relative_path = os . path . join ( dju_settings . DJU_IMG_UPLOAD_SUBDIR , conf [ 'PATH' ] , name_parts [ 0 ] [ - 2 : ] , ( prefix + name + file_ext ) ) . replace ( '\\' , '/' ) if create_dirs : path = media_path ( relative_path ) make_dirs_for_file_path ( path , mode = dju_settings . DJU_IMG_CHMOD_DIR ) return relative_path
Returns path to file relative MEDIA_URL .
58,623
def is_img_id_exists ( img_id ) : main_rel_path = get_relative_path_from_img_id ( img_id ) main_path = media_path ( main_rel_path ) return os . path . isfile ( main_path )
Checks if img_id has real file on filesystem .
58,624
def is_img_id_valid(img_id):
    """Checks if img_id is valid.

    An img_id is valid when it contains only allowed characters
    (letters, digits, ``_``, ``:``, ``-``, ``.``), no repeated dots,
    exactly one ``:`` separating a non-empty profile from a non-empty
    base name, and the profile is a known upload profile.

    :param img_id: image identifier string ("profile:basename")
    :returns: True if img_id is valid, otherwise False
    """
    # BUG FIX: the original passed re.IGNORECASE as the positional
    # `count` argument of re.sub (so at most 2 substitutions were made
    # and no flag was applied), which wrongly rejected ids containing
    # uppercase characters.
    t = re.sub(r'[^a-z0-9_:\-\.]', '', img_id, flags=re.IGNORECASE)
    t = re.sub(r'\.+', '.', t)
    if img_id != t or img_id.count(':') != 1:
        return False
    profile, base_name = img_id.split(':', 1)
    if not profile or not base_name:
        return False
    try:
        get_profile_configs(profile)
    except ValueError:
        return False
    return True
Checks if img_id is valid .
58,625
def remove_all_files_of_img_id ( img_id ) : files = get_files_by_img_id ( img_id , check_hash = False ) if files : os . remove ( media_path ( files [ 'main' ] ) ) for fn in files [ 'variants' ] . values ( ) : os . remove ( media_path ( fn ) )
Removes all of the img_id's files (the main file and all variants).
58,626
def remove_tmp_prefix_from_filename ( filename ) : if not filename . startswith ( dju_settings . DJU_IMG_UPLOAD_TMP_PREFIX ) : raise RuntimeError ( ERROR_MESSAGES [ 'filename_hasnt_tmp_prefix' ] % { 'filename' : filename } ) return filename [ len ( dju_settings . DJU_IMG_UPLOAD_TMP_PREFIX ) : ]
Remove tmp prefix from filename .
58,627
def remove_tmp_prefix_from_file_path ( file_path ) : path , filename = os . path . split ( file_path ) return os . path . join ( path , remove_tmp_prefix_from_filename ( filename ) ) . replace ( '\\' , '/' )
Remove tmp prefix from file path or url .
58,628
def make_permalink ( img_id ) : profile , filename = img_id . split ( ':' , 1 ) new_img_id = profile + ':' + remove_tmp_prefix_from_filename ( filename ) urls = get_files_by_img_id ( img_id ) if urls is None : return urls move_list = { ( urls [ 'main' ] , remove_tmp_prefix_from_file_path ( urls [ 'main' ] ) ) } for var_label , var_file_path in urls [ 'variants' ] . iteritems ( ) : move_list . add ( ( var_file_path , remove_tmp_prefix_from_file_path ( var_file_path ) ) ) for file_path_from , file_path_to in move_list : os . rename ( media_path ( file_path_from ) , media_path ( file_path_to ) ) return new_img_id
Removes tmp prefix from filename and rename main and variant files . Returns img_id without tmp prefix .
58,629
def upload_from_fs ( fn , profile = None , label = None ) : if not os . path . isfile ( fn ) : raise ValueError ( 'File is not exists: {}' . format ( fn ) ) if profile is None : profile = 'default' conf = get_profile_configs ( profile ) with open ( fn , 'rb' ) as f : if not is_image ( f , types = conf [ 'TYPES' ] ) : msg = ( ( 'Format of uploaded file "%(name)s" is not allowed. ' 'Allowed formats is: %(formats)s.' ) % { 'name' : fn , 'formats' : ', ' . join ( map ( lambda t : t . upper ( ) , conf [ 'TYPES' ] ) ) } ) raise RuntimeError ( msg ) return _custom_upload ( f , profile , label , conf )
Saves image from fn with TMP prefix and returns img_id .
58,630
def upload_from_fileobject ( f , profile = None , label = None ) : if profile is None : profile = 'default' conf = get_profile_configs ( profile ) f . seek ( 0 ) if not is_image ( f , types = conf [ 'TYPES' ] ) : msg = ( ( 'Format of uploaded file is not allowed. ' 'Allowed formats is: %(formats)s.' ) % { 'formats' : ', ' . join ( map ( lambda t : t . upper ( ) , conf [ 'TYPES' ] ) ) } ) raise RuntimeError ( msg ) return _custom_upload ( f , profile , label , conf )
Saves image from f with TMP prefix and returns img_id .
58,631
def request ( self , method , url , ** kwargs ) : if "data" in kwargs : kwargs [ "data" ] = json . dumps ( kwargs [ "data" ] ) kwargs [ "headers" ] = { 'Content-Type' : 'application/json' , 'Authorization' : 'token %s' % self . __token__ , } req = make_request ( method , url , ** kwargs ) self . logger . debug ( "Request::{}::{}" . format ( method , url ) , extra = { "request" : kwargs , "response" : { "headers" : req . headers , "code" : req . status_code , "data" : req . content } } ) return req
Unified method to make request to the Github API
58,632
def default_branch ( self , file ) : if isinstance ( self . __default_branch__ , str ) : return self . __default_branch__ elif self . __default_branch__ == GithubProxy . DEFAULT_BRANCH . NO : return self . master_upstream else : return file . sha [ : 8 ]
Decide the name of the default branch given the file and the configuration
58,633
def init_app ( self , app ) : self . app = app self . __blueprint__ = Blueprint ( self . __name__ , self . __name__ , url_prefix = self . __prefix__ , ) for url , name , methods in self . __urls__ : self . blueprint . add_url_rule ( url , view_func = getattr ( self , name ) , endpoint = name . replace ( "r_" , "" ) , methods = methods ) self . app = self . app . register_blueprint ( self . blueprint ) return self . blueprint
Initialize the application and register the blueprint
58,634
def put ( self , file ) : input_ = { "message" : file . logs , "author" : file . author . dict ( ) , "content" : file . base64 , "branch" : file . branch } uri = "{api}/repos/{origin}/contents/{path}" . format ( api = self . github_api_url , origin = self . origin , path = file . path ) data = self . request ( "PUT" , uri , data = input_ ) if data . status_code == 201 : file . pushed = True return file else : decoded_data = json . loads ( data . content . decode ( "utf-8" ) ) return self . ProxyError ( data . status_code , ( decoded_data , "message" ) , step = "put" , context = { "uri" : uri , "params" : input_ } )
Create a new file on github
58,635
def get ( self , file ) : uri = "{api}/repos/{origin}/contents/{path}" . format ( api = self . github_api_url , origin = self . origin , path = file . path ) params = { "ref" : file . branch } data = self . request ( "GET" , uri , params = params ) if data . status_code == 200 : data = json . loads ( data . content . decode ( "utf-8" ) ) file . blob = data [ "sha" ] elif data . status_code == 404 : pass else : decoded_data = json . loads ( data . content . decode ( "utf-8" ) ) return self . ProxyError ( data . status_code , ( decoded_data , "message" ) , step = "get" , context = { "uri" : uri , "params" : params } ) return file
Check on github if a file exists
58,636
def update ( self , file ) : params = { "message" : file . logs , "author" : file . author . dict ( ) , "content" : file . base64 , "sha" : file . blob , "branch" : file . branch } uri = "{api}/repos/{origin}/contents/{path}" . format ( api = self . github_api_url , origin = self . origin , path = file . path ) data = self . request ( "PUT" , uri , data = params ) if data . status_code == 200 : file . pushed = True return file else : reply = json . loads ( data . content . decode ( "utf-8" ) ) return self . ProxyError ( data . status_code , ( reply , "message" ) , step = "update" , context = { "uri" : uri , "params" : params } )
Make an update query on Github API for given file
58,637
def pull_request ( self , file ) : uri = "{api}/repos/{upstream}/pulls" . format ( api = self . github_api_url , upstream = self . upstream , path = file . path ) params = { "title" : "[Proxy] {message}" . format ( message = file . logs ) , "body" : "" , "head" : "{origin}:{branch}" . format ( origin = self . origin . split ( "/" ) [ 0 ] , branch = file . branch ) , "base" : self . master_upstream } data = self . request ( "POST" , uri , data = params ) if data . status_code == 201 : return json . loads ( data . content . decode ( "utf-8" ) ) [ "html_url" ] else : reply = json . loads ( data . content . decode ( "utf-8" ) ) return self . ProxyError ( data . status_code , reply [ "message" ] , step = "pull_request" , context = { "uri" : uri , "params" : params } )
Create a pull request
58,638
def get_ref ( self , branch , origin = None ) : if not origin : origin = self . origin uri = "{api}/repos/{origin}/git/refs/heads/{branch}" . format ( api = self . github_api_url , origin = origin , branch = branch ) data = self . request ( "GET" , uri ) if data . status_code == 200 : data = json . loads ( data . content . decode ( "utf-8" ) ) if isinstance ( data , list ) : return False return data [ "object" ] [ "sha" ] elif data . status_code == 404 : return False else : decoded_data = json . loads ( data . content . decode ( "utf-8" ) ) return self . ProxyError ( data . status_code , ( decoded_data , "message" ) , step = "get_ref" , context = { "uri" : uri } )
Check if a reference exists
58,639
def make_ref ( self , branch ) : master_sha = self . get_ref ( self . master_upstream ) if not isinstance ( master_sha , str ) : return self . ProxyError ( 404 , "The default branch from which to checkout is either not available or does not exist" , step = "make_ref" ) params = { "ref" : "refs/heads/{branch}" . format ( branch = branch ) , "sha" : master_sha } uri = "{api}/repos/{origin}/git/refs" . format ( api = self . github_api_url , origin = self . origin ) data = self . request ( "POST" , uri , data = params ) if data . status_code == 201 : data = json . loads ( data . content . decode ( "utf-8" ) ) return data [ "object" ] [ "sha" ] else : decoded_data = json . loads ( data . content . decode ( "utf-8" ) ) return self . ProxyError ( data . status_code , ( decoded_data , "message" ) , step = "make_ref" , context = { "uri" : uri , "params" : params } )
Make a branch on github
58,640
def check_sha(self, sha, content):
    """Check a received sha against the salted hash of the content.

    :param sha: hex digest sent by the client
    :param content: raw content that was hashed
    :returns: True when sha matches sha256(content + self.secret)
    """
    salted = "{}{}".format(content, self.secret)
    expected = sha256(bytes(salted, "utf-8")).hexdigest()
    return sha == expected
Check sent sha against the salted hash of the content
58,641
def patch_ref ( self , sha ) : uri = "{api}/repos/{origin}/git/refs/heads/{branch}" . format ( api = self . github_api_url , origin = self . origin , branch = self . master_fork ) data = { "sha" : sha , "force" : True } reply = self . request ( "PATCH" , uri , data = data ) if reply . status_code == 200 : dic = json . loads ( reply . content . decode ( "utf-8" ) ) return dic [ "object" ] [ "sha" ] else : dic = json . loads ( reply . content . decode ( "utf-8" ) ) return self . ProxyError ( reply . status_code , ( dic , "message" ) , step = "patch" , context = { "uri" : uri , "data" : data } )
Patch reference on the origin master branch
58,642
def r_receive(self, filename):
    """Receive file data from Perseids and push it through the github
    workflow: verify the salted hash, ensure the target branch exists,
    create or update the file, then open a pull request.

    :param filename: repository path of the file being received
    :returns: a JSON response (201 on success) or a ProxyError response
    """
    content = request.data.decode("utf-8")
    if not content:
        error = self.ProxyError(300, "Content is missing")
        return error.response()
    # Author/date/log metadata, with defaults when not supplied.
    author_name = request.args.get("author_name", self.default_author.name)
    author_email = request.args.get("author_email", self.default_author.email)
    author = Author(author_name, author_email)
    date = request.args.get("date", datetime.datetime.now().date().isoformat())
    logs = request.args.get("logs", "{} updated {}".format(author.name, filename))
    self.logger.info(
        "Receiving query from {}".format(author_name),
        extra={"IP": request.remote_addr})
    # The payload must carry a salted hash proving knowledge of the secret.
    secure_sha = None
    if "fproxy-secure-hash" in request.headers:
        secure_sha = request.headers["fproxy-secure-hash"]
    if not secure_sha or not self.check_sha(secure_sha, content):
        error = self.ProxyError(300, "Hash does not correspond with content")
        return error.response()
    file = File(path=filename, content=content, author=author, date=date, logs=logs)
    file.branch = request.args.get("branch", self.default_branch(file))
    # Ensure the working branch exists, creating it when needed.
    branch_status = self.get_ref(file.branch)
    if isinstance(branch_status, self.ProxyError):
        return branch_status.response()
    elif not branch_status:
        branch_status = self.make_ref(file.branch)
        if isinstance(branch_status, self.ProxyError):
            return branch_status.response()
    # Fetch blob sha if the file already exists, then create or update.
    file = self.get(file)
    if isinstance(file, self.ProxyError):
        return file.response()
    if file.blob:
        file = self.update(file)
    else:
        file = self.put(file)
    if isinstance(file, self.ProxyError):
        return file.response()
    pr_url = self.pull_request(file)
    if isinstance(pr_url, self.ProxyError):
        return pr_url.response()
    reply = {
        "status": "success",
        "message": "The workflow was well applied",
        "pr_url": pr_url,
    }
    data = jsonify(reply)
    # BUG FIX: this statement was garbled in the source
    # ("data .  status_code = 201 return data"); restored as an
    # attribute assignment followed by the return.
    data.status_code = 201
    return data
Function which receives the data from Perseids
58,643
def r_update ( self ) : upstream = self . get_ref ( self . master_upstream , origin = self . upstream ) if isinstance ( upstream , bool ) : return ( ProxyError ( 404 , "Upstream Master branch '{0}' does not exist" . format ( self . master_upstream ) , step = "get_upstream_ref" ) ) . response ( ) elif isinstance ( upstream , self . ProxyError ) : return upstream . response ( ) new_sha = self . patch_ref ( upstream ) if isinstance ( new_sha , self . ProxyError ) : return new_sha . response ( ) self . logger . info ( "Updated repository {} to sha {}" . format ( self . origin , new_sha ) , extra = { "former_sha" : upstream } ) return jsonify ( { "status" : "success" , "commit" : new_sha } )
Updates a fork Master
58,644
def delete_where_user_id ( cls , user_id ) : result = cls . where_user_id ( user_id ) if result is None : return None result . delete ( ) return True
Delete sessions by user_id.
58,645
def int_filter(text):
    """Extract the integer formed by all digit characters in text.

    :param text: string possibly containing digits among other characters
    :returns: int built from the digits in order of appearance
    """
    digits = [char for char in text if char.isdigit()]
    return int("".join(digits))
Extract integer from text .
58,646
def float_filter(text):
    """Extract the float formed by all digits and dots in text.

    :param text: string possibly containing a decimal number
    :returns: float built from the kept characters in order
    """
    kept = [char for char in text if char.isdigit() or char == "."]
    return float("".join(kept))
Extract float from text .
58,647
def load(self, filename, offset):
    """Will eventually load information for an Apple_Boot volume.

    Not yet implemented; currently it only records the offset.

    :param filename: path of the volume file (currently unused)
    :param offset: byte offset of the volume, stored on self
    """
    # BUG FIX (cleanup): the original wrapped this plain attribute
    # assignment in try/except IOError, but the assignment can never
    # raise IOError, so the handler was dead code.
    self.offset = offset
Will eventually load information for Apple_Boot volume . Not yet implemented
58,648
def resolve ( accessor : hexdi . core . clstype ) -> __gentype__ . T : return hexdi . core . get_root_container ( ) . resolve ( accessor = accessor )
shortcut for resolving from root container
58,649
def bind_type ( type_to_bind : hexdi . core . restype , accessor : hexdi . core . clstype , lifetime_manager : hexdi . core . ltype ) : hexdi . core . get_root_container ( ) . bind_type ( type_to_bind , accessor , lifetime_manager )
shortcut for bind_type on root container
58,650
def bind_permanent ( type_to_bind : hexdi . core . restype , accessor : hexdi . core . clstype ) : hexdi . core . get_root_container ( ) . bind_type ( type_to_bind , accessor , lifetime . PermanentLifeTimeManager )
shortcut for bind_type with PermanentLifeTimeManager on root container
58,651
def bind_transient ( type_to_bind : hexdi . core . restype , accessor : hexdi . core . clstype ) : hexdi . core . get_root_container ( ) . bind_type ( type_to_bind , accessor , lifetime . PerResolveLifeTimeManager )
shortcut for bind_type with PerResolveLifeTimeManager on root container
58,652
def get_series ( self , series ) : if series == "acs1" : return self . census . acs1dp elif series == "acs5" : return self . census . acs5 elif series == "sf1" : return self . census . sf1 elif series == "sf3" : return self . census . sf3 else : return None
Returns a census series API handler .
58,653
def setup_system_repository ( self , repository_type , reset_on_start , repository_class = None ) : cnf = dict ( messaging_enable = True , messaging_reset_on_start = reset_on_start ) system_repo = self . new ( repository_type , name = REPOSITORY_DOMAINS . SYSTEM , repository_class = repository_class , configuration = cnf ) self . set ( system_repo )
Sets up the system repository with the given repository type .
58,654
def initialize_all ( self ) : for repo in itervalues_ ( self . __repositories ) : if not repo . is_initialized : repo . initialize ( )
Convenience method to initialize all repositories that have not been initialized yet .
58,655
async def file ( location , mime_type = None , headers = None , _range = None ) : filename = path . split ( location ) [ - 1 ] async with open_async ( location , mode = 'rb' ) as _file : if _range : await _file . seek ( _range . start ) out_stream = await _file . read ( _range . size ) headers [ 'Content-Range' ] = 'bytes %s-%s/%s' % ( _range . start , _range . end , _range . total ) else : out_stream = await _file . read ( ) mime_type = mime_type or guess_type ( filename ) [ 0 ] or 'text/plain' return HTTPResponse ( status = 200 , headers = headers , content_type = mime_type , body_bytes = out_stream )
Return a response object with file data .
58,656
def get_bundles ( ) : global _cached_bundles if not _cached_bundles : _cached_bundles = BundleManager ( ) for bundle_conf in bundles_settings . BUNDLES : _cached_bundles [ bundle_conf [ 0 ] ] = Bundle ( bundle_conf ) return _cached_bundles
Used to cache the bundle definitions rather than loading from config every time they're used.
58,657
def get_bundle_versions ( ) : global _cached_versions if not bundles_settings . BUNDLES_VERSION_FILE : _cached_versions = { } if _cached_versions is None : locs = { } try : execfile ( bundles_settings . BUNDLES_VERSION_FILE , locs ) _cached_versions = locs [ 'BUNDLES_VERSIONS' ] except IOError : _cached_versions = { } return _cached_versions
Used to cache the bundle versions rather than loading them from the bundle versions file every time they're used.
58,658
def get_url ( self , version = None ) : if self . fixed_bundle_url : return self . fixed_bundle_url return '%s.%s.%s' % ( os . path . join ( self . bundle_url_root , self . bundle_filename ) , version or self . get_version ( ) , self . bundle_type )
Return the filename of the bundled bundle
58,659
def get_file_urls ( self ) : if self . use_bundle : return [ self . get_url ( ) ] return [ bundle_file . file_url for bundle_file in self . files ]
Return a list of file urls - will return a single item if settings . USE_BUNDLES is True
58,660
def export_batch ( self ) : batch = self . batch_cls ( model = self . model , history_model = self . history_model , using = self . using ) if batch . items : try : json_file = self . json_file_cls ( batch = batch , path = self . path ) json_file . write ( ) except JSONDumpFileError as e : raise TransactionExporterError ( e ) batch . close ( ) return batch return None
Returns a batch instance after exporting a batch of txs .
58,661
def _check_key ( self , key ) : self . setup_schema ( ) if key not in self . _attrs and key not in self : raise KeyError ( key )
Ensure key is either in schema s attributes or already set on self .
58,662
def hirise_edr ( self , pid , chunk_size = 1024 * 1024 ) : productid = "{}*" . format ( pid ) query = { "target" : "mars" , "query" : "product" , "results" : "f" , "output" : "j" , "pt" : "EDR" , "iid" : "HiRISE" , "ihid" : "MRO" , "productid" : productid } products = query_ode ( self . ode_url , query ) if len ( products ) > 30 : print ( "Error: Too many products selected for in query, Make PID more specific" ) sys . exit ( 1 ) if not isinstance ( products , list ) : print ( "Error: Too few responses from server to be a full HiRISE EDR, " ) else : for product in products : download_edr_img_files ( product , self . https , chunk_size )
Download a HiRISE EDR set of . IMG files to the CWD
58,663
def detect ( self , filename , offset , standalone = False ) : r = RawStruct ( filename = filename , offset = offset + SIG_OFFSET , length = SIG_SIZE ) oem_id = r . data if oem_id == b"NTFS " : return True return False
Verifies NTFS filesystem signature .
58,664
def load ( cls , v ) : if v is None : return [ ] if isinstance ( v , list ) : return [ Action ( s ) for s in v ] elif isinstance ( v , str ) : return [ Action ( v ) ] else : raise ParseError ( "Couldn't parse action: %r" % v )
Load the action from configuration
58,665
def load_stream ( cls , st ) : y = yaml . load ( st ) return [ Automaton ( k , v ) for k , v in y . iteritems ( ) ]
Load Automatons from a stream
58,666
def make_dot(self, filename_or_stream, auts):
    """Create a graphviz .dot representation of the automata.

    :param filename_or_stream: output filename (str) or a writable stream
    :param auts: iterable of automata to render
    """
    if isinstance(filename_or_stream, str):
        # BUG FIX: the original used the Python 2-only builtin
        # ``file(...)``, which does not exist in Python 3; ``open``
        # is the portable equivalent.
        stream = open(filename_or_stream, 'w')
    else:
        stream = filename_or_stream
    dot = DotFile(stream)
    for aut in auts:
        dot.start(aut.name)
        dot.node('shape=Mrecord width=1.5')
        for st in aut.states:
            # Build the Mrecord label: name plus entering/leaving actions.
            label = st.name
            if st.entering:
                label += '|%s' % '\\l'.join(str(action) for action in st.entering)
            if st.leaving:
                label += '|%s' % '\\l'.join(str(action) for action in st.leaving)
            label = '{%s}' % label
            dot.state(st.name, label=label)
        for st in aut.states:
            for tr in st.transitions:
                dot.transition(tr.s_from.name, tr.s_to.name, tr.when)
        dot.end()
    dot.finish()
Create a graphviz . dot representation of the automaton .
58,667
def create ( self , url ) : bucket , obj_key = _parse_url ( url ) if not bucket : raise InvalidURL ( url , "You must specify a bucket and (optional) path" ) if obj_key : target = "/" . join ( ( bucket , obj_key ) ) else : target = bucket return self . call ( "CreateBucket" , bucket = target )
Create a bucket directory or empty file .
58,668
def destroy ( self , url , recursive = False ) : bucket , obj_key = _parse_url ( url ) if not bucket : raise InvalidURL ( url , "You must specify a bucket and (optional) path" ) if obj_key : target = "/" . join ( ( bucket , obj_key ) ) else : target = bucket if recursive : for obj in self . get ( url , delimiter = '' ) : self . destroy ( obj [ 'url' ] ) return self . call ( "DeleteBucket" , bucket = target )
Destroy a bucket directory or file . Specifying recursive = True recursively deletes all subdirectories and files .
58,669
def upload ( self , local_path , remote_url ) : bucket , key = _parse_url ( remote_url ) with open ( local_path , 'rb' ) as fp : return self . call ( "PutObject" , bucket = bucket , key = key , body = fp )
Copy a local file to an S3 location .
58,670
def download ( self , remote_url , local_path , buffer_size = 8 * 1024 ) : bucket , key = _parse_url ( remote_url ) response_file = self . call ( "GetObject" , bucket = bucket , key = key ) [ 'Body' ] with open ( local_path , 'wb' ) as fp : buf = response_file . read ( buffer_size ) while buf : fp . write ( buf ) buf = response_file . read ( buffer_size )
Copy S3 data to a local file .
58,671
def copy(self, src_url, dst_url):
    """Copy an S3 object to another S3 location.

    An empty destination bucket means "same bucket as the source".
    """
    src_bucket, src_key = _parse_url(src_url)
    dst_bucket, dst_key = _parse_url(dst_url)
    return self.call(
        "CopyObject",
        copy_source='/'.join((src_bucket, src_key)),
        bucket=dst_bucket or src_bucket,
        key=dst_key,
    )
Copy an S3 object to another S3 location .
58,672
def move(self, src_url, dst_url):
    """Move a single S3 object: copy it to *dst_url*, then delete the original.

    The copy happens first so the data exists at the destination before
    the source is removed.
    """
    self.copy(src_url, dst_url)
    self.destroy(src_url)
Copy a single S3 object to another S3 location then delete the original object .
58,673
def get_shard_names(self):
    """Return the list of shard names in the cluster.

    Derived from num_shards() and get_shard_name() for each shard index.
    """
    # Idiom fix: list comprehension instead of a manual append loop.
    return [self.get_shard_name(num) for num in range(self.num_shards())]
get_shard_names returns an array containing the names of the shards in the cluster . This is determined with num_shards and shard_name_format
58,674
def get_canonical_key_id(self, key_id):
    """Map *key_id* to the canonical key id of the shard it hashes to.

    Used by get_canonical_key; see that method for more explanation.
    """
    return self._canonical_keys[self.get_shard_num_by_key_id(key_id)]
get_canonical_key_id is used by get_canonical_key see the comment for that method for more explanation .
58,675
def get_shard_by_num(self, shard_num):
    """Return the shard at index *shard_num*.

    Raises ValueError when the index lies outside [0, num_shards()).
    """
    if 0 <= shard_num < self.num_shards():
        return self._shards[shard_num]
    raise ValueError("requested invalid shard# {0}".format(shard_num))
get_shard_by_num returns the shard at index shard_num .
58,676
def _get_key_id_from_key ( self , key ) : key_id = key regex = '{0}([^{1}]*){2}' . format ( self . _hash_start , self . _hash_stop , self . _hash_stop ) m = re . search ( regex , key ) if m is not None : key_id = m . group ( 1 ) return key_id
_get_key_id_from_key returns the key id from a key if found . Otherwise it just returns the key itself , to be used as the key id .
58,677
def compute_canonical_key_ids(self, search_amplifier=100):
    """Find, for every shard, the lowest integer key id mapping to it.

    A canonical key id is the lowest integer key id that maps to a
    particular shard; the mapping depends on the number of shards. Up to
    num_shards**2 * search_amplifier candidates are tried; raises
    ValueError when that is not enough to cover every shard.
    """
    canonical_keys = {}
    num_shards = self.num_shards()
    num_iterations = (num_shards ** 2) * search_amplifier
    for key_id in range(1, num_iterations):
        shard_num = self.get_shard_num_by_key(str(key_id))
        if shard_num in canonical_keys:
            continue
        canonical_keys[shard_num] = str(key_id)
        if len(canonical_keys) == num_shards:
            break
    if len(canonical_keys) != num_shards:
        # BUG FIX: the message used %d placeholders with str.format(), so
        # the counts were never substituted; use {} placeholders.
        raise ValueError(
            "Failed to compute enough keys. "
            "Wanted {}, got {} (search_amp={}).".format(
                num_shards, len(canonical_keys), search_amplifier))
    return canonical_keys
A canonical key id is the lowest integer key id that maps to a particular shard . The mapping to canonical key ids depends on the number of shards .
58,678
def keys(self, args):
    """Run keys(args) on every shard; expensive.

    Returns a dict mapping shard number to that shard's result.
    """
    return {
        num: self.get_shard_by_num(num).keys(args)
        for num in range(self.num_shards())
    }
keys wrapper that queries every shard . This is an expensive operation .
58,679
def mget(self, args):
    """Batch the given keys per shard and run one mget per shard involved.

    Returns a dict mapping shard number to that shard's mget result.
    """
    grouped = collections.defaultdict(list)
    for key in args:
        grouped[self.get_shard_num_by_key(key)].append(key)
    return {
        shard_num: self.get_shard_by_num(shard_num).mget(shard_keys)
        for shard_num, shard_keys in grouped.items()
    }
mget wrapper that batches keys per shard and executes as few mgets as necessary to fetch the keys from all the shards involved .
58,680
def mset(self, args):
    """Batch key/value pairs per shard and run one mset per shard involved.

    Returns the sum of the per-shard mset return values.
    """
    key_map = collections.defaultdict(dict)
    # Idiom fix: iterate items() directly instead of keys() followed by a
    # second dict lookup per key.
    for key, value in args.items():
        key_map[self.get_shard_num_by_key(key)][key] = value
    return sum(
        self.get_shard_by_num(shard_num).mset(shard_args)
        for shard_num, shard_args in key_map.items()
    )
mset wrapper that batches keys per shard and executes as few msets as necessary to set the keys in all the shards involved .
58,681
def id_generator(start=0):
    """Generator of monotonically increasing numeric ids.

    next() yields start, start + 1, ...; send(value) jumps the counter to
    *value* (which is yielded back). Sending a value lower than the
    current count raises ValueError.
    """
    count = start
    while True:
        send_value = (yield count)
        # Idiom fix: "is not None" instead of "not ... is None".
        if send_value is not None:
            if send_value < count:
                raise ValueError(
                    'Values from ID generator must increase '
                    'monotonically (current value: %d; value '
                    'sent to generator: %d).' % (count, send_value))
            count = send_value
        else:
            count += 1
Generator for sequential numeric numbers .
58,682
def generative(func):
    """Mark an instance method as generative.

    The wrapped method runs against a shallow copy of the instance and
    returns whatever it produces, leaving the original instance untouched.
    """
    def decorated(self, *args, **kwargs):
        duplicate = type(self).__new__(type(self))
        duplicate.__dict__ = dict(self.__dict__)
        return func(duplicate, *args, **kwargs)
    return update_wrapper(decorated, func)
Marks an instance method as generative .
58,683
def truncate(message, limit=500):
    """Shorten *message* to about *limit* characters by eliding the middle.

    The beginning and end of the message are kept; short messages are
    returned unchanged.
    """
    if len(message) <= limit:
        return message
    keep = limit // 2 - 2
    return '{} .. {}'.format(message[:keep], message[len(message) - keep:])
Truncates the message to the given limit length . The beginning and the end of the message are left untouched .
58,684
def remove_board(board_id):
    """Remove every boards.txt entry belonging to *board_id*."""
    log.debug('remove %s', board_id)
    prefix = board_id + '.'
    kept = [ln for ln in boards_txt().lines()
            if not ln.strip().startswith(prefix)]
    boards_txt().write_lines(kept)
remove board .
58,685
def make_route(self, route) -> dict:
    """Construct a route description to be parsed into the Flask app.

    :param route: dict with 'url', 'name', 'methods', 'function' keys and
        an optional 'middleware' entry.
    :returns: serializable route dict; the callback is recorded by its
        module, enclosing qualname, and function name.
    """
    # BUG FIX: build a fresh methods list instead of appending 'OPTIONS'
    # to route['methods'], which mutated the caller's dict in place (and
    # accumulated duplicates on repeated calls).
    methods = list(route['methods']) + ['OPTIONS']
    func = route['function']
    return {
        'url': route['url'],
        'name': route['name'],
        'methods': methods,
        'middleware': route.get('middleware'),
        'callback': {
            'module': func.__module__,
            'class': func.__qualname__.rsplit('.', 1)[0],
            'function': func.__name__,
        },
    }
Construct a route to be parsed into flask App
58,686
def diffusion_driver(self):
    """Underlying diffusion driver(s) — the dW of each process X in an
    SDE like dX = m dt + s dW — always returned as a tuple.

    Defaults to (self,) when no explicit driver has been set.
    """
    driver = self._diffusion_driver
    if driver is None:
        return (self,)
    if isinstance(driver, (list, tuple)):
        return tuple(driver)
    return (driver,)
diffusion driver are the underlying dW of each process X in a SDE like dX = m dt + s dW
58,687
def reset_codenames(self, dry_run=None, clear_existing=None):
    """Ensure permission codenames exist for every historical model.

    Iterates all installed apps/models and processes only models that
    carry a simple-history manager attribute. Prints a summary of created
    codenames and updated names; with dry_run nothing is persisted.
    """
    self.created_codenames = []
    self.updated_names = []
    # BUG FIX: "view" was both listed unconditionally and appended again
    # for Django >= 2.1, producing a duplicate; the view permission only
    # exists on Django >= 2.1, so start without it.
    actions = ["add", "change", "delete"]
    if django.VERSION >= (2, 1):
        actions.append("view")
    for app in django_apps.get_app_configs():
        for model in app.get_models():
            try:
                # Only historical models expose this manager attribute.
                getattr(model, model._meta.simple_history_manager_attribute)
            except AttributeError:
                pass
            else:
                self.update_or_create(
                    model, dry_run=dry_run, clear_existing=clear_existing)
    if dry_run:
        print("This is a dry-run. No modifications were made.")
    if self.created_codenames:
        # BUG FIX: message read "were be added".
        print("The following historical permission.codenames were added:")
        pprint(self.created_codenames)
    else:
        print("No historical permission.codenames were added.")
    if self.updated_names:
        print("The following historical permission.names were updated:")
        pprint(self.updated_names)
    else:
        print("No historical permission.names were updated.")
Ensures all historical model codenames exist in Django's Permission model .
58,688
def is_resource_class_member_attribute(rc, attr_name):
    """Tell whether *attr_name* names a MEMBER attribute of the
    registered resource class *rc*."""
    kind = get_resource_class_attribute(rc, attr_name).kind
    return kind == RESOURCE_ATTRIBUTE_KINDS.MEMBER
Checks if the given attribute name is a member attribute of the given registered resource .
58,689
def is_resource_class_collection_attribute(rc, attr_name):
    """Tell whether *attr_name* names a COLLECTION attribute of the
    registered resource class *rc*."""
    kind = get_resource_class_attribute(rc, attr_name).kind
    return kind == RESOURCE_ATTRIBUTE_KINDS.COLLECTION
Checks if the given attribute name is a collection attribute of the given registered resource .
58,690
def get_joke():
    """Fetch a random Ron Swanson quote; returns None on HTTP failure."""
    page = requests.get("http://ron-swanson-quotes.herokuapp.com/v2/quotes")
    if page.status_code == 200:
        # BUG FIX: decode via response.json() instead of
        # page.content.decode(page.encoding), which raises TypeError when
        # the server omits a charset (encoding is None). The dead
        # "jokes = []" pre-assignment is dropped as well.
        jokes = page.json()
        return '"' + jokes[0] + '" - Ron Swanson'
    return None
Return a Ron Swanson quote .
58,691
def window(iterable, n=2, cast=tuple):
    """Slide a running window of length *n* over *iterable*.

    Each window is converted with *cast* (tuple by default) before being
    yielded. Raises ValueError when the iterable holds fewer than n items.
    """
    it = iter(iterable)
    win = deque(maxlen=n)
    # BUG FIX: the original primed the deque with a generator calling
    # next(it); under PEP 479 (Python 3.7+) a short iterable then raises
    # RuntimeError before the ValueError below can ever fire. Prime the
    # window with an explicit loop instead.
    for _ in range(n):
        try:
            win.append(next(it))
        except StopIteration:
            break
    if len(win) < n:
        raise ValueError('Window size was greater than iterable length')
    yield cast(win)
    append = win.append  # hoist the bound method for the hot loop
    for element in it:
        append(element)
        yield cast(win)
This function passes a running window along the length of the given iterable . By default the return value is a tuple but the cast parameter can be used to change the final result .
58,692
def main():
    """Command line interface for the update-dotdee program.

    Parses getopt-style options, then runs UpdateDotDee over the single
    filename argument. Exits 0 on success/usage, 1 on any error.
    """
    coloredlogs.install(syslog=True)
    context_opts = {}
    program_opts = {}
    try:
        # Short options mirror the long ones: -f/--force, -u/--use-sudo,
        # -r HOST/--remote-host=HOST, -v/--verbose, -q/--quiet, -h/--help.
        options, arguments = getopt.getopt(sys.argv[1:], 'fur:vqh', [
            'force', 'use-sudo', 'remote-host=', 'verbose', 'quiet', 'help',
        ])
        for option, value in options:
            if option in ('-f', '--force'):
                program_opts['force'] = True
            elif option in ('-u', '--use-sudo'):
                context_opts['sudo'] = True
            elif option in ('-r', '--remote-host'):
                context_opts['ssh_alias'] = value
            elif option in ('-v', '--verbose'):
                coloredlogs.increase_verbosity()
            elif option in ('-q', '--quiet'):
                coloredlogs.decrease_verbosity()
            elif option in ('-h', '--help'):
                # sys.exit raises SystemExit, which is NOT an Exception
                # subclass, so it escapes the handler below as intended.
                usage(__doc__)
                sys.exit(0)
            else:
                assert False, "Unhandled option!"
        if not arguments:
            # No filename given: show usage and exit successfully.
            usage(__doc__)
            sys.exit(0)
        if len(arguments) != 1:
            raise Exception("Expected a filename as the first and only argument!")
        program_opts['filename'] = arguments[0]
    except Exception as e:
        # Option-parsing / validation failures: brief warning, exit code 1.
        warning("Error: %s", e)
        sys.exit(1)
    try:
        program_opts['context'] = create_context(**context_opts)
        UpdateDotDee(**program_opts).update_file()
    except Exception as e:
        # Unexpected runtime failure: full traceback via the logger.
        logger.exception("Encountered unexpected exception, aborting!")
        sys.exit(1)
Command line interface for the update - dotdee program .
58,693
def add_months(self, value: int) -> datetime:
    """Shift the held date by *value* months and return the new value."""
    self.value += relativedelta(months=value)
    return self.value
Add a number of months to the given date
58,694
def from_date(self, value: date) -> datetime:
    """Initialize the held value from *value*, at midnight.

    :param value: a datetime.date (datetime instances also qualify).
    :raises TypeError: when *value* is not a date.
    """
    # Robustness fix: explicit check instead of assert, which is stripped
    # under "python -O" and would let bad input through silently.
    if not isinstance(value, date):
        raise TypeError("value must be a datetime.date, got %r" % type(value))
    self.value = datetime(value.year, value.month, value.day)
    return self.value
Initializes from the given date value
58,695
def get_day_name(self) -> str:
    """Return the weekday name of the held date (e.g. 'Monday')."""
    # weekday() is already 0-based Monday, matching calendar.day_name.
    return calendar.day_name[self.value.weekday()]
Returns the day name
58,696
def to_iso_string(self) -> str:
    """Return the full ISO-8601 string for the held datetime."""
    assert isinstance(self.value, datetime)
    return self.value.isoformat()
Returns full ISO string for the given date
58,697
def end_of_day(self) -> datetime:
    """Set the held value to 23:59:59 on the same calendar day and return it."""
    current = self.value
    self.value = datetime(current.year, current.month, current.day, 23, 59, 59)
    return self.value
End of day
58,698
def end_of_month(self) -> datetime:
    """Set the held value to the last day of its month (time-of-day
    preserved) and return it.
    """
    # Simplification: ask calendar.monthrange for the month length
    # directly instead of the add-a-month / day=1 / back-one-day round
    # trip through dateutil's relativedelta; results are identical and
    # this removes the third-party dependency from the hot path.
    last_day = calendar.monthrange(self.value.year, self.value.month)[1]
    self.value = self.value.replace(day=last_day)
    return self.value
Provides end of the month for the given date
58,699
def is_end_of_month(self) -> bool:
    """Check whether the held value equals the end of the month.

    NOTE(review): Datum() presumably initializes to the current moment,
    so this compares self.value against the end of the *current* month
    (including whatever time-of-day end_of_month leaves in place), not
    the end of self.value's own month — confirm this is intended.
    """
    end_of_month = Datum()
    end_of_month.end_of_month()
    return self.value == end_of_month.value
Checks if the date is at the end of the month