idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
14,600
def check_cache(path):
    """Return True if the cache file does not exist or is older than 30 days.

    Args:
        path: path to the cache file holding the list of all datasets.

    Returns:
        bool: True when the cache is missing or stale and must be refreshed.
    """
    if not os.path.exists(path):
        return True
    mod_date = datetime.fromtimestamp(os.path.getmtime(path))
    # stale when last modified more than 30 days ago
    return mod_date < datetime.now() - timedelta(days=30)
Return True if the cache file holding the list of all datasets does not exist or is older than 30 days.
14,601
def bcdc_package_show(package):
    """Query the DataBC Catalogue API about the given package.

    Raises ValueError when the API does not know the package id.
    """
    response = requests.get(
        bcdata.BCDC_API_URL + "package_show", params={"id": package}
    )
    if response.status_code != 200:
        raise ValueError("{d} is not present in DataBC API list".format(d=package))
    return response.json()["result"]
Query DataBC Catalogue API about given package
14,602
def list_tables(refresh=False, cache_file=None):
    """Return a list of all datasets available via WFS.

    Results are cached in ``~/.bcdata``; the WFS service is queried only
    when ``refresh`` is requested or the cache is missing/stale.
    """
    if not cache_file:
        cache_file = os.path.join(str(Path.home()), ".bcdata")
    if refresh or check_cache(cache_file):
        wfs = WebFeatureService(url=bcdata.OWS_URL, version="2.0.0")
        # BUG FIX: the original used i.strip("pub:"), which strips any of the
        # characters 'p','u','b',':' from BOTH ends (mangling layer names that
        # start/end with those letters). Remove only the literal "pub:" prefix.
        bcdata_objects = [
            i[len("pub:"):] if i.startswith("pub:") else i
            for i in list(wfs.contents)
        ]
        with open(cache_file, "w") as outfile:
            json.dump(sorted(bcdata_objects), outfile)
    else:
        with open(cache_file, "r") as infile:
            bcdata_objects = json.load(infile)
    return bcdata_objects
Return a list of all datasets available via WFS
14,603
def make_request(parameters):
    """Submit a GetFeature request to DataBC WFS and return the features."""
    response = requests.get(bcdata.WFS_URL, params=parameters)
    return response.json()["features"]
Submit a getfeature request to DataBC WFS and return features
14,604
def define_request(dataset, query=None, crs="epsg:4326", bounds=None, sortby=None, pagesize=10000):
    """Define the GetFeature request parameters required to download a dataset.

    Returns a list of request-parameter dicts, one per page of ``pagesize``
    features.
    """
    table = validate_name(dataset)
    n = bcdata.get_count(table, query=query)
    chunks = math.ceil(n / pagesize)
    # paged requests need a deterministic sort order
    if chunks > 1 and not sortby:
        sortby = get_sortkey(table)
    param_dicts = []
    for page in range(chunks):
        request = {
            "service": "WFS",
            "version": "2.0.0",
            "request": "GetFeature",
            "typeName": table,
            "outputFormat": "json",
            "SRSNAME": crs,
        }
        if sortby:
            request["sortby"] = sortby
        if query:
            request["CQL_FILTER"] = query
        if bounds:
            request["bbox"] = ",".join(str(b) for b in bounds)
        if chunks > 1:
            request["startIndex"] = page * pagesize
            request["count"] = pagesize
        param_dicts.append(request)
    return param_dicts
Define the getfeature request parameters required to download a dataset
14,605
def get_data(dataset, query=None, crs="epsg:4326", bounds=None, sortby=None, pagesize=10000, max_workers=5):
    """Get a GeoJSON FeatureCollection from DataBC WFS.

    Pages are fetched concurrently and concatenated in request order.
    """
    param_dicts = define_request(dataset, query, crs, bounds, sortby, pagesize)
    features = []
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        for page in executor.map(make_request, param_dicts):
            features.extend(page)
    return {"type": "FeatureCollection", "features": features}
Get GeoJSON featurecollection from DataBC WFS
14,606
def get_features(dataset, query=None, crs="epsg:4326", bounds=None, sortby=None, pagesize=10000, max_workers=5):
    """Yield features one at a time from DataBC WFS.

    Pages are downloaded concurrently; features stream in request order.
    """
    param_dicts = define_request(dataset, query, crs, bounds, sortby, pagesize)
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        for page in executor.map(make_request, param_dicts):
            yield from page
Yield features from DataBC WFS
14,607
def _get_sorted ( self , resources ) : tmp = [ ] for resource in resources : path = resource . _path priority = path . count ( '/' ) * 10 - path . count ( '{' ) tmp . append ( ( priority , resource ) ) return [ resource for prio , resource in reversed ( sorted ( tmp ) ) ]
Order the resources by priority - the most specific paths come first .
14,608
def set_cfme_caselevel(testcase, caselevels):
    """Converts tier to caselevel.

    Numeric tiers index into ``caselevels``; out-of-range numbers fall
    back to "component"; non-numeric tiers leave the testcase untouched.
    """
    tier = testcase.get("caselevel")
    if tier is None:
        return
    try:
        index = int(tier)
    except ValueError:
        # tier is not a number at all — nothing to convert
        return
    try:
        testcase["caselevel"] = caselevels[index]
    except IndexError:
        # numeric, but beyond the known levels
        testcase["caselevel"] = "component"
Converts tier to caselevel .
14,609
def get_requirements_transform_cfme(config):
    """Return requirement transformation function for CFME."""
    def requirement_transform(requirement):
        # work on a deep copy so the caller's dict is never mutated
        transformed = copy.deepcopy(requirement)
        transformed.pop("id", None)
        return transformed
    return requirement_transform
Return requirement transformation function for CFME .
14,610
def get_requirements_transform_cloudtp(config):
    """Return requirement transformation function for CLOUDTP."""
    def requirement_transform(requirement):
        # deep copy: never mutate the caller's dict
        transformed = copy.deepcopy(requirement)
        transformed.pop("id", None)
        # fill in default assignee/approver when missing or empty
        if not transformed.get("assignee-id"):
            transformed["assignee-id"] = "mkourim"
        if not transformed.get("approver-ids"):
            transformed["approver-ids"] = "mkourim:approved"
        return transformed
    return requirement_transform
Return requirement transformation function for CLOUDTP .
14,611
def render_archive(entries):
    """Create the archive page listing *entries*."""
    context = GLOBAL_TEMPLATE_CONTEXT.copy()
    context['entries'] = entries
    # FIX: the original statement ended with a stray trailing comma, which
    # wrapped the (discarded) return value in a one-element tuple; removed.
    _render(context, 'archive_index.html',
            os.path.join(CONFIG['output_to'], 'archive/index.html'))
Creates the archive page
14,612
def find_new_posts_and_pages(db):
    """Walk the content directory and yield new or modified posts and pages.

    Yields (Entry, doc_id) pairs for every markdown file that is either
    not yet in the database or has a newer mtime than the stored record.
    """
    Q = Query()
    for root, dirs, files in os.walk(CONFIG['content_root']):
        # only markdown sources, in deterministic order
        for filename in sorted([f for f in files if f.endswith(('md', 'markdown'))]):
            fullpath = os.path.join(root, filename)
            # path relative to the content root, as stored in the DB
            _p = fullpath.split(CONFIG['content_root'])[-1].lstrip('/')
            new_mtime = int(os.path.getmtime(fullpath))
            e, item = None, None
            for collection in ['posts', 'pages']:
                item = db[collection].get(Q.filename == _p)
                if item:
                    # known file: re-yield only when it changed on disk
                    if new_mtime > item['mtime']:
                        db[collection].update({'mtime': new_mtime}, doc_ids=[item.doc_id])
                        e = Entry(fullpath, doc_id=item.doc_id)
                    break
            if not item:
                # brand new file — Entry creation registers it
                e = Entry(fullpath)
            if e:
                yield e, e.id
Walk the content directory and put each post and page in the database.
14,613
def _get_last_entries(db, qty):
    """Return (the last *qty* entries, all entries), sorted newest first."""
    doc_ids = sorted((post.doc_id for post in db.posts.all()), reverse=True)
    entries = [
        Entry(os.path.join(CONFIG['content_root'],
                           db.posts.get(doc_id=doc_id)['filename']), doc_id)
        for doc_id in doc_ids
    ]
    entries.sort(key=operator.attrgetter('date'), reverse=True)
    return entries[:qty], entries
get all entries and the last qty entries
14,614
def update_index(entries):
    """Render the main index page and the atom feed from *entries*."""
    context = GLOBAL_TEMPLATE_CONTEXT.copy()
    context['entries'] = entries
    context['last_build'] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    # same context, two output documents
    for template, target in (('entry_index.html', 'index.html'),
                             ('atom.xml', 'atom.xml')):
        _render(context, template, os.path.join(CONFIG['output_to'], target))
Find the last 10 entries in the database and create the main page. Each entry has a doc_id, so we only fetch the last 10 doc_ids.
14,615
def build(config):
    """Incremental build of the website: render changed entries, tag pages,
    the index and the archive.
    """
    logger.info("\nRendering website now...\n")
    logger.info("entries:")
    tags = dict()
    entries = list()
    for post, post_id in find_new_posts_and_pages(DB):
        # render() returns truthy on success
        if post.render():
            if post.header['kind'] in ['writing', 'link']:
                for tag in post.tags:
                    tag.posts = [post_id]
                    tags[tag.name] = tag
            entries.append(post)
            logger.info("%s" % post.path)
    # re-render every tag page that gained a post
    for name, to in tags.items():
        logger.info("updating tag %s" % name)
        to.render()
    logger.info("Updating index")
    last_entries, all_entries = _get_last_entries(DB, config['INDEX_SIZE'])
    last_entries = list(_filter_none_public(last_entries))
    update_index(last_entries)
    logger.info("Updating archive")
    # NOTE(review): `entries` is rebuilt here but only `all_entries` is used
    # below — confirm whether this assignment is dead code.
    entries = [Entry.entry_from_db(
        os.path.join(CONFIG['content_root'], e.get('filename')), e.doc_id)
        for e in DB.posts.all()]
    all_entries = list(_filter_none_public(all_entries))
    all_entries.sort(key=operator.attrgetter('date'), reverse=True)
    # the archive lists everything beyond the first ARCHIVE_SIZE entries
    render_archive(all_entries[config['ARCHIVE_SIZE']:])
Incremental build of the website
14,616
def preview():
    """Launch a local HTTP server to preview the rendered website."""
    handler = http.server.SimpleHTTPRequestHandler
    # allow quick restarts without 'address already in use'
    socketserver.TCPServer.allow_reuse_address = True
    port = CONFIG['http_port']
    httpd = socketserver.TCPServer(("", port), handler)
    os.chdir(CONFIG['output_to'])
    try:
        logger.info("and ready to test at http://127.0.0.1:%d" % CONFIG['http_port'])
        logger.info("Hit Ctrl+C to exit")
        httpd.serve_forever()
    except KeyboardInterrupt:
        httpd.shutdown()
launch an HTTP to preview the website
14,617
def entries(self):
    """Yield the actual entries tagged with this tag's name."""
    query = Query()
    record = self.table.get(query.name == self.name)
    for post_id in record['post_ids']:
        post = self.db.posts.get(doc_id=post_id)
        if not post:
            raise ValueError("No post found for doc_id %s" % post_id)
        yield Entry(os.path.join(CONFIG['content_root'], post['filename']), post_id)
return the actual lists of entries tagged with
14,618
def render(self):
    """Render this tag's html index page and atom feed; return True."""
    context = GLOBAL_TEMPLATE_CONTEXT.copy()
    context['tag'] = self
    context['entries'] = sorted(
        self.entries, key=operator.attrgetter('date'), reverse=True)
    render_to = os.path.join(CONFIG['output_to'], 'tags', self.slug)
    if not os.path.exists(render_to):
        os.makedirs(render_to)
    _render(context, 'tag_index.html', os.path.join(render_to, 'index.html'))
    # the feed only carries the ten most recent entries
    context['entries'] = context['entries'][:10]
    context['last_build'] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    _render(context, 'atom.xml', os.path.join(render_to, 'atom.xml'))
    return True
Render html page and atom feed
14,619
def tags(self):
    """Tags attached to this entry; only meaningful after ``prepare`` ran."""
    if 'tags' not in self.header:
        return []
    found = []
    for name in self.header['tags']:
        tag = Tag(name)
        tag.posts = [self.id]
        found.append(tag)
    return found
this property is always called after prepare
14,620
def prepare(self):
    """Parse the markdown source, populate header attributes, and register
    the entry in the database on first sight.

    A blog post without tags would otherwise break downstream code, so a
    default empty tag tuple is set.
    """
    self.body_html = markdown(
        codecs.open(self.abspath, 'r').read(),
        extras=['fenced-code-blocks', 'hilite', 'tables', 'metadata'])
    self.header = self.body_html.metadata
    if 'tags' in self.header:
        # "a, B ,c" -> ['a', 'b', 'c']
        self.header['tags'] = [t.strip().lower()
                               for t in self.header['tags'].split(',')]
    else:
        # default so untagged posts don't error out
        self.header['tags'] = ("",)
    self.date = self.header.get('published', datetime.datetime.now())
    if isinstance(self.date, str):
        self.date = datetime.datetime.strptime(self.date, "%Y-%m-%d")
    # mirror every header key as an instance attribute (read-only
    # properties raise AttributeError and are skipped)
    for k, v in self.header.items():
        try:
            setattr(self, k, v)
        except AttributeError:
            pass
    if self.id:
        # already registered in the database
        return
    rec = {'filename': self.path,
           'mtime': int(os.path.getmtime(self.abspath))}
    if self.header['kind'] == 'writing':
        _id = Entry.db.posts.insert(rec)
    elif self.header['kind'] == 'page':
        _id = Entry.db.pages.insert(rec)
    self.id = _id
A blog post without tags would cause an error, so a default empty tag is set during preparation.
14,621
def tracker():
    """Start a tracker web application to register running models."""
    app = mmi.tracker.app()
    app.listen(22222)
    logger.info('serving at port 22222')
    # blocks until the IOLoop is stopped
    tornado.ioloop.IOLoop.instance().start()
start a tracker to register running models
14,622
def runner(engine, configfile, output_vars, interval, pause, mpi, tracker, port, bmi_class):
    """Run a BMI compatible model."""
    model_runner = mmi.runner.Runner(
        engine=engine,
        configfile=configfile,
        output_vars=output_vars,
        interval=interval,
        pause=pause,
        mpi=mpi,
        tracker=tracker,
        port=port,
        bmi_class=bmi_class,
    )
    model_runner.run()
run a BMI compatible model
14,623
def do_extra_polishing(self):
    """Call every EXTRA_POLISH_FUNCTION that applies to this page.

    A function may be restricted by a ``polish_urls`` attribute, by a
    ``polish_commit_indexes`` attribute, or by both (then both must match).
    """
    for func in self.EXTRA_POLISH_FUNCTIONS:
        by_url = hasattr(func, 'polish_urls')
        by_commit = hasattr(func, 'polish_commit_indexes')
        url_matches = by_url and self.URL in func.polish_urls
        commit_matches = (by_commit and
                          self.CURRENT_COMMIT_INDEX in func.polish_commit_indexes)
        if by_url and by_commit:
            if url_matches and commit_matches:
                func()
        elif by_url and url_matches:
            func()
        elif by_commit and commit_matches:
            func()
Goes over each EXTRA_POLISH_FUNCTION to see if it applies to this page if so calls it
14,624
def total_charges(self):
    """Total of the goods acquired in the invoice.

    Carried-forward balances are excluded.
    """
    selected = Charge.objects.filter(invoice=self).charges()
    selected = selected.exclude(product_code=CARRIED_FORWARD)
    return total_amount(selected)
Represents the goods acquired in the invoice .
14,625
def due(self):
    """The amount due for this invoice: total charges minus successful
    transactions. Can be negative if the invoice was overpaid.
    """
    charged = total_amount(Charge.objects.filter(invoice=self))
    paid = total_amount(Transaction.successful.filter(invoice=self))
    return charged - paid
The amount due for this invoice . Takes into account all entities in the invoice . Can be < 0 if the invoice was overpaid .
14,626
def setup():
    """Package setup entrypoint."""
    install_requirements = ["attrdict"]
    # pathlib is stdlib only from Python 3.4 on; backport needed before that
    if sys.version_info[:2] < (3, 4):
        install_requirements.append("pathlib")
    setup_requirements = ['six', 'setuptools>=17.1', 'setuptools_scm']
    # pull in sphinx only when a docs-related command was requested
    needs_sphinx = {'build_sphinx', 'docs', 'upload_docs', }.intersection(sys.argv)
    if needs_sphinx:
        setup_requirements.append('sphinx')
    setuptools.setup(
        author="David Gidwani",
        author_email="david.gidwani@gmail.com",
        classifiers=[
            "Development Status :: 4 - Beta",
            "Intended Audience :: Developers",
            "License :: OSI Approved :: BSD License",
            "Operating System :: OS Independent",
            "Programming Language :: Python",
            "Programming Language :: Python :: 2",
            "Programming Language :: Python :: 2.7",
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.3",
            "Programming Language :: Python :: 3.4",
            "Topic :: Software Development",
            "Topic :: Software Development :: Libraries :: Python Modules",
        ],
        description="Painless access to namespaced environment variables",
        download_url="https://github.com/darvid/biome/tarball/0.1",
        install_requires=install_requirements,
        keywords="conf config configuration environment",
        license="BSD",
        long_description=readme(),
        name="biome",
        package_dir={'': 'src'},
        packages=setuptools.find_packages('./src'),
        setup_requires=setup_requirements,
        tests_require=["pytest"],
        url="https://github.com/darvid/biome",
        # version is derived from the git tag by setuptools_scm
        use_scm_version=True,
    )
Package setup entrypoint .
14,627
def preprocessor(accepts, exports, flag=None):
    """Decorator to register a new preprocessor."""
    def decorator(func):
        preprocessors.append((accepts, exports, flag, func))
        return func
    return decorator
Decorator to add a new preprocessor
14,628
def postprocessor(accepts, flag=None):
    """Decorator to register a new postprocessor."""
    def decorator(func):
        postprocessors.append((accepts, flag, func))
        return func
    return decorator
Decorator to add a new postprocessor
14,629
def coffee(input, output, **kw):
    """Compile CoffeeScript files with the configured compiler binary."""
    command = [current_app.config.get('COFFEE_BIN'), '-c', '-o', output, input]
    subprocess.call(command)
Process CoffeeScript files
14,630
def cancel_charge(charge_id: str) -> None:
    """Cancels an existing charge.

    An uninvoiced charge is soft-deleted; an invoiced charge is reversed
    by adding a negating charge instead. Raises
    ChargeAlreadyCancelledError when the charge was already deleted or
    already reversed.
    """
    logger.info('cancelling-charge', charge_id=charge_id)
    # all checks and writes happen atomically
    with transaction.atomic():
        charge = Charge.all_charges.get(pk=charge_id)
        if charge.deleted:
            raise ChargeAlreadyCancelledError("Cannot cancel deleted charge.")
        if Charge.all_charges.filter(reverses=charge_id).exists():
            raise ChargeAlreadyCancelledError("Cannot cancel reversed charge.")
        if charge.invoice is None:
            # not yet invoiced: safe to soft-delete
            charge.deleted = True
            charge.save()
        else:
            # invoiced: keep the books balanced with a reversal entry
            add_charge(account_id=charge.account_id, reverses_id=charge_id,
                       amount=-charge.amount, product_code=REVERSAL_PRODUCT_CODE)
Cancels an existing charge .
14,631
def _log_request(self, request):
    """Log the most important parts of this request."""
    parts = []
    for attr in self.LOG_DATA:
        value = getattr(request, attr)
        if value:
            parts.append(attr + ': ' + repr(value))
    for header in self.LOG_HEADERS:
        if header in request.headers and request.headers[header]:
            parts.append(header + ': ' + repr(request.headers[header]))
    logger.info("Request information: %s", ', '.join(parts))
Log the most important parts of this request .
14,632
def get_mortgage_payment_per_payment_frequency(self):
    """Return the amount paid per payment based on the payment frequency.

    Uses the standard annuity formula  P * r * (1+r)^n / ((1+r)^n - 1).
    Returns zero Money when the denominator is zero (r == 0), matching
    the original behaviour.
    """
    r = self.get_interest_rate_per_payment_frequency()
    n = self.get_total_number_of_payments_per_frequency()
    p = self._loan_amount
    growth = math.pow(r + 1, n)  # (1 + r)^n
    denominator = growth - 1
    if denominator == 0:
        # zero interest: the formula is undefined, keep legacy fallback
        return Money(amount=0.00, currency=self._currency)
    return (r * growth / denominator) * p
Function will return the amount paid per payment based on the frequency .
14,633
def info(self):
    """Fetch the info endpoint. Used to perform login auth.

    Raises CFException on a non-200 response or unparseable JSON.
    """
    url = self.api_url + self.info_url
    resp = self.session.get(url)
    if resp.status_code != 200:
        error = {'description': "Info HTTP response not valid"}
        raise CFException(error, resp.status_code)
    try:
        return resp.json()
    except ValueError as e:
        error = {'description': "Info HTTP response not valid, %s" % str(e)}
        raise CFException(error, resp.status_code)
Gets info endpoint . Used to perform login auth .
14,634
def clean_blobstore_cache(self):
    """Deletes all of the existing buildpack caches in the blobstore.

    Returns the DELETE response body; raises CFException unless the API
    answered 202 (accepted).
    """
    # NOTE(review): 'blobstores_builpack_cache_url' looks misspelled
    # ('builpack'); the attribute is defined elsewhere, so left untouched.
    url = self.api_url + self.blobstores_builpack_cache_url
    resp, rcode = self.request('DELETE', url)
    if rcode != 202:
        raise CFException(resp, rcode)
    return resp
Deletes all of the existing buildpack caches in the blobstore
14,635
def search(self, query, locations: list = None):
    """Search the inventory using a CAS number, barcode, or chemical name.

    The search type is inferred from the query: CAS-number pattern,
    integer barcode, or free-text name (wrapped in SQL wildcards).
    Returns a list of Container objects (empty when nothing matched).
    """
    cas_number = re.search(r"\b[1-9]{1}[0-9]{1,5}-\d{2}-\d\b", str(query))
    if cas_number:
        query = cas_number[0]
        search_type = 'cas'
    else:
        try:
            query = int(query)
            search_type = 'barcode'
        except ValueError:
            # free-text: wrap in SQL LIKE wildcards
            query = f"%{query}%"
            search_type = 'name'
    if not locations:
        locations = self.get_locations(filter_to_my_group=True)
        locations = [loc.inventory_id for loc in locations]
    # NOTE(review): list.append() returns None, so 'limitlocations' is
    # always sent as None here (though 1 IS appended to the list first).
    # Presumably this should be `locations + [1]` — confirm against the
    # ChemInventory API before changing.
    data = {'groupid': self.groupid, 'searchtype': search_type,
            'searchterm': query, 'limitlocations': locations.append(1)}
    r = self._post('search-search', referer_path='search', data=data)
    if r['searchresults']['containers']:
        containers = []
        for container in r['searchresults']['containers']:
            loc = Location(name=container.get('location'))
            ct = Container(inventory_id=container.get('id'),
                           compound_id=container.get('sid'),
                           name=container.get('containername'),
                           location=loc,
                           size=container.get('size'),
                           smiles=container.get('smiles'),
                           cas=container.get('cas'),
                           comments=container.get('comments'),
                           barcode=container.get('barcode'),
                           supplier=container.get('supplier'),
                           date_acquired=container.get('dateacquired'),
                           owner=container.get('owner'))
            containers.append(ct)
        return containers
    else:
        return []
Search using the CAS number, barcode, or chemical name.
14,636
def get_groups(self):
    """Retrieve the groups listed in ChemInventory."""
    resp = self._post('general-retrievelocations', 'locations')
    if not resp['groupinfo']:
        return []
    return [
        Group(name=info.get('name'), inventory_id=info.get('id'))
        for info in resp['groupinfo']
    ]
Retrieve groups listed in ChemInventory
14,637
def get_locations(self, filter_to_my_group=False):
    """Retrieve the Locations listed in ChemInventory.

    When ``filter_to_my_group`` is set, only locations belonging to the
    client's own group are returned.
    """
    resp = self._post('general-retrievelocations', 'locations')
    # map group id -> Group so locations can reference their group
    groups = {}
    if resp['groupinfo']:
        for group in resp['groupinfo']:
            groups[group['id']] = Group(name=group.get('name'),
                                        inventory_id=group.get('id'))
    final_resp = []
    if resp['data']:
        if filter_to_my_group:
            resp['data'] = {self.groupid: resp['data'][self.groupid]}
        for groupid, sublocation in resp['data'].items():
            # the API returns either a dict of sublocations or a (possibly
            # nested) list; normalize both shapes to a flat list
            if type(sublocation) is dict:
                sublocation = [loc for _, loc in sublocation.items()]
                sublocation = flatten_list(sublocation)
            if type(sublocation) is list:
                sublocation = flatten_list(sublocation)
            for location in sublocation:
                group = groups[groupid]
                final_resp.append(Location(name=location.get('name'),
                                           inventory_id=location.get('id'),
                                           parent=location.get('parent'),
                                           group=group,
                                           barcode=location.get('barcode')))
    return final_resp
Retrieve Locations listed in ChemInventory
14,638
def get_containers(self, include_only=None):
    """Download all the containers owned by a group.

    Args:
        include_only: optional list of Location and/or Group objects;
            when given, only containers stored at those locations (or in
            those groups) are fetched.

    Raises:
        ValueError: when no locations exist at all, or when none match
            ``include_only``.
    """
    # FIX: 'include_only' previously used a mutable default argument ([]);
    # None avoids the shared-state pitfall while keeping the same behaviour.
    locations = self.get_locations()
    if len(locations) == 0:
        raise ValueError("No locations for containers exist in Cheminventory")
    if include_only:
        final_locations = [
            location for location in locations
            if location in include_only or location.group in include_only
        ]
        if len(final_locations) == 0:
            raise ValueError(f"Location(s) or group(s) {include_only} is/are not in the database.")
    else:
        final_locations = locations
    containers = []
    for location in final_locations:
        containers += self._get_location_containers(location.inventory_id)
    return containers
Download all the containers owned by a group
14,639
def get_unicode_str(obj):
    """Makes sure obj is a unicode string."""
    if isinstance(obj, six.binary_type):
        # bytes: decode, silently dropping undecodable sequences
        return obj.decode("utf-8", errors="ignore")
    if isinstance(obj, six.text_type):
        return obj
    # anything else: fall back to its text representation
    return six.text_type(obj)
Makes sure obj is a unicode string .
14,640
def init_log(log_level):
    """Initializes logging at the given level (defaults to INFO)."""
    level_name = (log_level or "INFO").upper()
    logging.basicConfig(
        format="%(name)s:%(levelname)s:%(message)s",
        # unknown level names silently fall back to INFO
        level=getattr(logging, level_name, logging.INFO),
    )
Initializes logging .
14,641
def get_xml_root(xml_file):
    """Returns the XML root parsed from *xml_file*."""
    try:
        tree = etree.parse(os.path.expanduser(xml_file), NO_BLANKS_PARSER)
        xml_root = tree.getroot()
    except Exception as err:
        raise Dump2PolarionException(
            "Failed to parse XML file '{}': {}".format(xml_file, err))
    return xml_root
Returns XML root .
14,642
def get_xml_root_from_str(xml_str):
    """Returns the XML root parsed from a string."""
    try:
        encoded = xml_str.encode("utf-8")
        xml_root = etree.fromstring(encoded, NO_BLANKS_PARSER)
    except Exception as err:
        raise Dump2PolarionException("Failed to parse XML string: {}".format(err))
    return xml_root
Returns XML root from string .
14,643
def prettify_xml(xml_root):
    """Returns a pretty-printed string representation of the element tree."""
    raw = etree.tostring(
        xml_root, encoding="utf-8", xml_declaration=True, pretty_print=True)
    return get_unicode_str(raw)
Returns pretty - printed string representation of element tree .
14,644
def get_session(credentials, config):
    """Gets a requests session authenticated with *credentials*.

    Uses form-based cookie auth when the config provides 'auth_url',
    otherwise plain basic auth.
    """
    session = requests.Session()
    # NOTE(review): TLS certificate verification is disabled here —
    # confirm this is intentional for the target deployment.
    session.verify = False
    auth_url = config.get("auth_url")
    if not auth_url:
        session.auth = credentials
        return session
    payload = {
        "j_username": credentials[0],
        "j_password": credentials[1],
        "submit": "Log In",
        "rememberme": "true",
    }
    cookie = session.post(
        auth_url,
        data=payload,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
    )
    # requests.Response is falsy on 4xx/5xx status codes
    if not cookie:
        raise Dump2PolarionException("Cookie was not retrieved from {}.".format(auth_url))
    return session
Gets requests session .
14,645
def find_vcs_root(path, dirs=(".git",)):
    """Searches up from a given path to find the project root.

    Returns the first ancestor containing one of *dirs*, or None when
    the filesystem root is reached without a match.
    """
    current = os.path.abspath(path)
    previous = None
    # at the filesystem root, going up is a no-op, which ends the loop
    while previous != current:
        if any(os.path.exists(os.path.join(current, d)) for d in dirs):
            return current
        previous, current = current, os.path.abspath(os.path.join(current, os.pardir))
    return None
Searches up from a given path to find the project root .
14,646
def pack(self):
    " Pack and save file "
    # NOTE: this module is Python 2 ('except IOError, ex' syntax below).
    # output name: configured prefix + source file basename
    pack_name = self.args.prefix + op.basename(self.path)
    pack_path = op.join(self.args.output or self.basedir, pack_name)
    self.out("Packing: %s" % self.path)
    self.out("Output: %s" % pack_path)
    if self.args.format:
        # force the parser for this file extension
        ext = self.get_ext(self.path)
        self.parsers[ext] = self.args.format
    out = "".join(self.merge(self.parse(self.path)))
    try:
        open(pack_path, 'w').write(out)
        self.out("Linked file saved as: '%s'." % pack_path)
    except IOError, ex:
        raise ZetaError(ex)
Pack and save file
14,647
def parse_path(self, path, curdir):
    " Normalize path. "
    if path.startswith('http://'):
        # remote resources are used verbatim
        return path
    if path.startswith('zeta://'):
        tail = path[len('zeta://'):]
        zpath = op.join(LIBDIR, tail)
        # fall back to the user-supplied directory when the bundled
        # library does not ship the file
        if self.args.directory and not op.exists(zpath):
            return op.join(self.args.directory, tail)
        return zpath
    return op.abspath(op.normpath(op.join(curdir, path)))
Normalize path.
14,648
def out(msg, error=False):
    " Send message to shell "
    if error:
        # errors go to stderr, colorized as a warning
        stderr.write("%s\n" % color_msg(msg, "warning"))
    else:
        stdout.write("%s\n" % msg)
Send message to shell
14,649
def _image_url(image, dst_color=None, src_color=None):
    """Generate a CSS url() for an asset relative to the images directory.

    When both src_color and dst_color are given, a recolored copy of the
    image (src_color pixels replaced by dst_color) is generated under
    ASSETS_ROOT and referenced instead. NOTE: Python 2 era code (xrange,
    bare except).
    """
    if src_color and dst_color:
        if not Image:
            raise Exception("Images manipulation require PIL")
    file = StringValue(image).value
    path = None
    if callable(STATIC_ROOT):
        # staticfiles-storage backed lookup
        try:
            _file, _storage = list(STATIC_ROOT(file))[0]
            d_obj = _storage.modified_time(_file)
            filetime = int(time.mktime(d_obj.timetuple()))
            if dst_color:
                path = _storage.open(_file)
        except:
            filetime = 'NA'
    else:
        # plain filesystem lookup
        _path = os.path.join(STATIC_ROOT, file)
        if os.path.exists(_path):
            filetime = int(os.path.getmtime(_path))
            if dst_color:
                path = open(_path, 'rb')
        else:
            filetime = 'NA'
    BASE_URL = STATIC_URL
    if path:
        # a recoloring was requested and the source image is readable
        src_color = tuple(int(round(c)) for c in ColorValue(src_color).value[:3]) if src_color else (0, 0, 0)
        dst_color = [int(round(c)) for c in ColorValue(dst_color).value[:3]]
        # flatten the relative path into a single file name
        file_name, file_ext = os.path.splitext(os.path.normpath(file).replace('\\', '_').replace('/', '_'))
        # cache key: mtime + colors, hashed into the asset file name
        key = (filetime, src_color, dst_color)
        key = file_name + '-' + base64.urlsafe_b64encode(hashlib.md5(repr(key)).digest()).rstrip('=').replace('-', '_')
        asset_file = key + file_ext
        asset_path = os.path.join(ASSETS_ROOT, asset_file)
        if os.path.exists(asset_path):
            # recolored copy already cached
            file = asset_file
            BASE_URL = ASSETS_URL
            filetime = int(os.path.getmtime(asset_path))
        else:
            # recolor pixel by pixel, preserving the alpha channel
            image = Image.open(path)
            image = image.convert("RGBA")
            pixdata = image.load()
            for y in xrange(image.size[1]):
                for x in xrange(image.size[0]):
                    if pixdata[x, y][:3] == src_color:
                        new_color = tuple(dst_color + [pixdata[x, y][3]])
                        pixdata[x, y] = new_color
            try:
                image.save(asset_path)
                file = asset_file
                BASE_URL = ASSETS_URL
            except IOError:
                # fall back to the original image URL on save failure
                log.exception("Error while saving image")
    # mtime query-string busts stale browser caches
    url = 'url("%s%s?_=%s")' % (BASE_URL, file, filetime)
    return StringValue(url)
Generates a path to an asset found relative to the project's images directory.
14,650
def _image_width(image):
    """Return the width (in px) of the image at the given relative path.

    Sizes are memoized in the module-level sprite_images cache. NOTE:
    Python 2 era code (bare except).
    """
    if not Image:
        raise Exception("Images manipulation require PIL")
    file = StringValue(image).value
    path = None
    try:
        # cache hit: (width, height) already known
        width = sprite_images[file][0]
    except KeyError:
        width = 0
        if callable(STATIC_ROOT):
            # staticfiles-storage backed lookup
            try:
                _file, _storage = list(STATIC_ROOT(file))[0]
                path = _storage.open(_file)
            except:
                pass
        else:
            _path = os.path.join(STATIC_ROOT, file)
            if os.path.exists(_path):
                path = open(_path, 'rb')
        if path:
            image = Image.open(path)
            size = image.size
            width = size[0]
            # memoize for the next call
            sprite_images[file] = size
    return NumberValue(width, 'px')
Returns the width of the image found at the path supplied by image, relative to your project's images directory.
14,651
def _nth(lst, n=1):
    """Return the nth item of a Sass list (1-based).

    Also accepts the keywords 'first' and 'last'. NOTE: Python 2 era
    code (bare except).
    """
    n = StringValue(n).value
    lst = ListValue(lst).value
    try:
        # numeric index: convert 1-based to 0-based, wrap around
        n = int(float(n)) - 1
        n = n % len(lst)
    except:
        # non-numeric: only 'first'/'last' keywords are meaningful
        if n.lower() == 'first':
            n = 0
        elif n.lower() == 'last':
            n = -1
    try:
        ret = lst[n]
    except KeyError:
        # lst is a dict keyed by mixed types; keep only the int-keyed
        # items in order and retry
        lst = [v for k, v in sorted(lst.items()) if isinstance(k, int)]
        try:
            ret = lst[n]
        except:
            ret = ''
    # preserve the value's own Sass type
    return ret.__class__(ret)
Return the Nth item in the string
14,652
def normalize_selectors(self, _selectors, extra_selectors=None, extra_parents=None):
    """Normalize or extend selectors in a string.

    Optional extra_selectors / extra_parents (string or list) are merged
    in. Returns a sorted, comma-joined selector string, with parents
    appended after ' extends ' when any exist. NOTE: Python 2 era code
    (basestring).
    """
    # collapse runs of whitespace
    _selectors = _spaces_re.sub(' ', _selectors)
    if isinstance(extra_selectors, basestring):
        extra_selectors = extra_selectors.split(',')
    if isinstance(extra_parents, basestring):
        extra_parents = extra_parents.split('&')
    parents = set()
    if ' extends ' in _selectors:
        # split each "child extends parent1&parent2" pair apart
        selectors = set()
        for key in _selectors.split(','):
            child, _, parent = key.partition(' extends ')
            child = child.strip()
            parent = parent.strip()
            selectors.add(child)
            parents.update(s.strip() for s in parent.split('&') if s.strip())
    else:
        selectors = set(s.strip() for s in _selectors.split(',') if s.strip())
    if extra_selectors:
        selectors.update(s.strip() for s in extra_selectors if s.strip())
    selectors.discard('')
    if not selectors:
        return ''
    if extra_parents:
        parents.update(s.strip() for s in extra_parents if s.strip())
    parents.discard('')
    if parents:
        return ','.join(sorted(selectors)) + ' extends ' + '&'.join(sorted(parents))
    return ','.join(sorted(selectors))
Normalizes or extends selectors in a string . An optional extra parameter that can be a list of extra selectors to be added to the final normalized selectors string .
14,653
def _get_properties(self, rule, p_selectors, p_parents, p_children, scope, media, c_lineno, c_property, c_codestr):
    """Implements properties and variables extraction for one declaration.

    Variable declarations ('$name: value' or 'name = value') are stored
    in the rule context; everything else is appended to the rule's
    property list. NOTE: Python 2 era code (basestring).
    """
    # split "name: value" (value may be absent)
    prop, value = (_prop_split_re.split(c_property, 1) + [None])[:2]
    try:
        # 'name = value' marks an old-style variable assignment
        is_var = (c_property[len(prop)] == '=')
    except IndexError:
        is_var = False
    prop = prop.strip()
    prop = self.do_glob_math(prop, rule[CONTEXT], rule[OPTIONS], rule, True)
    if prop:
        if value:
            value = value.strip()
            value = self.calculate(value, rule[CONTEXT], rule[OPTIONS], rule)
        _prop = (scope or '') + prop
        if is_var or prop.startswith('$') and value is not None:
            # variable assignment; honour the !default modifier
            if isinstance(value, basestring):
                if '!default' in value:
                    if _prop in rule[CONTEXT]:
                        # already set: !default must not override
                        value = None
                    else:
                        # NOTE(review): this replace(' ', ' ') is a no-op;
                        # it presumably should collapse double spaces.
                        value = value.replace('!default', '').replace(' ', ' ').strip()
            elif isinstance(value, ListValue):
                value = ListValue(value)
                for k, v in value.value.items():
                    if v == '!default':
                        if _prop in rule[CONTEXT]:
                            value = None
                        else:
                            del value.value[k]
                            value = value.first() if len(value) == 1 else value
                        break
            if value is not None:
                rule[CONTEXT][_prop] = value
        else:
            # regular CSS property: substitute variables and record it
            _prop = self.apply_vars(_prop, rule[CONTEXT], rule[OPTIONS], rule, True)
            rule[PROPERTIES].append((c_lineno, _prop, to_str(value) if value is not None else None))
Implements properties and variables extraction
14,654
def link_with_parents(self, parent, c_selectors, c_rules):
    """Link child selectors to every rule whose selector mentions *parent*.

    Rewrites each matching parent's selector string to also cover the child
    selectors and merges dependency information both ways. Returns the list
    of matched parent rules, or None when the parent was never found.
    """
    parent_found = None
    # NOTE(review): self.parts is re-keyed inside this loop over items();
    # that is only safe on Python 2 -- confirm before porting
    for p_selectors, p_rules in self.parts.items():
        _p_selectors, _, _ = p_selectors.partition(' extends ')
        _p_selectors = _p_selectors.split(',')
        new_selectors = set()
        found = False
        for p_selector in _p_selectors:
            if parent in p_selector:
                # for each child selector, strip the prefix/suffix it shares
                # with the parent, then substitute the remaining core
                for c_selector in c_selectors.split(','):
                    _c_selector, _parent = c_selector, parent
                    lcp = self.longest_common_prefix(_c_selector, _parent)
                    if lcp:
                        _c_selector = _c_selector[lcp:]
                        _parent = _parent[lcp:]
                    lcs = self.longest_common_suffix(_c_selector, _parent)
                    if lcs:
                        _c_selector = _c_selector[:-lcs]
                        _parent = _parent[:-lcs]
                    if _c_selector and _parent:
                        # substitute only whole-token occurrences of the parent
                        prev_symbol = '(?<![#.:])' if _parent[0] in ('#', '.', ':') else r'(?<![-\w#.:])'
                        post_symbol = r'(?![-\w])'
                        new_parent = re.sub(prev_symbol + _parent + post_symbol, _c_selector, p_selector)
                        if p_selector != new_parent:
                            new_selectors.add(new_parent)
                            found = True
        if found:
            # accumulate the rules of every parent that matched
            parent_found = parent_found or []
            parent_found.extend(p_rules)
        if new_selectors:
            new_selectors = self.normalize_selectors(p_selectors, new_selectors)
            if new_selectors != p_selectors:
                # re-key self.parts under the extended selector string
                del self.parts[p_selectors]
                self.parts.setdefault(new_selectors, [])
                self.parts[new_selectors].extend(p_rules)
            deps = set()
            for c_rule in c_rules or []:
                c_rule[SELECTORS] = c_selectors
                deps.add(c_rule[POSITION])
            for p_rule in p_rules:
                p_rule[SELECTORS] = new_selectors
                p_rule[DEPS].update(deps)
    return parent_found
Link with a parent for the current child rule . If parents found returns a list of parent rules to the child
14,655
def parse_extends(self):
    """Expand every ``extends`` clause in self.parts.

    First splits multi-parent extends into one entry per parent, then
    iteratively (bounded at 10 passes) folds each child's rules into its
    parents via link_with_parents, merging parent contexts and options into
    the child rules (child values win).
    """
    # Step 1: split "a extends p1&p2" into separate "a extends p1" / "a extends p2" keys.
    # NOTE(review): mutates self.parts while iterating items() -- Python 2 semantics
    for _selectors, rules in self.parts.items():
        if ' extends ' in _selectors:
            selectors, _, parent = _selectors.partition(' extends ')
            parents = parent.split('&')
            del self.parts[_selectors]
            for parent in parents:
                new_selectors = selectors + ' extends ' + parent
                self.parts.setdefault(new_selectors, [])
                self.parts[new_selectors].extend(rules)
                rules = []  # only the first parent entry keeps the actual rules
    # Step 2: repeatedly link children with their parents until no
    # 'extends' keys remain (capped at 10 iterations to avoid cycles)
    cnt = 0
    parents_left = True
    while parents_left and cnt < 10:
        cnt += 1
        parents_left = False
        for _selectors in self.parts.keys():
            selectors, _, parent = _selectors.partition(' extends ')
            if parent:
                parents_left = True
                if _selectors not in self.parts:
                    continue  # key was already removed during this pass
                rules = self.parts[_selectors]
                del self.parts[_selectors]
                self.parts.setdefault(selectors, [])
                self.parts[selectors].extend(rules)
                parents = self.link_with_parents(parent, selectors, rules)
                if parents is None:
                    log.warn("Parent rule not found: %s", parent)
                else:
                    # inherit context/options from all parents; child overrides
                    new_context = {}
                    new_options = {}
                    for parent in parents:
                        new_context.update(parent[CONTEXT])
                        new_options.update(parent[OPTIONS])
                    for rule in rules:
                        _new_context = new_context.copy()
                        _new_context.update(rule[CONTEXT])
                        rule[CONTEXT] = _new_context
                        _new_options = new_options.copy()
                        _new_options.update(rule[OPTIONS])
                        rule[OPTIONS] = _new_options
For each part create the inheritance parts from the extends
14,656
def _wrap(fn):
    """Wrap *fn* so it can be called with Value objects as parameters.

    The wrapper merges the argument types into a single result Value, calls
    *fn* on the raw underlying values, and stores the result on the merged
    Value before returning it.
    """
    def _func(*args):
        merged = None
        _args = []
        for arg in args:
            # grow `merged` into a Value of the combined argument type
            if merged.__class__ != arg.__class__:
                if merged is None:
                    merged = arg.__class__(None)
                else:
                    merged = Value._merge_type(merged, arg)(None)
            merged.merge(arg)
            if isinstance(arg, Value):
                arg = arg.value  # unwrap to the raw value for fn
            _args.append(arg)
        # NOTE(review): assumes at least one argument; with no args `merged`
        # stays None and this line raises -- confirm callers guarantee args
        merged.value = fn(*_args)
        return merged
    return _func
Wrapper function to allow calling any function using Value objects as parameters .
14,657
def token(self, i, restrict=None):
    """Return token *i*, scanning one more token when *i* is exactly one
    past the end of the current token list.

    *restrict* is the set of allowed token types (or a false value for any);
    raises NoMoreTokens when no token is available at index *i*.
    """
    available = len(self.tokens)
    if i == available:
        available += self.scan(restrict)
    if i >= available:
        raise NoMoreTokens()
    recorded = self.restrictions[i]
    if restrict and recorded and restrict > recorded:
        raise NotImplementedError("Unimplemented: restriction set changed")
    return self.tokens[i]
Get the i th token and if i is one past the end then scan for another token ; restrict is a list of tokens that are allowed or 0 for any token .
14,658
def scan(self, restrict):
    """Scan the input for the next token starting at self.pos.

    Tokens whose pattern is in self.ignore are consumed silently and scanning
    continues; the first non-ignored match is appended to self.tokens with
    *restrict* recorded in self.restrictions. Returns 1 when a new token was
    added, 0 when the match duplicated the last recorded token.
    """
    while True:
        best_pat = None
        best_pat_len = 0
        # first pattern that matches at the current position wins
        for p, regexp in self.patterns:
            if restrict and p not in restrict and p not in self.ignore:
                continue
            m = regexp.match(self.input, self.pos)
            if m:
                best_pat = p
                best_pat_len = len(m.group(0))
                break
        if best_pat is None:
            msg = "Bad Token"
            if restrict:
                msg = "Trying to find one of " + ", ".join(restrict)
            # NOTE(review): two-argument call suggests a custom SyntaxError
            # class (yapps-style), not the builtin -- confirm
            raise SyntaxError(self.pos, msg)
        if best_pat in self.ignore:
            # ignored token: consume it and keep scanning
            self.pos += best_pat_len
        else:
            end_pos = self.pos + best_pat_len
            token = (self.pos, end_pos, best_pat, self.input[self.pos:end_pos])
            self.pos = end_pos
            # only record the token if it differs from the last one recorded
            if not self.tokens or token != self.tokens[-1]:
                self.tokens.append(token)
                self.restrictions.append(restrict)
                return 1
            break
    return 0
Should scan another token and add it to the list self . tokens and add the restriction to self . restrictions
14,659
def main():
    """Render event displays for events of stations 503 and 508.

    First saves a single-station scatter plot (station 503), then a
    side-by-side MultiPlot comparing stations 503 and 508. Detector
    positions are in meters; marks are sized by particle density and
    colored by relative arrival time.
    """
    # station 503: detector x/y positions [m], particle densities, arrival times [ns]
    x = [-6.34, -2.23, -3.6, 3.46]
    y = [6.34, 2.23, -3.6, 3.46]
    n = [35.0, 51.9, 35.8, 78.9]
    t = [15., 17.5, 20., 27.5]
    dt = [ti - min(t) for ti in t]  # times relative to the earliest hit
    plot = Plot()
    plot.scatter([0], [0], mark='triangle')  # station marker at the origin
    plot.add_pin_at_xy(0, 0, 'Station 503', use_arrow=False, location='below')
    plot.scatter_table(x, y, dt, n)
    plot.set_scalebar(location="lower right")
    plot.set_colorbar('$\Delta$t [ns]')
    plot.set_axis_equal()
    plot.set_mlimits(max=16.)
    plot.set_slimits(min=10., max=100.)
    plot.set_xlabel('x [m]')
    plot.set_ylabel('y [m]')
    plot.save('event_display')
    # station 508 data for the two-panel comparison plot
    x508 = [6.12, 0.00, -3.54, 3.54]
    y508 = [-6.12, -13.23, -3.54, 3.54]
    n508 = [5.6, 16.7, 36.6, 9.0]
    t508 = [15., 22.5, 22.5, 30.]
    dt508 = [ti - min(t508) for ti in t508]
    plot = MultiPlot(1, 2, width=r'.33\linewidth')
    plot.set_xlimits_for_all(min=-10, max=15)
    plot.set_ylimits_for_all(min=-15, max=10)
    plot.set_mlimits_for_all(min=0., max=16.)
    plot.set_colorbar('$\Delta$t [ns]', False)
    plot.set_colormap('blackwhite')
    plot.set_scalebar_for_all(location="upper right")
    # left panel: station 503
    p = plot.get_subplot_at(0, 0)
    p.scatter([0], [0], mark='triangle')
    p.add_pin_at_xy(0, 0, 'Station 503', use_arrow=False, location='below')
    p.scatter_table(x, y, dt, n)
    p.set_axis_equal()
    # right panel: station 508
    p = plot.get_subplot_at(0, 1)
    p.scatter([0], [0], mark='triangle')
    p.add_pin_at_xy(0, 0, 'Station 508', use_arrow=False, location='below')
    p.scatter_table(x508, y508, dt508, n508)
    p.set_axis_equal()
    # only label the outer axes
    plot.show_yticklabels_for_all([(0, 0)])
    plot.show_xticklabels_for_all([(0, 0), (0, 1)])
    plot.set_xlabel('x [m]')
    plot.set_ylabel('y [m]')
    plot.save('multi_event_display')
Event display for an event of station 503
14,660
def _model_unpickle(cls, data):
    """Unpickle a model instance by re-fetching it from the database by pk.

    On an OperationalError (e.g. a stale connection after a fork) the
    connection is closed so Django reconnects, then the query is retried
    once; any other exception propagates unchanged.
    """
    auto_field_value = data['pk']
    try:
        obj = cls.objects.get(pk=auto_field_value)
    except Exception as e:
        if isinstance(e, OperationalError):
            logger.debug("Caught OperationalError, closing database connection.", exc_info=e)
            from django.db import connection
            connection.close()  # force Django to open a fresh connection
            obj = cls.objects.get(pk=auto_field_value)
        else:
            raise
    return obj
Unpickle a model by retrieving it from the database .
14,661
def task(func, *args, **kwargs):
    """Celery task decorator defaulting to the django-cereal pickle serializer.

    Any explicitly supplied 'serializer' keyword is left untouched.
    """
    from celery import shared_task
    kwargs.setdefault('serializer', DJANGO_CEREAL_PICKLE)
    return shared_task(func, *args, **kwargs)
A task decorator that uses the django - cereal pickler as the default serializer .
14,662
def find_files(directory, pattern, recursively=True):
    """Yield (base_directory, filename) pairs for files matching *pattern*.

    Walks *directory* top-down; when *recursively* is False only the top
    level is examined.
    """
    for base, _subdirs, names in os.walk(directory):
        for name in names:
            if fnmatch.fnmatch(name, pattern):
                yield base, name
        if not recursively:
            break
Yield a list of files with their base directories recursively or not .
14,663
def validate(self):
    """Run every validator over the current value.

    Stops at the first validator that reports an error, storing it in
    self.error; returns True only when no error is present or produced.
    """
    if self.error:
        return False
    for check in self.validators:
        outcome = check(self.value)
        self.error = outcome
        if outcome:
            return False
    return True
Run the form value through the validators and update the error field if needed
14,664
def form_group_classes(self):
    """Build the class attribute value for this field's form-group wrapper.

    Returns the classes as a single space-separated string.
    """
    wanted = [
        ('row', self.style == styles.BOOTSTRAP_4 and self.form_type == formtype.HORIZONTAL),
        ('has-error', bool(self.error) and self.style == styles.BOOTSTRAP_3),
        (self.form_group_css_class, bool(self.form_group_css_class)),
    ]
    classes = ['form-group'] + [name for name, include in wanted if include]
    return ' '.join(classes)
Full list of classes for the class attribute of the form group . Returned as a string with spaces separating each class ready for insertion into the class attribute .
14,665
def input_classes(self):
    """Return the space-separated class attribute value for the input element."""
    parts = [self.base_input_css_class]
    if self.css_class:
        parts.append(self.css_class)
    flag_invalid = self.style == styles.BOOTSTRAP_4 and bool(self.error)
    if flag_invalid:
        parts.append('is-invalid')
    return ' '.join(parts)
Full list of classes for the class attribute of the input returned as a string with spaces separating each class .
14,666
def render(self):
    """Render the complete form (open tag, all sections, close tag) to HTML.

    Returns a Markup-wrapped string so templates treat it as safe HTML.
    """
    return Markup(env.get_template('form.html').render(
        form=self,
        render_open_tag=True,
        render_close_tag=True,
        render_before=True,
        render_sections=True,
        render_after=True,
        # CSRF token generation is skipped when CSRF is explicitly disabled
        generate_csrf_token=None if self.disable_csrf else _csrf_generation_function))
Render the form and all sections to HTML
14,667
def render_before_sections(self):
    """Render the form up to (but excluding) the first section.

    Opens the form tag without closing it, so the caller can render the
    sections and the closing tag separately.
    """
    return Markup(env.get_template('form.html').render(
        form=self,
        render_open_tag=True,
        render_close_tag=False,
        render_before=True,
        render_sections=False,
        render_after=False,
        # NOTE(review): this gates CSRF generation on self.action, while
        # render() gates it on self.disable_csrf -- looks inconsistent; confirm
        generate_csrf_token=None if self.action else _csrf_generation_function))
Render the form up to the first section . This will open the form tag but not close it .
14,668
def read_form_data(self):
    """Read and validate the form's values from the current request.

    Does nothing for readonly forms, mismatched request methods, or requests
    lacking the hidden submitted marker. Sets self.has_errors when any field
    fails validation; raises exceptions.AlreadyProcessed on a second call.
    """
    if self.processed_data:
        raise exceptions.AlreadyProcessed('The data has already been processed for this form')
    if self.readonly:
        return
    if request.method != self.method:
        return
    data = request.form if self.method == 'POST' else request.args
    if self.submitted_hidden_input_name not in data:
        return
    self.processed_data = True
    for field in self.all_fields:
        if field.readonly:
            continue
        field.extract_value(data)
        if not field.validate():
            log.debug('Validation error in field \'%s\': %s' % (field.name, field.error))
            self.has_errors = True
Attempt to read the form data from the request
14,669
def get_if_present(self, name, default=None):
    """Return the value of field *name*, or *default* when no such field exists.

    Raises exceptions.FormNotProcessed when the form data has not been read.
    """
    if not self.processed_data:
        raise exceptions.FormNotProcessed('The form data has not been processed yet')
    return self[name] if name in self.field_dict else default
Returns the value for a field, but if the field doesn't exist it will return default instead
14,670
def disable_validation(self, field_name):
    """Strip every validator from the field named *field_name*.

    Raises exceptions.FieldNotFound when the field does not exist.
    """
    target = self.field_dict.get(field_name)
    if target:
        target.validators = []
    else:
        raise exceptions.FieldNotFound('Field not found: \'%s\' when trying to disable validation' % field_name)
Disable the validation rules for a field
14,671
def create_single_button_clone(self, submit_text='Submit', submit_css_class='btn-primary', read_form_data=True, form_type=None):
    """Create a copy of this form with every input replaced by a hidden field
    plus a single submit button.

    Submitting the clone posts a request identical to the current form state;
    callers may change individual hidden values first. Optional empty fields,
    unchecked checkboxes, and existing submit buttons are omitted.
    """
    from .basicfields import BooleanCheckbox, HiddenField, SubmitButton
    fields = []
    for field in self.all_fields:
        if field.allow_missing:
            # drop optional fields that currently have no value
            if field.value is None or field.value == '':
                continue
        elif isinstance(field, BooleanCheckbox) and not field.value:
            continue  # unchecked checkboxes are never submitted by browsers
        elif isinstance(field, SubmitButton):
            continue  # replaced by the single new submit button
        fields.append(HiddenField(field.name, field.value))
    form = Form(fields,
                action=self.action,
                method=self.method,
                submit_css_class=submit_css_class,
                submit_text=submit_text,
                read_form_data=read_form_data,
                disable_csrf=self.disable_csrf,
                readonly=False,
                form_type=form_type if form_type else self.form_type)
    return form
This will create a copy of this form with all of inputs replaced with hidden inputs and with a single submit button . This allows you to easily create a button that will submit a post request which is identical to the current state of the form . You could then if required change some of the values in the hidden inputs .
14,672
def polish(commit_indexes=None, urls=None):
    """Decorator attaching screenshot-polish metadata to a function.

    Sets polish_commit_indexes and/or polish_urls attributes on the wrapped
    function (carried over by functools.wraps) without changing its behavior.
    """
    def _decorate(target):
        if commit_indexes:
            target.polish_commit_indexes = commit_indexes
        if urls:
            target.polish_urls = urls

        @wraps(target)
        def _passthrough(*args, **kwargs):
            return target(*args, **kwargs)

        return _passthrough
    return _decorate
Apply certain behaviors to commits or URLs that need polishing before they are ready for screenshots
14,673
def timestamp_to_datetime(cls, time_stamp, localized=True):
    """Convert a UTC POSIX timestamp to a datetime.datetime.

    When *localized* is True the result is made timezone-aware using
    pytz.utc; otherwise a naive UTC datetime is returned.
    """
    ret = datetime.datetime.utcfromtimestamp(time_stamp)
    if localized:
        ret = localize(ret, pytz.utc)
    return ret
Converts a UTC timestamp to a datetime . datetime .
14,674
def _imm_default_init ( self , * args , ** kwargs ) : for ( k , v ) in six . iteritems ( { k : v for dct in ( args + ( kwargs , ) ) for ( k , v ) in dct } ) : setattr ( self , k , v )
An immutable's default initialization function is to accept any number of dictionaries followed by any number of keyword args and to turn them all into the parameters of the immutable that is being created .
14,675
def _imm_init_getattribute(self, name):
    """getattribute used while an immutable is still initializing.

    Parameter reads come straight from __dict__ (raising RuntimeError if a
    required parameter has not been set yet); the first read of a lazy value
    forces the transition to the transient state and retries the lookup via
    the normal attribute machinery.
    """
    values = _imm_value_data(self)
    params = _imm_param_data(self)
    if name in values:
        # requesting a computed value ends initialization: run all checks,
        # switch to transient mode, then resolve the attribute normally
        _imm_init_to_trans(self)
        return getattr(self, name)
    elif name in params:
        dd = object.__getattribute__(self, '__dict__')
        if name in dd:
            return dd[name]
        else:
            raise RuntimeError('Required immutable parameter %s requested before set' % name)
    else:
        # not a declared param or value: ordinary attribute lookup
        return object.__getattribute__(self, name)
During the initial transient state getattribute works on params ; as soon as a non - param is requested all checks are forced and the getattr switches to standard transient form .
14,676
def _imm_getattribute(self, name):
    """getattribute for immutables: computes and caches lazy values on demand.

    During initialization this defers to _imm_init_getattribute. Otherwise a
    cached value is returned directly; an uncached lazy value is computed
    from its declared argument attributes, cached, and (for checked/const
    values) validated -- the cache entry is dropped again if validation fails.
    """
    if _imm_is_init(self):
        return _imm_init_getattribute(self, name)
    else:
        dd = object.__getattribute__(self, '__dict__')
        if name == '__dict__':
            return dd
        # fast path: already cached in __dict__ (dd itself is the miss sentinel)
        curval = dd.get(name, dd)
        if curval is not dd:
            return dd[name]
        values = _imm_value_data(self)
        if name not in values:
            # not a lazy value: ordinary attribute lookup
            return object.__getattribute__(self, name)
        (args, memfn, _) = values[name]
        # compute the lazy value from its declared argument attributes
        value = memfn(*[getattr(self, arg) for arg in args])
        dd[name] = value
        if name in _imm_const_data(self):
            try:
                _imm_check(self, [name])
            except:
                del dd[name]  # never cache a value that failed validation
                raise
        return value
An immutable s getattribute calculates lazy values when not yet cached in the object then adds them as attributes .
14,677
def _imm_init_setattr(self, name, value):
    """setattr used while an immutable is initializing.

    Only declared parameters may be assigned; each value is passed through
    the parameter's transform function (when one exists) and stored directly
    in __dict__. No checks run until the object leaves the init state.
    """
    params = _imm_param_data(self)
    if name in params:
        tx_fn = params[name][1]  # per-parameter transform, may be None
        value = value if tx_fn is None else tx_fn(value)
        object.__getattribute__(self, '__dict__')[name] = value
    else:
        raise TypeError('Attempt to change non-parameter \'%s\' of initializing immutable' % name)
An immutable's initial setattr allows only params to be set and does not run checks on the new parameters until a full parameter-set has been specified, at which point it runs all checks and switches over to a normal setattr and getattr method .
14,678
def _imm_trans_setattr(self, name, value):
    """setattr for a transient immutable: assign a parameter and re-check.

    The new value is transformed, cached values that depend on the parameter
    are invalidated, and every check involving the parameter is run; on
    failure the old value and the old dependent caches are restored before
    the exception propagates.
    """
    params = _imm_param_data(self)
    dd = object.__getattribute__(self, '__dict__')
    if name in params:
        (_, tx_fn, arg_lists, check_fns, deps) = params[name]
        value = value if tx_fn is None else tx_fn(value)
        # stash and drop cached values that depend on this parameter so
        # they can be restored if a check fails
        old_deps = {}
        orig_value = dd[name]
        for dep in deps:
            if dep in dd:
                old_deps[dep] = dd[dep]
                del dd[dep]
        try:
            dd[name] = value
            for (args, check_fn) in zip(arg_lists, check_fns):
                if not check_fn(*[getattr(self, arg) for arg in args]):
                    raise RuntimeError(('Changing value of immutable attribute \'%s\'' + ' caused validation failure: %s') % (name, (args, check_fn)))
            old_deps = None  # success: nothing to roll back
        finally:
            if old_deps:
                # a check failed: restore the dependent caches and old value
                for (dep, val) in six.iteritems(old_deps):
                    dd[dep] = val
                dd[name] = orig_value
    else:
        raise TypeError('Attempt to change non-parameter member \'%s\' of transient immutable' % name)
An immutable s transient setattr allows params to be set and runs checks as they are .
14,679
def _imm_setattr(self, name, value):
    """Route attribute assignment by the immutable's lifecycle state.

    Persistent immutables reject all assignments; transient and initializing
    objects delegate to their dedicated setattr implementations.
    """
    if _imm_is_persist(self):
        raise TypeError('Attempt to change parameter \'%s\' of non-transient immutable' % name)
    handler = _imm_trans_setattr if _imm_is_trans(self) else _imm_init_setattr
    return handler(self, name, value)
A persistent immutable s setattr simply does not allow attributes to be set .
14,680
def _imm_trans_delattr(self, name):
    """delattr for a transient immutable.

    Deleting a parameter resets it to its declared default (required
    parameters cannot be deleted); deleting a lazy value invalidates its
    cache, re-running checks for const values. Anything else raises
    TypeError.

    Fix: the const re-check referenced the undefined name ``imm`` (a
    NameError whenever a cached const value was deleted); it now passes
    ``self``.
    """
    (params, values) = (_imm_param_data(self), _imm_value_data(self))
    if name in params:
        dflt = params[name][0]
        if dflt is None:
            raise TypeError('Attempt to reset required parameter \'%s\' of immutable' % name)
        setattr(self, name, dflt[0])  # reassignment re-runs the checks
    elif name in values:
        dd = object.__getattribute__(self, '__dict__')
        if name in dd:
            del dd[name]
            if name in _imm_const_data(self):
                _imm_check(self, [name])
    else:
        raise TypeError('Cannot delete non-value non-param attribute \'%s\' from immutable' % name)
A transient immutable s delattr allows the object s value - caches to be invalidated ; a var that is deleted returns to its default - value in a transient immutable otherwise raises an exception .
14,681
def _imm_delattr(self, name):
    """delattr for an immutable.

    On a persistent immutable only cached lazy values may be invalidated
    (with const values re-checked); deleting anything else raises TypeError.
    Transient immutables delegate to _imm_trans_delattr.

    Fix: the const re-check referenced the undefined name ``imm`` (a
    NameError whenever a cached const value was deleted); it now passes
    ``self``.
    """
    if _imm_is_persist(self):
        values = _imm_value_data(self)
        if name in values:
            dd = object.__getattribute__(self, '__dict__')
            if name in dd:
                del dd[name]
                if name in _imm_const_data(self):
                    _imm_check(self, [name])
        else:
            raise TypeError('Attempt to reset parameter \'%s\' of non-transient immutable' % name)
    else:
        return _imm_trans_delattr(self, name)
A persistent immutable s delattr allows the object s value - caches to be invalidated otherwise raises an exception .
14,682
def _imm_dir(self):
    """List the object's attributes plus all lazy-value names, cached or not."""
    names = set(dir(self.__class__))
    names.update(self.__dict__.keys())
    names.update(six.iterkeys(_imm_value_data(self)))
    return sorted(names)
An immutable object s dir function should list not only its attributes but also its un - cached lazy values .
14,683
def _imm_repr(self):
    """Default repr: ``ClassName(k=v, ...)``, with ``*`` marking a transient object."""
    marker = '' if _imm_is_persist(self) else '*'
    pairs = ', '.join(k + '=' + str(v) for (k, v) in six.iteritems(imm_params(self)))
    return '%s%s(%s)' % (type(self).__name__, marker, pairs)
The default representation function for an immutable object .
14,684
def _imm_new(cls):
    """__new__ implementation for immutable classes.

    Creates the instance, fills in declared parameter defaults, clears the
    value caches, and flags the object as being in its init state so that
    the post-__init__ cleanup (checks + state transition) runs.
    """
    imm = object.__new__(cls)
    # assign each parameter's default value, when one was declared
    params = cls._pimms_immutable_data_['params']
    for (p, dat) in six.iteritems(params):
        dat = dat[0]  # dat[0] is the (default,) tuple or None
        if dat:
            object.__setattr__(imm, p, dat[0])
    _imm_clear(imm)
    dd = object.__getattribute__(imm, '__dict__')
    dd['_pimms_immutable_is_init'] = True
    return imm
All immutable new classes use a hack to make sure the post - init cleanup occurs .
14,685
def _scan_file(filename, sentinel, source_type='import'):
    """Yield (source_type, module_name, None) tuples for imports in *filename*.

    Files already seen (tracked via realpath in *sentinel*) or larger than
    max_file_size are skipped. A settings.py defining INSTALLED_APPS is
    additionally handed to the Django settings scanner, whose items are
    yielded as well.
    """
    filename = os.path.abspath(filename)
    real_filename = os.path.realpath(filename)
    if os.path.getsize(filename) <= max_file_size:
        if real_filename not in sentinel and os.path.isfile(filename):
            sentinel.add(real_filename)  # guards against symlink loops / rescans
            basename = os.path.basename(filename)
            scope, imports = ast_scan_file(filename)
            if scope is not None and imports is not None:
                for imp in imports:
                    yield (source_type, imp.module, None)
                if 'INSTALLED_APPS' in scope and basename == 'settings.py':
                    log.info('Found Django settings: %s', filename)
                    for item in django.handle_django_settings(filename):
                        yield item
            else:
                log.warn('Could not scan imports from: %s', filename)
    else:
        log.warn('File size too large: %s', filename)
Generator that performs the actual scanning of files .
14,686
def _scan_directory(directory, sentinel, depth=0):
    """Yield absolute paths of non-ignored entries in *directory*.

    Essentially os.listdir with depth limiting, realpath-based loop
    protection via *sentinel*, and the module's directory/extension
    ignore filters applied.
    """
    directory = os.path.abspath(directory)
    real_path = os.path.realpath(directory)
    if depth >= max_directory_depth or real_path in sentinel or not os.path.isdir(directory):
        return
    sentinel.add(real_path)
    for entry in os.listdir(directory):
        if entry in ('.', '..'):
            continue
        full = os.path.abspath(os.path.join(directory, entry))
        ignored = ((os.path.isdir(full) and _dir_ignore.search(full))
                   or (os.path.isfile(full) and _ext_ignore.search(full)))
        if not ignored:
            yield full
Basically os . listdir with some filtering .
14,687
def _get_project_conf():
    """Load configuration from the project's config file(s), if any.

    Searches each candidate directory under the VCS root for PROJECT_CONF
    globs, stopping at the first directory with matches, and merges every
    matching YAML file into one dict (later files win). Returns an empty
    dict when no VCS root or config is found.
    """
    config_settings = {}
    project_root = find_vcs_root(".")
    if project_root is None:
        return config_settings
    for conf_dir in PROJECT_CONF_DIRS:
        # NOTE(review): lstrip("./") strips ALL leading '.' and '/' characters,
        # not just a "./" prefix -- confirm this is intended for every entry
        conf_dir = conf_dir.lstrip("./")
        joined_dir = os.path.join(project_root, conf_dir) if conf_dir else project_root
        joined_glob = os.path.join(joined_dir, PROJECT_CONF)
        conf_files = glob.glob(joined_glob)
        if conf_files:
            break
    else:
        # for-else: no candidate directory produced any matches
        conf_files = []
    for conf_file in conf_files:
        try:
            with io.open(conf_file, encoding="utf-8") as input_file:
                loaded_settings = yaml.safe_load(input_file)
        except EnvironmentError:
            logger.warning("Failed to load config from %s", conf_file)
        else:
            logger.info("Config loaded from %s", conf_file)
            config_settings.update(loaded_settings)
    return config_settings
Loads configuration from project config file .
14,688
def get_config(config_file=None, config_values=None, load_project_conf=True):
    """Load and merge configuration, returning the final settings dict.

    Precedence (lowest to highest): packaged defaults, user config file,
    project config (only consulted when no user config was found),
    explicit *config_values*. Raises Dump2PolarionException when no
    configuration source is available at all.
    """
    config_values = config_values or {}
    config_settings = {}
    default_conf = _get_default_conf()
    user_conf = _get_user_conf(config_file) if config_file else {}
    # project config is a fallback; skip it when a user config exists
    project_conf = {} if user_conf or not load_project_conf else _get_project_conf()
    if not (user_conf or project_conf or config_values):
        if load_project_conf:
            raise Dump2PolarionException(
                "Failed to find configuration file for the project "
                "and no configuration file or values passed."
            )
        raise Dump2PolarionException("No configuration file or values passed.")
    # merge in increasing priority order
    config_settings.update(default_conf)
    config_settings.update(user_conf)
    config_settings.update(project_conf)
    config_settings.update(config_values)
    _populate_urls(config_settings)
    _set_legacy_project_id(config_settings)
    _set_legacy_custom_fields(config_settings)
    _check_config(config_settings)
    return config_settings
Loads config file and returns its content .
14,689
def verify_predictions(predictions):
    """Coerce *predictions* to a 2-D numpy column array of 0/1 values.

    Raises ValueError when any entry is not exactly 0 or 1.
    """
    arr = np.array(predictions, copy=False)
    if not np.array_equal(arr, arr.astype(bool)):
        raise ValueError("predictions contains invalid values. " +
                         "The only permitted values are 0 or 1.")
    return arr[:, np.newaxis] if arr.ndim == 1 else arr
Ensures that predictions is stored as a numpy array and checks that all values are either 0 or 1 .
14,690
def verify_scores(scores):
    """Coerce *scores* to a 2-D numpy column array, rejecting NaN/inf entries."""
    arr = np.array(scores, copy=False)
    if not np.all(np.isfinite(arr)):
        raise ValueError("scores contains invalid values. " +
                         "Please check that all values are finite.")
    return arr[:, np.newaxis] if arr.ndim == 1 else arr
Ensures that scores is stored as a numpy array and checks that all values are finite .
14,691
def verify_consistency(predictions, scores, proba, opt_class):
    """Verify that predictions/scores/proba/opt_class have consistent shapes.

    Fills in defaults when None (proba: all False; opt_class: all True) and
    clears proba flags for classifiers whose scores fall outside [0, 1],
    warning about each. Returns the validated (proba, opt_class) pair as
    boolean numpy arrays; raises ValueError on any dimension mismatch or
    when opt_class has no True entry.
    """
    if predictions.shape != scores.shape:
        raise ValueError("predictions and scores arrays have inconsistent " + "dimensions.")
    n_class = scores.shape[1] if scores.ndim > 1 else 1
    if proba is None:
        proba = np.repeat(False, n_class)
    if opt_class is None:
        opt_class = np.repeat(True, n_class)
    proba = np.array(proba, dtype=bool, ndmin=1)
    opt_class = np.array(opt_class, dtype=bool, ndmin=1)
    if np.sum(opt_class) < 1:
        raise ValueError("opt_class should contain at least one True value.")
    if predictions.shape[1] != len(proba):
        raise ValueError("mismatch in shape of proba and predictions.")
    if predictions.shape[1] != len(opt_class):
        raise ValueError("mismatch in shape of opt_class and predictions.")
    for m in range(n_class):
        # scores outside [0,1] cannot be probabilities; demote with a warning
        if (np.any(np.logical_or(scores[:, m] < 0, scores[:, m] > 1)) and proba[m]):
            warnings.warn("scores fall outside the [0,1] interval for " + "classifier {}. Setting proba[m]=False.".format(m))
            proba[m] = False
    return proba, opt_class
Verifies that all arrays have consistent dimensions . Also verifies that the scores are consistent with proba .
14,692
def verify_identifiers(identifiers, n_items):
    """Validate that *identifiers* has length *n_items* with no duplicates.

    Returns the identifiers as a numpy array, or None when None was given.
    """
    if identifiers is None:
        return None
    identifiers = np.array(identifiers, copy=False)
    if len(identifiers) != n_items:
        raise ValueError("identifiers has inconsistent dimension.")
    # np.unique drops duplicates, so a shorter result means repeats exist
    if len(np.unique(identifiers)) != n_items:
        raise ValueError("identifiers contains duplicate values.")
    return identifiers
Ensure that identifiers has a compatible length and that its elements are unique
14,693
def scores_to_probs(scores, proba, eps=0.01):
    """Map raw classifier scores onto [0, 1] probabilities via a logistic.

    Columns whose proba flag is True are assumed to already hold
    probabilities and are left untouched. Each remaining column is squashed
    through a logistic function scaled so that the most extreme observed
    score maps to 1 - eps (or eps at the negative extreme).

    Fix: the original assigned through ``self._probs`` / read ``self.scores``
    inside this plain function, raising NameError; it now operates on the
    local arrays.

    Parameters
    ----------
    scores : 2-D array with one column per classifier
    proba : 1-D boolean array; True where the column is already a probability
    eps : float, tail mass left at the most extreme observed score

    Returns
    -------
    2-D array of probabilities (the original *scores* object when every
    column already holds probabilities).
    """
    if np.all(proba):
        return scores
    probs = copy.deepcopy(scores)
    for m in range(len(proba)):
        if proba[m]:
            continue
        max_extreme_score = max(np.abs(np.min(scores[:, m])),
                                np.abs(np.max(scores[:, m])))
        # choose k so that expit(k * max_extreme_score) == 1 - eps
        k = np.log((1 - eps) / eps) / max_extreme_score
        probs[:, m] = expit(k * scores[:, m])
    return probs
Transforms scores to probabilities by applying the logistic function
14,694
def fire_metric(metric_name, metric_value):
    """Fire a single metric through the module-level MetricsApiClient.

    The value is coerced to float before sending; returns a short
    confirmation string.
    """
    value = float(metric_value)
    metric_client.fire_metrics(**{metric_name: value})
    return "Fired metric <{}> with value <{}>".format(metric_name, value)
Fires a metric using the MetricsApiClient
14,695
def fire_failed_msisdn_lookup(self, to_identity):
    """Fire the 'identity.no_address' webhook when a message has no to_addr.

    Delivers the payload to every Hook registered for that event.
    """
    payload = {"to_identity": to_identity}
    hooks = Hook.objects.filter(event="identity.no_address")
    for hook in hooks:
        # None instance: the payload_override carries all the data
        hook.deliver_hook(None, payload_override={"hook": hook.dict(), "data": payload})
Fires a webhook in the event of a None to_addr .
14,696
def dump_data(self, filename, queryset):
    """Serialize *queryset* to gzipped newline-delimited JSON at *filename*.

    Each Outbound is rendered with OutboundArchiveSerializer, one JSON
    object per line; iterator() keeps memory use flat on large querysets.
    """
    with gzip.open(filename, "wb") as f:
        for outbound in queryset.iterator():
            data = OutboundArchiveSerializer(outbound).data
            data = JSONRenderer().render(data)  # renders to bytes
            f.write(data)
            f.write("\n".encode("utf-8"))
Serializes the queryset into a newline separated JSON format and places it into a gzipped file
14,697
def create_archived_outbound(self, date, filename):
    """Create an ArchivedOutbounds entry for *date* from the file at *filename*."""
    with open(filename, "rb") as f:
        f = File(f)  # wrap in Django's File so the FileField can store it
        ArchivedOutbounds.objects.create(date=date, archive=f)
Creates the required ArchivedOutbound entry with the file specified at filename
14,698
def manages(self, account):
    """Return True when request.user is a manager for *account*.

    *account* is converted to a string and compared against the slug of
    each organization listed under the session's manager role.
    """
    wanted_slug = str(account)
    managed = self.request.session.get('roles', {}).get('manager', [])
    for organization in managed:
        if organization['slug'] == wanted_slug:
            return True
    return False
Returns True if the request . user is a manager for account . account will be converted to a string and compared to an organization slug .
14,699
def get_queryset(self):
    """Filter the base queryset to records strictly before self.ends_at.

    Builds a '<date_field>__lt' filter from the class's date_field; no
    filtering is applied when ends_at is unset.
    """
    kwargs = {}
    if self.ends_at:
        kwargs.update({'%s__lt' % self.date_field: self.ends_at})
    return super(BeforeMixin, self).get_queryset().filter(**kwargs)
Implements before date filtering on date_field