Dataset columns: idx (int64, range 0 to 63k); question (string, lengths 61 to 4.03k); target (string, lengths 6 to 1.23k).
5,300
def check ( self , val ) : if self . type is None : return True is_list = isinstance ( val , list ) if not self . listable and is_list : return False if self . type == KEY_TYPES . NUMERIC and not is_number ( val ) : return False elif ( self . type == KEY_TYPES . TIME and not is_number ( val ) and '-' not in val and '/' not in val ) : return False elif self . type == KEY_TYPES . STRING : if is_list : if not isinstance ( val [ 0 ] , basestring ) : return False elif not isinstance ( val , basestring ) : return False elif self . type == KEY_TYPES . BOOL : if is_list and not isinstance ( val [ 0 ] , bool ) : return False elif not isinstance ( val , bool ) : return False return True
Make sure the given value is consistent with this Key specification.
5,301
def get_logger ( name = None , stream_fmt = None , file_fmt = None , date_fmt = None , stream_level = None , file_level = None , tofile = None , tostr = True ) : if tofile is None and not tostr : raise ValueError ( "Must log to something: `tofile` or `tostr` must be `True`." ) logger = logging . getLogger ( name ) if hasattr ( logger , '_OSC_LOGGER' ) : return logger else : logger . _OSC_LOGGER = True logger . _LOADED = _LOADED_LEVEL while len ( logger . handlers ) > 0 : logger . handlers . pop ( ) logger . propagate = 0 if file_level is None : file_level = _FILE_LEVEL_DEF if stream_level is None : stream_level = _STREAM_LEVEL_DEF logger . setLevel ( int ( np . min ( [ file_level , stream_level ] ) ) ) if date_fmt is None : date_fmt = '%Y/%m/%d %H:%M:%S' if tofile is not None : if file_fmt is None : file_fmt = "%(asctime)s %(levelname)8.8s [%(filename)20.20s:" file_fmt += "%(funcName)-20.20s]%(indent)s%(message)s" fileFormatter = IndentFormatter ( file_fmt , datefmt = date_fmt ) fileHandler = logging . FileHandler ( tofile , 'w' ) fileHandler . setFormatter ( fileFormatter ) fileHandler . setLevel ( file_level ) logger . addHandler ( fileHandler ) logger . filename = tofile if tostr : if stream_fmt is None : stream_fmt = "%(indent)s%(message)s" strFormatter = IndentFormatter ( stream_fmt , datefmt = date_fmt ) strHandler = logging . StreamHandler ( ) strHandler . setFormatter ( strFormatter ) strHandler . setLevel ( stream_level ) logger . addHandler ( strHandler ) return logger
Create a standard logger object which logs to a file and/or a stdout stream.
5,302
def log_raise ( log , err_str , err_type = RuntimeError ) : log . error ( err_str ) for handle in log . handlers : handle . flush ( ) raise err_type ( err_str )
Log an error message and raise an error .
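A minimal usage sketch of the two logging helpers above; the logger name and error message are illustrative only:

log = get_logger("osc-demo", tostr=True)  # stream-only logger
try:
    log_raise(log, "unexpected negative flux", ValueError)
except ValueError:
    pass  # the message was already logged and the handlers flushed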
5,303
def log_memory ( log , pref = None , lvl = logging . DEBUG , raise_flag = True ) : import os import sys cyc_str = "" KB = 1024.0 if pref is not None : cyc_str += "{}: " . format ( pref ) UNIT = KB * KB if sys . platform == 'darwin' else KB good = False try : import resource max_self = resource . getrusage ( resource . RUSAGE_SELF ) . ru_maxrss max_child = resource . getrusage ( resource . RUSAGE_CHILDREN ) . ru_maxrss _str = "RSS Max Self: {:7.2f} [MB], Child: {:7.2f} [MB]" . format ( max_self / UNIT , max_child / UNIT ) cyc_str += _str except Exception as err : log . log ( lvl , "resource.getrusage failed. '{}'" . format ( str ( err ) ) ) if raise_flag : raise else : good = True try : import psutil process = psutil . Process ( os . getpid ( ) ) rss = process . memory_info ( ) . rss cpu_perc = process . cpu_percent ( ) mem_perc = process . memory_percent ( ) num_thr = process . num_threads ( ) _str = "; RSS: {:7.2f} [MB], {:7.2f}%; Threads: {:3d}, CPU: {:7.2f}%" . format ( rss / UNIT , mem_perc , num_thr , cpu_perc ) cyc_str += _str except Exception as err : log . log ( lvl , "psutil.Process failed. '{}'" . format ( str ( err ) ) ) if raise_flag : raise else : good = True if good : log . log ( lvl , cyc_str ) return
Log the current memory usage .
5,304
def load_args ( self , args , clargs ) : args = self . parser . parse_args ( args = clargs , namespace = args ) if args . subcommand is None : self . parser . print_help ( ) args = None return args
Parse arguments and return configuration settings .
5,305
def _setup_argparse ( self ) : parser = argparse . ArgumentParser ( prog = 'catalog' , description = 'Parent Catalog class for astrocats.' ) subparsers = parser . add_subparsers ( description = 'valid subcommands' , dest = 'subcommand' ) self . _add_parser_arguments_import ( subparsers ) self . _add_parser_arguments_git ( subparsers ) self . _add_parser_arguments_analyze ( subparsers ) return parser
Create an argparse instance and set it up with the appropriate parameters.
5,306
def _add_parser_arguments_import ( self , subparsers ) : import_pars = subparsers . add_parser ( "import" , help = "Import data." ) import_pars . add_argument ( '--update' , '-u' , dest = 'update' , default = False , action = 'store_true' , help = 'Only update catalog using live sources.' ) import_pars . add_argument ( '--load-stubs' , dest = 'load_stubs' , default = False , action = 'store_true' , help = 'Load stubs before running.' ) import_pars . add_argument ( '--archived' , '-a' , dest = 'archived' , default = False , action = 'store_true' , help = 'Always use task caches.' ) import_pars . add_argument ( '--tasks' , dest = 'args_task_list' , nargs = '*' , default = None , help = 'space delimited list of tasks to perform.' ) import_pars . add_argument ( '--yes' , dest = 'yes_task_list' , nargs = '+' , default = None , help = 'space delimited list of tasks to turn on.' ) import_pars . add_argument ( '--no' , dest = 'no_task_list' , nargs = '+' , default = None , help = 'space delimited list of tasks to turn off.' ) import_pars . add_argument ( '--min-task-priority' , dest = 'min_task_priority' , default = None , help = 'minimum priority for a task to run' ) import_pars . add_argument ( '--max-task-priority' , dest = 'max_task_priority' , default = None , help = 'maximum priority for a task to run' ) import_pars . add_argument ( '--task-groups' , dest = 'task_groups' , default = None , help = 'predefined group(s) of tasks to run.' ) return import_pars
Create a parser for the import subcommand and its associated arguments.
5,307
def _add_parser_arguments_git ( self , subparsers ) : subparsers . add_parser ( "git-clone" , help = "Clone all defined data repositories if they dont exist." ) subparsers . add_parser ( "git-push" , help = "Add all files to data repositories, commit, and push." ) subparsers . add_parser ( "git-pull" , help = "'Pull' all data repositories." ) subparsers . add_parser ( "git-reset-local" , help = "Hard reset all data repositories using local 'HEAD'." ) subparsers . add_parser ( "git-reset-origin" , help = "Hard reset all data repositories using 'origin/master'." ) subparsers . add_parser ( "git-status" , help = "Get the 'git status' of all data repositories." ) return
Create sub-parsers for the git subcommands.
5,308
def _add_parser_arguments_analyze ( self , subparsers ) : lyze_pars = subparsers . add_parser ( "analyze" , help = "Perform basic analysis on this catalog." ) lyze_pars . add_argument ( '--count' , '-c' , dest = 'count' , default = False , action = 'store_true' , help = 'Determine counts of entries, files, etc.' ) return lyze_pars
Create a parser for the analyze subcommand .
5,309
def compress_gz ( fname ) : import shutil import gzip comp_fname = fname + '.gz' with codecs . open ( fname , 'rb' ) as f_in , gzip . open ( comp_fname , 'wb' ) as f_out : shutil . copyfileobj ( f_in , f_out ) os . remove ( fname ) return comp_fname
Compress the file with the given name and delete the uncompressed file .
5,310
def IOC_TYPECHECK ( t ) : result = ctypes . sizeof ( t ) assert result <= _IOC_SIZEMASK , result return result
Return the size of the given type and check its suitability for use in an ioctl command number.
5,311
def IOR ( type , nr , size ) : return IOC ( IOC_READ , type , nr , IOC_TYPECHECK ( size ) )
An ioctl with read parameters .
5,312
def IOW ( type , nr , size ) : return IOC ( IOC_WRITE , type , nr , IOC_TYPECHECK ( size ) )
An ioctl with write parameters .
5,313
def IOWR ( type , nr , size ) : return IOC ( IOC_READ | IOC_WRITE , type , nr , IOC_TYPECHECK ( size ) )
An ioctl with both read and write parameters.
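The IOR/IOW/IOWR helpers above rely on an IOC packing function and direction flags defined elsewhere in the module, plus IOC_TYPECHECK shown earlier. A minimal sketch of the missing pieces, assuming the common Linux asm-generic bit layout (the shift values and size mask may differ per architecture):

import ctypes

# Assumed asm-generic constants, not taken from this module:
_IOC_NRSHIFT, _IOC_TYPESHIFT, _IOC_SIZESHIFT, _IOC_DIRSHIFT = 0, 8, 16, 30
IOC_NONE, IOC_WRITE, IOC_READ = 0, 1, 2
_IOC_SIZEMASK = (1 << 14) - 1

def IOC(dir, type, nr, size):
    # Pack direction, payload size, type (group) and number into one request code.
    return ((dir << _IOC_DIRSHIFT) | (size << _IOC_SIZESHIFT) |
            (type << _IOC_TYPESHIFT) | (nr << _IOC_NRSHIFT))

# Example: a read ioctl in group 'V', number 0, carrying a 4-byte int payload.
request = IOR(ord('V'), 0, ctypes.c_int)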
5,314
def _get_last_dirs ( path , num = 1 ) : head , tail = os . path . split ( path ) last_path = str ( tail ) for ii in range ( num ) : head , tail = os . path . split ( head ) last_path = os . path . join ( tail , last_path ) last_path = "..." + last_path return last_path
Get a path including only the trailing num directories .
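Illustrative call with a made-up path; num=2 keeps the last three path components and prefixes an ellipsis:

_get_last_dirs("/data/catalogs/sne/output/json", num=2)
# -> '...sne/output/json'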
5,315
def analyze ( self , args ) : self . log . info ( "Running catalog analysis" ) if args . count : self . count ( ) return
Run the analysis routines determined from the given args .
5,316
def count ( self ) : self . log . info ( "Running 'count'" ) retvals = { } num_tasks = self . _count_tasks ( ) retvals [ 'num_tasks' ] = num_tasks num_files = self . _count_repo_files ( ) retvals [ 'num_files' ] = num_files return retvals
Analyze the counts of tasks and repository files.
5,317
def _count_tasks ( self ) : self . log . warning ( "Tasks:" ) tasks , task_names = self . catalog . _load_task_list_from_file ( ) num_tasks = len ( tasks ) num_tasks_act = len ( [ tt for tt , vv in tasks . items ( ) if vv . active ] ) num_task_files = os . path . join ( self . catalog . PATHS . tasks_dir , '*.py' ) num_task_files = len ( glob ( num_task_files ) ) tasks_str = "{} ({} default active) with {} task-files." . format ( num_tasks , num_tasks_act , num_task_files ) self . log . warning ( tasks_str ) return num_tasks
Count the number of tasks, both in the JSON task list and in the tasks directory.
5,318
def _count_repo_files ( self ) : self . log . warning ( "Files:" ) num_files = 0 repos = self . catalog . PATHS . get_all_repo_folders ( ) num_type = np . zeros ( len ( self . _COUNT_FILE_TYPES ) , dtype = int ) num_ign = 0 for rep in repos : last_path = _get_last_dirs ( rep , 2 ) n_all = self . _count_files_by_type ( rep , '*' ) n_type = np . zeros ( len ( self . _COUNT_FILE_TYPES ) , dtype = int ) for ii , ftype in enumerate ( self . _COUNT_FILE_TYPES ) : n_type [ ii ] = self . _count_files_by_type ( rep , '*.' + ftype ) n_ign = self . _count_files_by_type ( rep , '*' , ignore = False ) n_ign -= n_all f_str = self . _file_nums_str ( n_all , n_type , n_ign ) f_str = "{}: {}" . format ( last_path , f_str ) self . log . warning ( f_str ) num_files += n_all num_type += n_type num_ign += n_ign f_str = self . _file_nums_str ( num_files , num_type , num_ign ) self . log . warning ( f_str ) return num_files
Count the number of files in the data repositories .
5,319
def _file_nums_str ( self , n_all , n_type , n_ign ) : n_oth = n_all - np . sum ( n_type ) f_str = "{} Files" . format ( n_all ) + " (" if len ( n_type ) : f_str += ", " . join ( "{} {}" . format ( name , num ) for name , num in zip ( self . _COUNT_FILE_TYPES , n_type ) ) f_str += ", " f_str += "other {}; {} ignored)" . format ( n_oth , n_ign ) return f_str
Construct a string showing the number of different file types .
5,320
def _count_files_by_type ( self , path , pattern , ignore = True ) : files = glob ( os . path . join ( path , pattern ) ) files = [ ff for ff in files if os . path . split ( ff ) [ - 1 ] not in self . _IGNORE_FILES or not ignore ] num_files = len ( files ) return num_files
Count files in the given path with the given pattern .
5,321
def bibcode_from_url ( cls , url ) : try : code = url . split ( '/abs/' ) code = code [ 1 ] . strip ( ) return code except : return None
Given a URL, try to find the ADS bibcode.
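Illustrative behavior with a made-up URL; note that everything after '/abs/' is returned verbatim, so any trailing path segment (e.g. '/abstract') would be kept as well:

# On the enclosing class (its name is not shown above):
# bibcode_from_url("http://adsabs.harvard.edu/abs/2017ApJ...835...64G")
# -> '2017ApJ...835...64G'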
5,322
def _get_save_path ( self , bury = False ) : filename = self . get_filename ( self [ self . _KEYS . NAME ] ) if bury : outdir = self . catalog . get_repo_boneyard ( ) else : repo_folders = self . catalog . PATHS . get_repo_output_folders ( ) if not len ( repo_folders ) : err_str = ( "No output data repositories found. Cannot save.\n" "Make sure that repo names are correctly configured " "in the `input/repos.json` file, and either manually or " "automatically (using `astrocats CATALOG git-clone`) " "clone the appropriate data repositories." ) self . catalog . log . error ( err_str ) raise RuntimeError ( err_str ) outdir = repo_folders [ 0 ] return outdir , filename
Return the path that this Entry should be saved to .
5,323
def _ordered ( self , odict ) : ndict = OrderedDict ( ) if isinstance ( odict , CatDict ) or isinstance ( odict , Entry ) : key = odict . sort_func else : key = None nkeys = list ( sorted ( odict . keys ( ) , key = key ) ) for key in nkeys : if isinstance ( odict [ key ] , OrderedDict ) : odict [ key ] = self . _ordered ( odict [ key ] ) if isinstance ( odict [ key ] , list ) : if ( not ( odict [ key ] and not isinstance ( odict [ key ] [ 0 ] , OrderedDict ) ) ) : nlist = [ ] for item in odict [ key ] : if isinstance ( item , OrderedDict ) : nlist . append ( self . _ordered ( item ) ) else : nlist . append ( item ) odict [ key ] = nlist ndict [ key ] = odict [ key ] return ndict
Convert the object into a plain OrderedDict .
5,324
def get_hash ( self , keys = [ ] ) : if not len ( keys ) : keys = list ( self . keys ( ) ) string_rep = '' oself = self . _ordered ( deepcopy ( self ) ) for key in keys : string_rep += json . dumps ( oself . get ( key , '' ) , sort_keys = True ) return hashlib . sha512 ( string_rep . encode ( ) ) . hexdigest ( ) [ : 16 ]
Return a unique hash associated with the listed keys .
5,325
def _clean_quantity ( self , quantity ) : value = quantity . get ( QUANTITY . VALUE , '' ) . strip ( ) error = quantity . get ( QUANTITY . E_VALUE , '' ) . strip ( ) unit = quantity . get ( QUANTITY . U_VALUE , '' ) . strip ( ) kind = quantity . get ( QUANTITY . KIND , '' ) if isinstance ( kind , list ) and not isinstance ( kind , string_types ) : kind = [ x . strip ( ) for x in kind ] else : kind = kind . strip ( ) if not value : return False if is_number ( value ) : value = '%g' % Decimal ( value ) if error : error = '%g' % Decimal ( error ) if value : quantity [ QUANTITY . VALUE ] = value if error : quantity [ QUANTITY . E_VALUE ] = error if unit : quantity [ QUANTITY . U_VALUE ] = unit if kind : quantity [ QUANTITY . KIND ] = kind return True
Clean a quantity's value before it is added to an entry.
5,326
def _check_cat_dict_source ( self , cat_dict_class , key_in_self , ** kwargs ) : source = kwargs . get ( cat_dict_class . _KEYS . SOURCE , None ) if source is None : raise CatDictError ( "{}: `source` must be provided!" . format ( self [ self . _KEYS . NAME ] ) , warn = True ) for x in source . split ( ',' ) : if not is_integer ( x ) : raise CatDictError ( "{}: `source` is comma-delimited list of " " integers!" . format ( self [ self . _KEYS . NAME ] ) , warn = True ) if self . is_erroneous ( key_in_self , source ) : self . _log . info ( "This source is erroneous, skipping" ) return None if ( self . catalog . args is not None and not self . catalog . args . private and self . is_private ( key_in_self , source ) ) : self . _log . info ( "This source is private, skipping" ) return None return source
Check that a source exists and that a quantity isn't erroneous.
5,327
def _add_cat_dict ( self , cat_dict_class , key_in_self , check_for_dupes = True , compare_to_existing = True , ** kwargs ) : if cat_dict_class != Error : try : source = self . _check_cat_dict_source ( cat_dict_class , key_in_self , ** kwargs ) except CatDictError as err : if err . warn : self . _log . info ( "'{}' Not adding '{}': '{}'" . format ( self [ self . _KEYS . NAME ] , key_in_self , str ( err ) ) ) return False if source is None : return False new_entry = self . _init_cat_dict ( cat_dict_class , key_in_self , ** kwargs ) if new_entry is None : return False if compare_to_existing and cat_dict_class != Error : for item in self . get ( key_in_self , [ ] ) : if new_entry . is_duplicate_of ( item ) : item . append_sources_from ( new_entry ) return new_entry if key_in_self == self . _KEYS . ALIAS : if ( check_for_dupes and 'aliases' in dir ( self . catalog ) and new_entry [ QUANTITY . VALUE ] in self . catalog . aliases ) : possible_dupe = self . catalog . aliases [ new_entry [ QUANTITY . VALUE ] ] if ( possible_dupe != self [ self . _KEYS . NAME ] and possible_dupe in self . catalog . entries ) : self . dupe_of . append ( possible_dupe ) if 'aliases' in dir ( self . catalog ) : self . catalog . aliases [ new_entry [ QUANTITY . VALUE ] ] = self [ self . _KEYS . NAME ] self . setdefault ( key_in_self , [ ] ) . append ( new_entry ) if ( key_in_self == self . _KEYS . ALIAS and check_for_dupes and self . dupe_of ) : self . merge_dupes ( ) return True
Add a CatDict to this Entry .
5,328
def init_from_file ( cls , catalog , name = None , path = None , clean = False , merge = True , pop_schema = True , ignore_keys = [ ] , compare_to_existing = True , try_gzip = False , filter_on = { } ) : if not catalog : from astrocats . catalog . catalog import Catalog log = logging . getLogger ( ) catalog = Catalog ( None , log ) catalog . log . debug ( "init_from_file()" ) if name is None and path is None : err = ( "Either entry `name` or `path` must be specified to load " "entry." ) log . error ( err ) raise ValueError ( err ) load_path = '' if path is not None : load_path = path name = '' else : repo_paths = catalog . PATHS . get_repo_output_folders ( ) for rep in repo_paths : filename = cls . get_filename ( name ) newpath = os . path . join ( rep , filename + '.json' ) if os . path . isfile ( newpath ) : load_path = newpath break if load_path is None or not os . path . isfile ( load_path ) : return None new_entry = cls ( catalog , name ) if try_gzip and not load_path . endswith ( '.gz' ) : try_gzip = False new_entry . _load_data_from_json ( load_path , clean = clean , merge = merge , pop_schema = pop_schema , ignore_keys = ignore_keys , compare_to_existing = compare_to_existing , gzip = try_gzip , filter_on = filter_on ) return new_entry
Construct a new Entry instance from an input file .
5,329
def add_alias ( self , alias , source , clean = True ) : if clean : alias = self . catalog . clean_entry_name ( alias ) self . add_quantity ( self . _KEYS . ALIAS , alias , source ) return alias
Add an alias, optionally cleaning the alias string.
5,330
def add_error ( self , value , ** kwargs ) : kwargs . update ( { ERROR . VALUE : value } ) self . _add_cat_dict ( Error , self . _KEYS . ERRORS , ** kwargs ) return
Add an Error instance to this entry .
5,331
def add_photometry ( self , compare_to_existing = True , ** kwargs ) : self . _add_cat_dict ( Photometry , self . _KEYS . PHOTOMETRY , compare_to_existing = compare_to_existing , ** kwargs ) return
Add a Photometry instance to this entry .
5,332
def merge_dupes ( self ) : for dupe in self . dupe_of : if dupe in self . catalog . entries : if self . catalog . entries [ dupe ] . _stub : self . catalog . load_entry_from_name ( dupe , delete = True , merge = False ) self . catalog . copy_entry_to_entry ( self . catalog . entries [ dupe ] , self ) del self . catalog . entries [ dupe ] self . dupe_of = [ ]
Merge two entries that correspond to the same entry .
5,333
def add_quantity ( self , quantities , value , source , check_for_dupes = True , compare_to_existing = True , ** kwargs ) : success = True for quantity in listify ( quantities ) : kwargs . update ( { QUANTITY . VALUE : value , QUANTITY . SOURCE : source } ) cat_dict = self . _add_cat_dict ( Quantity , quantity , compare_to_existing = compare_to_existing , check_for_dupes = check_for_dupes , ** kwargs ) if isinstance ( cat_dict , CatDict ) : self . _append_additional_tags ( quantity , source , cat_dict ) success = False return success
Add a Quantity instance to this entry.
5,334
def add_self_source ( self ) : return self . add_source ( bibcode = self . catalog . OSC_BIBCODE , name = self . catalog . OSC_NAME , url = self . catalog . OSC_URL , secondary = True )
Add a source that refers to the catalog itself .
5,335
def add_source ( self , allow_alias = False , ** kwargs ) : if not allow_alias and SOURCE . ALIAS in kwargs : err_str = "`{}` passed in kwargs, this shouldn't happen!" . format ( SOURCE . ALIAS ) self . _log . error ( err_str ) raise RuntimeError ( err_str ) if SOURCE . ALIAS not in kwargs : kwargs [ SOURCE . ALIAS ] = str ( self . num_sources ( ) + 1 ) source_obj = self . _init_cat_dict ( Source , self . _KEYS . SOURCES , ** kwargs ) if source_obj is None : return None for item in self . get ( self . _KEYS . SOURCES , '' ) : if source_obj . is_duplicate_of ( item ) : return item [ item . _KEYS . ALIAS ] self . setdefault ( self . _KEYS . SOURCES , [ ] ) . append ( source_obj ) return source_obj [ source_obj . _KEYS . ALIAS ]
Add a Source instance to this entry .
5,336
def add_model ( self , allow_alias = False , ** kwargs ) : if not allow_alias and MODEL . ALIAS in kwargs : err_str = "`{}` passed in kwargs, this shouldn't happen!" . format ( SOURCE . ALIAS ) self . _log . error ( err_str ) raise RuntimeError ( err_str ) if MODEL . ALIAS not in kwargs : kwargs [ MODEL . ALIAS ] = str ( self . num_models ( ) + 1 ) model_obj = self . _init_cat_dict ( Model , self . _KEYS . MODELS , ** kwargs ) if model_obj is None : return None for item in self . get ( self . _KEYS . MODELS , '' ) : if model_obj . is_duplicate_of ( item ) : return item [ item . _KEYS . ALIAS ] self . setdefault ( self . _KEYS . MODELS , [ ] ) . append ( model_obj ) return model_obj [ model_obj . _KEYS . ALIAS ]
Add a Model instance to this entry .
5,337
def add_spectrum ( self , compare_to_existing = True , ** kwargs ) : spec_key = self . _KEYS . SPECTRA source = self . _check_cat_dict_source ( Spectrum , spec_key , ** kwargs ) if source is None : return None new_spectrum = self . _init_cat_dict ( Spectrum , spec_key , ** kwargs ) if new_spectrum is None : return None is_dupe = False for item in self . get ( spec_key , [ ] ) : if new_spectrum . is_duplicate_of ( item ) : if SPECTRUM . EXCLUDE in new_spectrum : item [ SPECTRUM . EXCLUDE ] = new_spectrum [ SPECTRUM . EXCLUDE ] elif SPECTRUM . EXCLUDE in item : item . update ( new_spectrum ) is_dupe = True break if not is_dupe : self . setdefault ( spec_key , [ ] ) . append ( new_spectrum ) return
Add a Spectrum instance to this entry .
5,338
def check ( self ) : if self . _KEYS . SCHEMA not in self : self [ self . _KEYS . SCHEMA ] = self . catalog . SCHEMA . URL if ( self . _KEYS . NAME not in self or len ( self [ self . _KEYS . NAME ] ) == 0 ) : raise ValueError ( "Entry name is empty:\n\t{}" . format ( json . dumps ( self , indent = 2 ) ) ) return
Check that the entry has the required fields .
5,339
def get_aliases ( self , includename = True ) : alias_quanta = self . get ( self . _KEYS . ALIAS , [ ] ) aliases = [ aq [ QUANTITY . VALUE ] for aq in alias_quanta ] if includename and self [ self . _KEYS . NAME ] not in aliases : aliases = [ self [ self . _KEYS . NAME ] ] + aliases return aliases
Retrieve the aliases of this object as a list of strings .
5,340
def get_entry_text ( self , fname ) : if fname . split ( '.' ) [ - 1 ] == 'gz' : with gz . open ( fname , 'rt' ) as f : filetext = f . read ( ) else : with codecs . open ( fname , 'r' ) as f : filetext = f . read ( ) return filetext
Retrieve the raw text from a file .
5,341
def get_source_by_alias ( self , alias ) : for source in self . get ( self . _KEYS . SOURCES , [ ] ) : if source [ self . _KEYS . ALIAS ] == alias : return source raise ValueError ( "Source '{}': alias '{}' not found!" . format ( self [ self . _KEYS . NAME ] , alias ) )
Given an alias find the corresponding source in this entry .
5,342
def get_stub ( self ) : stub = type ( self ) ( self . catalog , self [ self . _KEYS . NAME ] , stub = True ) if self . _KEYS . ALIAS in self : stub [ self . _KEYS . ALIAS ] = self [ self . _KEYS . ALIAS ] if self . _KEYS . DISTINCT_FROM in self : stub [ self . _KEYS . DISTINCT_FROM ] = self [ self . _KEYS . DISTINCT_FROM ] if self . _KEYS . RA in self : stub [ self . _KEYS . RA ] = self [ self . _KEYS . RA ] if self . _KEYS . DEC in self : stub [ self . _KEYS . DEC ] = self [ self . _KEYS . DEC ] if self . _KEYS . DISCOVER_DATE in self : stub [ self . _KEYS . DISCOVER_DATE ] = self [ self . _KEYS . DISCOVER_DATE ] if self . _KEYS . SOURCES in self : stub [ self . _KEYS . SOURCES ] = self [ self . _KEYS . SOURCES ] return stub
Get a new Entry which contains the stub of this one .
5,343
def is_erroneous ( self , field , sources ) : if self . _KEYS . ERRORS in self : my_errors = self [ self . _KEYS . ERRORS ] for alias in sources . split ( ',' ) : source = self . get_source_by_alias ( alias ) bib_err_values = [ err [ ERROR . VALUE ] for err in my_errors if err [ ERROR . KIND ] == SOURCE . BIBCODE and err [ ERROR . EXTRA ] == field ] if ( SOURCE . BIBCODE in source and source [ SOURCE . BIBCODE ] in bib_err_values ) : return True name_err_values = [ err [ ERROR . VALUE ] for err in my_errors if err [ ERROR . KIND ] == SOURCE . NAME and err [ ERROR . EXTRA ] == field ] if ( SOURCE . NAME in source and source [ SOURCE . NAME ] in name_err_values ) : return True return False
Check if attribute has been marked as being erroneous .
5,344
def is_private ( self , key , sources ) : if key == ENTRY . ALIAS : return False return all ( [ SOURCE . PRIVATE in self . get_source_by_alias ( x ) for x in sources . split ( ',' ) ] )
Check if attribute is private .
5,345
def save ( self , bury = False , final = False ) : outdir , filename = self . _get_save_path ( bury = bury ) if final : self . sanitize ( ) jsonstring = json . dumps ( { self [ self . _KEYS . NAME ] : self . _ordered ( self ) } , indent = '\t' if sys . version_info [ 0 ] >= 3 else 4 , separators = ( ',' , ':' ) , ensure_ascii = False ) if not os . path . isdir ( outdir ) : raise RuntimeError ( "Output directory '{}' for event '{}' does " "not exist." . format ( outdir , self [ self . _KEYS . NAME ] ) ) save_name = os . path . join ( outdir , filename + '.json' ) with codecs . open ( save_name , 'w' , encoding = 'utf8' ) as sf : sf . write ( jsonstring ) if not os . path . exists ( save_name ) : raise RuntimeError ( "File '{}' was not saved!" . format ( save_name ) ) return save_name
Write entry to JSON file in the proper location .
5,346
def sort_func ( self , key ) : if key == self . _KEYS . SCHEMA : return 'aaa' if key == self . _KEYS . NAME : return 'aab' if key == self . _KEYS . SOURCES : return 'aac' if key == self . _KEYS . ALIAS : return 'aad' if key == self . _KEYS . MODELS : return 'aae' if key == self . _KEYS . PHOTOMETRY : return 'zzy' if key == self . _KEYS . SPECTRA : return 'zzz' return key
Used to sort keys when writing Entry to JSON format .
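A sketch of how this sort key behaves with sorted(); entry is a hypothetical instance, and the field names are assumed to match the _KEYS constants used above. Pinned keys come first, photometry and spectra last, everything else alphabetical in between:

keys = ["photometry", "redshift", "name", "alias", "sources", "schema"]
sorted(keys, key=entry.sort_func)
# -> ['schema', 'name', 'sources', 'alias', 'redshift', 'photometry']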
5,347
def set_pd_mag_from_counts ( photodict , c = '' , ec = '' , lec = '' , uec = '' , zp = DEFAULT_ZP , sig = DEFAULT_UL_SIGMA ) : with localcontext ( ) as ctx : if lec == '' or uec == '' : lec = ec uec = ec prec = max ( get_sig_digits ( str ( c ) , strip_zeroes = False ) , get_sig_digits ( str ( lec ) , strip_zeroes = False ) , get_sig_digits ( str ( uec ) , strip_zeroes = False ) ) + 1 ctx . prec = prec dlec = Decimal ( str ( lec ) ) duec = Decimal ( str ( uec ) ) if c != '' : dc = Decimal ( str ( c ) ) dzp = Decimal ( str ( zp ) ) dsig = Decimal ( str ( sig ) ) photodict [ PHOTOMETRY . ZERO_POINT ] = str ( zp ) if c == '' or float ( c ) < float ( sig ) * float ( uec ) : photodict [ PHOTOMETRY . UPPER_LIMIT ] = True photodict [ PHOTOMETRY . UPPER_LIMIT_SIGMA ] = str ( sig ) photodict [ PHOTOMETRY . MAGNITUDE ] = str ( dzp - ( D25 * ( dsig * duec ) . log10 ( ) ) ) dnec = Decimal ( '10.0' ) ** ( ( dzp - Decimal ( photodict [ PHOTOMETRY . MAGNITUDE ] ) ) / D25 ) photodict [ PHOTOMETRY . E_UPPER_MAGNITUDE ] = str ( D25 * ( ( dnec + duec ) . log10 ( ) - dnec . log10 ( ) ) ) else : photodict [ PHOTOMETRY . MAGNITUDE ] = str ( dzp - D25 * dc . log10 ( ) ) photodict [ PHOTOMETRY . E_UPPER_MAGNITUDE ] = str ( D25 * ( ( dc + duec ) . log10 ( ) - dc . log10 ( ) ) ) photodict [ PHOTOMETRY . E_LOWER_MAGNITUDE ] = str ( D25 * ( dc . log10 ( ) - ( dc - dlec ) . log10 ( ) ) )
Set photometry dictionary from a counts measurement .
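The conversion above implements the usual Pogson relation, assuming D25 is Decimal('2.5'): m = zp - 2.5 log10(counts) for detections, and m = zp - 2.5 log10(sig * upper_error) for upper limits. A quick float check of the detection case with made-up numbers:

import math
zp, counts = 25.0, 1500.0
mag = zp - 2.5 * math.log10(counts)  # ~= 17.06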
5,348
def set_pd_mag_from_flux_density ( photodict , fd = '' , efd = '' , lefd = '' , uefd = '' , sig = DEFAULT_UL_SIGMA ) : with localcontext ( ) as ctx : if lefd == '' or uefd == '' : lefd = efd uefd = efd prec = max ( get_sig_digits ( str ( fd ) , strip_zeroes = False ) , get_sig_digits ( str ( lefd ) , strip_zeroes = False ) , get_sig_digits ( str ( uefd ) , strip_zeroes = False ) ) + 1 ctx . prec = prec dlefd = Decimal ( str ( lefd ) ) duefd = Decimal ( str ( uefd ) ) if fd != '' : dfd = Decimal ( str ( fd ) ) dsig = Decimal ( str ( sig ) ) if fd == '' or float ( fd ) < DEFAULT_UL_SIGMA * float ( uefd ) : photodict [ PHOTOMETRY . UPPER_LIMIT ] = True photodict [ PHOTOMETRY . UPPER_LIMIT_SIGMA ] = str ( sig ) photodict [ PHOTOMETRY . MAGNITUDE ] = str ( Decimal ( '23.9' ) - D25 * ( dsig * duefd ) . log10 ( ) ) if fd : photodict [ PHOTOMETRY . E_UPPER_MAGNITUDE ] = str ( D25 * ( ( dfd + duefd ) . log10 ( ) - dfd . log10 ( ) ) ) else : photodict [ PHOTOMETRY . MAGNITUDE ] = str ( Decimal ( '23.9' ) - D25 * dfd . log10 ( ) ) photodict [ PHOTOMETRY . E_UPPER_MAGNITUDE ] = str ( D25 * ( ( dfd + duefd ) . log10 ( ) - dfd . log10 ( ) ) ) photodict [ PHOTOMETRY . E_LOWER_MAGNITUDE ] = str ( D25 * ( dfd . log10 ( ) - ( dfd - dlefd ) . log10 ( ) ) )
Set photometry dictionary from a flux density measurement .
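The hard-coded 23.9 above matches the AB-magnitude zero point for flux densities in microjanskys (m_AB = 8.90 - 2.5 log10(f/Jy), hence +15 when f is in µJy); the unit is an inference, not stated in the code. An illustrative float check:

import math
fd_ujy = 100.0  # 100 µJy, made-up value
mag = 23.9 - 2.5 * math.log10(fd_ujy)  # ~= 18.9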
5,349
def _check ( self ) : super ( Photometry , self ) . _check ( ) err_str = None has_flux = self . _KEYS . FLUX in self has_flux_dens = self . _KEYS . FLUX_DENSITY in self has_u_flux = self . _KEYS . U_FLUX in self has_u_flux_dens = self . _KEYS . U_FLUX_DENSITY in self has_freq = self . _KEYS . FREQUENCY in self has_band = self . _KEYS . BAND in self has_ener = self . _KEYS . ENERGY in self has_u_freq = self . _KEYS . U_FREQUENCY in self has_u_ener = self . _KEYS . U_ENERGY in self if has_flux or has_flux_dens : if not any ( [ has_freq , has_band , has_ener ] ) : err_str = ( "Has `{}` or `{}`" . format ( self . _KEYS . FLUX , self . _KEYS . FLUX_DENSITY ) + " but None of `{}`, `{}`, `{}`" . format ( self . _KEYS . FREQUENCY , self . _KEYS . BAND , self . _KEYS . ENERGY ) ) elif has_flux and not has_u_flux : err_str = "`{}` provided without `{}`." . format ( self . _KEYS . FLUX , self . _KEYS . U_FLUX ) elif has_flux_dens and not has_u_flux_dens : err_str = "`{}` provided without `{}`." . format ( self . _KEYS . FLUX_DENSITY , self . _KEYS . U_FLUX_DENSITY ) elif has_freq and not has_u_freq : err_str = "`{}` provided without `{}`." . format ( self . _KEYS . FREQUENCY , self . _KEYS . U_FREQUENCY ) elif has_ener and not has_u_ener : err_str = "`{}` provided without `{}`." . format ( self . _KEYS . ENERGY , self . _KEYS . U_ENERGY ) if err_str is not None : raise ValueError ( err_str ) return
Check that entry attributes are legal .
5,350
def sort_func ( self , key ) : if key == self . _KEYS . TIME : return 'aaa' if key == self . _KEYS . MODEL : return 'zzy' if key == self . _KEYS . SOURCE : return 'zzz' return key
Specify order for attributes .
5,351
def by_resource_user_and_perm ( cls , user_id , perm_name , resource_id , db_session = None ) : db_session = get_db_session ( db_session ) query = db_session . query ( cls . model ) . filter ( cls . model . user_id == user_id ) query = query . filter ( cls . model . resource_id == resource_id ) query = query . filter ( cls . model . perm_name == perm_name ) return query . first ( )
Return all instances by user name, permission name and resource id.
5,352
def tdSensor ( self ) : protocol = create_string_buffer ( 20 ) model = create_string_buffer ( 20 ) sid = c_int ( ) datatypes = c_int ( ) self . _lib . tdSensor ( protocol , sizeof ( protocol ) , model , sizeof ( model ) , byref ( sid ) , byref ( datatypes ) ) return { 'protocol' : self . _to_str ( protocol ) , 'model' : self . _to_str ( model ) , 'id' : sid . value , 'datatypes' : datatypes . value }
Get the next sensor while iterating .
5,353
def tdSensorValue ( self , protocol , model , sid , datatype ) : value = create_string_buffer ( 20 ) timestamp = c_int ( ) self . _lib . tdSensorValue ( protocol , model , sid , datatype , value , sizeof ( value ) , byref ( timestamp ) ) return { 'value' : self . _to_str ( value ) , 'timestamp' : timestamp . value }
Get the sensor value for a given sensor .
5,354
def tdController ( self ) : cid = c_int ( ) ctype = c_int ( ) name = create_string_buffer ( 255 ) available = c_int ( ) self . _lib . tdController ( byref ( cid ) , byref ( ctype ) , name , sizeof ( name ) , byref ( available ) ) return { 'id' : cid . value , 'type' : ctype . value , 'name' : self . _to_str ( name ) , 'available' : available . value }
Get the next controller while iterating .
5,355
def ziggurat_model_init ( user = None , group = None , user_group = None , group_permission = None , user_permission = None , user_resource_permission = None , group_resource_permission = None , resource = None , external_identity = None , * args , ** kwargs ) : models = ModelProxy ( ) models . User = user models . Group = group models . UserGroup = user_group models . GroupPermission = group_permission models . UserPermission = user_permission models . UserResourcePermission = user_resource_permission models . GroupResourcePermission = group_resource_permission models . Resource = resource models . ExternalIdentity = external_identity model_service_mapping = import_model_service_mappings ( ) if kwargs . get ( "passwordmanager" ) : user . passwordmanager = kwargs [ "passwordmanager" ] else : user . passwordmanager = make_passwordmanager ( kwargs . get ( "passwordmanager_schemes" ) ) for name , cls in models . items ( ) : services = model_service_mapping . get ( name , [ ] ) for service in services : setattr ( service , "model" , cls ) setattr ( service , "models_proxy" , models )
This function handles attaching a model to a service if the model has one specified as _ziggurat_service. It also attaches a proxy object holding all model definitions that services might use.
5,356
def messages ( request , year = None , month = None , day = None , template = "gnotty/messages.html" ) : query = request . REQUEST . get ( "q" ) prev_url , next_url = None , None messages = IRCMessage . objects . all ( ) if hide_joins_and_leaves ( request ) : messages = messages . filter ( join_or_leave = False ) if query : search = Q ( message__icontains = query ) | Q ( nickname__icontains = query ) messages = messages . filter ( search ) . order_by ( "-message_time" ) elif year and month and day : messages = messages . filter ( message_time__year = year , message_time__month = month , message_time__day = day ) day_delta = timedelta ( days = 1 ) this_date = date ( int ( year ) , int ( month ) , int ( day ) ) prev_date = this_date - day_delta next_date = this_date + day_delta prev_url = reverse ( "gnotty_day" , args = prev_date . timetuple ( ) [ : 3 ] ) next_url = reverse ( "gnotty_day" , args = next_date . timetuple ( ) [ : 3 ] ) else : return redirect ( "gnotty_year" , year = datetime . now ( ) . year ) context = dict ( settings ) context [ "messages" ] = messages context [ "prev_url" ] = prev_url context [ "next_url" ] = next_url return render ( request , template , context )
Show messages for the given query or day .
5,357
def delete_expired_locks ( self ) : ttl_seconds = self . get_mutex_ttl_seconds ( ) if ttl_seconds is not None : DBMutex . objects . filter ( creation_time__lte = timezone . now ( ) - timedelta ( seconds = ttl_seconds ) ) . delete ( )
Deletes all expired mutex locks if a ttl is provided .
5,358
def start ( self ) : self . delete_expired_locks ( ) try : with transaction . atomic ( ) : self . lock = DBMutex . objects . create ( lock_id = self . lock_id ) except IntegrityError : raise DBMutexError ( 'Could not acquire lock: {0}' . format ( self . lock_id ) )
Acquires the db mutex lock. Takes the necessary steps to delete any stale locks. Throws a DBMutexError if it can't acquire the lock.
5,359
def stop ( self ) : if not DBMutex . objects . filter ( id = self . lock . id ) . exists ( ) : raise DBMutexTimeoutError ( 'Lock {0} expired before function completed' . format ( self . lock_id ) ) else : self . lock . delete ( )
Releases the db mutex lock . Throws an error if the lock was released before the function finished .
5,360
def decorate_callable ( self , func ) : def wrapper ( * args , ** kwargs ) : try : with self : result = func ( * args , ** kwargs ) return result except DBMutexError as e : if self . suppress_acquisition_exceptions : LOG . error ( e ) else : raise e functools . update_wrapper ( wrapper , func ) return wrapper
Decorates a function with the db_mutex decorator by using this class as a context manager around it .
5,361
def groupfinder ( userid , request ) : if userid and hasattr ( request , "user" ) and request . user : groups = [ "group:%s" % g . id for g in request . user . groups ] return groups return [ ]
Default groupfinder implementation for Pyramid applications.
5,362
def apply_repulsion ( repulsion , nodes , barnes_hut_optimize = False , region = None , barnes_hut_theta = 1.2 ) : if not barnes_hut_optimize : for i in range ( 0 , len ( nodes ) ) : for j in range ( 0 , i ) : repulsion . apply_node_to_node ( nodes [ i ] , nodes [ j ] ) else : for i in range ( 0 , len ( nodes ) ) : region . apply_force ( nodes [ i ] , repulsion , barnes_hut_theta )
Iterate through the nodes or edges and apply the forces directly to the node objects .
5,363
def apply_gravity ( repulsion , nodes , gravity , scaling_ratio ) : for i in range ( 0 , len ( nodes ) ) : repulsion . apply_gravitation ( nodes [ i ] , gravity / scaling_ratio )
Iterate through the nodes or edges and apply the gravity directly to the node objects .
5,364
def by_external_id_and_provider ( cls , external_id , provider_name , db_session = None ) : db_session = get_db_session ( db_session ) query = db_session . query ( cls . model ) query = query . filter ( cls . model . external_id == external_id ) query = query . filter ( cls . model . provider_name == provider_name ) return query . first ( )
Returns ExternalIdentity instance based on search params
5,365
def user_by_external_id_and_provider ( cls , external_id , provider_name , db_session = None ) : db_session = get_db_session ( db_session ) query = db_session . query ( cls . models_proxy . User ) query = query . filter ( cls . model . external_id == external_id ) query = query . filter ( cls . model . provider_name == provider_name ) query = query . filter ( cls . models_proxy . User . id == cls . model . local_user_id ) return query . first ( )
Returns User instance based on search params
5,366
def by_user_and_perm ( cls , user_id , perm_name , db_session = None ) : db_session = get_db_session ( db_session ) query = db_session . query ( cls . model ) . filter ( cls . model . user_id == user_id ) query = query . filter ( cls . model . perm_name == perm_name ) return query . first ( )
Return by user and permission name.
5,367
def node_is_subclass ( cls , * subclass_names ) : if not isinstance ( cls , ( ClassDef , Instance ) ) : return False for base_cls in cls . bases : try : for inf in base_cls . inferred ( ) : if inf . qname ( ) in subclass_names : return True if inf != cls and node_is_subclass ( inf , * subclass_names ) : return True except InferenceError : continue return False
Checks if cls node has parent with subclass_name .
5,368
def is_field_method ( node ) : name = node . attrname parent = node . last_child ( ) inferred = safe_infer ( parent ) if not inferred : return False for cls_name , inst in FIELD_TYPES . items ( ) : if node_is_instance ( inferred , cls_name ) and hasattr ( inst , name ) : return True return False
Checks if a call to a field instance method is valid. A call is valid if it is a method of the underlying type: in a StringField the methods from str are valid, in a ListField the methods from list are valid, and so on.
5,369
def get_node_parent_class ( node ) : while node . parent : if isinstance ( node , ClassDef ) : return node node = node . parent
Assumes that node is a mongoengine field in a class and tries to get its parent class.
5,370
def get_field_definition ( node ) : name = node . attrname cls = get_node_parent_class ( node ) definition = cls . lookup ( name ) [ 1 ] [ 0 ] . statement ( ) return definition
node is a class attribute that is a mongoengine field. Returns the definition statement for the attribute.
5,371
def get_field_embedded_doc ( node ) : definition = get_field_definition ( node ) cls_name = definition . last_child ( ) . last_child ( ) cls = next ( cls_name . infer ( ) ) return cls
Returns the ClassDef for the related embedded document in an embedded document field.
5,372
def node_is_embedded_doc_attr ( node ) : embedded_doc = get_field_embedded_doc ( node . last_child ( ) ) name = node . attrname try : r = bool ( embedded_doc . lookup ( name ) [ 1 ] [ 0 ] ) except IndexError : r = False return r
Checks if a node is a valid field or method in a embedded document .
5,373
def _dispatcher ( self , connection , event ) : super ( BaseBot , self ) . _dispatcher ( connection , event ) for handler in self . events [ event . eventtype ( ) ] : handler ( self , connection , event )
This is the method in SimpleIRCClient that all IRC events get passed through . Here we map events to our own custom event handlers and call them .
5,374
def message_channel ( self , message ) : self . log ( None , message ) super ( BaseBot , self ) . message_channel ( message )
We won't receive our own messages, so log them manually.
5,375
def on_pubmsg ( self , connection , event ) : for message in event . arguments ( ) : self . log ( event , message ) command_args = filter ( None , message . split ( ) ) command_name = command_args . pop ( 0 ) for handler in self . events [ "command" ] : if handler . event . args [ "command" ] == command_name : self . handle_command_event ( event , handler , command_args )
Log any public messages and also handle the command event .
5,376
def handle_command_event ( self , event , command , args ) : argspec = getargspec ( command ) num_all_args = len ( argspec . args ) - 2 num_pos_args = num_all_args - len ( argspec . defaults or [ ] ) if num_pos_args <= len ( args ) <= num_all_args : response = command ( self , event , * args ) elif num_all_args == num_pos_args : s = "s are" if num_all_args != 1 else " is" response = "%s arg%s required" % ( num_all_args , s ) else : bits = ( num_pos_args , num_all_args ) response = "between %s and %s args are required" % bits response = "%s: %s" % ( self . get_nickname ( event ) , response ) self . message_channel ( response )
Command handler - treats each word in the message that triggered the command as an argument to the command and does some validation to ensure that the number of arguments match .
5,377
def handle_timer_event ( self , handler ) : while True : handler ( self ) sleep ( handler . event . args [ "seconds" ] )
Runs each timer handler in a separate greenlet thread .
5,378
def handle_webhook_event ( self , environ , url , params ) : for handler in self . events [ "webhook" ] : urlpattern = handler . event . args [ "urlpattern" ] if not urlpattern or match ( urlpattern , url ) : response = handler ( self , environ , url , params ) if response : return response
Webhook handler - each handler for the webhook event takes an initial pattern argument for matching the URL requested . Here we match the URL to the pattern for each webhook handler and bail out if it returns a response .
5,379
def DeviceFactory ( id , lib = None ) : lib = lib or Library ( ) if lib . tdGetDeviceType ( id ) == const . TELLSTICK_TYPE_GROUP : return DeviceGroup ( id , lib = lib ) return Device ( id , lib = lib )
Create the correct device instance based on device type and return it .
5,380
def process_callback ( self , block = True ) : try : ( callback , args ) = self . _queue . get ( block = block ) try : callback ( * args ) finally : self . _queue . task_done ( ) except queue . Empty : return False return True
Dispatch a single callback in the current thread .
5,381
def devices ( self ) : devices = [ ] count = self . lib . tdGetNumberOfDevices ( ) for i in range ( count ) : device = DeviceFactory ( self . lib . tdGetDeviceId ( i ) , lib = self . lib ) devices . append ( device ) return devices
Return all known devices .
5,382
def sensors ( self ) : sensors = [ ] try : while True : sensor = self . lib . tdSensor ( ) sensors . append ( Sensor ( lib = self . lib , ** sensor ) ) except TelldusError as e : if e . error != const . TELLSTICK_ERROR_DEVICE_NOT_FOUND : raise return sensors
Return all known sensors .
5,383
def controllers ( self ) : controllers = [ ] try : while True : controller = self . lib . tdController ( ) del controller [ "name" ] del controller [ "available" ] controllers . append ( Controller ( lib = self . lib , ** controller ) ) except TelldusError as e : if e . error != const . TELLSTICK_ERROR_NOT_FOUND : raise return controllers
Return all known controllers .
5,384
def add_device ( self , name , protocol , model = None , ** parameters ) : device = Device ( self . lib . tdAddDevice ( ) , lib = self . lib ) try : device . name = name device . protocol = protocol if model : device . model = model for key , value in parameters . items ( ) : device . set_parameter ( key , value ) return DeviceFactory ( device . id , lib = self . lib ) except Exception : import sys exc_info = sys . exc_info ( ) try : device . remove ( ) except : pass if "with_traceback" in dir ( Exception ) : raise exc_info [ 0 ] . with_traceback ( exc_info [ 1 ] , exc_info [ 2 ] ) else : exec ( "raise exc_info[0], exc_info[1], exc_info[2]" )
Add a new device .
5,385
def add_group ( self , name , devices ) : device = self . add_device ( name , "group" ) device . add_to_group ( devices ) return device
Add a new device group .
5,386
def connect_controller ( self , vid , pid , serial ) : self . lib . tdConnectTellStickController ( vid , pid , serial )
Connect a controller .
5,387
def disconnect_controller ( self , vid , pid , serial ) : self . lib . tdDisconnectTellStickController ( vid , pid , serial )
Disconnect a controller .
5,388
def parameters ( self ) : parameters = { } for name in self . PARAMETERS : try : parameters [ name ] = self . get_parameter ( name ) except AttributeError : pass return parameters
Get dict with all set parameters .
5,389
def get_parameter ( self , name ) : default_value = "$%!)(INVALID)(!%$" value = self . lib . tdGetDeviceParameter ( self . id , name , default_value ) if value == default_value : raise AttributeError ( name ) return value
Get a parameter .
5,390
def set_parameter ( self , name , value ) : self . lib . tdSetDeviceParameter ( self . id , name , str ( value ) )
Set a parameter .
5,391
def devices_in_group ( self ) : try : devices = self . get_parameter ( 'devices' ) except AttributeError : return [ ] ctor = DeviceFactory return [ ctor ( int ( x ) , lib = self . lib ) for x in devices . split ( ',' ) if x ]
Fetch list of devices in group .
5,392
def _prepPointsForSegments ( points ) : while 1 : point = points [ - 1 ] if point . segmentType : break else : point = points . pop ( ) points . insert ( 0 , point ) continue break
Move any off curves at the end of the contour to the beginning of the contour . This makes segmentation easier .
5,393
def _reversePoints ( points ) : points = _copyPoints ( points ) firstOnCurve = None for index , point in enumerate ( points ) : if point . segmentType is not None : firstOnCurve = index break lastSegmentType = points [ firstOnCurve ] . segmentType points = reversed ( points ) final = [ ] for point in points : segmentType = point . segmentType if segmentType is not None : point . segmentType = lastSegmentType lastSegmentType = segmentType final . append ( point ) _prepPointsForSegments ( final ) return final
Reverse the points. This differs from the reversal point pen in RoboFab in that it doesn't worry about maintaining the start point position. That has no benefit within the context of this module.
5,394
def _convertPointsToSegments ( points , willBeReversed = False ) : previousOnCurve = None for point in reversed ( points ) : if point . segmentType is not None : previousOnCurve = point . coordinates break assert previousOnCurve is not None offCurves = [ ] segments = [ ] for point in points : if point . segmentType is None : offCurves . append ( point ) else : segment = InputSegment ( points = offCurves + [ point ] , previousOnCurve = previousOnCurve , willBeReversed = willBeReversed ) segments . append ( segment ) offCurves = [ ] previousOnCurve = point . coordinates assert not offCurves return segments
Compile points into InputSegment objects .
5,395
def _tValueForPointOnCubicCurve ( point , cubicCurve , isHorizontal = 0 ) : pt1 , pt2 , pt3 , pt4 = cubicCurve a , b , c , d = bezierTools . calcCubicParameters ( pt1 , pt2 , pt3 , pt4 ) solutions = bezierTools . solveCubic ( a [ isHorizontal ] , b [ isHorizontal ] , c [ isHorizontal ] , d [ isHorizontal ] - point [ isHorizontal ] ) solutions = [ t for t in solutions if 0 <= t < 1 ] if not solutions and not isHorizontal : return _tValueForPointOnCubicCurve ( point , ( pt1 , pt2 , pt3 , pt4 ) , isHorizontal = 1 ) if len ( solutions ) > 1 : intersectionLenghts = { } for t in solutions : tp = _getCubicPoint ( t , pt1 , pt2 , pt3 , pt4 ) dist = _distance ( tp , point ) intersectionLenghts [ dist ] = t minDist = min ( intersectionLenghts . keys ( ) ) solutions = [ intersectionLenghts [ minDist ] ] return solutions
Finds a t value on a curve from a point. The point must originally be a point on the curve. This will only back-trace the t value needed to split the curve into parts.
5,396
def _scalePoints ( points , scale = 1 , convertToInteger = True ) : if convertToInteger : points = [ ( int ( round ( x * scale ) ) , int ( round ( y * scale ) ) ) for ( x , y ) in points ] else : points = [ ( x * scale , y * scale ) for ( x , y ) in points ] return points
Scale points and optionally convert them to integers .
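Illustrative call with made-up coordinates:

_scalePoints([(1.2, 3.7), (-0.4, 2.0)], scale=10)
# -> [(12, 37), (-4, 20)]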
5,397
def _scaleSinglePoint ( point , scale = 1 , convertToInteger = True ) : x , y = point if convertToInteger : return int ( round ( x * scale ) ) , int ( round ( y * scale ) ) else : return ( x * scale , y * scale )
Scale a single point
5,398
def _estimateCubicCurveLength ( pt0 , pt1 , pt2 , pt3 , precision = 10 ) : points = [ ] length = 0 step = 1.0 / precision factors = range ( 0 , precision + 1 ) for i in factors : points . append ( _getCubicPoint ( i * step , pt0 , pt1 , pt2 , pt3 ) ) for i in range ( len ( points ) - 1 ) : pta = points [ i ] ptb = points [ i + 1 ] length += _distance ( pta , ptb ) return length
Estimate the length of this curve by iterating through it and averaging the length of the flat bits .
5,399
def split ( self , tValues ) : if self . segmentType == "curve" : on1 = self . previousOnCurve off1 = self . points [ 0 ] . coordinates off2 = self . points [ 1 ] . coordinates on2 = self . points [ 2 ] . coordinates return bezierTools . splitCubicAtT ( on1 , off1 , off2 , on2 , * tValues ) elif self . segmentType == "line" : segments = [ ] x1 , y1 = self . previousOnCurve x2 , y2 = self . points [ 0 ] . coordinates dx = x2 - x1 dy = y2 - y1 pp = x1 , y1 for t in tValues : np = ( x1 + dx * t , y1 + dy * t ) segments . append ( [ pp , np ] ) pp = np segments . append ( [ pp , ( x2 , y2 ) ] ) return segments elif self . segmentType == "qcurve" : raise NotImplementedError else : raise NotImplementedError
Split the segment according to the t values.
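The 'line' branch above reduces to linear interpolation between the previous on-curve point and the segment's end point; a standalone sketch of just that branch, with hypothetical names:

def split_line(p0, p1, t_values):
    (x1, y1), (x2, y2) = p0, p1
    segments, prev = [], (x1, y1)
    for t in t_values:
        nxt = (x1 + (x2 - x1) * t, y1 + (y2 - y1) * t)
        segments.append([prev, nxt])
        prev = nxt
    segments.append([prev, (x2, y2)])
    return segments

split_line((0, 0), (10, 10), [0.5])
# -> [[(0, 0), (5.0, 5.0)], [(5.0, 5.0), (10, 10)]]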