text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log_memory(log, pref=None, lvl=logging.DEBUG, raise_flag=True): """Log the current memory usage. """
def log_memory(log, pref=None, lvl=logging.DEBUG, raise_flag=True):
    """Log the current and peak memory usage of this process.

    Arguments
    ---------
    log : `logging.Logger`
        Logger to which the memory report is written.
    pref : str or None
        Optional prefix prepended to the logged message.
    lvl : int
        Logging level at which the report is emitted.
    raise_flag : bool
        If `True`, re-raise any error encountered while measuring.
    """
    import os
    import sys
    cyc_str = ""
    KB = 1024.0
    if pref is not None:
        cyc_str += "{}: ".format(pref)
    # `ru_maxrss` units differ by platform: Linux reports kilobytes while
    # OSX (darwin) reports bytes.  (The original comment had this reversed;
    # the conversion below already matched the real behavior.)
    UNIT = KB*KB if sys.platform == 'darwin' else KB
    good = False
    # Use the `resource` module to check the maximum memory usage of this process
    try:
        import resource
        max_self = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        max_child = resource.getrusage(resource.RUSAGE_CHILDREN).ru_maxrss
        _str = "RSS Max Self: {:7.2f} [MB], Child: {:7.2f} [MB]".format(
            max_self/UNIT, max_child/UNIT)
        cyc_str += _str
    except Exception as err:
        log.log(lvl, "resource.getrusage failed. '{}'".format(str(err)))
        if raise_flag:
            raise
    else:
        good = True

    # Use the `psutil` module to check the current memory/cpu usage of this process
    try:
        import psutil
        process = psutil.Process(os.getpid())
        rss = process.memory_info().rss
        cpu_perc = process.cpu_percent()
        mem_perc = process.memory_percent()
        num_thr = process.num_threads()
        # BUG FIX: `psutil` reports `rss` in *bytes* on every platform, so a
        # fixed bytes->MB conversion must be used here; the platform-dependent
        # `UNIT` only applies to `ru_maxrss` above.  Previously Linux values
        # were divided by KB and mislabeled as [MB].
        _str = "; RSS: {:7.2f} [MB], {:7.2f}%; Threads: {:3d}, CPU: {:7.2f}%".format(
            rss/(KB*KB), mem_perc, num_thr, cpu_perc)
        cyc_str += _str
    except Exception as err:
        log.log(lvl, "psutil.Process failed. '{}'".format(str(err)))
        if raise_flag:
            raise
    else:
        good = True

    # Only emit the combined report if at least one probe succeeded
    if good:
        log.log(lvl, cyc_str)

    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_args(self, args, clargs): """Parse arguments and return configuration settings. """
def load_args(self, args, clargs):
    """Parse command-line arguments into the given namespace.

    Returns
    -------
    args : `argparse.Namespace` or None
        The populated namespace, or `None` when no subcommand was given
        (in which case the help message is printed instead).
    """
    # Run everything through the pre-constructed parser
    args = self.parser.parse_args(args=clargs, namespace=args)
    # A subcommand is required for operation; without one, show help and bail
    if args.subcommand is None:
        self.parser.print_help()
        return None
    return args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _setup_argparse(self): """Create `argparse` instance, and setup with appropriate parameters. """
parser = argparse.ArgumentParser( prog='catalog', description='Parent Catalog class for astrocats.') subparsers = parser.add_subparsers( description='valid subcommands', dest='subcommand') # Data Import # ----------- # Add the 'import' command, and related arguments self._add_parser_arguments_import(subparsers) # Git Subcommands # --------------- self._add_parser_arguments_git(subparsers) # Analyze Catalogs # ---------------- # Add the 'analyze' command, and related arguments self._add_parser_arguments_analyze(subparsers) return parser
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _add_parser_arguments_import(self, subparsers): """Create parser for 'import' subcommand, and associated arguments. """
import_pars = subparsers.add_parser( "import", help="Import data.") import_pars.add_argument( '--update', '-u', dest='update', default=False, action='store_true', help='Only update catalog using live sources.') import_pars.add_argument( '--load-stubs', dest='load_stubs', default=False, action='store_true', help='Load stubs before running.') import_pars.add_argument( '--archived', '-a', dest='archived', default=False, action='store_true', help='Always use task caches.') # Control which 'tasks' are executed # ---------------------------------- import_pars.add_argument( '--tasks', dest='args_task_list', nargs='*', default=None, help='space delimited list of tasks to perform.') import_pars.add_argument( '--yes', dest='yes_task_list', nargs='+', default=None, help='space delimited list of tasks to turn on.') import_pars.add_argument( '--no', dest='no_task_list', nargs='+', default=None, help='space delimited list of tasks to turn off.') import_pars.add_argument( '--min-task-priority', dest='min_task_priority', default=None, help='minimum priority for a task to run') import_pars.add_argument( '--max-task-priority', dest='max_task_priority', default=None, help='maximum priority for a task to run') import_pars.add_argument( '--task-groups', dest='task_groups', default=None, help='predefined group(s) of tasks to run.') return import_pars
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _add_parser_arguments_git(self, subparsers): """Create a sub-parsers for git subcommands. """
subparsers.add_parser( "git-clone", help="Clone all defined data repositories if they dont exist.") subparsers.add_parser( "git-push", help="Add all files to data repositories, commit, and push.") subparsers.add_parser( "git-pull", help="'Pull' all data repositories.") subparsers.add_parser( "git-reset-local", help="Hard reset all data repositories using local 'HEAD'.") subparsers.add_parser( "git-reset-origin", help="Hard reset all data repositories using 'origin/master'.") subparsers.add_parser( "git-status", help="Get the 'git status' of all data repositories.") return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _add_parser_arguments_analyze(self, subparsers): """Create a parser for the 'analyze' subcommand. """
lyze_pars = subparsers.add_parser( "analyze", help="Perform basic analysis on this catalog.") lyze_pars.add_argument( '--count', '-c', dest='count', default=False, action='store_true', help='Determine counts of entries, files, etc.') return lyze_pars
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compress_gz(fname): """Compress the file with the given name and delete the uncompressed file. The compressed filename is simply the input filename with '.gz' appended. Arguments --------- fname : str Name of the file to compress and delete. Returns ------- comp_fname : str Name of the compressed file produced. Equal to `fname + '.gz'`. """
def compress_gz(fname):
    """Compress the file with the given name and delete the uncompressed file.

    The compressed filename is simply the input filename with '.gz' appended.

    Arguments
    ---------
    fname : str
        Name of the file to compress and delete.

    Returns
    -------
    comp_fname : str
        Name of the compressed file produced.  Equal to `fname + '.gz'`.
    """
    import shutil
    import gzip
    comp_fname = fname + '.gz'
    # Binary streams: use the builtin `open` -- `codecs.open` in 'rb' mode
    # is a legacy wrapper that adds nothing here.  `copyfileobj` streams in
    # chunks so large files are never fully loaded into memory.
    with open(fname, 'rb') as f_in, gzip.open(comp_fname, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    # Remove the original only after the compressed copy is complete
    os.remove(fname)
    return comp_fname
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def IOC_TYPECHECK(t): """ Returns the size of given type, and check its suitability for use in an ioctl command number. """
def IOC_TYPECHECK(t):
    """Return the size of the given ctypes type/structure.

    Also asserts the size fits within the ioctl size mask, i.e. that it is
    suitable for embedding in an ioctl command number.
    """
    size = ctypes.sizeof(t)
    assert size <= _IOC_SIZEMASK, size
    return size
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def IOR(type, nr, size): """ An ioctl with read parameters. size (ctype type or instance) Type/structure of the argument passed to ioctl's "arg" argument. """
def IOR(type, nr, size):
    """Build the command number for an ioctl with read parameters.

    size (ctype type or instance)
        Type/structure of the argument passed to ioctl's "arg" argument.
    """
    direction = IOC_READ
    return IOC(direction, type, nr, IOC_TYPECHECK(size))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def IOW(type, nr, size): """ An ioctl with write parameters. size (ctype type or instance) Type/structure of the argument passed to ioctl's "arg" argument. """
def IOW(type, nr, size):
    """Build the command number for an ioctl with write parameters.

    size (ctype type or instance)
        Type/structure of the argument passed to ioctl's "arg" argument.
    """
    direction = IOC_WRITE
    return IOC(direction, type, nr, IOC_TYPECHECK(size))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def IOWR(type, nr, size): """ An ioctl with both read an writes parameters. size (ctype type or instance) Type/structure of the argument passed to ioctl's "arg" argument. """
def IOWR(type, nr, size):
    """Build the command number for an ioctl with both read and write parameters.

    size (ctype type or instance)
        Type/structure of the argument passed to ioctl's "arg" argument.
    """
    direction = IOC_READ | IOC_WRITE
    return IOC(direction, type, nr, IOC_TYPECHECK(size))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_last_dirs(path, num=1): """Get a path including only the trailing `num` directories. Returns ------- last_path : str """
head, tail = os.path.split(path) last_path = str(tail) for ii in range(num): head, tail = os.path.split(head) last_path = os.path.join(tail, last_path) last_path = "..." + last_path return last_path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def analyze(self, args): """Run the analysis routines determined from the given `args`. """
def analyze(self, args):
    """Run the analysis routines determined from the given `args`.

    Arguments
    ---------
    args : `argparse.Namespace`
        Parsed arguments; `args.count` enables the counting routines.
    """
    self.log.info("Running catalog analysis")
    # Each flag on `args` enables one analysis routine; only 'count' so far
    if args.count:
        self.count()
    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def count(self): Returns ------- retvals : dict Dictionary of 'property-name: counts' pairs for further processing """
def count(self):
    """Count the tasks and files associated with this catalog.

    Returns
    -------
    retvals : dict
        Dictionary of 'property-name: counts' pairs for further processing
    """
    self.log.info("Running 'count'")
    # Collect each counted property; dict-literal order preserves the
    # original call order (tasks first, then files).
    retvals = {
        'num_tasks': self._count_tasks(),
        'num_files': self._count_repo_files(),
    }
    return retvals
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _count_tasks(self): """Count the number of tasks, both in the json and directory. Returns ------- num_tasks : int The total number of all tasks included in the `tasks.json` file. """
self.log.warning("Tasks:") tasks, task_names = self.catalog._load_task_list_from_file() # Total number of all tasks num_tasks = len(tasks) # Number which are active by default num_tasks_act = len([tt for tt, vv in tasks.items() if vv.active]) # Number of python files in the tasks directory num_task_files = os.path.join(self.catalog.PATHS.tasks_dir, '*.py') num_task_files = len(glob(num_task_files)) tasks_str = "{} ({} default active) with {} task-files.".format( num_tasks, num_tasks_act, num_task_files) self.log.warning(tasks_str) return num_tasks
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _count_repo_files(self): """Count the number of files in the data repositories. `_COUNT_FILE_TYPES` are used to determine which file types are checked explicitly. `_IGNORE_FILES` determine which files are ignored in (most) counts. Returns ------- repo_files : int Total number of (non-ignored) files in all data repositories. """
def _count_repo_files(self):
    """Count the number of files in the data repositories.

    `_COUNT_FILE_TYPES` are used to determine which file types are checked
    explicitly.  `_IGNORE_FILES` determine which files are ignored in (most)
    counts.

    Returns
    -------
    num_files : int
        Total number of (non-ignored) files in all data repositories.
    """
    self.log.warning("Files:")
    repos = self.catalog.PATHS.get_all_repo_folders()
    # Cumulative totals over all repositories
    num_files = 0
    num_type = np.zeros(len(self._COUNT_FILE_TYPES), dtype=int)
    num_ign = 0
    for rep in repos:
        # Get the last portion of the filepath for this repo
        last_path = _get_last_dirs(rep, 2)
        # Per-repository counts: all files, then each explicit file type
        n_all = self._count_files_by_type(rep, '*')
        n_type = np.array(
            [self._count_files_by_type(rep, '*.' + ftype)
             for ftype in self._COUNT_FILE_TYPES], dtype=int)
        # Number of ignored files: total including ignored, minus 'all'
        n_ign = self._count_files_by_type(rep, '*', ignore=False) - n_all
        self.log.warning("{}: {}".format(
            last_path, self._file_nums_str(n_all, n_type, n_ign)))
        # Update cumulative counts
        num_files += n_all
        num_type += n_type
        num_ign += n_ign
    self.log.warning(self._file_nums_str(num_files, num_type, num_ign))
    return num_files
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _file_nums_str(self, n_all, n_type, n_ign): """Construct a string showing the number of different file types. Returns ------- f_str : str """
# 'other' is the difference between all and named n_oth = n_all - np.sum(n_type) f_str = "{} Files".format(n_all) + " (" if len(n_type): f_str += ", ".join("{} {}".format(name, num) for name, num in zip(self._COUNT_FILE_TYPES, n_type)) f_str += ", " f_str += "other {}; {} ignored)".format(n_oth, n_ign) return f_str
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _count_files_by_type(self, path, pattern, ignore=True): """Count files in the given path, with the given pattern. If `ignore = True`, skip files in the `_IGNORE_FILES` list. Returns ------- num_files : int """
# Get all files matching the given path and pattern files = glob(os.path.join(path, pattern)) # Count the files files = [ff for ff in files if os.path.split(ff)[-1] not in self._IGNORE_FILES or not ignore] num_files = len(files) return num_files
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def bibcode_from_url(cls, url): """Given a URL, try to find the ADS bibcode. Currently: only `ads` URLs will work, e.g. Returns ------- code : str or 'None' The Bibcode if found, otherwise 'None' """
def bibcode_from_url(cls, url):
    """Given a URL, try to find the ADS bibcode.

    Currently: only `ads` URLs will work, e.g.

    Returns
    -------
    code : str or 'None'
        The Bibcode if found, otherwise 'None'
    """
    # BUG FIX: narrowed from a bare `except`, which also swallowed
    # KeyboardInterrupt/SystemExit and hid unrelated bugs.
    try:
        code = url.split('/abs/')
        code = code[1].strip()
        return code
    except (IndexError, AttributeError):
        # IndexError: no '/abs/' segment in the URL.
        # AttributeError: `url` is not a string (e.g. None).
        return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_save_path(self, bury=False): """Return the path that this Entry should be saved to."""
filename = self.get_filename(self[self._KEYS.NAME]) # Put objects that shouldn't belong in this catalog in the boneyard if bury: outdir = self.catalog.get_repo_boneyard() # Get normal repository save directory else: repo_folders = self.catalog.PATHS.get_repo_output_folders() # If no repo folders exist, raise an error -- cannot save if not len(repo_folders): err_str = ( "No output data repositories found. Cannot save.\n" "Make sure that repo names are correctly configured " "in the `input/repos.json` file, and either manually or " "automatically (using `astrocats CATALOG git-clone`) " "clone the appropriate data repositories.") self.catalog.log.error(err_str) raise RuntimeError(err_str) outdir = repo_folders[0] return outdir, filename
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _ordered(self, odict): """Convert the object into a plain OrderedDict."""
def _ordered(self, odict):
    """Convert the object into a plain `OrderedDict` with sorted keys.

    `CatDict`/`Entry` instances supply their own `sort_func` ordering;
    nested `OrderedDict`s (directly or inside lists) are converted
    recursively.  NOTE: nested values are also written back into `odict`.
    """
    ndict = OrderedDict()
    # `CatDict`/`Entry` objects define their own key ordering
    if isinstance(odict, CatDict) or isinstance(odict, Entry):
        sort_key = odict.sort_func
    else:
        sort_key = None
    for key in sorted(odict.keys(), key=sort_key):
        val = odict[key]
        if isinstance(val, OrderedDict):
            val = self._ordered(val)
            odict[key] = val
        if isinstance(val, list):
            # Recurse only when the list is empty or led by an OrderedDict
            if not (val and not isinstance(val[0], OrderedDict)):
                val = [self._ordered(item)
                       if isinstance(item, OrderedDict) else item
                       for item in val]
                odict[key] = val
        ndict[key] = val
    return ndict
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_hash(self, keys=[]): """Return a unique hash associated with the listed keys."""
def get_hash(self, keys=[]):
    """Return a unique hash associated with the listed keys.

    The hash is the first 16 hex characters of the SHA-512 digest of the
    deterministically JSON-serialized values for `keys` (all keys when the
    list is empty).
    """
    if not len(keys):
        keys = list(self.keys())
    oself = self._ordered(deepcopy(self))
    # Concatenate a deterministic JSON dump of each requested key's value
    string_rep = ''.join(
        json.dumps(oself.get(key, ''), sort_keys=True) for key in keys)
    return hashlib.sha512(string_rep.encode()).hexdigest()[:16]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _clean_quantity(self, quantity): """Clean quantity value before it is added to entry."""
def _clean_quantity(self, quantity):
    """Clean quantity value before it is added to entry.

    Returns `False` when the quantity has no usable value; otherwise
    normalizes the fields in-place and returns `True`.
    """
    value = quantity.get(QUANTITY.VALUE, '').strip()
    error = quantity.get(QUANTITY.E_VALUE, '').strip()
    unit = quantity.get(QUANTITY.U_VALUE, '').strip()
    kind = quantity.get(QUANTITY.KIND, '')

    # `kind` may be a single string or a list of strings
    if isinstance(kind, list) and not isinstance(kind, string_types):
        kind = [x.strip() for x in kind]
    else:
        kind = kind.strip()

    # A quantity without a value is unusable
    if not value:
        return False

    # Normalize numeric values (and their errors) via Decimal '%g' format
    if is_number(value):
        value = '%g' % Decimal(value)
        if error:
            error = '%g' % Decimal(error)

    # Store back only the fields which are non-empty
    for field, val in ((QUANTITY.VALUE, value), (QUANTITY.E_VALUE, error),
                       (QUANTITY.U_VALUE, unit), (QUANTITY.KIND, kind)):
        if val:
            quantity[field] = val
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_cat_dict_source(self, cat_dict_class, key_in_self, **kwargs): """Check that a source exists and that a quantity isn't erroneous."""
def _check_cat_dict_source(self, cat_dict_class, key_in_self, **kwargs):
    """Check that a source exists and that a quantity isn't erroneous.

    Returns the source alias string, or `None` when the data should be
    skipped (erroneous or private).  Raises `CatDictError` when the source
    is missing or malformed.
    """
    # A source must always be provided
    source = kwargs.get(cat_dict_class._KEYS.SOURCE, None)
    if source is None:
        raise CatDictError(
            "{}: `source` must be provided!".format(self[self._KEYS.NAME]),
            warn=True)
    # Source must be a comma-delimited list of integers
    if not all(is_integer(x) for x in source.split(',')):
        raise CatDictError(
            "{}: `source` is comma-delimited list of "
            " integers!".format(self[self._KEYS.NAME]),
            warn=True)
    # If this source/data is erroneous, skip it
    if self.is_erroneous(key_in_self, source):
        self._log.info("This source is erroneous, skipping")
        return None
    # If this source/data is private (and private data was not requested), skip it
    if (self.catalog.args is not None and not self.catalog.args.private and
            self.is_private(key_in_self, source)):
        self._log.info("This source is private, skipping")
        return None
    return source
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _add_cat_dict(self, cat_dict_class, key_in_self, check_for_dupes=True, compare_to_existing=True, **kwargs): """Add a `CatDict` to this `Entry`. CatDict only added if initialization succeeds and it doesn't already exist within the Entry. """
def _add_cat_dict(self,
                  cat_dict_class,
                  key_in_self,
                  check_for_dupes=True,
                  compare_to_existing=True,
                  **kwargs):
    """Add a `CatDict` to this `Entry`.

    The `CatDict` is only added if initialization succeeds and it doesn't
    already exist within the `Entry`.

    Returns `True` on success, `False` on failure, or the pre-existing
    duplicate `CatDict` when one was found (so callers may augment it).
    """
    # Make sure that a source is given, and is valid (nor erroneous).
    # `Error` instances carry no source themselves.
    if cat_dict_class != Error:
        try:
            source = self._check_cat_dict_source(cat_dict_class,
                                                 key_in_self, **kwargs)
        except CatDictError as err:
            if err.warn:
                self._log.info("'{}' Not adding '{}': '{}'".format(
                    self[self._KEYS.NAME], key_in_self, str(err)))
            return False
        if source is None:
            return False

    # Try to create a new instance of this subclass of `CatDict`
    new_entry = self._init_cat_dict(cat_dict_class, key_in_self, **kwargs)
    if new_entry is None:
        return False

    # Compare this new entry with all previous entries to make sure is new
    if compare_to_existing and cat_dict_class != Error:
        for item in self.get(key_in_self, []):
            if new_entry.is_duplicate_of(item):
                item.append_sources_from(new_entry)
                # Return the entry in case we want to use any additional
                # tags to augment the old entry
                return new_entry

    # If this is an alias, add it to the parent catalog's reverse
    # dictionary linking aliases to names for fast lookup.
    if key_in_self == self._KEYS.ALIAS:
        # Check if adding this alias makes us a dupe; if so mark
        # ourselves as a dupe.
        if (check_for_dupes and 'aliases' in dir(self.catalog) and
                new_entry[QUANTITY.VALUE] in self.catalog.aliases):
            possible_dupe = self.catalog.aliases[new_entry[QUANTITY.VALUE]]
            if (possible_dupe != self[self._KEYS.NAME] and
                    possible_dupe in self.catalog.entries):
                self.dupe_of.append(possible_dupe)
        if 'aliases' in dir(self.catalog):
            self.catalog.aliases[new_entry[QUANTITY.VALUE]] = self[
                self._KEYS.NAME]

    self.setdefault(key_in_self, []).append(new_entry)

    # Merge as soon as a duplicate entry has been identified
    if (key_in_self == self._KEYS.ALIAS and check_for_dupes and
            self.dupe_of):
        self.merge_dupes()

    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_from_file(cls, catalog, name=None, path=None, clean=False, merge=True, pop_schema=True, ignore_keys=[], compare_to_existing=True, try_gzip=False, filter_on={}): """Construct a new `Entry` instance from an input file. The input file can be given explicitly by `path`, or a path will be constructed appropriately if possible. Arguments --------- catalog : `astrocats.catalog.catalog.Catalog` instance The parent catalog object of which this entry belongs. name : str or 'None' The name of this entry, e.g. `SN1987A` for a `Supernova` entry. If no `path` is given, a path is constructed by trying to find a file in one of the 'output' repositories with this `name`. note: either `name` or `path` must be provided. path : str or 'None' The absolutely path of the input file. note: either `name` or `path` must be provided. clean : bool Whether special sanitization processing should be done on the input data. This is mostly for input files from the 'internal' repositories. """
def init_from_file(cls,
                   catalog,
                   name=None,
                   path=None,
                   clean=False,
                   merge=True,
                   pop_schema=True,
                   ignore_keys=[],
                   compare_to_existing=True,
                   try_gzip=False,
                   filter_on={}):
    """Construct a new `Entry` instance from an input file.

    The input file can be given explicitly by `path`, or a path will
    be constructed appropriately if possible.

    Arguments
    ---------
    catalog : `astrocats.catalog.catalog.Catalog` instance
        The parent catalog object of which this entry belongs.
    name : str or 'None'
        The name of this entry, e.g. `SN1987A` for a `Supernova` entry.
        If no `path` is given, a path is constructed by trying to find
        a file in one of the 'output' repositories with this `name`.
        note: either `name` or `path` must be provided.
    path : str or 'None'
        The absolutely path of the input file.
        note: either `name` or `path` must be provided.
    clean : bool
        Whether special sanitization processing should be done on the
        input data.  This is mostly for input files from the 'internal'
        repositories.
    """
    if not catalog:
        from astrocats.catalog.catalog import Catalog
        log = logging.getLogger()
        catalog = Catalog(None, log)

    catalog.log.debug("init_from_file()")
    if name is None and path is None:
        err = ("Either entry `name` or `path` must be specified to load "
               "entry.")
        # BUG FIX: previously `log.error(err)` -- but `log` is only bound
        # in the `if not catalog` branch above, so passing a real catalog
        # raised `NameError` instead of the intended `ValueError`.
        catalog.log.error(err)
        raise ValueError(err)

    # If the path is given, use that to load from
    load_path = ''
    if path is not None:
        load_path = path
        name = ''
    # If the name is given, try to find a path for it
    else:
        repo_paths = catalog.PATHS.get_repo_output_folders()
        for rep in repo_paths:
            filename = cls.get_filename(name)
            newpath = os.path.join(rep, filename + '.json')
            if os.path.isfile(newpath):
                load_path = newpath
                break

    if load_path is None or not os.path.isfile(load_path):
        # FIX: is this warning worthy?
        return None

    # Create a new `Entry` instance
    new_entry = cls(catalog, name)

    # Check if .gz file
    if try_gzip and not load_path.endswith('.gz'):
        try_gzip = False

    # Fill it with data from json file
    new_entry._load_data_from_json(
        load_path,
        clean=clean,
        merge=merge,
        pop_schema=pop_schema,
        ignore_keys=ignore_keys,
        compare_to_existing=compare_to_existing,
        gzip=try_gzip,
        filter_on=filter_on)

    return new_entry
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_alias(self, alias, source, clean=True): """Add an alias, optionally 'cleaning' the alias string. Calls the parent `catalog` method `clean_entry_name` - to apply the same name-cleaning as is applied to entry names themselves. Returns ------- alias : str The stored version of the alias (cleaned or not). """
def add_alias(self, alias, source, clean=True):
    """Add an alias, optionally 'cleaning' the alias string.

    Calls the parent `catalog` method `clean_entry_name` - to apply the
    same name-cleaning as is applied to entry names themselves.

    Returns
    -------
    alias : str
        The stored version of the alias (cleaned or not).
    """
    stored = self.catalog.clean_entry_name(alias) if clean else alias
    self.add_quantity(self._KEYS.ALIAS, stored, source)
    return stored
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_error(self, value, **kwargs): """Add an `Error` instance to this entry."""
def add_error(self, value, **kwargs):
    """Add an `Error` instance to this entry."""
    # Inject the value into the kwargs handed to the CatDict constructor
    kwargs[ERROR.VALUE] = value
    self._add_cat_dict(Error, self._KEYS.ERRORS, **kwargs)
    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_photometry(self, compare_to_existing=True, **kwargs): """Add a `Photometry` instance to this entry."""
def add_photometry(self, compare_to_existing=True, **kwargs):
    """Add a `Photometry` instance to this entry."""
    self._add_cat_dict(Photometry,
                       self._KEYS.PHOTOMETRY,
                       compare_to_existing=compare_to_existing,
                       **kwargs)
    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def merge_dupes(self): """Merge two entries that correspond to the same entry."""
def merge_dupes(self):
    """Merge all entries listed in `dupe_of` into this entry, then clear it."""
    for dupe in self.dupe_of:
        # Skip duplicates that are no longer present in the catalog
        if dupe not in self.catalog.entries:
            continue
        if self.catalog.entries[dupe]._stub:
            # Load the full entry first; merge=False avoids infinite recursion
            self.catalog.load_entry_from_name(dupe, delete=True, merge=False)
        self.catalog.copy_entry_to_entry(self.catalog.entries[dupe], self)
        del self.catalog.entries[dupe]
    self.dupe_of = []
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_quantity(self, quantities, value, source, check_for_dupes=True, compare_to_existing=True, **kwargs): """Add an `Quantity` instance to this entry."""
def add_quantity(self,
                 quantities,
                 value,
                 source,
                 check_for_dupes=True,
                 compare_to_existing=True,
                 **kwargs):
    """Add a `Quantity` instance (for each name in `quantities`) to this entry.

    Returns `False` when any quantity matched a pre-existing entry (whose
    tags were augmented instead of appending), otherwise `True`.
    """
    success = True
    for quantity in listify(quantities):
        kwargs.update({QUANTITY.VALUE: value, QUANTITY.SOURCE: source})
        cat_dict = self._add_cat_dict(
            Quantity,
            quantity,
            compare_to_existing=compare_to_existing,
            check_for_dupes=check_for_dupes,
            **kwargs)
        # A returned `CatDict` means a duplicate was found and augmented
        if isinstance(cat_dict, CatDict):
            self._append_additional_tags(quantity, source, cat_dict)
            success = False
    return success
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_self_source(self): """Add a source that refers to the catalog itself. For now this points to the Open Supernova Catalog by default. """
def add_self_source(self):
    """Add a source that refers to the catalog itself.

    For now this points to the Open Supernova Catalog by default.
    """
    catalog = self.catalog
    return self.add_source(bibcode=catalog.OSC_BIBCODE,
                           name=catalog.OSC_NAME,
                           url=catalog.OSC_URL,
                           secondary=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_source(self, allow_alias=False, **kwargs): """Add a `Source` instance to this entry."""
def add_source(self, allow_alias=False, **kwargs):
    """Add a `Source` instance to this entry.

    Returns the alias of the stored source (an existing duplicate's alias
    when one is found), or `None` if initialization failed.
    """
    # Callers must not supply their own alias unless explicitly allowed
    if not allow_alias and SOURCE.ALIAS in kwargs:
        err_str = "`{}` passed in kwargs, this shouldn't happen!".format(
            SOURCE.ALIAS)
        self._log.error(err_str)
        raise RuntimeError(err_str)

    # Set alias number to be +1 of current number of sources
    if SOURCE.ALIAS not in kwargs:
        kwargs[SOURCE.ALIAS] = str(self.num_sources() + 1)
    source_obj = self._init_cat_dict(Source, self._KEYS.SOURCES, **kwargs)
    if source_obj is None:
        return None

    # Reuse an existing duplicate source instead of appending a new one
    for item in self.get(self._KEYS.SOURCES, ''):
        if source_obj.is_duplicate_of(item):
            return item[item._KEYS.ALIAS]

    self.setdefault(self._KEYS.SOURCES, []).append(source_obj)
    return source_obj[source_obj._KEYS.ALIAS]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_model(self, allow_alias=False, **kwargs): """Add a `Model` instance to this entry."""
def add_model(self, allow_alias=False, **kwargs):
    """Add a `Model` instance to this entry.

    Returns the alias of the stored model (an existing duplicate's alias
    when one is found), or `None` if initialization failed.
    """
    if not allow_alias and MODEL.ALIAS in kwargs:
        # BUG FIX: the message previously interpolated `SOURCE.ALIAS`
        # (copy-paste from `add_source`); report the `MODEL` key that was
        # actually checked.
        err_str = "`{}` passed in kwargs, this shouldn't happen!".format(
            MODEL.ALIAS)
        self._log.error(err_str)
        raise RuntimeError(err_str)

    # Set alias number to be +1 of current number of models
    if MODEL.ALIAS not in kwargs:
        kwargs[MODEL.ALIAS] = str(self.num_models() + 1)
    model_obj = self._init_cat_dict(Model, self._KEYS.MODELS, **kwargs)
    if model_obj is None:
        return None

    # Reuse an existing duplicate model instead of appending a new one
    for item in self.get(self._KEYS.MODELS, []):
        if model_obj.is_duplicate_of(item):
            return item[item._KEYS.ALIAS]

    self.setdefault(self._KEYS.MODELS, []).append(model_obj)
    return model_obj[model_obj._KEYS.ALIAS]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_spectrum(self, compare_to_existing=True, **kwargs): """Add a `Spectrum` instance to this entry."""
def add_spectrum(self, compare_to_existing=True, **kwargs):
    """Add a `Spectrum` instance to this entry.

    If a duplicate spectrum already exists, the previous item is updated
    in place instead of appending a new one.
    """
    spec_key = self._KEYS.SPECTRA
    # Make sure that a source is given, and is valid (nor erroneous)
    source = self._check_cat_dict_source(Spectrum, spec_key, **kwargs)
    if source is None:
        return None

    # Try to create a new instance of `Spectrum`
    new_spectrum = self._init_cat_dict(Spectrum, spec_key, **kwargs)
    if new_spectrum is None:
        return None

    for item in self.get(spec_key, []):
        # Only the `filename` should be compared for duplicates. If a
        # duplicate is found, that means the previous `exclude` array
        # should be saved to the new object, and the old deleted
        if new_spectrum.is_duplicate_of(item):
            if SPECTRUM.EXCLUDE in new_spectrum:
                item[SPECTRUM.EXCLUDE] = new_spectrum[SPECTRUM.EXCLUDE]
            elif SPECTRUM.EXCLUDE in item:
                item.update(new_spectrum)
            # Duplicate handled in place; nothing to append
            return
    self.setdefault(spec_key, []).append(new_spectrum)
    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check(self): """Check that the entry has the required fields."""
def check(self):
    """Check that the entry has the required fields (schema and name)."""
    # Ensure a schema key is present in the dict
    if self._KEYS.SCHEMA not in self:
        self[self._KEYS.SCHEMA] = self.catalog.SCHEMA.URL
    # A non-empty name is mandatory
    name = self.get(self._KEYS.NAME)
    if not name:
        raise ValueError("Entry name is empty:\n\t{}".format(
            json.dumps(self, indent=2)))
    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_aliases(self, includename=True): """Retrieve the aliases of this object as a list of strings. Arguments --------- includename : bool Include the 'name' parameter in the list of aliases. """
def get_aliases(self, includename=True):
    """Retrieve the aliases of this object as a list of strings.

    Arguments
    ---------
    includename : bool
        Include the 'name' parameter in the list of aliases.
    """
    # Empty list when no aliases exist
    aliases = [aq[QUANTITY.VALUE]
               for aq in self.get(self._KEYS.ALIAS, [])]
    name = self[self._KEYS.NAME]
    # Prepend the entry's own name if requested and not already present
    if includename and name not in aliases:
        aliases.insert(0, name)
    return aliases
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_entry_text(self, fname): """Retrieve the raw text from a file."""
def get_entry_text(self, fname):
    """Retrieve the raw text from a file (gzip-decompressing '*.gz' files)."""
    # Gzipped files are detected purely by their final extension
    is_gzipped = fname.split('.')[-1] == 'gz'
    opener = gz.open(fname, 'rt') if is_gzipped else codecs.open(fname, 'r')
    with opener as f:
        return f.read()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_source_by_alias(self, alias): """Given an alias, find the corresponding source in this entry. If the given alias doesn't exist (e.g. there are no sources), then a `ValueError` is raised. Arguments --------- alias : str The str-integer (e.g. '8') of the target source. Returns ------- source : `astrocats.catalog.source.Source` object The source object corresponding to the passed alias. """
def get_source_by_alias(self, alias):
    """Given an alias, find the corresponding source in this entry.

    If the given alias doesn't exist (e.g. there are no sources), then a
    `ValueError` is raised.

    Arguments
    ---------
    alias : str
        The str-integer (e.g. '8') of the target source.

    Returns
    -------
    source : `astrocats.catalog.source.Source` object
        The source object corresponding to the passed alias.
    """
    matches = (source for source in self.get(self._KEYS.SOURCES, [])
               if source[self._KEYS.ALIAS] == alias)
    found = next(matches, None)
    if found is None:
        raise ValueError("Source '{}': alias '{}' not found!".format(
            self[self._KEYS.NAME], alias))
    return found
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_stub(self): """Get a new `Entry` which contains the 'stub' of this one. The 'stub' is only the name and aliases. Usage: ----- To convert a normal entry into a stub (for example), overwrite the entry in place, i.e. Returns ------- stub : `astrocats.catalog.entry.Entry` subclass object The type of the returned object is this instance's type. """
def get_stub(self):
    """Get a new `Entry` which contains the 'stub' of this one.

    The 'stub' retains only the name and a handful of key quantities.

    Returns
    -------
    stub : `astrocats.catalog.entry.Entry` subclass object
        The type of the returned object is this instance's type.
    """
    stub = type(self)(self.catalog, self[self._KEYS.NAME], stub=True)
    # Copy over only the keys a stub retains
    keep = [self._KEYS.ALIAS, self._KEYS.DISTINCT_FROM, self._KEYS.RA,
            self._KEYS.DEC, self._KEYS.DISCOVER_DATE, self._KEYS.SOURCES]
    for key in keep:
        if key in self:
            stub[key] = self[key]
    return stub
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_erroneous(self, field, sources): """Check if attribute has been marked as being erroneous."""
def is_erroneous(self, field, sources):
    """Check if attribute has been marked as being erroneous.

    `sources` is a comma-delimited string of source aliases; `field`
    names the quantity being checked against registered errors.
    """
    if self._KEYS.ERRORS not in self:
        return False
    my_errors = self[self._KEYS.ERRORS]

    def _err_values(kind):
        # Error values registered against this `field` for the given kind
        return [err[ERROR.VALUE] for err in my_errors
                if err[ERROR.KIND] == kind and err[ERROR.EXTRA] == field]

    for alias in sources.split(','):
        source = self.get_source_by_alias(alias)
        # Errors may be keyed on either the source's bibcode or its name
        for kind in (SOURCE.BIBCODE, SOURCE.NAME):
            if kind in source and source[kind] in _err_values(kind):
                return True
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_private(self, key, sources): """Check if attribute is private."""
def is_private(self, key, sources):
    """Return True when every source for this attribute is private.

    Alias entries are always treated as public.
    """
    # Aliases are always public regardless of their sources.
    if key == ENTRY.ALIAS:
        return False
    # Private only if *every* contributing source is marked private.
    for src_alias in sources.split(','):
        if SOURCE.PRIVATE not in self.get_source_by_alias(src_alias):
            return False
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self, bury=False, final=False): """Write entry to JSON file in the proper location. Arguments --------- bury : bool final : bool If this is the 'final' save, perform additional sanitization and cleaning operations. """
# Resolve destination directory/filename (bury moves it elsewhere).
outdir, filename = self._get_save_path(bury=bury)
if final:
    # Final saves get extra cleaning of the entry contents.
    self.sanitize()
# FIX: use 'dump' not 'dumps'
jsonstring = json.dumps(
    {
        self[self._KEYS.NAME]: self._ordered(self)
    },
    indent='\t' if sys.version_info[0] >= 3 else 4,
    separators=(',', ':'),
    ensure_ascii=False)
if not os.path.isdir(outdir):
    raise RuntimeError("Output directory '{}' for event '{}' does "
                       "not exist.".format(outdir, self[
                           self._KEYS.NAME]))
save_name = os.path.join(outdir, filename + '.json')
with codecs.open(save_name, 'w', encoding='utf8') as sf:
    sf.write(jsonstring)
# Sanity check that the file actually landed on disk.
if not os.path.exists(save_name):
    raise RuntimeError("File '{}' was not saved!".format(save_name))
return save_name
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sort_func(self, key): """Used to sort keys when writing Entry to JSON format. Should be supplemented/overridden by inheriting classes. """
# Fixed ordering for the most important keys: 'aa*' prefixes sort
# first, 'zz*' suffixes last; everything else sorts alphabetically
# in between by its own name.
if key == self._KEYS.SCHEMA:
    return 'aaa'
if key == self._KEYS.NAME:
    return 'aab'
if key == self._KEYS.SOURCES:
    return 'aac'
if key == self._KEYS.ALIAS:
    return 'aad'
if key == self._KEYS.MODELS:
    return 'aae'
if key == self._KEYS.PHOTOMETRY:
    return 'zzy'
if key == self._KEYS.SPECTRA:
    return 'zzz'
return key
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_pd_mag_from_counts(photodict, c='', ec='', lec='', uec='', zp=DEFAULT_ZP, sig=DEFAULT_UL_SIGMA): """Set photometry dictionary from a counts measurement."""
# Do all arithmetic at a Decimal precision one digit beyond the inputs.
with localcontext() as ctx:
    # Fall back to the symmetric error when asymmetric ones are absent.
    if lec == '' or uec == '':
        lec = ec
        uec = ec
    prec = max(
        get_sig_digits(str(c), strip_zeroes=False),
        get_sig_digits(str(lec), strip_zeroes=False),
        get_sig_digits(str(uec), strip_zeroes=False)) + 1
    ctx.prec = prec
    dlec = Decimal(str(lec))
    duec = Decimal(str(uec))
    if c != '':
        dc = Decimal(str(c))
    dzp = Decimal(str(zp))
    dsig = Decimal(str(sig))
    photodict[PHOTOMETRY.ZERO_POINT] = str(zp)
    # Below the significance threshold: record as an upper limit.
    if c == '' or float(c) < float(sig) * float(uec):
        photodict[PHOTOMETRY.UPPER_LIMIT] = True
        photodict[PHOTOMETRY.UPPER_LIMIT_SIGMA] = str(sig)
        # Limit magnitude from the sig-sigma count error: m = zp - 2.5 log10(sig * uec).
        photodict[PHOTOMETRY.MAGNITUDE] = str(dzp - (D25 * (dsig * duec
                                                            ).log10()))
        # Equivalent counts at the limit magnitude, used for the error bar.
        dnec = Decimal('10.0') ** (
            (dzp - Decimal(photodict[PHOTOMETRY.MAGNITUDE])) / D25)
        photodict[PHOTOMETRY.E_UPPER_MAGNITUDE] = str(D25 * (
            (dnec + duec).log10() - dnec.log10()))
    else:
        # Detection: m = zp - 2.5 log10(counts), with asymmetric errors.
        photodict[PHOTOMETRY.MAGNITUDE] = str(dzp - D25 * dc.log10())
        photodict[PHOTOMETRY.E_UPPER_MAGNITUDE] = str(D25 * (
            (dc + duec).log10() - dc.log10()))
        photodict[PHOTOMETRY.E_LOWER_MAGNITUDE] = str(D25 * (
            dc.log10() - (dc - dlec).log10()))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_pd_mag_from_flux_density(photodict, fd='', efd='', lefd='', uefd='', sig=DEFAULT_UL_SIGMA): """Set photometry dictionary from a flux density measurement. `fd` is assumed to be in microjanskys. """
def set_pd_mag_from_flux_density(photodict, fd='', efd='', lefd='', uefd='',
                                 sig=DEFAULT_UL_SIGMA):
    """Set photometry dictionary from a flux density measurement.

    `fd` is assumed to be in microjanskys.

    Parameters
    ----------
    photodict : dict
        Photometry dictionary to fill in (modified in place).
    fd : str or number
        Flux density; '' when only an upper limit is known.
    efd, lefd, uefd : str or number
        Symmetric, lower and upper flux-density errors.
    sig : number
        Significance threshold (in sigma) below which the measurement
        is recorded as an upper limit.
    """
    # Do all arithmetic at a Decimal precision one digit beyond the inputs.
    with localcontext() as ctx:
        # Fall back to the symmetric error when asymmetric ones are absent.
        if lefd == '' or uefd == '':
            lefd = efd
            uefd = efd
        prec = max(
            get_sig_digits(str(fd), strip_zeroes=False),
            get_sig_digits(str(lefd), strip_zeroes=False),
            get_sig_digits(str(uefd), strip_zeroes=False)) + 1
        ctx.prec = prec
        dlefd = Decimal(str(lefd))
        duefd = Decimal(str(uefd))
        if fd != '':
            dfd = Decimal(str(fd))
        dsig = Decimal(str(sig))
        # BUG FIX: threshold used the module constant DEFAULT_UL_SIGMA and
        # ignored the `sig` argument; use `sig`, consistent with
        # set_pd_mag_from_counts.
        if fd == '' or float(fd) < float(sig) * float(uefd):
            # Below threshold: record an upper limit at `sig` sigma.
            photodict[PHOTOMETRY.UPPER_LIMIT] = True
            photodict[PHOTOMETRY.UPPER_LIMIT_SIGMA] = str(sig)
            # AB magnitude zero point for microjanskys is 23.9.
            photodict[PHOTOMETRY.MAGNITUDE] = str(Decimal('23.9') - D25 * (
                dsig * duefd).log10())
            if fd:
                photodict[PHOTOMETRY.E_UPPER_MAGNITUDE] = str(D25 * (
                    (dfd + duefd).log10() - dfd.log10()))
        else:
            # Detection: m = 23.9 - 2.5 log10(fd), with asymmetric errors.
            photodict[PHOTOMETRY.MAGNITUDE] = str(Decimal('23.9') -
                                                  D25 * dfd.log10())
            photodict[PHOTOMETRY.E_UPPER_MAGNITUDE] = str(D25 * (
                (dfd + duefd).log10() - dfd.log10()))
            photodict[PHOTOMETRY.E_LOWER_MAGNITUDE] = str(D25 * (
                dfd.log10() - (dfd - dlefd).log10()))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check(self): """Check that entry attributes are legal."""
# Run the super method super(Photometry, self)._check() err_str = None has_flux = self._KEYS.FLUX in self has_flux_dens = self._KEYS.FLUX_DENSITY in self has_u_flux = self._KEYS.U_FLUX in self has_u_flux_dens = self._KEYS.U_FLUX_DENSITY in self has_freq = self._KEYS.FREQUENCY in self has_band = self._KEYS.BAND in self has_ener = self._KEYS.ENERGY in self has_u_freq = self._KEYS.U_FREQUENCY in self has_u_ener = self._KEYS.U_ENERGY in self if has_flux or has_flux_dens: if not any([has_freq, has_band, has_ener]): err_str = ("Has `{}` or `{}`".format(self._KEYS.FLUX, self._KEYS.FLUX_DENSITY) + " but None of `{}`, `{}`, `{}`".format( self._KEYS.FREQUENCY, self._KEYS.BAND, self._KEYS.ENERGY)) elif has_flux and not has_u_flux: err_str = "`{}` provided without `{}`.".format( self._KEYS.FLUX, self._KEYS.U_FLUX) elif has_flux_dens and not has_u_flux_dens: err_str = "`{}` provided without `{}`.".format( self._KEYS.FLUX_DENSITY, self._KEYS.U_FLUX_DENSITY) elif has_freq and not has_u_freq: err_str = "`{}` provided without `{}`.".format( self._KEYS.FREQUENCY, self._KEYS.U_FREQUENCY) elif has_ener and not has_u_ener: err_str = "`{}` provided without `{}`.".format( self._KEYS.ENERGY, self._KEYS.U_ENERGY) if err_str is not None: raise ValueError(err_str) return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sort_func(self, key): """Specify order for attributes."""
# TIME sorts first; MODEL and SOURCE sort last; all other keys sort
# alphabetically between the two groups.
if key == self._KEYS.TIME:
    return 'aaa'
if key == self._KEYS.MODEL:
    return 'zzy'
if key == self._KEYS.SOURCE:
    return 'zzz'
return key
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def by_resource_user_and_perm( cls, user_id, perm_name, resource_id, db_session=None ): """ return all instances by user name, perm name and resource id :param user_id: :param perm_name: :param resource_id: :param db_session: :return: """
db_session = get_db_session(db_session)
# Narrow by user, resource and permission name; the triple is unique,
# so first() returns the single matching row (or None).
query = db_session.query(cls.model).filter(cls.model.user_id == user_id)
query = query.filter(cls.model.resource_id == resource_id)
query = query.filter(cls.model.perm_name == perm_name)
return query.first()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tdSensor(self): """Get the next sensor while iterating. :return: a dict with the keys: protocol, model, id, datatypes. """
# The C API writes results into caller-allocated buffers/out-params.
protocol = create_string_buffer(20)
model = create_string_buffer(20)
sid = c_int()
datatypes = c_int()
self._lib.tdSensor(protocol, sizeof(protocol), model, sizeof(model),
                   byref(sid), byref(datatypes))
# Decode the C buffers into Python values.
return {'protocol': self._to_str(protocol),
        'model': self._to_str(model),
        'id': sid.value,
        'datatypes': datatypes.value}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tdSensorValue(self, protocol, model, sid, datatype): """Get the sensor value for a given sensor. :return: a dict with the keys: value, timestamp. """
# Out-params for the C call: a string buffer and a timestamp int.
value = create_string_buffer(20)
timestamp = c_int()
self._lib.tdSensorValue(protocol, model, sid, datatype,
                        value, sizeof(value), byref(timestamp))
return {'value': self._to_str(value),
        'timestamp': timestamp.value}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tdController(self): """Get the next controller while iterating. :return: a dict with the keys: id, type, name, available. """
# Out-params for the C call describing the next controller.
cid = c_int()
ctype = c_int()
name = create_string_buffer(255)
available = c_int()
self._lib.tdController(byref(cid), byref(ctype), name, sizeof(name),
                       byref(available))
return {'id': cid.value,
        'type': ctype.value,
        'name': self._to_str(name),
        'available': available.value}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ziggurat_model_init( user=None, group=None, user_group=None, group_permission=None, user_permission=None, user_resource_permission=None, group_resource_permission=None, resource=None, external_identity=None, *args, **kwargs ): """ This function handles attaching model to service if model has one specified as `_ziggurat_service`, Also attached a proxy object holding all model definitions that services might use :param args: :param kwargs: :param passwordmanager, the password manager to override default one :param passwordmanager_schemes, list of schemes for default passwordmanager to use :return: """
# Collect all model classes on a single proxy object that services can
# use to reference each other's models.
models = ModelProxy()
models.User = user
models.Group = group
models.UserGroup = user_group
models.GroupPermission = group_permission
models.UserPermission = user_permission
models.UserResourcePermission = user_resource_permission
models.GroupResourcePermission = group_resource_permission
models.Resource = resource
models.ExternalIdentity = external_identity
model_service_mapping = import_model_service_mappings()
# Install the password manager on the User class: either the one the
# caller supplied, or a default built from the requested schemes.
if kwargs.get("passwordmanager"):
    user.passwordmanager = kwargs["passwordmanager"]
else:
    user.passwordmanager = make_passwordmanager(
        kwargs.get("passwordmanager_schemes")
    )
for name, cls in models.items():
    # If the model has services mapped, attach the model class and the
    # shared proxy to each of its services.
    services = model_service_mapping.get(name, [])
    for service in services:
        setattr(service, "model", cls)
        setattr(service, "models_proxy", models)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def messages(request, year=None, month=None, day=None, template="gnotty/messages.html"): """ Show messages for the given query or day. """
# NOTE(review): request.REQUEST (combined GET/POST) was removed in
# Django 1.9 -- presumably this targets an older Django; verify.
query = request.REQUEST.get("q")
prev_url, next_url = None, None
messages = IRCMessage.objects.all()
if hide_joins_and_leaves(request):
    messages = messages.filter(join_or_leave=False)
if query:
    # Free-text search across message bodies and nicknames.
    search = Q(message__icontains=query) | Q(nickname__icontains=query)
    messages = messages.filter(search).order_by("-message_time")
elif year and month and day:
    # Single-day archive with previous/next day navigation links.
    messages = messages.filter(message_time__year=year,
                               message_time__month=month,
                               message_time__day=day)
    day_delta = timedelta(days=1)
    this_date = date(int(year), int(month), int(day))
    prev_date = this_date - day_delta
    next_date = this_date + day_delta
    prev_url = reverse("gnotty_day", args=prev_date.timetuple()[:3])
    next_url = reverse("gnotty_day", args=next_date.timetuple()[:3])
else:
    # Neither a query nor a full date: redirect to the current year.
    return redirect("gnotty_year", year=datetime.now().year)
context = dict(settings)
context["messages"] = messages
context["prev_url"] = prev_url
context["next_url"] = next_url
return render(request, template, context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_expired_locks(self): """ Deletes all expired mutex locks if a ttl is provided. """
ttl_seconds = self.get_mutex_ttl_seconds()
# A None ttl means locks never expire; otherwise purge any lock row
# older than the ttl.
if ttl_seconds is not None:
    DBMutex.objects.filter(creation_time__lte=timezone.now() - timedelta(seconds=ttl_seconds)).delete()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def start(self): """ Acquires the db mutex lock. Takes the necessary steps to delete any stale locks. Throws a DBMutexError if it can't acquire the lock. """
# Delete any expired locks first self.delete_expired_locks() try: with transaction.atomic(): self.lock = DBMutex.objects.create(lock_id=self.lock_id) except IntegrityError: raise DBMutexError('Could not acquire lock: {0}'.format(self.lock_id))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stop(self): """ Releases the db mutex lock. Throws an error if the lock was released before the function finished. """
# If our lock row has vanished, it expired (ttl cleanup) before the
# protected work finished -- report that as a timeout.
if not DBMutex.objects.filter(id=self.lock.id).exists():
    raise DBMutexTimeoutError('Lock {0} expired before function completed'.format(self.lock_id))
else:
    self.lock.delete()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decorate_callable(self, func): """ Decorates a function with the db_mutex decorator by using this class as a context manager around it. """
def decorate_callable(self, func):
    """Wrap ``func`` so the db mutex is held for the duration of the call.

    When ``suppress_acquisition_exceptions`` is set, a failed acquisition
    is logged instead of raised (and the wrapper returns ``None``).
    """
    @functools.wraps(func)
    def inner(*args, **kwargs):
        try:
            with self:
                return func(*args, **kwargs)
        except DBMutexError as err:
            if not self.suppress_acquisition_exceptions:
                raise err
            LOG.error(err)
    return inner
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def groupfinder(userid, request): """ Default groupfinder implementaion for pyramid applications :param userid: :param request: :return: """
def groupfinder(userid, request):
    """Default Pyramid groupfinder.

    Maps the authenticated user's groups to principal strings of the
    form ``group:<id>``; returns an empty list when unauthenticated.
    """
    user = getattr(request, "user", None)
    if userid and user:
        return ["group:%s" % group.id for group in user.groups]
    return []
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def apply_repulsion(repulsion, nodes, barnes_hut_optimize=False, region=None, barnes_hut_theta=1.2): """ Iterate through the nodes or edges and apply the forces directly to the node objects. """
def apply_repulsion(repulsion, nodes, barnes_hut_optimize=False, region=None,
                    barnes_hut_theta=1.2):
    """Apply repulsion forces directly to the node objects.

    Without Barnes-Hut, every unordered pair of nodes is processed once;
    with it, the pairwise work is delegated to the region tree.
    """
    if barnes_hut_optimize:
        for node in nodes:
            region.apply_force(node, repulsion, barnes_hut_theta)
    else:
        # Each unordered pair exactly once: (nodes[i], nodes[j]) with j < i.
        for i, node_a in enumerate(nodes):
            for node_b in nodes[:i]:
                repulsion.apply_node_to_node(node_a, node_b)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def apply_gravity(repulsion, nodes, gravity, scaling_ratio): """ Iterate through the nodes or edges and apply the gravity directly to the node objects. """
def apply_gravity(repulsion, nodes, gravity, scaling_ratio):
    """Apply the gravity force directly to every node object.

    The gravity strength is normalized by ``scaling_ratio`` so it stays
    comparable to the repulsion forces.
    """
    # Idiomatic iteration instead of range(len(...)); the division is
    # loop-invariant, so hoist it out of the loop.
    scaled_gravity = gravity / scaling_ratio
    for node in nodes:
        repulsion.apply_gravitation(node, scaled_gravity)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def by_external_id_and_provider(cls, external_id, provider_name, db_session=None): """ Returns ExternalIdentity instance based on search params :param external_id: :param provider_name: :param db_session: :return: ExternalIdentity """
db_session = get_db_session(db_session)
# (external_id, provider_name) identifies a single external identity.
query = db_session.query(cls.model)
query = query.filter(cls.model.external_id == external_id)
query = query.filter(cls.model.provider_name == provider_name)
return query.first()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def user_by_external_id_and_provider( cls, external_id, provider_name, db_session=None ): """ Returns User instance based on search params :param external_id: :param provider_name: :param db_session: :return: User """
db_session = get_db_session(db_session)
# Query the User model, constrained by the external-identity row.
query = db_session.query(cls.models_proxy.User)
query = query.filter(cls.model.external_id == external_id)
query = query.filter(cls.model.provider_name == provider_name)
# Join condition: tie the identity row to its local user.
query = query.filter(cls.models_proxy.User.id == cls.model.local_user_id)
return query.first()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def by_user_and_perm(cls, user_id, perm_name, db_session=None): """ return by user and permission name :param user_id: :param perm_name: :param db_session: :return: """
db_session = get_db_session(db_session)
# (user_id, perm_name) is unique, so first() yields the single match.
query = db_session.query(cls.model).filter(cls.model.user_id == user_id)
query = query.filter(cls.model.perm_name == perm_name)
return query.first()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def node_is_subclass(cls, *subclass_names): """Checks if cls node has parent with subclass_name."""
# Only class definitions and instances carry a meaningful bases list.
if not isinstance(cls, (ClassDef, Instance)):
    return False
# if cls.bases == YES:
#     return False
for base_cls in cls.bases:
    try:
        for inf in base_cls.inferred():  # pragma no branch
            if inf.qname() in subclass_names:
                return True
            # Recurse so indirect bases are found too.
            if inf != cls and node_is_subclass(  # pragma no branch
                    inf, *subclass_names):
                # check up the hierarchy in case we are a subclass of
                # a subclass of a subclass ...
                return True
    except InferenceError:  # pragma no cover
        # Astroid could not infer this base; try the next one.
        continue
return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_field_method(node): """Checks if a call to a field instance method is valid. A call is valid if the call is a method of the underlying type. So, in a StringField the methods from str are valid, in a ListField the methods from list are
name = node.attrname
parent = node.last_child()
inferred = safe_infer(parent)
if not inferred:
    return False
# The call is valid when the attribute exists on the Python type that
# underlies the mongoengine field (e.g. str methods on a StringField).
for cls_name, inst in FIELD_TYPES.items():
    if node_is_instance(inferred, cls_name) and hasattr(inst, name):
        return True
return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_node_parent_class(node): """Supposes that node is a mongoengine field in a class and tries to get its parent class"""
# Walk up the AST until the enclosing class definition is reached.
# NOTE(review): implicitly returns None when no ClassDef encloses node.
while node.parent:  # pragma no branch
    if isinstance(node, ClassDef):
        return node
    node = node.parent
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_field_definition(node): """"node is a class attribute that is a mongoengine. Returns the definition statement for the attribute """
name = node.attrname
cls = get_node_parent_class(node)
# lookup() returns (scope, assignments); the first assignment's
# enclosing statement is the attribute's definition.
definition = cls.lookup(name)[1][0].statement()
return definition
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_field_embedded_doc(node): """Returns de ClassDef for the related embedded document in a embedded document field."""
definition = get_field_definition(node)
# The embedded document class is the last argument of the field call.
cls_name = definition.last_child().last_child()
cls = next(cls_name.infer())
return cls
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def node_is_embedded_doc_attr(node): """Checks if a node is a valid field or method in a embedded document. """
embedded_doc = get_field_embedded_doc(node.last_child())
name = node.attrname
try:
    # Valid when the name resolves on the embedded document class.
    r = bool(embedded_doc.lookup(name)[1][0])
except IndexError:
    # No assignment found for the name -> not a valid attribute.
    r = False
return r
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _dispatcher(self, connection, event): """ This is the method in ``SimpleIRCClient`` that all IRC events get passed through. Here we map events to our own custom event handlers, and call them. """
# Let the base IRC client dispatch first, then fan the event out to
# every custom handler registered for this event type.
super(BaseBot, self)._dispatcher(connection, event)
for handler in self.events[event.eventtype()]:
    handler(self, connection, event)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def message_channel(self, message): """ We won't receive our own messages, so log them manually. """
# IRC servers do not echo our own messages back, so log them manually
# (no event object is available for our own messages).
self.log(None, message)
super(BaseBot, self).message_channel(message)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def on_pubmsg(self, connection, event): """ Log any public messages, and also handle the command event. """
def on_pubmsg(self, connection, event):
    """Log any public messages, and also handle the command event.

    The first word of a message is treated as a potential command name;
    the remaining words become the command's arguments.
    """
    for message in event.arguments():
        self.log(event, message)
        # BUG FIX: ``filter(None, ...)`` returns an iterator on Python 3,
        # which has no ``pop``; a list comprehension works on 2 and 3.
        command_args = [arg for arg in message.split() if arg]
        if not command_args:
            # Nothing but whitespace -- no command to dispatch.
            continue
        command_name = command_args.pop(0)
        for handler in self.events["command"]:
            if handler.event.args["command"] == command_name:
                self.handle_command_event(event, handler, command_args)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def handle_command_event(self, event, command, args): """ Command handler - treats each word in the message that triggered the command as an argument to the command, and does some validation to ensure that the number of arguments match. """
# NOTE(review): inspect.getargspec is deprecated and removed in Python
# 3.11 -- presumably this codebase targets Python 2; verify before
# porting.
argspec = getargspec(command)
num_all_args = len(argspec.args) - 2  # Ignore self/event args
num_pos_args = num_all_args - len(argspec.defaults or [])
if num_pos_args <= len(args) <= num_all_args:
    # Arity matches: run the command handler.
    response = command(self, event, *args)
elif num_all_args == num_pos_args:
    # No optional args: report the exact required count.
    s = "s are" if num_all_args != 1 else " is"
    response = "%s arg%s required" % (num_all_args, s)
else:
    # Optional args present: report the accepted range.
    bits = (num_pos_args, num_all_args)
    response = "between %s and %s args are required" % bits
# Address the response to the user who triggered the command.
response = "%s: %s" % (self.get_nickname(event), response)
self.message_channel(response)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def handle_timer_event(self, handler): """ Runs each timer handler in a separate greenlet thread. """
# Each timer handler loops forever in its own greenlet, sleeping the
# configured number of seconds between invocations.
while True:
    handler(self)
    sleep(handler.event.args["seconds"])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def handle_webhook_event(self, environ, url, params): """ Webhook handler - each handler for the webhook event takes an initial pattern argument for matching the URL requested. Here we match the URL to the pattern for each webhook handler, and bail out if it returns a response. """
# Try each webhook handler in turn; an empty urlpattern matches any
# URL. The first handler returning a truthy response wins.
for handler in self.events["webhook"]:
    urlpattern = handler.event.args["urlpattern"]
    if not urlpattern or match(urlpattern, url):
        response = handler(self, environ, url, params)
        if response:
            return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def DeviceFactory(id, lib=None): """Create the correct device instance based on device type and return it. :return: a :class:`Device` or :class:`DeviceGroup` instance. """
lib = lib or Library()
# Group devices get the richer DeviceGroup wrapper; everything else is
# a plain Device.
if lib.tdGetDeviceType(id) == const.TELLSTICK_TYPE_GROUP:
    return DeviceGroup(id, lib=lib)
return Device(id, lib=lib)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def process_callback(self, block=True): """Dispatch a single callback in the current thread. :param boolean block: If True, blocks waiting for a callback to come. :return: True if a callback was processed; otherwise False. """
try:
    # Blocks (when requested) until a queued callback is available.
    (callback, args) = self._queue.get(block=block)
    try:
        callback(*args)
    finally:
        # Always mark the item done so queue.join() cannot deadlock,
        # even when the callback raises.
        self._queue.task_done()
except queue.Empty:
    return False
return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def devices(self): """Return all known devices. :return: list of :class:`Device` or :class:`DeviceGroup` instances. """
devices = []
count = self.lib.tdGetNumberOfDevices()
for i in range(count):
    # DeviceFactory picks Device vs DeviceGroup based on device type.
    device = DeviceFactory(self.lib.tdGetDeviceId(i), lib=self.lib)
    devices.append(device)
return devices
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sensors(self): """Return all known sensors. :return: list of :class:`Sensor` instances. """
sensors = []
try:
    # tdSensor iterates internally; it signals exhaustion by raising
    # DEVICE_NOT_FOUND, which we treat as normal termination.
    while True:
        sensor = self.lib.tdSensor()
        sensors.append(Sensor(lib=self.lib, **sensor))
except TelldusError as e:
    if e.error != const.TELLSTICK_ERROR_DEVICE_NOT_FOUND:
        raise
return sensors
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def controllers(self): """Return all known controllers. Requires Telldus core library version >= 2.1.2. :return: list of :class:`Controller` instances. """
controllers = []
try:
    # tdController iterates internally; it signals exhaustion by
    # raising NOT_FOUND, which we treat as normal termination.
    while True:
        controller = self.lib.tdController()
        # Drop fields the Controller constructor does not accept.
        del controller["name"]
        del controller["available"]
        controllers.append(Controller(lib=self.lib, **controller))
except TelldusError as e:
    if e.error != const.TELLSTICK_ERROR_NOT_FOUND:
        raise
return controllers
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_device(self, name, protocol, model=None, **parameters): """Add a new device. :return: a :class:`Device` or :class:`DeviceGroup` instance. """
def add_device(self, name, protocol, model=None, **parameters):
    """Add a new device.

    On any failure during configuration the half-created device is
    removed (best effort) and the original exception is re-raised.

    :return: a :class:`Device` or :class:`DeviceGroup` instance.
    """
    device = Device(self.lib.tdAddDevice(), lib=self.lib)
    try:
        device.name = name
        device.protocol = protocol
        if model:
            device.model = model
        for key, value in parameters.items():
            device.set_parameter(key, value)
        # Return correct type
        return DeviceFactory(device.id, lib=self.lib)
    except Exception:
        import sys
        exc_info = sys.exc_info()
        try:
            device.remove()
        except Exception:
            # BUG FIX: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt. Cleanup stays best-effort;
            # the original error is re-raised below.
            pass
        # Re-raise the original exception with its traceback, in a way
        # that works on both Python 3 and Python 2.
        if "with_traceback" in dir(Exception):
            raise exc_info[0].with_traceback(exc_info[1], exc_info[2])
        else:
            exec("raise exc_info[0], exc_info[1], exc_info[2]")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_group(self, name, devices): """Add a new device group. :return: a :class:`DeviceGroup` instance. """
def add_group(self, name, devices):
    """Add a new device group.

    :return: a :class:`DeviceGroup` instance.
    """
    # A group is just a device using the special "group" protocol.
    group = self.add_device(name, "group")
    group.add_to_group(devices)
    return group
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connect_controller(self, vid, pid, serial): """Connect a controller."""
# Thin pass-through to the native library.
self.lib.tdConnectTellStickController(vid, pid, serial)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def disconnect_controller(self, vid, pid, serial): """Disconnect a controller."""
# Thin pass-through to the native library.
self.lib.tdDisconnectTellStickController(vid, pid, serial)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parameters(self): """Get dict with all set parameters."""
parameters = {}
for name in self.PARAMETERS:
    try:
        parameters[name] = self.get_parameter(name)
    except AttributeError:
        # Parameter not set on this device; skip it.
        pass
return parameters
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_parameter(self, name): """Get a parameter."""
# Use an improbable sentinel to distinguish "unset" from a real value,
# since the C API only supports a default-value fallback.
default_value = "$%!)(INVALID)(!%$"
value = self.lib.tdGetDeviceParameter(self.id, name, default_value)
if value == default_value:
    raise AttributeError(name)
return value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_parameter(self, name, value): """Set a parameter."""
# The C API only accepts string values, so stringify unconditionally.
self.lib.tdSetDeviceParameter(self.id, name, str(value))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def devices_in_group(self): """Fetch list of devices in group."""
try:
    devices = self.get_parameter('devices')
except AttributeError:
    # No 'devices' parameter set -> empty group.
    return []
ctor = DeviceFactory
# The parameter is a comma-separated list of device ids; skip blanks.
return [ctor(int(x), lib=self.lib) for x in devices.split(',') if x]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _prepPointsForSegments(points): """ Move any off curves at the end of the contour to the beginning of the contour. This makes segmentation easier. """
while 1: point = points[-1] if point.segmentType: break else: point = points.pop() points.insert(0, point) continue break
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _reversePoints(points): """ Reverse the points. This differs from the reversal point pen in RoboFab in that it doesn't worry about maintaining the start point position. That has no benefit within the context of this module. """
# copy the points points = _copyPoints(points) # find the first on curve type and recycle # it for the last on curve type firstOnCurve = None for index, point in enumerate(points): if point.segmentType is not None: firstOnCurve = index break lastSegmentType = points[firstOnCurve].segmentType # reverse the points points = reversed(points) # work through the reversed remaining points final = [] for point in points: segmentType = point.segmentType if segmentType is not None: point.segmentType = lastSegmentType lastSegmentType = segmentType final.append(point) # move any offcurves at the end of the points # to the start of the points _prepPointsForSegments(final) # done return final
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _convertPointsToSegments(points, willBeReversed=False): """ Compile points into InputSegment objects. """
# get the last on curve previousOnCurve = None for point in reversed(points): if point.segmentType is not None: previousOnCurve = point.coordinates break assert previousOnCurve is not None # gather the segments offCurves = [] segments = [] for point in points: # off curve, hold. if point.segmentType is None: offCurves.append(point) else: segment = InputSegment( points=offCurves + [point], previousOnCurve=previousOnCurve, willBeReversed=willBeReversed ) segments.append(segment) offCurves = [] previousOnCurve = point.coordinates assert not offCurves return segments
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _tValueForPointOnCubicCurve(point, cubicCurve, isHorizontal=0): """ Finds a t value on a curve from a point. The point must originally be a point on the curve. This will only back trace the t value, needed to split the curve in parts """
pt1, pt2, pt3, pt4 = cubicCurve
a, b, c, d = bezierTools.calcCubicParameters(pt1, pt2, pt3, pt4)
# Solve the single-axis cubic for the requested coordinate.
solutions = bezierTools.solveCubic(a[isHorizontal], b[isHorizontal],
                                   c[isHorizontal],
                                   d[isHorizontal] - point[isHorizontal])
# Keep only t values that lie on the curve segment.
solutions = [t for t in solutions if 0 <= t < 1]
if not solutions and not isHorizontal:
    # it can happen that a horizontal line doesn't intersect;
    # try the vertical axis instead
    return _tValueForPointOnCubicCurve(point, (pt1, pt2, pt3, pt4), isHorizontal=1)
if len(solutions) > 1:
    # Multiple candidates: keep the t whose curve point is closest to
    # the requested point.
    intersectionLenghts = {}
    for t in solutions:
        tp = _getCubicPoint(t, pt1, pt2, pt3, pt4)
        dist = _distance(tp, point)
        intersectionLenghts[dist] = t
    minDist = min(intersectionLenghts.keys())
    solutions = [intersectionLenghts[minDist]]
return solutions
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _scalePoints(points, scale=1, convertToInteger=True): """ Scale points and optionally convert them to integers. """
if convertToInteger: points = [ (int(round(x * scale)), int(round(y * scale))) for (x, y) in points ] else: points = [(x * scale, y * scale) for (x, y) in points] return points
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _scaleSinglePoint(point, scale=1, convertToInteger=True): """ Scale a single point """
x, y = point if convertToInteger: return int(round(x * scale)), int(round(y * scale)) else: return (x * scale, y * scale)
def _estimateCubicCurveLength(pt0, pt1, pt2, pt3, precision=10):
    """
    Estimate the length of this curve by sampling `precision` + 1
    points along it and summing the lengths of the flat bits
    between consecutive samples.
    """
    step = 1.0 / precision
    samples = [
        _getCubicPoint(i * step, pt0, pt1, pt2, pt3)
        for i in range(precision + 1)
    ]
    # accumulate the straight-line distance between neighbours
    return sum(_distance(a, b) for a, b in zip(samples, samples[1:]))
def split(self, tValues):
    """
    Split the segment at each of the given t values.

    Returns a list of sub-segments; cubic splitting is delegated to
    bezierTools, lines are interpolated directly. "qcurve" and any
    other segment type are not supported.
    """
    segmentType = self.segmentType
    if segmentType == "curve":
        startPoint = self.previousOnCurve
        offCurve1 = self.points[0].coordinates
        offCurve2 = self.points[1].coordinates
        endPoint = self.points[2].coordinates
        return bezierTools.splitCubicAtT(startPoint, offCurve1, offCurve2, endPoint, *tValues)
    if segmentType == "line":
        startX, startY = self.previousOnCurve
        endX, endY = self.points[0].coordinates
        deltaX = endX - startX
        deltaY = endY - startY
        # walk the line, cutting it at every t value
        pieces = []
        current = startX, startY
        for t in tValues:
            cut = (startX + deltaX * t, startY + deltaY * t)
            pieces.append([current, cut])
            current = cut
        pieces.append([current, (endX, endY)])
        return pieces
    if segmentType == "qcurve":
        raise NotImplementedError
    raise NotImplementedError
def getData(self):
    """
    Return a list of normalized InputPoint objects
    for the contour drawn with this pen.
    """
    points = self._points
    # 1. make sure there is at least one on-curve point
    haveOnCurve = any(point.segmentType is not None for point in points)
    # 2. normalize the position of the off-curves in the list
    if haveOnCurve:
        _prepPointsForSegments(points)
    # 3. collapse a duplicated point at the start/end of the contour
    first = points[0]
    last = points[-1]
    if first.segmentType is not None and last.segmentType is not None:
        if first.coordinates == last.coordinates:
            if first.segmentType in ("line", "move"):
                del points[0]
            else:
                raise AssertionError("Unhandled point type sequence")
    # done
    return points
def reCurveFromEntireInputContour(self, inputContour):
    """
    Match if entire input contour matches entire output contour,
    allowing for different start point.

    On a full match this contour's flat segments are replaced with
    copies of the input contour's original (curved) segments, the
    matched input segments are marked as used, the contour direction
    is taken from the input, and True is returned. Otherwise the
    contour is left untouched and False is returned.
    """
    # flatten the input in the same direction as this contour
    if self.clockwise:
        inputFlat = inputContour.clockwiseFlat
    else:
        inputFlat = inputContour.counterClockwiseFlat
    # collect every flat point of this (output) contour
    outputFlat = []
    for segment in self.segments:
        # XXX this could be expensive
        assert segment.segmentType == "flat"
        outputFlat += segment.points
    # test lengths: a differing point count can never match
    haveMatch = False
    if len(inputFlat) == len(outputFlat):
        if inputFlat == outputFlat:
            haveMatch = True
        else:
            # the contours may be identical but start at a different
            # point; try every rotation that aligns the input start
            inputStart = inputFlat[0]
            if inputStart in outputFlat:
                # there should be only one occurrence of the point
                # but handle it just in case
                if outputFlat.count(inputStart) > 1:
                    startIndexes = [index for index, point in enumerate(outputFlat) if point == inputStart]
                else:
                    startIndexes = [outputFlat.index(inputStart)]
                # slice and dice to test possible orders
                for startIndex in startIndexes:
                    test = outputFlat[startIndex:] + outputFlat[:startIndex]
                    if inputFlat == test:
                        haveMatch = True
                        break
    if haveMatch:
        # clear out the flat points
        self.segments = []
        # replace with the appropriate (curved) segments from the
        # input, copied point by point into output types
        if self.clockwise:
            inputSegments = inputContour.clockwiseSegments
        else:
            inputSegments = inputContour.counterClockwiseSegments
        for inputSegment in inputSegments:
            self.segments.append(
                OutputSegment(
                    segmentType=inputSegment.segmentType,
                    points=[
                        OutputPoint(
                            coordinates=point.coordinates,
                            segmentType=point.segmentType,
                            smooth=point.smooth,
                            name=point.name,
                            kwargs=point.kwargs
                        )
                        for point in inputSegment.points
                    ],
                    final=True
                )
            )
            # mark the input segment as consumed so it is not reused
            inputSegment.used = True
        # reset the direction of the final contour
        self.clockwise = inputContour.clockwise
        return True
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _is_custom_qs_manager(funcdef): """Checks if a function definition is a queryset manager created with the @queryset_manager decorator."""
decors = getattr(funcdef, 'decorators', None) if decors: for dec in decors.get_children(): try: if dec.name == 'queryset_manager': # pragma no branch return True except AttributeError: continue return False