Dataset schema (column: type, observed range)
_id               string, 2–7 chars
title             string, 1–88 chars
partition         string, 3 classes
text              string, 75–19.8k chars
language          string, 1 class
meta_information  dict
q11900
globalsfilter
train
def globalsfilter(input_dict, check_all=False, filters=None,
                  exclude_private=None, exclude_capitalized=None,
                  exclude_uppercase=None, exclude_unsupported=None,
                  excluded_names=None):
    """Keep only objects that can be pickled"""
    # Default to an empty sequence so the membership test below
    # cannot fail when no excluded names are given
    excluded_names = excluded_names or []
    output_dict = {}
    for key, value in list(input_dict.items()):
        excluded = (exclude_private and key.startswith('_')) or \
                   (exclude_capitalized and key[0].isupper()) or \
                   (exclude_uppercase and key.isupper() and
                    len(key) > 1 and not key[1:].isdigit()) or \
                   (key in excluded_names) or \
                   (exclude_unsupported and
                    not is_supported(value, check_all=check_all,
                                     filters=filters))
        if not excluded:
            output_dict[key] = value
    return output_dict
python
{ "resource": "" }
q11901
get_supported_types
train
def get_supported_types():
    """
    Return a dictionary containing the type lists supported by the
    namespace browser.

    Note: If you update this list, don't forget to update
    variableexplorer.rst in spyder-docs
    """
    from datetime import date, timedelta
    editable_types = [int, float, complex, list, set, dict, tuple, date,
                      timedelta] + list(TEXT_TYPES) + list(INT_TYPES)
    try:
        from numpy import ndarray, matrix, generic
        editable_types += [ndarray, matrix, generic]
    except Exception:
        pass
    try:
        from pandas import DataFrame, Series, DatetimeIndex
        editable_types += [DataFrame, Series, DatetimeIndex]
    except Exception:
        pass
    picklable_types = editable_types[:]
    try:
        from spyder.pil_patch import Image
        editable_types.append(Image.Image)
    except Exception:
        pass
    return dict(picklable=picklable_types, editable=editable_types)
python
{ "resource": "" }
q11902
SpyderKernel._pdb_frame
train
def _pdb_frame(self):
    """Return current Pdb frame if there is any"""
    if self._pdb_obj is not None and self._pdb_obj.curframe is not None:
        return self._pdb_obj.curframe
python
{ "resource": "" }
q11903
SpyderKernel.get_namespace_view
train
def get_namespace_view(self):
    """
    Return the namespace view

    This is a dictionary with the following structure

    {'a': {'color': '#800000', 'size': 1, 'type': 'str', 'view': '1'}}

    Here:
    * 'a' is the variable name
    * 'color' is the color used to show it
    * 'size' and 'type' are self-evident
    * and 'view' is its value or the text shown in the last column
    """
    from spyder_kernels.utils.nsview import make_remote_view

    settings = self.namespace_view_settings
    if settings:
        ns = self._get_current_namespace()
        view = repr(make_remote_view(ns, settings, EXCLUDED_NAMES))
        return view
    else:
        return repr(None)
python
{ "resource": "" }
q11904
SpyderKernel.get_var_properties
train
def get_var_properties(self):
    """
    Get some properties of the variables in the current namespace
    """
    from spyder_kernels.utils.nsview import get_remote_data

    settings = self.namespace_view_settings
    if settings:
        ns = self._get_current_namespace()
        data = get_remote_data(ns, settings, mode='editable',
                               more_excluded_names=EXCLUDED_NAMES)

        properties = {}
        for name, value in list(data.items()):
            properties[name] = {
                'is_list': isinstance(value, (tuple, list)),
                'is_dict': isinstance(value, dict),
                'is_set': isinstance(value, set),
                'len': self._get_len(value),
                'is_array': self._is_array(value),
                'is_image': self._is_image(value),
                'is_data_frame': self._is_data_frame(value),
                'is_series': self._is_series(value),
                'array_shape': self._get_array_shape(value),
                'array_ndim': self._get_array_ndim(value)
            }

        return repr(properties)
    else:
        return repr(None)
python
{ "resource": "" }
q11905
SpyderKernel.send_spyder_msg
train
def send_spyder_msg(self, spyder_msg_type, content=None, data=None):
    """
    Publish custom messages to the Spyder frontend.

    Parameters
    ----------
    spyder_msg_type: str
        The spyder message type
    content: dict
        The (JSONable) content of the message
    data: any
        Any object that is serializable by cloudpickle (should be most
        things). Will arrive as cloudpickled bytes in `.buffers[0]`.
    """
    import cloudpickle

    if content is None:
        content = {}
    content['spyder_msg_type'] = spyder_msg_type
    msg = self.session.send(
        self.iopub_socket,
        'spyder_msg',
        content=content,
        buffers=[cloudpickle.dumps(data, protocol=PICKLE_PROTOCOL)],
        parent=self._parent_header,
    )
    self.log.debug(msg)
python
{ "resource": "" }
q11906
SpyderKernel.set_value
train
def set_value(self, name, value, PY2_frontend):
    """Set the value of a variable"""
    import cloudpickle
    ns = self._get_reference_namespace(name)

    # We send serialized values in a list of one element
    # from Spyder to the kernel, to be able to send them
    # at all in Python 2
    svalue = value[0]

    # We need to convert svalue to bytes if the frontend
    # runs in Python 2 and the kernel runs in Python 3
    if PY2_frontend and not PY2:
        svalue = bytes(svalue, 'latin-1')

    # Deserialize and set value in namespace
    dvalue = cloudpickle.loads(svalue)
    ns[name] = dvalue
    self.log.debug(ns)
python
{ "resource": "" }
q11907
SpyderKernel.load_data
train
def load_data(self, filename, ext):
    """Load data from filename"""
    from spyder_kernels.utils.iofuncs import iofunctions
    from spyder_kernels.utils.misc import fix_reference_name

    glbs = self._mglobals()

    load_func = iofunctions.load_funcs[ext]
    data, error_message = load_func(filename)

    if error_message:
        return error_message

    for key in list(data.keys()):
        new_key = fix_reference_name(key, blacklist=list(glbs.keys()))
        if new_key != key:
            data[new_key] = data.pop(key)

    try:
        glbs.update(data)
    except Exception as error:
        return str(error)

    return None
python
{ "resource": "" }
q11908
SpyderKernel.save_namespace
train
def save_namespace(self, filename):
    """Save namespace into filename"""
    from spyder_kernels.utils.nsview import get_remote_data
    from spyder_kernels.utils.iofuncs import iofunctions

    ns = self._get_current_namespace()
    settings = self.namespace_view_settings
    data = get_remote_data(ns, settings, mode='picklable',
                           more_excluded_names=EXCLUDED_NAMES).copy()
    return iofunctions.save(data, filename)
python
{ "resource": "" }
q11909
SpyderKernel.publish_pdb_state
train
def publish_pdb_state(self):
    """
    Publish Variable Explorer state and Pdb step through
    send_spyder_msg.
    """
    if self._pdb_obj and self._do_publish_pdb_state:
        state = dict(namespace_view=self.get_namespace_view(),
                     var_properties=self.get_var_properties(),
                     step=self._pdb_step)
        self.send_spyder_msg('pdb_state', content={'pdb_state': state})
    self._do_publish_pdb_state = True
python
{ "resource": "" }
q11910
SpyderKernel.is_defined
train
def is_defined(self, obj, force_import=False):
    """Return True if object is defined in current namespace"""
    from spyder_kernels.utils.dochelpers import isdefined

    ns = self._get_current_namespace(with_magics=True)
    return isdefined(obj, force_import=force_import, namespace=ns)
python
{ "resource": "" }
q11911
SpyderKernel._get_current_namespace
train
def _get_current_namespace(self, with_magics=False):
    """
    Return current namespace

    This is globals() if not debugging, or a dictionary containing
    both locals() and globals() for current frame when debugging
    """
    ns = {}
    glbs = self._mglobals()

    if self._pdb_frame is None:
        ns.update(glbs)
    else:
        ns.update(glbs)
        ns.update(self._pdb_locals)

    # Add magics to ns so we can show help about them on the Help
    # plugin
    if with_magics:
        line_magics = self.shell.magics_manager.magics['line']
        cell_magics = self.shell.magics_manager.magics['cell']
        ns.update(line_magics)
        ns.update(cell_magics)

    return ns
python
{ "resource": "" }
q11912
SpyderKernel._get_reference_namespace
train
def _get_reference_namespace(self, name):
    """
    Return namespace where reference name is defined

    It returns the globals() if reference has not yet been defined
    """
    glbs = self._mglobals()
    if self._pdb_frame is None:
        return glbs
    else:
        lcls = self._pdb_locals
        if name in lcls:
            return lcls
        else:
            return glbs
python
{ "resource": "" }
q11913
SpyderKernel._set_spyder_breakpoints
train
def _set_spyder_breakpoints(self, breakpoints):
    """Set all Spyder breakpoints in an active pdb session"""
    if not self._pdb_obj:
        return

    # Breakpoints come serialized from Spyder. We send them
    # in a list of one element to be able to send them at all
    # in Python 2
    serialized_breakpoints = breakpoints[0]
    breakpoints = pickle.loads(serialized_breakpoints)

    self._pdb_obj.set_spyder_breakpoints(breakpoints)
python
{ "resource": "" }
q11914
SpyderKernel._set_mpl_backend
train
def _set_mpl_backend(self, backend, pylab=False):
    """
    Set a backend for Matplotlib.

    backend: A parameter that can be passed to %matplotlib
             (e.g. 'inline' or 'tk').
    """
    import traceback
    from IPython.core.getipython import get_ipython

    generic_error = (
        "\n" + "="*73 + "\n"
        "NOTE: The following error appeared when setting "
        "your Matplotlib backend!!\n" + "="*73 + "\n\n"
        "{0}"
    )

    magic = 'pylab' if pylab else 'matplotlib'

    error = None
    try:
        get_ipython().run_line_magic(magic, backend)
    except RuntimeError as err:
        # This catches errors generated by ipykernel when
        # trying to set a backend. See issue 5541
        if "GUI eventloops" in str(err):
            import matplotlib
            previous_backend = matplotlib.get_backend()
            if backend not in previous_backend.lower():
                # Only inform about an error if the user selected backend
                # and the one set by Matplotlib are different. Else this
                # message is very confusing.
                error = (
                    "\n"
                    "NOTE: Spyder *can't* set your selected Matplotlib "
                    "backend because there is a previous backend already "
                    "in use.\n\n"
                    "Your backend will be {0}".format(previous_backend)
                )
            del matplotlib
        # This covers other RuntimeError's
        else:
            error = generic_error.format(traceback.format_exc())
    except Exception:
        error = generic_error.format(traceback.format_exc())

    self._mpl_backend_error = error
python
{ "resource": "" }
q11915
SpyderKernel._load_autoreload_magic
train
def _load_autoreload_magic(self):
    """Load %autoreload magic."""
    from IPython.core.getipython import get_ipython
    try:
        get_ipython().run_line_magic('reload_ext', 'autoreload')
        get_ipython().run_line_magic('autoreload', '2')
    except Exception:
        pass
python
{ "resource": "" }
q11916
SpyderKernel._load_wurlitzer
train
def _load_wurlitzer(self):
    """Load wurlitzer extension."""
    # Wurlitzer has no effect on Windows
    if os.name != 'nt':
        from IPython.core.getipython import get_ipython
        # Enclose this in a try/except because if it fails the
        # console will be totally unusable.
        # Fixes spyder-ide/spyder#8668
        try:
            get_ipython().run_line_magic('reload_ext', 'wurlitzer')
        except Exception:
            pass
python
{ "resource": "" }
q11917
import_spydercustomize
train
def import_spydercustomize():
    """Import our customizations into the kernel."""
    here = osp.dirname(__file__)
    parent = osp.dirname(here)
    customize_dir = osp.join(parent, 'customize')

    # Remove current directory from sys.path to prevent kernel
    # crashes when people name Python files or modules with
    # the same name as standard library modules.
    # See spyder-ide/spyder#8007
    while '' in sys.path:
        sys.path.remove('')

    # Import our customizations
    site.addsitedir(customize_dir)
    import spydercustomize

    # Remove our customize path from sys.path
    try:
        sys.path.remove(customize_dir)
    except ValueError:
        pass
python
{ "resource": "" }
q11918
varexp
train
def varexp(line):
    """
    Spyder's variable explorer magic

    Used to generate plots, histograms and images of the variables
    displayed on it.
    """
    ip = get_ipython()       #analysis:ignore
    funcname, name = line.split()
    try:
        import guiqwt.pyplot as pyplot
    except Exception:
        import matplotlib.pyplot as pyplot
    __fig__ = pyplot.figure()
    __items__ = getattr(pyplot, funcname[2:])(ip.user_ns[name])
    pyplot.show()
    del __fig__, __items__
python
{ "resource": "" }
q11919
fix_reference_name
train
def fix_reference_name(name, blacklist=None):
    """Return a syntax-valid Python reference name from an arbitrary name"""
    name = "".join(re.split(r'[^0-9a-zA-Z_]', name))
    while name and not re.match(r'([a-zA-Z]+[0-9a-zA-Z_]*)$', name):
        if not re.match(r'[a-zA-Z]', name[0]):
            name = name[1:]
            continue
    name = str(name)
    if not name:
        name = "data"
    if blacklist is not None and name in blacklist:
        get_new_name = lambda index: name + ('%03d' % index)
        index = 0
        while get_new_name(index) in blacklist:
            index += 1
        name = get_new_name(index)
    return name
python
{ "resource": "" }
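A quick usage sketch for fix_reference_name above (hypothetical inputs; assumes the function and its `re` import are available):

    fix_reference_name('2-col name')                # -> 'colname' (illegal chars and leading digit dropped)
    fix_reference_name('data', blacklist=['data'])  # -> 'data000' (numeric suffix appended on collision)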
q11920
post_mortem_excepthook
train
def post_mortem_excepthook(type, value, tb):
    """
    For post mortem exception handling, print a banner and enable post
    mortem debugging.
    """
    clear_post_mortem()
    ipython_shell = get_ipython()
    ipython_shell.showtraceback((type, value, tb))
    p = pdb.Pdb(ipython_shell.colors)

    if not type == SyntaxError:
        # wait for stderr to print (stderr.flush does not work in this case)
        time.sleep(0.1)
        _print('*' * 40)
        _print('Entering post mortem debugging...')
        _print('*' * 40)
        # add ability to move between frames
        p.send_initial_notification = False
        p.reset()
        frame = tb.tb_frame
        prev = frame
        while frame.f_back:
            prev = frame
            frame = frame.f_back
        frame = prev
        # wait for stdout to print
        time.sleep(0.1)
        p.interaction(frame, tb)
python
{ "resource": "" }
q11921
set_post_mortem
train
def set_post_mortem():
    """
    Enable the post mortem debugging excepthook.
    """
    def ipython_post_mortem_debug(shell, etype, evalue, tb,
                                  tb_offset=None):
        post_mortem_excepthook(etype, evalue, tb)
    ipython_shell = get_ipython()
    ipython_shell.set_custom_exc((Exception,), ipython_post_mortem_debug)
python
{ "resource": "" }
q11922
runcell
train
def runcell(cellname, filename):
    """
    Run a code cell from an editor as a file.

    Currently looks for code in an `ipython` property called `cell_code`.
    This property must be set by the editor prior to calling this function.
    This function deletes the contents of `cell_code` upon completion.

    Parameters
    ----------
    cellname : str
        Used as a reference in the history log of which cell was run
        with the function. This variable is not used.
    filename : str
        Needed to allow for proper traceback links.
    """
    try:
        filename = filename.decode('utf-8')
    except (UnicodeError, TypeError, AttributeError):
        # UnicodeError, TypeError --> eventually raised in Python 2
        # AttributeError --> systematically raised in Python 3
        pass
    ipython_shell = get_ipython()
    namespace = _get_globals()
    namespace['__file__'] = filename
    try:
        cell_code = ipython_shell.cell_code
    except AttributeError:
        _print("--Run Cell Error--\n"
               "Please use only through Spyder's Editor; "
               "shouldn't be called manually from the console")
        return

    # Trigger `post_execute` to exit the additional pre-execution.
    # See Spyder PR #7310.
    ipython_shell.events.trigger('post_execute')
    ipython_shell.run_cell(cell_code)
    namespace.pop('__file__')
    del ipython_shell.cell_code
python
{ "resource": "" }
q11923
UserModuleReloader.create_pathlist
train
def create_pathlist(self, initial_pathlist):
    """
    Add to pathlist Python library paths to be skipped from module
    reloading.
    """
    # Get standard installation paths
    try:
        paths = sysconfig.get_paths()
        standard_paths = [paths['stdlib'],
                          paths['purelib'],
                          paths['scripts'],
                          paths['data']]
    except Exception:
        standard_paths = []

    # Get user installation path
    # See Spyder issue 8776
    try:
        import site
        if getattr(site, 'getusersitepackages', False):
            # Virtualenvs don't have this function but
            # conda envs do
            user_path = [site.getusersitepackages()]
        elif getattr(site, 'USER_SITE', False):
            # However, it seems virtualenvs have this
            # constant
            user_path = [site.USER_SITE]
        else:
            user_path = []
    except Exception:
        user_path = []

    return initial_pathlist + standard_paths + user_path
python
{ "resource": "" }
q11924
UserModuleReloader.is_module_reloadable
train
def is_module_reloadable(self, module, modname):
    """Decide if a module is reloadable or not."""
    if self.has_cython:
        # Don't return cached inline compiled .PYX files
        return False
    else:
        if (self.is_module_in_pathlist(module) or
                self.is_module_in_namelist(modname)):
            return False
        else:
            return True
python
{ "resource": "" }
q11925
UserModuleReloader.is_module_in_pathlist
train
def is_module_in_pathlist(self, module):
    """Decide if a module can be reloaded or not according to its path."""
    modpath = getattr(module, '__file__', None)

    # Skip module according to different criteria
    if modpath is None:
        # *module* is a C module that is statically linked into the
        # interpreter. There is no way to know its path, so we
        # choose to ignore it.
        return True
    elif any([p in modpath for p in self.pathlist]):
        # We don't want to reload modules that belong to the
        # standard library or installed to site-packages,
        # just modules created by the user.
        return True
    elif not os.name == 'nt':
        # Module paths containing the strings below can be inherited
        # from the default Linux installation, Homebrew or the user
        # site-packages in a virtualenv.
        patterns = [r'^/usr/lib.*',
                    r'^/usr/local/lib.*',
                    r'^/usr/.*/dist-packages/.*',
                    r'^/home/.*/.local/lib.*',
                    r'^/Library/.*',
                    r'^/Users/.*/Library/.*',
                    r'^/Users/.*/.local/.*']
        if [p for p in patterns if re.search(p, modpath)]:
            return True
        else:
            return False
    else:
        return False
python
{ "resource": "" }
q11926
UserModuleReloader.activate_cython
train
def activate_cython(self):
    """
    Activate Cython support.

    We need to run this here because if the support is active, we
    don't need to run the UMR at all.
    """
    run_cython = os.environ.get("SPY_RUN_CYTHON") == "True"

    if run_cython:
        try:
            __import__('Cython')
            self.has_cython = True
        except Exception:
            pass

        if self.has_cython:
            # Import pyximport to enable Cython files support for
            # import statement
            import pyximport
            pyx_setup_args = {}

            # Add Numpy include dir to pyximport/distutils
            try:
                import numpy
                pyx_setup_args['include_dirs'] = numpy.get_include()
            except Exception:
                pass

            # Setup pyximport and enable Cython files reload
            pyximport.install(setup_args=pyx_setup_args,
                              reload_support=True)
python
{ "resource": "" }
q11927
UserModuleReloader.run
train
def run(self):
    """
    Delete user modules to force Python to deeply reload them

    Do not del modules which are considered as system modules, i.e.
    modules installed in subdirectories of Python interpreter's binary
    Do not del C modules
    """
    self.modnames_to_reload = []
    for modname, module in list(sys.modules.items()):
        if modname not in self.previous_modules:
            # Decide if a module can be reloaded or not
            if self.is_module_reloadable(module, modname):
                self.modnames_to_reload.append(modname)
                del sys.modules[modname]
            else:
                continue

    # Report reloaded modules
    if self.verbose and self.modnames_to_reload:
        modnames = self.modnames_to_reload
        _print("\x1b[4;33m%s\x1b[24m%s\x1b[0m"
               % ("Reloaded modules", ": " + ", ".join(modnames)))
python
{ "resource": "" }
q11928
get_matlab_value
train
def get_matlab_value(val):
    """
    Extract a value from a Matlab file

    From the oct2py project, see
    https://pythonhosted.org/oct2py/conversions.html
    """
    import numpy as np

    # Extract each item of a list.
    if isinstance(val, list):
        return [get_matlab_value(v) for v in val]

    # Ignore leaf objects.
    if not isinstance(val, np.ndarray):
        return val

    # Convert user defined classes.
    if hasattr(val, 'classname'):
        out = dict()
        for name in val.dtype.names:
            out[name] = get_matlab_value(val[name].squeeze().tolist())
        cls = type(val.classname, (object,), out)
        return cls()

    # Extract struct data.
    elif val.dtype.names:
        out = MatlabStruct()
        for name in val.dtype.names:
            out[name] = get_matlab_value(val[name].squeeze().tolist())
        val = out

    # Extract cells.
    elif val.dtype.kind == 'O':
        val = val.squeeze().tolist()
        if not isinstance(val, list):
            val = [val]
        val = get_matlab_value(val)

    # Compress singleton values.
    elif val.size == 1:
        val = val.item()

    # Compress empty values.
    elif val.size == 0:
        if val.dtype.kind in 'US':
            val = ''
        else:
            val = []

    return val
python
{ "resource": "" }
q11929
load_pickle
train
def load_pickle(filename):
    """Load a pickle file as a dictionary"""
    try:
        if pd:
            return pd.read_pickle(filename), None
        else:
            with open(filename, 'rb') as fid:
                data = pickle.load(fid)
            return data, None
    except Exception as err:
        return None, str(err)
python
{ "resource": "" }
q11930
load_json
train
def load_json(filename):
    """Load a json file as a dictionary"""
    try:
        if PY2:
            args = 'rb'
        else:
            args = 'r'
        with open(filename, args) as fid:
            data = json.load(fid)
        return data, None
    except Exception as err:
        return None, str(err)
python
{ "resource": "" }
q11931
load_dictionary
train
def load_dictionary(filename):
    """Load dictionary from .spydata file"""
    filename = osp.abspath(filename)
    old_cwd = getcwd()
    tmp_folder = tempfile.mkdtemp()
    os.chdir(tmp_folder)
    data = None
    error_message = None
    try:
        with tarfile.open(filename, "r") as tar:
            tar.extractall()
        pickle_filename = glob.glob('*.pickle')[0]
        # 'New' format (Spyder >=2.2 for Python 2 and Python 3)
        with open(pickle_filename, 'rb') as fdesc:
            data = pickle.loads(fdesc.read())
        saved_arrays = {}
        if load_array is not None:
            # Loading numpy arrays saved with np.save
            try:
                saved_arrays = data.pop('__saved_arrays__')
                for (name, index), fname in list(saved_arrays.items()):
                    arr = np.load(osp.join(tmp_folder, fname))
                    if index is None:
                        data[name] = arr
                    elif isinstance(data[name], dict):
                        data[name][index] = arr
                    else:
                        data[name].insert(index, arr)
            except KeyError:
                pass
    # Except AttributeError from e.g. trying to load function no longer present
    except (AttributeError, EOFError, ValueError) as error:
        error_message = to_text_string(error)
    # To ensure working dir gets changed back and temp dir wiped no matter what
    finally:
        os.chdir(old_cwd)
        try:
            shutil.rmtree(tmp_folder)
        except OSError as error:
            error_message = to_text_string(error)
    return data, error_message
python
{ "resource": "" }
q11932
getobj
train
def getobj(txt, last=False):
    """Return the last valid object name in string"""
    txt_end = ""
    for startchar, endchar in ["[]", "()"]:
        if txt.endswith(endchar):
            pos = txt.rfind(startchar)
            if pos:
                txt_end = txt[pos:]
                txt = txt[:pos]
    tokens = re.split(SYMBOLS, txt)
    token = None
    try:
        while token is None or re.match(SYMBOLS, token):
            token = tokens.pop()
        if token.endswith('.'):
            token = token[:-1]
        if token.startswith('.'):
            # Invalid object name
            return None
        if last:
            # XXX: remove this statement as well as the "last" argument
            token += txt[txt.rfind(token) + len(token)]
        token += txt_end
        if token:
            return token
    except IndexError:
        return None
python
{ "resource": "" }
q11933
getsource
train
def getsource(obj):
    """Wrapper around inspect.getsource"""
    try:
        try:
            src = to_text_string(inspect.getsource(obj))
        except TypeError:
            if hasattr(obj, '__class__'):
                src = to_text_string(inspect.getsource(obj.__class__))
            else:
                # Bindings like VTK or ITK require this case
                src = getdoc(obj)
        return src
    except (TypeError, IOError):
        return
python
{ "resource": "" }
q11934
getargs
train
def getargs(obj):
    """Get the names and default values of a function's arguments"""
    if inspect.isfunction(obj) or inspect.isbuiltin(obj):
        func_obj = obj
    elif inspect.ismethod(obj):
        func_obj = get_meth_func(obj)
    elif inspect.isclass(obj) and hasattr(obj, '__init__'):
        func_obj = getattr(obj, '__init__')
    else:
        return []
    if not hasattr(func_obj, 'func_code'):
        # Builtin: try to extract info from doc
        args = getargsfromdoc(func_obj)
        if args is not None:
            return args
        else:
            # Example: PyQt5
            return getargsfromdoc(obj)
    args, _, _ = inspect.getargs(func_obj.func_code)
    if not args:
        return getargsfromdoc(obj)

    # Supporting tuple arguments in def statement:
    for i_arg, arg in enumerate(args):
        if isinstance(arg, list):
            args[i_arg] = "(%s)" % ", ".join(arg)

    defaults = get_func_defaults(func_obj)
    if defaults is not None:
        for index, default in enumerate(defaults):
            args[index + len(args) - len(defaults)] += '=' + repr(default)
    if inspect.isclass(obj) or inspect.ismethod(obj):
        if len(args) == 1:
            return None
        if 'self' in args:
            args.remove('self')
    return args
python
{ "resource": "" }
q11935
escape_tex
train
def escape_tex(value):
    """
    Make text tex safe
    """
    newval = value
    for pattern, replacement in LATEX_SUBS:
        newval = pattern.sub(replacement, newval)
    return newval
python
{ "resource": "" }
q11936
classnameify
train
def classnameify(s):
    """
    Makes a classname
    """
    return ''.join(w if w in ACRONYMS else w.title() for w in s.split('_'))
python
{ "resource": "" }
q11937
packagenameify
train
def packagenameify(s):
    """
    Makes a package name
    """
    return ''.join(w if w in ACRONYMS else w.title() for w in s.split('.')[-1:])
python
{ "resource": "" }
q11938
get_args
train
def get_args():
    """
    Get and parse arguments.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description="Swift Navigation SBP Example.")
    parser.add_argument(
        "-s",
        "--serial-port",
        default=[DEFAULT_SERIAL_PORT],
        nargs=1,
        help="specify the serial port to use.")
    parser.add_argument(
        "-b",
        "--baud",
        default=[DEFAULT_SERIAL_BAUD],
        nargs=1,
        help="specify the baud rate to use.")
    parser.add_argument(
        "-a",
        "--address",
        default=[DEFAULT_UDP_ADDRESS],
        nargs=1,
        help="specify the UDP address to use.")
    parser.add_argument(
        "-p",
        "--udp-port",
        default=[DEFAULT_UDP_PORT],
        nargs=1,
        help="specify the UDP port to use.")
    return parser.parse_args()
python
{ "resource": "" }
q11939
convert
train
def convert(value):
    """Converts to a C language appropriate identifier format.
    """
    s0 = "Sbp" + value if value in COLLISIONS else value
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s0)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + "_t"
python
{ "resource": "" }
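A worked example of convert above, on a hypothetical message name (assuming it is not in COLLISIONS):

    convert("MsgBasePosLLH")   # -> 'msg_base_pos_llh_t'
    # CamelCase is split in two passes ('Msg_BasePosLLH', then
    # 'Msg_Base_Pos_LLH'), lowercased, and given the C typedef suffix '_t'.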
q11940
mk_id
train
def mk_id(field):
    """Builds an identifier from a field.
    """
    name = field.type_id
    if name == "string":
        return "%s" % ("char")
    elif name == "array" and field.size:
        if field.options['fill'].value not in CONSTRUCT_CODE:
            return "%s" % convert(field.options['fill'].value)
        else:
            return "%s" % field.options['fill'].value
    elif name == "array":
        return "%s" % convert(field.options['fill'].value)
    elif name not in CONSTRUCT_CODE:
        return convert(name)
    else:
        return name
python
{ "resource": "" }
q11941
mk_size
train
def mk_size(field):
    """Builds an identifier for a container type.
    """
    name = field.type_id
    if name == "string" and field.options.get('size', None):
        return "%s[%d];" % (field.identifier, field.options.get('size').value)
    elif name == "string":
        return "%s[0];" % field.identifier
    elif name == "array" and field.options.get('size', None):
        return "%s[%d];" % (field.identifier, field.options.get('size').value)
    elif name == "array":
        return "%s[0];" % field.identifier
    else:
        return '%s;' % field.identifier
python
{ "resource": "" }
q11942
SBP._get_framed
train
def _get_framed(self, buf, offset, insert_payload):
    """Returns the framed message and updates the CRC.
    """
    header_offset = offset + self._header_len
    self.length = insert_payload(buf, header_offset, self.payload)
    struct.pack_into(self._header_fmt,
                     buf,
                     offset,
                     self.preamble,
                     self.msg_type,
                     self.sender,
                     self.length)
    crc_offset = header_offset + self.length
    preamble_bytes = 1
    crc_over_len = self._header_len + self.length - preamble_bytes
    self.crc = crc16jit(buf, offset + 1, 0, crc_over_len)
    struct.pack_into(self._crc_fmt, buf, crc_offset, self.crc)
    length = preamble_bytes + crc_over_len + self._crc_len
    return length
python
{ "resource": "" }
q11943
SBP.unpack
train
def unpack(d):
    """Unpack and return a framed binary message.
    """
    p = SBP._parser.parse(d)
    assert p.preamble == SBP_PREAMBLE, "Invalid preamble 0x%x." % p.preamble
    return SBP(p.msg_type, p.sender, p.length, p.payload, p.crc)
python
{ "resource": "" }
q11944
SBP.to_json
train
def to_json(self, sort_keys=False):
    """Produce a JSON-encoded SBP message.
    """
    d = self.to_json_dict()
    return json.dumps(d, sort_keys=sort_keys)
python
{ "resource": "" }
q11945
SBP.from_json
train
def from_json(s):
    """Given a JSON-encoded message, build an object.
    """
    d = json.loads(s)
    sbp = SBP.from_json_dict(d)
    return sbp
python
{ "resource": "" }
q11946
parse_type
train
def parse_type(field):
    """
    Function to pull a type from the binary payload.
    """
    if field.type_id == 'string':
        if 'size' in field.options:
            return "parser.getString(%d)" % field.options['size'].value
        else:
            return "parser.getString()"
    elif field.type_id in JAVA_TYPE_MAP:
        # Primitive java types have extractor methods in SBPMessage.Parser
        return "parser.get" + field.type_id.capitalize() + "()"
    if field.type_id == 'array':
        # Call function to build array
        t = field.options['fill'].value
        if t in JAVA_TYPE_MAP:
            if 'size' in field.options:
                return "parser.getArrayof%s(%d)" % (t.capitalize(),
                                                    field.options['size'].value)
            else:
                return "parser.getArrayof%s()" % t.capitalize()
        else:
            if 'size' in field.options:
                return "parser.getArray(%s.class, %d)" % (t,
                                                          field.options['size'].value)
            else:
                return "parser.getArray(%s.class)" % t
    else:
        # This is an inner class, call default constructor
        return "new %s().parse(parser)" % field.type_id
python
{ "resource": "" }
q11947
build_type
train
def build_type(field):
    """
    Function to pack a type into the binary payload.
    """
    if field.type_id == 'string':
        if 'size' in field.options:
            return "builder.putString(%s, %d)" % (field.identifier,
                                                  field.options['size'].value)
        else:
            return "builder.putString(%s)" % field.identifier
    elif field.type_id in JAVA_TYPE_MAP:
        # Primitive java types have extractor methods in SBPMessage.Builder
        return "builder.put%s(%s)" % (field.type_id.capitalize(),
                                      field.identifier)
    if field.type_id == 'array':
        # Call function to build array
        t = field.options['fill'].value
        if t in JAVA_TYPE_MAP:
            if 'size' in field.options:
                return "builder.putArrayof%s(%s, %d)" % (t.capitalize(),
                                                         field.identifier,
                                                         field.options['size'].value)
            else:
                return "builder.putArrayof%s(%s)" % (t.capitalize(),
                                                     field.identifier)
        else:
            if 'size' in field.options:
                return "builder.putArray(%s, %d)" % (field.identifier,
                                                     field.options['size'].value)
            else:
                return "builder.putArray(%s)" % field.identifier
    else:
        return "%s.build(builder)" % field.identifier
python
{ "resource": "" }
q11948
render_table
train
def render_table(output_dir, packages, jenv=JENV):
    """
    Render and output dispatch table
    """
    destination_filename = output_dir + "/com/swiftnav/sbp/client/MessageTable.java"
    with open(destination_filename, 'w+') as f:
        print(destination_filename)
        f.write(jenv.get_template(TEMPLATE_TABLE_NAME).render(packages=packages))
python
{ "resource": "" }
q11949
construct_format
train
def construct_format(f, type_map=CONSTRUCT_CODE):
    """
    Formats for binary-parser library.
    """
    formatted = ""
    if type_map.get(f.type_id, None):
        return "%s('%s')" % (type_map.get(f.type_id), f.identifier)
    elif f.type_id == 'string' and f.options.get('size', None):
        return "string('%s', { length: %d })" % (f.identifier,
                                                 f.options['size'].value)
    elif f.type_id == 'string':
        return "string('%s', { greedy: true })" % (f.identifier)
    elif f.type_id == 'array':
        fill = f.options['fill'].value
        f_ = copy.copy(f)
        f_.type_id = fill
        size = f.options.get('size', None)
        size_fn = f.options.get('size_fn', None)
        field_type = type_map.get(f_.type_id, None)
        if field_type is None:
            field_type = "%s.prototype.parser" % f_.type_id
        else:
            field_type = "'%s'" % field_type
        if size is not None:
            d = {"'uint16'": "'uint16le'",
                 "'uint32'": "'uint32le'",
                 "'uint64'": "'uint16le'",
                 "'int16'": "'int16le'",
                 "'int32'": "'int32le'",
                 "'int64'": "'int16le'"}
            field_type_arr = d.get(field_type, field_type)
            return "array('%s', { length: %d, type: %s })" % (f.identifier,
                                                              size.value,
                                                              field_type_arr)
        elif f.options.get('size_fn') is not None:
            return "array('%s', { type: %s, length: '%s' })" % (f_.identifier,
                                                                field_type,
                                                                size_fn.value)
        else:
            return "array('%s', { type: %s, readUntil: 'eof' })" % (f_.identifier,
                                                                    field_type)
    else:
        return "nest('%s', { type: %s.prototype.parser })" % (f.identifier,
                                                              f.type_id)
    return formatted
python
{ "resource": "" }
q11950
js_classnameify
train
def js_classnameify(s):
    """
    Makes a classname.
    """
    if '_' not in s:
        return s
    return ''.join(w[0].upper() + w[1:].lower() for w in s.split('_'))
python
{ "resource": "" }
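js_classnameify above, applied to hypothetical names:

    js_classnameify('msg_base_pos')  # -> 'MsgBasePos'
    js_classnameify('MsgLog')        # no underscore, returned unchanged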
q11951
MsgEphemerisGPSDepF.from_binary
train
def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    p = MsgEphemerisGPSDepF._parser.parse(d)
    for n in self.__class__.__slots__:
        setattr(self, n, getattr(p, n))
python
{ "resource": "" }
q11952
Framer._readall
train
def _readall(self, size):
    """
    Read until all bytes are collected.

    Parameters
    ----------
    size : int
        Number of bytes to read.
    """
    data = b""
    while len(data) < size:
        d = self._read(size - len(data))
        if self._broken:
            raise StopIteration
        if not d:
            # NOTE (Buro/jgross): Force a yield here to another thread. In
            # case the stream fails midstream, the spinning here causes
            # the UI thread to lock up without yielding.
            time.sleep(0)
            continue
        data += d
    return data
python
{ "resource": "" }
q11953
Framer._receive
train
def _receive(self):
    """
    Read and build SBP message.
    """
    # preamble - not readall(1) to allow breaking before messages,
    # empty input
    preamble = self._read(1)
    if not preamble:
        return None
    elif ord(preamble) != SBP_PREAMBLE:
        if self._verbose:
            print("Host Side Unhandled byte: 0x%02x" % ord(preamble))
        return None
    # hdr
    hdr = self._readall(5)
    msg_crc = crc16(hdr)
    msg_type, sender, msg_len = struct.unpack("<HHB", hdr)
    # data
    data = self._readall(msg_len)
    msg_crc = crc16(data, msg_crc)
    # crc
    crc = self._readall(2)
    crc, = struct.unpack("<H", crc)
    if crc != msg_crc:
        if self._verbose:
            print("crc mismatch: 0x%04X 0x%04X" % (msg_crc, crc))
        return None
    msg = SBP(msg_type, sender, msg_len, data, crc)
    try:
        msg = self._dispatch(msg)
    except Exception as exc:
        warnings.warn("SBP dispatch error: %s" % (exc,))
    return msg
python
{ "resource": "" }
q11954
dispatch
train
def dispatch(msg, table=_SBP_TABLE):
    """
    Dispatch an SBP message type based on its `msg_type` and parse its
    payload.

    Parameters
    ----------
    msg : :class:`SBP`
        A parsed SBP object.
    table : dict
        Any table mapping unique SBP message type IDs to SBP message
        constructors.

    Returns
    ----------
    SBP message with a parsed payload.
    """
    try:
        return table[msg.msg_type](msg)
    except KeyError:
        warn = "No message found for msg_type id %d for msg %s." \
               % (msg.msg_type, msg)
        warnings.warn(warn, RuntimeWarning)
        return msg
    except FormatFieldError:
        warnings.warn("SBP payload deserialization error! 0x%x" % msg.msg_type,
                      RuntimeWarning)
        return msg
python
{ "resource": "" }
q11955
Handler._recv_thread
train
def _recv_thread(self):
    """
    Internal thread to iterate over source messages and dispatch
    callbacks.
    """
    for msg, metadata in self._source:
        if msg.msg_type:
            self._call(msg, **metadata)
    # Break any upstream iterators
    for sink in self._sinks:
        i = sink()
        if i is not None:
            i.breakiter()
    self._dead = True
python
{ "resource": "" }
q11956
Handler.filter
train
def filter(self, msg_type=None, maxsize=0):
    """
    Get a filtered iterator of messages for synchronous, blocking use in
    another thread.
    """
    if self._dead:
        return iter(())
    iterator = Handler._SBPQueueIterator(maxsize)
    # We use a weakref so that the iterator may be garbage collected if
    # its consumer no longer has a reference.
    ref = weakref.ref(iterator)
    self._sinks.append(ref)

    def feediter(msg, **metadata):
        i = ref()
        if i is not None:
            i(msg, **metadata)
        else:
            raise Handler._DeadCallbackException

    self.add_callback(feediter, msg_type)
    return iterator
python
{ "resource": "" }
q11957
Handler.add_callback
train
def add_callback(self, callback, msg_type=None):
    """
    Add per message type or global callback.

    Parameters
    ----------
    callback : fn
        Callback function
    msg_type : int | iterable
        Message type to register callback against. Default `None` means
        global callback. Iterable type adds the callback to all the
        message types.
    """
    cb_keys = self._to_iter(msg_type)
    if cb_keys is not None:
        for msg_type_ in cb_keys:
            self._callbacks[msg_type_].add(callback)
    else:
        self._callbacks[msg_type].add(callback)
python
{ "resource": "" }
q11958
Handler.remove_callback
train
def remove_callback(self, callback, msg_type=None):
    """
    Remove per message type or global callback.

    Parameters
    ----------
    callback : fn
        Callback function
    msg_type : int | iterable
        Message type to remove callback from. Default `None` means
        global callback. Iterable type removes the callback from all
        the message types.
    """
    if msg_type is None:
        msg_type = self._callbacks.keys()
    cb_keys = self._to_iter(msg_type)
    if cb_keys is not None:
        for msg_type_ in cb_keys:
            try:
                self._callbacks[msg_type_].remove(callback)
            except KeyError:
                pass
    else:
        self._callbacks[msg_type].remove(callback)
python
{ "resource": "" }
q11959
Handler._gc_dead_sinks
train
def _gc_dead_sinks(self):
    """
    Remove any dead weakrefs.
    """
    deadsinks = []
    for i in self._sinks:
        if i() is None:
            deadsinks.append(i)
    for i in deadsinks:
        self._sinks.remove(i)
python
{ "resource": "" }
q11960
Handler.wait
train
def wait(self, msg_type, timeout=1.0):
    """
    Wait for an SBP message.

    Parameters
    ----------
    msg_type : int
        SBP message type.
    timeout : float
        Waiting period
    """
    event = threading.Event()
    payload = {'data': None}

    def cb(sbp_msg, **metadata):
        payload['data'] = sbp_msg
        event.set()

    self.add_callback(cb, msg_type)
    event.wait(timeout)
    self.remove_callback(cb, msg_type)
    return payload['data']
python
{ "resource": "" }
q11961
Handler.wait_callback
train
def wait_callback(self, callback, msg_type=None, timeout=1.0):
    """
    Wait for an SBP message with a callback.

    Parameters
    ----------
    callback : fn
        Callback function
    msg_type : int | iterable
        Message type to register callback against. Default `None` means
        global callback. Iterable type adds the callback to all the
        message types.
    timeout : float
        Waiting period
    """
    event = threading.Event()

    def cb(msg, **metadata):
        callback(msg, **metadata)
        event.set()

    self.add_callback(cb, msg_type)
    event.wait(timeout)
    self.remove_callback(cb, msg_type)
python
{ "resource": "" }
q11962
fmt_repr
train
def fmt_repr(obj):
    """
    Return pretty printed string representation of an object.
    """
    items = {k: v for k, v in list(obj.__dict__.items())}
    return "<%s: {%s}>" % (obj.__class__.__name__,
                           pprint.pformat(items, width=1))
python
{ "resource": "" }
q11963
SettingMonitor.capture_setting
train
def capture_setting(self, sbp_msg, **metadata):
    """Callback to extract and store setting values from
    SBP_MSG_SETTINGS_READ_RESP

    Messages of any type other than SBP_MSG_SETTINGS_READ_RESP are
    ignored
    """
    if sbp_msg.msg_type == SBP_MSG_SETTINGS_READ_RESP:
        section, setting, value = sbp_msg.payload.split(b'\0')[:3]
        self.settings.append((section, setting, value))
python
{ "resource": "" }
q11964
SettingMonitor.wait_for_setting_value
train
def wait_for_setting_value(self, section, setting, value, wait_time=5.0):
    """Wait up to wait_time seconds to see a SBP_MSG_SETTINGS_READ_RESP
    message with a user-specified value
    """
    expire = time.time() + wait_time
    ok = False
    while not ok and time.time() < expire:
        settings = [x for x in self.settings
                    if (x[0], x[1]) == (section, setting)]
        # Check to see if the last setting has the value we want
        if len(settings) > 0:
            ok = settings[-1][2] == value
        time.sleep(0.1)
    return ok
python
{ "resource": "" }
q11965
construct_format
train
def construct_format(f, type_map=CONSTRUCT_CODE):
    """
    Formats for Construct.
    """
    formatted = ""
    if type_map.get(f.type_id, None):
        return "'{identifier}' / {type_id}".format(
            type_id=type_map.get(f.type_id), identifier=f.identifier)
    elif f.type_id == 'string' and f.options.get('size', None):
        return "'{id}' / construct.Bytes({size})".format(
            id=f.identifier, size=f.options['size'].value)
    elif f.type_id == 'string':
        return "'{id}' / construct.GreedyBytes".format(id=f.identifier)
    elif f.type_id == 'array' and f.options.get('size', None):
        fill = f.options['fill'].value
        f_ = copy.copy(f)
        f_.type_id = fill
        s = f.options.get('size', None).value
        return "'{id}' / construct.Array({size}, {type})".format(
            id=f.identifier, size=s,
            type=type_map.get(f_.type_id, 'construct.Byte'))
    elif f.type_id == 'array':
        fill = f.options['fill'].value
        f_ = copy.copy(f)
        f_.type_id = fill
        return "construct.GreedyRange(%s)" % construct_format(f_)
    else:
        return "'%s' / construct.Struct(%s._parser)" % (f.identifier,
                                                        f.type_id)
    return formatted
python
{ "resource": "" }
q11966
exclude_fields
train
def exclude_fields(obj, exclude=EXCLUDE):
    """
    Return dict of object without parent attrs.
    """
    return dict([(k, getattr(obj, k)) for k in obj.__slots__
                 if k not in exclude])
python
{ "resource": "" }
q11967
walk_json_dict
train
def walk_json_dict(coll):
    """
    Flatten a parsed SBP object into dicts and lists, which are
    compatible for JSON output.

    Parameters
    ----------
    coll : dict
    """
    if isinstance(coll, dict):
        return dict((k, walk_json_dict(v)) for (k, v) in iter(coll.items()))
    elif isinstance(coll, bytes):
        return coll.decode('ascii')
    elif hasattr(coll, '__iter__') and not isinstance(coll, str):
        return [walk_json_dict(seq) for seq in coll]
    else:
        return coll
python
{ "resource": "" }
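A minimal sketch of walk_json_dict above on mixed input (hypothetical data; Python 3 semantics for the bytes branch):

    walk_json_dict({'name': b'abc', 'ids': (1, 2)})
    # -> {'name': 'abc', 'ids': [1, 2]}   bytes decoded, tuple flattened to a list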
q11968
containerize
train
def containerize(coll):
    """Walk attribute fields passed from an SBP message and convert to
    Containers where appropriate. Needed for Construct proper
    serialization.

    Parameters
    ----------
    coll : dict
    """
    if isinstance(coll, Container):
        [setattr(coll, k, containerize(v)) for (k, v) in coll.items()]
        return coll
    elif isinstance(coll, dict):
        return containerize(Container(**coll))
    elif isinstance(coll, list):
        for j, i in enumerate(coll):
            if isinstance(i, dict):
                coll[j] = containerize(Container(**i))
        return coll
    else:
        return coll
python
{ "resource": "" }
q11969
fmt_repr
train
def fmt_repr(obj):
    """Return an orphaned string representation of an object without the
    clutter of its parent object.
    """
    items = ["%s = %r" % (k, v) for k, v in list(exclude_fields(obj).items())]
    return "<%s: {%s}>" % (obj.__class__.__name__, ', '.join(items))
python
{ "resource": "" }
q11970
read_spec
train
def read_spec(filename, verbose=False):
    """
    Read an SBP specification.

    Parameters
    ----------
    filename : str
        Local filename for specification.
    verbose : bool
        Print out some debugging info

    Returns
    ----------
    The parsed specification contents.

    Raises
    ----------
    Exception
        On empty file.
    yaml.YAMLError
        On Yaml parsing error
    voluptuous.Invalid
        On invalid SBP schema
    """
    contents = None
    with open(filename, 'r') as f:
        contents = yaml.load(f)
    if contents is None:
        raise Exception("Empty yaml file: %s." % filename)
    try:
        s.package_schema(contents)
    except Exception as e:
        sys.stderr.write("Invalid SBP YAML specification: %s.\n" % filename)
        raise e
    return contents
python
{ "resource": "" }
q11971
get_files
train
def get_files(input_file):
    """
    Initializes an index of files to generate, returns the base
    directory and index.
    """
    file_index = {}
    base_dir = None
    if os.path.isfile(input_file):
        file_index[input_file] = None
        base_dir = os.path.dirname(input_file)
    elif os.path.isdir(input_file):
        base_dir = input_file
        for inf in glob.glob(input_file + s.SBP_EXTENSION):
            file_index[os.path.abspath(inf)] = None
        for inf in glob.glob(input_file + '/*'):
            base, index = get_files(os.path.abspath(inf))
            z = file_index.copy()
            z.update(index)
            file_index = z
    return (base_dir, file_index)
python
{ "resource": "" }
q11972
resolve_deps
train
def resolve_deps(base_dir, file_index):
    """
    Given a base directory and an initial set of files, retrieves
    dependencies and adds them to the file_index.
    """
    def flatten(tree, index={}):
        for include in tree.get('include', []):
            fname = base_dir + "/" + include
            assert os.path.exists(fname), "File %s does not exist." % fname
            if fname not in index:
                index[fname] = read_spec(fname)
                index.update(flatten(index[fname], file_index))
        return index
    for fname, contents in file_index.items():
        file_index[fname] = read_spec(fname)
        file_index.update(flatten(file_index[fname], file_index))
    return file_index
python
{ "resource": "" }
q11973
mk_package
train
def mk_package(contents):
    """Instantiates a package specification from a parsed "AST" of a
    package.

    Parameters
    ----------
    contents : dict

    Returns
    ----------
    PackageSpecification
    """
    package = contents.get('package', None)
    description = contents.get('description', None)
    include = contents.get('include', [])
    definitions = contents.get('definitions', [])
    resolved = [mk_definition(defn) for defn in definitions]
    return sbp.PackageSpecification(identifier=package,
                                    description=description,
                                    includes=include,
                                    definitions=resolved,
                                    render_source=contents.get('render_source', True),
                                    stable=contents.get('stable', False),
                                    public=contents.get('public', True))
python
{ "resource": "" }
q11974
mk_definition
train
def mk_definition(defn):
    """Instantiates a struct or SBP message specification from a parsed
    "AST" of a struct or message.

    Parameters
    ----------
    defn : dict

    Returns
    ----------
    A Definition or a specialization of a definition, like a Struct
    """
    assert len(defn) == 1
    identifier, contents = next(iter(defn.items()))
    fs = [mk_field(f) for f in contents.get('fields', [])]
    return sbp.resolve_type(sbp.Definition(identifier=identifier,
                                           sbp_id=contents.get('id', None),
                                           short_desc=contents.get('short_desc', None),
                                           desc=contents.get('desc', None),
                                           type_id=contents.get('type'),
                                           fields=fs,
                                           public=contents.get('public', True)))
python
{ "resource": "" }
q11975
mk_field
train
def mk_field(field):
    """Instantiates a field specification from a parsed "AST" of a field.

    Parameters
    ----------
    field : dict

    Returns
    ----------
    A Field or a specialization of a field, like a bitfield.
    """
    assert len(field) == 1
    identifier, contents = next(iter(field.items()))
    contents = dict(list({'units': '', 'n_with_values': 0}.items()) +
                    list(contents.items()))
    return sbp.resolve_type(sbp.Field(identifier=identifier,
                                      type_id=contents.pop('type'),
                                      options=contents))
python
{ "resource": "" }
q11976
to_global
train
def to_global(s):
    """
    Format a global variable name.
    """
    if s.startswith('GPSTime'):
        s = 'Gps' + s[3:]
    if '_' in s:
        s = "".join([i.capitalize() for i in s.split("_")])
    return s[0].lower() + s[1:]
python
{ "resource": "" }
q11977
to_data
train
def to_data(s):
    """
    Format a data variable name.
    """
    if s.startswith('GPSTime'):
        s = 'Gps' + s[3:]
    if '_' in s:
        return "".join([i.capitalize() for i in s.split("_")])
    return s
python
{ "resource": "" }
q11978
to_type
train
def to_type(f, type_map=CONSTRUCT_CODE):
    """
    Format the proper type.
    """
    name = f.type_id
    if name.startswith('GPSTime'):
        name = 'Gps' + name[3:]
    if type_map.get(name, None):
        return type_map.get(name, None)
    elif name == 'array':
        fill = f.options['fill'].value
        f_ = copy.copy(f)
        f_.type_id = fill
        return "[%s]" % to_type(f_)
    return name
python
{ "resource": "" }
q11979
to_identifier
train
def to_identifier(s):
    """
    Convert snake_case to camel_case.
    """
    if s.startswith('GPS'):
        s = 'Gps' + s[3:]
    return ''.join([i.capitalize() for i in s.split('_')]) if '_' in s else s
python
{ "resource": "" }
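to_identifier above on hypothetical field names:

    to_identifier('GPS_TIME')  # -> 'GpsTime' ('GPS' prefix normalized, each part capitalized)
    to_identifier('tow')       # no underscore, returned as-is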
q11980
get_asset_tarball
train
def get_asset_tarball(asset_name, src_dir, dest_project, dest_folder,
                      json_out):
    """
    If the src_dir contains a "resources" directory its contents are
    archived and the archived file is uploaded to the platform
    """
    if os.path.isdir(os.path.join(src_dir, "resources")):
        temp_dir = tempfile.mkdtemp()
        try:
            resource_file = os.path.join(temp_dir,
                                         asset_name + "_resources.tar.gz")
            cmd = ["tar", "-czf", resource_file,
                   "-C", os.path.join(src_dir, "resources"), "."]
            subprocess.check_call(cmd)
            file_id = dx_upload(resource_file, dest_project, dest_folder,
                                json_out)
            return file_id
        finally:
            shutil.rmtree(temp_dir)
python
{ "resource": "" }
q11981
get_user_id
train
def get_user_id(user_id_or_username):
    """Gets the user ID based on the value `user_id_or_username`
    specified on the command-line, being extra lenient and lowercasing
    the value in all cases.
    """
    user_id_or_username = user_id_or_username.lower()
    if not user_id_or_username.startswith("user-"):
        user_id = "user-" + user_id_or_username
    else:
        user_id = user_id_or_username
    return user_id
python
{ "resource": "" }
q11982
register_parser
train
def register_parser(parser, subparsers_action=None, categories=('other', ),
                    add_help=True):
    """Attaches `parser` to the global ``parser_map``.

    If `add_help` is truthy, then adds the helpstring of `parser` into
    the output of ``dx help...``, for each category in `categories`.

    :param subparsers_action: A special action object that is returned
        by ``ArgumentParser.add_subparsers(...)``, or None.
    :type subparsers_action: argparse._SubParsersAction, or None.
    """
    name = re.sub('^dx ', '', parser.prog)
    if subparsers_action is None:
        subparsers_action = subparsers
    if isinstance(categories, basestring):
        categories = (categories, )
    parser_map[name] = parser
    if add_help:
        _help = subparsers_action._choices_actions[-1].help
        parser_categories['all']['cmds'].append((name, _help))
        for category in categories:
            parser_categories[category]['cmds'].append((name, _help))
python
{ "resource": "" }
q11983
_is_retryable_exception
train
def _is_retryable_exception(e):
    """Returns True if the exception is always safe to retry.

    This is True if the client was never able to establish a connection
    to the server (for example, name resolution failed or the connection
    could otherwise not be initialized).

    Conservatively, if we can't tell whether a network connection could
    have been established, we return False.
    """
    if isinstance(e, urllib3.exceptions.ProtocolError):
        e = e.args[1]
    if isinstance(e, (socket.gaierror, socket.herror)):
        return True
    if isinstance(e, socket.error) and e.errno in _RETRYABLE_SOCKET_ERRORS:
        return True
    if isinstance(e, urllib3.exceptions.NewConnectionError):
        return True
    return False
python
{ "resource": "" }
q11984
_extract_msg_from_last_exception
train
def _extract_msg_from_last_exception():
    '''Extract a useful error message from the last thrown exception'''
    last_exc_type, last_error, last_traceback = sys.exc_info()
    if isinstance(last_error, exceptions.DXAPIError):
        # Using the same code path as below would not
        # produce a useful message when the error contains a
        # 'details' hash (which would have a last line of
        # '}')
        return last_error.error_message()
    else:
        return traceback.format_exc().splitlines()[-1].strip()
python
{ "resource": "" }
q11985
_calculate_retry_delay
train
def _calculate_retry_delay(response, num_attempts):
    '''
    Returns the time in seconds that we should wait.

    :param num_attempts: number of attempts that have been made to the
        resource, including the most recent failed one
    :type num_attempts: int
    '''
    if response is not None and response.status == 503 and \
       'retry-after' in response.headers:
        try:
            return int(response.headers['retry-after'])
        except ValueError:
            # In RFC 2616, retry-after can be formatted as absolute time
            # instead of seconds to wait. We don't bother to parse that,
            # but the apiserver doesn't generate such responses anyway.
            pass
    if num_attempts <= 1:
        return 1
    num_attempts = min(num_attempts, 7)
    return randint(2 ** (num_attempts - 2), 2 ** (num_attempts - 1))
python
{ "resource": "" }
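The delay schedule produced by _calculate_retry_delay above, tabulated for clarity (case with no Retry-After header):

    attempt 1  -> 1 second
    attempt 2  -> randint(1, 2) seconds
    attempt 3  -> randint(2, 4) seconds
    attempt 7+ -> randint(32, 64) seconds (exponent capped at 7 attempts)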
q11986
get_auth_server_name
train
def get_auth_server_name(host_override=None, port_override=None,
                         protocol='https'):
    """
    Chooses the auth server name from the currently configured API
    server name.

    Raises DXError if the auth server name cannot be guessed and the
    overrides are not provided (or improperly provided).
    """
    if host_override is not None or port_override is not None:
        if host_override is None or port_override is None:
            raise exceptions.DXError(
                "Both host and port must be specified if either is specified")
        return protocol + '://' + host_override + ':' + str(port_override)
    elif APISERVER_HOST == 'stagingapi.dnanexus.com':
        return 'https://stagingauth.dnanexus.com'
    elif APISERVER_HOST == 'api.dnanexus.com':
        return 'https://auth.dnanexus.com'
    elif APISERVER_HOST == 'stagingapi.cn.dnanexus.com':
        return 'https://stagingauth.cn.dnanexus.com:7001'
    elif APISERVER_HOST == 'api.cn.dnanexus.com':
        return 'https://auth.cn.dnanexus.com:8001'
    elif APISERVER_HOST == "localhost" or APISERVER_HOST == "127.0.0.1":
        if "DX_AUTHSERVER_HOST" not in os.environ or \
           "DX_AUTHSERVER_PORT" not in os.environ:
            err_msg = "Must set authserver env vars (DX_AUTHSERVER_HOST, " \
                      "DX_AUTHSERVER_PORT) if apiserver is {apiserver}."
            raise exceptions.DXError(err_msg.format(apiserver=APISERVER_HOST))
        else:
            return os.environ["DX_AUTHSERVER_HOST"] + ":" + \
                   os.environ["DX_AUTHSERVER_PORT"]
    else:
        err_msg = "Could not determine which auth server is associated " \
                  "with {apiserver}."
        raise exceptions.DXError(err_msg.format(apiserver=APISERVER_HOST))
python
{ "resource": "" }
q11987
append_underlying_workflow_describe
train
def append_underlying_workflow_describe(globalworkflow_desc):
    """
    Adds the "workflowDescribe" field to the config for each region of
    the global workflow. The value is the description of an underlying
    workflow in that region.
    """
    if not globalworkflow_desc or \
       globalworkflow_desc['class'] != 'globalworkflow' or \
       'regionalOptions' not in globalworkflow_desc:
        return globalworkflow_desc

    for region, config in globalworkflow_desc['regionalOptions'].items():
        workflow_id = config['workflow']
        workflow_desc = dxpy.api.workflow_describe(workflow_id)
        globalworkflow_desc['regionalOptions'][region]['workflowDescribe'] = workflow_desc
    return globalworkflow_desc
python
{ "resource": "" }
q11988
escape_unicode_string
train
def escape_unicode_string(u):
    """
    Escapes the nonprintable chars 0-31 and 127, and backslash;
    preferably with a friendly equivalent such as '\\n' if available,
    but otherwise with a Python-style backslashed hex escape.
    """
    def replacer(matchobj):
        if ord(matchobj.group(1)) == 127:
            return "\\x7f"
        if ord(matchobj.group(1)) == 92:  # backslash
            return "\\\\"
        return REPLACEMENT_TABLE[ord(matchobj.group(1))]
    return re.sub("([\\000-\\037\\134\\177])", replacer, u)
python
{ "resource": "" }
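A sketch of escape_unicode_string above, assuming REPLACEMENT_TABLE maps code point 10 to the two-character sequence backslash-n (an assumption; the table is defined elsewhere in the module):

    escape_unicode_string(u'a\nb\\c')  # -> u'a\\nb\\\\c' (newline -> friendly escape, backslash doubled)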
q11989
flatten_json_array
train
def flatten_json_array(json_string, array_name):
    """
    Flattens all arrays with the same name in the JSON string

    :param json_string: JSON string
    :type json_string: str
    :param array_name: Array name to flatten
    :type array_name: str
    """
    result = re.sub('"{}": \\[\r?\n\\s*'.format(array_name),
                    '"{}": ['.format(array_name),
                    json_string, flags=re.MULTILINE)
    flatten_regexp = re.compile('"{}": \\[(.*)(?<=,)\r?\n\\s*'.format(array_name),
                                flags=re.MULTILINE)
    while flatten_regexp.search(result):
        result = flatten_regexp.sub('"{}": [\\1 '.format(array_name), result)
    result = re.sub('"{}": \\[(.*)\r?\n\\s*\\]'.format(array_name),
                    '"{}": [\\1]'.format(array_name),
                    result, flags=re.MULTILINE)
    return result
python
{ "resource": "" }
q11990
format_exception
train
def format_exception(e):
    """Returns a string containing the type and text of the exception.
    """
    from .utils.printing import fill
    return '\n'.join(fill(line)
                     for line in traceback.format_exception_only(type(e), e))
python
{ "resource": "" }
q11991
DXAPIError.error_message
train
def error_message(self):
    """Returns a one-line description of the error."""
    output = self.msg + ", code " + str(self.code)
    output += ". Request Time={}, Request ID={}".format(self.timestamp,
                                                        self.req_id)
    if self.name != self.__class__.__name__:
        output = self.name + ": " + output
    return output
python
{ "resource": "" }
q11992
DXGlobalWorkflow.publish
train
def publish(self, **kwargs):
    """
    Publishes the global workflow, so all users can find it and use it
    on the platform. The current user must be a developer of the
    workflow.
    """
    if self._dxid is not None:
        return dxpy.api.global_workflow_publish(self._dxid, **kwargs)
    else:
        return dxpy.api.global_workflow_publish('globalworkflow-' + self._name,
                                                alias=self._alias,
                                                **kwargs)
python
{ "resource": "" }
q11993
DXGlobalWorkflow._get_run_input
train
def _get_run_input(self, workflow_input, project=None, **kwargs):
    """
    Checks the region in which the global workflow is run and returns
    the input associated with the underlying workflow from that region.
    """
    region = dxpy.api.project_describe(
        project, input_params={"fields": {"region": True}})["region"]
    dxworkflow = self.get_underlying_workflow(region)
    return dxworkflow._get_run_input(workflow_input, **kwargs)
python
{ "resource": "" }
q11994
build
train
def build(src_dir, parallel_build=True):
    """
    Runs any build scripts that are found in the specified directory.

    In particular, runs ``./configure`` if it exists, followed by
    ``make -jN`` if it exists (building with as many parallel tasks as
    there are CPUs on the system).
    """
    # TODO: use Gentoo or deb buildsystem
    config_script = os.path.join(src_dir, "configure")
    if os.path.isfile(config_script) and os.access(config_script, os.X_OK):
        logger.debug("Running ./configure in {cwd}".format(
            cwd=os.path.abspath(src_dir)))
        try:
            subprocess.check_call([config_script])
        except subprocess.CalledProcessError as e:
            raise AppBuilderException(
                "./configure in target directory failed with exit code %d"
                % (e.returncode,))
    if os.path.isfile(os.path.join(src_dir, "Makefile")) \
            or os.path.isfile(os.path.join(src_dir, "makefile")) \
            or os.path.isfile(os.path.join(src_dir, "GNUmakefile")):
        if parallel_build:
            make_shortcmd = "make -j%d" % (NUM_CORES,)
        else:
            make_shortcmd = "make"
        logger.debug("Building with {make} in {cwd}".format(
            make=make_shortcmd, cwd=os.path.abspath(src_dir)))
        try:
            make_cmd = ["make", "-C", src_dir]
            if parallel_build:
                make_cmd.append("-j" + str(NUM_CORES))
            subprocess.check_call(make_cmd)
        except subprocess.CalledProcessError as e:
            raise AppBuilderException(
                "%s in target directory failed with exit code %d"
                % (make_shortcmd, e.returncode))
python
{ "resource": "" }
q11995
_create_or_update_version
train
def _create_or_update_version(app_name, version, app_spec, try_update=True):
    """
    Creates a new version of the app. Returns an app_id, or None if the
    app has already been created and published.
    """
    # This has a race condition since the app could have been created or
    # published since we last looked.
    try:
        app_id = dxpy.api.app_new(app_spec)["id"]
        return app_id
    except dxpy.exceptions.DXAPIError as e:
        # TODO: detect this error more reliably
        if e.name == 'InvalidInput' and e.msg == 'Specified name and version conflict with an existing alias':
            print('App %s/%s already exists' % (app_spec["name"], version),
                  file=sys.stderr)
            # The version number was already taken, so app/new doesn't work.
            # However, maybe it hasn't been published yet, so we might be
            # able to app-xxxx/update it.
            app_describe = dxpy.api.app_describe("app-" + app_name,
                                                 alias=version)
            if app_describe.get("published", 0) > 0:
                return None
            return _update_version(app_name, version, app_spec,
                                   try_update=try_update)
        raise e
python
{ "resource": "" }
q11996
_update_version
train
def _update_version(app_name, version, app_spec, try_update=True):
    """
    Updates a version of the app in place. Returns an app_id, or None
    if the app has already been published.
    """
    if not try_update:
        return None
    try:
        app_id = dxpy.api.app_update("app-" + app_name, version,
                                     app_spec)["id"]
        return app_id
    except dxpy.exceptions.DXAPIError as e:
        if e.name == 'InvalidState':
            print('App %s/%s has already been published'
                  % (app_spec["name"], version), file=sys.stderr)
            return None
        raise e
python
{ "resource": "" }
q11997
create_app
train
def create_app(applet_id, applet_name, src_dir, publish=False,
               set_default=False, billTo=None, try_versions=None,
               try_update=True, confirm=True, regional_options=None):
    """
    Creates a new app object from the specified applet.

    .. deprecated:: 0.204.0
       Use :func:`create_app_multi_region()` instead.
    """
    # In this case we don't know the region of the applet, so we use the
    # legacy API {"applet": applet_id} without specifying a region
    # specifically.
    return _create_app(dict(applet=applet_id), applet_name, src_dir,
                       publish=publish, set_default=set_default,
                       billTo=billTo, try_versions=try_versions,
                       try_update=try_update, confirm=confirm)
python
{ "resource": "" }
q11998
get_enabled_regions
train
def get_enabled_regions(app_spec, from_command_line):
    """Returns a list of the regions in which the app should be enabled.

    Also validates that app_spec['regionalOptions'], if supplied, is
    well-formed.

    :param app_spec: app specification
    :type app_spec: dict
    :param from_command_line: The regions specified on the command-line
        via --region
    :type from_command_line: list or None
    """
    enabled_regions = dxpy.executable_builder.get_enabled_regions(
        'app', app_spec, from_command_line, AppBuilderException)
    if enabled_regions is not None and len(enabled_regions) == 0:
        raise AssertionError("This app should be enabled in at least one region")
    return enabled_regions
python
{ "resource": "" }
q11999
get_implicit_depends_on
train
def get_implicit_depends_on(input_hash, depends_on):
    '''
    Add DNAnexus links to non-closed data objects in input_hash to
    depends_on
    '''
    q = []
    for field in input_hash:
        possible_dep = get_nonclosed_data_obj_link(input_hash[field])
        if possible_dep is not None:
            depends_on.append(possible_dep)
        elif isinstance(input_hash[field], list) or \
                isinstance(input_hash[field], dict):
            q.append(input_hash[field])
    while len(q) > 0:
        thing = q.pop()
        if isinstance(thing, list):
            for i in range(len(thing)):
                possible_dep = get_nonclosed_data_obj_link(thing[i])
                if possible_dep is not None:
                    depends_on.append(possible_dep)
                elif isinstance(thing[i], list) or isinstance(thing[i], dict):
                    q.append(thing[i])
        else:
            for field in thing:
                possible_dep = get_nonclosed_data_obj_link(thing[field])
                if possible_dep is not None:
                    depends_on.append(possible_dep)
                elif isinstance(thing[field], list) or \
                        isinstance(thing[field], dict):
                    q.append(thing[field])
python
{ "resource": "" }