code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def device_mounted(self, device):
if not self._mounter.is_handleable(device):
return
browse_action = ('browse', _('Browse directory'),
self._mounter.browse, device)
terminal_action = ('terminal', _('Open terminal'),
self._mounter.te... | Show mount notification for specified device object. |
def runGetFeatureSet(self, id_):
    """Runs a getFeatureSet request for the specified ID.

    Parses ``id_`` as a FeatureSetCompoundId, looks up the owning dataset
    in the data repository, fetches the feature set, and delegates to
    ``runGetRequest`` to build the protocol response.
    """
    compoundId = datamodel.FeatureSetCompoundId.parse(id_)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    featureSet = dataset.getFeatureSet(id_)
    return self.runGetRequest(featureSet)
def read_file(self, fasta_path):
    """Read the contents of a FASTA file into a dictionary.

    Maps each record identifier to its sequence, using the
    ``iterate_over_file`` record iterator to parse the file.
    """
    return {
        record_id: sequence
        for record_id, sequence in self.iterate_over_file(fasta_path)
    }
def keep_longest(head, update, down_path):
    """Keep longest field among `head` and `update`.

    Returns 'f' to keep ``head`` or 's' to take ``update``. A missing
    update always keeps head; a missing head always takes the update.
    Ties are resolved in favour of ``head``.
    """
    if update is None:
        return 'f'
    if head is None:
        return 's'
    if len(update) > len(head):
        return 's'
    return 'f'
def log_wrapper(self):
log = logging.getLogger('client.py')
try:
debug = self.params["debug"]
log.setLevel(logging.DEBUG)
except KeyError:
log.setLevel(logging.INFO)
stream = logging.StreamHandler()
logformat = logging.Formatter(
'%... | Wrapper to set logging parameters for output |
def update_keys(self):
    """Update the redis keys to listen for new jobs priorities.

    Refreshes ``self.keys`` from the queue model, warns when no queue
    is waiting yet, and stamps the refresh time.
    """
    waiting = self.queue_model.get_waiting_keys(self.queues)
    self.keys = waiting
    if not waiting:
        self.log('No queues yet', level='warning')
    self.last_update_keys = datetime.utcnow()
def isAuthorized(self, request):
    """Is the user authorized for the requested action with this event?

    With no view restrictions everyone is authorized. With restrictions
    present, an absent request is rejected outright; otherwise every
    restriction must accept the request.
    """
    restrictions = self.get_view_restrictions()
    if not restrictions:
        return True
    if request is None:
        return False
    return all(rule.accept_request(request) for rule in restrictions)
def copyHdfsFileToLocal(hdfsFilePath, localFilePath, hdfsClient, override=True):
if not hdfsClient.exists(hdfsFilePath):
raise Exception('HDFS file {} does not exist!'.format(hdfsFilePath))
try:
file_status = hdfsClient.get_file_status(hdfsFilePath)
if file_status.type != 'FILE':
... | Copy file from HDFS to local |
def process_lists(self):
    """Do any preprocessing of the lists.

    Records every (index-in-l1, index-in-l2) pair whose elements compare
    equal (per ``self.equal``) into ``self.matches``.
    """
    for i, left in enumerate(self.l1):
        for j, right in enumerate(self.l2):
            if self.equal(left, right):
                self.matches.add((i, j))
def add_user(config, group, username):
client = Client()
client.prepare_connection()
group_api = API(client)
try:
group_api.add_user(group, username)
except ldap_tools.exceptions.NoGroupsFound:
print("Group ({}) not found".format(group))
except lda... | Add specified user to specified group. |
def insert_values(self):
if self._insert_values is None:
self._insert_values = self._prepare_insert(
tmpl=self._insert_values_tmpl,
placeholder_for_id=True,
record_class=self.record_class,
field_names=self.field_names,
)
... | SQL statement that inserts records without ID. |
def visit_boolean_query(self, node):
left = node.left.accept(self)
right = node.right.accept(self)
is_journal_keyword_op = isinstance(left, KeywordOp) and left.left == Keyword('journal')
if is_journal_keyword_op:
journal_and_volume_conjunction = _restructure_if_volume_follows... | Convert BooleanRule into AndOp or OrOp nodes. |
def reset_actions(self):
    """Clears actions that are already stored locally and on the remote end.

    For W3C-capable drivers the buffered W3C action queue is cleared as
    well; the legacy local action list is always emptied.
    """
    if self._driver.w3c:
        self.w3c_actions.clear_actions()
    self._actions = []
def do_list():
    """CLI action "list configurations".

    Prints the names of the immediate sub-directories of CONFIG_ROOT,
    each of which is one available configuration.
    """
    # next(os.walk(...)) yields (dirpath, dirnames, filenames) for the
    # top level only; [1] is the list of sub-directory names. The old
    # ``.next()`` method and print statements were Python-2-only.
    dirs = next(os.walk(CONFIG_ROOT))[1]
    if dirs:
        print("List of available configurations:\n")
        for d in dirs:
            print(" * {}".format(d))
    else:
        print("No configurations available.")
def similarity_transformation(rot, mat):
    """R x M x R^-1

    Conjugates ``mat`` by the transformation ``rot``.
    """
    rot_inv = np.linalg.inv(rot)
    return np.dot(np.dot(rot, mat), rot_inv)
def put(self):
    """Update a credential by file path.

    Decodes the JSON request body (normalising unicode to str via
    ``utils.uni_to_str``) and hands the payload to the credential
    manager.
    """
    cred_payload = utils.uni_to_str(json.loads(request.get_data()))
    return self.manager.update_credential(cred_payload)
def _split_docker_uuid(uuid):
if uuid:
uuid = uuid.split(':')
if len(uuid) == 2:
tag = uuid[1]
repo = uuid[0]
return repo, tag
return None, None | Split a smartos docker uuid into repo and tag |
def play(self):
    """Change state to playing.

    Only acts when currently paused; asks GStreamer to start playback
    and mirrors the new state locally.
    """
    if self.state == STATE_PAUSED:
        self._player.set_state(Gst.State.PLAYING)
        self.state = STATE_PLAYING
def diff_dict(dict1, dict2, ignore_missing=False):
unidict1 = dict_unicodeize(dict1)
unidict2 = dict_unicodeize(dict2)
if ((not ignore_missing) and (len(unidict1) != len(unidict2))) or \
(ignore_missing and (len(unidict1) >= len(unidict2))):
return True
for comp_k, comp_v in iteritems(uni... | Performs a base type comparison between two dicts |
def getcolor(spec):
    """Turn optional color string spec into an array.

    A string is resolved through matplotlib's named-color table to an
    RGB array; any other value is passed through unchanged.
    """
    if not isinstance(spec, str):
        return spec
    from matplotlib import colors
    return asarray(colors.hex2color(colors.cnames[spec]))
def _erads2bt(self, data, channel_name):
    """Computation based on effective radiance.

    Converts effective radiance to brightness temperature using the
    per-platform, per-channel ALPHA/BETA/VC calibration coefficients.
    """
    cal_info = CALIB[self.platform_id][channel_name]
    alpha = cal_info["ALPHA"]
    beta = cal_info["BETA"]
    # Reuse the coefficient dict fetched above instead of performing a
    # second, redundant CALIB lookup for the same channel.
    wavenumber = cal_info["VC"]
    return (self._tl15(data, wavenumber) - beta) / alpha
def numlistbetween(num1, num2, option='list', listoption='string'):
if option == 'list':
if listoption == 'string':
output = ''
output += str(num1)
for currentnum in range(num1 + 1, num2 + 1):
output += ','
output += str(currentnum)
... | List Or Count The Numbers Between Two Numbers |
def _discover_uri_type(uri):
    """Given a ``uri``, determine if it is internal or external.

    A uri with a network location is external; otherwise a ``data:``
    scheme marks an inline reference and anything else is internal.
    """
    parsed = urlparse(uri)
    if parsed.netloc:
        return EXTERNAL_REFERENCE_TYPE
    if parsed.scheme == 'data':
        return INLINE_REFERENCE_TYPE
    return INTERNAL_REFERENCE_TYPE
def _handle_streamer_finished(self, index, succeeded, highest_ack):
    """Callback when a streamer finishes processing.

    Rolls the streamer back to the highest reading acknowledged by the
    streaming subsystem.

    NOTE(review): the ``succeeded`` flag is never consulted and the
    final argument to ``acknowledge_streamer`` is hard-coded ``False``
    — confirm that is intentional.
    """
    self._logger.debug("Rolling back streamer %d after streaming, highest ack from streaming subsystem was %d", index, highest_ack)
    self.acknowledge_streamer(index, highest_ack, False)
def delete_record(self, record_id):
    """Delete a record with record_id.

    Issues a DELETE against the information-objects endpoint, expecting
    a 204 (No Content) response, and reports success as a dict.
    """
    endpoint = "informationobjects/{}".format(record_id)
    self._delete(urljoin(self.base_url, endpoint), expected_response=204)
    return {"status": "Deleted"}
def event_handler(msg: EventMsgDict) -> Event:
e = create_event_from_msg(msg)
if e.currentTarget is None:
if e.type not in ['mount', 'unmount']:
id = msg['currentTarget']['id']
logger.warning('No such element: wdom_id={}'.format(id))
return e
e.currentTarget.on_event_... | Handle events emitted on browser. |
async def message_fetcher_coroutine(self, loop):
Global.LOGGER.debug('registering callbacks for message fetcher coroutine')
self.isrunning = True
while self.isrunning:
loop.call_soon(self._fetch_messages)
loop.call_soon(self._perform_system_check)
await asynci... | Register callback for message fetcher coroutines |
def init_module(filesystem):
    """Initializes the fake module with the fake file system.

    Wires the shared fake filesystem into FakePath and installs the
    Windows/Posix path flavours used by the fake pathlib classes.
    """
    FakePath.filesystem = filesystem
    FakePathlibModule.PureWindowsPath._flavour = _FakeWindowsFlavour(
        filesystem)
    FakePathlibModule.PurePosixPath._flavour = _FakePosixFlavour(filesystem)
def live_chat_banner(context):
context = copy(context)
oldchat = LiveChat.chat_finder.get_last_live_chat()
if oldchat:
context['last_live_chat'] = {
'title': oldchat.title,
'chat_ends_at': oldchat.chat_ends_at,
'expert': oldchat.expert,
'url': reverse(... | Display any available live chats as advertisements. |
def _get_current_output(self):
output = []
for item in self.items:
out = self.py3.get_output(item)
if out and "separator" not in out[-1]:
out[-1]["separator"] = True
output += out
return output | Get child modules output. |
def short_path(path, cwd=None):
    """Return relative or absolute path name, whichever is shortest.

    Non-string inputs are returned untouched. The relative form is
    computed against ``cwd`` (defaulting to the current directory);
    ties favour the absolute form.
    """
    if not isinstance(path, str):
        return path
    base = os.getcwd() if cwd is None else cwd
    absolute = os.path.abspath(path)
    relative = os.path.relpath(path, base)
    return absolute if len(absolute) <= len(relative) else relative
def delete(self, item):
    """Deletes the specified item.

    Builds the resource URI from the collection base and the item's id
    (resolved via ``utils.get_id``) and issues the DELETE request.
    """
    uri = "/%s/%s" % (self.uri_base, utils.get_id(item))
    return self._delete(uri)
def resample(grid, wl, flux):
    """Resample spectrum onto desired grid.

    Builds a linear interpolant over (wl, flux) and evaluates it at the
    grid points.
    """
    interpolator = interpolate.interp1d(wl, flux)
    return interpolator(grid)
def refresh(self):
kwd = {
'pager': '',
'title': '',
}
self.render('list/post_list.html',
kwd=kwd,
userinfo=self.userinfo,
view=MPost.query_dated(10),
postrecs=MPost.query_dated(10),
... | List the post of dated. |
def list_qos_policies(self, retrieve_all=True, **_params):
    """Fetches a list of all qos policies for a project.

    Delegates to the generic ``list`` helper with the QoS policies
    endpoint; extra keyword params become query-string filters.
    """
    return self.list('policies', self.qos_policies_path,
                     retrieve_all, **_params)
def dipole_moment(r_array, charge_array):
    """Return the dipole moment of a neutral system.

    Computes sum_i q_i * r_i over all particles; charges are broadcast
    across the coordinate axis.
    """
    weighted = r_array * charge_array[:, np.newaxis]
    return weighted.sum(axis=0)
def insert(self, item):
    """Insert a new item. If equal keys are found, add to the left."""
    key = self._key(item)
    # bisect_left places the new entry before any equal keys.
    pos = bisect_left(self._keys, key)
    self._keys.insert(pos, key)
    self._items.insert(pos, item)
def redeem(self, account_code):
redemption_path = '%s/redeem' % (self.redemption_code)
if hasattr(self, '_url'):
url = urljoin(self._url, '/redeem')
else:
url = urljoin(recurly.base_uri(), self.collection_path + '/' + redemption_path)
recipient_account = _Recipien... | Redeem this gift card on the specified account code |
def download(self):
    """Downloads HTML from url.

    Stores the raw HTTP response in ``self.page`` and the parsed lxml
    tree in ``self.tree``.
    """
    self.page = requests.get(self.url)
    self.tree = html.fromstring(self.page.text)
def _distance(self, x0, y0, x1, y1):
dx = x1-x0
dy = y1-y0
if self.pix:
dx[ dx > self.Lx/2 ] -= self.Lx
dx[ dx < -self.Lx/2 ] += self.Lx
if self.piy:
dy[ dy > self.Ly/2 ] -= self.Ly
dy[ dy < -self.Ly/2 ] += self.Ly
return dx, dy | Utitlity function to compute distance between points. |
def cmd_dropobject(self, obj):
    """drop an object on the map

    Uses the map module's last click position; a repeated command at
    the identical position is ignored to avoid double drops.
    """
    latlon = self.module('map').click_position
    # Debounce: a second drop at exactly the same click is a no-op.
    if self.last_click is not None and self.last_click == latlon:
        return
    self.last_click = latlon
    if latlon is not None:
        obj.setpos(latlon[0], latlon[1])
        self.aircraft.append(obj)
def afx_adafactor():
    """Adafactor with recommended learning rate schedule."""
    hparams = afx_adam()
    overrides = {
        "optimizer": "Adafactor",
        "learning_rate_schedule": "rsqrt_decay",
        "learning_rate_warmup_steps": 10000,
    }
    for name, value in overrides.items():
        setattr(hparams, name, value)
    return hparams
def run_simulation(c1, c2):
print('running simulation...')
traits = character.CharacterCollection(character.fldr)
c1 = traits.generate_random_character()
c2 = traits.generate_random_character()
print(c1)
print(c2)
rules = battle.BattleRules(battle.rules_file)
b = battle.Battle(c1, c2, tr... | using character and planet, run the simulation |
def handle_cmd_options():
    """Get the options from the command line.

    Returns the (options, args) pair produced by OptionParser.
    """
    parser = OptionParser()
    # Fixed help text: it previously read "print any warnings", the
    # opposite of what a --silent flag does.
    parser.add_option("-s", "--silent", action="store_true", dest="silent",
                      help="suppress any warnings", default=False)
    (options, args) = parser.parse_args()
    return options, args
def _get_response(self, **kwargs):
url = self.read_url + "?output=JSON&token=%s" % self.read_token
for key in kwargs:
if key and kwargs[key]:
val = kwargs[key]
if isinstance(val, (list, tuple)):
val = ",".join(val)
url += "&... | Make the GET request. |
def extern_clone_val(self, context_handle, val):
    """Clone the given Handle.

    Resolves the context from its CFFI handle, then converts the first
    element of ``val`` back into a value owned by that context.
    """
    c = self._ffi.from_handle(context_handle)
    return c.to_value(self._ffi.from_handle(val[0]))
def _get_thumbnail_url(image):
    """Given a large image, return the thumbnail url.

    Inserts THUMB_EXT between the base name and the extension of the
    image's url.
    """
    base, ext = splitext(image.url)
    return f'{base}{THUMB_EXT}{ext}'
def mouseMoveEvent(self, event):
text = self.get_line_at(event.pos())
if get_error_match(text):
if not self.__cursor_changed:
QApplication.setOverrideCursor(QCursor(Qt.PointingHandCursor))
self.__cursor_changed = True
event.accept()
... | Show Pointing Hand Cursor on error messages |
def printout(*args, **kwargs):
color = kwargs.pop('color', {})
style = kwargs.pop('style', {})
prefx = kwargs.pop('prefix', '')
suffx = kwargs.pop('suffix', '')
ind = kwargs.pop('indent', 0)
print_args = []
for arg in args:
arg = str(arg)
arg = colorize(arg, **color)
... | Print function with extra options for formating text in terminals. |
def init_edge_number(self) -> int:
    """Return the number of edges present in the non-compressed graph.

    Edges are deduplicated as unordered pairs, so (a, b) and (b, a)
    count once.
    """
    unique_edges = {frozenset(edge) for edge in self.initial_edges()}
    return len(unique_edges)
def spin_sx(self):
    """Returns the x-component of the spin of the secondary mass.

    Delegates to ``conversions.secondary_spin`` with both masses and
    both x-spin components; presumably that helper selects the spin of
    the less massive object — confirm against its implementation.
    """
    return conversions.secondary_spin(self.mass1, self.mass2, self.spin1x,
                                      self.spin2x)
def fix_e303(self, result):
delete_linenum = int(result['info'].split('(')[1].split(')')[0]) - 2
delete_linenum = max(1, delete_linenum)
cnt = 0
line = result['line'] - 2
modified_lines = []
while cnt < delete_linenum and line >= 0:
if not self.source[line].st... | Remove extra blank lines. |
def build_directory():
    """Build the directory for Whoosh database, and locale.

    Creates both directories if missing; existing directories are left
    untouched.
    """
    # exist_ok=True avoids the check-then-create race of the previous
    # os.path.exists() guards and collapses the if/else boilerplate.
    os.makedirs('locale', exist_ok=True)
    os.makedirs(WHOOSH_DB_DIR, exist_ok=True)
def author_name_from_json(author_json):
"concatenate an author name from json data"
author_name = None
if author_json.get('type'):
if author_json.get('type') == 'group' and author_json.get('name'):
author_name = author_json.get('name')
elif author_json.get('type') == 'person' and... | concatenate an author name from json data |
def profile(request):
serializer_class = registration_settings.PROFILE_SERIALIZER_CLASS
if request.method in ['POST', 'PUT', 'PATCH']:
partial = request.method == 'PATCH'
serializer = serializer_class(
instance=request.user,
data=request.data,
partial=partial,... | Get or set user profile. |
def update_selected(self, linenum):
    """Updates the dropdowns to reflect the current class and function.

    Finds the enclosing functions and classes for ``linenum`` and
    refreshes the method and class combo boxes respectively.
    """
    self.parents = _get_parents(self.funcs, linenum)
    update_selected_cb(self.parents, self.method_cb)
    self.parents = _get_parents(self.classes, linenum)
    update_selected_cb(self.parents, self.class_cb)
def geometryType(self):
    """returns the feature's geometry type

    Lazily derives the type from the geometry, falling back to "Table"
    for geometry-less features, and caches the result in _geomType.
    """
    if self._geomType is None:
        has_geometry = self.geometry is not None
        self._geomType = self.geometry.type if has_geometry else "Table"
    return self._geomType
def home(self) -> str:
    """Return the robot to the home position and update the position tracker.

    Returns the literal status string 'Homed' on completion.
    """
    self.hardware.home()
    self.current_position = self._position()
    return 'Homed'
def publish(self, topic, payload = None, qos = 0, retain = False):
payloadlen = len(payload)
if topic is None or qos < 0 or qos > 2:
print "PUBLISH:err inval"
return NC.ERR_INVAL
if payloadlen > (250 * 1024 * 1204):
self.logger.error("PUBLISH:err payload len:%... | Publish some payload to server. |
def element(self, inp=None):
    """Return a complex number from ``inp`` or from scratch.

    Single-entry arrays are collapsed to their scalar; ``None`` yields
    the zero element 0+0j.
    """
    if inp is None:
        return complex(0.0, 0.0)
    if isinstance(inp, np.ndarray):
        return complex(inp.reshape([1])[0])
    return complex(inp)
def search(self, start_ts, end_ts):
    """Called to query Mongo for documents in a time range.

    Iterates every meta collection and yields each document whose
    ``_ts`` falls within [start_ts, end_ts] inclusive.
    """
    for meta_collection_name in self._meta_collections():
        meta_coll = self.meta_database[meta_collection_name]
        for ts_ns_doc in meta_coll.find(
            {"_ts": {"$lte": end_ts, "$gte": start_ts}}
        ):
            yield ts_ns_doc
def _get_taxids(self, taxids=None):
taxid_keys = set(self.taxid2asscs.keys())
return taxid_keys if taxids is None else set(taxids).intersection(taxid_keys) | Return user-specified taxids or taxids in self.taxid2asscs |
def mr_dim_ind(self):
mr_dim_ind = self._cube.mr_dim_ind
if self._cube.ndim == 3:
if isinstance(mr_dim_ind, int):
if mr_dim_ind == 0:
return None
return mr_dim_ind - 1
elif isinstance(mr_dim_ind, tuple):
mr_dim_i... | Get the correct index of the MR dimension in the cube slice. |
def graphql_impl(
schema,
source,
root_value,
context_value,
variable_values,
operation_name,
field_resolver,
type_resolver,
middleware,
execution_context_class,
) -> AwaitableOrValue[ExecutionResult]:
schema_validation_errors = validate_schema(schema)
if schema_validatio... | Execute a query, return asynchronously only if necessary. |
def running_apps(device_id):
    """Get running apps via HTTP GET.

    Rejects malformed device ids with 403 and unknown devices with 404,
    otherwise returns the device's running apps as JSON.
    """
    if not is_valid_device_id(device_id):
        abort(403)
    if device_id not in devices:
        abort(404)
    return jsonify(running_apps=devices[device_id].running_apps)
def _token_to_ids(self, token):
cache_location = hash(token) % self._cache_size
cache_key, cache_value = self._token_to_ids_cache[cache_location]
if cache_key == token:
return cache_value
subwords = self._token_to_subwords(token)
ids = []
for subword in subwords:
if subword == _UNDER... | Convert a single token to a list of integer ids. |
def _read_checkpoint_vars(model_path):
    """Return the (adapted) checkpoint reader and the set of variable names it holds."""
    reader = tf.train.NewCheckpointReader(model_path)
    reader = CheckpointReaderAdapter(reader)
    ckpt_vars = reader.get_variable_to_shape_map().keys()
    return reader, set(ckpt_vars)
def remover(file_path):
    """Delete a file or directory path only if it exists.

    Returns True when something was removed, False when the path named
    neither a file nor a directory.
    """
    if os.path.isfile(file_path):
        os.remove(file_path)
        return True
    if os.path.isdir(file_path):
        shutil.rmtree(file_path)
        return True
    return False
async def sdiff(self, keys, *args):
    """Return the difference of sets specified by ``keys``.

    ``keys`` may be a single key or an iterable; extra positional args
    are appended via ``list_or_args`` before issuing SDIFF.
    """
    args = list_or_args(keys, args)
    return await self.execute_command('SDIFF', *args)
def _delete_duplicates(l, keep_last):
seen=set()
result=[]
if keep_last:
l.reverse()
for i in l:
try:
if i not in seen:
result.append(i)
seen.add(i)
except TypeError:
result.append(i)
if keep_last:
result.reverse... | Delete duplicates from a sequence, keeping the first or last. |
def _parseAttrs(self, attrsStr):
attributes = dict()
for attrStr in self.SPLIT_ATTR_COL_RE.split(attrsStr):
name, vals = self._parseAttrVal(attrStr)
if name in attributes:
raise GFF3Exception(
"duplicated attribute name: {}".format(name),
... | Parse the attributes and values |
def delete(self, mail):
    """Delete a mail from the filesystem.

    In dry-run mode the file is kept and only a log entry is written.
    """
    # Note: the counter is incremented even when dry_run skips the
    # actual unlink, so stats reflect what *would* be deleted.
    self.stats['mail_deleted'] += 1
    if self.conf.dry_run:
        logger.info("Skip deletion of {!r}.".format(mail))
        return
    logger.debug("Deleting {!r}...".format(mail))
    os.unlink(mail.path)
    logger.info("{} deleted.".format(mail.path))
def num2bytes(value, size):
    """Convert an unsigned integer to MSB-first bytes with specified size.

    The value must fit in ``size`` bytes; leftover high bits trigger
    the assertion below.
    """
    out = []
    remaining = value
    for _ in range(size):
        # Prepend so the most significant byte ends up first.
        out.insert(0, remaining & 0xFF)
        remaining = remaining >> 8
    assert remaining == 0
    return bytes(bytearray(out))
def from_dict(ddict):
d = Definition(ddict['name'], ddict['line'], ddict['column'],
ddict['icon'], ddict['description'],
ddict['user_data'], ddict['path'])
for child_dict in ddict['children']:
d.children.append(Definition.from_dict(child_dict))
... | Deserializes a definition from a simple dict. |
def deploy(self, environment, target_name, stream_output=None):
try:
remote_server_command(
[
"rsync", "-lrv", "--safe-links", "--munge-links",
"--delete", "--inplace", "--chmod=ugo=rwX",
"--exclude=.datacats-environment",
... | Return True if deployment was successful |
def output_notebook(
d3js_url="//d3js.org/d3.v3.min",
requirejs_url="//cdnjs.cloudflare.com/ajax/libs/require.js/2.1.10/require.min.js",
html_template=None
):
if html_template is None:
html_template = read_lib('html', 'setup')
setup_html = populate_template(
html_template... | Import required Javascript libraries to Jupyter Notebook. |
def contents_list_pairs(self):
    """Iterator over pairs of normal and hidden contents.

    For each name-tuple listed in info['contents_lists'], yields the
    corresponding attributes of the generator as a tuple.
    """
    groups = self.info.get('contents_lists', [])
    return (
        tuple(getattr(self.generator, name) for name in names)
        for names in groups
    )
def similarity(self, other):
if len(self.items) > len(other.items):
first, second = self, other
else:
first, second = other, self
items = list(first.items)
length = len(items)
sim = self.Similarity(0.0 if length else 1.0)
cname = self.__class__.__n... | Calculate similarity based on best matching permutation of items. |
def warp_object(self, tileMapObj):
    """Warp the tile map object from one warp to another.

    If the destination warp lives on a different map, that map is
    loaded first; the object is then moved to the exit coordinates.
    """
    print("Collision")
    if tileMapObj.can_warp:
        if self.map_association != self.exitWarp.map_association:
            # Bug fix: 'exitWarp' was referenced as a bare name here,
            # which raised NameError; it is an attribute of self.
            TileMapManager.load(self.exitWarp.map_association)
        tileMapObj.parent.coords = self.exitWarp.coords
def favstar(self, class_name, obj_id, action):
session = db.session()
FavStar = models.FavStar
count = 0
favs = session.query(FavStar).filter_by(
class_name=class_name, obj_id=obj_id,
user_id=g.user.get_id()).all()
if action == 'select':
if not... | Toggle favorite stars on Slices and Dashboard |
def fan_speed(self, value):
    """Verifies the value is between 1 and 9 inclusively.

    Raises RoasterValueError for anything outside that range, otherwise
    stores the speed.
    """
    valid_speeds = range(1, 10)
    if value not in valid_speeds:
        raise exceptions.RoasterValueError
    self._fan_speed.value = value
def save(self):
    """Save current property list representation to the original file.

    Serialises the soup tree back to text and overwrites self.filename.
    """
    with open(self.filename, 'w') as plist_file:
        plist_file.write(str(self.soup))
def Set(self, value, context=None):
if self.has_error: return
if self.value is None:
self.value = value
self._context["old_value"] = value
self._context.update({"old_" + k: v for k, v in context.items()})
elif self.value != value:
self.has_error = True
self._context["new_value"... | Receives a value for the object and some context on its source. |
def update_fileserver(self, interval, backends):
def _do_update():
log.debug(
'Performing fileserver updates for items with an update '
'interval of %d', interval
)
for backend, update_args in six.iteritems(backends):
backend_na... | Threading target which handles all updates for a given wait interval |
def _ods2code(self):
ods = ODSReader(self.ods_file, clonespannedcolumns=True)
tables = ods.sheets
for tab_id, table in enumerate(tables):
for row_id in xrange(len(table)):
for col_id in xrange(len(table[row_id])):
key = row_id, col_id, tab_id
... | Updates code in code_array |
def open(self, update=False):
filename = os.path.basename(self.source_path)
folder, _ext = os.path.splitext(filename)
self.path = os.path.sep.join([self.directory, folder, filename])
if not os.path.exists(os.path.dirname(self.path)):
os.makedirs(os.path.dirname(self.path))
... | Opens pdf file to read from. |
def update_security_of_password(self, ID, data):
    """Update security of a password.

    PUTs ``data`` to the password's security endpoint.
    """
    # Lazy %-style logging args: the message is only formatted when the
    # INFO level is actually enabled.
    log.info('Update security of password %s with %s', ID, data)
    self.put('passwords/%s/security.json' % ID, data)
def _ParseWtmp():
users = {}
wtmp_struct_size = UtmpStruct.GetSize()
filenames = glob.glob("/var/log/wtmp*") + ["/var/run/utmp"]
for filename in filenames:
try:
wtmp = open(filename, "rb").read()
except IOError:
continue
for offset in range(0, len(wtmp), wtmp_struct_size):
try:
... | Parse wtmp and utmp and extract the last logon time. |
def received_message(self, m):
    """Push upstream messages to downstream.

    Forwards each upstream websocket message verbatim to the uwsgi
    downstream websocket.
    """
    m = str(m)
    logger.debug("Incoming upstream WS: %s", m)
    uwsgi.websocket_send(m)
    logger.debug("Send ok")
def read_rcfile():
files = [
'{}/.millipederc'.format(os.environ.get('HOME')),
'/usr/local/etc/millipederc',
'/etc/millipederc',
]
for filepath in files:
if os.path.isfile(filepath):
with open(filepath) as rcfile:
return parse_rcfile(rcfile)
re... | Try to read a rcfile from a list of paths |
def getTextWords(page):
    """Return the text words as a list with the bbox for each word.

    Builds the page's display list and text page, extracts the word
    list, and releases the intermediate objects promptly.
    """
    CheckParent(page)
    dl = page.getDisplayList()
    tp = dl.getTextPage()
    l = tp._extractTextWords_AsList()
    del dl
    del tp
    return l
def apply(self, data, data_type='point', reference=None, **kwargs):
if data_type == 'point':
return self.apply_to_point(data)
elif data_type == 'vector':
return self.apply_to_vector(data)
elif data_type == 'image':
return self.apply_to_image(data, reference, *... | Apply transform to data |
def write_array_empty(self, key, value):
    """write a 0-len array

    Stores a placeholder array and records the original dtype and shape
    as node attributes so the empty array can be reconstructed on read.
    """
    arr = np.empty((1,) * value.ndim)
    self._handle.create_array(self.group, key, arr)
    getattr(self.group, key)._v_attrs.value_type = str(value.dtype)
    getattr(self.group, key)._v_attrs.shape = value.shape
async def insert(self, task: Task) -> None:
if not isinstance(task, Task):
task = task()
if task.name not in self.all_tasks:
task.tasky = self
self.all_tasks[task.name] = task
await task.init()
elif task != self.all_tasks[task.name]:
ra... | Insert the given task class into the Tasky event loop. |
def initialize(dirs):
    """Initialize the biolite database to load provenance information.

    Only acts when biolite is importable and a work directory is
    configured; points biolite at a provenance-local sqlite file and
    opens the connection.
    """
    if biolite and dirs.get("work"):
        base_dir = utils.safe_makedir(os.path.join(dirs["work"], "provenance"))
        p_db = os.path.join(base_dir, "biolite.db")
        biolite.config.resources["database"] = p_db
        biolite.database.connect()
def mag_calibration(self):
    """Perform magnetometer calibration for current IMU.

    Opens the magnetometer sampling dialog for the IMU selected in the
    spinner; if the user accepts, computes calibration from the
    collected samples.
    """
    self.calibration_state = self.CAL_MAG
    self.mag_dialog = SK8MagDialog(self.sk8.get_imu(self.spinIMU.value()), self)
    if self.mag_dialog.exec_() == QDialog.Rejected:
        return
    self.calculate_mag_calibration(self.mag_dialog.samples)
def unlearnValue(self):
    """Unlearn a parameter value by setting it back to its default."""
    default = self.defaultParamInfo.get(field="p_filename",
                                        native=0, prompt=0)
    self.choice.set(default)
def catch_no_credentials(message, **info):
try:
yield
except NoCredentialsError as error:
if hasattr(error, "response"):
info['error_code'] = error.response["ResponseMetadata"]["HTTPStatusCode"]
info['error_message'] = error.response["Error"]["Message"]
else:
... | Turn a NoCredentialsError into a BadAmazon |
def getConfigDirectory():
    """Determines the platform-specific config directory location for ue4cli.

    Windows uses %APPDATA%/ue4cli; every other platform uses
    $HOME/.config/ue4cli.
    """
    if platform.system() == 'Windows':
        base = os.environ['APPDATA']
        return os.path.join(base, 'ue4cli')
    return os.path.join(os.environ['HOME'], '.config', 'ue4cli')
def check_venv(self):
if self.zappa:
venv = self.zappa.get_current_venv()
else:
venv = Zappa.get_current_venv()
if not venv:
raise ClickException(
click.style("Zappa", bold=True) + " requires an " + click.style("active virtual environment", bol... | Ensure we're inside a virtualenv. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.