| code | docstring |
|---|---|
def _handle_end_relation(self):
    """Handle a closing relation element by materializing the accumulated attributes."""
    attributes = dict(self._curr)
    self._result.append(Relation(result=self._result, **attributes))
    self._curr = {}
def romanize(number):
    """Convert `number` to a Roman numeral string using the NUMERALS table."""
    parts = []
    remaining = number
    for numeral, value in NUMERALS:
        count, remaining = divmod(remaining, value)
        parts.append(numeral * count)
    return ''.join(parts)
def old_values(self):
def get_old_values_and_key(item):
values = item.old_values
values.update({self._key: item.past_dict[self._key]})
return values
return [get_old_values_and_key(el)
for el in self._get_recursive_difference('all')
if e... | Returns the old values from the diff |
def loadable_modules(self):
with self._mutex:
if not self._loadable_modules:
self._loadable_modules = []
for mp in self._obj.get_loadable_modules():
self._loadable_modules.append(utils.nvlist_to_dict(mp.properties))
return self._loadable_mo... | The list of loadable module profile dictionaries. |
def admin_docker_list_view(context, request):
    """Show a paginated list of docker images."""
    current_page = int(request.params.get('page', 1))
    paginator = Page(
        context.all,
        url_maker=lambda p: request.path_url + "?page=%s" % p,
        page=current_page,
        items_per_page=6,
    )
    return {'paginator': paginator}
def SendKeys(keys,
pause=0.05,
with_spaces=False,
with_tabs=False,
with_newlines=False,
turn_off_numlock=True):
"Parse the keys and type them"
keys = parse_keys(keys, with_spaces, with_tabs, with_newlines)
for k in keys:
k.Run()
... | Parse the keys and type them |
def insert_def(self, index, def_item):
    """Insert `def_item` at `index` here and, recursively, in every sibling."""
    self.defs.insert(index, def_item)
    for sibling in self.others:
        sibling.insert_def(index, def_item)
def maybe_specialize(term, domain):
    """Return a domain-specialized copy of `term` if it is loadable; otherwise `term` unchanged."""
    if not isinstance(term, LoadableTerm):
        return term
    return term.specialize(domain)
def coerce_value(type, value):
if isinstance(type, GraphQLNonNull):
return coerce_value(type.of_type, value)
if value is None:
return None
if isinstance(type, GraphQLList):
item_type = type.of_type
if not isinstance(value, string_types) and isinstance(value, Iterable):
... | Given a type and any value, return a runtime value coerced to match the type. |
def _get_minutes_from_last_update(self, time):
time_from_last_update = time - self.last_update_time
return int(time_from_last_update.total_seconds() / 60) | How much minutes passed from last update to given time |
def _get_tls_context(self) -> ssl.SSLContext:
if self.tls_context is not None:
context = self.tls_context
else:
context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
context.check_hostname = bool(self.validate_certs)
if self.validate_certs:
... | Build an SSLContext object from the options we've been given. |
def force_unicode(value):
    """Return `value` as a unicode object, decoding plain str input as UTF-8 (Python 2 semantics)."""
    if not isinstance(value, (str, unicode)):
        # Non-string input: coerce via unicode() first.
        value = unicode(value)
    if isinstance(value, str):
        return value.decode('utf-8')
    return value
def read_book_uri_from_console():
db_path: str = input("Enter book_url or leave blank for the default settings value: ")
if db_path:
if db_path.startswith("sqlite://"):
db_path_uri = db_path
else:
db_path_uri = "file:///" + db_path
else:
cfg = settings.Setting... | Prompts the user to enter book url in console |
def focused_on_tweet(request):
slots = request.get_slot_map()
if "Index" in slots and slots["Index"]:
index = int(slots['Index'])
elif "Ordinal" in slots and slots["Index"]:
parse_ordinal = lambda inp : int("".join([l for l in inp if l in string.digits]))
index = parse_ordinal(slots[... | Return index if focused on tweet False if couldn't |
def _logoutclient(self, useruuid, clientuuid):
self.log("Cleaning up client of logged in user.", lvl=debug)
try:
self._users[useruuid].clients.remove(clientuuid)
if len(self._users[useruuid].clients) == 0:
self.log("Last client of user disconnected.", lvl=verbose)... | Log out a client and possibly associated user |
def spec(self):
    """Return a dict of row-selection values suitable for SelectiveRowGenerator."""
    return {
        'headers': self.header_lines,
        'start': self.start_line,
        'comments': self.comment_lines,
        'end': self.end_line,
    }
def imu_changed(self, val):
    """Handle a change of the IMU index spinner: update the current IMU id and refresh the display."""
    device_name = self.sk8.get_device_name()
    self.current_imuid = '{}_IMU{}'.format(device_name, val)
    self.update_data_display(self.get_current_data())
def element_type(self, type_):
    """Return the declaration of this container's element (value\\mapped) type."""
    lookup_args = (
        type_,
        self.element_type_index,
        self.element_type_typedef,
        'container_element_type',
    )
    return self.__find_xxx_type(*lookup_args)
def entry_stats(entries, top_n=10):
wc = Counter()
for content in entries.values_list("rendered_content", flat=True):
content = strip_tags(content)
content = re.sub('\s+', ' ', content)
content = re.sub('[^A-Za-z ]+', '', content)
words = [w.lower() for w in content.split()]
... | Calculates stats for the given ``QuerySet`` of ``Entry``s. |
def key_file(self):
    """Write the auth key to an owner-read-only temp file and return its path, or None if unset."""
    if not self.auth_key:
        return None
    path = os.path.join(orchestration_mkdtemp(), 'key')
    with open(path, 'w') as handle:
        handle.write(self.auth_key)
    # Restrict the key file to owner read-only.
    os.chmod(path, stat.S_IRUSR)
    return path
def copy(self):
    """Create a shallow copy: record and info dicts are copied, the rest is shared."""
    cls = self.__class__
    return cls(self.record.copy(), self.variables, self.info.copy(), self.vartype)
def stream(self, chunk_size=64*1024):
    """Return a generator that iterates over the file contents in binary chunks."""
    params = {'mode': 'bin'}
    return self.jfs.stream(url=self.path, params=params, chunk_size=chunk_size)
def _apply_cond(self, apply_fn, grad, var, *args, **kwargs):
grad_acc = self.get_slot(var, "grad_acc")
def apply_adam(grad_acc, apply_fn, grad, var, *args, **kwargs):
total_grad = (grad_acc + grad) / tf.cast(self._n_t, grad.dtype)
adam_op = apply_fn(total_grad, var, *args, **kwargs)
with tf.co... | Apply conditionally if counter is zero. |
def on_go(self, target):
if not target:
Log.error("expecting target")
with self.lock:
if not self._go:
DEBUG and self._name and Log.note("Adding target to signal {{name|quote}}", name=self.name)
if not self.job_queue:
self.job_q... | RUN target WHEN SIGNALED |
def handle_typical_memberdefs_no_overload(self, signature, memberdef_nodes):
for n in memberdef_nodes:
self.add_text(['\n', '%feature("docstring") ', signature, ' "', '\n'])
if self.with_function_signature:
self.add_line_with_subsequent_indent(self.get_function_signature(... | Produce standard documentation for memberdef_nodes. |
def xml(self, operator='set', indent = ""):
    """Serialize this metadata field to an XML <meta> element string."""
    parts = [indent, '<meta id="', self.key, '"']
    if operator != 'set':
        # Non-default operators are emitted explicitly.
        parts += [' operator="', operator, '"']
    if not self.value:
        parts.append(' />')
    else:
        parts += ['>', self.value, '</meta>']
    return ''.join(parts)
def run(endpoint, name=None):
try:
if os.isatty(0):
for data in stream_skypipe_output(endpoint, name):
sys.stdout.write(data)
sys.stdout.flush()
else:
with skypipe_input_stream(endpoint, name) as stream:
for line in stream_stdin... | Runs the skypipe client |
def _get_biallelic_variant(self, variant, info, _check_alleles=True):
info = info.iloc[0, :]
assert not info.multiallelic
self._impute2_file.seek(info.seek)
genotypes = self._parse_impute2_line(self._impute2_file.readline())
variant_alleles = variant._encode_alleles([
... | Creates a bi-allelic variant. |
def ci_data(namespace, name, branch='master'):
with repository(namespace, name, branch) as (path, latest, cache):
if not path or not latest:
return {'build_success': NOT_FOUND, 'status': NOT_FOUND}
elif latest in cache:
return json.loads(cache[latest])
starting = {'status... | Returns or starts the ci data collection process |
def _assemble_modification(stmt):
sub_str = _assemble_agent_str(stmt.sub)
if stmt.enz is not None:
enz_str = _assemble_agent_str(stmt.enz)
if _get_is_direct(stmt):
mod_str = ' ' + _mod_process_verb(stmt) + ' '
else:
mod_str = ' leads to the ' + _mod_process_noun(s... | Assemble Modification statements into text. |
def setSize(self, w, h):
    """Resize the region to `w` x `h`; returns self for chaining."""
    for setter, dimension in ((self.setW, w), (self.setH, h)):
        setter(dimension)
    return self
def untlpydict2dcformatteddict(untl_dict, **kwargs):
ark = kwargs.get('ark', None)
domain_name = kwargs.get('domain_name', None)
scheme = kwargs.get('scheme', 'http')
resolve_values = kwargs.get('resolve_values', None)
resolve_urls = kwargs.get('resolve_urls', None)
verbose_vocabularies = kwargs... | Convert a UNTL data dictionary to a formatted DC data dictionary. |
def _get_redditor_listing(subpath=''):
def _listing(self, sort='new', time='all', *args, **kwargs):
kwargs.setdefault('params', {})
kwargs['params'].setdefault('sort', sort)
kwargs['params'].setdefault('t', time)
url = urljoin(self._url, subpath)
return self.reddit_session.ge... | Return function to generate Redditor listings. |
def _get_md5sum(self, fpath):
try:
current_md5 = hashlib.md5()
if isinstance(fpath, six.string_types) and os.path.exists(fpath):
with open(fpath, "rb") as fh:
for chunk in self._read_chunks(fh):
current_md5.update(chunk)
... | Calculate the md5sum of the specific image file |
def handle_time(msg):
    """Answer an internal time request with the local time as an epoch-style stamp."""
    local_stamp = calendar.timegm(time.localtime())
    return msg.copy(ack=0, payload=local_stamp)
def read_yaml(file_path, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
class OrderedLoader(Loader):
pass
def construct_mapping(loader, node):
loader.flatten_mapping(node)
return object_pairs_hook(loader.construct_pairs(node))
OrderedLoader.add_constructor(
yaml.resolver... | Read YAML file and return as python dictionary |
def _thread_worker_fn(samples, batchify_fn, dataset):
if isinstance(samples[0], (list, tuple)):
batch = [batchify_fn([dataset[i] for i in shard]) for shard in samples]
else:
batch = batchify_fn([dataset[i] for i in samples])
return batch | Threadpool worker function for processing data. |
def files_in_dir(path, extension):
    """Lazily enumerate the file names in `path` that end with `.extension`."""
    suffix = '.{0}'.format(extension)
    return (name for name in os.listdir(path) if name.endswith(suffix))
def ndims(self):
    """Return the rank of this shape, or None if it is unspecified."""
    if self._dims is None:
        return None
    if self._ndims is None:
        # Cache the rank on first access.
        self._ndims = len(self._dims)
    return self._ndims
def script(vm_):
script_name = config.get_cloud_config_value('script', vm_, __opts__)
if not script_name:
script_name = 'bootstrap-salt'
return salt.utils.cloud.os_script(
script_name,
vm_,
__opts__,
salt.utils.cloud.salt_config_to_yaml(
salt.utils.cloud.m... | Return the script deployment object |
def initGrid(self):
blinker = [(4, 4), (4, 5), (4, 6)]
toad = [(9, 5), (9, 6), (9, 7), (10, 4), (10, 5), (10, 6)]
glider = [(4, 11), (5, 12), (6, 10), (6, 11), (6, 12)]
r_pentomino = [(10, 60), (9, 61), (10, 61), (11, 61), (9, 62)]
self.grid = {}
if self.test:
... | Initialise the game grid |
def save_var(self, key, value, **kwargs):
'Save one variable to the database.'
self.__check_or_create_vars_table()
column_type = get_column_type(value)
tmp = quote(self.__vars_table_tmp)
self.execute(u'DROP TABLE IF EXISTS %s' % tmp, commit = False)
self.execute(u'CREATE TABLE %s (`value` %s)' %... | Save one variable to the database. |
def load_numpy(self, hash_list):
    """Load and return the numpy array stored under the single hash in `hash_list`."""
    assert len(hash_list) == 1
    self._check_hashes(hash_list)
    path = self.object_path(hash_list[0])
    with open(path, 'rb') as handle:
        # allow_pickle=False guards against arbitrary object deserialization.
        return np.load(handle, allow_pickle=False)
def commandline_text(bytestring):
    """Convert a command-line bytestring to unicode using the default filesystem encoding."""
    if six.PY3:
        # On Python 3 argv entries are already text.
        return bytestring
    return bytestring.decode(sys.getfilesystemencoding())
def conjugate_quat(quat):
    """Return the conjugate quaternion: vector part negated, scalar part unchanged."""
    x, y, z, w = quat.x, quat.y, quat.z, quat.w
    return Quat(-x, -y, -z, w)
def _refresh_multi_axis(self):
d = self.declaration
self.viewbox = pg.ViewBox()
_plots = [c for c in self.parent().children() if isinstance(c,AbstractQtPlotItem)]
i = _plots.index(self)
if i==0:
self.axis = self.widget.getAxis('right')
self.widget.showAxi... | If linked axis' are used, setup and link them |
def _set_box(self):
net_volume = 0.0
for idx, mol in enumerate(self.mols):
length = max([np.max(mol.cart_coords[:, i])-np.min(mol.cart_coords[:, i])
for i in range(3)]) + 2.0
net_volume += (length**3.0) * float(self.param_list[idx]['number'])
le... | Set the box size for the molecular assembly |
def auto_invalidate(self):
    """Invalidate the cache once its time-to-live has elapsed."""
    expiry = self._invalidated + timedelta(seconds=self._timetolive)
    if datetime.now() > expiry:
        self.invalidate()
def sum_transactions(transactions):
workdays_per_year = 250
previous_date = None
rate = 0
day_sum = 0
for transaction in transactions:
date, action, value = _parse_transaction_entry(transaction)
if previous_date is None:
previous_date = date
elapsed = workdays.net... | Sums transactions into a total of remaining vacation days. |
def expanded_counts_map(self):
    """Return the counts scattered onto the full HEALPix map, or as-is when already dense."""
    if self.hpx._ipix is None:
        # No sparse index set: counts already cover the full map.
        return self.counts
    n_rows = self.counts.shape[0]
    output = np.zeros((n_rows, self.hpx._maxpix), self.counts.dtype)
    for row in range(n_rows):
        output[row][self.hpx._ipix] = self.counts[row]
    return output
def _get_securitygroupname_id(securitygroupname_list):
securitygroupid_set = set()
if not isinstance(securitygroupname_list, list):
securitygroupname_list = [securitygroupname_list]
params = {'Action': 'DescribeSecurityGroups'}
for sg in aws.query(params, location=get_location(),
... | Returns the SecurityGroupId of a SecurityGroupName to use |
def _get_thumbnail_filename(filename, append_text="-thumbnail"):
name, ext = os.path.splitext(filename)
return ''.join([name, append_text, ext]) | Returns a thumbnail version of the file name. |
def cli(env, account_id, content_url):
    """Cache one or more files on all CDN edge nodes."""
    cdn_manager = SoftLayer.CDNManager(env.client)
    cdn_manager.load_content(account_id, content_url)
def _check_ruby(ret, ruby, user=None):
match_version = True
match_micro_version = False
micro_version_regex = re.compile(r'-([0-9]{4}\.[0-9]{2}|p[0-9]+)$')
if micro_version_regex.search(ruby):
match_micro_version = True
if re.search('^[a-z]+$', ruby):
match_version = False
ruby =... | Check that ruby is installed |
def _filter(msgdata, mailparser, mdfolder, mailfilters):
if mailfilters:
for f in mailfilters:
msg = mailparser.parse(StringIO(msgdata))
rule = f(msg, folder=mdfolder)
if rule:
yield rule
return | Filter msgdata by mailfilters |
def read_with_selection(func):
    """Decorate a Table read method so a ``selection`` keyword filters the result."""
    def wrapper(*args, **kwargs):
        wanted = kwargs.pop('selection', None) or []
        table = func(*args, **kwargs)
        if wanted:
            return filter_table(table, wanted)
        return table
    return _safe_wraps(wrapper, func)
def versions_request(self):
    """List the available REST API versions reported by the service."""
    response = self.handle_api_exceptions('GET', '', api_ver='')
    return [str_dict(entry) for entry in response.json()]
def validate_complex(prop, value, xpath_map=None):
if value is not None:
validate_type(prop, value, dict)
if prop in _complex_definitions:
complex_keys = _complex_definitions[prop]
else:
complex_keys = {} if xpath_map is None else xpath_map
for complex_prop, c... | Default validation for single complex data structure |
def update(self):
    """Update per-CPU stats using the configured input method."""
    stats = self.get_init_value()
    if self.input_method == 'local':
        # Take a per-CPU percentage snapshot.
        stats = cpu_percent.get(percpu=True)
    self.stats = stats
    return self.stats
def order_sections(self, key, reverse=True):
    """Return the sections sorted by the instance attribute named `key`."""
    return sorted(self.sections,
                  key=lambda section: section.__dict__[key],
                  reverse=reverse)
def verification_update(self, cluster_id, status):
    """Start a verification for a cluster by PATCHing its verification status."""
    payload = {'verification': {'status': status}}
    return self._patch("/clusters/%s" % cluster_id, payload)
def draw_line(self, ax, line, force_trans=None):
coordinates, data = self.process_transform(line.get_transform(),
ax, line.get_xydata(),
force_trans=force_trans)
linestyle = utils.get_line_style(line)
... | Process a matplotlib line and call renderer.draw_line |
def json(self):
return {
"elevation": self.elevation,
"latitude": self.latitude,
"longitude": self.longitude,
"icao_code": self.icao_code,
"name": self.name,
"quality": self.quality,
"wban_ids": self.wban_ids,
"recen... | Return a JSON-serializeable object containing station metadata. |
def add_edge(self, u, v, attr_dict=None, **attr):
if attr_dict is None:
attr_dict = attr
else:
try:
attr_dict.update(attr)
except AttributeError:
raise NetworkXError(
"The attr_dict argument must be a dictionary."
... | Version of add_edge that only writes to the database once |
def normalized(self):
    """Return a unit-length copy of this vector."""
    length = self.magnitude()
    return Vector(self.x / length, self.y / length, self.z / length)
def _put(self, timestamp, value):
idx = self._lookup(timestamp)
if idx is not None:
self._values[idx] = (timestamp, value)
else:
self._values.append((timestamp, value)) | Replace the value associated with "timestamp" or add the new value |
def _clean_xmldict_single_dic(self, dictionary):
for k, v in dictionary.items():
if v is None:
dictionary[k] = '' | Every None replace by '' in the dic, as xml parsers puts None in those fiels, which is not valid for IAR |
def stop(self):
    """Stop the coordinator and flag every worker thread as interrupted."""
    if self.interrupted:
        return  # Already stopping; nothing to do.
    for worker in self.worker_threads:
        worker.interrupted = True
    self.interrupted = True
def _find_rule_no(self, mac):
ipt_cmd = ['iptables', '-L', '--line-numbers']
cmdo = dsl.execute(ipt_cmd, self._root_helper, log_output=False)
for o in cmdo.split('\n'):
if mac in o.lower():
rule_no = o.split()[0]
LOG.info('Found rule %(rule)s for %(mac... | Find rule number associated with a given mac. |
def deprecated(function):
def IssueDeprecationWarning(*args, **kwargs):
warnings.simplefilter('default', DeprecationWarning)
warnings.warn('Call to deprecated function: {0:s}.'.format(
function.__name__), category=DeprecationWarning, stacklevel=2)
return function(*args, **kwargs)
IssueDeprecatio... | Decorator to mark functions or methods as deprecated. |
def longest(*args):
    """Match the longest of the given grammar elements."""
    internal_assert(len(args) >= 2, "longest expects at least two args")
    combined = args[0] + skip_whitespace
    for alternative in args[1:]:
        combined ^= alternative + skip_whitespace
    return combined
def _calc_relative_path_lengths(self, x, y):
path_lengths = np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2)
total_length = np.sum(path_lengths)
cummulative_lengths = np.cumsum(path_lengths)
relative_path_lengths = cummulative_lengths / total_length
return relative_path_lengths | Determine the relative path length at each x,y position. |
def charge(self, user, vault_id=None):
    """Charge `user` via their stored vault record.

    If `vault_id` is not passed this assumes there is only one instance of
    user and vault_id in the db and looks the record up by user alone.
    NOTE(review): the body appears truncated — `user_vault` is fetched but
    never used; confirm the actual charging step against the full source.
    """
    # Precondition check via assert: stripped under -O, so callers must guarantee this.
    assert self.is_in_vault(user)
    if vault_id:
        user_vault = self.get(user=user, vault_id=vault_id)
    else:
        user_vault = self.get(user=user)
def get(args):
    """Fetch a river by name and pretty-print it as JSON; exit with status 1 if not found."""
    manager = RiverManager(args.hosts)
    river = manager.get(args.name)
    if not river:
        sys.exit(1)
    print(json.dumps(river, indent=2))
def printDeadCells(self):
    """Print, for each column, how many dead cells it contains (Python 2 print syntax)."""
    # Histogram of dead cells per column.
    columnCasualties = numpy.zeros(self.numberOfColumns())
    for cell in self.deadCells:
        col = self.columnForCell(cell)
        columnCasualties[col] += 1
    for col in range(self.numberOfColumns()):
        print col, columnCasualties[col]
def startswith(self, other):
    """Return True if this path starts with the nodes of `other`."""
    try:
        other = UrlPath.from_object(other)
    except ValueError:
        raise TypeError('startswith first arg must be UrlPath, str, PathParam, not {}'.format(type(other)))
    else:
        prefix = other._nodes
        return self._nodes[:len(prefix)] == prefix
def show_options(self):
from safe.gui.tools.options_dialog import OptionsDialog
dialog = OptionsDialog(
iface=self.iface,
parent=self.iface.mainWindow())
dialog.show_option_dialog()
if dialog.exec_():
self.dock_widget.read_settings()
from s... | Show the options dialog. |
def _storeAccessContext(snmpEngine):
execCtx = snmpEngine.observer.getExecutionContext('rfc3412.receiveMessage:request')
return {
'securityModel': execCtx['securityModel'],
'securityName': execCtx['securityName'],
'securityLevel': execCtx['securityLevel'],
... | Copy received message metadata while it lasts |
def have_authenticated_user(client_ip, repository, session_token):
if repository not in config['repositories']: return False
repository_path = config['repositories'][repository]['path']
conn = auth_db_connect(cpjoin(repository_path, 'auth_transient.db'))
user_lock = read_user_lock(repository_path)
a... | check user submitted session token against the db and that ip has not changed |
def on_failure(self, exc, task_id, args, kwargs, einfo):
    """Log the traceback that caused the task to fail, then defer to the base class."""
    traceback_text = getattr(einfo, 'traceback', None)
    log.error('[{}] failed due to {}'.format(task_id, traceback_text))
    super(LoggedTask, self).on_failure(exc, task_id, args, kwargs, einfo)
def all_host_infos():
output = []
output.append(["Operating system", os()])
output.append(["CPUID information", cpu()])
output.append(["CC information", compiler()])
output.append(["JDK information", from_cmd("java -version")])
output.append(["MPI information", from_cmd("mpirun -version")])
... | Summarize all host information. |
def extendMarkdown(self, md, md_globals=None):
if any(
x not in md.treeprocessors
for x in self.REQUIRED_EXTENSION_INTERNAL_NAMES):
raise RuntimeError(
"The attr_cols markdown extension depends the following"
" extensions which must pre... | Initializes markdown extension components. |
def skip_common_stack_elements(stacktrace, base_case):
    """Drop the leading frames that `stacktrace` shares with `base_case`.

    When no differing frame is found, fall back to the final frame alone.
    """
    for position, (trace_item, base_item) in enumerate(zip(stacktrace, base_case)):
        if trace_item != base_item:
            return stacktrace[position:]
    return stacktrace[-1:]
def _explode_lines(shape):
if shape.geom_type == 'LineString':
return [shape]
elif shape.geom_type == 'MultiLineString':
return shape.geoms
elif shape.geom_type == 'GeometryCollection':
lines = []
for geom in shape.geoms:
lines.extend(_explode_lines(geom))
... | Return a list of LineStrings which make up the shape. |
def _CreateRouter(self, router_cls, params=None):
if not router_cls.params_type and params:
raise ApiCallRouterDoesNotExpectParameters(
"%s is not configurable" % router_cls)
rdf_params = None
if router_cls.params_type:
rdf_params = router_cls.params_type()
if params:
rdf... | Creates a router with a given name and params. |
def reformat(found_sequences):
for (pdb_id, chain, file_name), sequence in sorted(found_sequences.iteritems()):
header = sequence[0]
assert(header[0] == '>')
tokens = header.split('|')
tokens[0] = tokens[0][:5]
assert(len(tokens[0]) == 5)
sequence[0] = "|".join(tokens... | Truncate the FASTA headers so that the first field is a 4-character ID. |
def add_date(self, date):
    """Add the given date to the textual representation held in `self.lines`."""
    updated_lines = self.parser.add_date(date, self.lines)
    self.lines = updated_lines
def _compute_e2_factor(self, imt, vs30):
e2 = np.zeros_like(vs30)
if imt.name == "PGV":
period = 1
elif imt.name == "PGA":
period = 0
else:
period = imt.period
if period < 0.35:
return e2
else:
idx = vs30 <= 1000... | Compute and return e2 factor, equation 19, page 80. |
def import_seaborn():
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
try:
import seaborn.apionly as sns
if (w and issubclass(w[-1].category, UserWarning) and
("seaborn.apionly module" in str(w[-1].message))):
... | import seaborn and handle deprecation of apionly module |
def load_from_json(file_path):
    """Load base64-encoded JSON stored at `file_path` and return the decoded object.

    Returns None when the file does not exist (matching the original's
    implicit-None behavior).
    """
    if not os.path.exists(file_path):
        return None
    # Close the file handle deterministically (the original leaked it).
    with open(file_path, 'rb') as handle:
        raw_data = handle.read()
    # base64.decodestring was removed in Python 3.9; decodebytes is its
    # direct, behavior-identical replacement.
    return json.loads(base64.decodebytes(raw_data).decode('utf-8'))
def projector_functions(self):
    """Return an OrderedDict of PAW projectors keyed by state label."""
    projectors = OrderedDict()
    for mesh, values, attrib in self._parse_all_radfuncs("projector_function"):
        projectors[attrib["state"]] = RadialFunction(mesh, values)
    return projectors
def remove_parameter(self, parameter_name):
    """Remove `parameter_name` from both the ordering list and the parameter map."""
    if parameter_name in self.paramorder:
        position = self.paramorder.index(parameter_name)
        del self.paramorder[position]
    if parameter_name in self._parameters:
        del self._parameters[parameter_name]
def field_pk_from_json(self, data):
    """Build a PkOnlyModel from a JSON dict naming the app, model, and pk."""
    target_model = get_model(data['app'], data['model'])
    return PkOnlyModel(self, target_model, data['pk'])
def rescan(self):
    """Clear the path-file cache and rescan every base path for new code files."""
    self._pathfiles = {}
    for base in self.basepaths:
        self.scan_path(base)
def colorize(lead, num, color):
    """Render "lead=num", colorized and width-15 padded when color output applies."""
    # Same short-circuit order as before: ANSIBLE_COLOR is only touched when num != 0.
    use_color = num != 0 and ANSIBLE_COLOR and color is not None
    if use_color:
        return "%s%s%-15s" % (stringc(lead, color), stringc("=", color), stringc(str(num), color))
    return "%s=%-4s" % (lead, str(num))
def get(self, object_type, object_id):
if object_id == 0:
return json_success(json.dumps([]))
query = db.session.query(TaggedObject).filter(and_(
TaggedObject.object_type == object_type,
TaggedObject.object_id == object_id))
tags = [{'id': obj.tag.id, 'name': ... | List all tags a given object has. |
def create_cell_renderer_text(self, tree_view, title="title", assign=0, editable=False):
    """Append a text-rendering column named `title` to `tree_view`."""
    renderer = Gtk.CellRendererText()
    renderer.set_property('editable', editable)
    column = Gtk.TreeViewColumn(title, renderer, text=assign)
    tree_view.append_column(column)
def __ipv4_netmask(value):
    """Validate an IPv4 netmask given as a dotted quad or an integer CIDR (0->32)."""
    errmsg = 'dotted quad or integer CIDR (0->32)'
    valid, value, _ = __int(value)
    if not (valid and 0 <= value <= 32):
        # Not an in-range CIDR integer; fall back to dotted-quad validation.
        valid = salt.utils.validate.net.netmask(value)
    return (valid, value, errmsg)
def updatePlayer(name, settings):
    """Update an existing PlayerRecord's settings and save it to the disk file."""
    # delPlayer presumably removes and returns the existing record — TODO confirm.
    player = delPlayer(name)
    # Validate before mutating; _validate is defined elsewhere in this module.
    _validate(settings)
    player.update(settings)
    player.save()
    # Re-register under the (possibly changed) player name.
    getKnownPlayers()[player.name] = player
    return player
def _parse(data, obj_name, attr_map):
parsed_xml = minidom.parseString(data)
parsed_objects = []
for obj in parsed_xml.getElementsByTagName(obj_name):
parsed_obj = {}
for (py_name, xml_name) in attr_map.items():
parsed_obj[py_name] = _get_minidom_tag_value(obj, xml_name)
... | parse xml data into a python map |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.